gt
stringclasses 1
value | context
stringlengths 2.05k
161k
|
|---|---|
// OpenAIL - Open Android Indoor Localization
// Copyright (C) 2015 Michal Nowicki ([email protected])
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
// TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package org.dg.main;
import java.util.ArrayList;
import java.util.List;
import org.dg.camera.CameraSaver;
import org.dg.camera.Preview;
import android.app.Activity;
import android.app.Fragment;
import android.hardware.Camera;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.util.Pair;
import android.view.LayoutInflater;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnLongClickListener;
import android.view.ViewGroup;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.EditText;
import android.widget.FrameLayout;
import android.widget.Switch;
import android.widget.TextView;
/**
* A fragment representing a single step in a wizard. The fragment shows a dummy
* title indicating the page number, along with some dummy text.
*
* <p>
* This class is used by the {@link CardFlipActivity} and
* {@link ScreenSlideActivity} samples.
* </p>
*/
public class ScreenSlidePageFragment extends Fragment {
// Log tag for this fragment.
private final String TAG = "ScreenSlidePageFragment";
/**
 * The argument key for the page number this fragment represents.
 */
public static final String ARG_PAGE = "page";
// public static final String DEVICE_ORIENTATION = "deviceOrientation";
/**
 * The fragment's page number, which is set to the argument value for
 * {@link #ARG_PAGE}.
 */
private int mPageNumber;
// private int mDeviceOrientation;
/**
 * UI-thread handlers used to post GUI updates from background code
 * (see updateGUIData and updateGUIPlaceRecognize).
 */
public Handler mHandlerOrient, mHandlerWiFiRecognition;
/**
 * Camera preview wrapper; created only when page 0 is inflated.
 */
public Preview preview = null;
/**
 * Localization/visualization view; assigned only when that page is inflated.
 */
LocalizationView localizationView;
// Camera injected via setCamera(); may be null until then.
Camera camera = null;
// Requested preview resolution; changed via setPreviewSize().
int previewWidth = 640, previewHeight = 480;
/**
* Factory method for this fragment class. Constructs a new fragment for the
* given page number.
*/
public static ScreenSlidePageFragment create(int pageNumber) {
    // Package the page index as a fragment argument so it survives recreation.
    final ScreenSlidePageFragment fragment = new ScreenSlidePageFragment();
    final Bundle arguments = new Bundle();
    arguments.putInt(ARG_PAGE, pageNumber);
    fragment.setArguments(arguments);
    return fragment;
}
// Required empty public constructor: the framework re-instantiates fragments
// reflectively, so state must come from arguments (see create()).
public ScreenSlidePageFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Read the page index supplied by create(); assumes arguments were set.
mPageNumber = getArguments().getInt(ARG_PAGE);
// mDeviceOrientation = getArguments().getInt(DEVICE_ORIENTATION);
}
/**
 * Inflates the layout for this fragment's page and wires up its widgets.
 *
 * Page 0: camera preview (tap = take picture, long press = decode QR).
 * Page 1: control panel with the localization buttons and switches.
 * Page 2: textual sensor read-outs (filled by the update runnables).
 * Any other page index: the localization/visualization view.
 *
 * @return the inflated root view for this page
 */
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
        Bundle savedInstanceState) {
    final ViewGroup rootView;
    if (mPageNumber == 0) {
        Log.d("TEST", "Created camera view");
        Log.d(TAG, String.format("mPageNumber = %d", mPageNumber));
        rootView = (ViewGroup) inflater.inflate(
                R.layout.fragment_screen_slide_page0, container, false);
        // Camera preview surface.
        SurfaceView surfView = (SurfaceView) rootView
                .findViewById(R.id.SurfaceView01);
        preview = new Preview(surfView, previewWidth, previewHeight);
        // Long press -> QR code scanning.
        surfView.setOnLongClickListener(new OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                Log.d("Camera::preview", "onLongClick");
                onSomeClick(v, "Decode QR");
                return false;
            }
        });
        // Short tap -> take an image.
        surfView.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                Log.d("Camera::preview", "onClick");
                onSomeClick(v, "Take picture");
            }
        });
        preview.setCamera(camera);
        preview.measure(surfView.getWidth(), surfView.getHeight());
        // MATCH_PARENT is the non-deprecated alias of FILL_PARENT (same value).
        preview.setLayoutParams(new LayoutParams(LayoutParams.MATCH_PARENT,
                LayoutParams.MATCH_PARENT));
        preview.setKeepScreenOn(true);
    } else if (mPageNumber == 1) {
        rootView = (ViewGroup) inflater.inflate(
                R.layout.fragment_screen_slide_page1, container, false);
        // Start/stop localization
        initButtonStartLocalization(rootView, R.id.buttonStartLocalization);
        // Barometer on/off
        initSwitchBarometer(rootView, R.id.switchBarometer);
        // Inertial sensors on/off
        initSwitchInertialSensors(rootView, R.id.switchInertialSensors);
        // Stepometer on/off
        initSwitchStepometer(rootView, R.id.switchStepometer);
        // WiFi on/off
        initSwitchWiFi(rootView, R.id.switchWiFi);
        // Optimize graph from file
        initButtonStartGraphTestFromFile(rootView,
                R.id.buttonOptimizeGraphFromFile);
        // Process orientation estimation data from file
        initButtonStartOrientationEstimationFromFile(rootView,
                R.id.buttonOrientationTestFromFile);
        // Save WiFi map point
        initButtonSaveMapPoint(rootView, R.id.buttonSaveMapPoint);
        // Save VPR image
        initButtonSaveVPR(rootView, R.id.buttonSaveVPR);
        // Record all sensors
        initButtonRecordAll(rootView, R.id.buttonRecordAll);
        // Clear map
        initButtonClearNewMap(rootView, R.id.buttonClearNewMap);
        // Playback recorded data
        initButtonPlayback(rootView, R.id.buttonPlayback);
        // Test new functionality
        initButtonTest(rootView, R.id.buttonTest);
    } else if (mPageNumber == 2) {
        rootView = (ViewGroup) inflater.inflate(
                R.layout.fragment_screen_slide_page2, container, false);
    } else {
        // Page 3 and any unexpected page index fall back to the visualization
        // view. (The original condition was "mPageNumber == 3 || true", an
        // unconditional branch that also guaranteed rootView is assigned; a
        // plain else expresses the same thing without the puzzle.)
        Log.d("TEST", "Created localization view");
        rootView = (ViewGroup) inflater.inflate(
                R.layout.fragment_screen_slide_page_visualization,
                container, false);
        localizationView = (LocalizationView) rootView
                .findViewById(R.id.SurfaceViewLocalization);
    }
    // UI-thread handlers used by updateGUIData()/updateGUIPlaceRecognize().
    mHandlerOrient = new Handler();
    mHandlerWiFiRecognition = new Handler();
    return rootView;
}
/**
 * Selects one of three fixed preview resolutions.
 * 0 = 640x480, 1 = 1280x720, 2 = 1920x1080; any other value leaves the
 * current resolution unchanged.
 */
public void setPreviewSize(int previewSizeParam) {
    Log.d(TAG, "setPreviewSize");
    switch (previewSizeParam) {
    case 0:
        previewWidth = 640;
        previewHeight = 480;
        break;
    case 1:
        previewWidth = 1280;
        previewHeight = 720;
        break;
    case 2:
        previewWidth = 1920;
        previewHeight = 1080;
        break;
    }
}
/**
* @param rootView
*/
// Wires the inertial-sensors switch (disabled until localization starts).
private void initSwitchInertialSensors(final ViewGroup rootView,
        final int id) {
    final Switch inertialSwitch = (Switch) rootView.findViewById(id);
    inertialSwitch.setText("Inertial sensors");
    inertialSwitch.setChecked(false);
    inertialSwitch.setEnabled(false);
    inertialSwitch.setOnCheckedChangeListener(new OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView,
                boolean isChecked) {
            onSomeClick(buttonView, isChecked ? "Run inertial sensors"
                    : "Stop inertial sensors");
        }
    });
}
/**
*
*/
// Wires the "Clear new map" button: sends the command together with the
// currently entered map name.
private void initButtonClearNewMap(final ViewGroup rootView,
        final int id) {
    final Button clearMapButton = (Button) rootView.findViewById(id);
    clearMapButton.setText("Clear new map");
    clearMapButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            final EditText mapName = (EditText) rootView
                    .findViewById(R.id.editTextMapName);
            onSomeClick(v, "Clear new map &" + mapName.getText());
        }
    });
}
/**
*
*/
// Wires the playback button: forwards a "Playback" command on click.
private void initButtonPlayback(final ViewGroup rootView,
        final int id) {
    final Button playbackButton = (Button) rootView.findViewById(id);
    playbackButton.setText("Playback");
    playbackButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            onSomeClick(v, "Playback");
        }
    });
}
/**
*
*/
// Wires the experimental "Test" button used during development.
private void initButtonTest(final ViewGroup rootView,
        final int id) {
    final Button testButton = (Button) rootView.findViewById(id);
    testButton.setText("Test sth new!");
    testButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            onSomeClick(v, "Test");
        }
    });
}
// Wires the WiFi switch: toggling starts or stops periodic WiFi scans.
private void initSwitchWiFi(final ViewGroup rootView, final int id) {
    final Switch wifiSwitch = (Switch) rootView.findViewById(id);
    wifiSwitch.setChecked(false);
    wifiSwitch.setText("WiFi");
    wifiSwitch.setOnCheckedChangeListener(new OnCheckedChangeListener() {
        @Override
        public void onCheckedChanged(CompoundButton buttonView,
                boolean isChecked) {
            onSomeClick(buttonView, isChecked ? "Start WiFi scans"
                    : "Stop WiFi scans");
        }
    });
}
/**
*
*/
// Wires the stepometer switch (disabled until localization starts).
private void initSwitchStepometer(final ViewGroup rootView, final int id) {
    final Switch stepometerSwitch = (Switch) rootView.findViewById(id);
    stepometerSwitch.setText("Stepometer");
    stepometerSwitch.setChecked(false);
    stepometerSwitch.setEnabled(false);
    stepometerSwitch
            .setOnCheckedChangeListener(new OnCheckedChangeListener() {
                @Override
                public void onCheckedChanged(CompoundButton buttonView,
                        boolean isChecked) {
                    onSomeClick(buttonView, isChecked ? "Start stepometer"
                            : "Stop stepometer");
                }
            });
}
/**
*
*/
// Wires the barometer switch (disabled until localization starts).
private void initSwitchBarometer(final ViewGroup rootView, final int id) {
    final Switch barometerSwitch = (Switch) rootView.findViewById(id);
    barometerSwitch.setText("Barometer");
    barometerSwitch.setChecked(false);
    barometerSwitch.setEnabled(false);
    barometerSwitch
            .setOnCheckedChangeListener(new OnCheckedChangeListener() {
                @Override
                public void onCheckedChanged(CompoundButton buttonView,
                        boolean isChecked) {
                    onSomeClick(buttonView, isChecked ? "Start barometer"
                            : "Stop barometer");
                }
            });
}
/**
*
*/
// Wires the start/stop localization toggle. The button's label doubles as
// the state flag; starting also enables the sensor switches, stopping
// triggers graph optimization and disables them again.
private void initButtonStartLocalization(final ViewGroup rootView,
        final int id) {
    final Button localizationButton = (Button) rootView.findViewById(id);
    localizationButton.setText("Start Localization");
    localizationButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            final Switch barometerSwitch = (Switch) rootView
                    .findViewById(R.id.switchBarometer);
            final Switch inertialSwitch = (Switch) rootView
                    .findViewById(R.id.switchInertialSensors);
            final Switch stepometerSwitch = (Switch) rootView
                    .findViewById(R.id.switchStepometer);
            final boolean starting = localizationButton.getText().toString()
                    .equals("Start Localization");
            if (starting) {
                localizationButton.setText("Stop Localization");
                barometerSwitch.setEnabled(true);
                inertialSwitch.setEnabled(true);
                stepometerSwitch.setEnabled(true);
                onSomeClick(v, "Start graph");
            } else {
                localizationButton.setText("Start Localization");
                onSomeClick(v, "Optimize graph");
                barometerSwitch.setEnabled(false);
                inertialSwitch.setEnabled(false);
                stepometerSwitch.setEnabled(false);
            }
        }
    });
}
/**
*
*/
// Wires the "Graph from file" button: optimizes a graph from recorded data.
private void initButtonStartGraphTestFromFile(final ViewGroup rootView,
        final int id) {
    final Button graphFromFileButton = (Button) rootView.findViewById(id);
    graphFromFileButton.setText("Graph from file");
    graphFromFileButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            onSomeClick(v, "Graph from file");
        }
    });
}
/**
*
*/
// Side View 6 - Process orientation estimation data from file
private void initButtonStartOrientationEstimationFromFile(
        final ViewGroup rootView, int id) {
    final Button orientFromFileButton = (Button) rootView.findViewById(id);
    orientFromFileButton.setText("Orient file test");
    orientFromFileButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            onSomeClick(v, "Orient file test");
        }
    });
}
// Save map point
/**
 * Wires the "Save map point" button: seeds the position/ID/map-name fields
 * with defaults and, on click, emits the current values as one
 * '&'-separated command string, then auto-increments the ID field.
 */
private void initButtonSaveMapPoint(final ViewGroup rootView, int id) {
    EditText x = (EditText) rootView.findViewById(R.id.editTextWiFiPosX);
    EditText y = (EditText) rootView.findViewById(R.id.editTextWiFiPosY);
    EditText z = (EditText) rootView.findViewById(R.id.editTextWiFiPosZ);
    EditText idText = (EditText) rootView
            .findViewById(R.id.editTextMapPosID);
    x.setText("0.0");
    y.setText("0.0");
    z.setText("0.0");
    idText.setText("10000");
    EditText mapName = (EditText) rootView
            .findViewById(R.id.editTextMapName);
    mapName.setText("newMap");
    // Renamed from buttonStartOrientFromFile (copy-paste leftover).
    Button saveMapPointButton = (Button) rootView.findViewById(id);
    saveMapPointButton.setText("Save map point");
    saveMapPointButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            EditText x = (EditText) rootView
                    .findViewById(R.id.editTextWiFiPosX);
            EditText y = (EditText) rootView
                    .findViewById(R.id.editTextWiFiPosY);
            EditText z = (EditText) rootView
                    .findViewById(R.id.editTextWiFiPosZ);
            EditText mapName = (EditText) rootView
                    .findViewById(R.id.editTextMapName);
            EditText idField = (EditText) rootView
                    .findViewById(R.id.editTextMapPosID);
            onSomeClick(v, "Save map point :&" + mapName.getText() + "&"
                    + idField.getText() + "&" + x.getText() + "&" + y.getText()
                    + "&" + z.getText());
            // Auto-increment the ID. The field is user-editable, so guard
            // against non-numeric content instead of crashing the app with
            // an unhandled NumberFormatException.
            try {
                int idNum = Integer.parseInt(idField.getText().toString());
                idField.setText(String.format("%d", idNum + 1));
            } catch (NumberFormatException ignored) {
                // Leave the field unchanged when it does not hold a number.
            }
        }
    });
}
// Save VPR image
// Wires the "Save image" button: seeds X/Y/Z fields and, on click, reports
// the entered position for visual place recognition.
private void initButtonSaveVPR(final ViewGroup rootView, int id) {
    final EditText x = (EditText) rootView.findViewById(R.id.editTextWiFiPosX);
    final EditText y = (EditText) rootView.findViewById(R.id.editTextWiFiPosY);
    final EditText z = (EditText) rootView.findViewById(R.id.editTextWiFiPosZ);
    x.setText("0.0");
    y.setText("0.0");
    z.setText("0.0");
    final Button saveVprButton = (Button) rootView.findViewById(id);
    saveVprButton.setText("Save image");
    saveVprButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            EditText posX = (EditText) rootView
                    .findViewById(R.id.editTextWiFiPosX);
            EditText posY = (EditText) rootView
                    .findViewById(R.id.editTextWiFiPosY);
            EditText posZ = (EditText) rootView
                    .findViewById(R.id.editTextWiFiPosZ);
            onSomeClick(v,
                    "Save VPR place: " + posX.getText() + " " + posY.getText()
                            + " " + posZ.getText());
        }
    });
}
// Button record all
// Wires the record-all toggle; the button label tracks the recording state.
private void initButtonRecordAll(final ViewGroup rootView, final int id) {
    final Button recordAllButton = (Button) rootView.findViewById(id);
    recordAllButton.setText("Start record all");
    recordAllButton.setOnClickListener(new OnClickListener() {
        public void onClick(View v) {
            Button button = (Button) rootView.findViewById(id);
            boolean starting = button.getText().toString()
                    .equals("Start record all");
            button.setText(starting ? "Stop record all" : "Start record all");
            onSomeClick(v, "Record all");
        }
    });
}
/**
* Returns the page number represented by this fragment object.
*/
public int getPageNumber() {
// Assigned once in onCreate() from the fragment arguments.
return mPageNumber;
}
/*
* Returns the localizationView for updating purposes
*/
public LocalizationView getLocalizationView() {
// May be null: only assigned when the visualization page was inflated.
return localizationView;
}
// ...
// Define the listener of the interface type
// listener is the activity itself
// The hosting activity, cast in onAttach(); receives every UI command.
private OnItemSelectedListener listener;
// Callback interface the hosting activity must implement; all button and
// switch events are funnelled through it as command strings.
public interface OnItemSelectedListener {
// Called with a command string such as "Take picture" or "Start graph".
public void onButtonPressed(String link);
}
// Store the listener (activity) that will have events fired once the
// fragment is attached
/**
 * Caches the hosting activity as the command listener. The activity must
 * implement {@link OnItemSelectedListener}; otherwise attaching fails fast.
 *
 * @throws ClassCastException if the activity does not implement the interface
 */
@Override
public void onAttach(Activity activity) {
    super.onAttach(activity);
    if (activity instanceof OnItemSelectedListener) {
        listener = (OnItemSelectedListener) activity;
    } else {
        // The message now names the actual owning class (the original text
        // said "MyListFragment", a copy-paste leftover from a tutorial).
        throw new ClassCastException(activity.toString()
                + " must implement ScreenSlidePageFragment.OnItemSelectedListener");
    }
}
// Now we can fire the event when the user selects something in the fragment
public void onSomeClick(View v, String value) {
// Forward the command string to the hosting activity.
// NOTE(review): assumes onAttach() has already run so listener is non-null —
// confirm no caller can fire before the fragment is attached.
listener.onButtonPressed(value);
}
/**
 * Posts a snapshot of all sensor measurements to the UI thread. The widget
 * update itself happens in UpdateMeasurementsInGUI.run() and only affects
 * page 2.
 */
public void updateGUIData(float[] _orient, float[] _compOrient,
String _strongestWiFi, int _wiFiCount, float _foundFreq,
float _stepCount, float _stepDistance, int _currentFloor,
float _estimatedHeight, float _accVariance, int _deviceOrientation,
float _stepometerAngle, float _gyroVariance) {
UpdateMeasurementsInGUI obj = new UpdateMeasurementsInGUI(_orient,
_compOrient, _strongestWiFi, _wiFiCount, _foundFreq,
_stepCount, _stepDistance, _currentFloor, _estimatedHeight,
_accVariance, _deviceOrientation, _stepometerAngle,
_gyroVariance);
mHandlerOrient.post(obj);
}
/**
 * Posts the WiFi/magnetic place-recognition results to the UI thread; the
 * widget update happens in UpdateWiFiInGUI.run() and only affects page 2.
 */
public void updateGUIPlaceRecognize(int _placeId, int _sizeOfPlaceDatabase,
int _magneticPlaceId, int _sizeOfMagneticPlaceDatabase) {
UpdateWiFiInGUI obj = new UpdateWiFiInGUI(_placeId,
_sizeOfPlaceDatabase, _magneticPlaceId,
_sizeOfMagneticPlaceDatabase);
mHandlerWiFiRecognition.post(obj);
}
/**
 * Runnable posted to the UI thread that refreshes the measurement read-outs
 * on page 2 (orientation, WiFi, stepometer, barometer, device orientation).
 * All values are snapshotted in the constructor so the posting thread may
 * keep mutating its own buffers afterwards.
 */
class UpdateMeasurementsInGUI implements Runnable {
    float[] orient, compOrient;
    String strongestWiFi;
    int wiFiCount;
    float foundFreq;
    float stepCount, stepDistance;
    int currentFloor;
    float estimatedHeight;
    float accVariance, gyroVariance, stepometerAngle;
    int deviceOrientation;

    public UpdateMeasurementsInGUI(float[] _orient, float[] _compOrient,
            String _strongestWiFi, int _wiFiCount, float _foundFreq,
            float _stepCount, float _stepDistance, int _currentFloor,
            float _estimatedHeight, float _accVariance,
            int _deviceOrientation, float _stepometerAngle,
            float _gyroVariance) {
        // Defensive copies: callers may reuse their orientation arrays.
        orient = _orient.clone();
        compOrient = _compOrient.clone();
        strongestWiFi = _strongestWiFi;
        wiFiCount = _wiFiCount;
        foundFreq = _foundFreq;
        stepCount = _stepCount;
        stepDistance = _stepDistance;
        currentFloor = _currentFloor;
        estimatedHeight = _estimatedHeight;
        accVariance = _accVariance;
        deviceOrientation = _deviceOrientation;
        stepometerAngle = _stepometerAngle;
        gyroVariance = _gyroVariance;
    }

    public void run() {
        // Only page 2 shows these widgets.
        if (mPageNumber != 2) {
            return;
        }
        // The post may execute after the fragment's view was destroyed; bail
        // out instead of NPE-ing on a null getView().
        View root = getView();
        if (root == null) {
            return;
        }
        // ORIENTATION X, Y, Z
        TextView mTextViewRollX = (TextView) root.findViewById(
                R.id.textViewOrient1);
        TextView mTextViewPitchY = (TextView) root.findViewById(
                R.id.textViewOrient2);
        TextView mTextViewYawZ = (TextView) root.findViewById(
                R.id.textViewOrient3);
        mTextViewRollX.setText("Roll (X): "
                + String.format("%.2f", orient[0]) + " deg");
        mTextViewPitchY.setText("Pitch (Y): "
                + String.format("%.2f", orient[1]) + " deg");
        mTextViewYawZ.setText("Yaw (Z): "
                + String.format("%.2f", orient[2]) + " deg");
        // ORIENTATION COMPLEMENTARY X, Y, Z
        TextView mTextViewCompRollX = (TextView) root
                .findViewById(R.id.textViewOrientComp1);
        TextView mTextViewCompPitchY = (TextView) root
                .findViewById(R.id.textViewOrientComp2);
        TextView mTextViewCompYawZ = (TextView) root.findViewById(
                R.id.textViewOrientComp3);
        mTextViewCompRollX.setText("Comp Roll (X): "
                + String.format("%.2f", compOrient[0]) + " deg");
        mTextViewCompPitchY.setText("Comp Pitch (Y): "
                + String.format("%.2f", compOrient[1]) + " deg");
        mTextViewCompYawZ.setText("Comp Yaw (Z): "
                + String.format("%.2f", compOrient[2]) + " deg");
        // WiFi scan summary
        TextView mTextViewNetworkCount = (TextView) root
                .findViewById(R.id.textViewWiFi1);
        TextView mTextViewStrongestWiFi = (TextView) root
                .findViewById(R.id.textViewWiFi2);
        mTextViewNetworkCount.setText("Number of found networks: "
                + Integer.toString(wiFiCount));
        mTextViewStrongestWiFi.setText("Strongest WiFi: "
                + strongestWiFi);
        // Stepometer read-outs
        TextView mTextViewFoundFrequency = (TextView) root
                .findViewById(R.id.textViewStepometer1);
        TextView mTextViewStepCounter = (TextView) root
                .findViewById(R.id.textViewStepometer2);
        TextView mTextViewStepDistance = (TextView) root
                .findViewById(R.id.textViewStepometer3);
        TextView mTextViewAngle = (TextView) root.findViewById(
                R.id.textViewStepometer4);
        TextView mTextViewAccVariance = (TextView) root
                .findViewById(R.id.textViewStepometer5);
        TextView mTextViewGyroVariance = (TextView) root
                .findViewById(R.id.textViewStepometer6);
        mTextViewFoundFrequency.setText("Freq: "
                + String.format("%.2f", foundFreq) + " Hz");
        mTextViewStepCounter.setText("Steps: "
                + String.format("%.2f", stepCount));
        mTextViewStepDistance.setText("Dist: "
                + String.format("%.2f", stepDistance) + " m");
        mTextViewAngle.setText("Raw angle: "
                + String.format("%.2f", stepometerAngle));
        mTextViewAccVariance.setText("Acc var: "
                + String.format("%.2f", accVariance));
        mTextViewGyroVariance.setText("Gyro var: "
                + String.format("%.2f", gyroVariance));
        // Barometer and device orientation
        TextView mTextViewCurrentFloor = (TextView) root
                .findViewById(R.id.textViewBarometer1);
        TextView mTextViewEstimatedHeight = (TextView) root
                .findViewById(R.id.textViewBarometer2);
        TextView mTextViewDeviceOrientation = (TextView) root
                .findViewById(R.id.textViewDeviceOrientation);
        mTextViewCurrentFloor.setText("Floor: "
                + Integer.toString(currentFloor));
        mTextViewEstimatedHeight.setText("Height: "
                + String.format("%.2f", estimatedHeight) + " m");
        if (deviceOrientation == 0)
            mTextViewDeviceOrientation.setText("devOrient: VERT");
        else if (deviceOrientation == 1)
            mTextViewDeviceOrientation.setText("devOrient: HOR LEFT");
        else if (deviceOrientation == 2)
            mTextViewDeviceOrientation.setText("devOrient: HOR RIGHT");
        else if (deviceOrientation == 3)
            mTextViewDeviceOrientation.setText("devOrient: UNKNOWN");
    }
}
/**
 * Stores the camera and forwards it to the preview if page 0 was inflated.
 */
public void setCamera(Camera icamera) {
// Log.d(TAG, String.format("setCamera, mPageNumber = %d",
// mPageNumber));
camera = icamera;
if (preview != null) {
preview.setCamera(camera);
}
// Camera.Parameters parameters = camera.getParameters();
// parameters.setPreviewSize(640, 480);
// camera.setParameters(parameters);
}
/**
 * Runnable posted to the UI thread that refreshes the WiFi and magnetic
 * place-recognition read-outs on page 2.
 */
class UpdateWiFiInGUI implements Runnable {
    int placeId, sizeOfPlaceDatabase;
    int magneticPlaceId, sizeOfMagneticPlaceDatabase;

    public UpdateWiFiInGUI(int _placeId, int _sizeOfPlaceDatabase,
            int _magneticPlaceId, int _sizeOfMagneticDatabase) {
        placeId = _placeId;
        sizeOfPlaceDatabase = _sizeOfPlaceDatabase;
        magneticPlaceId = _magneticPlaceId;
        sizeOfMagneticPlaceDatabase = _sizeOfMagneticDatabase;
    }

    public void run() {
        // Only page 2 shows these widgets.
        if (mPageNumber != 2) {
            return;
        }
        // The post may execute after the fragment's view was destroyed; bail
        // out instead of NPE-ing on a null getView().
        View root = getView();
        if (root == null) {
            return;
        }
        TextView mTextViewRecognizedPlace = (TextView) root
                .findViewById(R.id.textViewWiFi3);
        mTextViewRecognizedPlace.setText("Recognized place id: "
                + Integer.toString(placeId) + " (out of "
                + Integer.toString(sizeOfPlaceDatabase) + " places)");
        TextView mTextViewMagneticRecognizedPlace = (TextView) root
                .findViewById(R.id.textViewMagnetic);
        mTextViewMagneticRecognizedPlace
                .setText("Recognized mag place id: "
                        + Integer.toString(magneticPlaceId)
                        + " (out of "
                        + Integer.toString(sizeOfMagneticPlaceDatabase)
                        + " places)");
    }
}
}
|
|
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.llvm;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.APIUtil.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryStack.*;
import static org.lwjgl.system.MemoryUtil.*;
/** Requires LLVM 11.0 or higher. */
public class LLVMLLJIT {
/** Contains the function pointers loaded from {@code LLVMCore.getLibrary()}. */
public static final class Functions {
// Static-only holder: addresses are resolved eagerly at class-load time, so
// a missing native symbol fails here rather than at the first call site.
private Functions() {}
/** Function address. */
public static final long
OrcCreateLLJITBuilder = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcCreateLLJITBuilder"),
OrcDisposeLLJITBuilder = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcDisposeLLJITBuilder"),
OrcLLJITBuilderSetJITTargetMachineBuilder = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITBuilderSetJITTargetMachineBuilder"),
OrcLLJITBuilderSetObjectLinkingLayerCreator = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITBuilderSetObjectLinkingLayerCreator"),
OrcCreateLLJIT = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcCreateLLJIT"),
OrcDisposeLLJIT = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcDisposeLLJIT"),
OrcLLJITGetExecutionSession = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITGetExecutionSession"),
OrcLLJITGetMainJITDylib = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITGetMainJITDylib"),
OrcLLJITGetTripleString = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITGetTripleString"),
OrcLLJITGetGlobalPrefix = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITGetGlobalPrefix"),
OrcLLJITMangleAndIntern = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITMangleAndIntern"),
OrcLLJITAddObjectFile = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITAddObjectFile"),
OrcLLJITAddObjectFileWithRT = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITAddObjectFileWithRT"),
OrcLLJITAddLLVMIRModule = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITAddLLVMIRModule"),
OrcLLJITAddLLVMIRModuleWithRT = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITAddLLVMIRModuleWithRT"),
OrcLLJITLookup = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITLookup"),
OrcLLJITGetObjLinkingLayer = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITGetObjLinkingLayer"),
OrcLLJITGetObjTransformLayer = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITGetObjTransformLayer"),
OrcLLJITGetIRTransformLayer = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITGetIRTransformLayer"),
OrcLLJITGetDataLayoutStr = apiGetFunctionAddress(LLVMCore.getLibrary(), "LLVMOrcLLJITGetDataLayoutStr");
}
protected LLVMLLJIT() {
// Static-only API surface; instantiation is a programming error.
throw new UnsupportedOperationException();
}
// --- [ LLVMOrcCreateLLJITBuilder ] ---
/**
* Create an {@code LLVMOrcLLJITBuilder}.
*
* <p>The client owns the resulting {@code LLJITBuilder} and should dispose of it using {@link #LLVMOrcDisposeLLJITBuilder OrcDisposeLLJITBuilder} once they are done with it.</p>
*/
@NativeType("LLVMOrcLLJITBuilderRef")
public static long LLVMOrcCreateLLJITBuilder() {
// Direct call through the pointer resolved once in Functions.
long __functionAddress = Functions.OrcCreateLLJITBuilder;
return invokeP(__functionAddress);
}
// --- [ LLVMOrcDisposeLLJITBuilder ] ---
/**
* Dispose of an {@code LLVMOrcLLJITBuilderRef}.
*
* <p>This should only be called if ownership has not been passed to {@code LLVMOrcCreateLLJIT} (e.g. because some error prevented that function from being
* called).</p>
*/
public static void LLVMOrcDisposeLLJITBuilder(@NativeType("LLVMOrcLLJITBuilderRef") long Builder) {
long __functionAddress = Functions.OrcDisposeLLJITBuilder;
if (CHECKS) {
// Rejects a NULL handle when runtime checks are enabled.
check(Builder);
}
invokePV(Builder, __functionAddress);
}
// --- [ LLVMOrcLLJITBuilderSetJITTargetMachineBuilder ] ---
/**
* Set the {@code JITTargetMachineBuilder} to be used when constructing the {@code LLJIT} instance.
*
* <p>Calling this function is optional: if it is not called then the {@code LLJITBuilder} will use {@code JITTargeTMachineBuilder::detectHost} to construct
* a {@code JITTargetMachineBuilder}.</p>
*
* <p>This function takes ownership of the {@code JTMB} argument: clients should not dispose of the {@code JITTargetMachineBuilder} after calling this
* function.</p>
*/
public static void LLVMOrcLLJITBuilderSetJITTargetMachineBuilder(@NativeType("LLVMOrcLLJITBuilderRef") long Builder, @NativeType("LLVMOrcJITTargetMachineBuilderRef") long JTMB) {
long __functionAddress = Functions.OrcLLJITBuilderSetJITTargetMachineBuilder;
if (CHECKS) {
// Both handles must be non-NULL when runtime checks are enabled.
check(Builder);
check(JTMB);
}
invokePPV(Builder, JTMB, __functionAddress);
}
// --- [ LLVMOrcLLJITBuilderSetObjectLinkingLayerCreator ] ---
/** Unsafe version of: {@link #LLVMOrcLLJITBuilderSetObjectLinkingLayerCreator OrcLLJITBuilderSetObjectLinkingLayerCreator} */
public static void nLLVMOrcLLJITBuilderSetObjectLinkingLayerCreator(long Builder, long F, long Ctx) {
long __functionAddress = Functions.OrcLLJITBuilderSetObjectLinkingLayerCreator;
if (CHECKS) {
// Builder and Ctx are validated; F is the raw callback address supplied
// by the safe overload via F.address().
check(Builder);
check(Ctx);
}
invokePPPV(Builder, F, Ctx, __functionAddress);
}
/**
* Set an {@code ObjectLinkingLayer} creator function for this {@code LLJIT} instance.
*
* @since 12
*/
public static void LLVMOrcLLJITBuilderSetObjectLinkingLayerCreator(@NativeType("LLVMOrcLLJITBuilderRef") long Builder, @NativeType("LLVMOrcObjectLayerRef (*) (void *, LLVMOrcExecutionSessionRef, char const *)") LLVMOrcLLJITBuilderObjectLinkingLayerCreatorFunctionI F, @NativeType("void *") long Ctx) {
// Delegates to the unsafe variant with the callback's native address.
nLLVMOrcLLJITBuilderSetObjectLinkingLayerCreator(Builder, F.address(), Ctx);
}
// --- [ LLVMOrcCreateLLJIT ] ---
/** Unsafe version of: {@link #LLVMOrcCreateLLJIT OrcCreateLLJIT} */
public static long nLLVMOrcCreateLLJIT(long Result, long Builder) {
long __functionAddress = Functions.OrcCreateLLJIT;
if (CHECKS) {
// NOTE(review): check(Builder) rejects NULL, yet the safe wrapper's docs
// say a null Builder selects a default-constructed LLJITBuilder —
// generator quirk worth confirming against the C API.
check(Builder);
}
return invokePPP(Result, Builder, __functionAddress);
}
/**
* Create an {@code LLJIT} instance from an {@code LLJITBuilder}.
*
* <p>This operation takes ownership of the {@code Builder} argument: clients should not dispose of the builder after calling this function (even if the
* function returns an error). If a null {@code Builder} argument is provided then a default-constructed {@code LLJITBuilder} will be used.</p>
*
* <p>On success the resulting {@code LLJIT} instance is uniquely owned by the client and automatically manages the memory of all JIT'd code and all modules
* that are transferred to it (e.g. via {@link #LLVMOrcLLJITAddLLVMIRModule OrcLLJITAddLLVMIRModule}). Disposing of the {@code LLJIT} instance will free all memory managed by the JIT,
* including JIT'd code and not-yet compiled modules.</p>
*/
@NativeType("LLVMErrorRef")
public static long LLVMOrcCreateLLJIT(@NativeType("LLVMOrcLLJITRef *") PointerBuffer Result, @NativeType("LLVMOrcLLJITBuilderRef") long Builder) {
if (CHECKS) {
// Result must have at least one remaining element to receive the handle.
check(Result, 1);
}
return nLLVMOrcCreateLLJIT(memAddress(Result), Builder);
}
// --- [ LLVMOrcDisposeLLJIT ] ---

/** Destroys the given {@code LLJIT} instance. */
@NativeType("LLVMErrorRef")
public static long LLVMOrcDisposeLLJIT(@NativeType("LLVMOrcLLJITRef") long J) {
    if (CHECKS) {
        check(J);
    }
    return invokePP(J, Functions.OrcDisposeLLJIT);
}
// --- [ LLVMOrcLLJITGetExecutionSession ] ---

/**
 * Obtains the {@code ExecutionSession} backing this {@code LLJIT} instance.
 *
 * <p>The returned session is owned by the {@code LLJIT} instance; the client must not manage its memory.</p>
 */
@NativeType("LLVMOrcExecutionSessionRef")
public static long LLVMOrcLLJITGetExecutionSession(@NativeType("LLVMOrcLLJITRef") long J) {
    if (CHECKS) {
        check(J);
    }
    return invokePP(J, Functions.OrcLLJITGetExecutionSession);
}
// --- [ LLVMOrcLLJITGetMainJITDylib ] ---

/**
 * Obtains a reference to the main {@code JITDylib}.
 *
 * <p>The returned {@code JITDylib} is owned by the {@code LLJIT} instance; the client must not manage its memory.</p>
 */
@NativeType("LLVMOrcJITDylibRef")
public static long LLVMOrcLLJITGetMainJITDylib(@NativeType("LLVMOrcLLJITRef") long J) {
    if (CHECKS) {
        check(J);
    }
    return invokePP(J, Functions.OrcLLJITGetMainJITDylib);
}
// --- [ LLVMOrcLLJITGetTripleString ] ---

/** Unsafe version of: {@link #LLVMOrcLLJITGetTripleString OrcLLJITGetTripleString} */
public static long nLLVMOrcLLJITGetTripleString(long J) {
    if (CHECKS) {
        check(J);
    }
    return invokePP(J, Functions.OrcLLJITGetTripleString);
}

/**
 * Obtains the target triple for this {@code LLJIT} instance.
 *
 * <p>The returned string is owned by the {@code LLJIT} instance and must not be freed by the client.</p>
 */
@Nullable
@NativeType("char const *")
public static String LLVMOrcLLJITGetTripleString(@NativeType("LLVMOrcLLJITRef") long J) {
    return memUTF8Safe(nLLVMOrcLLJITGetTripleString(J));
}
// --- [ LLVMOrcLLJITGetGlobalPrefix ] ---

/** Queries the global symbol prefix character mandated by the {@code LLJIT}'s {@code DataLayout}. */
@NativeType("char")
public static byte LLVMOrcLLJITGetGlobalPrefix(@NativeType("LLVMOrcLLJITRef") long J) {
    if (CHECKS) {
        check(J);
    }
    return invokePB(J, Functions.OrcLLJITGetGlobalPrefix);
}
// --- [ LLVMOrcLLJITMangleAndIntern ] ---

/** Unsafe version of: {@link #LLVMOrcLLJITMangleAndIntern OrcLLJITMangleAndIntern} */
public static long nLLVMOrcLLJITMangleAndIntern(long J, long UnmangledName) {
    if (CHECKS) {
        check(J);
    }
    return invokePPP(J, UnmangledName, Functions.OrcLLJITMangleAndIntern);
}

/**
 * Mangles the given string according to the {@code LLJIT} instance's {@code DataLayout}, interns the result in the {@code SymbolStringPool} and returns a
 * reference to the pool entry.
 *
 * <p>Clients should call {@link LLVMOrc#LLVMOrcReleaseSymbolStringPoolEntry OrcReleaseSymbolStringPoolEntry} to decrement the ref-count on the pool entry once they are finished with this value.</p>
 */
@NativeType("LLVMOrcSymbolStringPoolEntryRef")
public static long LLVMOrcLLJITMangleAndIntern(@NativeType("LLVMOrcLLJITRef") long J, @NativeType("char const *") ByteBuffer UnmangledName) {
    if (CHECKS) {
        checkNT1(UnmangledName); // must be NUL-terminated
    }
    return nLLVMOrcLLJITMangleAndIntern(J, memAddress(UnmangledName));
}

/**
 * Mangles the given string according to the {@code LLJIT} instance's {@code DataLayout}, interns the result in the {@code SymbolStringPool} and returns a
 * reference to the pool entry.
 *
 * <p>Clients should call {@link LLVMOrc#LLVMOrcReleaseSymbolStringPoolEntry OrcReleaseSymbolStringPoolEntry} to decrement the ref-count on the pool entry once they are finished with this value.</p>
 */
@NativeType("LLVMOrcSymbolStringPoolEntryRef")
public static long LLVMOrcLLJITMangleAndIntern(@NativeType("LLVMOrcLLJITRef") long J, @NativeType("char const *") CharSequence UnmangledName) {
    MemoryStack stack = stackGet();
    int stackPointer = stack.getPointer();
    try {
        // Encode the name as NUL-terminated UTF-8 on the thread-local stack.
        stack.nUTF8(UnmangledName, true);
        return nLLVMOrcLLJITMangleAndIntern(J, stack.getPointerAddress());
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ LLVMOrcLLJITAddObjectFile ] ---

/**
 * Adds a buffer representing an object file to the given {@code JITDylib} in the given {@code LLJIT} instance.
 *
 * <p>Ownership of the buffer is transferred to the {@code LLJIT} instance: it must not be disposed of or referenced once this function returns. Resources
 * associated with the object are tracked by the {@code JITDylib}'s default resource tracker.</p>
 */
@NativeType("LLVMErrorRef")
public static long LLVMOrcLLJITAddObjectFile(@NativeType("LLVMOrcLLJITRef") long J, @NativeType("LLVMOrcJITDylibRef") long JD, @NativeType("LLVMMemoryBufferRef") long ObjBuffer) {
    if (CHECKS) {
        check(J);
        check(JD);
        check(ObjBuffer);
    }
    return invokePPPP(J, JD, ObjBuffer, Functions.OrcLLJITAddObjectFile);
}
// --- [ LLVMOrcLLJITAddObjectFileWithRT ] ---

/**
 * Adds a buffer representing an object file to the given {@code ResourceTracker}'s {@code JITDylib} in the given {@code LLJIT} instance.
 *
 * <p>Ownership of the buffer is transferred to the {@code LLJIT} instance: it must not be disposed of or referenced once this function returns. Resources
 * associated with the object are tracked by {@code ResourceTracker} {@code RT}.</p>
 *
 * @since 12
 */
@NativeType("LLVMErrorRef")
public static long LLVMOrcLLJITAddObjectFileWithRT(@NativeType("LLVMOrcLLJITRef") long J, @NativeType("LLVMOrcResourceTrackerRef") long RT, @NativeType("LLVMMemoryBufferRef") long ObjBuffer) {
    if (CHECKS) {
        check(J);
        check(RT);
        check(ObjBuffer);
    }
    return invokePPPP(J, RT, ObjBuffer, Functions.OrcLLJITAddObjectFileWithRT);
}
// --- [ LLVMOrcLLJITAddLLVMIRModule ] ---

/**
 * Adds an IR module to the given {@code JITDylib} in the given {@code LLJIT} instance.
 *
 * <p>Ownership of the {@code TSM} argument is transferred to the {@code LLJIT} instance: it must not be disposed of or referenced once this function
 * returns. Resources associated with the module are tracked by the {@code JITDylib}'s default resource tracker.</p>
 */
@NativeType("LLVMErrorRef")
public static long LLVMOrcLLJITAddLLVMIRModule(@NativeType("LLVMOrcLLJITRef") long J, @NativeType("LLVMOrcJITDylibRef") long JD, @NativeType("LLVMOrcThreadSafeModuleRef") long TSM) {
    if (CHECKS) {
        check(J);
        check(JD);
        check(TSM);
    }
    return invokePPPP(J, JD, TSM, Functions.OrcLLJITAddLLVMIRModule);
}
// --- [ LLVMOrcLLJITAddLLVMIRModuleWithRT ] ---

/**
 * Add an IR module to the given {@code ResourceTracker}'s {@code JITDylib} in the given {@code LLJIT} instance. This operation transfers ownership of the
 * {@code TSM} argument to the {@code LLJIT} instance. The {@code TSM} argument should not be disposed of or referenced once this function returns.
 *
 * <p>Resources associated with the given {@code Module} will be tracked by {@code ResourceTracker} {@code RT}.</p>
 *
 * @since 12
 */
@NativeType("LLVMErrorRef")
public static long LLVMOrcLLJITAddLLVMIRModuleWithRT(@NativeType("LLVMOrcLLJITRef") long J, @NativeType("LLVMOrcResourceTrackerRef") long RT, @NativeType("LLVMOrcThreadSafeModuleRef") long TSM) {
    // FIX: the resource-tracker parameter was misleadingly named "JD" although its native
    // type is LLVMOrcResourceTrackerRef; renamed to "RT" for consistency with
    // LLVMOrcLLJITAddObjectFileWithRT and the javadoc above. Behavior is unchanged.
    long __functionAddress = Functions.OrcLLJITAddLLVMIRModuleWithRT;
    if (CHECKS) {
        check(J);
        check(RT);
        check(TSM);
    }
    return invokePPPP(J, RT, TSM, __functionAddress);
}
// --- [ LLVMOrcLLJITLookup ] ---

/** Unsafe version of: {@link #LLVMOrcLLJITLookup OrcLLJITLookup} */
public static long nLLVMOrcLLJITLookup(long J, long Result, long Name) {
    if (CHECKS) {
        check(J);
    }
    return invokePPPP(J, Result, Name, Functions.OrcLLJITLookup);
}

/**
 * Looks up the given symbol in the main {@code JITDylib} of the given {@code LLJIT} instance.
 *
 * <p>This operation does not take ownership of the {@code Name} argument.</p>
 */
@NativeType("LLVMErrorRef")
public static long LLVMOrcLLJITLookup(@NativeType("LLVMOrcLLJITRef") long J, @NativeType("LLVMOrcExecutorAddress *") LongBuffer Result, @NativeType("char const *") ByteBuffer Name) {
    if (CHECKS) {
        check(Result, 1);
        checkNT1(Name); // must be NUL-terminated
    }
    return nLLVMOrcLLJITLookup(J, memAddress(Result), memAddress(Name));
}

/**
 * Looks up the given symbol in the main {@code JITDylib} of the given {@code LLJIT} instance.
 *
 * <p>This operation does not take ownership of the {@code Name} argument.</p>
 */
@NativeType("LLVMErrorRef")
public static long LLVMOrcLLJITLookup(@NativeType("LLVMOrcLLJITRef") long J, @NativeType("LLVMOrcExecutorAddress *") LongBuffer Result, @NativeType("char const *") CharSequence Name) {
    if (CHECKS) {
        check(Result, 1);
    }
    MemoryStack stack = stackGet();
    int stackPointer = stack.getPointer();
    try {
        // Encode the symbol name as NUL-terminated UTF-8 on the thread-local stack.
        stack.nUTF8(Name, true);
        return nLLVMOrcLLJITLookup(J, memAddress(Result), stack.getPointerAddress());
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ LLVMOrcLLJITGetObjLinkingLayer ] ---

/**
 * Obtains a non-owning reference to the {@code LLJIT} instance's object linking layer.
 *
 * @since 12
 */
@NativeType("LLVMOrcObjectLayerRef")
public static long LLVMOrcLLJITGetObjLinkingLayer(@NativeType("LLVMOrcLLJITRef") long J) {
    if (CHECKS) {
        check(J);
    }
    return invokePP(J, Functions.OrcLLJITGetObjLinkingLayer);
}
// --- [ LLVMOrcLLJITGetObjTransformLayer ] ---

/**
 * Returns a non-owning reference to the {@code LLJIT} instance's object transform layer.
 *
 * <p>FIX: the previous javadoc said "object linking layer" — copy-pasted from
 * {@link #LLVMOrcLLJITGetObjLinkingLayer OrcLLJITGetObjLinkingLayer}; this function returns the {@code LLVMOrcObjectTransformLayerRef}.</p>
 *
 * @since 12
 */
@NativeType("LLVMOrcObjectTransformLayerRef")
public static long LLVMOrcLLJITGetObjTransformLayer(@NativeType("LLVMOrcLLJITRef") long J) {
    long __functionAddress = Functions.OrcLLJITGetObjTransformLayer;
    if (CHECKS) {
        check(J);
    }
    return invokePP(J, __functionAddress);
}
// --- [ LLVMOrcLLJITGetIRTransformLayer ] ---

/**
 * Obtains a non-owning reference to the {@code LLJIT} instance's IR transform layer.
 *
 * @since 12
 */
@NativeType("LLVMOrcIRTransformLayerRef")
public static long LLVMOrcLLJITGetIRTransformLayer(@NativeType("LLVMOrcLLJITRef") long J) {
    if (CHECKS) {
        check(J);
    }
    return invokePP(J, Functions.OrcLLJITGetIRTransformLayer);
}
// --- [ LLVMOrcLLJITGetDataLayoutStr ] ---

/** Unsafe version of: {@link #LLVMOrcLLJITGetDataLayoutStr OrcLLJITGetDataLayoutStr} */
public static long nLLVMOrcLLJITGetDataLayoutStr(long J) {
    if (CHECKS) {
        check(J);
    }
    return invokePP(J, Functions.OrcLLJITGetDataLayoutStr);
}

/**
 * Obtains the {@code LLJIT} instance's default data layout string.
 *
 * <p>The returned string is owned by the {@code LLJIT} instance and does not need to be freed by the caller.</p>
 *
 * @since 12
 */
@Nullable
@NativeType("char const *")
public static String LLVMOrcLLJITGetDataLayoutStr(@NativeType("LLVMOrcLLJITRef") long J) {
    return memUTF8Safe(nLLVMOrcLLJITGetDataLayoutStr(J));
}
}
|
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.persistence.entity;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.flowable.engine.common.impl.persistence.entity.AbstractEntity;
import org.flowable.engine.impl.variable.ValueFields;
import org.flowable.engine.impl.variable.VariableType;
/**
* @author Tom Baeyens
* @author Marcus Klimstra (CGI)
* @author Joram Barrez
*/
public class VariableInstanceEntityImpl extends AbstractEntity implements VariableInstanceEntity, ValueFields, Serializable {

    private static final long serialVersionUID = 1L;

    /** Variable name, unique within its scope. */
    protected String name;
    /** Handler that (de)serializes this variable's value to/from the typed columns below. */
    protected VariableType type;
    /** Explicit type name; when not set, {@link #getTypeName()} falls back to {@link #type}. */
    protected String typeName;

    protected String processInstanceId;
    protected String executionId;
    protected String taskId;

    // Typed value columns: only the column(s) matching the variable type are populated.
    protected Long longValue;
    protected Double doubleValue;
    protected String textValue;
    protected String textValue2;
    /** Reference to an out-of-row byte array, used for binary/serializable values. */
    protected ByteArrayRef byteArrayRef;

    /** Deserialized value, cached to avoid repeated conversion from the raw columns. */
    protected Object cachedValue;
    /** When set, marks the entity dirty even if no persistent field changed. */
    protected boolean forcedUpdate;
    protected boolean deleted;

    public VariableInstanceEntityImpl() {
    }

    /**
     * Snapshot of the fields the persistence layer compares to detect dirty state.
     *
     * <p>NOTE(review): {@code taskId} and {@code processInstanceId} are not part of the
     * snapshot — presumably immutable after insert; confirm before adding them.</p>
     */
    public Object getPersistentState() {
        Map<String, Object> persistentState = new HashMap<>();
        persistentState.put("name", name);
        if (type != null) {
            persistentState.put("typeName", type.getTypeName());
        }
        persistentState.put("executionId", executionId);
        persistentState.put("longValue", longValue);
        persistentState.put("doubleValue", doubleValue);
        persistentState.put("textValue", textValue);
        persistentState.put("textValue2", textValue2);
        if (byteArrayRef != null && byteArrayRef.getId() != null) {
            persistentState.put("byteArrayValueId", byteArrayRef.getId());
        }
        if (forcedUpdate) {
            persistentState.put("forcedUpdate", Boolean.TRUE);
        }
        return persistentState;
    }

    /** Binds this variable to an execution and forces an update so the link is persisted. */
    public void setExecution(ExecutionEntity execution) {
        this.executionId = execution.getId();
        this.processInstanceId = execution.getProcessInstanceId();
        forceUpdate();
    }

    public void forceUpdate() {
        forcedUpdate = true;
    }

    public void setProcessInstanceId(String processInstanceId) {
        this.processInstanceId = processInstanceId;
    }

    public void setExecutionId(String executionId) {
        this.executionId = executionId;
    }

    // byte array value ///////////////////////////////////////////////////////////

    @Override
    public byte[] getBytes() {
        ensureByteArrayRefInitialized();
        return byteArrayRef.getBytes();
    }

    @Override
    public void setBytes(byte[] bytes) {
        ensureByteArrayRefInitialized();
        // Byte-array rows are named after the variable for traceability.
        byteArrayRef.setValue("var-" + name, bytes);
    }

    public ByteArrayRef getByteArrayRef() {
        return byteArrayRef;
    }

    /** Lazily creates the byte-array reference so plain-typed variables pay no cost. */
    protected void ensureByteArrayRefInitialized() {
        if (byteArrayRef == null) {
            byteArrayRef = new ByteArrayRef();
        }
    }

    // value //////////////////////////////////////////////////////////////////////

    /**
     * Returns the variable's value, deserializing it through {@link #type} on first access.
     * Values of non-cachable types are re-read from the raw fields on every call.
     */
    public Object getValue() {
        if (!type.isCachable() || cachedValue == null) {
            cachedValue = type.getValue(this);
        }
        return cachedValue;
    }

    /** Stores the value through {@link #type} and refreshes the cached value and type name. */
    public void setValue(Object value) {
        type.setValue(value, this);
        typeName = type.getTypeName();
        cachedValue = value;
    }

    // getters and setters ////////////////////////////////////////////////////////

    public void setName(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }

    /** Returns the explicit type name, falling back to {@link #type} when it is not set. */
    public String getTypeName() {
        // Simplified from an if/else-if/else chain whose final branch returned the
        // (necessarily null) typeName anyway; behavior is identical.
        if (typeName == null && type != null) {
            return type.getTypeName();
        }
        return typeName;
    }

    public void setTypeName(String typeName) {
        this.typeName = typeName;
    }

    public VariableType getType() {
        return type;
    }

    public void setType(VariableType type) {
        this.type = type;
    }

    public String getProcessInstanceId() {
        return processInstanceId;
    }

    public String getTaskId() {
        return taskId;
    }

    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    public String getExecutionId() {
        return executionId;
    }

    public Long getLongValue() {
        return longValue;
    }

    public void setLongValue(Long longValue) {
        this.longValue = longValue;
    }

    public Double getDoubleValue() {
        return doubleValue;
    }

    public void setDoubleValue(Double doubleValue) {
        this.doubleValue = doubleValue;
    }

    public String getTextValue() {
        return textValue;
    }

    public void setTextValue(String textValue) {
        this.textValue = textValue;
    }

    public String getTextValue2() {
        return textValue2;
    }

    public void setTextValue2(String textValue2) {
        this.textValue2 = textValue2;
    }

    public Object getCachedValue() {
        return cachedValue;
    }

    public void setCachedValue(Object cachedValue) {
        this.cachedValue = cachedValue;
    }

    // misc methods ///////////////////////////////////////////////////////////////

    /** Debug representation; long text values are abbreviated to keep logs readable. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("VariableInstanceEntity[");
        sb.append("id=").append(id);
        sb.append(", name=").append(name);
        sb.append(", type=").append(type != null ? type.getTypeName() : "null");
        if (longValue != null) {
            sb.append(", longValue=").append(longValue);
        }
        if (doubleValue != null) {
            sb.append(", doubleValue=").append(doubleValue);
        }
        if (textValue != null) {
            sb.append(", textValue=").append(StringUtils.abbreviate(textValue, 40));
        }
        if (textValue2 != null) {
            sb.append(", textValue2=").append(StringUtils.abbreviate(textValue2, 40));
        }
        if (byteArrayRef != null && byteArrayRef.getId() != null) {
            sb.append(", byteArrayValueId=").append(byteArrayRef.getId());
        }
        sb.append("]");
        return sb.toString();
    }
}
|
|
/*
* The JTS Topology Suite is a collection of Java classes that
* implement the fundamental operations required to validate a given
* geo-spatial data set to a known topological specification.
*
* Copyright (C) 2001 Vivid Solutions
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* For more information, contact:
*
* Vivid Solutions
* Suite #1A
* 2328 Government Street
* Victoria BC V8T 5G5
* Canada
*
* (250)385-6040
* www.vividsolutions.com
*/
package com.vividsolutions.jts.geomgraph;
import com.vividsolutions.jts.geomgraph.Position;
import com.vividsolutions.jts.geom.Location;
/**
* A TopologyLocation is the labelling of a
* GraphComponent's topological relationship to a single Geometry.
* <p>
* If the parent component is an area edge, each side and the edge itself
* have a topological location. These locations are named
* <ul>
* <li> ON: on the edge
* <li> LEFT: left-hand side of the edge
* <li> RIGHT: right-hand side
* </ul>
* If the parent component is a line edge or node, there is a single
* topological relationship attribute, ON.
* <p>
* The possible values of a topological location are
* {Location.NONE, Location.EXTERIOR, Location.BOUNDARY, Location.INTERIOR}
* <p>
* The labelling is stored in an array location[j] where
* where j has the values ON, LEFT, RIGHT
* @version 1.7
*/
public class TopologyLocation {

    // Location values indexed by Position.ON / Position.LEFT / Position.RIGHT.
    // A length-1 array models a line or node label (ON only); length 3 models an area edge.
    int[] location;

    /**
     * Constructs a TopologyLocation with the same dimension as the given array.
     *
     * <p>NOTE: only {@code location.length} is used — the element values are NOT copied;
     * every position is initialised to {@code Location.NONE} (preserved original behavior).</p>
     */
    public TopologyLocation(int[] location) {
        init(location.length);
    }

    /**
     * Constructs a TopologyLocation specifying how points on, to the left of, and to the
     * right of some GraphComponent relate to some Geometry. Possible values for the
     * parameters are Location.NULL, Location.EXTERIOR, Location.BOUNDARY,
     * and Location.INTERIOR.
     *
     * @see Location
     */
    public TopologyLocation(int on, int left, int right) {
        init(3);
        location[Position.ON] = on;
        location[Position.LEFT] = left;
        location[Position.RIGHT] = right;
    }

    /** Constructs a line/node location carrying only the ON position. */
    public TopologyLocation(int on) {
        init(1);
        location[Position.ON] = on;
    }

    /**
     * Copy constructor.
     *
     * <p>FIX: the original null-checked {@code gl} only AFTER dereferencing it in
     * {@code init(gl.location.length)}, making the check dead code. A null argument throws
     * {@code NullPointerException} either way, so the misleading check is removed and the
     * element copy is done with {@code System.arraycopy}.</p>
     */
    public TopologyLocation(TopologyLocation gl) {
        init(gl.location.length);
        System.arraycopy(gl.location, 0, location, 0, location.length);
    }

    /** Allocates the location array and marks every position as Location.NONE. */
    private void init(int size) {
        location = new int[size];
        setAllLocations(Location.NONE);
    }

    /** Returns the location at the given position, or Location.NONE when the index is beyond this label's dimension. */
    public int get(int posIndex) {
        if (posIndex < location.length) return location[posIndex];
        return Location.NONE;
    }

    /**
     * @return true if all locations are NULL
     */
    public boolean isNull() {
        for (int i = 0; i < location.length; i++) {
            if (location[i] != Location.NONE) return false;
        }
        return true;
    }

    /**
     * @return true if any locations are NULL
     */
    public boolean isAnyNull() {
        for (int i = 0; i < location.length; i++) {
            if (location[i] == Location.NONE) return true;
        }
        return false;
    }

    /** Returns true if this label and {@code le} agree at the given position. */
    public boolean isEqualOnSide(TopologyLocation le, int locIndex) {
        return location[locIndex] == le.location[locIndex];
    }

    /** An area label carries ON/LEFT/RIGHT; a line label carries ON only. */
    public boolean isArea() { return location.length > 1; }

    public boolean isLine() { return location.length == 1; }

    /** Swaps the LEFT and RIGHT locations; a no-op for line labels. */
    public void flip() {
        if (location.length <= 1) return;
        int temp = location[Position.LEFT];
        location[Position.LEFT] = location[Position.RIGHT];
        location[Position.RIGHT] = temp;
    }

    public void setAllLocations(int locValue) {
        for (int i = 0; i < location.length; i++) {
            location[i] = locValue;
        }
    }

    /** Sets only the positions that are still Location.NONE. */
    public void setAllLocationsIfNull(int locValue) {
        for (int i = 0; i < location.length; i++) {
            if (location[i] == Location.NONE) location[i] = locValue;
        }
    }

    public void setLocation(int locIndex, int locValue) {
        location[locIndex] = locValue;
    }

    public void setLocation(int locValue) {
        setLocation(Position.ON, locValue);
    }

    /** Exposes the backing array directly; callers must not replace or resize it. */
    public int[] getLocations() { return location; }

    /** Sets all three area positions at once (assumes an area label). */
    public void setLocations(int on, int left, int right) {
        location[Position.ON] = on;
        location[Position.LEFT] = left;
        location[Position.RIGHT] = right;
    }

    public boolean allPositionsEqual(int loc) {
        for (int i = 0; i < location.length; i++) {
            if (location[i] != loc) return false;
        }
        return true;
    }

    /**
     * merge updates only the NULL attributes of this object
     * with the attributes of another.
     */
    public void merge(TopologyLocation gl) {
        // if the src is an Area label & and the dest is not, increase the dest to be an Area
        if (gl.location.length > location.length) {
            int[] newLoc = new int[3];
            newLoc[Position.ON] = location[Position.ON];
            newLoc[Position.LEFT] = Location.NONE;
            newLoc[Position.RIGHT] = Location.NONE;
            location = newLoc;
        }
        // Fill only positions still NONE, from the corresponding position of gl (if present).
        for (int i = 0; i < location.length; i++) {
            if (location[i] == Location.NONE && i < gl.location.length)
                location[i] = gl.location[i];
        }
    }

    /** Renders LEFT/ON/RIGHT (or just ON for a line label) as location symbols. */
    public String toString() {
        // StringBuilder instead of the legacy synchronized StringBuffer.
        StringBuilder buf = new StringBuilder();
        if (location.length > 1) buf.append(Location.toLocationSymbol(location[Position.LEFT]));
        buf.append(Location.toLocationSymbol(location[Position.ON]));
        if (location.length > 1) buf.append(Location.toLocationSymbol(location[Position.RIGHT]));
        return buf.toString();
    }
}
|
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/tensorboard_service.proto
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for [TensorboardService.UpdateTensorboardTimeSeries][google.cloud.aiplatform.v1beta1.TensorboardService.UpdateTensorboardTimeSeries].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest}
*/
public final class UpdateTensorboardTimeSeriesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest)
UpdateTensorboardTimeSeriesRequestOrBuilder {
// Version marker for java.io serialization of generated messages.
private static final long serialVersionUID = 0L;
// Use UpdateTensorboardTimeSeriesRequest.newBuilder() to construct.
private UpdateTensorboardTimeSeriesRequest(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default-instance constructor; submessage fields stay null (proto3 "unset").
private UpdateTensorboardTimeSeriesRequest() {}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Invoked reflectively by the protobuf runtime to create fresh instances.
  return new UpdateTensorboardTimeSeriesRequest();
}

@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  // Fields read from the wire that this generated version does not know about.
  return this.unknownFields;
}
/**
 * Wire-format parsing constructor, invoked through the generated PARSER.
 * Reads tag/value pairs until end of input (tag 0); unrecognized fields are
 * preserved in {@code unknownFields} rather than dropped.
 */
private UpdateTensorboardTimeSeriesRequest(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 signals end of the stream.
          done = true;
          break;
        case 10:
          {
            // Tag 10 = field 1 (update_mask), wire type 2 (length-delimited).
            com.google.protobuf.FieldMask.Builder subBuilder = null;
            if (updateMask_ != null) {
              // Field seen before: merge the new occurrence into the existing one.
              subBuilder = updateMask_.toBuilder();
            }
            updateMask_ =
                input.readMessage(com.google.protobuf.FieldMask.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(updateMask_);
              updateMask_ = subBuilder.buildPartial();
            }
            break;
          }
        case 18:
          {
            // Tag 18 = field 2 (tensorboard_time_series), wire type 2 (length-delimited).
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder subBuilder = null;
            if (tensorboardTimeSeries_ != null) {
              subBuilder = tensorboardTimeSeries_.toBuilder();
            }
            tensorboardTimeSeries_ =
                input.readMessage(
                    com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.parser(),
                    extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(tensorboardTimeSeries_);
              tensorboardTimeSeries_ = subBuilder.buildPartial();
            }
            break;
          }
        default:
          {
            // Unknown tag: stash it; parseUnknownField returns false at end of group.
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always freeze what was parsed so far, even on error paths.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
/** Returns the shared descriptor for this message type, generated from the .proto file. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_UpdateTensorboardTimeSeriesRequest_descriptor;
}

/** Wires up reflective field access for the GeneratedMessageV3 runtime. */
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_UpdateTensorboardTimeSeriesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest.class,
          com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest.Builder.class);
}
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
 * <pre>
 * Required. Field mask is used to specify the fields to be overwritten in the
 * TensorboardTimeSeries resource by the update. The fields specified in the
 * update_mask are relative to the resource, not the full request. A field is
 * overwritten if it is in the mask; if the user does not provide a mask then
 * all fields with new values are overwritten.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  return updateMask_ != null;
}
/**
 * <pre>
 * Required. Field mask is used to specify the fields to be overwritten in the
 * TensorboardTimeSeries resource by the update. The fields specified in the
 * update_mask are relative to the resource, not the full request. A field is
 * overwritten if it is in the mask; if the user does not provide a mask then
 * all fields with new values are overwritten.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The updateMask (the default instance when unset, never null).
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMask_ == null) {
    return com.google.protobuf.FieldMask.getDefaultInstance();
  }
  return updateMask_;
}
/**
 * <pre>
 * Required. Field mask is used to specify the fields to be overwritten in the
 * TensorboardTimeSeries resource by the update. The fields specified in the
 * update_mask are relative to the resource, not the full request. A field is
 * overwritten if it is in the mask; if the user does not provide a mask then
 * all fields with new values are overwritten.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  return getUpdateMask();
}
public static final int TENSORBOARD_TIME_SERIES_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboardTimeSeries_;
/**
 * <pre>
 * Required. The TensorboardTimeSeries' `name` field is used to identify the
 * TensorboardTimeSeries to be updated.
 * Format:
 * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the tensorboardTimeSeries field is set.
 */
@java.lang.Override
public boolean hasTensorboardTimeSeries() {
  return tensorboardTimeSeries_ != null;
}
/**
 * <pre>
 * Required. The TensorboardTimeSeries' `name` field is used to identify the
 * TensorboardTimeSeries to be updated.
 * Format:
 * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The tensorboardTimeSeries (the default instance when unset, never null).
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries getTensorboardTimeSeries() {
  if (tensorboardTimeSeries_ == null) {
    return com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.getDefaultInstance();
  }
  return tensorboardTimeSeries_;
}
/**
 * <pre>
 * Required. The TensorboardTimeSeries' `name` field is used to identify the
 * TensorboardTimeSeries to be updated.
 * Format:
 * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
 * </pre>
 *
 * <code>
 * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder
    getTensorboardTimeSeriesOrBuilder() {
  return getTensorboardTimeSeries();
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No proto2-style required fields to verify, so the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

/** Serializes set fields in ascending field-number order, then any unknown fields. */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (updateMask_ != null) {
    output.writeMessage(1, getUpdateMask());
  }
  if (tensorboardTimeSeries_ != null) {
    output.writeMessage(2, getTensorboardTimeSeries());
  }
  unknownFields.writeTo(output);
}

/** Computes (and memoizes in {@code memoizedSize}) the serialized byte length. */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (updateMask_ != null) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
  }
  if (tensorboardTimeSeries_ != null) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(2, getTensorboardTimeSeries());
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest other =
(com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasTensorboardTimeSeries() != other.hasTensorboardTimeSeries()) return false;
if (hasTensorboardTimeSeries()) {
if (!getTensorboardTimeSeries().equals(other.getTensorboardTimeSeries())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
  /**
   * Hash code consistent with {@link #equals}: mixes the descriptor, each present field (tagged
   * by its field number), and the unknown fields. The result is memoized; 0 doubles as the
   * "not yet computed" sentinel. The 19/37/53/29 multipliers are fixed by the protobuf codegen
   * contract and must not change, or hashes would disagree with other generated code.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasTensorboardTimeSeries()) {
      hash = (37 * hash) + TENSORBOARD_TIME_SERIES_FIELD_NUMBER;
      hash = (53 * hash) + getTensorboardTimeSeries().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parse entry points. Each overload decodes one serialized
  // UpdateTensorboardTimeSeriesRequest from a different input kind (ByteBuffer,
  // ByteString, byte[], InputStream, CodedInputStream), optionally with an
  // ExtensionRegistryLite for resolving extensions. The *parseDelimitedFrom*
  // variants expect a varint length prefix before the message bytes.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  /** Returns a fresh builder for this message type (instance variant of {@link #newBuilder()}). */
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Returns a new, empty builder. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a new builder pre-populated with {@code prototype}'s field values. */
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  /**
   * Converts this message back into a builder. The default instance returns an empty builder
   * (cheap common case); any other instance is copied into the builder via mergeFrom.
   */
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for [TensorboardService.UpdateTensorboardTimeSeries][google.cloud.aiplatform.v1beta1.TensorboardService.UpdateTensorboardTimeSeries].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest}
*/
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest)
      com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_UpdateTensorboardTimeSeriesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_UpdateTensorboardTimeSeriesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest.class,
              com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested field builders when the runtime is configured to always use them.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    /** Resets both fields (update_mask, tensorboard_time_series) to unset. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (updateMaskBuilder_ == null) {
        updateMask_ = null;
      } else {
        updateMask_ = null;
        updateMaskBuilder_ = null;
      }
      if (tensorboardTimeSeriesBuilder_ == null) {
        tensorboardTimeSeries_ = null;
      } else {
        tensorboardTimeSeries_ = null;
        tensorboardTimeSeriesBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.TensorboardServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_UpdateTensorboardTimeSeriesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest
          .getDefaultInstance();
    }

    /** Builds the message, throwing if required-field invariants are violated. */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest build() {
      com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /**
     * Builds the message without the initialization check. For each field, takes the plain
     * value when no nested builder exists, otherwise builds the nested builder's current state.
     */
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest buildPartial() {
      com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest result =
          new com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest(this);
      if (updateMaskBuilder_ == null) {
        result.updateMask_ = updateMask_;
      } else {
        result.updateMask_ = updateMaskBuilder_.build();
      }
      if (tensorboardTimeSeriesBuilder_ == null) {
        result.tensorboardTimeSeries_ = tensorboardTimeSeries_;
      } else {
        result.tensorboardTimeSeries_ = tensorboardTimeSeriesBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    /** Dynamic-dispatch merge: routes to the typed overload when the runtime type matches. */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest) {
        return mergeFrom(
            (com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Typed merge: copies only the fields that are set on {@code other}. */
    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest other) {
      if (other
          == com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest
              .getDefaultInstance()) return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.hasTensorboardTimeSeries()) {
        mergeTensorboardTimeSeries(other.getTensorboardTimeSeries());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Parses from a stream and merges into this builder. On a parse failure, any fields decoded
     * before the error are still merged (see the finally block) before the exception propagates.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest)
                e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      // A live nested builder counts as "set" even before its value is built.
      return updateMaskBuilder_ != null || updateMask_ != null;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
        onChanged();
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
        onChanged();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (updateMask_ != null) {
          // Field-wise merge with the existing value, per protobuf merge semantics.
          updateMask_ =
              com.google.protobuf.FieldMask.newBuilder(updateMask_).mergeFrom(value).buildPartial();
        } else {
          updateMask_ = value;
        }
        onChanged();
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      if (updateMaskBuilder_ == null) {
        updateMask_ = null;
        onChanged();
      } else {
        updateMask_ = null;
        updateMaskBuilder_ = null;
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask is used to specify the fields to be overwritten in the
     * TensorboardTimeSeries resource by the update.
     * The fields specified in the update_mask are relative to the resource, not
     * the full request. A field will be overwritten if it is in the mask. If the
     * user does not provide a mask then all fields will be overwritten if new
     * values are specified.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily creates the nested builder, handing off the current plain value.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    private com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboardTimeSeries_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries,
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder,
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder>
        tensorboardTimeSeriesBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the tensorboardTimeSeries field is set.
     */
    public boolean hasTensorboardTimeSeries() {
      // A live nested builder counts as "set" even before its value is built.
      return tensorboardTimeSeriesBuilder_ != null || tensorboardTimeSeries_ != null;
    }
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The tensorboardTimeSeries.
     */
    public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries getTensorboardTimeSeries() {
      if (tensorboardTimeSeriesBuilder_ == null) {
        return tensorboardTimeSeries_ == null
            ? com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.getDefaultInstance()
            : tensorboardTimeSeries_;
      } else {
        return tensorboardTimeSeriesBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setTensorboardTimeSeries(
        com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries value) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        tensorboardTimeSeries_ = value;
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setTensorboardTimeSeries(
        com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder builderForValue) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        tensorboardTimeSeries_ = builderForValue.build();
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeTensorboardTimeSeries(
        com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries value) {
      if (tensorboardTimeSeriesBuilder_ == null) {
        if (tensorboardTimeSeries_ != null) {
          // Field-wise merge with the existing value, per protobuf merge semantics.
          tensorboardTimeSeries_ =
              com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.newBuilder(
                      tensorboardTimeSeries_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          tensorboardTimeSeries_ = value;
        }
        onChanged();
      } else {
        tensorboardTimeSeriesBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearTensorboardTimeSeries() {
      if (tensorboardTimeSeriesBuilder_ == null) {
        tensorboardTimeSeries_ = null;
        onChanged();
      } else {
        tensorboardTimeSeries_ = null;
        tensorboardTimeSeriesBuilder_ = null;
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder
        getTensorboardTimeSeriesBuilder() {
      onChanged();
      return getTensorboardTimeSeriesFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder
        getTensorboardTimeSeriesOrBuilder() {
      if (tensorboardTimeSeriesBuilder_ != null) {
        return tensorboardTimeSeriesBuilder_.getMessageOrBuilder();
      } else {
        return tensorboardTimeSeries_ == null
            ? com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.getDefaultInstance()
            : tensorboardTimeSeries_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The TensorboardTimeSeries' `name` field is used to identify the
     * TensorboardTimeSeries to be updated.
     * Format:
     * `projects/{project}/locations/{location}/tensorboards/{tensorboard}/experiments/{experiment}/runs/{run}/timeSeries/{time_series}`
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1beta1.TensorboardTimeSeries tensorboard_time_series = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries,
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder,
            com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder>
        getTensorboardTimeSeriesFieldBuilder() {
      // Lazily creates the nested builder, handing off the current plain value.
      if (tensorboardTimeSeriesBuilder_ == null) {
        tensorboardTimeSeriesBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries,
                com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeries.Builder,
                com.google.cloud.aiplatform.v1beta1.TensorboardTimeSeriesOrBuilder>(
                getTensorboardTimeSeries(), getParentForChildren(), isClean());
        tensorboardTimeSeries_ = null;
      }
      return tensorboardTimeSeriesBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest)
  // Singleton default instance: the canonical value of this message type with all fields unset.
  private static final com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest();
  }

  /** Returns the shared, immutable default instance of this message type. */
  public static com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser used by all the static parseFrom overloads above.
  private static final com.google.protobuf.Parser<UpdateTensorboardTimeSeriesRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateTensorboardTimeSeriesRequest>() {
        @java.lang.Override
        public UpdateTensorboardTimeSeriesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new UpdateTensorboardTimeSeriesRequest(input, extensionRegistry);
        }
      };

  /** Returns the singleton parser for this message type. */
  public static com.google.protobuf.Parser<UpdateTensorboardTimeSeriesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateTensorboardTimeSeriesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.UpdateTensorboardTimeSeriesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ---------------------------------------------------------------------------
// File boundary: the content above and below belong to two different source
// files that were concatenated together; they must be split back into
// separate compilation units.
// ---------------------------------------------------------------------------
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.http.impl;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLSession;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.Header;
import org.apache.http.HttpMessage;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.ssl.SSLContextBuilder;
import org.apache.http.util.EntityUtils;
import org.flowable.bpmn.model.FieldExtension;
import org.flowable.bpmn.model.FlowableHttpRequestHandler;
import org.flowable.bpmn.model.FlowableHttpResponseHandler;
import org.flowable.bpmn.model.HttpServiceTask;
import org.flowable.bpmn.model.ImplementationType;
import org.flowable.bpmn.model.ServiceTask;
import org.flowable.engine.cfg.HttpClientConfig;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.Expression;
import org.flowable.engine.impl.bpmn.parser.FieldDeclaration;
import org.flowable.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.flowable.engine.impl.el.FixedValue;
import org.flowable.engine.impl.util.CommandContextUtil;
import org.flowable.http.HttpActivityBehavior;
import org.flowable.http.HttpRequest;
import org.flowable.http.HttpResponse;
import org.flowable.http.delegate.HttpRequestHandler;
import org.flowable.http.delegate.HttpResponseHandler;
import org.flowable.http.impl.handler.ClassDelegateHttpHandler;
import org.flowable.http.impl.handler.DelegateExpressionHttpHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implementation of HttpActivityBehavior using Apache HTTP Client
*
* @author Harsha Teja Kanna.
*/
public class HttpActivityBehaviorImpl extends HttpActivityBehavior {
    private static final long serialVersionUID = 1L;

    private static final Logger LOGGER = LoggerFactory.getLogger(HttpActivityBehaviorImpl.class);

    // The HTTP service task definition this behavior executes (request/response handlers, fields).
    protected HttpServiceTask httpServiceTask;

    // Daemon timer. NOTE(review): not referenced anywhere in the visible code of this class —
    // presumably used by request-timeout logic elsewhere; confirm before removing.
    protected final Timer timer = new Timer(true);

    // Shared Apache HttpClient, configured once in the constructor and closed via shutdown hook.
    protected final CloseableHttpClient client;
    /**
     * Builds the Apache HttpClient from the engine's {@link HttpClientConfig}:
     * optionally disables TLS certificate/hostname verification, configures the retry
     * handler, and registers a JVM shutdown hook that closes the client.
     *
     * NOTE(review): each instance registers its own shutdown hook and creates its own
     * client/Timer; if many instances are created, hooks and clients accumulate until
     * JVM exit — verify the engine creates this behavior once (or pool the client).
     */
    public HttpActivityBehaviorImpl() {
        HttpClientConfig config = CommandContextUtil.getProcessEngineConfiguration().getHttpClientConfig();
        HttpClientBuilder httpClientBuilder = HttpClientBuilder.create();
        // https settings
        if (config.isDisableCertVerify()) {
            try {
                // Trust self-signed certificates and accept any hostname.
                // WARNING: disables TLS protection entirely — intended for test environments only.
                SSLContextBuilder builder = new SSLContextBuilder();
                builder.loadTrustMaterial(null, new TrustSelfSignedStrategy());
                httpClientBuilder.setSSLSocketFactory(
                        new SSLConnectionSocketFactory(builder.build(), new HostnameVerifier() {
                            public boolean verify(String s, SSLSession sslSession) {
                                return true;
                            }
                        }));
            } catch (Exception e) {
                // Best-effort: fall back to the default (verifying) socket factory.
                LOGGER.error("Could not configure HTTP client SSL self signed strategy", e);
            }
        }
        // request retry settings
        // requestRetryEnabled=false is the second argument: do not retry requests that were sent.
        int retryCount = 0;
        if (config.getRequestRetryLimit() > 0) {
            retryCount = config.getRequestRetryLimit();
        }
        httpClientBuilder.setRetryHandler(new DefaultHttpRequestRetryHandler(retryCount, false));
        // Build http client
        client = httpClientBuilder.build();
        LOGGER.info("HTTP client is initialized");
        // Shutdown hook to close the http client
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                if (client != null) {
                    try {
                        client.close();
                        LOGGER.info("HTTP client is closed");
                    } catch (Throwable e) {
                        LOGGER.error("Could not close http client", e);
                    }
                }
            }
        });
    }
@Override
public HttpResponse perform(final DelegateExecution execution, final HttpRequest requestInfo) {
HttpRequestBase request = null;
CloseableHttpResponse response = null;
ProcessEngineConfigurationImpl processEngineConfiguration = CommandContextUtil.getProcessEngineConfiguration();
try {
if (httpServiceTask.getHttpRequestHandler() != null) {
HttpRequestHandler httpRequestHandler = createHttpRequestHandler(httpServiceTask.getHttpRequestHandler(), processEngineConfiguration);
httpRequestHandler.handleHttpRequest(execution, requestInfo, client);
}
} catch (Exception e) {
throw new FlowableException("Exception while invoking HttpRequestHandler: " + e.getMessage(), e);
}
try {
URIBuilder uri = new URIBuilder(requestInfo.getUrl());
switch (requestInfo.getMethod()) {
case "GET": {
request = new HttpGet(uri.toString());
break;
}
case "POST": {
HttpPost post = new HttpPost(uri.toString());
post.setEntity(new StringEntity(requestInfo.getBody()));
request = post;
break;
}
case "PUT": {
HttpPut put = new HttpPut(uri.toString());
put.setEntity(new StringEntity(requestInfo.getBody()));
request = put;
break;
}
case "DELETE": {
HttpDelete delete = new HttpDelete(uri.toString());
request = delete;
break;
}
default: {
throw new FlowableException(requestInfo.getMethod() + " HTTP method not supported");
}
}
if (requestInfo.getHeaders() != null) {
setHeaders(request, requestInfo.getHeaders());
}
setConfig(request, requestInfo, CommandContextUtil.getProcessEngineConfiguration().getHttpClientConfig());
if (requestInfo.getTimeout() > 0) {
timer.schedule(new TimeoutTask(request), requestInfo.getTimeout());
}
response = client.execute(request);
HttpResponse responseInfo = new HttpResponse();
if (response.getStatusLine() != null) {
responseInfo.setStatusCode(response.getStatusLine().getStatusCode());
responseInfo.setProtocol(response.getStatusLine().getProtocolVersion().toString());
responseInfo.setReason(response.getStatusLine().getReasonPhrase());
}
if (response.getAllHeaders() != null) {
responseInfo.setHeaders(getHeadersAsString(response.getAllHeaders()));
}
if (response.getEntity() != null) {
responseInfo.setBody(EntityUtils.toString(response.getEntity()));
}
try {
if (httpServiceTask.getHttpResponseHandler() != null) {
HttpResponseHandler httpResponseHandler = createHttpResponseHandler(httpServiceTask.getHttpResponseHandler(), processEngineConfiguration);
httpResponseHandler.handleHttpResponse(execution, responseInfo);
}
} catch (Exception e) {
throw new FlowableException("Exception while invoking HttpResponseHandler: " + e.getMessage(), e);
}
return responseInfo;
} catch (final ClientProtocolException e) {
throw new FlowableException("HTTP exception occurred", e);
} catch (final IOException e) {
throw new FlowableException("IO exception occurred", e);
} catch (final URISyntaxException e) {
throw new FlowableException("Invalid URL exception occurred", e);
} catch (final FlowableException e) {
throw e;
} finally {
if (response != null) {
try {
response.close();
} catch (Throwable e) {
LOGGER.error("Could not close http response", e);
}
}
}
}
protected void setConfig(final HttpRequestBase base, final HttpRequest requestInfo, final HttpClientConfig config) {
base.setConfig(RequestConfig.custom()
.setRedirectsEnabled(!requestInfo.isNoRedirects())
.setSocketTimeout(config.getSocketTimeout())
.setConnectTimeout(config.getConnectTimeout())
.setConnectionRequestTimeout(config.getConnectionRequestTimeout())
.build());
}
protected String getHeadersAsString(final Header[] headers) {
StringBuilder hb = new StringBuilder();
for (Header header : headers) {
hb.append(header.getName()).append(": ").append(header.getValue()).append('\n');
}
return hb.toString();
}
protected void setHeaders(final HttpMessage base, final String headers) throws IOException {
try (BufferedReader reader = new BufferedReader(new StringReader(headers))) {
String line = reader.readLine();
while (line != null) {
String[] header = line.split(":");
if (header.length == 2) {
base.addHeader(header[0], header[1]);
line = reader.readLine();
} else {
throw new FlowableException(HTTP_TASK_REQUEST_HEADERS_INVALID);
}
}
}
}
protected HttpRequestHandler createHttpRequestHandler(FlowableHttpRequestHandler handler, ProcessEngineConfigurationImpl processEngineConfiguration) {
HttpRequestHandler requestHandler = null;
if (ImplementationType.IMPLEMENTATION_TYPE_CLASS.equalsIgnoreCase(handler.getImplementationType())) {
requestHandler = new ClassDelegateHttpHandler(handler.getImplementation(),
createFieldDeclarations(handler.getFieldExtensions(), processEngineConfiguration));
} else if (ImplementationType.IMPLEMENTATION_TYPE_DELEGATEEXPRESSION.equalsIgnoreCase(handler.getImplementationType())) {
requestHandler = new DelegateExpressionHttpHandler(processEngineConfiguration.getExpressionManager().createExpression(handler.getImplementation()),
createFieldDeclarations(handler.getFieldExtensions(), processEngineConfiguration));
}
return requestHandler;
}
protected HttpResponseHandler createHttpResponseHandler(FlowableHttpResponseHandler handler, ProcessEngineConfigurationImpl processEngineConfiguration) {
HttpResponseHandler responseHandler = null;
if (ImplementationType.IMPLEMENTATION_TYPE_CLASS.equalsIgnoreCase(handler.getImplementationType())) {
responseHandler = new ClassDelegateHttpHandler(handler.getImplementation(),
createFieldDeclarations(handler.getFieldExtensions(), processEngineConfiguration));
} else if (ImplementationType.IMPLEMENTATION_TYPE_DELEGATEEXPRESSION.equalsIgnoreCase(handler.getImplementationType())) {
responseHandler = new DelegateExpressionHttpHandler(processEngineConfiguration.getExpressionManager().createExpression(handler.getImplementation()),
createFieldDeclarations(handler.getFieldExtensions(), processEngineConfiguration));
}
return responseHandler;
}
protected List<FieldDeclaration> createFieldDeclarations(List<FieldExtension> fieldList, ProcessEngineConfigurationImpl processEngineConfiguration) {
List<FieldDeclaration> fieldDeclarations = new ArrayList<>();
for (FieldExtension fieldExtension : fieldList) {
FieldDeclaration fieldDeclaration = null;
if (StringUtils.isNotEmpty(fieldExtension.getExpression())) {
fieldDeclaration = new FieldDeclaration(fieldExtension.getFieldName(), Expression.class.getName(),
processEngineConfiguration.getExpressionManager().createExpression(fieldExtension.getExpression()));
} else {
fieldDeclaration = new FieldDeclaration(fieldExtension.getFieldName(), Expression.class.getName(),
new FixedValue(fieldExtension.getStringValue()));
}
fieldDeclarations.add(fieldDeclaration);
}
return fieldDeclarations;
}
protected static class TimeoutTask extends TimerTask {
private HttpRequestBase request;
public TimeoutTask(HttpRequestBase request) {
this.request = request;
}
@Override
public void run() {
if (request != null) {
request.abort();
}
}
}
public void setServiceTask(ServiceTask serviceTask) {
this.httpServiceTask = (HttpServiceTask) serviceTask;
}
}
|
|
/*
* Copyright 2015 BISEL, Heriot-Watt University, Edinburgh, UK (http://www.bisel.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.hw.macs.bisel.phis.iqs.v103;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URLEncoder;
import java.util.Enumeration;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import uk.ac.hw.macs.bisel.phis.iqs.CommunicateWithSolr;
import uk.ac.hw.macs.bisel.phis.iqs.GetHost;
/**
*
* @author kcm
*/
@WebServlet(name = "v103GRs", urlPatterns = {"/v103GRs"})
public class v103GRs extends HttpServlet {

    private static final String url = GetHost.getEBI("103") + "getRois?"; // stem of every SOLR query
    private static final Logger logger = Logger.getLogger(System.class.getName());

    // Parameters that are validated by name and forwarded to SOLR unchanged
    // (canonical spelling as expected by the SOLR endpoint).
    private static final String[] PASS_THROUGH_PARAMS = {
        "imageId", "userId", "userGroup",
        "lastEditBefore", "lastEditAfter", "createdBefore", "createdAfter",
        "start"
    };

    /**
     * Returns the canonical spelling of a supported pass-through parameter,
     * or {@code null} when the parameter is not recognised.
     */
    private static String canonicalName(String param) {
        for (String candidate : PASS_THROUGH_PARAMS) {
            if (candidate.equalsIgnoreCase(param)) {
                return candidate;
            }
        }
        return null;
    }

    /**
     * Appends {@code name=value} (value URL-encoded) to the query, preceded by
     * {@code &} unless this is the first parameter.
     *
     * @throws IOException if UTF-8 encoding is unsupported (never in practice)
     */
    private static void appendParam(StringBuilder query, boolean first, String name, String value)
            throws IOException {
        if (!first) {
            query.append('&');
        }
        query.append(name).append('=').append(URLEncoder.encode(value, "UTF-8"));
    }

    /**
     * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
     * methods: builds a SOLR "getRois" query from the recognised request
     * parameters, runs it, and writes the (optionally JSONP-wrapped) result.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setHeader("Access-Control-Allow-Origin", "*");
        boolean error = false; // has an error been detected?
        String solrResult = ""; // JSON doc sent back to UI
        String callback = ""; // JSONP callback name, if supplied

        // create URL for SOLR query
        StringBuilder queryURL = new StringBuilder(url);
        boolean first = true; // no '&' separator before the first parameter
        Map<String, String[]> params = request.getParameterMap(); // map of parameters to values
        Enumeration<String> allParams = request.getParameterNames();
        while (allParams.hasMoreElements()) {
            String param = allParams.nextElement();
            // NOTE: values are always read via the client's own spelling of the
            // name (params.get(param)); looking them up by canonical spelling
            // would NPE when the client uses different casing.
            if (param.toLowerCase().contains("callback")) {
                callback = params.get(param)[0];
            } else if (param.equalsIgnoreCase("version")) {
                // accepted but ignored
            } else if (param.equalsIgnoreCase("num")) { // number of results to return
                // ensure a number is supplied by GUI
                try {
                    Integer.parseInt(params.get(param)[0]);
                } catch (NumberFormatException nfe) {
                    error = true;
                    solrResult = "{\"invalid_num_specified\": \"" + params.get(param)[0] + "\"}";
                    break;
                }
                // SOLR calls this parameter "resultNo"
                appendParam(queryURL, first, "resultNo", params.get(param)[0]);
                first = false;
            } else {
                String canonical = canonicalName(param);
                if (canonical != null) {
                    // recognised filter/pagination parameter: forward verbatim.
                    // first is now reset after EVERY appended parameter; previously
                    // several branches forgot this, fusing the next parameter onto
                    // this one without a '&' separator.
                    appendParam(queryURL, first, canonical, params.get(param)[0]);
                    first = false;
                } else { // parameter was not recognised, send error
                    error = true;
                    logger.log(Level.WARNING, "Client sent invalid parameter: " + param);
                    // key intentionally kept misspelled ("paramater") for
                    // compatibility with existing clients
                    solrResult = "{\"invalid_paramater\": \"" + param + "\"}";
                    break;
                }
            }
        }

        // run solr query
        if (!error) { // if no error detected
            CommunicateWithSolr cws = new CommunicateWithSolr();
            solrResult = cws.talk(queryURL.toString());
        } else {
            logger.log(Level.SEVERE, "[BAD QUERY] " + queryURL);
        }

        // jsonp code from yiya
        String type = "";
        if (null == callback || callback.trim().equals("")) {
            type = "application/json; charset=utf-8";
        } else {
            type = "application/javascript; charset=utf-8";
            // you may want to get sender url to check whether
            // this operation is allowed
            // out.print('Access-Control-Allow-Origin: http://www.example.com/');
            if (null != solrResult) {
                solrResult = callback + "(" + solrResult + ")";
            } else {
                solrResult = callback + "()";
            }
        }
        response.setContentType(type);
        try ( // send result to client (UI)
                PrintWriter out = response.getWriter()) {
            out.print(solrResult); // may be error or genuine result
        }
    }

    // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
    /**
     * Handles the HTTP <code>GET</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles the HTTP <code>POST</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     *
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }// </editor-fold>
}
|
|
/**
* Copyright The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.alibaba.wasp.util;
import com.alibaba.wasp.ReadModel;
import com.alibaba.wasp.conf.WaspConfiguration;
import org.apache.hadoop.conf.Configuration;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
public class Utils {

    /**
     * An 0-size byte array.
     */
    public static final byte[] EMPTY_BYTES = {};

    // Cache of resources loaded via getResource(); null entries are re-fetched.
    private static final HashMap<String, byte[]> RESOURCES = New.hashMap();

    private Utils() {
        // utility class
    }

    /**
     * Create a new byte array and copy all the data. If the size of the byte
     * array is zero, the same array is returned.
     *
     * @param b
     *          the byte array (may not be null)
     * @return a new byte array
     */
    public static byte[] cloneByteArray(byte[] b) {
        if (b == null) {
            return null;
        }
        int len = b.length;
        if (len == 0) {
            return EMPTY_BYTES;
        }
        byte[] copy = new byte[len];
        System.arraycopy(b, 0, copy, 0, len);
        return copy;
    }

    /**
     * Get the system property. If the system property is not set, or if a
     * security exception occurs, the default value is returned.
     *
     * @param key
     *          the key
     * @param defaultValue
     *          the default value
     * @return the value
     */
    public static String getProperty(Configuration conf, String key,
            String defaultValue) {
        try {
            return conf.get(key, defaultValue);
        } catch (SecurityException se) {
            return defaultValue;
        }
    }

    /**
     * Get the system property as an int. If the property is not set, not a
     * valid number, or if a security exception occurs, the default value is
     * returned.
     *
     * @param key
     *          the key
     * @param defaultValue
     *          the default value
     * @return the value
     */
    public static int getProperty(Configuration conf, String key, int defaultValue) {
        String s = getProperty(conf, key, null);
        if (s != null) {
            try {
                // Integer.decode also accepts hex (0x...) and octal forms.
                return Integer.decode(s).intValue();
            } catch (NumberFormatException e) {
                // ignore — fall through to the default
            }
        }
        return defaultValue;
    }

    /**
     * Get the system property as a boolean. If the property is not set, or if
     * a security exception occurs, the default value is returned.
     *
     * @param key
     *          the key
     * @param defaultValue
     *          the default value
     * @return the value
     */
    public static boolean getProperty(Configuration conf, String key,
            boolean defaultValue) {
        String s = getProperty(conf, key, null);
        if (s != null) {
            // Boolean.parseBoolean never throws: any value other than "true"
            // (case-insensitive) simply yields false. The previous
            // NumberFormatException catch here was unreachable.
            return Boolean.parseBoolean(s);
        }
        return defaultValue;
    }

    /**
     * Read a long value from the byte array at the given position. The most
     * significant byte is read first.
     *
     * @param buff
     *          the byte array
     * @param pos
     *          the position
     * @return the value
     */
    public static long readLong(byte[] buff, int pos) {
        return (((long) readInt(buff, pos)) << 32)
                + (readInt(buff, pos + 4) & 0xffffffffL);
    }

    /**
     * Read a big-endian int from the byte array at the given position.
     */
    public static int readInt(byte[] buff, int pos) {
        return (buff[pos++] << 24) + ((buff[pos++] & 0xff) << 16)
                + ((buff[pos++] & 0xff) << 8) + (buff[pos] & 0xff);
    }

    /**
     * Read a little-endian double from the first 8 bytes of the array.
     */
    public static double readDouble(byte[] b) {
        long l;
        l = b[0];
        l &= 0xff;
        l |= ((long) b[1] << 8);
        l &= 0xffff;
        l |= ((long) b[2] << 16);
        l &= 0xffffff;
        l |= ((long) b[3] << 24);
        l &= 0xffffffffl;
        l |= ((long) b[4] << 32);
        l &= 0xffffffffffl;
        l |= ((long) b[5] << 40);
        l &= 0xffffffffffffl;
        l |= ((long) b[6] << 48);
        l |= ((long) b[7] << 56);
        return Double.longBitsToDouble(l);
    }

    /**
     * Compare the contents of two byte arrays. If the content or length of the
     * first array is smaller than the second array, -1 is returned. If the
     * content or length of the second array is smaller than the first array, 1 is
     * returned. If the contents and lengths are the same, 0 is returned.
     *
     * @param data1
     *          the first byte array (must not be null)
     * @param data2
     *          the second byte array (must not be null)
     * @return the result of the comparison (-1, 1 or 0)
     */
    public static int compareNotNull(byte[] data1, byte[] data2) {
        if (data1 == data2) {
            return 0;
        }
        int len = Math.min(data1.length, data2.length);
        for (int i = 0; i < len; i++) {
            byte b = data1[i];
            byte b2 = data2[i];
            if (b != b2) {
                return b > b2 ? 1 : -1;
            }
        }
        return Integer.signum(data1.length - data2.length);
    }

    /**
     * Calculate the hash code of the given byte array. Short arrays are hashed
     * fully; long arrays are sampled (ends plus a stride) for speed.
     *
     * @param value
     *          the byte array
     * @return the hash code
     */
    public static int getByteArrayHash(byte[] value) {
        int len = value.length;
        int h = len;
        if (len < 50) {
            for (int i = 0; i < len; i++) {
                h = 31 * h + value[i];
            }
        } else {
            int step = len / 16;
            for (int i = 0; i < 4; i++) {
                h = 31 * h + value[i];
                h = 31 * h + value[--len];
            }
            for (int i = 4 + step; i < len; i += step) {
                h = 31 * h + value[i];
            }
        }
        return h;
    }

    /**
     * Read a little-endian float from the (up to 4) bytes of the array.
     */
    public static float readFloat(byte[] values) {
        int accum = 0;
        for (int shiftBy = 0; shiftBy < values.length; shiftBy++) {
            accum |= (values[shiftBy] & 0xff) << shiftBy * 8;
        }
        return Float.intBitsToFloat(accum);
    }

    /**
     * Get a resource from the resource map, loading and caching it on first use.
     *
     * @param name
     *          the name of the resource
     * @return the resource data, or an empty array when the resource is missing
     */
    public static byte[] getResource(String name) throws IOException {
        byte[] data = RESOURCES.get(name);
        if (data == null) {
            data = loadResource(name);
            RESOURCES.put(name, data);
        }
        return data == null ? EMPTY_BYTES : data;
    }

    /**
     * Loads a resource either from the bundled data.zip (preferred) or directly
     * from the classpath. Returns null when the resource cannot be found.
     */
    private static byte[] loadResource(String name) throws IOException {
        InputStream in = Utils.class.getResourceAsStream("data.zip");
        if (in == null) {
            // No data.zip: fall back to a plain classpath lookup.
            in = Utils.class.getResourceAsStream(name);
            if (in == null) {
                return null;
            }
            return IOUtils.readBytesAndClose(in, 0);
        }
        ZipInputStream zipIn = new ZipInputStream(in);
        try {
            while (true) {
                ZipEntry entry = zipIn.getNextEntry();
                if (entry == null) {
                    break;
                }
                // Zip entry names may or may not carry a leading '/'; normalize.
                String entryName = entry.getName();
                if (!entryName.startsWith("/")) {
                    entryName = "/" + entryName;
                }
                if (entryName.equals(name)) {
                    ByteArrayOutputStream out = new ByteArrayOutputStream();
                    IOUtils.copy(zipIn, out);
                    zipIn.closeEntry();
                    return out.toByteArray();
                }
                zipIn.closeEntry();
            }
        } catch (IOException e) {
            // if this happens we have a real problem
            e.printStackTrace();
        } finally {
            zipIn.close();
        }
        return null;
    }

    /**
     * Copies every key/value pair of a Hadoop Configuration into a Properties
     * object.
     */
    public static Properties convertConfigurationToProperties(Configuration conf) {
        Properties properties = new Properties();
        for (Map.Entry<String, String> configurationEntry : conf) {
            properties
                    .put(configurationEntry.getKey(), configurationEntry.getValue());
        }
        return properties;
    }

    /**
     * Copies every String-valued entry of a Properties object into a freshly
     * created Wasp Configuration.
     */
    public static Configuration convertPropertiesToConfiguration(
            Properties properties) {
        Configuration conf = WaspConfiguration.create();
        for (Map.Entry entry : properties.entrySet()) {
            if (entry.getKey() instanceof String) {
                Object value = properties.get(entry.getKey());
                if (value instanceof String) {
                    conf.set((String) entry.getKey(), (String) value);
                }
            }
        }
        return conf;
    }

    /**
     * Parses a read-model name into the corresponding {@link ReadModel}
     * constant (case-insensitive). Falls back to {@link ReadModel#SNAPSHOT}
     * when the name is null, empty or unknown.
     *
     * Bug fix: the previous implementation compared the String against the enum
     * constant itself ({@code readModel.equals(model)}), which is always false,
     * so SNAPSHOT was returned unconditionally. We now compare against
     * {@code model.name()}.
     */
    public static ReadModel getReadModel(String readModel) {
        if (StringUtils.isNullOrEmpty(readModel)) {
            return ReadModel.SNAPSHOT;
        }
        for (ReadModel model : ReadModel.values()) {
            if (readModel.equalsIgnoreCase(model.name())) {
                return model;
            }
        }
        return ReadModel.SNAPSHOT;
    }
}
|
|
package ai.elimu.appstore.synchronization;
import android.content.Intent;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
import android.widget.Toast;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.Calendar;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import ai.elimu.appstore.BaseApplication;
import ai.elimu.appstore.R;
import ai.elimu.appstore.dao.ApplicationDao;
import ai.elimu.appstore.dao.ApplicationVersionDao;
import ai.elimu.appstore.model.Application;
import ai.elimu.appstore.model.ApplicationVersion;
import ai.elimu.appstore.rest.ApplicationService;
import ai.elimu.appstore.util.AppPrefs;
import ai.elimu.appstore.util.ChecksumHelper;
import ai.elimu.appstore.util.ConnectivityHelper;
import ai.elimu.appstore.util.DeviceInfoHelper;
import ai.elimu.appstore.util.UserPrefsHelper;
import ai.elimu.model.enums.admin.ApplicationStatus;
import ai.elimu.model.gson.admin.ApplicationGson;
import ai.elimu.model.gson.admin.ApplicationVersionGson;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import timber.log.Timber;
public class AppSynchronizationActivity extends AppCompatActivity {
private ApplicationService applicationService;
private View appSyncLoadingContainer;
private ExecutorService executorService = Executors.newSingleThreadExecutor();
private Handler mainThreadHandler;
@Override
protected void onCreate(Bundle savedInstanceState) {
Timber.i("onCreate");
super.onCreate(savedInstanceState);
// Create a Handler in UI thread to use for updating view from background threads
mainThreadHandler = new Handler();
setContentView(R.layout.activity_app_synchronization);
appSyncLoadingContainer = findViewById(R.id.appSyncLoadingContainer);
BaseApplication baseApplication = (BaseApplication) getApplication();
applicationService = baseApplication.getRetrofit(null).create(ApplicationService.class);
}
@Override
protected void onStart() {
Timber.i("onStart");
super.onStart();
boolean isWifiEnabled = ConnectivityHelper.isWifiEnabled(this);
Timber.i("isWifiEnabled: " + isWifiEnabled);
boolean isWifiConnected = ConnectivityHelper.isWifiConnected(this);
Timber.i("isWifiConnected: " + isWifiConnected);
// Check if server is reachable to start network API call
executorService.execute(new Runnable() {
@Override
public void run() {
boolean isServerReachable = ConnectivityHelper.isServerReachable(AppSynchronizationActivity.this);
Timber.i("isServerReachable: " + isServerReachable);
if (!isServerReachable) {
Timber.w(getString(R.string.server_is_not_reachable));
mainThreadHandler.post(new Runnable() {
@Override
public void run() {
Toast.makeText(AppSynchronizationActivity.this, getString(R.string.server_is_not_reachable),
Toast.LENGTH_SHORT).show();
}
});
// Display app list when there is no internet connection
displayAppList();
} else {
// Start download applications info
appSyncLoadingContainer.setVisibility(View.VISIBLE);
Call<ResponseBody> call = applicationService.getApplicationList(
DeviceInfoHelper.getDeviceId(getApplicationContext()),
ChecksumHelper.getChecksum(getApplicationContext()),
UserPrefsHelper.getLocale(getApplicationContext()).toString(),
DeviceInfoHelper.getDeviceModel(getApplicationContext()),
Build.VERSION.SDK_INT,
DeviceInfoHelper.getApplicationId(getApplicationContext()),
DeviceInfoHelper.getAppVersionCode(getApplicationContext()));
call.enqueue(new Callback<ResponseBody>() {
@Override
public void onResponse(Call<ResponseBody> call, Response<ResponseBody> response) {
Timber.i("onResponse");
appSyncLoadingContainer.setVisibility(View.GONE);
processAppListData(response);
displayAppList();
}
@Override
public void onFailure(Call<ResponseBody> call, Throwable t) {
Timber.e(t, "onFailure");
appSyncLoadingContainer.setVisibility(View.GONE);
mainThreadHandler.post(new Runnable() {
@Override
public void run() {
Toast.makeText(AppSynchronizationActivity.this, getString(R.string.server_is_not_reachable),
Toast.LENGTH_SHORT).show();
}
});
// Display app list when failing to get application list
displayAppList();
}
});
}
}
});
}
/**
* Handle app list response from REST API by storing the data in the SQLite database
* @param response The API response
*/
private void processAppListData(@NonNull Response<ResponseBody> response) {
Timber.i("processAppListData");
ApplicationDao applicationDao = ((BaseApplication) getApplicationContext()).getDaoSession().getApplicationDao();
ApplicationVersionDao applicationVersionDao = ((BaseApplication) getApplicationContext()).getDaoSession().getApplicationVersionDao();
try {
String jsonResponse = response.body().string();
Timber.i("jsonResponse: " + jsonResponse);
JSONObject jsonObject = new JSONObject(jsonResponse);
if (!"success".equals(jsonObject.getString("result"))) {
Timber.w("Download failed");
String errorDescription = jsonObject.getString("description");
Timber.w("errorDescription: " + errorDescription);
} else {
JSONArray jsonArrayApplications = jsonObject.getJSONArray("applications");
for (int i = 0; i < jsonArrayApplications.length(); i++) {
Type type = new TypeToken<ApplicationGson>(){}.getType();
ApplicationGson applicationGson = new Gson().fromJson(jsonArrayApplications.getString(i), type);
int listOrder = i + 1;
Timber.i("Synchronizing APK " + listOrder + "/" + jsonArrayApplications.length() + ": " + applicationGson.getPackageName() + " (status " + applicationGson.getApplicationStatus() + ")");
Application application = applicationDao.load(applicationGson.getId());
if (application == null) {
// Store new Application in database
application = new Application();
application.setId(applicationGson.getId());
application.setLocale(applicationGson.getLocale());
application.setPackageName(applicationGson.getPackageName());
application.setInfrastructural(applicationGson.isInfrastructural());
application.setLiteracySkills(applicationGson.getLiteracySkills());
application.setNumeracySkills(applicationGson.getNumeracySkills());
application.setApplicationStatus(applicationGson.getApplicationStatus());
application.setListOrder(listOrder);
long id = applicationDao.insert(application);
Timber.i("Stored Application in database with id " + id);
if (application.getApplicationStatus() == ApplicationStatus.ACTIVE) {
// Store ApplicationVersions
List<ApplicationVersionGson> applicationVersionGsons = applicationGson.getApplicationVersions();
Timber.i("applicationVersionGsons.size(): " + applicationVersionGsons.size());
for (ApplicationVersionGson applicationVersionGson : applicationVersionGsons) {
ApplicationVersion applicationVersion = applicationVersionDao.load(applicationVersionGson.getId());
if (applicationVersion == null) {
// Store new ApplicationVersion in database
applicationVersion = new ApplicationVersion();
applicationVersion.setId(applicationVersionGson.getId());
applicationVersion.setApplication(application);
applicationVersion.setFileSizeInKb(applicationVersionGson.getFileSizeInKb());
applicationVersion.setFileUrl(applicationVersionGson.getFileUrl());
applicationVersion.setChecksumMd5(applicationVersionGson.getChecksumMd5());
applicationVersion.setContentType(applicationVersionGson.getContentType());
applicationVersion.setVersionCode(applicationVersionGson.getVersionCode());
applicationVersion.setVersionName(applicationVersionGson.getVersionName());
applicationVersion.setLabel(applicationVersionGson.getLabel());
applicationVersion.setMinSdkVersion(applicationVersionGson.getMinSdkVersion());
applicationVersion.setStartCommand(applicationVersionGson.getStartCommand());
applicationVersion.setTimeUploaded(applicationVersionGson.getTimeUploaded());
long applicationVersionId = applicationVersionDao.insert(applicationVersion);
Timber.i("Stored ApplicationVersion in database with id " + applicationVersionId);
}
}
}
} else {
// Update existing Application in database
application.setId(applicationGson.getId());
application.setLocale(applicationGson.getLocale());
application.setPackageName(applicationGson.getPackageName());
application.setInfrastructural(applicationGson.isInfrastructural());
application.setLiteracySkills(applicationGson.getLiteracySkills());
application.setNumeracySkills(applicationGson.getNumeracySkills());
application.setApplicationStatus(applicationGson.getApplicationStatus());
application.setListOrder(listOrder);
applicationDao.update(application);
Timber.i("Updated Application in database with id " + application.getId());
if (application.getApplicationStatus() == ApplicationStatus.ACTIVE) {
// Update ApplicationVersions
List<ApplicationVersionGson> applicationVersionGsons = applicationGson.getApplicationVersions();
Timber.i("applicationVersionGsons.size(): " + applicationVersionGsons.size());
for (ApplicationVersionGson applicationVersionGson : applicationVersionGsons) {
ApplicationVersion applicationVersion = applicationVersionDao.load(applicationVersionGson.getId());
if (applicationVersion == null) {
// Store new ApplicationVersion in database
applicationVersion = new ApplicationVersion();
applicationVersion.setId(applicationVersionGson.getId());
applicationVersion.setApplication(application);
applicationVersion.setFileSizeInKb(applicationVersionGson.getFileSizeInKb());
applicationVersion.setFileUrl(applicationVersionGson.getFileUrl());
applicationVersion.setChecksumMd5(applicationVersionGson.getChecksumMd5());
applicationVersion.setContentType(applicationVersionGson.getContentType());
applicationVersion.setVersionCode(applicationVersionGson.getVersionCode());
applicationVersion.setVersionName(applicationVersionGson.getVersionName());
applicationVersion.setLabel(applicationVersionGson.getLabel());
applicationVersion.setMinSdkVersion(applicationVersionGson.getMinSdkVersion());
applicationVersion.setStartCommand(applicationVersionGson.getStartCommand());
applicationVersion.setTimeUploaded(applicationVersionGson.getTimeUploaded());
long applicationVersionId = applicationVersionDao.insert(applicationVersion);
Timber.i("Stored ApplicationVersion in database with id " + applicationVersionId);
} else {
// Update existing ApplicationVersion in database
// TODO
}
}
}
}
}
Timber.i("Synchronization complete!");
// Update time of last synchronization
AppPrefs.saveLastSyncTime(Calendar.getInstance().getTimeInMillis());
}
} catch (JSONException e) {
Timber.e(e);
} catch (IOException e) {
Timber.e(e);
}
}
/**
 * Navigates to the application list screen and finishes the current
 * activity so it is removed from the back stack.
 */
private void displayAppList() {
    Timber.i("displayAppList");
    Intent appListIntent = new Intent(getApplicationContext(), AppListActivity.class);
    startActivity(appListIntent);
    // Prevent the user from navigating back to this (now stale) screen.
    finish();
}
}
|
|
/*
* This file is generated by jOOQ.
*/
package org.killbill.billing.plugin.stripe.dao.gen.tables.records;
import java.time.LocalDateTime;
import org.jooq.Field;
import org.jooq.Record1;
import org.jooq.Record10;
import org.jooq.Row10;
import org.jooq.impl.UpdatableRecordImpl;
import org.jooq.types.ULong;
import org.killbill.billing.plugin.stripe.dao.gen.tables.StripePaymentMethods;
/**
 * jOOQ record mapped to the <code>killbill.stripe_payment_methods</code> table
 * (one row per Stripe payment method known to Kill Bill).
 *
 * <p>This class is generated by jOOQ. Do not edit it by hand; re-run the jOOQ
 * code generator after any schema change so this record stays in sync with the
 * table definition.</p>
 */
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class StripePaymentMethodsRecord extends UpdatableRecordImpl<StripePaymentMethodsRecord> implements Record10<ULong, String, String, String, Short, Short, String, LocalDateTime, LocalDateTime, String> {
private static final long serialVersionUID = 1159142458;
/**
 * Setter for <code>killbill.stripe_payment_methods.record_id</code>.
 */
public void setRecordId(ULong value) {
set(0, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.record_id</code>.
 */
public ULong getRecordId() {
return (ULong) get(0);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.kb_account_id</code>.
 */
public void setKbAccountId(String value) {
set(1, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.kb_account_id</code>.
 */
public String getKbAccountId() {
return (String) get(1);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.kb_payment_method_id</code>.
 */
public void setKbPaymentMethodId(String value) {
set(2, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.kb_payment_method_id</code>.
 */
public String getKbPaymentMethodId() {
return (String) get(2);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.stripe_id</code>.
 */
public void setStripeId(String value) {
set(3, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.stripe_id</code>.
 */
public String getStripeId() {
return (String) get(3);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.is_default</code>.
 */
public void setIsDefault(Short value) {
set(4, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.is_default</code>.
 */
public Short getIsDefault() {
return (Short) get(4);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.is_deleted</code>.
 */
public void setIsDeleted(Short value) {
set(5, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.is_deleted</code>.
 */
public Short getIsDeleted() {
return (Short) get(5);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.additional_data</code>.
 */
public void setAdditionalData(String value) {
set(6, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.additional_data</code>.
 */
public String getAdditionalData() {
return (String) get(6);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.created_date</code>.
 */
public void setCreatedDate(LocalDateTime value) {
set(7, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.created_date</code>.
 */
public LocalDateTime getCreatedDate() {
return (LocalDateTime) get(7);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.updated_date</code>.
 */
public void setUpdatedDate(LocalDateTime value) {
set(8, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.updated_date</code>.
 */
public LocalDateTime getUpdatedDate() {
return (LocalDateTime) get(8);
}
/**
 * Setter for <code>killbill.stripe_payment_methods.kb_tenant_id</code>.
 */
public void setKbTenantId(String value) {
set(9, value);
}
/**
 * Getter for <code>killbill.stripe_payment_methods.kb_tenant_id</code>.
 */
public String getKbTenantId() {
return (String) get(9);
}
// -------------------------------------------------------------------------
// Primary key information
// -------------------------------------------------------------------------
@Override
public Record1<ULong> key() {
return (Record1) super.key();
}
// -------------------------------------------------------------------------
// Record10 type implementation
// -------------------------------------------------------------------------
@Override
public Row10<ULong, String, String, String, Short, Short, String, LocalDateTime, LocalDateTime, String> fieldsRow() {
return (Row10) super.fieldsRow();
}
@Override
public Row10<ULong, String, String, String, Short, Short, String, LocalDateTime, LocalDateTime, String> valuesRow() {
return (Row10) super.valuesRow();
}
@Override
public Field<ULong> field1() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.RECORD_ID;
}
@Override
public Field<String> field2() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.KB_ACCOUNT_ID;
}
@Override
public Field<String> field3() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.KB_PAYMENT_METHOD_ID;
}
@Override
public Field<String> field4() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.STRIPE_ID;
}
@Override
public Field<Short> field5() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.IS_DEFAULT;
}
@Override
public Field<Short> field6() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.IS_DELETED;
}
@Override
public Field<String> field7() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.ADDITIONAL_DATA;
}
@Override
public Field<LocalDateTime> field8() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.CREATED_DATE;
}
@Override
public Field<LocalDateTime> field9() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.UPDATED_DATE;
}
@Override
public Field<String> field10() {
return StripePaymentMethods.STRIPE_PAYMENT_METHODS.KB_TENANT_ID;
}
@Override
public ULong component1() {
return getRecordId();
}
@Override
public String component2() {
return getKbAccountId();
}
@Override
public String component3() {
return getKbPaymentMethodId();
}
@Override
public String component4() {
return getStripeId();
}
@Override
public Short component5() {
return getIsDefault();
}
@Override
public Short component6() {
return getIsDeleted();
}
@Override
public String component7() {
return getAdditionalData();
}
@Override
public LocalDateTime component8() {
return getCreatedDate();
}
@Override
public LocalDateTime component9() {
return getUpdatedDate();
}
@Override
public String component10() {
return getKbTenantId();
}
@Override
public ULong value1() {
return getRecordId();
}
@Override
public String value2() {
return getKbAccountId();
}
@Override
public String value3() {
return getKbPaymentMethodId();
}
@Override
public String value4() {
return getStripeId();
}
@Override
public Short value5() {
return getIsDefault();
}
@Override
public Short value6() {
return getIsDeleted();
}
@Override
public String value7() {
return getAdditionalData();
}
@Override
public LocalDateTime value8() {
return getCreatedDate();
}
@Override
public LocalDateTime value9() {
return getUpdatedDate();
}
@Override
public String value10() {
return getKbTenantId();
}
@Override
public StripePaymentMethodsRecord value1(ULong value) {
setRecordId(value);
return this;
}
@Override
public StripePaymentMethodsRecord value2(String value) {
setKbAccountId(value);
return this;
}
@Override
public StripePaymentMethodsRecord value3(String value) {
setKbPaymentMethodId(value);
return this;
}
@Override
public StripePaymentMethodsRecord value4(String value) {
setStripeId(value);
return this;
}
@Override
public StripePaymentMethodsRecord value5(Short value) {
setIsDefault(value);
return this;
}
@Override
public StripePaymentMethodsRecord value6(Short value) {
setIsDeleted(value);
return this;
}
@Override
public StripePaymentMethodsRecord value7(String value) {
setAdditionalData(value);
return this;
}
@Override
public StripePaymentMethodsRecord value8(LocalDateTime value) {
setCreatedDate(value);
return this;
}
@Override
public StripePaymentMethodsRecord value9(LocalDateTime value) {
setUpdatedDate(value);
return this;
}
@Override
public StripePaymentMethodsRecord value10(String value) {
setKbTenantId(value);
return this;
}
@Override
public StripePaymentMethodsRecord values(ULong value1, String value2, String value3, String value4, Short value5, Short value6, String value7, LocalDateTime value8, LocalDateTime value9, String value10) {
value1(value1);
value2(value2);
value3(value3);
value4(value4);
value5(value5);
value6(value6);
value7(value7);
value8(value8);
value9(value9);
value10(value10);
return this;
}
// -------------------------------------------------------------------------
// Constructors
// -------------------------------------------------------------------------
/**
 * Create a detached StripePaymentMethodsRecord
 */
public StripePaymentMethodsRecord() {
super(StripePaymentMethods.STRIPE_PAYMENT_METHODS);
}
/**
 * Create a detached, initialised StripePaymentMethodsRecord
 */
public StripePaymentMethodsRecord(ULong recordId, String kbAccountId, String kbPaymentMethodId, String stripeId, Short isDefault, Short isDeleted, String additionalData, LocalDateTime createdDate, LocalDateTime updatedDate, String kbTenantId) {
super(StripePaymentMethods.STRIPE_PAYMENT_METHODS);
set(0, recordId);
set(1, kbAccountId);
set(2, kbPaymentMethodId);
set(3, stripeId);
set(4, isDefault);
set(5, isDeleted);
set(6, additionalData);
set(7, createdDate);
set(8, updatedDate);
set(9, kbTenantId);
}
}
|
|
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.superuser.web;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.kuali.rice.kew.actionrequest.ActionRequestValue;
import org.kuali.rice.kew.api.KewApiConstants;
import org.kuali.rice.kew.api.KewApiServiceLocator;
import org.kuali.rice.kew.api.WorkflowDocumentFactory;
import org.kuali.rice.kew.api.action.ActionRequestType;
import org.kuali.rice.kew.api.action.AdHocRevoke;
import org.kuali.rice.kew.api.action.DocumentActionParameters;
import org.kuali.rice.kew.api.action.ReturnPoint;
import org.kuali.rice.kew.api.action.WorkflowDocumentActionsService;
import org.kuali.rice.kew.api.document.WorkflowDocumentService;
import org.kuali.rice.kew.api.document.node.RouteNodeInstance;
import org.kuali.rice.kew.api.exception.WorkflowException;
import org.kuali.rice.kew.doctype.bo.DocumentType;
import org.kuali.rice.kew.exception.WorkflowServiceErrorException;
import org.kuali.rice.kew.exception.WorkflowServiceErrorImpl;
import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValue;
import org.kuali.rice.kew.service.KEWServiceLocator;
import org.kuali.rice.kew.web.AppSpecificRouteRecipient;
import org.kuali.rice.kew.web.KewKualiAction;
import org.kuali.rice.kim.api.group.GroupService;
import org.kuali.rice.kim.api.identity.principal.Principal;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import org.kuali.rice.krad.UserSession;
import org.kuali.rice.krad.exception.ValidationException;
import org.kuali.rice.krad.util.GlobalVariables;
import org.kuali.rice.krad.util.KRADConstants;
import org.kuali.rice.ksb.api.KsbApiServiceLocator;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.namespace.QName;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
* A Struts Action which provides super user functionality.
*
* @author Kuali Rice Team ([email protected])
*/
public class SuperUserAction extends KewKualiAction {
private static final org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(SuperUserAction.class);
public static final String UNAUTHORIZED = "authorizationFailure";
//public ActionForward start(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception {
// defaultDispatch(mapping, form, request, response);
//}
/**
 * Central Struts entry point. Re-initializes the form (loading the route
 * header, authorization state and pending action requests) before every
 * dispatch, then delegates to the standard KewKualiAction dispatching.
 *
 * @throws Exception propagated from initForm or the dispatched method
 */
@Override
public ActionForward execute(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response)
throws Exception {
// initForm must run before dispatch so every action method sees fresh state.
initForm(request, form);
return super.execute(mapping, form, request, response);
}
/**
 * Refreshes the page: clears the stale action request list, rebuilds the
 * form from the current document state, and re-renders the default view.
 */
@Override
public ActionForward refresh(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
// Clear before initForm so requests are not duplicated when re-loaded.
((SuperUserForm) form).getActionRequests().clear();
initForm(request, form);
return defaultDispatch(mapping, form, request, response);
}
/**
 * Prepares the doc handler redirect URL so the document can be rendered in
 * super user mode, then forwards to the default view.
 */
public ActionForward displaySuperUserDocument(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
    SuperUserForm suForm = (SuperUserForm) form;
    // URL shape: <redirectPage>?docId=<id>&<commandParam>=<superuserCommand>
    String handlerUrl = KewApiConstants.DOC_HANDLER_REDIRECT_PAGE
            + "?docId=" + suForm.getDocumentId()
            + "&" + KewApiConstants.COMMAND_PARAMETER
            + "=" + KewApiConstants.SUPERUSER_COMMAND;
    suForm.setDocHandlerUrl(handlerUrl);
    return defaultDispatch(mapping, form, request, response);
}
/**
 * Super-user-approves the document at its current route node (a "route level"
 * approve rather than a full blanket approve), records a confirmation message,
 * and re-renders the page with refreshed form state.
 */
public ActionForward routeLevelApprove(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
    LOG.info("entering routeLevelApprove()...");
    SuperUserForm suForm = (SuperUserForm) form;
    String docId = suForm.getRouteHeader().getDocumentId();
    WorkflowDocumentActionsService actionsService = getWorkflowDocumentActionsService(docId);
    DocumentActionParameters actionParams = DocumentActionParameters.create(docId,
            getUserSession(request).getPrincipalId(), suForm.getAnnotation());
    actionsService.superUserNodeApprove(actionParams, suForm.isRunPostProcessorLogic(),
            suForm.getDestNodeName());
    saveDocumentMessage("general.routing.superuser.routeLevelApproved", request, suForm.getDocumentId(),
            null);
    LOG.info("exiting routeLevelApprove()...");
    // Rebuild the form so the view reflects the post-approval document state.
    suForm.getActionRequests().clear();
    initForm(request, form);
    return defaultDispatch(mapping, form, request, response);
}
/**
 * Performs a super user blanket approve of the whole document, records a
 * confirmation message, and re-renders the page with refreshed form state.
 */
public ActionForward approve(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
    LOG.info("entering approve() ...");
    SuperUserForm suForm = (SuperUserForm) form;
    String docId = suForm.getRouteHeader().getDocumentId();
    WorkflowDocumentActionsService actionsService = getWorkflowDocumentActionsService(docId);
    DocumentActionParameters actionParams = DocumentActionParameters.create(docId,
            getUserSession(request).getPrincipalId(), suForm.getAnnotation());
    actionsService.superUserBlanketApprove(actionParams, suForm.isRunPostProcessorLogic());
    saveDocumentMessage("general.routing.superuser.approved", request, suForm.getDocumentId(), null);
    LOG.info("exiting approve() ...");
    // Rebuild the form so the view reflects the post-approval document state.
    suForm.getActionRequests().clear();
    initForm(request, form);
    return defaultDispatch(mapping, form, request, response);
}
/**
 * Super-user-disapproves the document, records a confirmation message, and
 * re-renders the page with refreshed form state.
 */
public ActionForward disapprove(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
    LOG.info("entering disapprove() ...");
    SuperUserForm suForm = (SuperUserForm) form;
    String docId = suForm.getRouteHeader().getDocumentId();
    WorkflowDocumentActionsService actionsService = getWorkflowDocumentActionsService(docId);
    DocumentActionParameters actionParams = DocumentActionParameters.create(docId,
            getUserSession(request).getPrincipalId(), suForm.getAnnotation());
    actionsService.superUserDisapprove(actionParams, suForm.isRunPostProcessorLogic());
    saveDocumentMessage("general.routing.superuser.disapproved", request, suForm.getDocumentId(), null);
    LOG.info("exiting disapprove() ...");
    // Rebuild the form so the view reflects the post-disapproval document state.
    suForm.getActionRequests().clear();
    initForm(request, form);
    return defaultDispatch(mapping, form, request, response);
}
/**
 * Super-user-cancels the document, records a confirmation message, and
 * re-renders the page with refreshed form state.
 */
public ActionForward cancel(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
    LOG.info("entering cancel() ...");
    SuperUserForm suForm = (SuperUserForm) form;
    String docId = suForm.getRouteHeader().getDocumentId();
    WorkflowDocumentActionsService actionsService = getWorkflowDocumentActionsService(docId);
    DocumentActionParameters actionParams = DocumentActionParameters.create(docId,
            getUserSession(request).getPrincipalId(), suForm.getAnnotation());
    actionsService.superUserCancel(actionParams, suForm.isRunPostProcessorLogic());
    saveDocumentMessage("general.routing.superuser.canceled", request, suForm.getDocumentId(), null);
    LOG.info("exiting cancel() ...");
    // Rebuild the form so the view reflects the post-cancel document state.
    suForm.getActionRequests().clear();
    initForm(request, form);
    return defaultDispatch(mapping, form, request, response);
}
/**
 * Returns the document to a previously traversed route node (selected on the
 * form as the "return destination node") via super user action, records a
 * confirmation message, and re-renders the page with refreshed form state.
 */
public ActionForward returnToPreviousNode(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
    LOG.info("entering returnToPreviousNode() ...");
    SuperUserForm superUserForm = (SuperUserForm) form;
    String documentId = superUserForm.getRouteHeader().getDocumentId();
    WorkflowDocumentActionsService documentActions = getWorkflowDocumentActionsService(documentId);
    DocumentActionParameters parameters = DocumentActionParameters.create(documentId, getUserSession(request)
            .getPrincipalId(), superUserForm.getAnnotation());
    documentActions.superUserReturnToPreviousNode(parameters, superUserForm.isRunPostProcessorLogic(),
            ReturnPoint.create(superUserForm.getReturnDestNodeName()));
    saveDocumentMessage("general.routing.returnedToPreviousNode", request, "document", superUserForm
            .getReturnDestNodeName().toString());
    // Fixed: exit log previously said "returnToPreviousRouteLevel", which did
    // not match this method's name and made log tracing misleading.
    LOG.info("exiting returnToPreviousNode() ...");
    // Rebuild the form so the view reflects the post-return document state.
    superUserForm.getActionRequests().clear();
    initForm(request, form);
    return defaultDispatch(mapping, form, request, response);
}
/**
 * Super-user-takes a single pending action request. The recipient code,
 * principal/workgroup IDs, action request ID and requested action are packed
 * into the Struts "methodToCall" parameter by the JSP and unpacked here.
 *
 * <p>Fixes over the original: the requested-action comparisons are now
 * constant-first (null-safe — a missing PARM6 previously threw an NPE), and
 * the redundant "approved" branch, whose body was identical to the default,
 * has been folded into the default case.</p>
 */
public ActionForward actionRequestApprove(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
    LOG.info("entering actionRequestApprove() ...");
    SuperUserForm superUserForm = (SuperUserForm) form;
    // Retrieve the relevant arguments from the "methodToCall" parameter.
    String methodToCallAttr = (String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE);
    superUserForm.setActionTakenRecipientCode(StringUtils.substringBetween(methodToCallAttr,
            KRADConstants.METHOD_TO_CALL_PARM1_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM1_RIGHT_DEL));
    superUserForm.setActionTakenNetworkId(StringUtils.substringBetween(methodToCallAttr,
            KRADConstants.METHOD_TO_CALL_PARM2_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM2_RIGHT_DEL));
    superUserForm.setActionTakenWorkGroupId(StringUtils.substringBetween(methodToCallAttr,
            KRADConstants.METHOD_TO_CALL_PARM4_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM4_RIGHT_DEL));
    superUserForm.setActionTakenActionRequestId(StringUtils.substringBetween(methodToCallAttr,
            KRADConstants.METHOD_TO_CALL_PARM5_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM5_RIGHT_DEL));
    LOG.debug("Routing super user action request approve action");
    // Post-processor logic only runs when this request's checkbox was ticked.
    boolean runPostProcessorLogic = ArrayUtils.contains(superUserForm.getActionRequestRunPostProcessorCheck(),
            superUserForm.getActionTakenActionRequestId());
    String documentId = superUserForm.getRouteHeader().getDocumentId();
    WorkflowDocumentActionsService documentActions = getWorkflowDocumentActionsService(documentId);
    DocumentActionParameters parameters = DocumentActionParameters.create(documentId, getUserSession(request)
            .getPrincipalId(), superUserForm.getAnnotation());
    documentActions.superUserTakeRequestedAction(parameters, runPostProcessorLogic,
            superUserForm.getActionTakenActionRequestId());
    // Choose the confirmation message key for the action that was taken.
    String actionRequested = StringUtils.substringBetween(methodToCallAttr,
            KRADConstants.METHOD_TO_CALL_PARM6_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM6_RIGHT_DEL);
    String messageString;
    if ("acknowledge".equalsIgnoreCase(actionRequested)) {
        messageString = "general.routing.superuser.actionRequestAcknowledged";
    } else if ("FYI".equalsIgnoreCase(actionRequested)) {
        messageString = "general.routing.superuser.actionRequestFYI";
    } else if ("complete".equalsIgnoreCase(actionRequested)) {
        messageString = "general.routing.superuser.actionRequestCompleted";
    } else {
        // "approved" and any unrecognized/missing value share the same message.
        messageString = "general.routing.superuser.actionRequestApproved";
    }
    saveDocumentMessage(messageString, request, superUserForm.getDocumentId(),
            superUserForm.getActionTakenActionRequestId());
    LOG.info("exiting actionRequestApprove() ...");
    superUserForm.getActionRequests().clear();
    initForm(request, form);
    // If the action request was also an app specific request, remove it from the app specific route recipient list.
    int removalIndex = findAppSpecificRecipientIndex(superUserForm, superUserForm.getActionTakenActionRequestId());
    if (removalIndex >= 0) {
        superUserForm.getAppSpecificRouteList().remove(removalIndex);
    }
    return defaultDispatch(mapping, form, request, response);
}
/**
 * Locates the app specific route recipient whose routing was handled by the
 * given action request.
 *
 * @param superUserForm the form whose app specific route list is searched
 * @param actionRequestId the ID of the action request that routed the recipient
 * @return the index of the matching recipient, or -1 if none matches
 */
private int findAppSpecificRecipientIndex(SuperUserForm superUserForm, String actionRequestId) {
    List<?> recipients = superUserForm.getAppSpecificRouteList();
    for (int index = 0; index < recipients.size(); index++) {
        String candidateRequestId = ((AppSpecificRouteRecipient) recipients.get(index)).getActionRequestId();
        // StringUtils.equals is null-safe on both sides.
        if (StringUtils.equals(candidateRequestId, actionRequestId)) {
            return index;
        }
    }
    return -1;
}
/**
 * Loads the document's route header, checks super user authorization, and
 * populates the form with pending action requests, future node names and the
 * workflow document. Called before every dispatch (see execute).
 *
 * @return always null; results are communicated through the form and the
 *         global message map
 * @throws ValidationException if no route header exists for the form's document ID
 */
public ActionForward initForm(HttpServletRequest request, ActionForm form) throws Exception {
// Expose workflow constants to the JSP layer.
request.setAttribute("Constants", getServlet().getServletContext().getAttribute("KewApiConstants"));
SuperUserForm superUserForm = (SuperUserForm) form;
DocumentRouteHeaderValue routeHeader = KEWServiceLocator.getRouteHeaderService().getRouteHeader(
superUserForm.getDocumentId());
if(routeHeader == null) {
throw new ValidationException("No route header ID found. Try searching for the document again using the super user document search.");
}
superUserForm.setRouteHeader(routeHeader);
String principalId = getUserSession(request).getPrincipalId();
// Super user actions require routing-administration permission on the doc type.
boolean isAuthorized = KEWServiceLocator.getDocumentTypePermissionService().canAdministerRouting(principalId,
routeHeader.getDocumentType());
superUserForm.setAuthorized(isAuthorized);
if (!isAuthorized) {
saveDocumentMessage("general.routing.superuser.notAuthorized", request, superUserForm.getDocumentId(), null);
return null;
}
superUserForm.setFutureNodeNames(KEWServiceLocator.getRouteNodeService().findFutureNodeNames(
routeHeader.getDocumentId()));
// Collect every pending action request for display (the old filter that kept
// only approve requests is intentionally left commented out below).
Collection actionRequests = KEWServiceLocator.getActionRequestService().findPendingByDoc(
routeHeader.getDocumentId());
Iterator requestIterator = actionRequests.iterator();
while (requestIterator.hasNext()) {
ActionRequestValue req = (ActionRequestValue) requestIterator.next();
// if (KewApiConstants.ACTION_REQUEST_APPROVE_REQ.equalsIgnoreCase(req.getActionRequested())) {
superUserForm.getActionRequests().add(req);
// }
}
superUserForm.setDocId(superUserForm.getDocumentId());
if (superUserForm.getDocId() != null) {
// Load the API-level workflow document for ad hoc routing support.
superUserForm.setWorkflowDocument(WorkflowDocumentFactory.loadDocument(getUserSession(request)
.getPrincipalId(), superUserForm.getDocId()));
superUserForm.establishVisibleActionRequestCds();
}
return null;
}
/**
 * Records an informational message against the "document" property of the
 * global message map.
 *
 * @param messageKey resource key of the message
 * @param request current request (unused; kept for signature compatibility)
 * @param subVariable1 first substitution value
 * @param subVariable2 optional second substitution value; may be null
 */
private void saveDocumentMessage(String messageKey, HttpServletRequest request, String subVariable1,
String subVariable2) {
    // Only pass the second substitution value when it is actually present.
    if (subVariable2 != null) {
        GlobalVariables.getMessageMap().putInfo("document", messageKey, subVariable1, subVariable2);
    } else {
        GlobalVariables.getMessageMap().putInfo("document", messageKey, subVariable1);
    }
}
/**
 * Ad-hoc-routes the document to an app specific recipient (a person or a
 * workgroup, determined by PARM1 of the "methodToCall" attribute), then adds
 * the recipient to the form's app specific route list tagged with the ID of
 * the action request that was just generated.
 *
 * @throws ValidationException if the recipient is invalid or the requested
 *         action is no longer available for this document
 * @throws WorkflowServiceErrorException if the ad hoc request itself fails
 */
public ActionForward routeToAppSpecificRecipient(ActionMapping mapping, ActionForm form,
HttpServletRequest request, HttpServletResponse response) throws Exception {
SuperUserForm superUserForm = (SuperUserForm) form;
//super.routeToAppSpecificRecipient(mapping, form, request, response);
//WorkflowRoutingForm routingForm = (WorkflowRoutingForm) form;
// PARM1 carries the recipient type (person vs. workgroup).
String routeType = StringUtils.substringBetween(
(String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE),
KRADConstants.METHOD_TO_CALL_PARM1_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM1_RIGHT_DEL);
AppSpecificRouteRecipient recipient = null;
if (KewApiConstants.PERSON.equals(routeType)) {
recipient = superUserForm.getAppSpecificRouteRecipient();
recipient.setActionRequested(superUserForm.getAppSpecificRouteActionRequestCd());
superUserForm.setAppSpecificPersonId(recipient.getId());
} else {
// Workgroup recipients use the second recipient/action-code slot on the form.
recipient = superUserForm.getAppSpecificRouteRecipient2();
recipient.setActionRequested(superUserForm.getAppSpecificRouteActionRequestCd2());
superUserForm.setAppSpecificWorkgroupId(recipient.getId());
}
validateAppSpecificRoute(recipient);
// Make sure that the requested action is still available.
superUserForm.establishVisibleActionRequestCds();
if (superUserForm.getAppSpecificRouteActionRequestCds().get(recipient.getActionRequested()) == null) {
GlobalVariables.getMessageMap().putError("appSpecificRouteRecipient" +
((KewApiConstants.WORKGROUP.equals(recipient.getType())) ? "2" : "") + ".id",
"appspecificroute.actionrequested.invalid");
throw new ValidationException("The requested action of '" + recipient.getActionRequested()
+ "' is no longer available for this document");
}
try {
String routeNodeName = getAdHocRouteNodeName(superUserForm.getWorkflowDocument().getDocumentId());
//if (KewApiConstants.PERSON.equals(recipient.getType())) {
if (KewApiConstants.PERSON.equals(routeType)) {
// Resolve the principal name the user typed to a principal ID before routing.
String recipientPrincipalId = KEWServiceLocator.getIdentityHelperService().getIdForPrincipalName(
recipient.getId());
superUserForm.getWorkflowDocument().adHocToPrincipal(
ActionRequestType.fromCode(recipient.getActionRequested()), routeNodeName,
superUserForm.getAnnotation(), recipientPrincipalId, "", true);
} else {
String recipientGroupId = KEWServiceLocator.getIdentityHelperService().getIdForGroupName(
recipient.getNamespaceCode(), recipient.getId());
superUserForm.getWorkflowDocument().adHocToGroup(
ActionRequestType.fromCode(recipient.getActionRequested()), routeNodeName,
superUserForm.getAnnotation(), recipientGroupId, "", true);
}
} catch (Exception e) {
LOG.error("Error generating app specific route request", e);
throw new WorkflowServiceErrorException("AppSpecific Route Error", new WorkflowServiceErrorImpl(
"AppSpecific Route Error", "appspecificroute.systemerror"));
}
superUserForm.getActionRequests().clear();
initForm(request, form);
// Retrieve the ID of the latest action request and store it with the app specific route recipient.
ActionRequestValue latestActnReq = getLatestActionRequest(superUserForm);
if (latestActnReq != null) {
recipient.setActionRequestId(latestActnReq.getActionRequestId());
}
// Add the recipient to the list.
superUserForm.getAppSpecificRouteList().add(recipient);
superUserForm.resetAppSpecificRoute();
return start(mapping, form, request, response);
}
/**
 * Returns the most recently created action request on the form, or null when
 * the form holds no action requests. "Most recent" is decided by creation
 * date; requests or dates that are null sort ahead of dated ones.
 *
 * <p>Note: this sorts the form's action request list in place (newest first).</p>
 *
 * @param superUserForm the form whose action requests are examined
 * @return the newest action request, or null if the list is null or empty
 */
private ActionRequestValue getLatestActionRequest(SuperUserForm superUserForm) {
    // FIXME: KULRICE-5201 forced this date-based approach because action request
    // IDs are no longer numeric (and were never guaranteed ordinal by creation
    // time anyway). TODO: KULRICE-5329 — verify this against the old
    // highest-numeric-ID selection it replaced.
    List<ActionRequestValue> pendingRequests = superUserForm.getActionRequests();
    if (pendingRequests == null || pendingRequests.isEmpty()) {
        return null;
    }
    // Sort descending by creation date; nulls (request or date) come first.
    Collections.sort(pendingRequests, new Comparator<ActionRequestValue>() {
        @Override
        public int compare(ActionRequestValue left, ActionRequestValue right) {
            if (left == null && right == null) {
                return 0;
            }
            if (left == null) {
                return -1;
            }
            if (right == null) {
                return 1;
            }
            if (left.getCreateDate() == null && right.getCreateDate() == null) {
                return 0;
            }
            if (left.getCreateDate() == null) {
                return -1;
            }
            if (right.getCreateDate() == null) {
                return 1;
            }
            // Reversed operand order yields descending (newest-first) order.
            return right.getCreateDate().compareTo(left.getCreateDate());
        }
    });
    // With the list sorted newest-first, the head is the most recent request.
    return pendingRequests.get(0);
}
/**
 * Removes an existing AppSpecificRouteRecipient from the list, revoking the
 * corresponding ad hoc request: by action request ID when one was recorded for
 * the recipient, otherwise by resolving the recipient's principal/group ID.
 * The recipient index comes from PARM1 of the "methodToCall" attribute.
 *
 * @throws WorkflowException if no index was supplied or it is out of range
 */
public ActionForward removeAppSpecificRecipient(ActionMapping mapping, ActionForm form, HttpServletRequest request,
HttpServletResponse response) throws Exception {
SuperUserForm superUserForm = (SuperUserForm) form;
// Make sure a valid route recipient index was specified in the "methodToCall" attribute.
String strIndex = StringUtils.substringBetween(
(String) request.getAttribute(KRADConstants.METHOD_TO_CALL_ATTRIBUTE),
KRADConstants.METHOD_TO_CALL_PARM1_LEFT_DEL, KRADConstants.METHOD_TO_CALL_PARM1_RIGHT_DEL);
if (StringUtils.isBlank(strIndex)) {
throw new WorkflowException("No adhoc route recipient index specified");
}
int removeIndex = Integer.parseInt(strIndex);
if (removeIndex < 0 || removeIndex >= superUserForm.getAppSpecificRouteList().size()) {
throw new WorkflowException("Invalid adhoc route recipient index specified");
}
// Remove the specified recipient from the routing, based on the recipient's ID and the ID of the action request that handled the recipient.
AppSpecificRouteRecipient removedRec = (AppSpecificRouteRecipient) superUserForm.getAppSpecificRouteList().get(
removeIndex);
if (removedRec.getActionRequestId() != null) {
// Preferred path: revoke directly by the recorded action request ID.
superUserForm.getWorkflowDocument().revokeAdHocRequestById(removedRec.getActionRequestId().toString(), "");
} else {
AdHocRevoke adHocRevoke = null;
// Set the ID according to whether the recipient is a person or a group.
if (KewApiConstants.PERSON.equals(removedRec.getType())) {
adHocRevoke = AdHocRevoke.createRevokeFromPrincipal(KEWServiceLocator.getIdentityHelperService()
.getIdForPrincipalName(removedRec.getId()));
} else {
adHocRevoke = AdHocRevoke.createRevokeFromGroup(KEWServiceLocator.getIdentityHelperService()
.getIdForGroupName(removedRec.getNamespaceCode(), removedRec.getId()));
}
superUserForm.getWorkflowDocument().revokeAdHocRequests(adHocRevoke, "");
}
superUserForm.getAppSpecificRouteList().remove(removeIndex);
// Rebuild the form so the view reflects the post-revocation state.
superUserForm.getActionRequests().clear();
initForm(request, form);
return start(mapping, form, request, response);
}
/**
 * Looks up the WorkflowDocumentActionsService for the application that owns the given
 * document, falling back to the locally-registered service when no remote SOAP endpoint
 * is available on the service bus.
 *
 * @param documentId id of the document whose owning application determines the endpoint
 * @return a usable WorkflowDocumentActionsService, never null in practice
 */
private WorkflowDocumentActionsService getWorkflowDocumentActionsService(String documentId) {
    DocumentType documentType = KEWServiceLocator.getDocumentTypeService().findByDocumentId(documentId);
    QName soapServiceName = new QName(KewApiConstants.Namespaces.KEW_NAMESPACE_2_0,
            KewApiConstants.ServiceNames.WORKFLOW_DOCUMENT_ACTIONS_SERVICE_SOAP);
    WorkflowDocumentActionsService remoteService = (WorkflowDocumentActionsService)
            KsbApiServiceLocator.getServiceBus().getService(soapServiceName, documentType.getApplicationId());
    // Fall back to the local implementation when the bus has no endpoint registered.
    return remoteService != null ? remoteService : KewApiServiceLocator.getWorkflowDocumentActionsService();
}
/**
 * Validates a single ad hoc route recipient: the id must be non-blank and must resolve
 * to an existing principal (person recipients) or group (workgroup recipients). Errors
 * accumulate on the global message map; if any are present afterwards a
 * ValidationException is thrown.
 *
 * @param recipient the recipient row to validate
 * @throws ValidationException when any validation error was recorded
 */
protected void validateAppSpecificRoute(AppSpecificRouteRecipient recipient) {
    boolean workgroup = KewApiConstants.WORKGROUP.equals(recipient.getType());
    String recipientId = recipient.getId();
    if (recipientId == null || recipientId.trim().equals("")) {
        // Workgroup recipients report against the "...2.id" field name.
        GlobalVariables.getMessageMap().putError(
                "appSpecificRouteRecipient" + (workgroup ? "2" : "") + ".id",
                "appspecificroute.recipient.required");
    } else if (KewApiConstants.PERSON.equals(recipient.getType())) {
        Principal principal = KimApiServiceLocator.getIdentityService().getPrincipalByPrincipalName(
                recipientId);
        if (principal == null) {
            LOG.error("App Specific user recipient not found");
            GlobalVariables.getMessageMap().putError("appSpecificRouteRecipient.id",
                    "appspecificroute.user.invalid");
        }
    } else if (workgroup) {
        if (getGroupService().getGroupByNamespaceCodeAndName(recipient.getNamespaceCode(), recipientId) == null) {
            GlobalVariables.getMessageMap().putError("appSpecificRouteRecipient2.id",
                    "appspecificroute.workgroup.invalid");
        }
    }
    if (GlobalVariables.getMessageMap().hasErrors()) {
        throw new ValidationException("AppSpecific Route validation Errors");
    }
}
/**
 * Determines which route node an ad hoc request should be attached to: the first
 * active node if any exist, otherwise the first terminal node.
 *
 * @param documentId the workflow document to inspect
 * @return the name of the chosen route node
 * @throws WorkflowException when the document has neither active nor terminal nodes
 */
protected String getAdHocRouteNodeName(String documentId) throws WorkflowException {
    WorkflowDocumentService documentService = KewApiServiceLocator.getWorkflowDocumentService();
    List<RouteNodeInstance> candidates = documentService.getActiveRouteNodeInstances(documentId);
    if (candidates == null || candidates.isEmpty()) {
        // No active nodes -- fall back to the terminal nodes.
        candidates = documentService.getTerminalRouteNodeInstances(documentId);
    }
    if (candidates != null && !candidates.isEmpty()) {
        return candidates.get(0).getName();
    }
    throw new WorkflowException("Could not locate a node on the document to send the ad hoc request to.");
}
/** Convenience accessor for the KIM group service used by recipient validation. */
private GroupService getGroupService() {
    return KimApiServiceLocator.getGroupService();
}
/**
 * Returns the user session bound to the current thread via GlobalVariables.
 * NOTE(review): the {@code request} argument is unused here; presumably kept for
 * signature compatibility with existing callers -- confirm before removing.
 */
public static UserSession getUserSession(HttpServletRequest request) {
    return GlobalVariables.getUserSession();
}
}
|
|
package com.holub.hml;
import com.holub.text.ReportingStream;
import com.holub.text.Text;
import com.holub.util.ExtendedLogger;
import com.holub.util.Places;
import java.io.FileWriter;
import java.io.IOException;
import java.io.FileReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Calendar;
import java.util.Date;
/**
 * Entry point and top-level driver for the HML (Handy Markup Language) processor.
 * Parses command-line arguments, runs every input file (or standard input) through
 * the filter-pass pipeline in {@link #expand(Text)}, and wraps the result with the
 * hml.head / hml.tail configuration files on output.
 *
 * Fixes relative to the previous revision:
 *  - doMain() now returns -1 on an uncaught exception, matching its documented
 *    contract (it previously returned 1, while -2/-3 already matched the docs).
 *  - processInWebContext() no longer mislabels its log message as "in main".
 */
public class Hml
{
    private static ExtendedLogger log = ExtendedLogger.getLogger(Hml.class);

    private static final String USAGE =
        new Text("\n",
            "Usage: java [-DCONFIG=/path/to/config/directory] com.holub.hml.Hml [-o outputFile] [--out outputFile] [files...]",
            "",
            "Process the listed files (or take input from standard input",
            "if no files are listed). Send processed input to standard output or to the file",
            "specified by most recent -o or --out.",
            "",
            "The -DCONFIG flag, if present, lets you specify a location for custom configuration files.",
            "",
            "This program (c)2013, Allen I Holub. Permission is granted to use this program",
            "for personal use only. There are no restrictions on distributing the output of this program."
        ).toString();

    //----------------------------------------------------------------------
    // Processing pipeline components; all share one Configuration and one
    // error-reporting stream.
    private final Configuration config;
    private final ReportingStream error;
    private final Printer outputPrinter;
    private final NoteSet endNotes;
    private final Macro macroManager;
    private final Filter include;
    private final Tags tags;
    private final Filter codeSnippets;
    private final Listing listing;
    private final Filter unmapEntities;
    private final Titles title;

    //----------------------------------------------------------------------
    /** Create an Hml processor that writes to the indicated defaultOutput
     *  writer and writes errors on the specified error writer.
     *  @param defaultOutput The initial output writer. This writer
     *      can be replaced by calling {@link #openNewOutputStream(String)}.
     *  @param errorWriter all error messages are sent here.
     */
    public Hml( Writer defaultOutput, Writer errorWriter )
    {
        this.outputPrinter = new Printer(defaultOutput);
        this.error         = new ReportingStream(errorWriter);
        this.config        = new Configuration ( error );

        endNotes      = new NoteSet       ( config );
        macroManager  = new Macro         ( config );
        include       = new Include       ( config );
        tags          = new Tags          ( config, endNotes );
        codeSnippets  = new CodeSnippets  ( config );
        listing       = new Listing       ( config );
        unmapEntities = new EntityUnmapper( config );
        title         = new Titles        ( config );
    }

    //----------------------------------------------------------------------
    /** Command-line entry point: prints the banner, delegates to doMain(),
     *  then uses the error count as the process exit status.
     */
    public static void main( String[] args ) throws Exception
    {
        Calendar c = Calendar.getInstance();
        new Text("HML 2.01 (c) %s, Allen Holub. [compiled %s]\n", c.get(Calendar.YEAR), new Date().toString() ).write(System.err);
        new Text("Please download the most recent version of this program from handymarkup.org rather than redistributing it\n").write(System.err);

        StringWriter errors = new StringWriter();
        Hml processor = new Hml( new OutputStreamWriter(System.out), errors );

        int errorCount = doMain(processor, args);
        if( errorCount > 0 )
            System.err.println(errors.toString());
        new Text ("%s errors\n", errorCount ).write(System.err);
        System.exit( errorCount );
    }

    /** The integration tests call doMain() instead of main() in order to
     *  avoid the System.exit() call in main(). System.exit(...) terminates
     *  the Eclipse debugger abruptly. It also logs an error if any exceptions are
     *  encountered.
     *  @return the error count or -1 if an exception was caught, -2 if there's
     *          a missing output file name, -3 if an unknown command-line
     *          argument is encountered. Reports error messages for
     *          all of these conditions.
     */
    public static int doMain( Hml processor, String[] args ) throws Exception
    {
        try
        {
            String fileName = null;
            for( int i = 0; i < args.length; ++i )
            {
                String argument = args[i].trim();
                if( argument.equals("-o") || argument.equals("--out") )
                {
                    if( ++i >= args.length )
                    {
                        processor.reportError( "Missing filename for -o or --out.\n%s", USAGE );
                        return -2;
                    }
                    argument = args[i];
                    if( fileName != null )                       // Have processed some input,
                        processor.closeCurrentOutputStream();    // so a close is required.
                    processor.openNewOutputStream( argument );
                }
                else if( argument.startsWith("-") )
                {
                    processor.reportError("Unknown command-line argument: %s\n%s", argument, USAGE );
                    return -3;
                }
                else
                {
                    fileName = argument;
                    Text t = new Text(new FileReader(argument));
                    processor.expandAndPrint( t );
                }
            }
            if( fileName == null ) // Then no input file was specified in the argument list. Use standard input.
            {
                fileName = "standard input";
                Text t = new Text( System.in );
                processor.expandAndPrint(t);
            }
            processor.closeCurrentOutputStream();
            return processor.getErrorCount();
        }
        catch( Exception e )
        {
            log.error( "Uncaught exception in main", e );
            new Text( "%s\n", e.getMessage() ).write(System.err);
            return -1;    // FIX: was 1; the documented contract for this case is -1.
        }
    }
    //----------------------------------------------------------------------
    /** This method is for use by non command-line based systems (servlets?).
     *  @param input The HML input
     *  @param output the .html output is sent to this writer
     *  @param errors error messages are sent to this writer.
     *  @return the error count or -1 if an unexpected exception was caught.
     */
    public static int processInWebContext( String input, Writer output, Writer errors )
    {
        try
        {
            Hml processor = new Hml ( output, errors );
            Text content  = new Text( input );
            int errorCount = processor.expandAndPrint( content );
            processor.closeCurrentOutputStream();
            return errorCount;
        }
        catch( Exception e )
        {
            // FIX: message previously said "in main", which misattributed web-context failures.
            log.error( "Uncaught exception in processInWebContext", e );
            new Text( "%s", e.getMessage() ).write(errors);
        }
        return -1;
    }
    //----------------------------------------------------------------------
    // Highest-level methods, called from main() and processInWebContext(...)
    //
    /** Close the current output stream, flushing all buffered output
     *  as necessary.
     *  @throws IOException
     */
    private void closeCurrentOutputStream() throws IOException
    {   outputPrinter.close();
    }

    /** Open a new output stream. If any output has been written to the current
     *  output stream, you should call {@link #closeCurrentOutputStream()} before
     *  calling this method.
     *
     *  @param fileName name of the new output file
     *  @throws IOException
     */
    private void openNewOutputStream( String fileName ) throws IOException
    {   outputPrinter.open(fileName) ;
    }

    /** Forward a printf-style error message to the error stream. */
    private void reportError( String format, Object... args )
    {   error.report( format, args );
    }

    /** @return the number of errors reported so far on the error stream. */
    private int getErrorCount()
    {   return error.getErrorCount();
    }

    /** Expand the specified text and print it to the current output
     *  stream. The head and tail files are added as required.
     *
     *  @param t the text to process (modified in place by expand())
     *  @return the cumulative error count after processing
     *  @throws IOException
     */
    private int expandAndPrint( Text t ) throws IOException
    {
        int errorCount = expand(t);
        outputPrinter.print(t);
        return errorCount;
    }
    //----------------------------------------------------------------------
    /** This method runs all the HML tags, macros, etc. It does not
     *  wrap head and tail files around the processed input, however.
     *  Each pass runs only if the previous pass reported success, so a
     *  failing pass short-circuits the rest of the pipeline.
     *
     *  @return the error count after processing.
     */
    public int expand( Text input )
    {
        // TODO. Change {section ...}, etc. to macros that use special replacement variables.
        // Need to process macros much later in the chain if we do that, but moving the macro processing
        // introduces a bunch of test errors that I don't have time to deal with.

        if( new Pass(config, include                           ).process(input))
        if( new Pass(config, config                            ).process(input))
        if( new Pass(config, codeSnippets                      ).process(input))
        if( new Pass(config, macroManager.getTextFilter()      ).process(input))
        if( new Pass(config, tags                              ).process(input))
        if( new Pass(config, listing                           ).process(input))
        if( new Pass(config, macroManager.getCodeFilter()      ).process(input))
        if( new Pass(config, title                             ).process(input))
        if( new Pass(config, macroManager.getRefFilter()       ).process(input))
        if( new Pass(config, listing.getReferenceExpander()    ).process(input))
        if( new Pass(config, title.getReferenceExpander(endNotes)).process(input))
        if( new Pass(config, title.getTocReplacementFilter()   ).process(input))
            new Pass(config, unmapEntities                     ).process(input);

        return error.getErrorCount();
    }
    //----------------------------------------------------------------------
    /** Buffers processed content and, on close(), wraps it with the hml.head
     *  (augmented by any tag-generated additions) and hml.tail configuration
     *  files before writing everything to the active output writer.
     */
    private class Printer
    {
        private Writer output;
        public Printer( Writer output )
        {   this.output = output;
        }

        // The following two calls are mocked in IntegreationTests.java. If you change
        // them, change the test.
        //
        private Reader head = Places.CONFIG.reader("hml.head");
        private Reader tail = Places.CONFIG.reader("hml.tail");

        // head/tail contents are read once and cached across output files.
        private Text tailContents = null;
        private Text headContents = null;
        private Text contents     = new Text();

        /** Outputs the specified processed content to the current
         *  output file.
         *  The hml.head file is output before the content,
         *  only if the head has not been output since the last
         *  close() call.
         *
         *  For this method to work properly, you can't
         *  output anything until all of the hml tags have been
         *  processed (because some of the tags effectively
         *  modify hml.head).
         *
         *  @param content the processed text to buffer for output
         *  @throws IOException if no output file is currently active
         */
        public void print( Text content ) throws IOException
        {
            if( output == null )
                throw new IOException("No output file currently active");
            contents.append(content);
        }

        /** Finishes up processing by flushing the buffer and outputting the tail file. You
         *  should only call this method once, at the end of processing. Use {@link #open(String)} to
         *  change output files.
         */
        public void close() throws IOException
        {
            if( output == null )    // nothing to do
                return;

            if( head == null )
                error.report( "ERROR: Cannot locate hml.head" );
            else
            {
                if( headContents == null )
                    headContents = new Text(head);
                Text augmentedHead = tags.appendAdditionsToHead( headContents );
                augmentedHead.write(output);
            }

            contents.write(output);
            contents.clear();

            if( tail == null )
                error.report( "ERROR: Cannot locate hml.tail." );
            else
            {
                if( tailContents == null )
                    tailContents = new Text(tail);
                tailContents.write(output);
                output.flush();
            }
            output.close();
            output = null;
        }

        /** Close the current output and open a new one
         *  @throws IOException
         */
        public void open( String fileName ) throws IOException
        {   output = new FileWriter( fileName );
        }
    }
}
|
|
package controllers;
import helpers.SubdomainCheck;
import java.lang.reflect.InvocationTargetException;
import java.util.concurrent.TimeoutException;
import lagoon.PlintRobot;
import ls.LSUser;
import play.Logger;
import play.data.validation.Required;
import play.libs.Crypto;
import play.mvc.Before;
import play.mvc.Controller;
import play.mvc.Http;
import play.mvc.Util;
import play.utils.Java;
import exceptions.LintException;
/**
* Manages login and authentication
*
* @author linda.velte
*
*/
/**
 * Manages login and authentication for the Lint/Plint-backed application.
 *
 * Fixes relative to the previous revision:
 *  - authenticate(): the Object returned by Lintity.invoke("authenticate", ...) was cast
 *    to Boolean and unboxed directly; a null return from an overriding Lintity would
 *    throw NullPointerException. Now guarded with Boolean.TRUE.equals(...).
 *  - logout(): is listed in the @Before "unless" clause, so it can be reached without a
 *    session "id"; Long.parseLong(null) would NPE. The remote logout is now skipped when
 *    no id is present (the local session is still cleared).
 *
 * @author linda.velte
 */
public class SecureLint extends Controller {

    /**
     * Runs before every action except the authentication entry points listed in
     * {@code unless}: lets @Unsheltered controllers/actions through, otherwise requires
     * an authenticated session and asks PlintRobot to authorize the request.
     */
    @Before(unless = { "login", "authenticate", "logout", "register", "registerUser" })
    static void checkAccess() throws Throwable {
        Lintity.invoke("beforeCheckAccess");
        if (getControllerAnnotation(Unsheltered.class) != null) {
            Logger.debug("Unsheltered Controller :: " + request.action);
        } else if (getActionAnnotation(Unsheltered.class) != null) {
            Logger.debug("Unsheltered Action :: " + request.action);
        } else if (getControllerInheritedAnnotation(Unsheltered.class) != null) {
            Logger.debug("Unsheltered Inherited Controller :: " + request.action);
        } else {
            // Check Authentication. login() renders, which aborts this request,
            // so code past this point only runs with both session keys present.
            if (!session.contains("username") || !session.contains("id")) {
                flash.put("url", "GET".equals(request.method) ? request.url : "/");
                login();
            }
            // Check Authorization
            if (!PlintRobot.getInstance().checkRequest(request, Long.parseLong(session.get("id")), Lintity.currentContext())) {
                Lintity.invoke("onCheckFailed");
            }
        }
        Lintity.invoke("afterCheckAccess");
    }

    // ~~~ Login

    /**
     * Shows the login page. If a valid signed "rememberme" cookie is present, the user
     * is logged straight back in and redirected to the originally-requested URL.
     */
    public static void login() throws Throwable {
        Http.Cookie remember = request.cookies.get("rememberme");
        if (remember != null && remember.value.indexOf("-") > 0) {
            String sign = remember.value.substring(0, remember.value.indexOf("-"));
            String username = remember.value.substring(remember.value.indexOf("-") + 1);
            if (Crypto.sign(username).equals(sign)) {
                session.put("username", username);
                redirectToOriginalURL();
            }
        }
        flash.keep("url");
        render();
    }

    /**
     * Authenticates the given credentials via the Lintity hook, then registers the
     * session with PlintRobot. On any failure the login page is shown again with the
     * submitted parameters flashed back.
     */
    public static void authenticate(@Required String username, String password, boolean remember) throws Throwable {
        // Boolean.TRUE.equals(...) tolerates a null return from a child Lintity
        // override; the previous direct unboxing would have thrown an NPE.
        boolean allowed = Boolean.TRUE.equals(Lintity.invoke("authenticate", username, password));
        if (allowed) {
            try {
                LSUser login = PlintRobot.getInstance().login(username, password, Lintity.currentContext());
                if (login != null) {
                    session.put("id", login.id);
                }
            } catch (LintException e) {
                Logger.error("Lint - " + e.getMessage());
                validation.addError("Lint Login", "Cannot identify user");
            } catch (TimeoutException e) {
                Logger.error("Lint - " + e.getMessage());
                validation.addError("Lint Login", "Timeout");
            }
        }
        if (validation.hasErrors() || !allowed) {
            flash.keep("url");
            flash.error("lint.error");
            params.flash();
            login();
        }
        // Mark user as connected
        session.put("username", username);
        // Remember if needed
        if (remember) {
            response.setCookie("rememberme", Crypto.sign(username) + "-" + username, "30d");
        }
        redirectToOriginalURL();
    }

    /**
     * Signs the user off remotely (when a session id exists) and locally, then returns
     * to the login page.
     */
    public static void logout() throws Throwable {
        Lintity.invoke("onDisconnect");
        // logout is reachable without an authenticated session (it is in the
        // @Before "unless" list); guard against a missing session id.
        String id = session.get("id");
        if (id != null) {
            PlintRobot.getInstance().logout(Long.parseLong(id), Lintity.currentContext());
        }
        session.clear();
        response.removeCookie("rememberme");
        Lintity.invoke("onDisconnected");
        flash.success("lint.logout");
        login();
    }

    // ~~~ Utils

    /** Redirects to the URL stashed in flash by checkAccess(), or "/" when absent. */
    @Util
    private static void redirectToOriginalURL() throws Throwable {
        Lintity.invoke("onAuthenticated");
        String url = flash.get("url");
        if (url == null) {
            url = "/";
        }
        redirect(url);
    }

    /**
     * Extension-point controller: applications subclass/shadow these static hooks to
     * customize authentication behavior; they are dispatched reflectively via invoke().
     */
    public static class Lintity extends Controller {

        public static void register() {
            renderTemplate("SecureLint/Lintity/register.html");
        }

        public static void activation(String token) {
            renderTemplate("SecureLint/Lintity/activation.html", token);
        }

        public static void passwordRecovery() {
            renderTemplate("SecureLint/Lintity/password_recovery.html");
        }

        // Registration/activation/recovery are extension points: override in a child
        // Lintity class; the defaults below deliberately fail fast.
        public static void registerUser(@Required String username, @Required String email, @Required String name) throws Exception {
            throw new UnsupportedOperationException();
        }

        // NOTE(review): "passowrd" is misspelled, but Play binds HTTP parameters by
        // name, so renaming it would change the request contract -- kept as-is.
        public static void activateUser(@Required String passowrd, @Required String passwordConf, String token) throws Exception {
            throw new UnsupportedOperationException();
        }

        public static void resendPassword(@Required String username) throws Exception {
            throw new UnsupportedOperationException();
        }

        /**
         * computes current subdomain
         *
         * @return subdomain name
         */
        public static String currentContext() {
            return SubdomainCheck.currentSubdomain(request);
        }

        /**
         * This method is called during the authentication process. This is where you check if the user is allowed to log in into the system. This is
         * the actual authentication process against a third party system (most of the time a DB).
         *
         * @param username
         * @param password
         * @return true if the authentication process succeeded
         * @throws LintException
         */
        static boolean authenticate(String username, String password) {
            return true;
        }

        /**
         * This method returns the current connected username
         *
         * @return the session "id" value, or null when not connected
         */
        static String connected() {
            return session.get("id");
        }

        /**
         * This method is called before check request access is verified.
         */
        static void beforeCheckAccess() {
        }

        /**
         * Indicate if a user is currently connected
         *
         * @return true if the user is connected
         */
        static boolean isConnected() {
            return session.contains("id");
        }

        /**
         * This method is called after a successful authentication. You need to override this method if you wish to perform specific actions (eg.
         * Record the time the user signed in)
         */
        static void onAuthenticated() {
        }

        /**
         * This method is called before a user tries to sign off. You need to override this method if you wish to perform specific actions (eg. Record
         * the name of the user who signed off)
         */
        static void onDisconnect() {
        }

        /**
         * This method is called after a successful sign off. You need to override this method if you wish to perform specific actions (eg. Record the
         * time the user signed off)
         */
        static void onDisconnected() {
        }

        /**
         * This method is called if a check does not succeed. By default it shows the not allowed page (the controller forbidden method).
         */
        static void onCheckFailed() {
            Logger.debug("Access Denied to " + request.action);
            forbidden();
        }

        /**
         * This method is called after check request access is verified.
         */
        static void afterCheckAccess() {
        }

        /** Dispatches to a child Lintity override (or the static default) by name. */
        private static Object invoke(String m, Object... args) throws Throwable {
            try {
                return Java.invokeChildOrStatic(Lintity.class, m, args);
            } catch (InvocationTargetException e) {
                // Unwrap so callers see the hook's real exception.
                throw e.getTargetException();
            }
        }
    }
}
|
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataproc/v1/workflow_templates.proto
package com.google.cloud.dataproc.v1;
/**
*
*
* <pre>
* A selector that chooses target cluster for jobs based on metadata.
* </pre>
*
* Protobuf type {@code google.cloud.dataproc.v1.ClusterSelector}
*/
public final class ClusterSelector extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.ClusterSelector)
ClusterSelectorOrBuilder {
private static final long serialVersionUID = 0L;
// Use ClusterSelector.newBuilder() to construct.
private ClusterSelector(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor: initializes zone_ to the proto3 default (empty string).
private ClusterSelector() {
  zone_ = "";
}
// Invoked reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ClusterSelector();
}
// Fields that were present on the wire but unknown to this generated schema.
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor (generated by protoc -- do not hand-edit the
// logic): consumes the whole input stream, filling zone_ (field 1) and
// clusterLabels_ (field 2) and collecting unrecognized fields.
private ClusterSelector(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of stream
          done = true;
          break;
        case 10: // field 1 (zone), length-delimited UTF-8 string
          {
            java.lang.String s = input.readStringRequireUtf8();
            zone_ = s;
            break;
          }
        case 18: // field 2 (cluster_labels), one map-entry message per record
          {
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              // Lazily switch from the shared empty map to a mutable one on
              // the first entry seen.
              clusterLabels_ =
                  com.google.protobuf.MapField.newMapField(
                      ClusterLabelsDefaultEntryHolder.defaultEntry);
              mutable_bitField0_ |= 0x00000001;
            }
            com.google.protobuf.MapEntry<java.lang.String, java.lang.String> clusterLabels__ =
                input.readMessage(
                    ClusterLabelsDefaultEntryHolder.defaultEntry.getParserForType(),
                    extensionRegistry);
            clusterLabels_
                .getMutableMap()
                .put(clusterLabels__.getKey(), clusterLabels__.getValue());
            break;
          }
        default:
          {
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Always freeze what was parsed -- even on error -- so setUnfinishedMessage
    // hands back a consistent partial message.
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
// Reflection plumbing used by the protobuf runtime; all descriptors live in
// the generated WorkflowTemplatesProto holder class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
      .internal_static_google_cloud_dataproc_v1_ClusterSelector_descriptor;
}
// Routes map-field reflection to the cluster_labels map (field number 2).
@SuppressWarnings({"rawtypes"})
@java.lang.Override
protected com.google.protobuf.MapField internalGetMapField(int number) {
  switch (number) {
    case 2:
      return internalGetClusterLabels();
    default:
      throw new RuntimeException("Invalid map field number: " + number);
  }
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
      .internal_static_google_cloud_dataproc_v1_ClusterSelector_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dataproc.v1.ClusterSelector.class,
          com.google.cloud.dataproc.v1.ClusterSelector.Builder.class);
}
public static final int ZONE_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; the accessors below lazily convert
// and cache in whichever representation was last requested.
private volatile java.lang.Object zone_;
/**
 *
 *
 * <pre>
 * Optional. The zone where workflow process executes. This parameter does not
 * affect the selection of the cluster.
 * If unspecified, the zone of the first cluster matching the selector
 * is used.
 * </pre>
 *
 * <code>string zone = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The zone.
 */
@java.lang.Override
public java.lang.String getZone() {
  java.lang.Object ref = zone_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the cached ByteString once and memoize the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    zone_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Optional. The zone where workflow process executes. This parameter does not
 * affect the selection of the cluster.
 * If unspecified, the zone of the first cluster matching the selector
 * is used.
 * </pre>
 *
 * <code>string zone = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for zone.
 */
@java.lang.Override
public com.google.protobuf.ByteString getZoneBytes() {
  java.lang.Object ref = zone_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String once and memoize the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    zone_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int CLUSTER_LABELS_FIELD_NUMBER = 2;
// Holder for the string->string map-entry prototype used when (de)serializing
// cluster_labels; initialized lazily on first class use.
private static final class ClusterLabelsDefaultEntryHolder {
  static final com.google.protobuf.MapEntry<java.lang.String, java.lang.String> defaultEntry =
      com.google.protobuf.MapEntry.<java.lang.String, java.lang.String>newDefaultInstance(
          com.google.cloud.dataproc.v1.WorkflowTemplatesProto
              .internal_static_google_cloud_dataproc_v1_ClusterSelector_ClusterLabelsEntry_descriptor,
          com.google.protobuf.WireFormat.FieldType.STRING,
          "",
          com.google.protobuf.WireFormat.FieldType.STRING,
          "");
}
// Null until at least one entry is parsed; accessor substitutes the shared
// empty map so callers never see null.
private com.google.protobuf.MapField<java.lang.String, java.lang.String> clusterLabels_;
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
    internalGetClusterLabels() {
  if (clusterLabels_ == null) {
    return com.google.protobuf.MapField.emptyMapField(
        ClusterLabelsDefaultEntryHolder.defaultEntry);
  }
  return clusterLabels_;
}
// Number of entries in the cluster_labels map.
public int getClusterLabelsCount() {
  return internalGetClusterLabels().getMap().size();
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public boolean containsClusterLabels(java.lang.String key) {
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  return internalGetClusterLabels().getMap().containsKey(key);
}
/** Use {@link #getClusterLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getClusterLabels() {
  return getClusterLabelsMap();
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return an unmodifiable view of the cluster_labels map
 */
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getClusterLabelsMap() {
  return internalGetClusterLabels().getMap();
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return the value for {@code key}, or {@code defaultValue} when absent
 */
@java.lang.Override
public java.lang.String getClusterLabelsOrDefault(
    java.lang.String key, java.lang.String defaultValue) {
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetClusterLabels().getMap();
  return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @throws java.lang.IllegalArgumentException when {@code key} has no mapping
 */
@java.lang.Override
public java.lang.String getClusterLabelsOrThrow(java.lang.String key) {
  if (key == null) {
    throw new java.lang.NullPointerException();
  }
  java.util.Map<java.lang.String, java.lang.String> map = internalGetClusterLabels().getMap();
  if (!map.containsKey(key)) {
    throw new java.lang.IllegalArgumentException();
  }
  return map.get(key);
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // proto3 message with no required sub-messages: always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes zone (field 1, skipped when empty per proto3), then each
// cluster_labels entry as its own field-2 record, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, zone_);
  }
  com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
      output, internalGetClusterLabels(), ClusterLabelsDefaultEntryHolder.defaultEntry, 2);
  unknownFields.writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size, mirroring
// writeTo(): zone only when non-empty, plus one map-entry message per label.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, zone_);
  }
  for (java.util.Map.Entry<java.lang.String, java.lang.String> entry :
      internalGetClusterLabels().getMap().entrySet()) {
    // Build a throwaway entry message so its size can be measured.
    com.google.protobuf.MapEntry<java.lang.String, java.lang.String> clusterLabels__ =
        ClusterLabelsDefaultEntryHolder.defaultEntry
            .newBuilderForType()
            .setKey(entry.getKey())
            .setValue(entry.getValue())
            .build();
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, clusterLabels__);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field equality over zone, cluster_labels, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.dataproc.v1.ClusterSelector)) {
    return super.equals(obj);
  }
  com.google.cloud.dataproc.v1.ClusterSelector other =
      (com.google.cloud.dataproc.v1.ClusterSelector) obj;

  if (!getZone().equals(other.getZone())) return false;
  if (!internalGetClusterLabels().equals(other.internalGetClusterLabels())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
// Standard generated-message hash: seeded from the descriptor, folding in each
// set field tagged by its field number; memoized after first computation.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + ZONE_FIELD_NUMBER;
  hash = (53 * hash) + getZone().hashCode();
  if (!internalGetClusterLabels().getMap().isEmpty()) {
    hash = (37 * hash) + CLUSTER_LABELS_FIELD_NUMBER;
    hash = (53 * hash) + internalGetClusterLabels().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse factories: one overload per input kind (ByteBuffer, ByteString,
// byte[], InputStream, CodedInputStream), each with and without an extension
// registry; delimited variants read a length-prefixed message.
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataproc.v1.ClusterSelector parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
/** Returns a new builder initialized to the default (empty) message. */
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
/** Returns a new builder pre-populated with the fields of the given prototype message. */
public static Builder newBuilder(com.google.cloud.dataproc.v1.ClusterSelector prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance carries no state, so a fresh Builder is equivalent to merging it.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * A selector that chooses target cluster for jobs based on metadata.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dataproc.v1.ClusterSelector}
 *
 * NOTE(review): generated builder (protoc). Fields: zone (string, field 1) and
 * cluster_labels (map&lt;string, string&gt;, field 2). Do not hand-edit; regenerate instead.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.ClusterSelector)
com.google.cloud.dataproc.v1.ClusterSelectorOrBuilder {
/** Returns the protobuf descriptor of the ClusterSelector message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_ClusterSelector_descriptor;
}
// Maps a field number to its MapField; cluster_labels is field number 2.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapField internalGetMapField(int number) {
switch (number) {
case 2:
return internalGetClusterLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
// Same as internalGetMapField, but returns a mutable view for building.
@SuppressWarnings({"rawtypes"})
protected com.google.protobuf.MapField internalGetMutableMapField(int number) {
switch (number) {
case 2:
return internalGetMutableClusterLabels();
default:
throw new RuntimeException("Invalid map field number: " + number);
}
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_ClusterSelector_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataproc.v1.ClusterSelector.class,
com.google.cloud.dataproc.v1.ClusterSelector.Builder.class);
}
// Construct using com.google.cloud.dataproc.v1.ClusterSelector.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// ClusterSelector has no message-typed fields, so there are no nested field builders to set up.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
/** Resets all fields to their defaults: empty zone and no cluster labels. */
@java.lang.Override
public Builder clear() {
super.clear();
zone_ = "";
internalGetMutableClusterLabels().clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataproc.v1.WorkflowTemplatesProto
.internal_static_google_cloud_dataproc_v1_ClusterSelector_descriptor;
}
@java.lang.Override
public com.google.cloud.dataproc.v1.ClusterSelector getDefaultInstanceForType() {
return com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance();
}
/** Builds the message, throwing if any required invariants are unmet (none for this type). */
@java.lang.Override
public com.google.cloud.dataproc.v1.ClusterSelector build() {
com.google.cloud.dataproc.v1.ClusterSelector result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/** Builds the message without the initialization check; the label map is frozen here. */
@java.lang.Override
public com.google.cloud.dataproc.v1.ClusterSelector buildPartial() {
com.google.cloud.dataproc.v1.ClusterSelector result =
new com.google.cloud.dataproc.v1.ClusterSelector(this);
// from_bitField0_ is unused for this message (no optional-bit tracking) — kept as emitted by protoc.
int from_bitField0_ = bitField0_;
result.zone_ = zone_;
result.clusterLabels_ = internalGetClusterLabels();
result.clusterLabels_.makeImmutable();
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dynamic merge: dispatches to the typed overload when possible, else to the generic reflection path.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataproc.v1.ClusterSelector) {
return mergeFrom((com.google.cloud.dataproc.v1.ClusterSelector) other);
} else {
super.mergeFrom(other);
return this;
}
}
/** Merges another ClusterSelector into this builder: non-empty zone wins, label maps are unioned. */
public Builder mergeFrom(com.google.cloud.dataproc.v1.ClusterSelector other) {
if (other == com.google.cloud.dataproc.v1.ClusterSelector.getDefaultInstance()) return this;
if (!other.getZone().isEmpty()) {
zone_ = other.zone_;
onChanged();
}
internalGetMutableClusterLabels().mergeFrom(other.internalGetClusterLabels());
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses from the stream and merges the result; on failure, whatever was parsed so far
// is still merged (see finally block) before the IOException is rethrown.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.dataproc.v1.ClusterSelector parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.dataproc.v1.ClusterSelector) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
// zone_ holds either a String or a lazily-decoded ByteString (standard protobuf string caching).
private java.lang.Object zone_ = "";
/**
 *
 *
 * <pre>
 * Optional. The zone where workflow process executes. This parameter does not
 * affect the selection of the cluster.
 * If unspecified, the zone of the first cluster matching the selector
 * is used.
 * </pre>
 *
 * <code>string zone = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The zone.
 */
public java.lang.String getZone() {
java.lang.Object ref = zone_;
if (!(ref instanceof java.lang.String)) {
// First access after parsing: decode the cached ByteString once and keep the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
zone_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Optional. The zone where workflow process executes. This parameter does not
 * affect the selection of the cluster.
 * If unspecified, the zone of the first cluster matching the selector
 * is used.
 * </pre>
 *
 * <code>string zone = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return The bytes for zone.
 */
public com.google.protobuf.ByteString getZoneBytes() {
java.lang.Object ref = zone_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
zone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Optional. The zone where workflow process executes. This parameter does not
 * affect the selection of the cluster.
 * If unspecified, the zone of the first cluster matching the selector
 * is used.
 * </pre>
 *
 * <code>string zone = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The zone to set.
 * @return This builder for chaining.
 */
public Builder setZone(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
zone_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The zone where workflow process executes. This parameter does not
 * affect the selection of the cluster.
 * If unspecified, the zone of the first cluster matching the selector
 * is used.
 * </pre>
 *
 * <code>string zone = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearZone() {
zone_ = getDefaultInstance().getZone();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The zone where workflow process executes. This parameter does not
 * affect the selection of the cluster.
 * If unspecified, the zone of the first cluster matching the selector
 * is used.
 * </pre>
 *
 * <code>string zone = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param value The bytes for zone to set.
 * @return This builder for chaining.
 */
public Builder setZoneBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
zone_ = value;
onChanged();
return this;
}
// Backing map for cluster_labels; null until first use (empty map is returned in that case).
private com.google.protobuf.MapField<java.lang.String, java.lang.String> clusterLabels_;
/** Returns the (possibly shared, immutable) label map, or an empty map field if unset. */
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
internalGetClusterLabels() {
if (clusterLabels_ == null) {
return com.google.protobuf.MapField.emptyMapField(
ClusterLabelsDefaultEntryHolder.defaultEntry);
}
return clusterLabels_;
}
/** Returns a mutable label map, copying on write if the current one is frozen. */
private com.google.protobuf.MapField<java.lang.String, java.lang.String>
internalGetMutableClusterLabels() {
onChanged();
;
if (clusterLabels_ == null) {
clusterLabels_ =
com.google.protobuf.MapField.newMapField(ClusterLabelsDefaultEntryHolder.defaultEntry);
}
if (!clusterLabels_.isMutable()) {
clusterLabels_ = clusterLabels_.copy();
}
return clusterLabels_;
}
/** Returns the number of cluster label entries currently in the builder. */
public int getClusterLabelsCount() {
return internalGetClusterLabels().getMap().size();
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public boolean containsClusterLabels(java.lang.String key) {
if (key == null) {
throw new java.lang.NullPointerException();
}
return internalGetClusterLabels().getMap().containsKey(key);
}
/** Use {@link #getClusterLabelsMap()} instead. */
@java.lang.Override
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getClusterLabels() {
return getClusterLabelsMap();
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public java.util.Map<java.lang.String, java.lang.String> getClusterLabelsMap() {
return internalGetClusterLabels().getMap();
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public java.lang.String getClusterLabelsOrDefault(
java.lang.String key, java.lang.String defaultValue) {
if (key == null) {
throw new java.lang.NullPointerException();
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetClusterLabels().getMap();
return map.containsKey(key) ? map.get(key) : defaultValue;
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public java.lang.String getClusterLabelsOrThrow(java.lang.String key) {
if (key == null) {
throw new java.lang.NullPointerException();
}
java.util.Map<java.lang.String, java.lang.String> map = internalGetClusterLabels().getMap();
if (!map.containsKey(key)) {
throw new java.lang.IllegalArgumentException();
}
return map.get(key);
}
/** Removes all cluster label entries. */
public Builder clearClusterLabels() {
internalGetMutableClusterLabels().getMutableMap().clear();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder removeClusterLabels(java.lang.String key) {
if (key == null) {
throw new java.lang.NullPointerException();
}
internalGetMutableClusterLabels().getMutableMap().remove(key);
return this;
}
/** Use alternate mutation accessors instead. */
@java.lang.Deprecated
public java.util.Map<java.lang.String, java.lang.String> getMutableClusterLabels() {
return internalGetMutableClusterLabels().getMutableMap();
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder putClusterLabels(java.lang.String key, java.lang.String value) {
if (key == null) {
throw new java.lang.NullPointerException();
}
if (value == null) {
throw new java.lang.NullPointerException();
}
internalGetMutableClusterLabels().getMutableMap().put(key, value);
return this;
}
/**
 *
 *
 * <pre>
 * Required. The cluster labels. Cluster must have all labels
 * to match.
 * </pre>
 *
 * <code>map&lt;string, string&gt; cluster_labels = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder putAllClusterLabels(java.util.Map<java.lang.String, java.lang.String> values) {
internalGetMutableClusterLabels().getMutableMap().putAll(values);
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.ClusterSelector)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterSelector)
// Singleton default (all-fields-default) instance, created eagerly at class load.
private static final com.google.cloud.dataproc.v1.ClusterSelector DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.ClusterSelector();
}
/** Returns the singleton default (empty) ClusterSelector instance. */
public static com.google.cloud.dataproc.v1.ClusterSelector getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser used by all parseFrom overloads; delegates to the parsing constructor.
private static final com.google.protobuf.Parser<ClusterSelector> PARSER =
new com.google.protobuf.AbstractParser<ClusterSelector>() {
@java.lang.Override
public ClusterSelector parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ClusterSelector(input, extensionRegistry);
}
};
/** Returns the static parser for ClusterSelector messages. */
public static com.google.protobuf.Parser<ClusterSelector> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ClusterSelector> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dataproc.v1.ClusterSelector getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
|
/* Copyright (c) 2013 OpenPlans. All rights reserved.
* This code is licensed under the BSD New License, available at the root
* application directory.
*/
package org.geogit.api.porcelain;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.geogit.api.AbstractGeoGitOp;
import org.geogit.api.FeatureInfo;
import org.geogit.api.NodeRef;
import org.geogit.api.Ref;
import org.geogit.api.RevFeature;
import org.geogit.api.RevFeatureType;
import org.geogit.api.plumbing.RevObjectParse;
import org.geogit.api.plumbing.diff.AttributeDiff;
import org.geogit.api.plumbing.diff.AttributeDiff.TYPE;
import org.geogit.api.plumbing.diff.FeatureDiff;
import org.geogit.api.plumbing.diff.FeatureTypeDiff;
import org.geogit.api.plumbing.diff.Patch;
import org.geogit.api.plumbing.diff.VerifyPatchOp;
import org.geogit.api.plumbing.diff.VerifyPatchResults;
import org.geogit.repository.DepthSearch;
import org.geogit.repository.WorkingTree;
import org.geogit.storage.StagingDatabase;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.feature.type.AttributeDescriptor;
import org.opengis.feature.type.Name;
import org.opengis.feature.type.PropertyDescriptor;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.inject.Inject;
/**
 * Applies a patch to the working tree. If partial application of the patch is allowed, it returns a
 * patch with the elements that could not be applied (might be an empty patch), or null otherwise.
 *
 * @see WorkingTree
 * @see Patch
 */
public class ApplyPatchOp extends AbstractGeoGitOp<Patch> {

    // The patch to apply
    private Patch patch;

    // The working tree modified as the patch is applied
    private WorkingTree workTree;

    // Whether rejected entries should be skipped instead of aborting the whole operation
    private boolean applyPartial;

    // Database used to resolve the feature type metadata of modified features
    private StagingDatabase indexDb;

    // Whether the patch should be applied in its reversed form
    private boolean reverse;

    /**
     * Constructs a new {@code ApplyPatchOp} with the given parameters.
     *
     * @param workTree the working tree to modify when applying the patch
     * @param indexDb the staging database used to look up feature type metadata
     */
    @Inject
    public ApplyPatchOp(final WorkingTree workTree, StagingDatabase indexDb) {
        this.workTree = workTree;
        this.indexDb = indexDb;
    }

    /**
     * Sets the patch to apply
     *
     * @param patch the patch to apply
     * @return {@code this}
     */
    public ApplyPatchOp setPatch(Patch patch) {
        this.patch = patch;
        return this;
    }

    /**
     * Sets whether to apply the original patch or its reversed version
     *
     * @param reverse true if the patch should be applied in its reversed version
     * @return {@code this}
     */
    public ApplyPatchOp setReverse(boolean reverse) {
        this.reverse = reverse;
        return this;
    }

    /**
     * Sets whether the patch can be applied partially or not
     *
     * @param applyPartial whether the patch can be applied partially or not
     * @return {@code this}
     */
    public ApplyPatchOp setApplyPartial(boolean applyPartial) {
        this.applyPartial = applyPartial;
        return this;
    }

    /**
     * Sets whether to use the index instead of the working tree.
     *
     * TODO: This option is currently unused
     *
     * @param cached whether to use the index instead of the working tree.
     * @return {@code this}
     */
    public ApplyPatchOp setCached(boolean cached) {
        // this.cached = cached;
        return this;
    }

    /**
     * Executes the apply command, applying the given patch. If it cannot be applied and no partial
     * application is allowed, a {@link CannotApplyPatchException} exception is thrown. Returns a
     * patch with rejected entries, in case partial application is allowed.
     *
     * @return a patch with the rejected entries when partial application is allowed; {@code null}
     *         otherwise
     * @throws CannotApplyPatchException if the patch cannot be fully applied and partial
     *         application is not allowed
     */
    @Override
    public Patch call() throws RuntimeException {
        Preconditions.checkArgument(patch != null, "No patch file provided");
        // Split the patch into entries that can be applied and entries that must be rejected
        VerifyPatchResults verify = command(VerifyPatchOp.class).setPatch(patch)
                .setReverse(reverse).call();
        Patch toReject = verify.getToReject();
        Patch toApply = verify.getToApply();
        if (applyPartial) {
            applyPatch(toApply);
            return toReject;
        }
        if (!toReject.isEmpty()) {
            throw new CannotApplyPatchException(toReject);
        }
        applyPatch(toApply);
        return null;
    }

    /** Applies the given (already verified) patch to the working tree. */
    private void applyPatch(Patch patch) {
        if (reverse) {
            patch = patch.reversed();
        }
        // Removed features are deleted from their parent tree
        for (FeatureInfo feature : patch.getRemovedFeatures()) {
            workTree.delete(NodeRef.parentPath(feature.getPath()),
                    NodeRef.nodeFromPath(feature.getPath()));
        }
        // Added features are inserted as-is
        for (FeatureInfo feature : patch.getAddedFeatures()) {
            workTree.insert(NodeRef.parentPath(feature.getPath()), feature.getFeature());
        }
        // Modified features are rebuilt attribute-by-attribute and reinserted
        for (FeatureDiff diff : patch.getModifiedFeatures()) {
            applyModifiedFeature(diff);
        }
        // Type-tree changes: creation, deletion or feature type update
        for (FeatureTypeDiff diff : patch.getAlteredTrees()) {
            applyAlteredTree(patch, diff);
        }
    }

    /**
     * Rebuilds the feature at {@code diff.getPath()} with the attribute changes of the diff
     * applied, and reinserts it into the working tree.
     */
    private void applyModifiedFeature(FeatureDiff diff) {
        String path = diff.getPath();
        DepthSearch depthSearch = new DepthSearch(indexDb);
        Optional<NodeRef> noderef = depthSearch.find(workTree.getTree(), path);
        // Guava Optional.get() would throw IllegalStateException anyway; fail with a useful message
        Preconditions.checkState(noderef.isPresent(), "Feature not found: %s", path);
        RevFeatureType oldRevFeatureType = command(RevObjectParse.class)
                .setObjectId(noderef.get().getMetadataId()).call(RevFeatureType.class).get();
        String refSpec = Ref.WORK_HEAD + ":" + path;
        RevFeature feature = command(RevObjectParse.class).setRefSpec(refSpec)
                .call(RevFeature.class).get();
        RevFeatureType newRevFeatureType = getFeatureType(diff, oldRevFeatureType);
        ImmutableList<Optional<Object>> values = feature.getValues();
        ImmutableList<PropertyDescriptor> oldDescriptors = oldRevFeatureType
                .sortedDescriptors();
        ImmutableList<PropertyDescriptor> newDescriptors = newRevFeatureType
                .sortedDescriptors();
        // Seed the attribute map with the current values of attributes that survive the type change
        Map<Name, Optional<?>> attrs = Maps.newHashMap();
        for (int i = 0; i < oldDescriptors.size(); i++) {
            PropertyDescriptor descriptor = oldDescriptors.get(i);
            if (newDescriptors.contains(descriptor)) {
                attrs.put(descriptor.getName(), values.get(i));
            }
        }
        // Apply each attribute diff on top of the current value; removed attributes are dropped
        for (Entry<PropertyDescriptor, AttributeDiff> entry : diff.getDiffs().entrySet()) {
            if (!entry.getValue().getType().equals(TYPE.REMOVED)) {
                Optional<?> oldValue = attrs.get(entry.getKey().getName());
                attrs.put(entry.getKey().getName(), entry.getValue().applyOn(oldValue));
            }
        }
        SimpleFeatureBuilder featureBuilder = new SimpleFeatureBuilder(
                (SimpleFeatureType) newRevFeatureType.type());
        for (Entry<Name, Optional<?>> entry : attrs.entrySet()) {
            featureBuilder.set(entry.getKey(), entry.getValue().orNull());
        }
        SimpleFeature featureToInsert = featureBuilder.buildFeature(NodeRef.nodeFromPath(path));
        workTree.insert(NodeRef.parentPath(path), featureToInsert);
    }

    /** Creates, deletes or updates the type tree described by the given feature type diff. */
    private void applyAlteredTree(Patch patch, FeatureTypeDiff diff) {
        if (diff.getOldFeatureType().isNull()) {
            // New tree: create it with the new feature type carried by the patch
            Optional<RevFeatureType> featureType = patch
                    .getFeatureTypeFromId(diff.getNewFeatureType());
            workTree.createTypeTree(diff.getPath(), featureType.get().type());
        } else if (diff.getNewFeatureType().isNull()) {
            // Tree removed
            workTree.delete(diff.getPath());
        } else {
            // Tree kept, but its feature type changed
            Optional<RevFeatureType> featureType = patch
                    .getFeatureTypeFromId(diff.getNewFeatureType());
            workTree.updateTypeTree(diff.getPath(), featureType.get().type());
        }
    }

    /**
     * Computes the feature type that results from applying the attribute additions and removals in
     * {@code diff} to {@code oldRevFeatureType}. (The previously unused {@code oldFeature}
     * parameter has been removed.)
     */
    private RevFeatureType getFeatureType(FeatureDiff diff, RevFeatureType oldRevFeatureType) {
        List<String> removed = Lists.newArrayList();
        List<AttributeDescriptor> added = Lists.newArrayList();
        for (Entry<PropertyDescriptor, AttributeDiff> entry : diff.getDiffs().entrySet()) {
            TYPE type = entry.getValue().getType();
            if (type == TYPE.REMOVED) {
                removed.add(entry.getKey().getName().getLocalPart());
            } else if (type == TYPE.ADDED) {
                added.add((AttributeDescriptor) entry.getKey());
            }
        }
        SimpleFeatureType sft = (SimpleFeatureType) oldRevFeatureType.type();
        SimpleFeatureTypeBuilder featureTypeBuilder = new SimpleFeatureTypeBuilder();
        featureTypeBuilder.setCRS(sft.getCoordinateReferenceSystem());
        featureTypeBuilder.setDefaultGeometry(sft.getGeometryDescriptor().getLocalName());
        featureTypeBuilder.setName(sft.getName());
        // Keep every old attribute that was not removed, preserving declaration order
        for (AttributeDescriptor descriptor : sft.getAttributeDescriptors()) {
            if (!removed.contains(descriptor.getName().getLocalPart())) {
                featureTypeBuilder.add(descriptor);
            }
        }
        // Append added attributes at the end, as the original implementation did
        for (AttributeDescriptor descriptor : added) {
            featureTypeBuilder.add(descriptor);
        }
        return RevFeatureType.build(featureTypeBuilder.buildFeatureType());
    }
}
|
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.controller;
import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import com.google.common.io.BaseEncoding;
import com.yahoo.config.application.api.DeploymentInstanceSpec;
import com.yahoo.config.application.api.DeploymentSpec;
import com.yahoo.config.provision.ApplicationId;
import com.yahoo.config.provision.ClusterSpec;
import com.yahoo.config.provision.Environment;
import com.yahoo.config.provision.InstanceName;
import com.yahoo.config.provision.SystemName;
import com.yahoo.config.provision.zone.RoutingMethod;
import com.yahoo.config.provision.zone.ZoneId;
import com.yahoo.vespa.hosted.controller.api.identifiers.DeploymentId;
import com.yahoo.vespa.hosted.controller.api.integration.configserver.ContainerEndpoint;
import com.yahoo.vespa.hosted.controller.api.integration.dns.Record;
import com.yahoo.vespa.hosted.controller.api.integration.dns.RecordData;
import com.yahoo.vespa.hosted.controller.api.integration.dns.RecordName;
import com.yahoo.vespa.hosted.controller.application.Endpoint;
import com.yahoo.vespa.hosted.controller.application.Endpoint.Port;
import com.yahoo.vespa.hosted.controller.application.EndpointId;
import com.yahoo.vespa.hosted.controller.application.EndpointList;
import com.yahoo.vespa.hosted.controller.application.SystemApplication;
import com.yahoo.vespa.hosted.controller.application.TenantAndApplicationId;
import com.yahoo.vespa.hosted.controller.dns.NameServiceQueue.Priority;
import com.yahoo.vespa.hosted.controller.routing.RoutingId;
import com.yahoo.vespa.hosted.controller.routing.RoutingPolicies;
import com.yahoo.vespa.hosted.controller.routing.context.DeploymentRoutingContext;
import com.yahoo.vespa.hosted.controller.routing.context.DeploymentRoutingContext.ExclusiveDeploymentRoutingContext;
import com.yahoo.vespa.hosted.controller.routing.context.DeploymentRoutingContext.SharedDeploymentRoutingContext;
import com.yahoo.vespa.hosted.controller.routing.context.ExclusiveZoneRoutingContext;
import com.yahoo.vespa.hosted.controller.routing.context.RoutingContext;
import com.yahoo.vespa.hosted.controller.routing.context.SharedZoneRoutingContext;
import com.yahoo.vespa.hosted.controller.routing.rotation.Rotation;
import com.yahoo.vespa.hosted.controller.routing.rotation.RotationLock;
import com.yahoo.vespa.hosted.controller.routing.rotation.RotationRepository;
import com.yahoo.vespa.hosted.rotation.config.RotationsConfig;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.TreeMap;
import java.util.stream.Collectors;
/**
* The routing controller encapsulates state and methods for inspecting and manipulating deployment endpoints in a
* hosted Vespa system.
*
* The one-stop shop for all your routing needs!
*
* @author mpolden
*/
public class RoutingController {
private final Controller controller;
private final RoutingPolicies routingPolicies;
private final RotationRepository rotationRepository;
/**
 * Creates a routing controller backed by the given controller and rotation configuration.
 *
 * @param controller the surrounding controller; must be non-null
 * @param rotationsConfig configuration of the available global rotations; must be non-null
 */
public RoutingController(Controller controller, RotationsConfig rotationsConfig) {
this.controller = Objects.requireNonNull(controller, "controller must be non-null");
this.routingPolicies = new RoutingPolicies(controller);
// Rotation assignments are persisted through the controller's curator
this.rotationRepository = new RotationRepository(Objects.requireNonNull(rotationsConfig, "rotationsConfig must be non-null"),
controller.applications(),
controller.curator());
}
/** Creates a routing context for the given deployment, shared or exclusive depending on its zone. */
public DeploymentRoutingContext of(DeploymentId deployment) {
    boolean sharedRouting = usesSharedRouting(deployment.zoneId());
    if (!sharedRouting) {
        return new ExclusiveDeploymentRoutingContext(deployment, this);
    }
    return new SharedDeploymentRoutingContext(deployment,
                                             this,
                                             controller.serviceRegistry().configServer(),
                                             controller.clock());
}
/** Creates a routing context for the given zone, shared or exclusive depending on its routing setup. */
public RoutingContext of(ZoneId zone) {
    return usesSharedRouting(zone)
            ? new SharedZoneRoutingContext(zone, controller.serviceRegistry().configServer())
            : new ExclusiveZoneRoutingContext(zone, routingPolicies);
}
/** Returns the routing policies maintained by this controller. */
public RoutingPolicies policies() {
return routingPolicies;
}
/** Returns the repository of global rotations. */
public RotationRepository rotations() {
return rotationRepository;
}
/**
 * Reads and returns the zone-scoped endpoints of the given deployment.
 *
 * Cluster names for zone-scoped endpoints are only known through the stored routing policies,
 * so this expands every active policy of the deployment into zone and region endpoints, for
 * each routing method supported by the policy's zone.
 *
 * Fix: removed the unused local {@code isSystemApplication}, which was computed but never read.
 *
 * @param deployment the deployment to read endpoints for
 * @return the zone- and region-scoped endpoints of the deployment
 */
public EndpointList readEndpointsOf(DeploymentId deployment) {
    Set<Endpoint> endpoints = new LinkedHashSet<>();
    // To discover the cluster name for a zone-scoped endpoint, we need to read routing policies
    for (var policy : routingPolicies.read(deployment)) {
        if (!policy.status().isActive()) continue;
        for (var routingMethod : controller.zoneRegistry().routingMethods(policy.id().zone())) {
            endpoints.addAll(policy.zoneEndpointsIn(controller.system(), routingMethod, controller.zoneRegistry()));
            endpoints.add(policy.regionEndpointIn(controller.system(), routingMethod));
        }
    }
    return EndpointList.copyOf(endpoints);
}
/** Reads the owning application and returns the endpoints declared for the given instance. */
public EndpointList readDeclaredEndpointsOf(ApplicationId instance) {
    boolean isSystemApplication = SystemApplication.matching(instance).isPresent();
    if (isSystemApplication) {
        return EndpointList.EMPTY; // system applications declare no endpoints
    }
    TenantAndApplicationId owner = TenantAndApplicationId.from(instance);
    return readDeclaredEndpointsOf(owner).instance(instance.instance());
}
/**
 * Reads the given application and returns the endpoints declared in its deployment spec.
 *
 * @param application the application to read
 * @return the declared endpoints across all instances of the application
 */
public EndpointList readDeclaredEndpointsOf(TenantAndApplicationId application) {
return declaredEndpointsOf(controller.applications().requireApplication(application));
}
/**
 * Returns endpoints declared in {@link DeploymentSpec} for given application.
 *
 * Handles three kinds of declarations: the legacy {@code global-service-id} syntax, per-instance
 * {@code endpoints} declarations, and application-level endpoints spanning instances.
 *
 * @param application the application whose deployment spec is inspected
 * @return the global and application-scoped endpoints declared for the application
 */
public EndpointList declaredEndpointsOf(Application application) {
Set<Endpoint> endpoints = new LinkedHashSet<>();
DeploymentSpec deploymentSpec = application.deploymentSpec();
for (var spec : deploymentSpec.instances()) {
ApplicationId instance = application.id().instance(spec.name());
// Add endpoint declared with legacy syntax
spec.globalServiceId().ifPresent(clusterId -> {
// Legacy global-service-id targets every declared prod zone of the instance
List<DeploymentId> deployments = spec.zones().stream()
.filter(zone -> zone.concerns(Environment.prod))
.map(zone -> new DeploymentId(instance, ZoneId.from(Environment.prod, zone.region().get())))
.collect(Collectors.toList());
RoutingId routingId = RoutingId.of(instance, EndpointId.defaultId());
endpoints.addAll(computeGlobalEndpoints(routingId, ClusterSpec.Id.from(clusterId), deployments, deploymentSpec));
});
// Add endpoints declared with current syntax
spec.endpoints().forEach(declaredEndpoint -> {
RoutingId routingId = RoutingId.of(instance, EndpointId.of(declaredEndpoint.endpointId()));
List<DeploymentId> deployments = declaredEndpoint.regions().stream()
.map(region -> new DeploymentId(instance,
ZoneId.from(Environment.prod, region)))
.collect(Collectors.toList());
endpoints.addAll(computeGlobalEndpoints(routingId, ClusterSpec.Id.from(declaredEndpoint.containerId()), deployments, deploymentSpec));
});
}
// Add application endpoints
for (var declaredEndpoint : deploymentSpec.endpoints()) {
Map<DeploymentId, Integer> deployments = declaredEndpoint.targets().stream()
.collect(Collectors.toMap(t -> new DeploymentId(application.id().instance(t.instance()),
ZoneId.from(Environment.prod, t.region())),
t -> t.weight()));
// An application endpoint can only target a single zone, so we just pick the zone of any deployment target
ZoneId zone = deployments.keySet().iterator().next().zoneId();
// Application endpoints are only supported when using direct routing methods
RoutingMethod routingMethod = usesSharedRouting(zone) ? RoutingMethod.sharedLayer4 : RoutingMethod.exclusive;
endpoints.add(Endpoint.of(application.id())
.targetApplication(EndpointId.of(declaredEndpoint.endpointId()),
ClusterSpec.Id.from(declaredEndpoint.containerId()),
deployments)
.routingMethod(routingMethod)
.on(Port.fromRoutingMethod(routingMethod))
.in(controller.system()));
}
return EndpointList.copyOf(endpoints);
}
/** Read test runner endpoints for given deployments, grouped by their zone */
public Map<ZoneId, List<Endpoint>> readTestRunnerEndpointsOf(Collection<DeploymentId> deployments) {
    TreeMap<ZoneId, List<Endpoint>> endpointsByZone = new TreeMap<>(Comparator.comparing(ZoneId::value));
    for (DeploymentId deployment : deployments) {
        EndpointList candidates = readEndpointsOf(deployment).scope(Endpoint.Scope.zone)
                                                             .not().legacy();
        // Prefer direct endpoints when the deployment has any
        EndpointList direct = candidates.direct();
        EndpointList chosen = direct.isEmpty() ? candidates : direct;
        if (chosen.isEmpty()) {
            continue;
        }
        endpointsByZone.put(deployment.zoneId(), chosen.asList());
    }
    return Collections.unmodifiableSortedMap(endpointsByZone);
}
/**
 * Returns certificate DNS names (CN and SAN values) for given deployment.
 *
 * The returned list starts with a short hashed common name (the certificate provider
 * requires the first CN to be under 64 characters), followed by default and wildcard
 * names for global, application and zone endpoints as applicable.
 */
public List<String> certificateDnsNames(DeploymentId deployment, DeploymentSpec deploymentSpec) {
List<String> endpointDnsNames = new ArrayList<>();
// We add first an endpoint name based on a hash of the application ID,
// as the certificate provider requires the first CN to be < 64 characters long.
endpointDnsNames.add(commonNameHashOf(deployment.applicationId(), controller.system()));
List<Endpoint.EndpointBuilder> builders = new ArrayList<>();
if (deployment.zoneId().environment().isProduction()) {
// Add default and wildcard names for global endpoints
builders.add(Endpoint.of(deployment.applicationId()).target(EndpointId.defaultId()));
builders.add(Endpoint.of(deployment.applicationId()).wildcard());
// Add default and wildcard names for each region targeted by application endpoints
List<DeploymentId> deploymentTargets = deploymentSpec.endpoints().stream()
.map(com.yahoo.config.application.api.Endpoint::targets)
.flatMap(Collection::stream)
.map(com.yahoo.config.application.api.Endpoint.Target::region)
.distinct()
.map(region -> new DeploymentId(deployment.applicationId(), ZoneId.from(Environment.prod, region)))
.collect(Collectors.toUnmodifiableList())
/** Returns the global and application-level endpoints for given deployment, as container endpoints */
public Set<ContainerEndpoint> containerEndpointsOf(LockedApplication application, InstanceName instanceName, ZoneId zone) {
// Assign rotations to application
for (var deploymentInstanceSpec : application.get().deploymentSpec().instances()) {
if (deploymentInstanceSpec.concerns(Environment.prod)) {
application = controller.routing().assignRotations(application, deploymentInstanceSpec.name());
}
}
// Add endpoints backed by a rotation, and register them in DNS if necessary
boolean registerLegacyNames = requiresLegacyNames(application.get().deploymentSpec(), instanceName);
Instance instance = application.get().require(instanceName);
Set<ContainerEndpoint> containerEndpoints = new HashSet<>();
DeploymentId deployment = new DeploymentId(instance.id(), zone);
EndpointList endpoints = declaredEndpointsOf(application.get()).targets(deployment);
EndpointList globalEndpoints = endpoints.scope(Endpoint.Scope.global);
for (var assignedRotation : instance.rotations()) {
EndpointList rotationEndpoints = globalEndpoints.named(assignedRotation.endpointId())
.requiresRotation();
// Skip rotations which do not apply to this zone. Legacy names always point to all zones
if (!registerLegacyNames && !assignedRotation.regions().contains(zone.region())) {
continue;
}
// Omit legacy DNS names when assigning rotations using <endpoints/> syntax
if (!registerLegacyNames) {
rotationEndpoints = rotationEndpoints.not().legacy();
}
// Register names in DNS
Rotation rotation = rotationRepository.requireRotation(assignedRotation.rotationId());
for (var endpoint : rotationEndpoints) {
controller.nameServiceForwarder().createCname(RecordName.from(endpoint.dnsName()),
RecordData.fqdn(rotation.name()),
Priority.normal);
List<String> names = List.of(endpoint.dnsName(),
// Include rotation ID as a valid name of this container endpoint
// (required by global routing health checks)
assignedRotation.rotationId().asString());
containerEndpoints.add(new ContainerEndpoint(assignedRotation.clusterId().value(),
asString(Endpoint.Scope.global),
names,
OptionalInt.empty(),
endpoint.routingMethod()));
}
}
// Add endpoints not backed by a rotation (i.e. other routing methods so that the config server always knows
// about global names, even when not using rotations)
globalEndpoints.not().requiresRotation()
.groupingBy(Endpoint::cluster)
.forEach((clusterId, clusterEndpoints) -> {
containerEndpoints.add(new ContainerEndpoint(clusterId.value(),
asString(Endpoint.Scope.global),
clusterEndpoints.mapToList(Endpoint::dnsName),
OptionalInt.empty(),
RoutingMethod.exclusive));
});
// Add application endpoints
EndpointList applicationEndpoints = endpoints.scope(Endpoint.Scope.application);
for (var endpoint : applicationEndpoints.shared()) { // DNS for non-shared endpoints is handled by RoutingPolicies
Set<ZoneId> targetZones = endpoint.targets().stream()
.map(t -> t.deployment().zoneId())
.collect(Collectors.toUnmodifiableSet());
if (targetZones.size() != 1) throw new IllegalArgumentException("Endpoint '" + endpoint.name() +
"' must target a single zone, got " +
targetZones);
ZoneId targetZone = targetZones.iterator().next();
String vipHostname = controller.zoneRegistry().getVipHostname(targetZone)
.orElseThrow(() -> new IllegalArgumentException("No VIP configured for zone " + targetZone));
controller.nameServiceForwarder().createCname(RecordName.from(endpoint.dnsName()),
RecordData.fqdn(vipHostname),
Priority.normal);
}
Map<ClusterSpec.Id, EndpointList> applicationEndpointsByCluster = applicationEndpoints.groupingBy(Endpoint::cluster);
for (var kv : applicationEndpointsByCluster.entrySet()) {
ClusterSpec.Id clusterId = kv.getKey();
EndpointList clusterEndpoints = kv.getValue();
for (var endpoint : clusterEndpoints) {
Optional<Endpoint.Target> matchingTarget = endpoint.targets().stream()
.filter(t -> t.routesTo(deployment))
.findFirst();
if (matchingTarget.isEmpty()) throw new IllegalStateException("No target found routing to " + deployment + " in " + endpoint);
containerEndpoints.add(new ContainerEndpoint(clusterId.value(),
asString(Endpoint.Scope.application),
List.of(endpoint.dnsName()),
OptionalInt.of(matchingTarget.get().weight()),
endpoint.routingMethod()));
}
}
return Collections.unmodifiableSet(containerEndpoints);
}
/** Remove endpoints in DNS for all rotations assigned to given instance */
public void removeEndpointsInDns(Application application, InstanceName instanceName) {
Set<Endpoint> endpointsToRemove = new LinkedHashSet<>();
Instance instance = application.require(instanceName);
// Compute endpoints from rotations. When removing DNS records for rotation-based endpoints we cannot use the
// deployment spec, because submitting an empty deployment spec is the first step of removing an application
for (var rotation : instance.rotations()) {
var deployments = rotation.regions().stream()
.map(region -> new DeploymentId(instance.id(), ZoneId.from(Environment.prod, region)))
.collect(Collectors.toList());
endpointsToRemove.addAll(computeGlobalEndpoints(RoutingId.of(instance.id(), rotation.endpointId()),
rotation.clusterId(), deployments, application.deploymentSpec()));
}
endpointsToRemove.forEach(endpoint -> controller.nameServiceForwarder()
.removeRecords(Record.Type.CNAME,
RecordName.from(endpoint.dnsName()),
Priority.normal));
}
/**
* Assigns one or more global rotations to given application, if eligible. The given application is implicitly
* stored, ensuring that the assigned rotation(s) are persisted when this returns.
*/
private LockedApplication assignRotations(LockedApplication application, InstanceName instanceName) {
try (RotationLock rotationLock = rotationRepository.lock()) {
var rotations = rotationRepository.getOrAssignRotations(application.get().deploymentSpec(),
application.get().require(instanceName),
rotationLock);
application = application.with(instanceName, instance -> instance.with(rotations));
controller.applications().store(application); // store assigned rotation even if deployment fails
}
return application;
}
private boolean usesSharedRouting(ZoneId zone) {
return controller.zoneRegistry().routingMethods(zone).stream().anyMatch(RoutingMethod::isShared);
}
/** Returns the routing methods that are available across all given deployments */
private List<RoutingMethod> routingMethodsOfAll(Collection<DeploymentId> deployments) {
var deploymentsByMethod = new HashMap<RoutingMethod, Set<DeploymentId>>();
for (var deployment : deployments) {
for (var method : controller.zoneRegistry().routingMethods(deployment.zoneId())) {
deploymentsByMethod.computeIfAbsent(method, k -> new LinkedHashSet<>())
.add(deployment);
}
}
var routingMethods = new ArrayList<RoutingMethod>();
deploymentsByMethod.forEach((method, supportedDeployments) -> {
if (supportedDeployments.containsAll(deployments)) {
routingMethods.add(method);
}
});
return Collections.unmodifiableList(routingMethods);
}
/** Compute global endpoints for given routing ID, application and deployments */
private List<Endpoint> computeGlobalEndpoints(RoutingId routingId, ClusterSpec.Id cluster, List<DeploymentId> deployments, DeploymentSpec deploymentSpec) {
var endpoints = new ArrayList<Endpoint>();
var directMethods = 0;
var availableRoutingMethods = routingMethodsOfAll(deployments);
for (var method : availableRoutingMethods) {
if (method.isDirect() && ++directMethods > 1) {
throw new IllegalArgumentException("Invalid routing methods for " + routingId + ": Exceeded maximum " +
"direct methods");
}
endpoints.add(Endpoint.of(routingId.instance())
.target(routingId.endpointId(), cluster, deployments)
.on(Port.fromRoutingMethod(method))
.routingMethod(method)
.in(controller.system()));
}
return endpoints;
}
/** Whether legacy global DNS names should be available for given application */
private static boolean requiresLegacyNames(DeploymentSpec deploymentSpec, InstanceName instanceName) {
return deploymentSpec.instance(instanceName)
.flatMap(DeploymentInstanceSpec::globalServiceId)
.isPresent();
}
/** Create a common name based on a hash of given application. This must be less than 64 characters long. */
private static String commonNameHashOf(ApplicationId application, SystemName system) {
HashCode sha1 = Hashing.sha1().hashString(application.serializedForm(), StandardCharsets.UTF_8);
String base32 = BaseEncoding.base32().omitPadding().lowerCase().encode(sha1.asBytes());
return 'v' + base32 + Endpoint.internalDnsSuffix(system);
}
private static String asString(Endpoint.Scope scope) {
switch (scope) {
case application: return "application";
case global: return "global";
case weighted: return "weighted";
case zone: return "zone";
}
throw new IllegalArgumentException("Unknown scope " + scope);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.igfs;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.nio.ByteBuffer;
import org.apache.ignite.internal.processors.task.GridInternal;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.extensions.communication.Message;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* File's binary data block key.
*/
@GridInternal
public final class IgfsBlockKey implements Message, Externalizable, Comparable<IgfsBlockKey> {
/** */
private static final long serialVersionUID = 0L;
/** File system file ID. */
private IgniteUuid fileId;
/** Block ID. */
private long blockId;
/** Block affinity key. */
private IgniteUuid affKey;
/** Eviction exclude flag. */
private boolean evictExclude;
/**
* Constructs file's binary data block key.
*
* @param fileId File ID.
* @param affKey Affinity key.
* @param evictExclude Evict exclude flag.
* @param blockId Block ID.
*/
public IgfsBlockKey(IgniteUuid fileId, @Nullable IgniteUuid affKey, boolean evictExclude, long blockId) {
assert fileId != null;
assert blockId >= 0;
this.fileId = fileId;
this.affKey = affKey;
this.evictExclude = evictExclude;
this.blockId = blockId;
}
/**
* Empty constructor required for {@link Externalizable}.
*/
public IgfsBlockKey() {
// No-op.
}
/**
* @return File ID.
*/
public IgniteUuid getFileId() {
return fileId;
}
/**
* @return Block affinity key.
*/
public IgniteUuid affinityKey() {
return affKey;
}
/**
* @return Evict exclude flag.
*/
public boolean evictExclude() {
return evictExclude;
}
/**
* @return Block ID.
*/
public long getBlockId() {
return blockId;
}
/** {@inheritDoc} */
@Override public int compareTo(@NotNull IgfsBlockKey o) {
int res = fileId.compareTo(o.fileId);
if (res != 0)
return res;
long v1 = blockId;
long v2 = o.blockId;
if (v1 != v2)
return v1 > v2 ? 1 : -1;
if (affKey == null && o.affKey == null)
return 0;
if (affKey != null && o.affKey != null)
return affKey.compareTo(o.affKey);
return affKey != null ? -1 : 1;
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
U.writeGridUuid(out, fileId);
U.writeGridUuid(out, affKey);
out.writeBoolean(evictExclude);
out.writeLong(blockId);
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException {
fileId = U.readGridUuid(in);
affKey = U.readGridUuid(in);
evictExclude = in.readBoolean();
blockId = in.readLong();
}
/** {@inheritDoc} */
@Override public int hashCode() {
return fileId.hashCode() + (int)(blockId ^ (blockId >>> 32));
}
/** {@inheritDoc} */
@Override public boolean equals(Object o) {
if (o == this)
return true;
if (o == null || o.getClass() != getClass())
return false;
IgfsBlockKey that = (IgfsBlockKey)o;
return blockId == that.blockId && fileId.equals(that.fileId) && F.eq(affKey, that.affKey) &&
evictExclude == that.evictExclude;
}
/** {@inheritDoc} */
@Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
writer.setBuffer(buf);
if (!writer.isHeaderWritten()) {
if (!writer.writeHeader(directType(), fieldsCount()))
return false;
writer.onHeaderWritten();
}
switch (writer.state()) {
case 0:
if (!writer.writeIgniteUuid("affKey", affKey))
return false;
writer.incrementState();
case 1:
if (!writer.writeLong("blockId", blockId))
return false;
writer.incrementState();
case 2:
if (!writer.writeBoolean("evictExclude", evictExclude))
return false;
writer.incrementState();
case 3:
if (!writer.writeIgniteUuid("fileId", fileId))
return false;
writer.incrementState();
}
return true;
}
/** {@inheritDoc} */
@Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
reader.setBuffer(buf);
if (!reader.beforeMessageRead())
return false;
switch (reader.state()) {
case 0:
affKey = reader.readIgniteUuid("affKey");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 1:
blockId = reader.readLong("blockId");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 2:
evictExclude = reader.readBoolean("evictExclude");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 3:
fileId = reader.readIgniteUuid("fileId");
if (!reader.isLastRead())
return false;
reader.incrementState();
}
return reader.afterMessageRead(IgfsBlockKey.class);
}
/** {@inheritDoc} */
@Override public byte directType() {
return 65;
}
/** {@inheritDoc} */
@Override public byte fieldsCount() {
return 4;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(IgfsBlockKey.class, this);
}
}
|
|
/*
* Copyright 2015 Adaptris Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.adaptris.core.ftp;
import static com.adaptris.core.ftp.EmbeddedFtpServer.DEFAULT_FILENAME;
import static com.adaptris.core.ftp.EmbeddedFtpServer.DEFAULT_PASSWORD;
import static com.adaptris.core.ftp.EmbeddedFtpServer.DEFAULT_USERNAME;
import static com.adaptris.core.ftp.EmbeddedFtpServer.DEFAULT_WORK_DIR_CANONICAL;
import static com.adaptris.core.ftp.EmbeddedFtpServer.PAYLOAD;
import static com.adaptris.core.ftp.EmbeddedFtpServer.SLASH;
import org.mockftpserver.fake.FakeFtpServer;
import org.mockftpserver.fake.filesystem.FileSystem;
import com.adaptris.core.AdaptrisMessage;
import com.adaptris.core.AdaptrisMessageFactory;
import com.adaptris.core.CoreException;
import com.adaptris.core.ServiceException;
import com.adaptris.core.services.aggregator.AggregatingServiceExample;
import com.adaptris.core.services.aggregator.ConsumeDestinationFromMetadata;
import com.adaptris.core.services.aggregator.ConsumeDestinationGenerator;
import com.adaptris.core.services.aggregator.IgnoreOriginalMimeAggregator;
import com.adaptris.core.services.aggregator.MessageAggregator;
import com.adaptris.core.services.aggregator.ReplaceWithFirstMessage;
import com.adaptris.core.stubs.DefectiveMessageFactory;
import com.adaptris.core.stubs.MockMessageListener;
import com.adaptris.core.util.LifecycleHelper;
import com.adaptris.core.util.MimeHelper;
import com.adaptris.util.text.mime.MultiPartInput;
public class AggregatingFtpConsumeServiceTest extends AggregatingServiceExample {
public AggregatingFtpConsumeServiceTest(String name) {
super(name);
}
public void setUp() throws Exception {
super.setUp();
}
public void tearDown() throws Exception {
super.tearDown();
}
public void testInit() throws Exception {
AggregatingFtpConsumeService service = new AggregatingFtpConsumeService();
try {
LifecycleHelper.prepare(service);
LifecycleHelper.init(service);
fail();
}
catch (CoreException expected) {
}
service = new AggregatingFtpConsumeService();
service.setConnection(new FtpConnection());
try {
LifecycleHelper.prepare(service);
LifecycleHelper.init(service);
fail();
}
catch (CoreException expected) {
}
service = new AggregatingFtpConsumeService();
service.setConsumer(new AggregatingFtpConsumer());
try {
LifecycleHelper.prepare(service);
LifecycleHelper.init(service);
fail();
}
catch (CoreException expected) {
}
service = new AggregatingFtpConsumeService(new FtpConnection(),
createConsumer(createConsumeDestination("ftp://localhost/work", null), new ReplaceWithFirstMessage()));
LifecycleHelper.prepare(service);
LifecycleHelper.init(service);
LifecycleHelper.close(service);
}
public void testService_SingleFile() throws Exception {
int count = 1;
EmbeddedFtpServer helper = new EmbeddedFtpServer();
MockMessageListener listener = new MockMessageListener();
FakeFtpServer server = helper.createAndStart(helper.createFilesystem(count));
try {
// should be ftp://localhost/home/user/work/file0 which is created when you
// create the filesystem.
String ftpConsumeUrl = "ftp://localhost" + DEFAULT_WORK_DIR_CANONICAL + SLASH + DEFAULT_FILENAME + 0;
ConsumeDestinationFromMetadata dest = createConsumeDestination(ftpConsumeUrl, null);
FtpConnection conn = createConnection(server);
AggregatingFtpConsumer consumer = createConsumer(dest, new ReplaceWithFirstMessage());
AggregatingFtpConsumeService service = new AggregatingFtpConsumeService(conn, consumer);
AdaptrisMessage msg = AdaptrisMessageFactory.getDefaultInstance().newMessage();
execute(service, msg);
assertEquals(PAYLOAD, msg.getContent());
}
finally {
server.stop();
}
}
public void testService_SingleFile_Failure() throws Exception {
int count = 1;
EmbeddedFtpServer helper = new EmbeddedFtpServer();
MockMessageListener listener = new MockMessageListener();
FakeFtpServer server = helper.createAndStart(helper.createFilesystem(count));
try {
// should be ftp://localhost/home/user/work/file0 which is created when you
// create the filesystem.
String ftpConsumeUrl = "ftp://localhost" + DEFAULT_WORK_DIR_CANONICAL + SLASH + DEFAULT_FILENAME + 0;
ConsumeDestinationFromMetadata dest = createConsumeDestination(ftpConsumeUrl, null);
FtpConnection conn = createConnection(server);
AggregatingFtpConsumer consumer = createConsumer(dest, new ReplaceWithFirstMessage());
AggregatingFtpConsumeService service = new AggregatingFtpConsumeService(conn, consumer);
AdaptrisMessage msg = new DefectiveMessageFactory().newMessage();
try {
execute(service, msg);
fail();
}
catch (ServiceException expected) {
}
}
finally {
server.stop();
}
}
public void testService_Single_NoDelete() throws Exception {
int count = 1;
EmbeddedFtpServer helper = new EmbeddedFtpServer();
MockMessageListener listener = new MockMessageListener();
FileSystem filesystem = helper.createFilesystem(count);
FakeFtpServer server = helper.createAndStart(filesystem);
try {
// should be ftp://localhost/home/user/work/file0 which is created when you
// create the filesystem.
String ftpConsumeUrl = "ftp://localhost" + DEFAULT_WORK_DIR_CANONICAL + SLASH + DEFAULT_FILENAME + 0;
ConsumeDestinationFromMetadata dest = createConsumeDestination(ftpConsumeUrl, null);
FtpConnection conn = createConnection(server);
conn.setAdditionalDebug(false);
AggregatingFtpConsumer consumer = createConsumer(dest, new ReplaceWithFirstMessage());
consumer.setDeleteAggregatedFiles(Boolean.FALSE);
AggregatingFtpConsumeService service = new AggregatingFtpConsumeService(conn, consumer);
AdaptrisMessage msg = AdaptrisMessageFactory.getDefaultInstance().newMessage();
execute(service, msg);
assertEquals(PAYLOAD, msg.getContent());
// didn't get dleted so should still exist.
assertEquals(count, filesystem.listFiles(DEFAULT_WORK_DIR_CANONICAL).size());
}
finally {
server.stop();
}
}
public void testService_MultipleFiles() throws Exception {
int count = 5;
EmbeddedFtpServer helper = new EmbeddedFtpServer();
MockMessageListener listener = new MockMessageListener();
FakeFtpServer server = helper.createAndStart(helper.createFilesystem(count));
try {
// should be ftp://localhost/home/user/work/ which is created when you
// create the filesystem.
String ftpConsumeUrl = "ftp://localhost" + DEFAULT_WORK_DIR_CANONICAL;
ConsumeDestinationFromMetadata dest = createConsumeDestination(ftpConsumeUrl, ".*");
FtpConnection conn = createConnection(server);
AggregatingFtpConsumer consumer = createConsumer(dest, new IgnoreOriginalMimeAggregator());
AggregatingFtpConsumeService service = new AggregatingFtpConsumeService(conn, consumer);
AdaptrisMessage msg = AdaptrisMessageFactory.getDefaultInstance().newMessage();
execute(service, msg);
MultiPartInput input = MimeHelper.create(msg, false);
assertEquals(count, input.size());
}
finally {
server.stop();
}
}
public void testService_MultipleFiles_NoDelete() throws Exception {
int count = 5;
EmbeddedFtpServer helper = new EmbeddedFtpServer();
MockMessageListener listener = new MockMessageListener();
FileSystem filesystem = helper.createFilesystem(count);
FakeFtpServer server = helper.createAndStart(filesystem);
try {
// should be ftp://localhost/home/user/work
String ftpConsumeUrl = "ftp://localhost" + DEFAULT_WORK_DIR_CANONICAL;
ConsumeDestinationFromMetadata dest = createConsumeDestination(ftpConsumeUrl, ".*");
FtpConnection conn = createConnection(server);
conn.setAdditionalDebug(false);
AggregatingFtpConsumer consumer = createConsumer(dest, new IgnoreOriginalMimeAggregator());
consumer.setDeleteAggregatedFiles(Boolean.FALSE);
AggregatingFtpConsumeService service = new AggregatingFtpConsumeService(conn, consumer);
AdaptrisMessage msg = AdaptrisMessageFactory.getDefaultInstance().newMessage();
execute(service, msg);
MultiPartInput input = MimeHelper.create(msg, false);
assertEquals(count, input.size());
// didn't get dleted so should still exist.
assertEquals(count, filesystem.listFiles(DEFAULT_WORK_DIR_CANONICAL).size());
}
finally {
server.stop();
}
}
public void testService_MultipleFiles_Failure() throws Exception {
int count = 5;
EmbeddedFtpServer helper = new EmbeddedFtpServer();
MockMessageListener listener = new MockMessageListener();
FakeFtpServer server = helper.createAndStart(helper.createFilesystem(count));
try {
// should be ftp://localhost/home/user/work/ which is created when you
// create the filesystem.
String ftpConsumeUrl = "ftp://localhost" + DEFAULT_WORK_DIR_CANONICAL;
ConsumeDestinationFromMetadata dest = createConsumeDestination(ftpConsumeUrl, ".*");
FtpConnection conn = createConnection(server);
AggregatingFtpConsumer consumer = createConsumer(dest, new IgnoreOriginalMimeAggregator());
AggregatingFtpConsumeService service = new AggregatingFtpConsumeService(conn, consumer);
AdaptrisMessage msg = new DefectiveMessageFactory().newMessage();
try {
execute(service, msg);
fail();
}
catch (ServiceException expected) {
}
}
finally {
server.stop();
}
}
private ConsumeDestinationFromMetadata createConsumeDestination(String dir, String filterExp) {
ConsumeDestinationFromMetadata d = new ConsumeDestinationFromMetadata();
d.setDefaultDestination(dir);
d.setDefaultFilterExpression(filterExp);
return d;
}
private AggregatingFtpConsumer createConsumer(ConsumeDestinationGenerator cdg, MessageAggregator aggr) {
AggregatingFtpConsumer consumer = new AggregatingFtpConsumer(cdg);
consumer.setMessageAggregator(aggr);
return consumer;
}
private FtpConnection createConnection(FakeFtpServer server) {
FtpConnection consumeConnection = new FtpConnection();
consumeConnection.setDefaultControlPort(server.getServerControlPort());
consumeConnection.setDefaultPassword(DEFAULT_PASSWORD);
consumeConnection.setDefaultUserName(DEFAULT_USERNAME);
consumeConnection.setCacheConnection(true);
consumeConnection.setAdditionalDebug(true);
return consumeConnection;
}
@Override
protected Object retrieveObjectForSampleConfig() {
ConsumeDestinationFromMetadata mfd = new ConsumeDestinationFromMetadata();
mfd.setDestinationMetadataKey("url");
mfd.setDefaultDestination("ftp://localhost:22/path/to/default");
mfd.setDefaultFilterExpression(".*\\*.xml");
AggregatingFtpConsumer consumer = new AggregatingFtpConsumer(mfd);
consumer.setMessageAggregator(new IgnoreOriginalMimeAggregator());
FtpConnection conn = FtpExampleHelper.ftpConnection();
return new AggregatingFtpConsumeService(conn, consumer);
}
@Override
protected String getExampleCommentHeader(Object o) {
return super.getExampleCommentHeader(o)
+ "\n<!-- \n In the example here, you aggregate the contents of the ftp-server specified by the metadata-key 'url'"
+ "\nmatching only files that correspond to the Perl pattern .*\\.xml. "
+ "\nIf aggrUrl does not exist as metadata, then we attempt to connect to ftp://myhost.com/path/to/default"
+ "\nto pick up any files that correspond to the pattern."
+ "\nThese are then aggregated into a single MIME Multipart message. The original message is ignored." + "\n-->\n";
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.scan;
import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.ObjectArrays;
import com.google.common.collect.Sets;
import com.google.common.hash.Hashing;
import com.google.common.io.CharSource;
import com.google.common.io.LineProcessor;
import org.apache.commons.lang.ArrayUtils;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.hll.HyperLogLogCollector;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.DefaultGenericQueryMetricsFactory;
import org.apache.druid.query.DirectQueryProcessingPool;
import org.apache.druid.query.Druids;
import org.apache.druid.query.QueryContexts;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.QueryTimeoutException;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.context.DefaultResponseContext;
import org.apache.druid.query.context.ResponseContext;
import org.apache.druid.query.expression.TestExprMacroTable;
import org.apache.druid.query.extraction.MapLookupExtractor;
import org.apache.druid.query.filter.AndDimFilter;
import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.lookup.LookupExtractionFn;
import org.apache.druid.query.spec.LegacySegmentSpec;
import org.apache.druid.query.spec.QuerySegmentSpec;
import org.apache.druid.segment.TestIndex;
import org.apache.druid.segment.VirtualColumn;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.virtual.ExpressionVirtualColumn;
import org.apache.druid.testing.InitializedNullHandlingTest;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
*
*/
@RunWith(Parameterized.class)
public class ScanQueryRunnerTest extends InitializedNullHandlingTest
{
// Virtual column "expr" = index * 2, used by the tests that exercise virtual-column scans.
private static final VirtualColumn EXPR_COLUMN =
    new ExpressionVirtualColumn("expr", "index * 2", ColumnType.LONG, TestExprMacroTable.INSTANCE);
// Read the first 13 lines (range [0, 13)) from the sample data, which covers the day 2011-01-12T00:00:00.000Z
public static final String[] V_0112 = readLinesFromSample(0, 13).toArray(new String[0]);
// Read the next 13 lines (range [13, 26)) from the sample data, which covers the day 2011-01-13T00:00:00.000Z
public static final String[] V_0113 = readLinesFromSample(13, 26).toArray(new String[0]);
/**
 * Reads the half-open line range [startLineNum, endLineNum) from the shared sample TSV resource.
 *
 * @param startLineNum first line index (0-based, inclusive) to keep
 * @param endLineNum   line index (exclusive) at which to stop reading
 * @return the raw lines in the requested range, in file order
 */
private static List<String> readLinesFromSample(
    int startLineNum,
    int endLineNum
)
{
  CharSource sampleData = TestIndex.getResourceCharSource("druid.sample.numeric.tsv");
  final List<String> collected = new ArrayList<>();
  try {
    sampleData.readLines(
        new LineProcessor<Object>()
        {
          private int current = 0;

          @Override
          public boolean processLine(String line)
          {
            // Keep only lines whose 0-based index falls inside [startLineNum, endLineNum).
            if (current >= startLineNum && current < endLineNum) {
              collected.add(line);
            }
            current++;
            // Returning false stops the read once the requested range is exhausted.
            return current < endLineNum;
          }

          @Override
          public Object getResult()
          {
            // The result is unused; lines are accumulated via the captured list instead.
            return null;
          }
        }
    );
  }
  catch (Exception e) {
    throw new RuntimeException(e);
  }
  return collected;
}
// Two-day interval covering both sample-data days (2011-01-12 and 2011-01-13).
public static final QuerySegmentSpec I_0112_0114 = new LegacySegmentSpec(
    Intervals.of("2011-01-12T00:00:00.000Z/2011-01-14T00:00:00.000Z")
);
// All raw rows for both days, concatenated in time order.
public static final String[] V_0112_0114 = ObjectArrays.concat(V_0112, V_0113, String.class);
// Shared tool chest / factory used both for building runners and for merge/decoration tests.
private static final ScanQueryQueryToolChest TOOL_CHEST = new ScanQueryQueryToolChest(
    new ScanQueryConfig(),
    DefaultGenericQueryMetricsFactory.instance()
);
private static final ScanQueryRunnerFactory FACTORY = new ScanQueryRunnerFactory(
    TOOL_CHEST,
    new ScanQueryEngine(),
    new ScanQueryConfig()
);
/**
 * Parameterizes the suite: every available query runner is crossed with
 * {@code legacy = false} and {@code legacy = true}.
 */
@Parameterized.Parameters(name = "{0}, legacy = {1}")
public static Iterable<Object[]> constructorFeeder()
{
  return QueryRunnerTestHelper.cartesian(
      QueryRunnerTestHelper.makeQueryRunners(FACTORY),
      ImmutableList.of(false, true)
  );
}
// Runner under test, injected per parameterization.
private final QueryRunner runner;
// Whether queries run in legacy mode (affects the timestamp column name; see getTimestampName()).
private final boolean legacy;
// Full expected column list for unrestricted selects; first entry depends on legacy.
private final List<String> columns;
public ScanQueryRunnerTest(final QueryRunner runner, final boolean legacy)
{
  this.runner = runner;
  // legacy must be assigned before getTimestampName() is consulted below,
  // since the timestamp column name depends on it.
  this.legacy = legacy;
  this.columns = new ArrayList<>(
      Arrays.asList(
          getTimestampName(),
          "expr",
          "market",
          "quality",
          "qualityLong",
          "qualityFloat",
          "qualityDouble",
          "qualityNumericString",
          "longNumericNull",
          "floatNumericNull",
          "doubleNumericNull",
          "placement",
          "placementish",
          "partial_null_column",
          "null_column",
          "index",
          "indexMin",
          "indexMaxPlusTen",
          "quality_uniques",
          "indexFloat",
          "indexMaxFloat",
          "indexMinFloat"
      )
  );
}
/**
 * Builds the baseline test query: full interval, no explicit columns, limit 3,
 * legacy flag taken from the current parameterization. Tests override pieces as needed.
 */
private Druids.ScanQueryBuilder newTestQuery()
{
  final Druids.ScanQueryBuilder builder = Druids.newScanQueryBuilder()
      .dataSource(new TableDataSource(QueryRunnerTestHelper.DATA_SOURCE))
      .columns(Collections.emptyList())
      .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
      .limit(3)
      .legacy(legacy);
  return builder;
}
@Test
public void testFullOnSelect()
{
  // Full select over both sample days, including the "expr" virtual column.
  final ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .virtualColumns(EXPR_COLUMN)
      .build();
  final Iterable<ScanResultValue> actual = runner.run(QueryPlus.wrap(query)).toList();
  final List<ScanResultValue> expected = toExpected(
      toFullEvents(V_0112_0114),
      columns,
      0,
      3
  );
  verify(expected, populateNullColumnAtLastForQueryableIndexCase(actual, "null_column"));
}
@Test
public void testFullOnSelectAsCompactedList()
{
  // Same as testFullOnSelect but with the compacted-list result format,
  // which is converted back to row form before comparison.
  final ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .virtualColumns(EXPR_COLUMN)
      .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
      .build();
  final Iterable<ScanResultValue> actual = runner.run(QueryPlus.wrap(query)).toList();
  final List<ScanResultValue> expected = toExpected(
      toFullEvents(V_0112_0114),
      columns,
      0,
      3
  );
  verify(expected, populateNullColumnAtLastForQueryableIndexCase(compactedListToRow(actual), "null_column"));
}
@Test
public void testSelectWithUnderscoreUnderscoreTime()
{
  // Selects __time explicitly alongside a dimension and a metric.
  ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .columns(
          ColumnHolder.TIME_COLUMN_NAME,
          QueryRunnerTestHelper.MARKET_DIMENSION,
          QueryRunnerTestHelper.INDEX_METRIC
      )
      .build();
  Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
  // Entries in this spec array line up positionally with the tab-separated fields
  // of the raw sample rows; null entries are fields not asserted on.
  final List<List<Map<String, Object>>> expectedEvents = toEvents(
      new String[]{
          getTimestampName() + ":TIME",
          QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING",
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          null,
          QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
      },
      V_0112_0114
  );
  // Add "__time" to all the expected events in legacy mode (derived from the parsed
  // "timestamp" DateTime, since legacy rows do not carry __time directly).
  if (legacy) {
    for (List<Map<String, Object>> batch : expectedEvents) {
      for (Map<String, Object> event : batch) {
        event.put("__time", ((DateTime) event.get("timestamp")).getMillis());
      }
    }
  }
  List<ScanResultValue> expectedResults = toExpected(
      expectedEvents,
      legacy
      ? Lists.newArrayList(getTimestampName(), "__time", "market", "index")
      : Lists.newArrayList("__time", "market", "index"),
      0,
      3
  );
  verify(expectedResults, results);
}
@Test
public void testSelectWithDimsAndMets()
{
  // Selects a single dimension (market) plus a single metric (index).
  ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .columns(QueryRunnerTestHelper.MARKET_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC)
      .build();
  Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
  List<ScanResultValue> expectedResults = toExpected(
      toEvents(
          // Positional spec: only the timestamp (legacy mode only), market, and index
          // fields of the raw rows are asserted on.
          new String[]{
              legacy ? getTimestampName() + ":TIME" : null,
              QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING",
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
          },
          V_0112_0114
      ),
      legacy ? Lists.newArrayList(getTimestampName(), "market", "index") : Lists.newArrayList("market", "index"),
      0,
      3
  );
  verify(expectedResults, results);
}
@Test
public void testSelectWithDimsAndMetsAsCompactedList()
{
  // Same as testSelectWithDimsAndMets but with the compacted-list result format.
  ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .columns(QueryRunnerTestHelper.MARKET_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC)
      .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
      .build();
  Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
  List<ScanResultValue> expectedResults = toExpected(
      toEvents(
          new String[]{
              legacy ? getTimestampName() + ":TIME" : null,
              QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING",
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              null,
              QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
          },
          V_0112_0114
      ),
      legacy ? Lists.newArrayList(getTimestampName(), "market", "index") : Lists.newArrayList("market", "index"),
      0,
      3
  );
  // Compacted-list results are converted back to row form before comparison.
  verify(expectedResults, compactedListToRow(results));
}
@Test
public void testFullOnSelectWithFilterAndLimit()
{
  // limits — exercised across several values, including 0 (treated as "no limit"
  // by toExpected)
  for (int limit : new int[]{3, 1, 5, 7, 0}) {
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
        .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
        .columns(QueryRunnerTestHelper.QUALITY_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC)
        .limit(limit)
        .build();
    Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
    final List<List<Map<String, Object>>> events = toEvents(
        new String[]{
            legacy ? getTimestampName() + ":TIME" : null,
            null,
            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            null,
            null,
            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        // filtered values with day granularity
        new String[]{
            "2011-01-12T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t100.000000",
            "2011-01-12T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t100.000000",
            "2011-01-12T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t100.000000",
            "2011-01-12T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t100.000000",
            "2011-01-12T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t100.000000",
            "2011-01-12T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t100.000000",
            "2011-01-12T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t100.000000",
            "2011-01-12T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t100.000000",
            "2011-01-12T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t100.000000"
        },
        new String[]{
            "2011-01-13T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t94.874713",
            "2011-01-13T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t103.629399",
            "2011-01-13T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t110.087299",
            "2011-01-13T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t114.947403",
            "2011-01-13T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t104.465767",
            "2011-01-13T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t102.851683",
            "2011-01-13T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t108.863011",
            "2011-01-13T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t111.356672",
            "2011-01-13T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t106.236928"
        }
    );
    List<ScanResultValue> expectedResults = toExpected(
        events,
        legacy ? Lists.newArrayList(getTimestampName(), "quality", "index") : Lists.newArrayList("quality", "index"),
        0,
        limit
    );
    verify(expectedResults, results);
  }
}
@Test
public void testSelectWithFilterLookupExtractionFn()
{
  // Filter via a lookup extraction fn: "total_market" is mapped to "replaced", so
  // filtering on "replaced" should match exactly the total_market rows.
  Map<String, String> extractionMap = new HashMap<>();
  extractionMap.put("total_market", "replaced");
  MapLookupExtractor mapLookupExtractor = new MapLookupExtractor(extractionMap, false);
  LookupExtractionFn lookupExtractionFn = new LookupExtractionFn(mapLookupExtractor, false, null, true, true);
  ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "replaced", lookupExtractionFn))
      .columns(QueryRunnerTestHelper.QUALITY_DIMENSION, QueryRunnerTestHelper.INDEX_METRIC)
      .build();
  Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
  // Also run the query through the tool chest's pre/post merge decoration to verify
  // that the optimized path produces the same results.
  Iterable<ScanResultValue> resultsOptimize = TOOL_CHEST
      .postMergeQueryDecoration(TOOL_CHEST.mergeResults(TOOL_CHEST.preMergeQueryDecoration(runner)))
      .run(QueryPlus.wrap(query))
      .toList();
  final List<List<Map<String, Object>>> events = toEvents(
      new String[]{
          legacy ? getTimestampName() + ":TIME" : null,
          null,
          QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
          null,
          null,
          QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
      },
      // filtered values with day granularity
      new String[]{
          "2011-01-12T00:00:00.000Z\ttotal_market\tmezzanine\tpreferred\tmpreferred\t1000.000000",
          "2011-01-12T00:00:00.000Z\ttotal_market\tpremium\tpreferred\tppreferred\t1000.000000"
      },
      new String[]{
          "2011-01-13T00:00:00.000Z\ttotal_market\tmezzanine\tpreferred\tmpreferred\t1040.945505",
          "2011-01-13T00:00:00.000Z\ttotal_market\tpremium\tpreferred\tppreferred\t1689.012875"
      }
  );
  List<ScanResultValue> expectedResults = toExpected(
      events,
      legacy ? Lists.newArrayList(
          getTimestampName(),
          QueryRunnerTestHelper.QUALITY_DIMENSION,
          QueryRunnerTestHelper.INDEX_METRIC
      ) : Lists.newArrayList(
          QueryRunnerTestHelper.QUALITY_DIMENSION,
          QueryRunnerTestHelper.INDEX_METRIC
      ),
      0,
      3
  );
  verify(expectedResults, results);
  verify(expectedResults, resultsOptimize);
}
@Test
public void testFullSelectNoResults()
{
  // ANDing two mutually exclusive market filters matches no rows, so the scan
  // must return an empty result set.
  final ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .filters(
          new AndDimFilter(
              Arrays.asList(
                  new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null),
                  new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "foo", null)
              )
          )
      )
      .build();
  final Iterable<ScanResultValue> actual = runner.run(QueryPlus.wrap(query)).toList();
  final List<ScanResultValue> expected = Collections.emptyList();
  verify(expected, populateNullColumnAtLastForQueryableIndexCase(actual, "null_column"));
}
@Test
public void testFullSelectNoDimensionAndMetric()
{
  // Selecting columns that do not exist: rows still come back (with the legacy
  // timestamp in legacy mode) but carry no values for the unknown columns.
  final ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .columns("foo", "foo2")
      .build();
  final Iterable<ScanResultValue> actual = runner.run(QueryPlus.wrap(query)).toList();
  final List<List<Map<String, Object>>> events = toEvents(
      legacy ? new String[]{getTimestampName() + ":TIME"} : new String[0],
      V_0112_0114
  );
  final List<ScanResultValue> expected = toExpected(
      events,
      legacy ? Lists.newArrayList(getTimestampName(), "foo", "foo2") : Lists.newArrayList("foo", "foo2"),
      0,
      3
  );
  verify(expected, actual);
}
@Test
public void testFullOnSelectWithFilterLimitAndAscendingTimeOrderingListFormat()
{
  // limits shouldn't matter -> all rows should be returned if time-ordering on the broker is occurring
  for (int limit : new int[]{3, 1, 5, 7, 0}) {
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
        .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
        .columns(
            QueryRunnerTestHelper.TIME_DIMENSION,
            QueryRunnerTestHelper.QUALITY_DIMENSION,
            QueryRunnerTestHelper.INDEX_METRIC
        )
        .limit(limit)
        .order(ScanQuery.Order.ASCENDING)
        .context(ImmutableMap.of(ScanQuery.CTX_KEY_OUTERMOST, false))
        .build();
    Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
    // Expected rows for each sample-data day, already in ascending time order.
    String[] seg1Results = new String[]{
        "2011-01-12T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t100.000000"
    };
    String[] seg2Results = new String[]{
        "2011-01-13T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t94.874713",
        "2011-01-13T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t103.629399",
        "2011-01-13T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t110.087299",
        "2011-01-13T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t114.947403",
        "2011-01-13T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t104.465767",
        "2011-01-13T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t102.851683",
        "2011-01-13T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t108.863011",
        "2011-01-13T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t111.356672",
        "2011-01-13T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t106.236928"
    };
    final List<List<Map<String, Object>>> ascendingEvents = toEvents(
        new String[]{
            legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME,
            null,
            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            null,
            null,
            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        (String[]) ArrayUtils.addAll(seg1Results, seg2Results)
    );
    // __time is not part of the raw rows; derive it from the parsed timestamp
    // (a DateTime in legacy mode, an ISO string otherwise).
    if (legacy) {
      for (List<Map<String, Object>> batch : ascendingEvents) {
        for (Map<String, Object> event : batch) {
          event.put("__time", ((DateTime) event.get("timestamp")).getMillis());
        }
      }
    } else {
      for (List<Map<String, Object>> batch : ascendingEvents) {
        for (Map<String, Object> event : batch) {
          event.put("__time", (DateTimes.of((String) event.get("__time"))).getMillis());
        }
      }
    }
    List<ScanResultValue> ascendingExpectedResults = toExpected(
        ascendingEvents,
        legacy ?
        Lists.newArrayList(
            QueryRunnerTestHelper.TIME_DIMENSION,
            getTimestampName(),
            "quality",
            "index"
        ) :
        Lists.newArrayList(
            QueryRunnerTestHelper.TIME_DIMENSION,
            "quality",
            "index"
        ),
        0,
        limit
    );
    verify(ascendingExpectedResults, results);
  }
}
@Test
public void testFullOnSelectWithFilterLimitAndDescendingTimeOrderingListFormat()
{
  // limits shouldn't matter -> all rows should be returned if time-ordering on the broker is occurring
  for (int limit : new int[]{3, 1, 5, 7, 0}) {
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
        .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
        .columns(
            QueryRunnerTestHelper.TIME_DIMENSION,
            QueryRunnerTestHelper.QUALITY_DIMENSION,
            QueryRunnerTestHelper.INDEX_METRIC
        )
        .limit(limit)
        .order(ScanQuery.Order.DESCENDING)
        .build();
    Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
    // Expected rows for each sample-data day, in ascending order; reversed below
    // to match the DESCENDING query order.
    String[] seg1Results = new String[]{
        "2011-01-12T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t100.000000",
        "2011-01-12T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t100.000000"
    };
    String[] seg2Results = new String[]{
        "2011-01-13T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t94.874713",
        "2011-01-13T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t103.629399",
        "2011-01-13T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t110.087299",
        "2011-01-13T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t114.947403",
        "2011-01-13T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t104.465767",
        "2011-01-13T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t102.851683",
        "2011-01-13T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t108.863011",
        "2011-01-13T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t111.356672",
        "2011-01-13T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t106.236928"
    };
    String[] expectedRet = (String[]) ArrayUtils.addAll(seg1Results, seg2Results);
    ArrayUtils.reverse(expectedRet);
    final List<List<Map<String, Object>>> descendingEvents = toEvents(
        new String[]{
            legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME,
            null,
            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            null,
            null,
            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        expectedRet
    );
    // __time is not part of the raw rows; derive it from the parsed timestamp
    // (a DateTime in legacy mode, an ISO string otherwise).
    if (legacy) {
      for (List<Map<String, Object>> batch : descendingEvents) {
        for (Map<String, Object> event : batch) {
          event.put("__time", ((DateTime) event.get("timestamp")).getMillis());
        }
      }
    } else {
      for (List<Map<String, Object>> batch : descendingEvents) {
        for (Map<String, Object> event : batch) {
          event.put("__time", (DateTimes.of((String) event.get("__time"))).getMillis());
        }
      }
    }
    List<ScanResultValue> descendingExpectedResults = toExpected(
        descendingEvents,
        legacy ?
        Lists.newArrayList(
            QueryRunnerTestHelper.TIME_DIMENSION,
            getTimestampName(),
            // getTimestampName() always returns the legacy timestamp when legacy is true
            "quality",
            "index"
        ) :
        Lists.newArrayList(
            QueryRunnerTestHelper.TIME_DIMENSION,
            "quality",
            "index"
        ),
        0,
        limit
    );
    verify(descendingExpectedResults, results);
  }
}
@Test
public void testFullOnSelectWithFilterLimitAndAscendingTimeOrderingCompactedListFormat()
{
  // Expected rows for each sample-data day, already in ascending time order.
  String[] seg1Results = new String[]{
      "2011-01-12T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t100.000000"
  };
  String[] seg2Results = new String[]{
      "2011-01-13T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t94.874713",
      "2011-01-13T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t103.629399",
      "2011-01-13T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t110.087299",
      "2011-01-13T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t114.947403",
      "2011-01-13T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t104.465767",
      "2011-01-13T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t102.851683",
      "2011-01-13T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t108.863011",
      "2011-01-13T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t111.356672",
      "2011-01-13T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t106.236928"
  };
  // limits shouldn't matter -> all rows should be returned if time-ordering on the broker is occurring
  for (int limit : new int[]{3, 0}) {
    /* Ascending */
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
        .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
        .columns(
            QueryRunnerTestHelper.TIME_DIMENSION,
            QueryRunnerTestHelper.QUALITY_DIMENSION,
            QueryRunnerTestHelper.INDEX_METRIC
        )
        .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
        .order(ScanQuery.Order.ASCENDING)
        .limit(limit)
        .build();
    Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
    final List<List<Map<String, Object>>> ascendingEvents = toEvents(
        new String[]{
            legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME,
            null,
            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            null,
            null,
            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        (String[]) ArrayUtils.addAll(seg1Results, seg2Results)
    );
    // __time is not part of the raw rows; derive it from the parsed timestamp
    // (a DateTime in legacy mode, an ISO string otherwise).
    if (legacy) {
      for (List<Map<String, Object>> batch : ascendingEvents) {
        for (Map<String, Object> event : batch) {
          event.put("__time", ((DateTime) event.get("timestamp")).getMillis());
        }
      }
    } else {
      for (List<Map<String, Object>> batch : ascendingEvents) {
        for (Map<String, Object> event : batch) {
          event.put("__time", ((DateTimes.of((String) event.get("__time"))).getMillis()));
        }
      }
    }
    List<ScanResultValue> ascendingExpectedResults = toExpected(
        ascendingEvents,
        legacy ?
        Lists.newArrayList(
            QueryRunnerTestHelper.TIME_DIMENSION,
            getTimestampName(),
            // getTimestampName() always returns the legacy timestamp when legacy is true
            "quality",
            "index"
        ) :
        Lists.newArrayList(
            QueryRunnerTestHelper.TIME_DIMENSION,
            "quality",
            "index"
        ),
        0,
        limit
    );
    // Compacted-list results are converted back to row form before comparison.
    results = compactedListToRow(results);
    verify(ascendingExpectedResults, results);
  }
}
@Test
public void testFullOnSelectWithFilterLimitAndDescendingTimeOrderingCompactedListFormat()
{
  // Expected rows for each sample-data day, in ascending order; reversed below
  // to match the DESCENDING query order.
  String[] seg1Results = new String[]{
      "2011-01-12T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t100.000000",
      "2011-01-12T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t100.000000"
  };
  String[] seg2Results = new String[]{
      "2011-01-13T00:00:00.000Z\tspot\tautomotive\tpreferred\tapreferred\t94.874713",
      "2011-01-13T00:00:00.000Z\tspot\tbusiness\tpreferred\tbpreferred\t103.629399",
      "2011-01-13T00:00:00.000Z\tspot\tentertainment\tpreferred\tepreferred\t110.087299",
      "2011-01-13T00:00:00.000Z\tspot\thealth\tpreferred\thpreferred\t114.947403",
      "2011-01-13T00:00:00.000Z\tspot\tmezzanine\tpreferred\tmpreferred\t104.465767",
      "2011-01-13T00:00:00.000Z\tspot\tnews\tpreferred\tnpreferred\t102.851683",
      "2011-01-13T00:00:00.000Z\tspot\tpremium\tpreferred\tppreferred\t108.863011",
      "2011-01-13T00:00:00.000Z\tspot\ttechnology\tpreferred\ttpreferred\t111.356672",
      "2011-01-13T00:00:00.000Z\tspot\ttravel\tpreferred\ttpreferred\t106.236928"
  };
  // limits shouldn't matter -> all rows should be returned if time-ordering on the broker is occurring
  for (int limit : new int[]{3, 1}) {
    /* Descending */
    ScanQuery query = newTestQuery()
        .intervals(I_0112_0114)
        .filters(new SelectorDimFilter(QueryRunnerTestHelper.MARKET_DIMENSION, "spot", null))
        .columns(
            QueryRunnerTestHelper.TIME_DIMENSION,
            QueryRunnerTestHelper.QUALITY_DIMENSION,
            QueryRunnerTestHelper.INDEX_METRIC
        )
        .resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST)
        .order(ScanQuery.Order.DESCENDING)
        .context(ImmutableMap.of(ScanQuery.CTX_KEY_OUTERMOST, false))
        .limit(limit)
        .build();
    Iterable<ScanResultValue> results = runner.run(QueryPlus.wrap(query)).toList();
    String[] expectedRet = (String[]) ArrayUtils.addAll(seg1Results, seg2Results);
    ArrayUtils.reverse(expectedRet);
    final List<List<Map<String, Object>>> descendingEvents = toEvents(
        new String[]{
            legacy ? getTimestampName() + ":TIME" : ColumnHolder.TIME_COLUMN_NAME,
            null,
            QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
            null,
            null,
            QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE"
        },
        expectedRet //segments in reverse order from above
    );
    // __time is not part of the raw rows; derive it from the parsed timestamp
    // (a DateTime in legacy mode, an ISO string otherwise).
    if (legacy) {
      for (List<Map<String, Object>> batch : descendingEvents) {
        for (Map<String, Object> event : batch) {
          event.put("__time", ((DateTime) event.get("timestamp")).getMillis());
        }
      }
    } else {
      for (List<Map<String, Object>> batch : descendingEvents) {
        for (Map<String, Object> event : batch) {
          event.put("__time", ((DateTimes.of((String) event.get("__time"))).getMillis()));
        }
      }
    }
    List<ScanResultValue> descendingExpectedResults = toExpected(
        descendingEvents,
        legacy ?
        Lists.newArrayList(
            QueryRunnerTestHelper.TIME_DIMENSION,
            getTimestampName(),
            // getTimestampName() always returns the legacy timestamp when legacy is true
            "quality",
            "index"
        ) :
        Lists.newArrayList(
            QueryRunnerTestHelper.TIME_DIMENSION,
            "quality",
            "index"
        ),
        0,
        limit
    );
    // Compacted-list results are converted back to row form before comparison.
    results = compactedListToRow(results);
    verify(descendingExpectedResults, results);
  }
}
@Test
public void testScanQueryTimeout()
{
  // A 1ms timeout plus a timeout-at of "now" in the response context should make
  // the scan abort with a QueryTimeoutException.
  ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .virtualColumns(EXPR_COLUMN)
      .context(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1))
      .build();
  ResponseContext responseContext = DefaultResponseContext.createEmpty();
  responseContext.putTimeoutTime(System.currentTimeMillis());
  try {
    runner.run(QueryPlus.wrap(query), responseContext).toList();
    Assert.fail("didn't timeout");
  }
  catch (RuntimeException e) {
    // Verify both the exception type and its error code.
    Assert.assertTrue(e instanceof QueryTimeoutException);
    Assert.assertEquals("Query timeout", ((QueryTimeoutException) e).getErrorCode());
  }
}
@Test
public void testScanQueryTimeoutMerge()
{
  // A 1ms timeout combined with a delegate runner that sleeps 2ms: the merged
  // runner should observe the timeout and fail with QueryTimeoutException.
  ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .virtualColumns(EXPR_COLUMN)
      .context(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 1))
      .build();
  try {
    FACTORY.mergeRunners(
        DirectQueryProcessingPool.INSTANCE,
        ImmutableList.of(
            (queryPlus, responseContext) -> {
              try {
                // Sleep just long enough to exceed the 1ms timeout.
                Thread.sleep(2);
              }
              catch (InterruptedException ignored) {
                // Best-effort delay only; an early wakeup is harmless here.
              }
              return runner.run(queryPlus, responseContext);
            })
    ).run(QueryPlus.wrap(query), DefaultResponseContext.createEmpty()).toList();
    Assert.fail("didn't timeout");
  }
  catch (RuntimeException e) {
    Assert.assertTrue(e instanceof QueryTimeoutException);
    Assert.assertEquals("Query timeout", ((QueryTimeoutException) e).getErrorCode());
  }
}
@Test
public void testScanQueryTimeoutZeroDoesntTimeOut()
{
  // A timeout of 0 must be treated as "no timeout": the delegate runner sleeps
  // but the query still completes and returns the full expected results.
  ScanQuery query = newTestQuery()
      .intervals(I_0112_0114)
      .virtualColumns(EXPR_COLUMN)
      .context(ImmutableMap.of(QueryContexts.TIMEOUT_KEY, 0))
      .build();
  Iterable<ScanResultValue> results = FACTORY.mergeRunners(
      DirectQueryProcessingPool.INSTANCE,
      ImmutableList.of(
          (queryPlus, responseContext) -> {
            try {
              Thread.sleep(2);
            }
            catch (InterruptedException ignored) {
              // Best-effort delay only; an early wakeup is harmless here.
            }
            return runner.run(queryPlus, responseContext);
          })
  ).run(QueryPlus.wrap(query), DefaultResponseContext.createEmpty()).toList();
  List<ScanResultValue> expectedResults = toExpected(
      toFullEvents(V_0112_0114),
      columns,
      0,
      3
  );
  verify(expectedResults, populateNullColumnAtLastForQueryableIndexCase(results, "null_column"));
}
/**
 * Builds expected events for a full (all-columns) select from raw TSV rows.
 * The spec array passed to toEvents lines up positionally with the row fields;
 * entries past the raw-row width (expr, indexMin, quality_uniques, ...) are
 * derived metrics/virtual columns handled specially inside toEvents.
 */
private List<List<Map<String, Object>>> toFullEvents(final String[]... valueSet)
{
  return toEvents(
      new String[]{
          getTimestampName() + ":TIME",
          QueryRunnerTestHelper.MARKET_DIMENSION + ":STRING",
          QueryRunnerTestHelper.QUALITY_DIMENSION + ":STRING",
          "qualityLong" + ":LONG",
          "qualityFloat" + ":FLOAT",
          "qualityDouble" + ":DOUBLE",
          "qualityNumericString" + ":STRING",
          "longNumericNull" + ":LONG",
          "floatNumericNull" + ":FLOAT",
          "doubleNumericNull" + ":DOUBLE",
          QueryRunnerTestHelper.PLACEMENT_DIMENSION + ":STRING",
          QueryRunnerTestHelper.PLACEMENTISH_DIMENSION + ":STRINGS",
          QueryRunnerTestHelper.INDEX_METRIC + ":DOUBLE",
          QueryRunnerTestHelper.PARTIAL_NULL_DIMENSION + ":STRING",
          "expr",
          "indexMin",
          "indexFloat",
          "indexMaxPlusTen",
          "indexMinFloat",
          "indexMaxFloat",
          "quality_uniques"
      },
      valueSet
  );
}
/**
 * Parses raw tab-separated rows into expected event maps.
 *
 * @param dimSpecs positional specs of the form "name:TYPE" (TYPE one of TIME, STRING, STRINGS,
 *                 LONG, FLOAT, DOUBLE, NULL; a bare name is treated as STRING). A null entry
 *                 skips that field. Specs for derived metrics/virtual columns (expr, indexMin,
 *                 quality_uniques, ...) are computed from already-parsed fields instead of the row.
 * @param valueSet one or more arrays of raw TSV rows, concatenated in order
 * @return a single batch (one-element outer list) of event maps, one per row
 */
private List<List<Map<String, Object>>> toEvents(final String[] dimSpecs, final String[]... valueSet)
{
  List<String> values = new ArrayList<>();
  for (String[] vSet : valueSet) {
    values.addAll(Arrays.asList(vSet));
  }
  List<List<Map<String, Object>>> events = new ArrayList<>();
  events.add(
      Lists.newArrayList(
          Iterables.transform(
              values,
              input -> {
                Map<String, Object> event = new HashMap<>();
                String[] values1 = input.split("\\t");
                for (int i = 0; i < dimSpecs.length; i++) {
                  // Skip fields we are not asserting on. (The former "i >= dimSpecs.length"
                  // clause was removed: it was always false under the loop bound.)
                  if (dimSpecs[i] == null) {
                    continue;
                  }
                  // For testing metrics and virtual columns we have some special handling here, since
                  // they don't appear in the source data.
                  if (dimSpecs[i].equals(EXPR_COLUMN.getOutputName())) {
                    event.put(
                        EXPR_COLUMN.getOutputName(),
                        (double) event.get(QueryRunnerTestHelper.INDEX_METRIC) * 2
                    );
                    continue;
                  } else if (dimSpecs[i].equals("indexMin")) {
                    event.put("indexMin", (double) event.get(QueryRunnerTestHelper.INDEX_METRIC));
                    continue;
                  } else if (dimSpecs[i].equals("indexFloat")) {
                    event.put("indexFloat", (float) (double) event.get(QueryRunnerTestHelper.INDEX_METRIC));
                    continue;
                  } else if (dimSpecs[i].equals("indexMaxPlusTen")) {
                    event.put("indexMaxPlusTen", (double) event.get(QueryRunnerTestHelper.INDEX_METRIC) + 10);
                    continue;
                  } else if (dimSpecs[i].equals("indexMinFloat")) {
                    event.put("indexMinFloat", (float) (double) event.get(QueryRunnerTestHelper.INDEX_METRIC));
                    continue;
                  } else if (dimSpecs[i].equals("indexMaxFloat")) {
                    event.put("indexMaxFloat", (float) (double) event.get(QueryRunnerTestHelper.INDEX_METRIC));
                    continue;
                  } else if (dimSpecs[i].equals("quality_uniques")) {
                    final HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
                    collector.add(
                        Hashing.murmur3_128()
                               .hashBytes(StringUtils.toUtf8((String) event.get("quality")))
                               .asBytes()
                    );
                    event.put("quality_uniques", collector);
                    // NOTE(review): unlike the branches above, there is no `continue` here.
                    // This relies on raw rows having fewer fields than dimSpecs so the bounds
                    // check below skips re-parsing; confirm if the sample data ever widens,
                    // otherwise the collector would be overwritten with the raw string.
                  }
                  // Remaining specs map positionally onto the raw row fields; rows may be
                  // narrower than dimSpecs (derived columns come after the raw fields).
                  if (i >= values1.length) {
                    continue;
                  }
                  String[] specs = dimSpecs[i].split(":");
                  Object eventVal;
                  if (specs.length == 1 || specs[1].equals("STRING")) {
                    eventVal = values1[i];
                  } else if (specs[1].equals("TIME")) {
                    eventVal = toTimestamp(values1[i]);
                  } else if (specs[1].equals("FLOAT")) {
                    eventVal = values1[i].isEmpty() ? NullHandling.defaultFloatValue() : Float.valueOf(values1[i]);
                  } else if (specs[1].equals("DOUBLE")) {
                    eventVal = values1[i].isEmpty() ? NullHandling.defaultDoubleValue() : Double.valueOf(values1[i]);
                  } else if (specs[1].equals("LONG")) {
                    eventVal = values1[i].isEmpty() ? NullHandling.defaultLongValue() : Long.valueOf(values1[i]);
                  } else if (specs[1].equals(("NULL"))) {
                    eventVal = null;
                  } else if (specs[1].equals("STRINGS")) {
                    // Multi-value dimensions are encoded with \u0001 separators in the sample data.
                    eventVal = Arrays.asList(values1[i].split("\u0001"));
                  } else {
                    eventVal = values1[i];
                  }
                  event.put(specs[0], eventVal);
                }
                return event;
              }
          )
      )
  );
  return events;
}
private Object toTimestamp(final String value)
{
if (legacy) {
return DateTimes.of(value);
} else {
return DateTimes.of(value).getMillis();
}
}
private String getTimestampName()
{
return legacy ? "timestamp" : ColumnHolder.TIME_COLUMN_NAME;
}
  /**
   * Wraps each group of expected event rows in a {@link ScanResultValue},
   * applying offset/limit paging to every group independently.
   *
   * @param targets one list of event maps per expected result value
   * @param columns the column names to attach to every result value
   * @param offset  index of the first event to keep within each group
   * @param limit   maximum number of events to keep per group
   */
  private List<ScanResultValue> toExpected(
      List<List<Map<String, Object>>> targets,
      List<String> columns,
      final int offset,
      final int limit
  )
  {
    List<ScanResultValue> expected = Lists.newArrayListWithExpectedSize(targets.size());
    for (List<Map<String, Object>> group : targets) {
      List<Map<String, Object>> events = Lists.newArrayListWithExpectedSize(limit);
      int end = Math.min(group.size(), offset + limit);
      // NOTE(review): end == 0 (empty group, or offset + limit == 0) is treated as
      // "no paging requested" and the whole group is taken — confirm this is intended.
      if (end == 0) {
        end = group.size();
      }
      events.addAll(group.subList(offset, end));
      expected.add(new ScanResultValue(QueryRunnerTestHelper.SEGMENT_ID.toString(), columns, events));
    }
    return expected;
  }
public static void verify(
Iterable<ScanResultValue> expectedResults,
Iterable<ScanResultValue> actualResults
)
{
Iterator<ScanResultValue> expectedIter = expectedResults.iterator();
Iterator<ScanResultValue> actualIter = actualResults.iterator();
while (expectedIter.hasNext()) {
ScanResultValue expected = expectedIter.next();
ScanResultValue actual = actualIter.next();
Assert.assertEquals(expected.getSegmentId(), actual.getSegmentId());
Set exColumns = Sets.newTreeSet(expected.getColumns());
Set acColumns = Sets.newTreeSet(actual.getColumns());
Assert.assertEquals(exColumns, acColumns);
Iterator<Map<String, Object>> expectedEvts = ((List<Map<String, Object>>) expected.getEvents()).iterator();
Iterator<Map<String, Object>> actualEvts = ((List<Map<String, Object>>) actual.getEvents()).iterator();
while (expectedEvts.hasNext()) {
Map<String, Object> exHolder = expectedEvts.next();
Map<String, Object> acHolder = actualEvts.next();
for (Map.Entry<String, Object> ex : exHolder.entrySet()) {
Object actVal = acHolder.get(ex.getKey());
if (actVal instanceof String[]) {
actVal = Arrays.asList((String[]) actVal);
}
Object exValue = ex.getValue();
if (exValue instanceof Double || exValue instanceof Float) {
final double expectedDoubleValue = ((Number) exValue).doubleValue();
Assert.assertEquals(
"invalid value for " + ex.getKey(),
expectedDoubleValue,
((Number) actVal).doubleValue(),
expectedDoubleValue * 1e-6
);
} else {
Assert.assertEquals("invalid value for " + ex.getKey(), ex.getValue(), actVal);
}
}
for (Map.Entry<String, Object> ac : acHolder.entrySet()) {
Object exVal = exHolder.get(ac.getKey());
Object actVal = ac.getValue();
if (actVal instanceof String[]) {
actVal = Arrays.asList((String[]) actVal);
}
if (exVal instanceof Double || exVal instanceof Float) {
final double exDoubleValue = ((Number) exVal).doubleValue();
Assert.assertEquals(
"invalid value for " + ac.getKey(),
exDoubleValue,
((Number) actVal).doubleValue(),
exDoubleValue * 1e-6
);
} else {
Assert.assertEquals("invalid value for " + ac.getKey(), exVal, actVal);
}
}
}
if (actualEvts.hasNext()) {
throw new ISE("This event iterator should be exhausted!");
}
}
if (actualIter.hasNext()) {
throw new ISE("This iterator should be exhausted!");
}
}
private static Iterable<ScanResultValue> populateNullColumnAtLastForQueryableIndexCase(
Iterable<ScanResultValue> results,
String columnName
)
{
// A Queryable index does not have the null column when it has loaded a index.
for (ScanResultValue value : results) {
List<String> columns = value.getColumns();
if (columns.contains(columnName)) {
break;
}
columns.add(columnName);
}
return results;
}
private Iterable<ScanResultValue> compactedListToRow(Iterable<ScanResultValue> results)
{
return Lists.newArrayList(Iterables.transform(results, new Function<ScanResultValue, ScanResultValue>()
{
@Override
public ScanResultValue apply(ScanResultValue input)
{
List<Map<String, Object>> mapEvents = new ArrayList<>();
List<?> events = ((List<?>) input.getEvents());
for (Object event : events) {
Iterator<?> compactedEventIter = ((List<?>) event).iterator();
Map<String, Object> mapEvent = new LinkedHashMap<>();
for (String column : input.getColumns()) {
mapEvent.put(column, compactedEventIter.next());
}
mapEvents.add(mapEvent);
}
return new ScanResultValue(input.getSegmentId(), input.getColumns(), mapEvents);
}
}));
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.shindig.gadgets.rewrite;
import org.apache.shindig.common.uri.Uri;
import org.apache.shindig.common.xml.XmlUtil;
import org.apache.shindig.config.AbstractContainerConfig;
import org.apache.shindig.expressions.Expressions;
import org.apache.shindig.gadgets.Gadget;
import org.apache.shindig.gadgets.GadgetContext;
import org.apache.shindig.gadgets.GadgetException;
import org.apache.shindig.gadgets.parse.ParseModule;
import org.apache.shindig.gadgets.parse.nekohtml.NekoSimplifiedHtmlParser;
import org.apache.shindig.gadgets.render.FakeMessageBundleFactory;
import org.apache.shindig.gadgets.spec.GadgetSpec;
import org.apache.shindig.gadgets.spec.SpecParserException;
import org.apache.shindig.gadgets.templates.ContainerTagLibraryFactory;
import org.apache.shindig.gadgets.templates.DefaultTemplateProcessor;
import org.apache.shindig.gadgets.templates.TemplateLibrary;
import org.apache.shindig.gadgets.templates.TemplateLibraryFactory;
import org.apache.shindig.gadgets.templates.TemplateProcessor;
import org.apache.shindig.gadgets.templates.XmlTemplateLibrary;
import org.apache.shindig.gadgets.templates.tags.AbstractTagHandler;
import org.apache.shindig.gadgets.templates.tags.DefaultTagRegistry;
import org.apache.shindig.gadgets.templates.tags.TagHandler;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.inject.Provider;
import org.json.JSONException;
import org.json.JSONObject;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import java.util.Map;
import java.util.Set;
/**
 * Tests for {@link TemplateRewriter}: rendering of os-template script blocks,
 * feature-dependency removal, external template libraries, and the precedence
 * rules between default, OSML, inline and external tag definitions.
 */
public class TemplateRewriterTest {
  private GadgetSpec gadgetSpec;
  private Gadget gadget;
  private MutableContent content;
  private TemplateRewriter rewriter;
  // Per-test container configuration, served back through FakeContainerConfig.
  private final Map<String, Object> config = Maps.newHashMap();
  private static final Uri GADGET_URI = Uri.parse("http://example.org/gadget.php");
  private static final String CONTENT_PLAIN =
      "<script type='text/os-template'>Hello, ${user.name}</script>";
  private static final String CONTENT_WITH_MESSAGE =
      "<script type='text/os-template'>Hello, ${Msg.name}</script>";
  private static final String CONTENT_REQUIRE =
      "<script type='text/os-template' require='user'>Hello, ${user.name}</script>";
  private static final String CONTENT_REQUIRE_MISSING =
      "<script type='text/os-template' require='foo'>Hello, ${user.name}</script>";
  private static final String CONTENT_WITH_TAG =
      "<script type='text/os-template' xmlns:foo='#foo' tag='foo:Bar'>Hello, ${user.name}</script>";
  private static final String CONTENT_WITH_AUTO_UPDATE =
      "<script type='text/os-template' autoUpdate='true'>Hello, ${user.name}</script>";
  private static final String TEMPLATE_LIBRARY =
      "<Templates xmlns:my='#my'>" +
      " <Namespace prefix='my' url='#my'/>" +
      " <JavaScript>script</JavaScript>" +
      " <Style>style</Style>" +
      " <Template tag='my:Tag1'>external1</Template>" +
      " <Template tag='my:Tag2'>external2</Template>" +
      " <Template tag='my:Tag3'>external3</Template>" +
      " <Template tag='my:Tag4'>external4</Template>" +
      "</Templates>";
  private static final String TEMPLATE_LIBRARY_URI = "http://example.org/library.xml";
  private static final String CONTENT_WITH_TAG_FROM_LIBRARY =
      "<script type='text/os-template' xmlns:my='#my'><my:Tag4/></script>";
  private static final String CONTENT_TESTING_PRECEDENCE_RULES =
      "<script type='text/os-template' xmlns:my='#my' tag='my:Tag1'>inline1</script>" +
      "<script type='text/os-template' xmlns:my='#my' tag='my:Tag2'>inline2</script>" +
      "<script type='text/os-template' xmlns:my='#my' tag='my:Tag3'>inline3</script>" +
      "<script type='text/os-template' xmlns:my='#my'><my:Tag1/><my:Tag2/><my:Tag3/><my:Tag4/></script>";

  @Before
  public void setUp() {
    Set<TagHandler> handlers = ImmutableSet.of(testTagHandler("Tag1", "default1"));
    rewriter = new TemplateRewriter(
        new Provider<TemplateProcessor>() {
          public TemplateProcessor get() {
            return new DefaultTemplateProcessor(Expressions.forTesting());
          }
        },
        new FakeMessageBundleFactory(),
        Expressions.forTesting(),
        new DefaultTagRegistry(handlers),
        new FakeTemplateLibraryFactory(),
        new ContainerTagLibraryFactory(new FakeContainerConfig()));
  }

  /** Creates a tag handler in the '#my' namespace that emits fixed text content. */
  private static TagHandler testTagHandler(String name, final String content) {
    return new AbstractTagHandler("#my", name) {
      public void process(Node result, Element tag, TemplateProcessor processor) {
        result.appendChild(result.getOwnerDocument().createTextNode(content));
      }
    };
  }

  @Test
  public void simpleTemplate() throws Exception {
    // Render a simple template
    testExpectingTransform(getGadgetXml(CONTENT_PLAIN), "simple");
    testFeatureRemoved();
  }

  @Test
  public void noTemplateFeature() throws Exception {
    // Without opensocial-templates feature, shouldn't render
    testExpectingNoTransform(getGadgetXml(CONTENT_PLAIN, false), "no feature");
  }

  @Test
  public void requiredDataPresent() throws Exception {
    // Required data is present - render
    testExpectingTransform(getGadgetXml(CONTENT_REQUIRE), "required data");
    testFeatureRemoved();
  }

  @Test
  public void requiredDataMissing() throws Exception {
    // Required data is missing - don't render
    testExpectingNoTransform(getGadgetXml(CONTENT_REQUIRE_MISSING), "missing data");
    testFeatureNotRemoved();
  }

  @Test
  public void tagAttributePresent() throws Exception {
    // Don't render templates with a @tag
    testExpectingNoTransform(getGadgetXml(CONTENT_WITH_TAG), "with @tag");
    testFeatureRemoved();
  }

  @Test
  public void templateUsingMessage() throws Exception {
    // Render a simple template
    testExpectingTransform(getGadgetXml(CONTENT_WITH_MESSAGE), "simple");
    testFeatureRemoved();
  }

  @Test
  public void autoUpdateTemplate() throws Exception {
    setupGadget(getGadgetXml(CONTENT_WITH_AUTO_UPDATE));
    rewriter.rewrite(gadget, content);
    // The template should get transformed, but not removed
    assertTrue("Template wasn't transformed",
        content.getContent().indexOf("Hello, John") > 0);
    assertTrue("Template tag was removed",
        content.getContent().contains("text/os-template"));
    assertTrue("ID span was not created",
        content.getContent().contains("<span id=\"_T_template_auto0\">"));
    testFeatureNotRemoved();
  }

  @Test
  public void templateWithLibrary() throws Exception {
    setupGadget(getGadgetXmlWithLibrary(CONTENT_WITH_TAG_FROM_LIBRARY));
    rewriter.rewrite(gadget, content);
    assertTrue("Script not inserted", content.getContent().indexOf(
        "<script type=\"text/javascript\">script</script>") > 0);
    assertTrue("Style not inserted", content.getContent().indexOf(
        "<style type=\"text/css\">style</style>") > 0);
    assertTrue("Tag not executed", content.getContent().indexOf(
        "external4") > 0);
    testFeatureRemoved();
  }

  @Test
  public void tagPrecedenceRules() throws Exception {
    // Tag definitions include:
    // Default handlers: tag1 default1
    // OSML: tag1 osml1 tag2 osml2
    // inline tags: tag1 inline1 tag2 inline2 tag3 inline3
    // External tags: tag1 external1 tag2 external2 tag3 external3 tag4 external4
    config.put("${Cur['gadgets.features'].osml.library}",
        "org/apache/shindig/gadgets/rewrite/OSML_test.xml");
    setupGadget(getGadgetXmlWithLibrary(CONTENT_TESTING_PRECEDENCE_RULES));
    rewriter.rewrite(gadget, content);
    assertTrue("Precedence rules violated",
        content.getContent().indexOf("default1osml2inline3external4") > 0);
    testFeatureRemoved();
  }

  @Test
  public void tagPrecedenceRulesWithoutOSML() throws Exception {
    // Tag definitions include:
    // Default handlers: tag1 default1
    // OSML: tag1 osml1 tag2 osml2
    // inline tags: tag1 inline1 tag2 inline2 tag3 inline3
    // External tags: tag1 external1 tag2 external2 tag3 external3 tag4 external4
    // Explicitly don't support OSML
    config.put("${Cur['gadgets.features'].osml.library}", "");
    setupGadget(getGadgetXmlWithLibrary(CONTENT_TESTING_PRECEDENCE_RULES));
    rewriter.rewrite(gadget, content);
    assertTrue("Precedence rules violated",
        content.getContent().indexOf("default1inline2inline3external4") > 0);
    testFeatureRemoved();
  }

  @Test
  public void testClientOverride() throws Exception {
    // Should normally remove feature
    testExpectingTransform(getGadgetXml(CONTENT_PLAIN, true, "true"), "keep client");
    testFeatureNotRemoved();
    // Should normally keep feature
    testExpectingNoTransform(getGadgetXml(CONTENT_WITH_TAG, true, "false"), "remove client");
    testFeatureRemoved();
  }

  /** Asserts the opensocial-templates feature dependency was removed from the gadget. */
  private void testFeatureRemoved() {
    assertFalse("Feature wasn't removed",
        gadget.getDirectFeatureDeps().contains("opensocial-templates"));
  }

  /** Asserts the opensocial-templates feature dependency is still present. */
  private void testFeatureNotRemoved() {
    assertTrue("Feature was removed",
        gadget.getDirectFeatureDeps().contains("opensocial-templates"));
  }

  /** Rewrites the gadget and asserts the template was rendered and its tag removed. */
  private void testExpectingTransform(String code, String condition) throws Exception {
    setupGadget(code);
    rewriter.rewrite(gadget, content);
    assertTrue("Template wasn't transformed (" + condition + ")",
        content.getContent().indexOf("Hello, John") > 0);
    assertTrue("Template tag wasn't removed (" + condition + ")",
        !content.getContent().contains("text/os-template"));
  }

  /** Rewrites the gadget and asserts the template was left untouched. */
  private void testExpectingNoTransform(String code, String condition) throws Exception {
    setupGadget(code);
    rewriter.rewrite(gadget, content);
    assertTrue("Template was transformed (" + condition + ")",
        content.getContent().indexOf("${user.name}") > 0);
    assertTrue("Template tag was removed (" + condition + ")",
        content.getContent().indexOf("text/os-template") > 0);
  }

  /** Parses the gadget XML, prepares mutable content, and pipelines the 'user' data. */
  private void setupGadget(String gadgetXml) throws SpecParserException, JSONException {
    gadgetSpec = new GadgetSpec(GADGET_URI, gadgetXml);
    gadget = new Gadget();
    gadget.setSpec(gadgetSpec);
    gadget.setContext(new GadgetContext() {
      @Override
      public Uri getUrl() {
        return GADGET_URI;
      }
    });
    gadget.setCurrentView(gadgetSpec.getView("default"));
    content = new MutableContent(new NekoSimplifiedHtmlParser(
        new ParseModule.DOMImplementationProvider().get()), gadget.getCurrentView().getContent());
    putPipelinedData("user", new JSONObject("{ name: 'John'}"));
  }

  private void putPipelinedData(String key, JSONObject data) {
    content.addPipelinedData(key, data);
  }

  private static String getGadgetXml(String content) {
    return getGadgetXml(content, true);
  }

  private static String getGadgetXml(String content, boolean requireFeature) {
    return getGadgetXml(content, requireFeature, null);
  }

  /** Builds gadget XML, optionally requiring opensocial-templates with a client param. */
  private static String getGadgetXml(String content, boolean requireFeature,
      String clientParam) {
    String feature = requireFeature ?
        "<Require feature='opensocial-templates'" +
        (clientParam != null ?
            ("><Param name='client'>" + clientParam + "</Param></Require>")
            : "/>")
        : "";
    return "<Module>" + "<ModulePrefs title='Title'>"
        + feature
        + " <Locale>"
        + " <msg name='name'>John</msg>"
        + " </Locale>"
        + "</ModulePrefs>"
        + "<Content>"
        + " <![CDATA[" + content + "]]>"
        + "</Content></Module>";
  }

  /** Builds gadget XML that requires opensocial-templates with an external library param. */
  private static String getGadgetXmlWithLibrary(String content) {
    return "<Module>" + "<ModulePrefs title='Title'>"
        + " <Require feature='opensocial-templates'>"
        + " <Param name='" + TemplateRewriter.REQUIRE_LIBRARY_PARAM + "'>"
        + TEMPLATE_LIBRARY_URI
        + " </Param>"
        + " </Require>"
        + "</ModulePrefs>"
        + "<Content>"
        + " <![CDATA[" + content + "]]>"
        + "</Content></Module>";
  }

  /** Serves TEMPLATE_LIBRARY for the expected library URI instead of fetching it. */
  private static class FakeTemplateLibraryFactory extends TemplateLibraryFactory {
    public FakeTemplateLibraryFactory() {
      super(null, null);
    }
    @Override
    public TemplateLibrary loadTemplateLibrary(GadgetContext context, Uri uri)
        throws GadgetException {
      assertEquals(TEMPLATE_LIBRARY_URI, uri.toString());
      return new XmlTemplateLibrary(uri, XmlUtil.parseSilent(TEMPLATE_LIBRARY),
          TEMPLATE_LIBRARY);
    }
  }

  /** Container config backed by the test's mutable {@code config} map. */
  private class FakeContainerConfig extends AbstractContainerConfig {
    @Override
    public Object getProperty(String container, String name) {
      return config.get(name);
    }
  }
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudwatchevents.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/events-2015-10-07/DescribeApiDestination" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeApiDestinationResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* The ARN of the API destination retrieved.
* </p>
*/
private String apiDestinationArn;
/**
* <p>
* The name of the API destination retrieved.
* </p>
*/
private String name;
/**
* <p>
* The description for the API destination retrieved.
* </p>
*/
private String description;
/**
* <p>
* The state of the API destination retrieved.
* </p>
*/
private String apiDestinationState;
/**
* <p>
* The ARN of the connection specified for the API destination retrieved.
* </p>
*/
private String connectionArn;
/**
* <p>
* The URL to use to connect to the HTTP endpoint.
* </p>
*/
private String invocationEndpoint;
/**
* <p>
* The method to use to connect to the HTTP endpoint.
* </p>
*/
private String httpMethod;
/**
* <p>
* The maximum number of invocations per second to specified for the API destination. Note that if you set the
* invocation rate maximum to a value lower the rate necessary to send all events received on to the destination
* HTTP endpoint, some events may not be delivered within the 24-hour retry window. If you plan to set the rate
* lower than the rate necessary to deliver all events, consider using a dead-letter queue to catch events that are
* not delivered within 24 hours.
* </p>
*/
private Integer invocationRateLimitPerSecond;
/**
* <p>
* A time stamp for the time that the API destination was created.
* </p>
*/
private java.util.Date creationTime;
/**
* <p>
* A time stamp for the time that the API destination was last modified.
* </p>
*/
private java.util.Date lastModifiedTime;
/**
* <p>
* The ARN of the API destination retrieved.
* </p>
*
* @param apiDestinationArn
* The ARN of the API destination retrieved.
*/
public void setApiDestinationArn(String apiDestinationArn) {
this.apiDestinationArn = apiDestinationArn;
}
/**
* <p>
* The ARN of the API destination retrieved.
* </p>
*
* @return The ARN of the API destination retrieved.
*/
public String getApiDestinationArn() {
return this.apiDestinationArn;
}
/**
* <p>
* The ARN of the API destination retrieved.
* </p>
*
* @param apiDestinationArn
* The ARN of the API destination retrieved.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeApiDestinationResult withApiDestinationArn(String apiDestinationArn) {
setApiDestinationArn(apiDestinationArn);
return this;
}
/**
* <p>
* The name of the API destination retrieved.
* </p>
*
* @param name
* The name of the API destination retrieved.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* The name of the API destination retrieved.
* </p>
*
* @return The name of the API destination retrieved.
*/
public String getName() {
return this.name;
}
/**
* <p>
* The name of the API destination retrieved.
* </p>
*
* @param name
* The name of the API destination retrieved.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeApiDestinationResult withName(String name) {
setName(name);
return this;
}
/**
* <p>
* The description for the API destination retrieved.
* </p>
*
* @param description
* The description for the API destination retrieved.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* The description for the API destination retrieved.
* </p>
*
* @return The description for the API destination retrieved.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* The description for the API destination retrieved.
* </p>
*
* @param description
* The description for the API destination retrieved.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeApiDestinationResult withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
* The state of the API destination retrieved.
* </p>
*
* @param apiDestinationState
* The state of the API destination retrieved.
* @see ApiDestinationState
*/
public void setApiDestinationState(String apiDestinationState) {
this.apiDestinationState = apiDestinationState;
}
/**
* <p>
* The state of the API destination retrieved.
* </p>
*
* @return The state of the API destination retrieved.
* @see ApiDestinationState
*/
public String getApiDestinationState() {
return this.apiDestinationState;
}
/**
* <p>
* The state of the API destination retrieved.
* </p>
*
* @param apiDestinationState
* The state of the API destination retrieved.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ApiDestinationState
*/
public DescribeApiDestinationResult withApiDestinationState(String apiDestinationState) {
setApiDestinationState(apiDestinationState);
return this;
}
/**
* <p>
* The state of the API destination retrieved.
* </p>
*
* @param apiDestinationState
* The state of the API destination retrieved.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ApiDestinationState
*/
public DescribeApiDestinationResult withApiDestinationState(ApiDestinationState apiDestinationState) {
this.apiDestinationState = apiDestinationState.toString();
return this;
}
/**
* <p>
* The ARN of the connection specified for the API destination retrieved.
* </p>
*
* @param connectionArn
* The ARN of the connection specified for the API destination retrieved.
*/
public void setConnectionArn(String connectionArn) {
this.connectionArn = connectionArn;
}
/**
* <p>
* The ARN of the connection specified for the API destination retrieved.
* </p>
*
* @return The ARN of the connection specified for the API destination retrieved.
*/
public String getConnectionArn() {
return this.connectionArn;
}
/**
* <p>
* The ARN of the connection specified for the API destination retrieved.
* </p>
*
* @param connectionArn
* The ARN of the connection specified for the API destination retrieved.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeApiDestinationResult withConnectionArn(String connectionArn) {
setConnectionArn(connectionArn);
return this;
}
/**
* <p>
* The URL to use to connect to the HTTP endpoint.
* </p>
*
* @param invocationEndpoint
* The URL to use to connect to the HTTP endpoint.
*/
public void setInvocationEndpoint(String invocationEndpoint) {
this.invocationEndpoint = invocationEndpoint;
}
/**
* <p>
* The URL to use to connect to the HTTP endpoint.
* </p>
*
* @return The URL to use to connect to the HTTP endpoint.
*/
public String getInvocationEndpoint() {
return this.invocationEndpoint;
}
/**
* <p>
* The URL to use to connect to the HTTP endpoint.
* </p>
*
* @param invocationEndpoint
* The URL to use to connect to the HTTP endpoint.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeApiDestinationResult withInvocationEndpoint(String invocationEndpoint) {
setInvocationEndpoint(invocationEndpoint);
return this;
}
/**
* <p>
* The method to use to connect to the HTTP endpoint.
* </p>
*
* @param httpMethod
* The method to use to connect to the HTTP endpoint.
* @see ApiDestinationHttpMethod
*/
public void setHttpMethod(String httpMethod) {
this.httpMethod = httpMethod;
}
/**
* <p>
* The method to use to connect to the HTTP endpoint.
* </p>
*
* @return The method to use to connect to the HTTP endpoint.
* @see ApiDestinationHttpMethod
*/
public String getHttpMethod() {
return this.httpMethod;
}
/**
* <p>
* The method to use to connect to the HTTP endpoint.
* </p>
*
* @param httpMethod
* The method to use to connect to the HTTP endpoint.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ApiDestinationHttpMethod
*/
public DescribeApiDestinationResult withHttpMethod(String httpMethod) {
setHttpMethod(httpMethod);
return this;
}
/**
* <p>
* The method to use to connect to the HTTP endpoint.
* </p>
*
* @param httpMethod
* The method to use to connect to the HTTP endpoint.
* @return Returns a reference to this object so that method calls can be chained together.
* @see ApiDestinationHttpMethod
*/
public DescribeApiDestinationResult withHttpMethod(ApiDestinationHttpMethod httpMethod) {
this.httpMethod = httpMethod.toString();
return this;
}
/**
* <p>
* The maximum number of invocations per second to specified for the API destination. Note that if you set the
* invocation rate maximum to a value lower the rate necessary to send all events received on to the destination
* HTTP endpoint, some events may not be delivered within the 24-hour retry window. If you plan to set the rate
* lower than the rate necessary to deliver all events, consider using a dead-letter queue to catch events that are
* not delivered within 24 hours.
* </p>
*
* @param invocationRateLimitPerSecond
* The maximum number of invocations per second to specified for the API destination. Note that if you set
* the invocation rate maximum to a value lower the rate necessary to send all events received on to the
* destination HTTP endpoint, some events may not be delivered within the 24-hour retry window. If you plan
* to set the rate lower than the rate necessary to deliver all events, consider using a dead-letter queue to
* catch events that are not delivered within 24 hours.
*/
public void setInvocationRateLimitPerSecond(Integer invocationRateLimitPerSecond) {
this.invocationRateLimitPerSecond = invocationRateLimitPerSecond;
}
/**
* <p>
* The maximum number of invocations per second to specified for the API destination. Note that if you set the
* invocation rate maximum to a value lower the rate necessary to send all events received on to the destination
* HTTP endpoint, some events may not be delivered within the 24-hour retry window. If you plan to set the rate
* lower than the rate necessary to deliver all events, consider using a dead-letter queue to catch events that are
* not delivered within 24 hours.
* </p>
*
* @return The maximum number of invocations per second to specified for the API destination. Note that if you set
* the invocation rate maximum to a value lower the rate necessary to send all events received on to the
* destination HTTP endpoint, some events may not be delivered within the 24-hour retry window. If you plan
* to set the rate lower than the rate necessary to deliver all events, consider using a dead-letter queue
* to catch events that are not delivered within 24 hours.
*/
public Integer getInvocationRateLimitPerSecond() {
return this.invocationRateLimitPerSecond;
}
/**
* <p>
* The maximum number of invocations per second to specified for the API destination. Note that if you set the
* invocation rate maximum to a value lower the rate necessary to send all events received on to the destination
* HTTP endpoint, some events may not be delivered within the 24-hour retry window. If you plan to set the rate
* lower than the rate necessary to deliver all events, consider using a dead-letter queue to catch events that are
* not delivered within 24 hours.
* </p>
*
* @param invocationRateLimitPerSecond
* The maximum number of invocations per second to specified for the API destination. Note that if you set
* the invocation rate maximum to a value lower the rate necessary to send all events received on to the
* destination HTTP endpoint, some events may not be delivered within the 24-hour retry window. If you plan
* to set the rate lower than the rate necessary to deliver all events, consider using a dead-letter queue to
* catch events that are not delivered within 24 hours.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeApiDestinationResult withInvocationRateLimitPerSecond(Integer invocationRateLimitPerSecond) {
setInvocationRateLimitPerSecond(invocationRateLimitPerSecond);
return this;
}
/**
* <p>
* A time stamp for the time that the API destination was created.
* </p>
*
* @param creationTime
* A time stamp for the time that the API destination was created.
*/
public void setCreationTime(java.util.Date creationTime) {
this.creationTime = creationTime;
}
/**
* <p>
* A time stamp for the time that the API destination was created.
* </p>
*
* @return A time stamp for the time that the API destination was created.
*/
public java.util.Date getCreationTime() {
return this.creationTime;
}
/**
* <p>
* A time stamp for the time that the API destination was created.
* </p>
*
* @param creationTime
* A time stamp for the time that the API destination was created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeApiDestinationResult withCreationTime(java.util.Date creationTime) {
setCreationTime(creationTime);
return this;
}
/**
* <p>
* A time stamp for the time that the API destination was last modified.
* </p>
*
* @param lastModifiedTime
* A time stamp for the time that the API destination was last modified.
*/
public void setLastModifiedTime(java.util.Date lastModifiedTime) {
this.lastModifiedTime = lastModifiedTime;
}
/**
* <p>
* A time stamp for the time that the API destination was last modified.
* </p>
*
* @return A time stamp for the time that the API destination was last modified.
*/
public java.util.Date getLastModifiedTime() {
return this.lastModifiedTime;
}
/**
* <p>
* A time stamp for the time that the API destination was last modified.
* </p>
*
* @param lastModifiedTime
* A time stamp for the time that the API destination was last modified.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeApiDestinationResult withLastModifiedTime(java.util.Date lastModifiedTime) {
setLastModifiedTime(lastModifiedTime);
return this;
}
/**
 * Returns a string representation of this object, useful for testing and debugging.
 * Only fields that are non-null are rendered. Note that (matching the generated
 * original) every field except the last emits a trailing comma, so the output may
 * end in ",}" when LastModifiedTime is absent.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder builder = new StringBuilder("{");
    if (getApiDestinationArn() != null)
        builder.append("ApiDestinationArn: ").append(getApiDestinationArn()).append(",");
    if (getName() != null)
        builder.append("Name: ").append(getName()).append(",");
    if (getDescription() != null)
        builder.append("Description: ").append(getDescription()).append(",");
    if (getApiDestinationState() != null)
        builder.append("ApiDestinationState: ").append(getApiDestinationState()).append(",");
    if (getConnectionArn() != null)
        builder.append("ConnectionArn: ").append(getConnectionArn()).append(",");
    if (getInvocationEndpoint() != null)
        builder.append("InvocationEndpoint: ").append(getInvocationEndpoint()).append(",");
    if (getHttpMethod() != null)
        builder.append("HttpMethod: ").append(getHttpMethod()).append(",");
    if (getInvocationRateLimitPerSecond() != null)
        builder.append("InvocationRateLimitPerSecond: ").append(getInvocationRateLimitPerSecond()).append(",");
    if (getCreationTime() != null)
        builder.append("CreationTime: ").append(getCreationTime()).append(",");
    if (getLastModifiedTime() != null)
        builder.append("LastModifiedTime: ").append(getLastModifiedTime());
    return builder.append("}").toString();
}
/**
 * Field-by-field equality: two results are equal when every property is equal,
 * treating two nulls as equal. {@code java.util.Objects.equals} encodes exactly
 * the original's "one-side-null XOR" plus value-equality checks.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    // instanceof is false for null, so this also covers the original null check.
    if (!(obj instanceof DescribeApiDestinationResult))
        return false;
    DescribeApiDestinationResult that = (DescribeApiDestinationResult) obj;
    return java.util.Objects.equals(that.getApiDestinationArn(), this.getApiDestinationArn())
            && java.util.Objects.equals(that.getName(), this.getName())
            && java.util.Objects.equals(that.getDescription(), this.getDescription())
            && java.util.Objects.equals(that.getApiDestinationState(), this.getApiDestinationState())
            && java.util.Objects.equals(that.getConnectionArn(), this.getConnectionArn())
            && java.util.Objects.equals(that.getInvocationEndpoint(), this.getInvocationEndpoint())
            && java.util.Objects.equals(that.getHttpMethod(), this.getHttpMethod())
            && java.util.Objects.equals(that.getInvocationRateLimitPerSecond(), this.getInvocationRateLimitPerSecond())
            && java.util.Objects.equals(that.getCreationTime(), this.getCreationTime())
            && java.util.Objects.equals(that.getLastModifiedTime(), this.getLastModifiedTime());
}
/**
 * Hash code over all properties. {@code java.util.Objects.hash} performs the
 * identical computation to the original manual loop (seed 1, multiplier 31,
 * null fields contributing 0), so hash values are unchanged.
 */
@Override
public int hashCode() {
    return java.util.Objects.hash(getApiDestinationArn(), getName(), getDescription(),
            getApiDestinationState(), getConnectionArn(), getInvocationEndpoint(), getHttpMethod(),
            getInvocationRateLimitPerSecond(), getCreationTime(), getLastModifiedTime());
}
@Override
public DescribeApiDestinationResult clone() {
    try {
        // Shallow copy via Object.clone(); fields are references shared with the original.
        return (DescribeApiDestinationResult) super.clone();
    } catch (CloneNotSupportedException e) {
        // Unreachable in practice: the class hierarchy implements Cloneable.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
}
|
|
package com.dlsu.getbetter.getbetter.activities;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.AsyncTask;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ListView;
import com.dlsu.getbetter.getbetter.DirectoryConstants;
import com.dlsu.getbetter.getbetter.R;
import com.dlsu.getbetter.getbetter.adapters.PatientUploadAdapter;
import com.dlsu.getbetter.getbetter.database.DataAdapter;
import com.dlsu.getbetter.getbetter.objects.Patient;
import com.dlsu.getbetter.getbetter.sessionmanagers.SystemSessionManager;
import com.loopj.android.http.AsyncHttpClient;
import com.loopj.android.http.RequestParams;
import com.loopj.android.http.TextHttpResponseHandler;
import java.io.File;
import java.io.FileNotFoundException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import cz.msebera.android.httpclient.Header;
/**
 * Screen that lists locally stored patients for a health center and uploads the
 * checked ones to the GetBetter server. Patients are loaded from the local
 * SQLite database on a background thread; uploads go through loopj's
 * AsyncHttpClient as a multipart POST.
 *
 * Fixes over the previous revision:
 * - the ListView adapter is now kept as a field and notified after the async
 *   patient load completes, so loaded patients actually appear in the list
 *   (previously the adapter was created over the still-empty list and never
 *   refreshed);
 * - the server's response body is parsed defensively, so a non-numeric
 *   response (e.g. an HTML error page) no longer crashes the activity with a
 *   NumberFormatException.
 */
public class UploadPatientToServerActivity extends AppCompatActivity implements View.OnClickListener {

    private static final String TAG = "UploadPatientActivity";

    // Form-field names expected by the server-side upload script.
    private static final String ID_KEY = "user_id"; // reserved: the server assigns the id itself
    private static final String FIRST_NAME_KEY = "first_name";
    private static final String MIDDLE_NAME_KEY = "middle_name";
    private static final String LAST_NAME_KEY = "last_name";
    private static final String BIRTHDATE_KEY = "birthdate";
    private static final String GENDER_ID_KEY = "gender_id";
    private static final String CIVIL_STATUS_KEY = "civil_status_id";
    private static final String HEALTH_CENTER_KEY = "default_health_center";
    private static final String BLOOD_TYPE_KEY = "blood_type";
    private static final String PROFILE_URL_KEY = "profile_url";

    private static final int TIMEOUT_VALUE = 60 * 1000; // HTTP timeout, in milliseconds

    private ArrayList<Patient> patientsUpload;
    private DataAdapter getBetterDb;
    private int healthCenterId;
    private ProgressDialog pDialog = null;
    private long newUserId;
    // Kept as a field so the background loader can refresh the list once data arrives.
    private PatientUploadAdapter patientUploadAdapter;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_upload_to_server);

        SystemSessionManager systemSessionManager = new SystemSessionManager(this);
        // NOTE(review): checkLogin() presumably redirects when not logged in; finish()
        // does not stop execution, so the rest of onCreate still runs — unchanged
        // from the original behavior.
        if (systemSessionManager.checkLogin())
            finish();

        HashMap<String, String> hc = systemSessionManager.getHealthCenter();
        healthCenterId = Integer.parseInt(hc.get(SystemSessionManager.HEALTH_CENTER_ID));

        patientsUpload = new ArrayList<>();
        ListView patientList = (ListView) findViewById(R.id.upload_page_patient_list);
        Button uploadBtn = (Button) findViewById(R.id.upload_patient_upload_btn);
        Button backBtn = (Button) findViewById(R.id.upload_patient_back_btn);

        initializeDatabase();

        // Bind the (initially empty) adapter before kicking off the async load;
        // GetPatientListTask.onPostExecute() notifies it once patients are in.
        patientUploadAdapter = new PatientUploadAdapter(this, R.layout.patient_list_item_checkbox, patientsUpload);
        patientList.setAdapter(patientUploadAdapter);
        new GetPatientListTask().execute();

        uploadBtn.setOnClickListener(this);
        backBtn.setOnClickListener(this);
    }

    /** Opens (or creates) the local GetBetter database wrapper. */
    private void initializeDatabase() {
        getBetterDb = new DataAdapter(this);
        try {
            getBetterDb.createDatabase();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /** Loads all not-yet-uploaded patients for the given health center into {@link #patientsUpload}. */
    private void getPatientListUpload(int healthCenterId) {
        try {
            getBetterDb.openDatabase();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        patientsUpload.addAll(getBetterDb.getPatientsUpload(healthCenterId));
        Log.e("patient list size", patientsUpload.size() + "");
        getBetterDb.closeDatabase();
    }

    /** Resolves a gender display name to its database id. */
    private int getGenderId(String genderName) {
        try {
            getBetterDb.openDatabase();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        int result = getBetterDb.getGenderId(genderName);
        getBetterDb.closeDatabase();
        return result;
    }

    /** Resolves a civil-status display name to its database id. */
    private int getCivilStatusId(String civilStatusName) {
        try {
            getBetterDb.openDatabase();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        int result = getBetterDb.getCivilStatusId(civilStatusName);
        getBetterDb.closeDatabase();
        return result;
    }

    /** Removes a patient from the pending-upload queue. */
    private void removePatientUpload(long userId) {
        try {
            getBetterDb.openDatabase();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        getBetterDb.removePatientUpload(userId);
        getBetterDb.closeDatabase();
    }

    /** Rewrites a locally assigned user id to the id issued by the server. */
    private void updateUserId(long newUserId, long oldUserId) {
        try {
            getBetterDb.openDatabase();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        getBetterDb.updateUserId(newUserId, oldUserId);
        getBetterDb.closeDatabase();
    }

    /** Marks a local user record as uploaded. */
    private void updateUser(long updatedUserId) {
        try {
            getBetterDb.openDatabase();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        getBetterDb.updateUserUploaded(updatedUserId);
        getBetterDb.closeDatabase();
    }

    @Override
    public void onClick(View v) {
        int id = v.getId();
        if (id == R.id.upload_patient_upload_btn) {
            // Upload every patient the user checked in the list.
            for (int i = 0; i < patientsUpload.size(); i++) {
                Patient selectedPatient = patientsUpload.get(i);
                if (selectedPatient.isChecked()) {
                    uploadPatient(selectedPatient);
                }
            }
        } else if (id == R.id.upload_patient_back_btn) {
            Intent intent = new Intent(this, ExistingPatientActivity.class);
            startActivity(intent);
            finish();
        }
    }

    /** Loads the pending patients off the UI thread, then refreshes the list. */
    private class GetPatientListTask extends AsyncTask<Void, Void, Void> {
        @Override
        protected void onPreExecute() {
            showPopulateProgressDialog();
        }

        @Override
        protected Void doInBackground(Void... params) {
            getPatientListUpload(healthCenterId);
            return null;
        }

        @Override
        protected void onPostExecute(Void aVoid) {
            // The backing list was mutated on the worker thread; tell the
            // adapter so the ListView actually shows the loaded patients.
            patientUploadAdapter.notifyDataSetChanged();
            dismissProgressDialog();
        }
    }

    /**
     * Uploads one patient record (including the profile photo) to the server as
     * a multipart POST. On success the server returns the new numeric user id,
     * which replaces the locally assigned one.
     */
    private void uploadPatient(final Patient patientUpload) {
        AsyncHttpClient asyncHttpClient = new AsyncHttpClient();
        asyncHttpClient.setTimeout(TIMEOUT_VALUE);

        RequestParams params = new RequestParams();
        final String contentType = RequestParams.APPLICATION_OCTET_STREAM;
        params.setForceMultipartEntityContentType(true);

        String imageFileName = patientUpload.getFirstName().toLowerCase() + "_" +
                patientUpload.getLastName().toLowerCase() + ".jpg";

        params.put(FIRST_NAME_KEY, patientUpload.getFirstName());
        params.put(MIDDLE_NAME_KEY, patientUpload.getMiddleName());
        params.put(LAST_NAME_KEY, patientUpload.getLastName());
        params.put(BIRTHDATE_KEY, patientUpload.getBirthdate());
        params.put(GENDER_ID_KEY, getGenderId(patientUpload.getGender()));
        params.put(CIVIL_STATUS_KEY, getCivilStatusId(patientUpload.getCivilStatus()));
        params.put(BLOOD_TYPE_KEY, patientUpload.getBloodType());
        params.put(HEALTH_CENTER_KEY, healthCenterId);

        // NOTE(review): getProfileImageBytes() appears to hold a file path despite
        // its name — confirm against Patient.
        File profileImage = new File(patientUpload.getProfileImageBytes());
        try {
            params.put(PROFILE_URL_KEY, profileImage, contentType, imageFileName);
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        params.setHttpEntityIsRepeatable(true);
        params.setUseJsonStreamer(false);

        asyncHttpClient.post(this, DirectoryConstants.UPLOAD_PATIENT_SERVER_SCRIPT_URL, params, new TextHttpResponseHandler() {
            @Override
            public void onStart() {
                super.onStart();
                showUploadProgressDialog();
            }

            @Override
            public void onSuccess(int statusCode, Header[] headers, String responseBody) {
                featureAlertMessage("Upload Success");
                Log.d(TAG, responseBody);
                // The server is expected to answer with the new user id; guard
                // against error pages or other non-numeric bodies.
                try {
                    newUserId = Long.parseLong(responseBody.trim());
                } catch (NumberFormatException e) {
                    Log.e(TAG, "Server returned a non-numeric user id: " + responseBody, e);
                    return;
                }
                updateUser(patientUpload.getId());
                updateUserId(newUserId, patientUpload.getId());
            }

            @Override
            public void onFailure(int statusCode, Header[] headers, String responseBody, Throwable error) {
                featureAlertMessage("Upload Failed");
                Log.d(TAG, "onFailure: " + responseBody);
                Log.d(TAG, "onFailure: " + statusCode);
            }

            @Override
            public void onFinish() {
                super.onFinish();
                dismissProgressDialog();
            }
        });
    }

    /** Shows a status dialog; pressing OK closes the dialog and this activity. */
    private void featureAlertMessage(String result) {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle("STATUS");
        builder.setMessage(result);
        builder.setNeutralButton("OK", new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                dialog.dismiss();
                finish();
            }
        });
        builder.show();
    }

    /** Indeterminate spinner shown while the patient list loads. */
    private void showPopulateProgressDialog() {
        if (pDialog == null) {
            pDialog = new ProgressDialog(UploadPatientToServerActivity.this);
            pDialog.setTitle("Populating Patient List");
            pDialog.setMessage("Please wait a moment...");
            pDialog.setIndeterminate(true);
            pDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
        }
        pDialog.show();
    }

    /** Indeterminate spinner shown while a patient record uploads. */
    private void showUploadProgressDialog() {
        if (pDialog == null) {
            pDialog = new ProgressDialog(UploadPatientToServerActivity.this);
            pDialog.setTitle("GetBetter Server");
            pDialog.setMessage("Uploading Patient Records...");
            pDialog.setIndeterminate(true);
            pDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
        }
        pDialog.show();
    }

    /** Dismisses and clears the shared progress dialog, if visible. */
    private void dismissProgressDialog() {
        if (pDialog != null && pDialog.isShowing()) {
            pDialog.dismiss();
            pDialog = null;
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Avoid leaking the dialog's window when the activity goes away.
        dismissProgressDialog();
    }
}
|
|
// THIS SOURCE CODE IS SUPPLIED "AS IS" WITHOUT WARRANTY OF ANY KIND, AND ITS AUTHOR AND THE JOURNAL OF MACHINE LEARNING RESEARCH (JMLR) AND JMLR'S PUBLISHERS AND DISTRIBUTORS, DISCLAIM ANY AND ALL WARRANTIES, INCLUDING BUT NOT LIMITED TO ANY IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, AND ANY WARRANTIES OR NON INFRINGEMENT. THE USER ASSUMES ALL LIABILITY AND RESPONSIBILITY FOR USE OF THIS SOURCE CODE, AND NEITHER THE AUTHOR NOR JMLR, NOR JMLR'S PUBLISHERS AND DISTRIBUTORS, WILL BE LIABLE FOR DAMAGES OF ANY KIND RESULTING FROM ITS USE. Without lim- iting the generality of the foregoing, neither the author, nor JMLR, nor JMLR's publishers and distributors, warrant that the Source Code will be error-free, will operate without interruption, or will meet the needs of the user.
//
// --------------------------------------------------------------------------
//
// Copyright 2011 Stephen Piccolo
//
// This file is part of ML-Flex.
//
// ML-Flex is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// any later version.
//
// ML-Flex is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with ML-Flex. If not, see <http://www.gnu.org/licenses/>.
package mlflex.core;
import mlflex.helper.FileUtilities;
import mlflex.helper.ListUtilities;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
/** This class stores information about predictions that have been made. It contains methods to make it easier to deal with multiple predictions.
* @author Stephen Piccolo
*/
/** This class stores information about predictions that have been made. It contains methods to make it easier to deal with multiple predictions.
 *
 *  The per-instance map is keyed on {@code Prediction.InstanceID}. The sorted
 *  views returned by {@code GetAll()} and {@code GetInstanceIDs()} are memoized
 *  and invalidated whenever a prediction is added.
 * @author Stephen Piccolo
 */
public class Predictions
{
    private HashMap<String, Prediction> _predictionMap = new HashMap<String, Prediction>();

    /** Default constructor
     */
    public Predictions()
    {
    }

    /** Alternate constructor
     *
     * @param predictions List of predictions
     */
    public Predictions(ArrayList<Prediction> predictions)
    {
        for (Prediction prediction : predictions)
            Add(prediction);
    }

    /** Adds a prediction to this set of predictions. A prediction with the same
     *  instance ID as an existing one replaces it.
     *
     * @param prediction A single prediction to add
     * @return This instance
     */
    private Predictions Add(Prediction prediction)
    {
        _predictionMap.put(prediction.InstanceID, prediction);

        // Invalidate the memoized views so they are rebuilt on next access.
        // (Previously the caches could go stale if Add were called after
        // GetAll()/GetInstanceIDs() had been invoked.)
        _all = null;
        _instanceIDs = null;

        return this;
    }

    /** Retrieves the prediction for a given instance ID.
     *
     * @param instanceID Data instance ID
     * @return A prediction (null if no prediction exists for the ID)
     */
    public Prediction Get(String instanceID)
    {
        return _predictionMap.get(instanceID);
    }

    // Memoized list of all predictions, sorted by instance ID; null until first use.
    private ArrayList<Prediction> _all = null;

    /** Gets a list of all predictions that are in this object, sorted by instance ID.
     *
     * @return List of all predictions that are in this object
     */
    public ArrayList<Prediction> GetAll()
    {
        if (_all == null)
        {
            _all = new ArrayList<Prediction>();
            for (String instanceID : GetInstanceIDs())
                _all.add(Get(instanceID));
        }

        return _all;
    }

    // Memoized sorted list of instance IDs; null until first use.
    private ArrayList<String> _instanceIDs = null;

    /** Retrieves a sorted list of instance IDs for which predictions have been made
     *
     * @return List of instance IDs for which predictions have been made
     */
    public ArrayList<String> GetInstanceIDs()
    {
        if (_instanceIDs == null)
            _instanceIDs = ListUtilities.SortStringList(new ArrayList<String>(_predictionMap.keySet()));

        return _instanceIDs;
    }

    /** Convenience method to indicate how many predictions matched the specified class value
     *
     * @param predictedClass Predicted class value
     * @return Number of predictions matching the specified class value
     */
    public int GetNumMatchingPredictedClasses(String predictedClass)
    {
        int count = 0;

        for (String instanceID : GetInstanceIDs())
            if (Get(instanceID).Prediction.equals(predictedClass))
                count++;

        return count;
    }

    /** Gets a list of all the class predictions for the instances represented here.
     *
     * @return List of all class predictions (one entry per instance, unsorted)
     */
    public ArrayList<String> GetPredictedClasses()
    {
        ArrayList<String> predictedClasses = new ArrayList<String>();

        for (Prediction prediction : _predictionMap.values())
            predictedClasses.add(prediction.Prediction);

        return predictedClasses;
    }

    /** Gets a sorted list of the unique predicted classes.
     *  (Doc fix: the previous javadoc described a count, but a list is returned.)
     *
     * @return Sorted list of unique predicted classes
     */
    public ArrayList<String> GetUniquePredictedClasses()
    {
        return ListUtilities.SortStringList(new ArrayList<String>(new HashSet<String>(GetPredictedClasses())));
    }

    /** Indicates whether a prediction has been made for a given instance ID
     *
     * @param instanceID Data instance ID
     * @return Whether a prediction has been made
     * @throws Exception
     */
    public boolean HasPrediction(String instanceID) throws Exception
    {
        return _predictionMap.containsKey(instanceID);
    }

    /** Reads predictions from a text file when those predictions have already been made and stored.
     *  The expected format is tab-delimited: header row, then
     *  instance ID, actual class, predicted class, followed by one probability per class.
     *
     * @param filePath Absolute path to the file containing predictions
     * @return Predictions that were in the file (empty if the file is missing or empty)
     * @throws Exception
     */
    public static Predictions ReadFromFile(String filePath) throws Exception
    {
        if (!FileUtilities.FileExists(filePath))
            return new Predictions();

        // Retrieve the predictions text from an existing file
        ArrayList<ArrayList<String>> fileLines = FileUtilities.ParseDelimitedFile(filePath);

        // Make sure the file is not empty
        if (fileLines.size() == 0)
            return new Predictions();

        // Remove the header information
        fileLines.remove(0);

        Predictions predictions = new Predictions();

        // Loop through the text and parse out the prediction information
        for (ArrayList<String> row : fileLines)
        {
            String id = row.get(0);
            String actualClass = row.get(1);
            String predictedClass = row.get(2);

            ArrayList<Double> classProbabilities = new ArrayList<Double>();
            for (int i = 3; i < row.size(); i++)
                classProbabilities.add(Double.parseDouble(row.get(i)));

            predictions.Add(new Prediction(id, actualClass, predictedClass, classProbabilities));
        }

        return predictions;
    }

    /** Saves predictions that have already been made, to a file.
     *
     * @param filePath Absolute file path where the predictions will be stored
     * @throws Exception
     */
    public void SaveToFile(String filePath) throws Exception
    {
        ArrayList<String> header = new ArrayList<String>();

        // Create the header
        header.addAll(ListUtilities.CreateStringList("Instance_ID", "Dependent_Variable_Value", "Prediction"));
        for (String x : Singletons.InstanceVault.TransformedDependentVariableOptions)
            header.add(x + "_Probability");

        // StringBuilder instead of StringBuffer: no cross-thread sharing here,
        // so the synchronized variant buys nothing.
        StringBuilder buffer = new StringBuilder();
        buffer.append(ListUtilities.Join(header, "\t") + "\n");

        // Loop through the predictions and construct the output
        for (String instanceID : GetInstanceIDs())
        {
            Prediction prediction = Get(instanceID);

            ArrayList<String> outputVals = new ArrayList<String>();
            outputVals.add(prediction.InstanceID);
            outputVals.add(prediction.DependentVariableValue);
            outputVals.add(prediction.Prediction);

            for (double classProbability : prediction.ClassProbabilities)
                outputVals.add(String.valueOf(classProbability));

            buffer.append(ListUtilities.Join(outputVals, "\t") + "\n");
        }

        // Save the output to a file
        FileUtilities.WriteLineToFile(filePath, buffer.toString());
    }

    /** Indicates the number of predictions that have been made
     *
     * @return The number of predictions that have been made
     */
    public int Size()
    {
        return _predictionMap.size();
    }

    @Override
    public String toString()
    {
        StringBuilder builder = new StringBuilder();

        for (String instanceID : GetInstanceIDs())
            builder.append("\n" + Get(instanceID).toString());

        return builder.toString();
    }

    @Override
    public boolean equals(Object obj)
    {
        if (obj == null)
            return false;
        if (!(obj instanceof Predictions))
            return false;

        return ((Predictions) obj)._predictionMap.equals(_predictionMap);
    }

    @Override
    public int hashCode()
    {
        return _predictionMap.hashCode();
    }
}
|
|
/******************************************************************
* File: Login.java
* Created by: Dave Reynolds
* Created on: 1 Apr 2013
*
* (c) Copyright 2013, Epimorphics Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*****************************************************************/
package com.epimorphics.registry.webapi;
import com.epimorphics.registry.core.Registry;
import com.epimorphics.registry.security.*;
import com.epimorphics.server.webapi.WebApiException;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.util.ByteSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.ServletContext;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.ws.rs.*;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
@Path("/system/security")
public class Login {
static final Logger log = LoggerFactory.getLogger( Login.class );
public static final String NOCACHE_COOKIE = "nocache";
// Velocity binding names
public static final String VN_SUBJECT = "subject";
public static final String VN_REGISTRATION_STATUS = "registrationStatus";
public static final String RS_NEW = "new";
public static final String RS_ALREADY_REGISTERED = "already";
public static final String RS_LOGIN = "login";
protected @Context UriInfo uriInfo;
protected @Context ServletContext servletContext;
protected @Context HttpServletRequest request;
protected @Context HttpServletResponse response;
@Path("/login")
@POST
public Response login(@FormParam("provider") String provider, @FormParam("return") String returnURL) {
    // Starts an OpenID authentication round trip (registration flag = false);
    // ProcessOpenID writes the provider redirect directly to the servlet response.
    new ProcessOpenID(uriInfo, servletContext).processOpenID(request, response, provider, returnURL, false);
    return Response.ok().build();
}
@Path("/register")
@POST
public Response register(@FormParam("provider") String provider, @FormParam("return") String returnURL) {
    // Same as login(), but with the registration flag set so a new account is created.
    new ProcessOpenID(uriInfo, servletContext).processOpenID(request, response, provider, returnURL, true);
    return Response.ok().build();
}
@Path("/loginoa")
@POST
public Response loginOauth2(@FormParam("provider") String provider, @FormParam("return") String returnURL) {
    // OAuth2 login variant; note ProcessOauth2 reuses the method name processOpenID.
    new ProcessOauth2(uriInfo, servletContext).processOpenID(request, response, provider, returnURL, false);
    return Response.ok().build();
}
@Path("/registeroa")
@POST
public Response registerOauth2(@FormParam("provider") String provider, @FormParam("return") String returnURL) {
    // OAuth2 registration variant (registration flag = true).
    new ProcessOauth2(uriInfo, servletContext).processOpenID(request, response, provider, returnURL, true);
    return Response.ok().build();
}
/**
 * Logs the current user out: clears the session subject, logs out of Shiro,
 * removes the nocache cookie and redirects back to the application root.
 */
@Path("/logout")
@POST
public void logout(@Context HttpServletResponse response) throws IOException {
    // TODO set session attribute as part of Shiro realm
    HttpSession session = request.getSession(false);
    if (session != null) {
        session.removeAttribute(VN_SUBJECT);
        SecurityUtils.getSubject().logout();
    }
    removeNocache(response);

    String contextPath = request.getServletContext().getContextPath();
    boolean noContext = (contextPath == null) || contextPath.isEmpty();
    response.sendRedirect(noContext ? "/" : contextPath);
}
/**
 * Password-based login for interactive users. On success the nocache cookie is
 * set and the browser is redirected to returnURL (default /ui/admin).
 */
@Path("/pwlogin")
@POST
public Response pwlogin(@FormParam("userid") String userid, @FormParam("password") String password, @FormParam("return") String returnURL) {
    try {
        Subject subject = SecurityUtils.getSubject();
        subject.login(new RegToken(userid, password));
        log.info("Password login for userid " + userid);
        setNocache(response);

        String target = (returnURL == null || returnURL.isEmpty()) ? "/ui/admin" : returnURL;
        return redirectTo( target );
    } catch (Exception e) {
        // Deliberately vague to the caller; details go to the log only.
        log.warn(String.format("Password login failure for userid %s [%s]: %s", userid, e.getClass().toString(), e.getMessage()));
        return error("Login failed");
    }
}
/**
 * Registers a new user with a password. All three of userid, display name and
 * password are required; a duplicate userid is rejected. On success the new
 * user is logged in and redirected to returnURL (default /ui/admin).
 */
@Path("/pwregister")
@POST
public Response pwregister(
        @FormParam("userid") String userid,
        @FormParam("password") String password,
        @FormParam("name") String name,
        @FormParam("return") String returnURL) {
    boolean incomplete = userid == null || userid.isEmpty()
            || password == null || password.isEmpty()
            || name == null || name.isEmpty();
    if (incomplete) {
        return error( "You must supply all of a username, display name and password to register" );
    }

    UserStore userstore = Registry.get().getUserStore();
    if (!userstore.register( new UserInfo(userid, name) )) {
        return error( "That username is already registered" );
    }

    try {
        Subject subject = SecurityUtils.getSubject();
        subject.login(new RegToken(userid, true));
        userstore.setCredentials(userid, ByteSource.Util.bytes(password), Integer.MAX_VALUE);

        String target = (returnURL == null || returnURL.isEmpty()) ? "/ui/admin" : returnURL;
        return redirectTo( target );
    } catch (Exception e) {
        return error("Failed to register the password: " + e);
    }
}
/**
 * Password login for API clients: 200 OK on success, 401 on any failure.
 * No redirect or cookie handling, unlike pwlogin().
 */
@Path("/apilogin")
@POST
public Response apilogin(@FormParam("userid") String userid, @FormParam("password") String password) {
    try {
        SecurityUtils.getSubject().login(new RegToken(userid, password));
        log.info("API Login for userid " + userid);
        return Response.ok().build();
    } catch (Exception e) {
        log.warn(String.format("API Login failure for userid %s [%s]: %s", userid, e.getClass().toString(), e.getMessage()));
        return Response.status(Response.Status.UNAUTHORIZED).build();
    }
}
@Path("/response")
@GET
public Response openIDResponse() {
    // Callback endpoint the OpenID provider redirects back to; verification happens in ProcessOpenID.
    return new ProcessOpenID(uriInfo, servletContext).verifyResponse(request, response);
}
@Path("/responseoa")
@GET
public Response openIDConnectResponse() {
    // Callback endpoint for the OAuth2/OpenID-Connect flow started by loginOauth2/registerOauth2.
    return new ProcessOauth2(uriInfo, servletContext).verifyResponse(request, response);
}
// Returns the display name of the user logged into this session (test hook);
// 401 via WebApiException when no user is authenticated.
@Path("/username")
@GET
@Produces(MediaType.TEXT_PLAIN)
public String getUsername() {
    Subject subject = SecurityUtils.getSubject();
    if (!subject.isAuthenticated()) {
        throw new WebApiException(Response.Status.UNAUTHORIZED, "No logged in user in this session");
    }
    return ((UserInfo) subject.getPrincipal()).getName();
}
/**
 * Renders the user list for the permission-granting UI. Only available to
 * subjects holding the Grant permission for the given URI.
 */
@Path("/listusers")
@GET
@Produces(MediaType.TEXT_HTML)
public Response listusers(@QueryParam("query") String query, @QueryParam("grant") String action, @QueryParam("uri") String uri) {
    if (!SecurityUtils.getSubject().isPermitted("Grant:" + uri)) {
        return error("You do not have sufficient privileges to grant further access");
    }
    List<UserInfo> users = Registry.get().getUserStore().listUsers(query);
    return RequestProcessor.render("user-list.vm", uriInfo, servletContext, request, "grant", action, "uri", uri, "users", users);
}
/**
 * Grants a permission (or the administrator role) to a user, then redirects
 * back to the affected path.
 */
@Path("/grant")
@POST
public Response grant(@FormParam("user") String id, @FormParam("grant") String action, @FormParam("path") String inpath) {
    // An empty path occurs when setting global admin permissions.
    String path = (inpath == null || inpath.isEmpty()) ? "/ui/administrators" : inpath;

    UserStore userstore = Registry.get().getUserStore();
    try {
        // Keep action.equals(...) ordering: a null action is reported via the catch below.
        if (action.equals("administrator")) {
            userstore.setRole(id, RegAuthorizationInfo.ADMINSTRATOR_ROLE);
        } else {
            userstore.addPermision(id, new RegPermission(action, path));
        }
        return redirectTo(path);
    } catch (Exception e) {
        return error("Permission grant failed: " + e);
    }
}
/**
 * Revokes a user's permissions on the given path, then redirects back to it.
 */
@Path("/ungrant")
@POST
public Response ungrant(@FormParam("user") String id, @FormParam("path") String path) {
    UserStore userstore = Registry.get().getUserStore();
    try {
        userstore.removePermission(id, path);
        return redirectTo(path);
    } catch (Exception e) {
        // Fixed copy-pasted message: this endpoint removes a grant, it does not create one.
        return error("Permission removal failed: " + e);
    }
}
/**
 * Creates a temporary API password for the logged-in user, valid for the given
 * number of minutes, and renders it back in the api-key-result template.
 */
@Path("/createpassword")
@POST
public Response createpassword(@FormParam("minstolive") String minstolive) {
    int mins;
    try {
        mins = Integer.parseInt(minstolive);
    } catch (Exception e) {
        return error("Minutes to live must be an integer");
    }

    Subject subject = SecurityUtils.getSubject();
    if (!subject.isAuthenticated()) {
        return error("You must be logged in to do this");
    }

    UserStore userstore = Registry.get().getUserStore();
    try {
        String id = ((UserInfo) subject.getPrincipal()).getOpenid();
        String pwd = userstore.createCredentials(id, mins);
        log.info("Created temporary password for user " + id);
        return RequestProcessor.render("api-key-result.vm", uriInfo, servletContext, request, "password", pwd, "id", id);
    } catch (Exception e) {
        return error("Password creation failed: " + e);
    }
}
/**
 * Lets the logged-in user change their own password. The current password is
 * re-verified first (guards against an unattended logged-in screen).
 */
@Path("/setpassword")
@POST
public Response setPassword(@FormParam("currentPassword") String currentPassword, @FormParam("newPassword") String newPassword, @FormParam("return") String returnURL) {
    Subject subject = SecurityUtils.getSubject();
    if (!subject.isAuthenticated()) {
        return error("You must be logged in to reset your password");
    }
    if (newPassword == null || newPassword.isEmpty()) {
        return error("Must give a new password");
    }

    String userid = ((UserInfo) subject.getPrincipal()).getOpenid();
    try {
        // Re-authenticate with the current password before accepting the change.
        subject.login(new RegToken(userid, currentPassword));

        // Now set the password
        UserStore userstore = Registry.get().getUserStore();
        userstore.setCredentials(userid, ByteSource.Util.bytes(newPassword), Integer.MAX_VALUE);
        log.info("Changed password for user " + userid);
        setNocache(response);

        String target = (returnURL == null || returnURL.isEmpty()) ? "/ui/admin" : returnURL;
        return redirectTo( target );
    } catch (Exception e) {
        log.warn(String.format("Failed to change password for userid %s [%s]: %s", userid, e.getClass().toString(), e.getMessage()));
        return error("Failed to confirm login before changing password");
    }
}
/**
 * Administrator-only reset of another user's password. Requires both an
 * authenticated session and the administrator role.
 */
@Path("/resetpassword")
@POST
public Response resetPassword(@FormParam("userid") String userid, @FormParam("newPassword") String newPassword, @FormParam("return") String returnURL) {
    if (userid == null || userid.isEmpty() || newPassword == null || newPassword.isEmpty()) {
        return error("Must give a user and a new password");
    }
    Subject subject = SecurityUtils.getSubject();
    if (subject.isAuthenticated() && subject.hasRole(RegAuthorizationInfo.ADMINSTRATOR_ROLE)) {
        try {
            UserStore userstore = Registry.get().getUserStore();
            userstore.setCredentials(userid, ByteSource.Util.bytes(newPassword), Integer.MAX_VALUE);
            log.info("Administrator " + subject.getPrincipal() + " changed password for user " + userid);
            setNocache(response);
            if (returnURL == null || returnURL.isEmpty()) {
                returnURL = "/ui/admin";
            }
            return redirectTo( returnURL );
        } catch (Exception e) {
            log.warn(String.format("Administrator failed to change password for userid %s [%s]: %s", userid, e.getClass().toString(), e.getMessage()));
            return error("Failed to change password: " + e);
        }
    } else {
        // Fixed user-facing typo: "adminstrator" -> "administrator".
        return error("You must be logged in as an administrator to do this");
    }
}
@Path("/listadmins")
@GET
/**
 * Administrator-only: render the list of admin users.
 *
 * @return the admin-list page, or an error page if not authorized
 */
public Response listadmins() {
    Subject subject = SecurityUtils.getSubject();
    if (!(subject.isAuthenticated() && subject.hasRole(RegAuthorizationInfo.ADMINSTRATOR_ROLE))) {
        return error("You must be logged in as an administrator to do this");
    }
    UserStore userstore = Registry.get().getUserStore();
    return RequestProcessor.render("admin-list.vm", uriInfo, servletContext, request, "admins", userstore.listAdminUsers());
}
@Path("/setrole")
@POST
/**
 * Administrator-only: assign (or clear) a role for a user.
 *
 * @param id   the user to modify
 * @param role the role to assign; null or empty clears the role
 * @return a redirect to the admin page on success, or an error page
 */
public Response setrole(@FormParam("id") String id, @FormParam("role") String role) {
    Subject subject = SecurityUtils.getSubject();
    if (subject.isAuthenticated() && subject.hasRole(RegAuthorizationInfo.ADMINSTRATOR_ROLE)) {
        UserStore userstore = Registry.get().getUserStore();
        try {
            // Treat a missing OR empty form value as "clear the role". Previously a
            // null "role" parameter caused an NPE on role.isEmpty().
            userstore.setRole(id, (role == null || role.isEmpty()) ? null : role);
            return redirectTo("/ui/admin");
        } catch (Exception e) {
            return error("Role assignment failed: " + e);
        }
    } else {
        return error("You must be logged in as an administrator to do this");
    }
}
/**
 * Renders the shared error page with the supplied message, disabling
 * front-end caching so the error is never served stale.
 *
 * @param message the text shown to the user
 * @return the rendered error page
 */
private Response error(String message) {
    setNocache(response);
    final String template = "error.vm";
    return RequestProcessor.render(template, uriInfo, servletContext, request, "message", message);
}
/**
 * Builds a 303 See Other redirect to the given path.
 *
 * @param path the target URI, absolute or relative
 * @return a redirect Response
 * @throws IllegalArgumentException if the path is not a syntactically valid URI
 */
public static Response redirectTo(String path) {
    try {
        return Response.seeOther(new URI(path)).build();
    } catch (URISyntaxException e) {
        // Previously this printed a stack trace and returned null, silently
        // producing an empty response downstream. Fail fast instead, keeping
        // the original exception as the cause.
        throw new IllegalArgumentException("Invalid redirect path: " + path, e);
    }
}
/**
 * Marks the response as cache-bypassing for downstream proxies by setting the
 * nocache cookie with a one-day lifetime (60 * 60 * 24 seconds).
 * NOTE(review): the cookie value "cache bypass" appears to be a marker only —
 * confirm against the proxy configuration whether the value is inspected.
 */
public static void setNocache(HttpServletResponse httpresponse) {
setNocache(httpresponse, "cache bypass", 60 * 60 *24);
}
/**
 * Clears the nocache cookie by re-setting it with a null value and
 * max-age 0, which expires it immediately on the client.
 */
private void removeNocache(HttpServletResponse httpresponse) {
setNocache(httpresponse, null, 0);
}
/**
 * Adds the nocache marker cookie to the response.
 *
 * @param httpresponse the servlet response to attach the cookie to
 * @param value        cookie value (null when expiring the cookie)
 * @param age          max-age in seconds; 0 expires the cookie immediately
 */
private static void setNocache(HttpServletResponse httpresponse, String value, int age) {
    Cookie nocache = new Cookie(NOCACHE_COOKIE, value);
    // Site-wide, HTTP-only: not readable from script, applies to every path.
    nocache.setPath("/");
    nocache.setHttpOnly(true);
    nocache.setMaxAge(age);
    nocache.setComment("Bypass proxy cache when logged in");
    httpresponse.addCookie(nocache);
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.presto;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.datastore.block.TableBlockInfo;
import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryGenerator;
import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.metadata.encoder.Encoding;
import org.apache.carbondata.core.scan.executor.QueryExecutor;
import org.apache.carbondata.core.scan.executor.QueryExecutorFactory;
import org.apache.carbondata.core.scan.executor.exception.QueryExecutionException;
import org.apache.carbondata.core.scan.model.QueryDimension;
import org.apache.carbondata.core.scan.model.QueryMeasure;
import org.apache.carbondata.core.scan.model.QueryModel;
import org.apache.carbondata.core.scan.result.iterator.AbstractDetailQueryResultIterator;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnarBatch;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.hadoop.AbstractRecordReader;
import org.apache.carbondata.hadoop.CarbonInputSplit;
import org.apache.carbondata.hadoop.CarbonMultiBlockSplit;
import org.apache.carbondata.hadoop.util.CarbonTypeUtil;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.spark.memory.MemoryMode;
import org.apache.spark.sql.execution.vectorized.ColumnarBatch;
import org.apache.spark.sql.types.DecimalType;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
/**
 * A specialized RecordReader that reads into InternalRows or ColumnarBatches directly using the
 * carbondata column APIs and fills the data directly into columns.
 *
 * The reader is always in batch mode here: the constructor calls
 * enableReturningBatches(), so nextKeyValue()/getCurrentValue() hand back whole
 * ColumnarBatch objects rather than individual rows.
 */
class CarbonVectorizedRecordReader extends AbstractRecordReader<Object> {
  // Index of the next row to serve within the current batch (row-at-a-time mode only).
  private int batchIdx = 0;
  // Number of valid rows in the current batch (row-at-a-time mode only).
  private int numBatched = 0;
  // Spark-side reusable batch; allocated lazily by initBatch().
  private ColumnarBatch columnarBatch;
  // Carbon-side wrapper over the same column vectors, handed to the result iterator to fill.
  private CarbonColumnarBatch carbonColumnarBatch;
  /**
   * If true, this class returns batches instead of rows.
   */
  private boolean returnColumnarBatch;
  /**
   * The default config on whether columnarBatch should be offheap.
   */
  private static final MemoryMode DEFAULT_MEMORY_MODE = MemoryMode.OFF_HEAP;
  private QueryModel queryModel;
  private AbstractDetailQueryResultIterator iterator;
  private QueryExecutor queryExecutor;
  /**
   * Creates a reader over an already-executed query.
   *
   * NOTE(review): initialize() below re-creates queryExecutor and iterator from the
   * input split, overwriting the instances passed here — confirm which construction
   * path each caller relies on.
   */
  public CarbonVectorizedRecordReader(QueryExecutor queryExecutor, QueryModel queryModel, AbstractDetailQueryResultIterator iterator) {
    this.queryModel = queryModel;
    this.iterator = iterator;
    this.queryExecutor = queryExecutor;
    enableReturningBatches();
  }
  /**
   * Implementation of RecordReader API.
   * Builds the block list from the split, switches the query model to vector
   * mode, then executes the query and keeps the detail result iterator.
   */
  @Override public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
      throws IOException, InterruptedException, UnsupportedOperationException {
    // The input split can contain single HDFS block or multiple blocks, so firstly get all the
    // blocks and then set them in the query model.
    List<CarbonInputSplit> splitList;
    if (inputSplit instanceof CarbonInputSplit) {
      splitList = new ArrayList<>(1);
      splitList.add((CarbonInputSplit) inputSplit);
    } else if (inputSplit instanceof CarbonMultiBlockSplit) {
      // contains multiple blocks, this is an optimization for concurrent query.
      CarbonMultiBlockSplit multiBlockSplit = (CarbonMultiBlockSplit) inputSplit;
      splitList = multiBlockSplit.getAllSplits();
    } else {
      throw new RuntimeException("unsupported input split type: " + inputSplit);
    }
    List<TableBlockInfo> tableBlockInfoList = CarbonInputSplit.createBlocks(splitList);
    queryModel.setTableBlockInfos(tableBlockInfoList);
    queryModel.setVectorReader(true);
    try {
      queryExecutor = QueryExecutorFactory.getQueryExecutor(queryModel);
      iterator = (AbstractDetailQueryResultIterator) queryExecutor.execute(queryModel);
    } catch (QueryExecutionException e) {
      // NOTE(review): wrapping as InterruptedException drops the cause chain —
      // consider attaching the original via initCause so the failure is traceable.
      throw new InterruptedException(e.getMessage());
    }
  }
  /**
   * Logs read statistics, releases the reusable batch, clears dictionary caches,
   * and finishes the query executor.
   */
  @Override public void close() throws IOException {
    logStatistics(rowCount, queryModel.getStatisticsRecorder());
    if (columnarBatch != null) {
      columnarBatch.close();
      columnarBatch = null;
    }
    // clear dictionary cache
    Map<String, Dictionary> columnToDictionaryMapping = queryModel.getColumnToDictionaryMapping();
    if (null != columnToDictionaryMapping) {
      for (Map.Entry<String, Dictionary> entry : columnToDictionaryMapping.entrySet()) {
        CarbonUtil.clearDictionaryCache(entry.getValue());
      }
    }
    try {
      queryExecutor.finish();
    } catch (QueryExecutionException e) {
      throw new IOException(e);
    }
  }
  /**
   * Advances to the next value. In batch mode (the default) each call fetches a
   * whole batch; otherwise it steps row-by-row within the current batch.
   */
  @Override public boolean nextKeyValue() throws IOException, InterruptedException {
    resultBatch();
    if (returnColumnarBatch) return nextBatch();
    if (batchIdx >= numBatched) {
      if (!nextBatch()) return false;
    }
    ++batchIdx;
    return true;
  }
  /**
   * Returns the current batch (batch mode) or the current row, updating the
   * inherited rowCount used for statistics either way.
   */
  @Override public Object getCurrentValue() throws IOException, InterruptedException {
    if (returnColumnarBatch) {
      rowCount += columnarBatch.numValidRows();
      return columnarBatch;
    }
    rowCount += 1;
    return columnarBatch.getRow(batchIdx - 1);
  }
  /** Keys are not meaningful for this reader; always null. */
  @Override public Void getCurrentKey() throws IOException, InterruptedException {
    return null;
  }
  @Override public float getProgress() throws IOException, InterruptedException {
    // TODO : Implement it based on total number of rows it is going to retrieve.
    return 0;
  }
  /**
   * Returns the ColumnarBatch object that will be used for all rows returned by this reader.
   * This object is reused. Calling this enables the vectorized reader. This should be called
   * before any calls to nextKeyValue/nextBatch.
   *
   * Builds one Spark StructField per projected dimension and measure, placed at
   * its query order, then allocates the reusable batch and wraps each column in
   * a Carbon-facing vector sharing the same filteredRows mask.
   */
  private void initBatch(MemoryMode memMode) {
    List<QueryDimension> queryDimension = queryModel.getQueryDimension();
    List<QueryMeasure> queryMeasures = queryModel.getQueryMeasures();
    StructField[] fields = new StructField[queryDimension.size() + queryMeasures.size()];
    for (int i = 0; i < queryDimension.size(); i++) {
      QueryDimension dim = queryDimension.get(i);
      if (dim.getDimension().hasEncoding(Encoding.DIRECT_DICTIONARY)) {
        // Direct-dictionary columns surface as the generator's return type (e.g. timestamps).
        DirectDictionaryGenerator generator = DirectDictionaryKeyGeneratorFactory
            .getDirectDictionaryGenerator(dim.getDimension().getDataType());
        fields[dim.getQueryOrder()] = new StructField(dim.getColumnName(),
            CarbonTypeUtil.convertCarbonToSparkDataType(generator.getReturnType()), true, null);
      } else if (!dim.getDimension().hasEncoding(Encoding.DICTIONARY)) {
        // No-dictionary columns keep their native data type.
        fields[dim.getQueryOrder()] = new StructField(dim.getColumnName(),
            CarbonTypeUtil.convertCarbonToSparkDataType(dim.getDimension().getDataType()), true,
            null);
      } else if (dim.getDimension().isComplex()) {
        fields[dim.getQueryOrder()] = new StructField(dim.getColumnName(),
            CarbonTypeUtil.convertCarbonToSparkDataType(dim.getDimension().getDataType()), true,
            null);
      } else {
        // Plain dictionary-encoded columns are read as their surrogate int keys.
        fields[dim.getQueryOrder()] = new StructField(dim.getColumnName(),
            CarbonTypeUtil.convertCarbonToSparkDataType(DataTypes.INT), true, null);
      }
    }
    for (int i = 0; i < queryMeasures.size(); i++) {
      QueryMeasure msr = queryMeasures.get(i);
      DataType dataType = msr.getMeasure().getDataType();
      if (dataType == DataTypes.SHORT || dataType == DataTypes.INT || dataType == DataTypes.LONG) {
        fields[msr.getQueryOrder()] = new StructField(msr.getColumnName(),
            CarbonTypeUtil.convertCarbonToSparkDataType(msr.getMeasure().getDataType()), true,
            null);
      } else if (DataTypes.isDecimal(dataType)) {
        fields[msr.getQueryOrder()] = new StructField(msr.getColumnName(),
            new DecimalType(msr.getMeasure().getPrecision(), msr.getMeasure().getScale()), true,
            null);
      } else {
        // Any other measure type falls back to DOUBLE.
        fields[msr.getQueryOrder()] = new StructField(msr.getColumnName(),
            CarbonTypeUtil.convertCarbonToSparkDataType(DataTypes.DOUBLE), true, null);
      }
    }
    columnarBatch = ColumnarBatch.allocate(new StructType(fields), memMode);
    CarbonColumnVector[] vectors = new CarbonColumnVector[fields.length];
    boolean[] filteredRows = new boolean[columnarBatch.capacity()];
    for (int i = 0; i < fields.length; i++) {
      vectors[i] = new ColumnarVectorWrapper(columnarBatch.column(i), filteredRows);
    }
    carbonColumnarBatch = new CarbonColumnarBatch(vectors, columnarBatch.capacity(), filteredRows);
  }
  /** Allocates the batch with the default (off-heap) memory mode. */
  private void initBatch() {
    initBatch(DEFAULT_MEMORY_MODE);
  }
  /** Lazily allocates and returns the reusable batch. */
  private ColumnarBatch resultBatch() {
    if (columnarBatch == null) initBatch();
    return columnarBatch;
  }
  /*
   * Can be called before any rows are returned to enable returning columnar batches directly.
   */
  private void enableReturningBatches() {
    returnColumnarBatch = true;
  }
  /**
   * Advances to the next batch of rows. Returns false if there are no more.
   * Resets both views of the shared vectors before the iterator refills them.
   */
  private boolean nextBatch() {
    columnarBatch.reset();
    carbonColumnarBatch.reset();
    if (iterator.hasNext()) {
      iterator.processNextBatch(carbonColumnarBatch);
      int actualSize = carbonColumnarBatch.getActualSize();
      columnarBatch.setNumRows(actualSize);
      numBatched = actualSize;
      batchIdx = 0;
      return true;
    }
    return false;
  }
}
|
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package cerl.gui.forms;
import cerl.gui.utilities.ClassTableItemModel;
import cerl.gui.utilities.DigPopGUIUtilityClass;
import cerl.gui.utilities.HelpFileScreenNames;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.RowFilter;
import javax.swing.table.TableRowSorter;
/**
 * Called from Step 3.
 * Allows the user to provide descriptive names for census classes.
 * On save the edits stay in the shared censusSurveyClasses model; on cancel
 * (or window close) a snapshot taken at construction time is restored.
 * @author ajohnson
 */
public class CensusClassUserDefinitions extends javax.swing.JFrame {
    // Table model backing jTable1; wraps the live census class list.
    private ClassTableItemModel classTableItemModel;
    // Owning wizard step; re-shown when this window closes.
    private StepThree parentStep;
    // Deep-cloned snapshot taken at construction, restored when the user cancels.
    private ArrayList<cerl.gui.utilities.Class> nonEditedClasses = new ArrayList<>();
    // Help-screen identifier used by the Help menu handler.
    private final String SCREEN_NAME = HelpFileScreenNames.STEP_THREE_HELP_FILE_NAME.toString();
    /**
     * Creates new form CensusClassUserDefinitions.
     * @param parentStep the Step 3 screen that opened this window
     */
    public CensusClassUserDefinitions(StepThree parentStep) {
        this.parentStep = parentStep;
        /**
         * Creates a clean deep clone of the census classes.
         * This will be used for when the user hits cancel.
         */
        parentStep.censusSurveyClasses.getCensusClasses().stream().forEach((c) -> {
            try {
                this.nonEditedClasses.add(c.clone());
            } catch (CloneNotSupportedException ex) {
                Logger.getLogger(CensusClassUserDefinitions.class.getName()).log(Level.SEVERE, null, ex);
            }
        });
        this.classTableItemModel = new ClassTableItemModel(parentStep.censusSurveyClasses.getCensusClasses());
        initComponents();
        // Hide table rows whose first column is null (classes with no value to name).
        RowFilter<Object, Object> filter = new RowFilter<Object, Object>() {
            @Override
            public boolean include(Entry entry) {
                if (entry.getValue(0) != null) {
                    return true;
                } else {
                    return false;
                }
            }
        };
        TableRowSorter<ClassTableItemModel> sorter = new TableRowSorter<ClassTableItemModel>(classTableItemModel);
        sorter.setRowFilter(filter);
        this.jTable1.setRowSorter(sorter);
        this.jTable1.setModel(classTableItemModel);
    }
    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        jPanel1 = new javax.swing.JPanel();
        jScrollPane1 = new javax.swing.JScrollPane();
        jTable1 = new javax.swing.JTable();
        jLabel_Header = new javax.swing.JLabel();
        btnSave = new javax.swing.JButton();
        btnCancel = new javax.swing.JButton();
        jMenuBar1 = new javax.swing.JMenuBar();
        jMenu1 = new javax.swing.JMenu();
        jMenuHelp = new javax.swing.JMenu();
        jMenuAbout = new javax.swing.JMenu();
        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        addWindowListener(new java.awt.event.WindowAdapter() {
            public void windowClosing(java.awt.event.WindowEvent evt) {
                formWindowClosing(evt);
            }
        });
        jPanel1.setBorder(javax.swing.BorderFactory.createEtchedBorder());
        jScrollPane1.setViewportView(jTable1);
        jLabel_Header.setFont(new java.awt.Font("Tahoma", 1, 18)); // NOI18N
        jLabel_Header.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        jLabel_Header.setText("Enter User Defined Description for Each Class");
        btnSave.setText("Save");
        btnSave.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnSaveActionPerformed(evt);
            }
        });
        btnCancel.setText("Cancel");
        btnCancel.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                btnCancelActionPerformed(evt);
            }
        });
        javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
        jPanel1.setLayout(jPanel1Layout);
        jPanel1Layout.setHorizontalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 517, Short.MAX_VALUE)
                    .addComponent(jLabel_Header, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                    .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
                        .addComponent(btnCancel)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                        .addComponent(btnSave)))
                .addContainerGap())
        );
        jPanel1Layout.setVerticalGroup(
            jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(jPanel1Layout.createSequentialGroup()
                .addGap(15, 15, 15)
                .addComponent(jLabel_Header)
                .addGap(18, 18, 18)
                .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 325, javax.swing.GroupLayout.PREFERRED_SIZE)
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                    .addComponent(btnSave)
                    .addComponent(btnCancel))
                .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
        );
        jMenu1.setText("File");
        jMenuBar1.add(jMenu1);
        jMenuHelp.setText("Help");
        jMenuHelp.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                jMenuHelpMouseClicked(evt);
            }
        });
        jMenuBar1.add(jMenuHelp);
        jMenuAbout.setText("About");
        jMenuAbout.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                jMenuAboutMouseClicked(evt);
            }
        });
        jMenuBar1.add(jMenuAbout);
        setJMenuBar(jMenuBar1);
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(layout.createSequentialGroup()
                .addContainerGap()
                .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
                .addContainerGap())
        );
        pack();
    }// </editor-fold>//GEN-END:initComponents
    /**
     * Closes the window, cancels updates, and returns to step 3.
     * @param evt window-closing event (unused)
     */
    private void formWindowClosing(java.awt.event.WindowEvent evt) {//GEN-FIRST:event_formWindowClosing
        closeAndDisposeReturnToStepThree(true);
    }//GEN-LAST:event_formWindowClosing
    /**
     * Saves the current information and returns to step 3.
     * @param evt button-click event (unused)
     */
    private void btnSaveActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnSaveActionPerformed
        closeAndDisposeReturnToStepThree(false);
    }//GEN-LAST:event_btnSaveActionPerformed
    /**
     * Cancels the current changes and returns to step 3.
     * @param evt button-click event (unused)
     */
    private void btnCancelActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnCancelActionPerformed
        closeAndDisposeReturnToStepThree(true);
    }//GEN-LAST:event_btnCancelActionPerformed
    /**
     * Handles the Help menu item click, opens information about this step.
     * @param evt mouse event (unused)
     */
    private void jMenuHelpMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jMenuHelpMouseClicked
        DigPopGUIUtilityClass.loadDefaultHelpGUIByScreenName(SCREEN_NAME);
    }//GEN-LAST:event_jMenuHelpMouseClicked
    /**
     * Handles the About menu item click, opens the About pop-up window.
     * @param evt mouse event (unused)
     */
    private void jMenuAboutMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_jMenuAboutMouseClicked
        new About().setVisible(true);
    }//GEN-LAST:event_jMenuAboutMouseClicked
    /**
     * Closes the window and updates the parent step's censusSurveyClasses object.
     * @param cancelOnWindowClose - true if the user opted to cancel their changes
     * (the pre-edit snapshot is restored), false if changes should be saved.
     */
    private void closeAndDisposeReturnToStepThree(boolean cancelOnWindowClose){
        if(cancelOnWindowClose){
            // Roll back to the deep clones captured in the constructor.
            this.parentStep.censusSurveyClasses.setCensusClasses(this.nonEditedClasses);
            this.parentStep.updateCensusSelectedListModel();
        }
        this.parentStep.setVisible(true);
        this.parentStep.setAlwaysOnTop(true);
        this.dispose();
    }
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton btnCancel;
    private javax.swing.JButton btnSave;
    private javax.swing.JLabel jLabel_Header;
    private javax.swing.JMenu jMenu1;
    private javax.swing.JMenu jMenuAbout;
    private javax.swing.JMenuBar jMenuBar1;
    private javax.swing.JMenu jMenuHelp;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JTable jTable1;
    // End of variables declaration//GEN-END:variables
}
|
|
package gov.hhs.fha.nhinc.compliance;
import gov.hhs.fha.nhinc.properties.PropertyAccessor;
import java.util.Iterator;
import org.apache.log4j.Logger;
import org.hl7.v3.BinaryDataEncoding;
import org.hl7.v3.PRPAIN201305UV02;
import org.hl7.v3.PRPAIN201305UV02QUQIMT021001UV01ControlActProcess;
import org.hl7.v3.PRPAMT201306UV02LivingSubjectAdministrativeGender;
import org.hl7.v3.PRPAMT201306UV02LivingSubjectBirthPlaceAddress;
import org.hl7.v3.PRPAMT201306UV02LivingSubjectBirthPlaceName;
import org.hl7.v3.PRPAMT201306UV02LivingSubjectBirthTime;
import org.hl7.v3.PRPAMT201306UV02LivingSubjectId;
import org.hl7.v3.PRPAMT201306UV02LivingSubjectName;
import org.hl7.v3.PRPAMT201306UV02MothersMaidenName;
import org.hl7.v3.PRPAMT201306UV02ParameterList;
import org.hl7.v3.PRPAMT201306UV02PatientAddress;
import org.hl7.v3.PRPAMT201306UV02PatientTelecom;
import org.hl7.v3.PRPAMT201306UV02PrincipalCareProviderId;
import org.hl7.v3.QUQIMT021001UV01AuthorOrPerformer;
import org.hl7.v3.STExplicit;
/**
* Utility class to ensure that a patient discovery request message meets
* specification compliance. This approach was chosen as message generated
* at the consumer level are not enforced by the WSDL contract so this is a
* fail-safe step before sending the request message.
*
* The initial implementation of this utility only checks known problems and
* should not be considered conclusive of all potential discrepancies with
* the specification.
*
* @author Neil Webb
*
*/
public class PatientDiscoveryRequestComplianceChecker implements ComplianceChecker {
private static final Logger LOG = Logger.getLogger(PatientDiscoveryRequestComplianceChecker.class);
private static final String PATIENT_DISCOVERY_REQUEST_COMPLIANCE_CHECK_ENABLED_KEY = "PatientDiscoveryRequestComplianceCheckEnabled";
public static final String ASSIGNED_DEVICE_CLASS_CODE = "ASSIGNED";
public static final String SEMANTICS_TEXT_REPRESENTATION_PATIENT_GENDER = "LivingSubject.administrativeGender";
public static final String SEMANTICS_TEXT_REPRESENTATION_PATIENT_BIRTH_TIME = "LivingSubject.birthTime";
public static final String SEMANTICS_TEXT_REPRESENTATION_SUBJECT_ID = "LivingSubject.id";
public static final String SEMANTICS_TEXT_REPRESENTATION_PATIENT_NAME = "LivingSubject.name";
public static final String SEMANTICS_TEXT_REPRESENTATION_PATIENT_ADDRESS = "Patient.addr";
public static final String SEMANTICS_TEXT_REPRESENTATION_PATIENT_BIRTH_PLACE_ADDRESS = "LivingSubject.BirthPlace.Addr";
public static final String SEMANTICS_TEXT_REPRESENTATION_PATIENT_BIRTH_PLACE_NAME = "LivingSubject.BirthPlace.Place.Name";
public static final String SEMANTICS_TEXT_REPRESENTATION_PRINCIPAL_CARE_PROVIDER = "AssignedProvider.id";
public static final String SEMANTICS_TEXT_REPRESENTATION_PATIENT_MAIDEN_NAME_MOTHER = "Person.MothersMaidenName";
public static final String SEMANTICS_TEXT_REPRESENTATION_PATIENT_TELECOM = "Patient.telecom";
private PRPAIN201305UV02 pdRequest = null;
public PatientDiscoveryRequestComplianceChecker(
PRPAIN201305UV02 pdRequest) {
this.pdRequest = pdRequest;
}
protected boolean isComplianceCheckEnabled() {
boolean complianceCheckEnabled = true;
try {
complianceCheckEnabled = PropertyAccessor.getInstance().getPropertyBoolean(PROPERTIES_FILE_GATEWAY, PATIENT_DISCOVERY_REQUEST_COMPLIANCE_CHECK_ENABLED_KEY);
} catch(Throwable t) {
LOG.error("Error checking PD request compliance check flag: " + t.getMessage(), t);
}
return complianceCheckEnabled;
}
@Override
public void update2011SpecCompliance() {
if(pdRequest == null) {
LOG.debug("Patient discovery request compliance check. Request was null - bypassing.");
return;
}
if(!isComplianceCheckEnabled()) {
LOG.debug("Patient discovery request compliance check was not enabled - bypassing.");
return;
}
LOG.debug("Patient discovery request compliance check was enabled - performing updates.");
updateControlActProcess();
}
private void updateControlActProcess() {
if((pdRequest != null) && (pdRequest.getControlActProcess() != null)) {
PRPAIN201305UV02QUQIMT021001UV01ControlActProcess controlActProcess = pdRequest.getControlActProcess();
if(controlActProcess != null) {
updateAuthorOrPerformer(controlActProcess);
updateQueryByParameter(controlActProcess);
}
}
}
/**
* Update the author or performer
* Set the author or performer -> assigned device class code value
*
* @param controlActProcess
*/
private void updateAuthorOrPerformer(PRPAIN201305UV02QUQIMT021001UV01ControlActProcess controlActProcess) {
if(!controlActProcess.getAuthorOrPerformer().isEmpty()) {
for(QUQIMT021001UV01AuthorOrPerformer authorOrPerformer : controlActProcess.getAuthorOrPerformer()) {
if((authorOrPerformer.getAssignedDevice() != null) && (authorOrPerformer.getAssignedDevice().getValue() != null)) {
// Update the value of authorOrPerformer/assignedDevice/@classCode
authorOrPerformer.getAssignedDevice().getValue().setClassCode(ASSIGNED_DEVICE_CLASS_CODE);
}
}
}
}
/**
* Update query by parameter values as needed
*
* @param controlActProcess
*/
private void updateQueryByParameter(PRPAIN201305UV02QUQIMT021001UV01ControlActProcess controlActProcess) {
if((controlActProcess.getQueryByParameter() != null) && (controlActProcess.getQueryByParameter().getValue() != null) && (controlActProcess.getQueryByParameter().getValue().getParameterList() != null)) {
PRPAMT201306UV02ParameterList parameterList = controlActProcess.getQueryByParameter().getValue().getParameterList();
updateQueryParamPatientGender(parameterList);
updateQueryParamPatientBirthTime(parameterList);
updateQueryParamSubjectId(parameterList);
updateQueryParamPatientName(parameterList);
updateQueryParamPatientAddress(parameterList);
updateQueryParamPatientBirthPlaceAddress(parameterList);
updateQueryParamPatientBirthPlaceName(parameterList);
updateQueryParamPrincipalCareProviderId(parameterList);
updateQueryParamMothersMaidenName(parameterList);
updateQueryParamPatientTelecom(parameterList);
}
}
/**
* Update the patient gender query parameter
* Set the semantics text value.
*
* @param queryByParameter
*/
private void updateQueryParamPatientGender(PRPAMT201306UV02ParameterList parameterList) {
if(!parameterList.getLivingSubjectAdministrativeGender().isEmpty()) {
for(PRPAMT201306UV02LivingSubjectAdministrativeGender gender : parameterList.getLivingSubjectAdministrativeGender()) {
gender.setSemanticsText(createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PATIENT_GENDER));
}
}
}
/**
* Update the patient birth time query parameter
* Set the semantics text value.
*
* @param queryByParameter
*/
private void updateQueryParamPatientBirthTime(PRPAMT201306UV02ParameterList parameterList) {
if(!parameterList.getLivingSubjectBirthTime().isEmpty()) {
for(PRPAMT201306UV02LivingSubjectBirthTime birthTime : parameterList.getLivingSubjectBirthTime()) {
birthTime.setSemanticsText(createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PATIENT_BIRTH_TIME));
}
}
}
/**
* Update the subject id query parameter
* Set the semantics text value.
*
* @param queryByParameter
*/
private void updateQueryParamSubjectId(PRPAMT201306UV02ParameterList parameterList) {
if(!parameterList.getLivingSubjectId().isEmpty()) {
for(PRPAMT201306UV02LivingSubjectId subjectId : parameterList.getLivingSubjectId()) {
subjectId.setSemanticsText(createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_SUBJECT_ID));
}
}
}
/**
* Update the patient name query parameter
* Set the semantics text value.
*
* @param queryByParameter
*/
private void updateQueryParamPatientName(PRPAMT201306UV02ParameterList parameterList) {
if(!parameterList.getLivingSubjectName().isEmpty()) {
for(PRPAMT201306UV02LivingSubjectName name : parameterList.getLivingSubjectName()) {
name.setSemanticsText(createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PATIENT_NAME));
}
}
}
/**
* Update the patient address query parameter
* Set the semantics text value.
*
* @param queryByParameter
*/
private void updateQueryParamPatientAddress(PRPAMT201306UV02ParameterList parameterList) {
if(!parameterList.getPatientAddress().isEmpty()) {
for(PRPAMT201306UV02PatientAddress patientAddress : parameterList.getPatientAddress()) {
patientAddress.setSemanticsText(createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PATIENT_ADDRESS));
}
}
}
/**
* Update the patient birth place address query parameter
* Set the semantics text value.
*
* @param queryByParameter
*/
private void updateQueryParamPatientBirthPlaceAddress(PRPAMT201306UV02ParameterList parameterList) {
if(!parameterList.getLivingSubjectBirthPlaceAddress().isEmpty()) {
for(PRPAMT201306UV02LivingSubjectBirthPlaceAddress patientBirthPlaceAddress : parameterList.getLivingSubjectBirthPlaceAddress()) {
patientBirthPlaceAddress.setSemanticsText(createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PATIENT_BIRTH_PLACE_ADDRESS));
}
}
}
/**
* Update the patient birth place name query parameter
* Set the semantics text value.
*
* @param queryByParameter
*/
private void updateQueryParamPatientBirthPlaceName(PRPAMT201306UV02ParameterList parameterList) {
if(!parameterList.getLivingSubjectBirthPlaceName().isEmpty()) {
for(PRPAMT201306UV02LivingSubjectBirthPlaceName birthPlaceName : parameterList.getLivingSubjectBirthPlaceName()) {
birthPlaceName.setSemanticsText(createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PATIENT_BIRTH_PLACE_NAME));
}
}
}
/**
* Update the principal care provider ID query parameter
* Set the semantics text value.
*
* @param queryByParameter
*/
private void updateQueryParamPrincipalCareProviderId(PRPAMT201306UV02ParameterList parameterList) {
if(!parameterList.getPrincipalCareProviderId().isEmpty()) {
for(PRPAMT201306UV02PrincipalCareProviderId principalCareProviderId : parameterList.getPrincipalCareProviderId()) {
principalCareProviderId.setSemanticsText(createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PRINCIPAL_CARE_PROVIDER));
}
}
}
/**
 * Updates the mother's-maiden-name query parameters.
 * <p>A parameter whose first value holds no non-blank String content is
 * removed from the list; all remaining parameters receive the fixed
 * semantics-text value.
 *
 * @param parameterList the query parameter list to update
 */
private void updateQueryParamMothersMaidenName(PRPAMT201306UV02ParameterList parameterList) {
    Iterator<PRPAMT201306UV02MothersMaidenName> iter =
            parameterList.getMothersMaidenName().iterator();
    while (iter.hasNext()) {
        PRPAMT201306UV02MothersMaidenName maidenName = iter.next();
        // Extract the first content element of the first value, if it is a String.
        String nameValue = null;
        if (!maidenName.getValue().isEmpty()
                && maidenName.getValue().get(0) != null
                && !maidenName.getValue().get(0).getContent().isEmpty()) {
            Object firstContent = maidenName.getValue().get(0).getContent().get(0);
            if (firstContent instanceof String) {
                nameValue = (String) firstContent;
            }
        }
        if ((nameValue == null) || nameValue.trim().isEmpty()) {
            // No usable value: drop the parameter via the iterator so the
            // backing list is modified safely during iteration.
            iter.remove();
        } else {
            maidenName.setSemanticsText(
                    createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PATIENT_MAIDEN_NAME_MOTHER));
        }
    }
}
/**
 * Stamps every patient-telecom query parameter with its fixed
 * semantics-text value.
 *
 * @param parameterList the query parameter list to update
 */
private void updateQueryParamPatientTelecom(PRPAMT201306UV02ParameterList parameterList) {
    if (parameterList.getPatientTelecom().isEmpty()) {
        return;
    }
    for (PRPAMT201306UV02PatientTelecom telecom : parameterList.getPatientTelecom()) {
        telecom.setSemanticsText(
                createSTExplicit(SEMANTICS_TEXT_REPRESENTATION_PATIENT_TELECOM));
    }
}
/**
 * Builds an {@code STExplicit} carrying the given text content with a
 * plain-text ({@code TXT}) representation.
 *
 * @param stContent the text to embed
 * @return a newly-constructed {@code STExplicit}
 */
private STExplicit createSTExplicit(String stContent) {
    STExplicit semanticsText = new STExplicit();
    semanticsText.getContent().add(stContent);
    semanticsText.setRepresentation(BinaryDataEncoding.TXT);
    return semanticsText;
}
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.app;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.EnumSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.WrappedJvmID;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobStatus.State;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
import org.apache.hadoop.mapreduce.jobhistory.NormalizedResourceEvent;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEvent;
import org.apache.hadoop.mapreduce.v2.app.commit.CommitterEventHandler;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.JobStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttemptStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.TaskStateInternal;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEventType;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobFinishEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
import org.apache.hadoop.mapreduce.v2.app.job.impl.TaskAttemptImpl;
import org.apache.hadoop.mapreduce.v2.app.job.impl.TaskImpl;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
import org.apache.hadoop.mapreduce.v2.app.rm.RMHeartbeatHandler;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.service.Service;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.SystemClock;
import org.junit.Assert;
/**
 * Mock MRAppMaster for unit tests. Doesn't start RPC servers.
 * No threads are started except of the event Dispatcher thread.
 */
@SuppressWarnings("unchecked")
public class MRApp extends MRAppMaster {
  private static final Log LOG = LogFactory.getLog(MRApp.class);

  /** Number of map tasks the fake job will contain. */
  int maps;
  /** Number of reduce tasks the fake job will contain. */
  int reduces;

  private File testWorkDir;
  private Path testAbsPath;
  private ClusterInfo clusterInfo;

  // Queue to pretend the RM assigned us
  private String assignedQueue;

  public static String NM_HOST = "localhost";
  public static int NM_PORT = 1234;
  public static int NM_HTTP_PORT = 8042;

  //if true, tasks complete automatically as soon as they are launched
  protected boolean autoComplete = false;

  static ApplicationId applicationId;

  static {
    applicationId = ApplicationId.newInstance(0, 0);
  }

  // --- Convenience constructors: all delegate to the primary constructor
  // below, filling in a startCount of 1, a SystemClock, unregistered=true
  // and/or no assigned queue as appropriate. ---

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, Clock clock) {
    this(maps, reduces, autoComplete, testName, cleanOnStart, 1, clock, null);
  }

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, Clock clock, boolean unregistered) {
    this(maps, reduces, autoComplete, testName, cleanOnStart, 1, clock,
        unregistered);
  }

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart) {
    this(maps, reduces, autoComplete, testName, cleanOnStart, 1);
  }

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, String assignedQueue) {
    this(maps, reduces, autoComplete, testName, cleanOnStart, 1,
        new SystemClock(), assignedQueue);
  }

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, boolean unregistered) {
    this(maps, reduces, autoComplete, testName, cleanOnStart, 1, unregistered);
  }

  @Override
  protected void initJobCredentialsAndUGI(Configuration conf) {
    // Fake a shuffle secret that normally is provided by the job client.
    String shuffleSecret = "fake-shuffle-secret";
    TokenCache.setShuffleSecretKey(shuffleSecret.getBytes(), getCredentials());
  }

  /** Builds an ApplicationAttemptId for the given app and attempt number. */
  private static ApplicationAttemptId getApplicationAttemptId(
      ApplicationId applicationId, int startCount) {
    ApplicationAttemptId applicationAttemptId =
        ApplicationAttemptId.newInstance(applicationId, startCount);
    return applicationAttemptId;
  }

  /** Builds an AM ContainerId derived from the given app and attempt number. */
  private static ContainerId getContainerId(ApplicationId applicationId,
      int startCount) {
    ApplicationAttemptId appAttemptId =
        getApplicationAttemptId(applicationId, startCount);
    ContainerId containerId =
        ContainerId.newContainerId(appAttemptId, startCount);
    return containerId;
  }

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, int startCount) {
    this(maps, reduces, autoComplete, testName, cleanOnStart, startCount,
        new SystemClock(), null);
  }

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, int startCount, boolean unregistered) {
    this(maps, reduces, autoComplete, testName, cleanOnStart, startCount,
        new SystemClock(), unregistered);
  }

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, int startCount, Clock clock, boolean unregistered) {
    this(getApplicationAttemptId(applicationId, startCount), getContainerId(
      applicationId, startCount), maps, reduces, autoComplete, testName,
      cleanOnStart, startCount, clock, unregistered, null);
  }

  public MRApp(int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, int startCount, Clock clock, String assignedQueue) {
    this(getApplicationAttemptId(applicationId, startCount), getContainerId(
      applicationId, startCount), maps, reduces, autoComplete, testName,
      cleanOnStart, startCount, clock, true, assignedQueue);
  }

  public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId,
      int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, int startCount, boolean unregistered) {
    this(appAttemptId, amContainerId, maps, reduces, autoComplete, testName,
        cleanOnStart, startCount, new SystemClock(), unregistered, null);
  }

  public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId,
      int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, int startCount) {
    this(appAttemptId, amContainerId, maps, reduces, autoComplete, testName,
        cleanOnStart, startCount, new SystemClock(), true, null);
  }

  /**
   * Primary constructor. Sets up the per-test work directory under
   * {@code target/<testName>} (optionally wiping it first) and records the
   * fake cluster parameters used by all the stubbed services.
   *
   * @param appAttemptId attempt id the mock AM runs as
   * @param amContainerId container id the mock AM runs in
   * @param maps number of map tasks
   * @param reduces number of reduce tasks
   * @param autoComplete if true, task attempts complete as soon as launched
   * @param testName used to name the test work directory
   * @param cleanOnStart whether to delete the test work dir first
   * @param startCount AM attempt/start number
   * @param clock clock used by the app
   * @param unregistered initial value for successfullyUnregistered
   * @param assignedQueue queue to pretend the RM assigned, or null
   */
  public MRApp(ApplicationAttemptId appAttemptId, ContainerId amContainerId,
      int maps, int reduces, boolean autoComplete, String testName,
      boolean cleanOnStart, int startCount, Clock clock, boolean unregistered,
      String assignedQueue) {
    super(appAttemptId, amContainerId, NM_HOST, NM_PORT, NM_HTTP_PORT, clock,
        System.currentTimeMillis());
    this.testWorkDir = new File("target", testName);
    testAbsPath = new Path(testWorkDir.getAbsolutePath());
    LOG.info("PathUsed: " + testAbsPath);
    if (cleanOnStart) {
      try {
        FileContext.getLocalFSFileContext().delete(testAbsPath, true);
      } catch (Exception e) {
        LOG.warn("COULD NOT CLEANUP: " + testAbsPath, e);
        throw new YarnRuntimeException("could not cleanup test dir", e);
      }
    }

    this.maps = maps;
    this.reduces = reduces;
    this.autoComplete = autoComplete;
    // If safeToReportTerminationToUser is set to true, we can verify whether
    // the job can reaches the final state when MRAppMaster shuts down.
    this.successfullyUnregistered.set(unregistered);
    this.assignedQueue = assignedQueue;
  }

  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    try {
      //Create the staging directory if it does not exist
      String user = UserGroupInformation.getCurrentUser().getShortUserName();
      Path stagingDir = MRApps.getStagingAreaDir(conf, user);
      FileSystem fs = getFileSystem(conf);
      fs.mkdirs(stagingDir);
    } catch (Exception e) {
      throw new YarnRuntimeException("Error creating staging dir", e);
    }

    super.serviceInit(conf);
    if (this.clusterInfo != null) {
      getContext().getClusterInfo().setMaxContainerCapability(
          this.clusterInfo.getMaxContainerCapability());
    } else {
      getContext().getClusterInfo().setMaxContainerCapability(
          Resource.newInstance(10240, 1));
    }
  }

  /** Submits a job with speculative execution disabled for both phases. */
  public Job submit(Configuration conf) throws Exception {
    //TODO: fix the bug where the speculator gets events with
    //not-fully-constructed objects. For now, disable speculative exec
    return submit(conf, false, false);
  }

  /**
   * Initializes and starts the mock AM, then returns the single job it
   * created. Also writes the job configuration to the staging area.
   */
  public Job submit(Configuration conf, boolean mapSpeculative,
      boolean reduceSpeculative) throws Exception {
    String user = conf.get(MRJobConfig.USER_NAME, UserGroupInformation
        .getCurrentUser().getShortUserName());
    conf.set(MRJobConfig.USER_NAME, user);
    conf.set(MRJobConfig.MR_AM_STAGING_DIR, testAbsPath.toString());
    conf.setBoolean(MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR, true);
    // TODO: fix the bug where the speculator gets events with
    // not-fully-constructed objects. For now, disable speculative exec
    conf.setBoolean(MRJobConfig.MAP_SPECULATIVE, mapSpeculative);
    conf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, reduceSpeculative);

    init(conf);
    start();
    DefaultMetricsSystem.shutdown();
    Job job = getContext().getAllJobs().values().iterator().next();
    if (assignedQueue != null) {
      job.setQueueName(assignedQueue);
    }

    // Write job.xml
    String jobFile = MRApps.getJobFile(conf, user,
        TypeConverter.fromYarn(job.getID()));
    LOG.info("Writing job conf to " + jobFile);
    new File(jobFile).getParentFile().mkdirs();
    // Ensure the stream is closed: the original code passed a bare
    // FileOutputStream to writeXml and leaked the file handle.
    try (FileOutputStream jobFileOut = new FileOutputStream(jobFile)) {
      conf.writeXml(jobFileOut);
    }
    return job;
  }

  /**
   * Polls (up to ~10s) until the job reaches the given internal state,
   * then asserts it.
   */
  public void waitForInternalState(JobImpl job,
      JobStateInternal finalState) throws Exception {
    int timeoutSecs = 0;
    JobStateInternal iState = job.getInternalState();
    while (!finalState.equals(iState) && timeoutSecs++ < 20) {
      System.out.println("Job Internal State is : " + iState
          + " Waiting for Internal state : " + finalState);
      Thread.sleep(500);
      iState = job.getInternalState();
    }
    System.out.println("Task Internal State is : " + iState);
    Assert.assertEquals("Task Internal state is not correct (timedout)",
        finalState, iState);
  }

  /**
   * Polls (up to ~10s) until the task reaches the given internal state,
   * then asserts it.
   */
  public void waitForInternalState(TaskImpl task,
      TaskStateInternal finalState) throws Exception {
    int timeoutSecs = 0;
    TaskReport report = task.getReport();
    TaskStateInternal iState = task.getInternalState();
    while (!finalState.equals(iState) && timeoutSecs++ < 20) {
      System.out.println("Task Internal State is : " + iState
          + " Waiting for Internal state : " + finalState + " progress : "
          + report.getProgress());
      Thread.sleep(500);
      report = task.getReport();
      iState = task.getInternalState();
    }
    System.out.println("Task Internal State is : " + iState);
    Assert.assertEquals("Task Internal state is not correct (timedout)",
        finalState, iState);
  }

  /**
   * Polls (up to ~10s) until the task attempt reaches the given internal
   * state, then asserts it.
   */
  public void waitForInternalState(TaskAttemptImpl attempt,
      TaskAttemptStateInternal finalState) throws Exception {
    int timeoutSecs = 0;
    TaskAttemptReport report = attempt.getReport();
    TaskAttemptStateInternal iState = attempt.getInternalState();
    while (!finalState.equals(iState) && timeoutSecs++ < 20) {
      System.out.println("TaskAttempt Internal State is : " + iState
          + " Waiting for Internal state : " + finalState + " progress : "
          + report.getProgress());
      Thread.sleep(500);
      report = attempt.getReport();
      iState = attempt.getInternalState();
    }
    System.out.println("TaskAttempt Internal State is : " + iState);
    Assert.assertEquals("TaskAttempt Internal state is not correct (timedout)",
        finalState, iState);
  }

  /**
   * Polls (up to ~10s) until the task attempt's reported state matches
   * {@code finalState}, then asserts it.
   */
  public void waitForState(TaskAttempt attempt,
      TaskAttemptState finalState) throws Exception {
    int timeoutSecs = 0;
    TaskAttemptReport report = attempt.getReport();
    while (!finalState.equals(report.getTaskAttemptState()) &&
        timeoutSecs++ < 20) {
      System.out.println("TaskAttempt State is : " + report.getTaskAttemptState() +
          " Waiting for state : " + finalState +
          "   progress : " + report.getProgress());
      report = attempt.getReport();
      Thread.sleep(500);
    }
    System.out.println("TaskAttempt State is : " + report.getTaskAttemptState());
    Assert.assertEquals("TaskAttempt state is not correct (timedout)",
        finalState,
        report.getTaskAttemptState());
  }

  /**
   * Polls (up to ~10s) until the task's reported state matches
   * {@code finalState}, then asserts it.
   */
  public void waitForState(Task task, TaskState finalState) throws Exception {
    int timeoutSecs = 0;
    TaskReport report = task.getReport();
    while (!finalState.equals(report.getTaskState()) &&
        timeoutSecs++ < 20) {
      System.out.println("Task State for " + task.getID() + " is : "
          + report.getTaskState() + " Waiting for state : " + finalState
          + "   progress : " + report.getProgress());
      report = task.getReport();
      Thread.sleep(500);
    }
    System.out.println("Task State is : " + report.getTaskState());
    Assert.assertEquals("Task state is not correct (timedout)", finalState,
        report.getTaskState());
  }

  /**
   * Polls (up to ~10s) until the job's reported state matches
   * {@code finalState}, then asserts the job's current state.
   */
  public void waitForState(Job job, JobState finalState) throws Exception {
    int timeoutSecs = 0;
    JobReport report = job.getReport();
    while (!finalState.equals(report.getJobState()) &&
        timeoutSecs++ < 20) {
      System.out.println("Job State is : " + report.getJobState() +
          " Waiting for state : " + finalState +
          "   map progress : " + report.getMapProgress() +
          "   reduce progress : " + report.getReduceProgress());
      report = job.getReport();
      Thread.sleep(500);
    }
    System.out.println("Job State is : " + report.getJobState());
    Assert.assertEquals("Job state is not correct (timedout)", finalState,
        job.getState());
  }

  /**
   * Waits for the MRApp service itself to reach {@code finalState}.
   * STOPPED uses the service's own stop-wait; other states are polled.
   */
  public void waitForState(Service.STATE finalState) throws Exception {
    if (finalState == Service.STATE.STOPPED) {
      Assert.assertTrue("Timeout while waiting for MRApp to stop",
          waitForServiceToStop(20 * 1000));
    } else {
      int timeoutSecs = 0;
      while (!finalState.equals(getServiceState()) && timeoutSecs++ < 20) {
        System.out.println("MRApp State is : " + getServiceState()
            + " Waiting for state : " + finalState);
        Thread.sleep(500);
      }
      System.out.println("MRApp State is : " + getServiceState());
      Assert.assertEquals("MRApp state is not correct (timedout)", finalState,
          getServiceState());
    }
  }

  /**
   * Sanity-checks the timestamps of every completed job, task and attempt:
   * start &lt;= finish, and finish not in the future.
   */
  public void verifyCompleted() {
    for (Job job : getContext().getAllJobs().values()) {
      JobReport jobReport = job.getReport();
      System.out.println("Job start time :" + jobReport.getStartTime());
      System.out.println("Job finish time :" + jobReport.getFinishTime());
      Assert.assertTrue("Job start time is not less than finish time",
          jobReport.getStartTime() <= jobReport.getFinishTime());
      Assert.assertTrue("Job finish time is in future",
          jobReport.getFinishTime() <= System.currentTimeMillis());
      for (Task task : job.getTasks().values()) {
        TaskReport taskReport = task.getReport();
        System.out.println("Task start time : " + taskReport.getStartTime());
        System.out.println("Task finish time : " + taskReport.getFinishTime());
        Assert.assertTrue("Task start time is not less than finish time",
            taskReport.getStartTime() <= taskReport.getFinishTime());
        for (TaskAttempt attempt : task.getAttempts().values()) {
          TaskAttemptReport attemptReport = attempt.getReport();
          Assert.assertTrue("Attempt start time is not less than finish time",
              attemptReport.getStartTime() <= attemptReport.getFinishTime());
        }
      }
    }
  }

  /**
   * Creates a {@link TestJob} (which skips split-file lookup) and registers
   * a handler that stops this app when the job finishes.
   */
  @Override
  protected Job createJob(Configuration conf, JobStateInternal forcedState,
      String diagnostic) {
    UserGroupInformation currentUser = null;
    try {
      currentUser = UserGroupInformation.getCurrentUser();
    } catch (IOException e) {
      throw new YarnRuntimeException(e);
    }
    Job newJob = new TestJob(getJobId(), getAttemptID(), conf,
        getDispatcher().getEventHandler(),
        getTaskAttemptListener(), getContext().getClock(),
        getCommitter(), isNewApiCommitter(),
        currentUser.getUserName(), getContext(),
        forcedState, diagnostic);
    ((AppContext) getContext()).getAllJobs().put(newJob.getID(), newJob);

    getDispatcher().register(JobFinishEvent.Type.class,
        new EventHandler<JobFinishEvent>() {
          @Override
          public void handle(JobFinishEvent event) {
            stop();
          }
        });

    return newJob;
  }

  /**
   * Returns a finishing monitor whose register() immediately fires
   * TA_CONTAINER_COMPLETED instead of waiting for a real container.
   */
  @Override
  protected TaskAttemptFinishingMonitor
      createTaskAttemptFinishingMonitor(
          EventHandler eventHandler) {
    return new TaskAttemptFinishingMonitor(eventHandler) {
      @Override
      public synchronized void register(TaskAttemptId attemptID) {
        getContext().getEventHandler().handle(
            new TaskAttemptEvent(attemptID,
                TaskAttemptEventType.TA_CONTAINER_COMPLETED));
      }
    };
  }

  /** Returns a no-op task attempt listener bound to a fake address. */
  @Override
  protected TaskAttemptListener createTaskAttemptListener(AppContext context) {
    return new TaskAttemptListener(){
      @Override
      public InetSocketAddress getAddress() {
        return NetUtils.createSocketAddr("localhost:54321");
      }

      @Override
      public void registerLaunchedTask(TaskAttemptId attemptID,
          WrappedJvmID jvmID) {
      }

      @Override
      public void unregister(TaskAttemptId attemptID, WrappedJvmID jvmID) {
      }

      @Override
      public void registerPendingTask(org.apache.hadoop.mapred.Task task,
          WrappedJvmID jvmID) {
      }
    };
  }

  @Override
  protected EventHandler<JobHistoryEvent> createJobHistoryHandler(
      AppContext context) {//disable history
    return new EventHandler<JobHistoryEvent>() {
      @Override
      public void handle(JobHistoryEvent event) {
      }
    };
  }

  @Override
  protected ContainerLauncher createContainerLauncher(AppContext context) {
    return new MockContainerLauncher();
  }

  /**
   * Container launcher that never talks to a real NodeManager: launches are
   * acknowledged locally and cleanups immediately report the container as
   * cleaned.
   */
  protected class MockContainerLauncher implements ContainerLauncher {

    //We are running locally so set the shuffle port to -1
    int shufflePort = -1;

    public MockContainerLauncher() {
    }

    @Override
    public void handle(ContainerLauncherEvent event) {
      switch (event.getType()) {
      case CONTAINER_REMOTE_LAUNCH:
        getContext().getEventHandler().handle(
            new TaskAttemptContainerLaunchedEvent(event.getTaskAttemptID(),
                shufflePort));

        attemptLaunched(event.getTaskAttemptID());
        break;
      case CONTAINER_REMOTE_CLEANUP:
        getContext().getEventHandler().handle(
            new TaskAttemptEvent(event.getTaskAttemptID(),
                TaskAttemptEventType.TA_CONTAINER_CLEANED));
        break;
      case CONTAINER_COMPLETED:
        break;
      }
    }
  }

  /**
   * Hook invoked when a task attempt is "launched"; if autoComplete is set,
   * the attempt is immediately marked done.
   */
  protected void attemptLaunched(TaskAttemptId attemptID) {
    if (autoComplete) {
      // send the done event
      getContext().getEventHandler().handle(
          new TaskAttemptEvent(attemptID,
              TaskAttemptEventType.TA_DONE));
    }
  }

  @Override
  protected ContainerAllocator createContainerAllocator(
      ClientService clientService, final AppContext context) {
    return new MRAppContainerAllocator();
  }

  /**
   * Allocator that hands out fake local containers (with fabricated tokens)
   * instead of asking the ResourceManager.
   */
  protected class MRAppContainerAllocator
      implements ContainerAllocator, RMHeartbeatHandler {
    private int containerCount;

    @Override
    public void handle(ContainerAllocatorEvent event) {
      ContainerId cId =
          ContainerId.newContainerId(getContext().getApplicationAttemptId(),
            containerCount++);
      NodeId nodeId = NodeId.newInstance(NM_HOST, NM_PORT);
      Resource resource = Resource.newInstance(1234, 2);
      ContainerTokenIdentifier containerTokenIdentifier =
          new ContainerTokenIdentifier(cId, nodeId.toString(), "user",
          resource, System.currentTimeMillis() + 10000, 42, 42,
          Priority.newInstance(0), 0);
      Token containerToken = newContainerToken(nodeId, "password".getBytes(),
            containerTokenIdentifier);
      Container container = Container.newInstance(cId, nodeId,
          NM_HOST + ":" + NM_HTTP_PORT, resource, null, containerToken);
      JobID id = TypeConverter.fromYarn(applicationId);
      JobId jobId = TypeConverter.toYarn(id);
      getContext().getEventHandler().handle(new JobHistoryEvent(jobId,
          new NormalizedResourceEvent(
              org.apache.hadoop.mapreduce.TaskType.REDUCE,
          100)));
      getContext().getEventHandler().handle(new JobHistoryEvent(jobId,
          new NormalizedResourceEvent(
              org.apache.hadoop.mapreduce.TaskType.MAP,
          100)));
      getContext().getEventHandler().handle(
          new TaskAttemptContainerAssignedEvent(event.getAttemptID(),
              container, null));
    }

    @Override
    public long getLastHeartbeatTime() {
      return getContext().getClock().getTime();
    }

    @Override
    public void runOnNextHeartbeat(Runnable callback) {
      callback.run();
    }
  }

  /**
   * Wraps the real committer so job-level operations delegate through but
   * all task-level operations are no-ops (no task ever needs to commit).
   */
  @Override
  protected EventHandler<CommitterEvent> createCommitterEventHandler(
      AppContext context, final OutputCommitter committer) {
    // create an output committer with the task methods stubbed out
    OutputCommitter stubbedCommitter = new OutputCommitter() {
      @Override
      public void setupJob(JobContext jobContext) throws IOException {
        committer.setupJob(jobContext);
      }
      @SuppressWarnings("deprecation")
      @Override
      public void cleanupJob(JobContext jobContext) throws IOException {
        committer.cleanupJob(jobContext);
      }
      @Override
      public void commitJob(JobContext jobContext) throws IOException {
        committer.commitJob(jobContext);
      }
      @Override
      public void abortJob(JobContext jobContext, State state)
          throws IOException {
        committer.abortJob(jobContext, state);
      }
      @Override
      public boolean isRecoverySupported(JobContext jobContext) throws IOException{
        return committer.isRecoverySupported(jobContext);
      }
      @SuppressWarnings("deprecation")
      @Override
      public boolean isRecoverySupported() {
        return committer.isRecoverySupported();
      }

      @Override
      public void setupTask(TaskAttemptContext taskContext)
          throws IOException {
      }
      @Override
      public boolean needsTaskCommit(TaskAttemptContext taskContext)
          throws IOException {
        return false;
      }
      @Override
      public void commitTask(TaskAttemptContext taskContext)
          throws IOException {
      }
      @Override
      public void abortTask(TaskAttemptContext taskContext)
          throws IOException {
      }
      @Override
      public void recoverTask(TaskAttemptContext taskContext)
          throws IOException {
      }
    };
    return new CommitterEventHandler(context, stubbedCommitter,
        getRMHeartbeatHandler());
  }

  /** Returns a client service with fake bind address and no HTTP port. */
  @Override
  protected ClientService createClientService(AppContext context) {
    return new MRClientService(context) {
      @Override
      public InetSocketAddress getBindAddress() {
        return NetUtils.createSocketAddr("localhost:9876");
      }

      @Override
      public int getHttpPort() {
        return -1;
      }
    };
  }

  public void setClusterInfo(ClusterInfo clusterInfo) {
    // Only useful if set before a job is started.
    if (getServiceState() == Service.STATE.NOTINITED
        || getServiceState() == Service.STATE.INITED) {
      this.clusterInfo = clusterInfo;
    } else {
      throw new IllegalStateException(
          "ClusterInfo can only be set before the App is STARTED");
    }
  }

  /**
   * JobImpl variant whose NEW -&gt; INITED transition uses
   * {@link TestInitTransition}, skipping split-file lookup.
   */
  class TestJob extends JobImpl {
    //override the init transition
    private final TestInitTransition initTransition = new TestInitTransition(
        maps, reduces);
    StateMachineFactory<JobImpl, JobStateInternal, JobEventType, JobEvent> localFactory
        = stateMachineFactory.addTransition(JobStateInternal.NEW,
            EnumSet.of(JobStateInternal.INITED, JobStateInternal.FAILED),
            JobEventType.JOB_INIT,
            // This is abusive.
            initTransition);

    private final StateMachine<JobStateInternal, JobEventType, JobEvent>
        localStateMachine;

    @Override
    protected StateMachine<JobStateInternal, JobEventType, JobEvent> getStateMachine() {
      return localStateMachine;
    }

    @SuppressWarnings("rawtypes")
    public TestJob(JobId jobId, ApplicationAttemptId applicationAttemptId,
        Configuration conf, EventHandler eventHandler,
        TaskAttemptListener taskAttemptListener, Clock clock,
        OutputCommitter committer, boolean newApiCommitter,
        String user, AppContext appContext,
        JobStateInternal forcedState, String diagnostic) {
      super(jobId, getApplicationAttemptId(applicationId, getStartCount()),
          conf, eventHandler, taskAttemptListener,
          new JobTokenSecretManager(), new Credentials(), clock,
          getCompletedTaskFromPreviousRun(), metrics, committer,
          newApiCommitter, user, System.currentTimeMillis(), getAllAMInfos(),
          appContext, forcedState, diagnostic);

      // This "this leak" is okay because the retained pointer is in an
      //  instance variable.
      localStateMachine = localFactory.make(this);
    }
  }

  //Override InitTransition to not look for split files etc
  static class TestInitTransition extends JobImpl.InitTransition {
    private int maps;
    private int reduces;
    TestInitTransition(int maps, int reduces) {
      this.maps = maps;
      this.reduces = reduces;
    }
    @Override
    protected void setup(JobImpl job) throws IOException {
      super.setup(job);
      job.conf.setInt(MRJobConfig.NUM_REDUCES, reduces);
      job.remoteJobConfFile = new Path("test");
    }
    @Override
    protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
      TaskSplitMetaInfo[] splits = new TaskSplitMetaInfo[maps];
      for (int i = 0; i < maps ; i++) {
        splits[i] = new TaskSplitMetaInfo();
      }
      return splits;
    }
  }

  /**
   * Builds a container token for tests. NOTE: uses ip:port as the token
   * service, as the RPC layer client expects.
   */
  public static Token newContainerToken(NodeId nodeId,
      byte[] password, ContainerTokenIdentifier tokenIdentifier) {
    // RPC layer client expects ip:port as service for tokens
    InetSocketAddress addr =
        NetUtils.createSocketAddrForHost(nodeId.getHost(), nodeId.getPort());
    // NOTE: use SecurityUtil.setTokenService if this becomes a "real" token
    Token containerToken =
        Token.newInstance(tokenIdentifier.getBytes(),
          ContainerTokenIdentifier.KIND.toString(), password, SecurityUtil
            .buildTokenService(addr).toString());
    return containerToken;
  }

  /** Builds a ContainerId from raw app/attempt/timestamp/container numbers. */
  public static ContainerId newContainerId(int appId, int appAttemptId,
      long timestamp, int containerId) {
    ApplicationId applicationId = ApplicationId.newInstance(timestamp, appId);
    ApplicationAttemptId applicationAttemptId =
        ApplicationAttemptId.newInstance(applicationId, appAttemptId);
    return ContainerId.newContainerId(applicationAttemptId, containerId);
  }

  /** Decodes the identifier embedded in a YARN container token. */
  public static ContainerTokenIdentifier newContainerTokenIdentifier(
      Token containerToken) throws IOException {
    org.apache.hadoop.security.token.Token<ContainerTokenIdentifier> token =
        new org.apache.hadoop.security.token.Token<ContainerTokenIdentifier>(
            containerToken.getIdentifier()
                .array(), containerToken.getPassword().array(), new Text(
                containerToken.getKind()),
            new Text(containerToken.getService()));
    return token.decodeIdentifier();
  }

  @Override
  protected void shutdownTaskLog() {
    // Avoid closing the logging system during unit tests,
    // otherwise subsequent MRApp instances in the same test
    // will fail to log anything.
  }

  @Override
  protected void shutdownLogManager() {
    // Avoid closing the logging system during unit tests,
    // otherwise subsequent MRApp instances in the same test
    // will fail to log anything.
  }
}
|
|
/*
* Copyright (c) the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.proliming.commons.utils;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.UndeclaredThrowableException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Simple utility class for working with the reflection API and handling
 * reflection exceptions.
 * <p/>
 * <p>Only intended for internal use.
 */
public class ReflectionUtils {
    /**
     * ReflectionUtils instances should NOT be constructed in standard programming.
     */
    private ReflectionUtils() {
    }
    /**
     * Cache for {@link Class#getDeclaredMethods()} plus equivalent default methods
     * from Java 8 based interfaces, allowing for fast iteration.
     */
    private static final Map<Class<?>, Method[]> declaredMethodsCache =
            new ConcurrentHashMap<Class<?>, Method[]>(256);
    /**
     * Cache for {@link Class#getDeclaredFields()}, allowing for fast iteration.
     */
    private static final Map<Class<?>, Field[]> declaredFieldsCache =
            new ConcurrentHashMap<Class<?>, Field[]>(256);
    /**
     * Attempt to find a {@link Field field} on the supplied {@link Class} with the
     * supplied {@code name}. Searches all superclasses up to {@link Object}.
     *
     * @param cls the class to introspect
     * @param name the name of the field
     *
     * @return the corresponding Field object, or {@code null} if not found
     */
    public static Field findField(Class<?> cls, String name) {
        return findField(cls, name, null);
    }
    /**
     * Attempt to find a {@link Field field} on the supplied {@link Class} with the
     * supplied {@code name} and/or {@link Class type}. Searches all superclasses
     * up to {@link Object}.
     *
     * @param cls the class to introspect
     * @param name the name of the field (may be {@code null} if type is specified)
     * @param type the type of the field (may be {@code null} if name is specified)
     *
     * @return the corresponding Field object, or {@code null} if not found
     */
    public static Field findField(Class<?> cls, String name, Class<?> type) {
        Verify.notNull(cls, "Class must not be null");
        Verify.verify(name != null || type != null, "Either name or type of the field must be specified");
        Class<?> searchType = cls;
        // Walk up the hierarchy; stop before Object, which declares no interesting fields.
        while (Object.class != searchType && searchType != null) {
            Field[] fields = getDeclaredFields(searchType);
            for (Field field : fields) {
                if ((name == null || name.equals(field.getName())) &&
                        (type == null || type.equals(field.getType()))) {
                    return field;
                }
            }
            searchType = searchType.getSuperclass();
        }
        return null;
    }
    /**
     * Set the field represented by the supplied {@link Field field object} on the
     * specified {@link Object target object} to the specified {@code value}.
     * In accordance with {@link Field#set(Object, Object)} semantics, the new value
     * is automatically unwrapped if the underlying field has a primitive type.
     * <p>Thrown exceptions are handled via a call to {@link #handleReflectionException(Exception)}.
     *
     * @param field the field to set
     * @param target the target object on which to set the field
     * @param value the value to set; may be {@code null}
     */
    public static void setField(Field field, Object target, Object value) {
        try {
            field.set(target, value);
        } catch (IllegalAccessException ex) {
            handleReflectionException(ex);
            // handleReflectionException always throws; this is a defensive fallback.
            throw new IllegalStateException(
                    "Unexpected reflection exception - " + ex.getClass().getName() + ": " + ex.getMessage());
        }
    }
    /**
     * Get the field represented by the supplied {@link Field field object} on the
     * specified {@link Object target object}. In accordance with {@link Field#get(Object)}
     * semantics, the returned value is automatically wrapped if the underlying field
     * has a primitive type.
     * <p>Thrown exceptions are handled via a call to {@link #handleReflectionException(Exception)}.
     *
     * @param field the field to get
     * @param target the target object from which to get the field
     *
     * @return the field's current value
     */
    public static Object getField(Field field, Object target) {
        try {
            return field.get(target);
        } catch (IllegalAccessException ex) {
            handleReflectionException(ex);
            // handleReflectionException always throws; this is a defensive fallback.
            throw new IllegalStateException(
                    "Unexpected reflection exception - " + ex.getClass().getName() + ": " + ex.getMessage());
        }
    }
    /**
     * Attempt to find a {@link Method} on the supplied class with the supplied name
     * and no parameters. Searches all superclasses up to {@code Object}.
     * <p>Returns {@code null} if no {@link Method} can be found.
     *
     * @param cls the class to introspect
     * @param name the name of the method
     *
     * @return the Method object, or {@code null} if none found
     */
    public static Method findMethod(Class<?> cls, String name) {
        return findMethod(cls, name, new Class<?>[0]);
    }
    /**
     * Attempt to find a {@link Method} on the supplied class with the supplied name
     * and parameter types. Searches all superclasses up to {@code Object}.
     * <p>Returns {@code null} if no {@link Method} can be found.
     *
     * @param cls the class to introspect
     * @param name the name of the method
     * @param paramTypes the parameter types of the method
     * (may be {@code null} to indicate any signature)
     *
     * @return the Method object, or {@code null} if none found
     */
    public static Method findMethod(Class<?> cls, String name, Class<?>... paramTypes) {
        Verify.notNull(cls, "Class must not be null");
        Verify.notNull(name, "Method name must not be null");
        Class<?> searchType = cls;
        while (searchType != null) {
            // For interfaces, getMethods() also exposes inherited interface methods.
            Method[] methods = (searchType.isInterface() ? searchType.getMethods() : getDeclaredMethods(searchType));
            for (Method method : methods) {
                if (name.equals(method.getName()) &&
                        (paramTypes == null || Arrays.equals(paramTypes, method.getParameterTypes()))) {
                    return method;
                }
            }
            searchType = searchType.getSuperclass();
        }
        return null;
    }
    /**
     * Invoke the specified {@link Method} against the supplied target object with no arguments.
     * The target object can be {@code null} when invoking a static {@link Method}.
     * <p>Thrown exceptions are handled via a call to {@link #handleReflectionException}.
     *
     * @param method the method to invoke
     * @param target the target object to invoke the method on
     *
     * @return the invocation result, if any
     *
     * @see #invokeMethod(Method, Object, Object[])
     */
    public static Object invokeMethod(Method method, Object target) {
        return invokeMethod(method, target, new Object[0]);
    }
    /**
     * Invoke the specified {@link Method} against the supplied target object with the
     * supplied arguments. The target object can be {@code null} when invoking a
     * static {@link Method}.
     * <p>Thrown exceptions are handled via a call to {@link #handleReflectionException}.
     *
     * @param method the method to invoke
     * @param target the target object to invoke the method on
     * @param args the invocation arguments (may be {@code null})
     *
     * @return the invocation result, if any
     */
    public static Object invokeMethod(Method method, Object target, Object... args) {
        try {
            return method.invoke(target, args);
        } catch (Exception ex) {
            handleReflectionException(ex);
        }
        throw new IllegalStateException("Should never get here");
    }
    /**
     * Invoke the specified JDBC API {@link Method} against the supplied target
     * object with no arguments.
     *
     * @param method the method to invoke
     * @param target the target object to invoke the method on
     *
     * @return the invocation result, if any
     *
     * @throws SQLException the JDBC API SQLException to rethrow (if any)
     * @see #invokeJdbcMethod(Method, Object, Object[])
     */
    public static Object invokeJdbcMethod(Method method, Object target) throws SQLException {
        return invokeJdbcMethod(method, target, new Object[0]);
    }
    /**
     * Invoke the specified JDBC API {@link Method} against the supplied target
     * object with the supplied arguments.
     *
     * @param method the method to invoke
     * @param target the target object to invoke the method on
     * @param args the invocation arguments (may be {@code null})
     *
     * @return the invocation result, if any
     *
     * @throws SQLException the JDBC API SQLException to rethrow (if any)
     * @see #invokeMethod(Method, Object, Object[])
     */
    public static Object invokeJdbcMethod(Method method, Object target, Object... args) throws SQLException {
        try {
            return method.invoke(target, args);
        } catch (IllegalAccessException ex) {
            handleReflectionException(ex);
        } catch (InvocationTargetException ex) {
            // Unwrap and rethrow SQLException as-is, per the JDBC contract.
            if (ex.getTargetException() instanceof SQLException) {
                throw (SQLException) ex.getTargetException();
            }
            handleInvocationTargetException(ex);
        }
        throw new IllegalStateException("Should never get here");
    }
    /**
     * Handle the given reflection exception. Should only be called if no
     * checked exception is expected to be thrown by the target method.
     * <p>Throws the underlying RuntimeException or Error in case of an
     * InvocationTargetException with such a root cause. Throws an
     * IllegalStateException with an appropriate message else.
     *
     * @param ex the reflection exception to handle
     */
    public static void handleReflectionException(Exception ex) {
        if (ex instanceof NoSuchMethodException) {
            throw new IllegalStateException("Method not found: " + ex.getMessage());
        }
        if (ex instanceof IllegalAccessException) {
            throw new IllegalStateException("Could not access method: " + ex.getMessage());
        }
        if (ex instanceof InvocationTargetException) {
            handleInvocationTargetException((InvocationTargetException) ex);
        }
        if (ex instanceof RuntimeException) {
            throw (RuntimeException) ex;
        }
        throw new UndeclaredThrowableException(ex);
    }
    /**
     * Handle the given invocation target exception. Should only be called if no
     * checked exception is expected to be thrown by the target method.
     * <p>Throws the underlying RuntimeException or Error in case of such a root
     * cause. Throws an IllegalStateException else.
     *
     * @param ex the invocation target exception to handle
     */
    public static void handleInvocationTargetException(InvocationTargetException ex) {
        rethrowRuntimeException(ex.getTargetException());
    }
    /**
     * Rethrow the given {@link Throwable exception}, which is presumably the
     * <em>target exception</em> of an {@link InvocationTargetException}. Should
     * only be called if no checked exception is expected to be thrown by the
     * target method.
     * <p>Rethrows the underlying exception cast to an {@link RuntimeException} or
     * {@link Error} if appropriate; otherwise, throws an
     * {@link IllegalStateException}.
     *
     * @param ex the exception to rethrow
     *
     * @throws RuntimeException the rethrown exception
     */
    public static void rethrowRuntimeException(Throwable ex) {
        if (ex instanceof RuntimeException) {
            throw (RuntimeException) ex;
        }
        if (ex instanceof Error) {
            throw (Error) ex;
        }
        throw new UndeclaredThrowableException(ex);
    }
    /**
     * Rethrow the given {@link Throwable exception}, which is presumably the
     * <em>target exception</em> of an {@link InvocationTargetException}. Should
     * only be called if no checked exception is expected to be thrown by the
     * target method.
     * <p>Rethrows the underlying exception cast to an {@link Exception} or
     * {@link Error} if appropriate; otherwise, throws an
     * {@link IllegalStateException}.
     *
     * @param ex the exception to rethrow
     *
     * @throws Exception the rethrown exception (in case of a checked exception)
     */
    public static void rethrowException(Throwable ex) throws Exception {
        if (ex instanceof Exception) {
            throw (Exception) ex;
        }
        if (ex instanceof Error) {
            throw (Error) ex;
        }
        throw new UndeclaredThrowableException(ex);
    }
    /**
     * Determine whether the given method explicitly declares the given
     * exception or one of its superclasses, which means that an exception of
     * that type can be propagated as-is within a reflective invocation.
     *
     * @param method the declaring method
     * @param exceptionType the exception to throw
     *
     * @return {@code true} if the exception can be thrown as-is;
     * {@code false} if it needs to be wrapped
     */
    public static boolean declaresException(Method method, Class<?> exceptionType) {
        Verify.notNull(method, "Method must not be null");
        Class<?>[] declaredExceptions = method.getExceptionTypes();
        for (Class<?> declaredException : declaredExceptions) {
            if (declaredException.isAssignableFrom(exceptionType)) {
                return true;
            }
        }
        return false;
    }
    /**
     * Determine whether the given field is a "public static final" constant.
     *
     * @param field the field to check
     */
    public static boolean isPublicStaticFinal(Field field) {
        int modifiers = field.getModifiers();
        return (Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers));
    }
    /**
     * Determine whether the given method is an "equals" method.
     *
     * @see Object#equals(Object)
     */
    public static boolean isEqualsMethod(Method method) {
        if (method == null || !method.getName().equals("equals")) {
            return false;
        }
        Class<?>[] paramTypes = method.getParameterTypes();
        return (paramTypes.length == 1 && paramTypes[0] == Object.class);
    }
    /**
     * Determine whether the given method is a "hashCode" method.
     *
     * @see Object#hashCode()
     */
    public static boolean isHashCodeMethod(Method method) {
        return (method != null && method.getName().equals("hashCode") && method.getParameterTypes().length == 0);
    }
    /**
     * Determine whether the given method is a "toString" method.
     *
     * @see Object#toString()
     */
    public static boolean isToStringMethod(Method method) {
        return (method != null && method.getName().equals("toString") && method.getParameterTypes().length == 0);
    }
    /**
     * Determine whether the given method is originally declared by {@link Object}.
     */
    public static boolean isObjectMethod(Method method) {
        if (method == null) {
            return false;
        }
        try {
            Object.class.getDeclaredMethod(method.getName(), method.getParameterTypes());
            return true;
        } catch (Exception ex) {
            return false;
        }
    }
    /**
     * Make the given field accessible, explicitly setting it accessible if
     * necessary. The {@code setAccessible(true)} method is only called
     * when actually necessary, to avoid unnecessary conflicts with a JVM
     * SecurityManager (if active).
     *
     * @param field the field to make accessible
     *
     * @see Field#setAccessible
     */
    public static void makeAccessible(Field field) {
        if ((!Modifier.isPublic(field.getModifiers()) ||
                !Modifier.isPublic(field.getDeclaringClass().getModifiers()) ||
                Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) {
            field.setAccessible(true);
        }
    }
    /**
     * Make the given method accessible, explicitly setting it accessible if
     * necessary. The {@code setAccessible(true)} method is only called
     * when actually necessary, to avoid unnecessary conflicts with a JVM
     * SecurityManager (if active).
     *
     * @param method the method to make accessible
     *
     * @see Method#setAccessible
     */
    public static void makeAccessible(Method method) {
        if ((!Modifier.isPublic(method.getModifiers()) ||
                !Modifier.isPublic(method.getDeclaringClass().getModifiers())) && !method.isAccessible()) {
            method.setAccessible(true);
        }
    }
    /**
     * Make the given constructor accessible, explicitly setting it accessible
     * if necessary. The {@code setAccessible(true)} method is only called
     * when actually necessary, to avoid unnecessary conflicts with a JVM
     * SecurityManager (if active).
     *
     * @param ctor the constructor to make accessible
     *
     * @see Constructor#setAccessible
     */
    public static void makeAccessible(Constructor<?> ctor) {
        if ((!Modifier.isPublic(ctor.getModifiers()) ||
                !Modifier.isPublic(ctor.getDeclaringClass().getModifiers())) && !ctor.isAccessible()) {
            ctor.setAccessible(true);
        }
    }
    /**
     * Perform the given callback operation on all matching methods of the given
     * class, as locally declared or equivalent thereof (such as default methods
     * on Java 8 based interfaces that the given class implements).
     *
     * @param cls the class to introspect
     * @param mc the callback to invoke for each method
     *
     * @see #doWithMethods
     * @since 4.2
     */
    public static void doWithLocalMethods(Class<?> cls, MethodCallback mc) {
        Method[] methods = getDeclaredMethods(cls);
        for (Method method : methods) {
            try {
                mc.doWith(method);
            } catch (IllegalAccessException ex) {
                throw new IllegalStateException("Not allowed to access method '" + method.getName() + "': " + ex);
            }
        }
    }
    /**
     * Perform the given callback operation on all matching methods of the given
     * class and superclasses.
     * <p>The same named method occurring on subclass and superclass will appear
     * twice, unless excluded by a {@link MethodFilter}.
     *
     * @param cls the class to introspect
     * @param mc the callback to invoke for each method
     *
     * @see #doWithMethods(Class, MethodCallback, MethodFilter)
     */
    public static void doWithMethods(Class<?> cls, MethodCallback mc) {
        doWithMethods(cls, mc, null);
    }
    /**
     * Perform the given callback operation on all matching methods of the given
     * class and superclasses (or given interface and super-interfaces).
     * <p>The same named method occurring on subclass and superclass will appear
     * twice, unless excluded by the specified {@link MethodFilter}.
     *
     * @param cls the class to introspect
     * @param mc the callback to invoke for each method
     * @param mf the filter that determines the methods to apply the callback to
     */
    public static void doWithMethods(Class<?> cls, MethodCallback mc, MethodFilter mf) {
        // Keep backing up the inheritance hierarchy.
        Method[] methods = getDeclaredMethods(cls);
        for (Method method : methods) {
            if (mf != null && !mf.matches(method)) {
                continue;
            }
            try {
                mc.doWith(method);
            } catch (IllegalAccessException ex) {
                throw new IllegalStateException("Not allowed to access method '" + method.getName() + "': " + ex);
            }
        }
        if (cls.getSuperclass() != null) {
            doWithMethods(cls.getSuperclass(), mc, mf);
        } else if (cls.isInterface()) {
            for (Class<?> superIfc : cls.getInterfaces()) {
                doWithMethods(superIfc, mc, mf);
            }
        }
    }
    /**
     * Get all declared methods on the leaf class and all superclasses.
     * Leaf class methods are included first.
     *
     * @param leafClass the class to introspect
     */
    public static Method[] getAllDeclaredMethods(Class<?> leafClass) {
        final List<Method> methods = new ArrayList<Method>(32);
        doWithMethods(leafClass, new MethodCallback() {
            @Override
            public void doWith(Method method) {
                methods.add(method);
            }
        });
        return methods.toArray(new Method[methods.size()]);
    }
    /**
     * Get the unique set of declared methods on the leaf class and all superclasses.
     * Leaf class methods are included first and while traversing the superclass hierarchy
     * any methods found with signatures matching a method already included are filtered out.
     *
     * @param leafClass the class to introspect
     */
    public static Method[] getUniqueDeclaredMethods(Class<?> leafClass) {
        final List<Method> methods = new ArrayList<Method>(32);
        doWithMethods(leafClass, new MethodCallback() {
            @Override
            public void doWith(Method method) {
                boolean knownSignature = false;
                Method methodBeingOverriddenWithCovariantReturnType = null;
                for (Method existingMethod : methods) {
                    if (method.getName().equals(existingMethod.getName()) &&
                            Arrays.equals(method.getParameterTypes(), existingMethod.getParameterTypes())) {
                        // Is this a covariant return type situation?
                        if (existingMethod.getReturnType() != method.getReturnType() &&
                                existingMethod.getReturnType().isAssignableFrom(method.getReturnType())) {
                            methodBeingOverriddenWithCovariantReturnType = existingMethod;
                        } else {
                            knownSignature = true;
                        }
                        break;
                    }
                }
                if (methodBeingOverriddenWithCovariantReturnType != null) {
                    methods.remove(methodBeingOverriddenWithCovariantReturnType);
                }
                if (!knownSignature) {
                    methods.add(method);
                }
            }
        });
        return methods.toArray(new Method[methods.size()]);
    }
    /**
     * This variant retrieves {@link Class#getDeclaredMethods()} from a local cache
     * in order to avoid the JVM's SecurityManager check and defensive array copying.
     * In addition, it also includes Java 8 default methods from locally implemented
     * interfaces, since those are effectively to be treated just like declared methods.
     *
     * @param cls the class to introspect
     *
     * @return the cached array of methods
     *
     * @see Class#getDeclaredMethods()
     */
    private static Method[] getDeclaredMethods(Class<?> cls) {
        Method[] result = declaredMethodsCache.get(cls);
        if (result == null) {
            Method[] declaredMethods = cls.getDeclaredMethods();
            List<Method> defaultMethods = findConcreteMethodsOnInterfaces(cls);
            if (defaultMethods != null) {
                result = new Method[declaredMethods.length + defaultMethods.size()];
                System.arraycopy(declaredMethods, 0, result, 0, declaredMethods.length);
                int index = declaredMethods.length;
                for (Method defaultMethod : defaultMethods) {
                    result[index] = defaultMethod;
                    index++;
                }
            } else {
                result = declaredMethods;
            }
            // Benign race: concurrent callers may compute the same array; last write wins.
            declaredMethodsCache.put(cls, result);
        }
        return result;
    }
    /**
     * Collect non-abstract (i.e. default) methods declared on the interfaces
     * directly implemented by the given class.
     *
     * @param cls the class to introspect
     *
     * @return the concrete interface methods, or {@code null} if there are none
     */
    private static List<Method> findConcreteMethodsOnInterfaces(Class<?> cls) {
        List<Method> result = null;
        for (Class<?> ifc : cls.getInterfaces()) {
            for (Method ifcMethod : ifc.getMethods()) {
                if (!Modifier.isAbstract(ifcMethod.getModifiers())) {
                    if (result == null) {
                        result = new ArrayList<Method>();
                    }
                    result.add(ifcMethod);
                }
            }
        }
        return result;
    }
    /**
     * Invoke the given callback on all fields in the target class, going up the
     * class hierarchy to get all declared fields.
     *
     * @param cls the target class to analyze
     * @param fc the callback to invoke for each field
     *
     * @see #doWithFields
     * @since 4.2
     */
    public static void doWithLocalFields(Class<?> cls, FieldCallback fc) {
        for (Field field : getDeclaredFields(cls)) {
            try {
                fc.doWith(field);
            } catch (IllegalAccessException ex) {
                throw new IllegalStateException("Not allowed to access field '" + field.getName() + "': " + ex);
            }
        }
    }
    /**
     * Invoke the given callback on all fields in the target class, going up the
     * class hierarchy to get all declared fields.
     *
     * @param cls the target class to analyze
     * @param fc the callback to invoke for each field
     */
    public static void doWithFields(Class<?> cls, FieldCallback fc) {
        doWithFields(cls, fc, null);
    }
    /**
     * Invoke the given callback on all fields in the target class, going up the
     * class hierarchy to get all declared fields.
     *
     * @param cls the target class to analyze
     * @param fc the callback to invoke for each field
     * @param ff the filter that determines the fields to apply the callback to
     */
    public static void doWithFields(Class<?> cls, FieldCallback fc, FieldFilter ff) {
        // Keep backing up the inheritance hierarchy.
        Class<?> targetClass = cls;
        do {
            Field[] fields = getDeclaredFields(targetClass);
            for (Field field : fields) {
                if (ff != null && !ff.matches(field)) {
                    continue;
                }
                try {
                    fc.doWith(field);
                } catch (IllegalAccessException ex) {
                    throw new IllegalStateException("Not allowed to access field '" + field.getName() + "': " + ex);
                }
            }
            targetClass = targetClass.getSuperclass();
        }
        while (targetClass != null && targetClass != Object.class);
    }
    /**
     * This variant retrieves {@link Class#getDeclaredFields()} from a local cache
     * in order to avoid the JVM's SecurityManager check and defensive array copying.
     *
     * @param cls the class to introspect
     *
     * @return the cached array of fields
     *
     * @see Class#getDeclaredFields()
     */
    private static Field[] getDeclaredFields(Class<?> cls) {
        Field[] result = declaredFieldsCache.get(cls);
        if (result == null) {
            result = cls.getDeclaredFields();
            // Benign race: concurrent callers may compute the same array; last write wins.
            declaredFieldsCache.put(cls, result);
        }
        return result;
    }
    /**
     * Given the source object and the destination, which must be the same class
     * or a subclass, copy all fields, including inherited fields. Designed to
     * work on objects with public no-arg constructors.
     */
    public static void shallowCopyFieldState(final Object src, final Object dest) {
        if (src == null) {
            throw new IllegalArgumentException("Source for field copy cannot be null");
        }
        if (dest == null) {
            throw new IllegalArgumentException("Destination for field copy cannot be null");
        }
        if (!src.getClass().isAssignableFrom(dest.getClass())) {
            throw new IllegalArgumentException("Destination class [" + dest.getClass().getName() +
                    "] must be same or subclass as source class [" + src.getClass().getName() + "]");
        }
        doWithFields(src.getClass(), new FieldCallback() {
            @Override
            public void doWith(Field field) throws IllegalArgumentException, IllegalAccessException {
                makeAccessible(field);
                Object srcValue = field.get(src);
                field.set(dest, srcValue);
            }
        }, COPYABLE_FIELDS);
    }
    /**
     * Action to take on each method.
     */
    public interface MethodCallback {
        /**
         * Perform an operation using the given method.
         *
         * @param method the method to operate on
         */
        void doWith(Method method) throws IllegalArgumentException, IllegalAccessException;
    }
    /**
     * Callback optionally used to filter methods to be operated on by a method callback.
     */
    public interface MethodFilter {
        /**
         * Determine whether the given method matches.
         *
         * @param method the method to check
         */
        boolean matches(Method method);
    }
    /**
     * Callback interface invoked on each field in the hierarchy.
     */
    public interface FieldCallback {
        /**
         * Perform an operation using the given field.
         *
         * @param field the field to operate on
         */
        void doWith(Field field) throws IllegalArgumentException, IllegalAccessException;
    }
    /**
     * Callback optionally used to filter fields to be operated on by a field callback.
     */
    public interface FieldFilter {
        /**
         * Determine whether the given field matches.
         *
         * @param field the field to check
         */
        boolean matches(Field field);
    }
    /**
     * Pre-built FieldFilter that matches all non-static, non-final fields.
     * Declared {@code final} so callers cannot reassign this shared constant.
     */
    public static final FieldFilter COPYABLE_FIELDS = new FieldFilter() {
        @Override
        public boolean matches(Field field) {
            return !(Modifier.isStatic(field.getModifiers()) || Modifier.isFinal(field.getModifiers()));
        }
    };
    /**
     * Pre-built MethodFilter that matches all non-bridge methods.
     * Declared {@code final} so callers cannot reassign this shared constant.
     */
    public static final MethodFilter NON_BRIDGED_METHODS = new MethodFilter() {
        @Override
        public boolean matches(Method method) {
            return !method.isBridge();
        }
    };
    /**
     * Pre-built MethodFilter that matches all non-bridge methods
     * which are not declared on {@code java.lang.Object}.
     * Declared {@code final} so callers cannot reassign this shared constant.
     */
    public static final MethodFilter USER_DECLARED_METHODS = new MethodFilter() {
        @Override
        public boolean matches(Method method) {
            return (!method.isBridge() && method.getDeclaringClass() != Object.class);
        }
    };
}
|
|
package voltric.io.model.bif;
import voltric.data.DiscreteData;
import voltric.learning.score.LearningScore;
import voltric.model.DiscreteBayesNet;
import voltric.model.DiscreteBeliefNode;
import voltric.potential.Function;
import voltric.potential.util.FunctionIterator;
import voltric.variables.DiscreteVariable;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
/**
 * Writes Bayesian networks in BIF format.
 *
 * HLTA variant that writes in a format understandable by bnlearn.
 * NOTE(review): the original TODO said the state-format probabilities were not
 * written correctly (parent ordering / cell iteration of the Function needs
 * review) — that caveat still applies to {@link #writeProbabilitiesWithStates}.
 *
 * @author leonard
 *
 */
@Deprecated
public class HltaBifWriter {
    /**
     * Constructs this writer with an underlying output stream, using the
     * default UTF-8 encoding.
     *
     * @param output
     *            output stream where the network is written to.
     * @throws UnsupportedEncodingException
     */
    public HltaBifWriter(OutputStream output) throws UnsupportedEncodingException {
        this(output, false, "UTF-8");
    }
    /**
     * Constructs this writer with an underlying output stream, using the
     * default UTF-8 encoding.
     *
     * @param output
     *            output stream where the network is written to.
     * @param useTableFormat
     *            whether to use table format in probability definition
     * @throws UnsupportedEncodingException
     */
    public HltaBifWriter(OutputStream output, boolean useTableFormat) throws UnsupportedEncodingException {
        this(output, useTableFormat, "UTF-8");
    }
    /**
     * Constructs this writer with an underlying output stream.
     *
     * @param output
     *            output stream where the network is written to.
     * @param useTableFormat
     *            whether to use table format in probability definition
     * @param encoding
     *            charset used for the output.
     * @throws UnsupportedEncodingException
     */
    public HltaBifWriter(OutputStream output, boolean useTableFormat, String encoding) throws UnsupportedEncodingException {
        this.useTableFormat = useTableFormat;
        writer = new PrintWriter(new OutputStreamWriter(output, encoding));
    }
    /**
     * Writes the network (declaration, variables, probabilities) and closes
     * the underlying writer.
     */
    public void write(DiscreteBayesNet network) {
        writeNetworkDeclaration(network);
        writeVariables(network);
        writeProbabilities(network);
        writer.close();
    }
    /**
     * Writes the network declaration.
     *
     * @param network
     *            network to write.
     */
    private void writeNetworkDeclaration(DiscreteBayesNet network) {
        writer.format("network \"%s\" {\n}\n", network.getName());
        writer.println();
    }
    /**
     * Writes the variables part.
     *
     * @param network
     *            network to write.
     */
    private void writeVariables(DiscreteBayesNet network) {
        List<DiscreteBeliefNode> nodes = network.getNodes();
        for (DiscreteBeliefNode node : nodes) {
            writeNode(node);
        }
    }
    /**
     * Writes the information of a belief node.
     *
     * @param node
     *            node to write.
     */
    private void writeNode(DiscreteBeliefNode node) {
        List<String> states = node.getVariable().getStates();
        writer.format("variable \"%s\" {\n", node.getName());
        // Write the states. Fix: the original appended a comma after the last
        // state (", };"), which strict BIF parsers reject, and it crashed with
        // IndexOutOfBoundsException when the state list was empty.
        writer.format("\ttype discrete[%d] { ", states.size());
        for (int i = 0; i < states.size(); i++) {
            writer.format("\"%s\"", states.get(i));
            if (i < states.size() - 1) {
                writer.print(", ");
            }
        }
        writer.println(" };");
        writer.println("}");
        writer.println();
    }
    /**
     * Writes the probabilities definition part.
     *
     * @param network network to write.
     */
    private void writeProbabilities(DiscreteBayesNet network) {
        List<DiscreteBeliefNode> nodes = network.getNodes();
        for (DiscreteBeliefNode node : nodes) {
            writeProbabilities(node);
        }
    }
    /**
     * Writes the probabilities definition for a belief node.
     *
     * @param node
     *            node to write.
     */
    private void writeProbabilities(DiscreteBeliefNode node) {
        Function function = node.getCpt();
        List<DiscreteVariable> variables = node.getCpt().getVariables();
        // Write the related variables: the node itself first, then its parents.
        writer.format("probability (\"%s\"", variables.get(0).getName());
        // Check if it has parent variables.
        if (variables.size() > 1) {
            writer.print(" | ");
            for (int i = 1; i < variables.size(); i++) {
                writer.format("\"%s\"", variables.get(i).getName());
                if (i != variables.size() - 1) {
                    writer.print(", ");
                }
            }
        }
        writer.println(") {");
        if (useTableFormat) {
            writeProbabilitiesTable(function, variables);
        } else {
            writeProbabilitiesWithStates(function, variables);
        }
        writer.println("}");
        writer.println();
    }
    /**
     * Writes the CPT cells as a flat, space-separated "table" entry.
     */
    private void writeProbabilitiesTable(Function function, List<DiscreteVariable> variables) {
        double[] cells = function.getCells(variables);
        writer.print("\ttable ");
        for (int i = 0; i < cells.length; i++) {
            writer.print(cells[i]);
            if (i != cells.length - 1) {
                writer.print(" ");
            }
        }
        writer.println(";");
    }
    /**
     * Writes the CPT with one entry per parent-state combination.
     * NOTE(review): parent ordering / cell iteration still needs verification
     * (see class-level note).
     */
    private void writeProbabilitiesWithStates(Function function, List<DiscreteVariable> variables) {
        // Use table format for a root variable (no parents).
        if (variables.size() == 1) {
            writeProbabilitiesTable(function, variables);
            return;
        }
        // Put the parent variables at the beginning for iteration.
        ArrayList<DiscreteVariable> order = new ArrayList<>(variables.size());
        for (int i = 1; i < variables.size(); i++) {
            order.add(variables.get(i));
        }
        order.add(variables.get(0));
        FunctionIterator iterator = new FunctionIterator(function, order);
        iterator.iterate(new StateVisitor());
    }
    /**
     * Writes log-likelihood and BIC score as trailing comments.
     * NOTE(review): currently not invoked by {@link #write(DiscreteBayesNet)};
     * kept for callers/subclasses that may wire it in.
     */
    private void writeScore(DiscreteBayesNet network, DiscreteData data) {
        writer.println();
        writer.format("//Loglikelihood: %f\n", LearningScore.calculateLogLikelihood(data, network));
        writer.format("//BIC Score: %f\n", LearningScore.calculateBIC(data, network));
        writer.println();
    }
    /**
     * The print writer encapsulating the underlying output stream.
     */
    private final PrintWriter writer;
    /** Whether to emit CPTs in flat "table" format; set once in the constructor. */
    private final boolean useTableFormat;
    /** Emits one probability line per parent-state combination. */
    private class StateVisitor implements FunctionIterator.Visitor {
        public void visit(List<DiscreteVariable> order, int[] states, double value) {
            // The node state and variable (instead of parent variables):
            // the node variable was placed last in the iteration order.
            int nodeState = states[states.length - 1];
            DiscreteVariable nodeVariable = order.get(states.length - 1);
            if (nodeState == 0) {
                writeStart(order, states);
            }
            writer.print(value);
            if (nodeState == nodeVariable.getCardinality() - 1) {
                writeEnd();
            } else {
                writer.print(", ");
            }
        }
        private void writeStart(List<DiscreteVariable> order, int[] states) {
            writer.print("\t(");
            // Write parent states, which excludes the last (node) state.
            for (int i = 0; i < states.length - 1; i++) {
                String stateName = order.get(i).getStates().get(states[i]);
                writer.format("\"%s\"", stateName);
                if (i < states.length - 2) {
                    writer.write(" ");
                }
            }
            writer.print(") ");
        }
        private void writeEnd() {
            writer.println(";");
        }
    }
}
|
|
package org.coursera.potlatch4u.ui.gift;
import org.coursera.potlatch4u.R;
import org.coursera.potlatch4u.orm.FormerResolver;
import org.coursera.potlatch4u.orm.GiftData;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.graphics.Color;
import android.net.Uri;
import android.os.Bundle;
import android.os.RemoteException;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
/**
 * Fragment that displays a single gift's details (title, description, image
 * and touched count) and offers touch / report / delete actions.
 * <p>
 * The hosting activity must implement {@code OnOpenWindowInterface};
 * otherwise {@link #onAttach(Activity)} throws a {@link ClassCastException}.
 */
public class GiftViewFragment extends BaseGiftFragment {

    private static final String LOG_TAG = GiftListActivity.LOG_TAG;

    /** Fragment-argument key under which the gift's row id is stored. */
    public final static String rowIdentifyerTAG = "index";

    private FormerResolver resolver;
    private OnOpenWindowInterface mOpener;

    GiftData giftData;
    TextView titleTV;
    TextView descriptionTV;
    TextView touchedCountTV;
    ImageView imageView;
    // Buttons for the touch, report and delete actions.
    Button touchedButton;
    Button reportButton;
    Button deleteButton;

    /** Routes clicks from the three action buttons to their handlers. */
    OnClickListener myOnClickListener = new OnClickListener() {
        @Override
        public void onClick(View view) {
            switch (view.getId()) {
            case R.id.button_gift_view_to_delete:
                deleteButtonPressed();
                break;
            case R.id.button_gift_view_touched:
                touchedButtonPressed();
                break;
            case R.id.button_gift_view_report:
                reportInappropriateButtonPressed();
                break;
            default:
                break;
            }
        }

        private void touchedButtonPressed() {
            // TODO: implement the "touched" action.
        }

        private void reportInappropriateButtonPressed() {
            // TODO: implement the "report inappropriate" action.
        }
    };

    /**
     * Creates a new instance configured to display the gift with the given
     * row id.
     *
     * @param index row id of the gift to display
     * @return a configured {@link GiftViewFragment}
     */
    public static GiftViewFragment newInstance(long index) {
        GiftViewFragment f = new GiftViewFragment();
        // Supply index input as an argument.
        Bundle args = new Bundle();
        args.putLong(rowIdentifyerTAG, index);
        f.setArguments(args);
        return f;
    }

    // This fragment was attached to an activity.
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            mOpener = (OnOpenWindowInterface) activity;
            resolver = new FormerResolver(activity);
        } catch (ClassCastException e) {
            throw new ClassCastException(activity.toString() + " must implement OnOpenWindowListener");
        }
    }

    // This fragment is creating its view before it can be modified.
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.gift_view_fragment, container, false);
        container.setBackgroundColor(Color.GRAY);
        return view;
    }

    // This fragment is modifying its view before display.
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        titleTV = (TextView) getView().findViewById(R.id.gift_view_value_title);
        descriptionTV = (TextView) getView().findViewById(R.id.gift_view_value_description);
        touchedCountTV = (TextView) getView().findViewById(R.id.gift_view_value_touched_count);
        imageView = (ImageView) getView().findViewById(R.id.gift_view_value_image);
        // Clear any placeholder text until real data is loaded below.
        titleTV.setText("");
        descriptionTV.setText("");
        touchedCountTV.setText("");
        touchedButton = (Button) getView().findViewById(R.id.button_gift_view_touched);
        reportButton = (Button) getView().findViewById(R.id.button_gift_view_report);
        deleteButton = (Button) getView().findViewById(R.id.button_gift_view_to_delete);
        touchedButton.setOnClickListener(myOnClickListener);
        reportButton.setOnClickListener(myOnClickListener);
        deleteButton.setOnClickListener(myOnClickListener);
        try {
            setUiToGiftData(getUniqueKey());
        } catch (RemoteException e) {
            Toast.makeText(getActivity(), "Error retrieving information from local data store.", Toast.LENGTH_LONG)
                    .show();
            // Log the cause instead of silently dropping the stack trace.
            Log.e(LOG_TAG, "Error getting Gift data from C.P.", e);
        }
    }

    /**
     * Loads the gift identified by {@code getUniqueKey} from the resolver and
     * binds it to the UI. Hides the view when no such gift exists.
     *
     * @param getUniqueKey row id of the gift to display
     * @throws RemoteException if the underlying content resolver call fails
     */
    public void setUiToGiftData(long getUniqueKey) throws RemoteException {
        Log.d(LOG_TAG, "setUiToGiftData");
        giftData = resolver.getGift(getUniqueKey);
        if (giftData == null) {
            // No such gift: hide the view rather than show stale data.
            getView().setVisibility(View.GONE);
        } else {
            Log.d(LOG_TAG, "setUiToGiftData + giftData:" + giftData.toString());
            titleTV.setText(String.valueOf(giftData.title));
            descriptionTV.setText(String.valueOf(giftData.description));
            // Fix: check the field itself for null. String.valueOf(null) yields
            // the literal string "null", so the previous check on the converted
            // value was always true and "null" was passed to the image loader.
            if (giftData.imageUri != null) {
                showImage(String.valueOf(giftData.imageUri));
            }
            touchedCountTV.setText(getTouchedCountDisplayValue(giftData.touchCount));
        }
    }

    /**
     * Builds the human-readable "touched" counter text.
     *
     * @param touchedCount number of users the gift has touched
     * @return display text for the counter
     */
    private String getTouchedCountDisplayValue(long touchedCount) {
        String text;
        if (touchedCount == 0) {
            text = "Gift has not touched any users yet.";
        } else if (touchedCount == 1) {
            // Fixed copy-paste error: for counts >= 1 the gift HAS touched users.
            text = "Gift has touched one user.";
        } else {
            text = "Gift has touched " + touchedCount + " users.";
        }
        return text;
    }

    // Displays the image stored at the given URI string in the ImageView.
    private void showImage(String imageMetaDataPath) {
        Uri imageUri = Uri.parse(imageMetaDataPath);
        Log.d(LOG_TAG, "Loading image from " + imageUri);
        imageView.setImageURI(imageUri);
    }

    // Action to be performed when the delete button is pressed: confirm via
    // dialog, then delete and navigate back to the list.
    private void deleteButtonPressed() {
        String message;
        message = getResources().getString(R.string.gift_view_deletion_dialog_message);
        new AlertDialog.Builder(getActivity()).setIcon(android.R.drawable.ic_dialog_alert)
                .setTitle(R.string.gift_view_deletion_dialog_title).setMessage(message)
                .setPositiveButton(R.string.gift_view_deletion_dialog_yes, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        try {
                            resolver.deleteGift(giftData.id);
                        } catch (RemoteException e) {
                            Log.e(LOG_TAG, "RemoteException Caught => " + e.getMessage(), e);
                        }
                        mOpener.openListGiftFragment();
                        // On tablets both panes stay visible, so blank the detail
                        // pane; on phones simply finish the detail activity.
                        if (getResources().getBoolean(R.bool.isTablet)) {
                            mOpener.openViewGiftFragment(-1);
                        } else {
                            getActivity().finish();
                        }
                    }
                }).setNegativeButton(R.string.gift_view_deletion_dialog_no, null).show();
    }

    /** Returns the gift row id passed to this fragment via its arguments. */
    public long getUniqueKey() {
        return getArguments().getLong(rowIdentifyerTAG, 0);
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Drop references to the host activity to avoid leaking it.
        mOpener = null;
        resolver = null;
    }

    @Override
    public void onResume() {
        super.onResume();
        try {
            setUiToGiftData(getUniqueKey());
        } catch (RemoteException e) {
            Toast.makeText(getActivity(), "Error retrieving information from local data store.", Toast.LENGTH_LONG)
                    .show();
            Log.e(LOG_TAG, "Error getting Gift data from C.P.", e);
        }
    }
}
|
|
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.text.dvb;
import static java.lang.Math.min;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.util.SparseArray;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.ParsableBitArray;
import com.google.android.exoplayer2.util.Util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* Parses {@link Cue}s from a DVB subtitle bitstream.
*/
/* package */ final class DvbParser {
private static final String TAG = "DvbParser";

// Segment types, as defined by ETSI EN 300 743 Table 2
private static final int SEGMENT_TYPE_PAGE_COMPOSITION = 0x10;
private static final int SEGMENT_TYPE_REGION_COMPOSITION = 0x11;
private static final int SEGMENT_TYPE_CLUT_DEFINITION = 0x12;
private static final int SEGMENT_TYPE_OBJECT_DATA = 0x13;
private static final int SEGMENT_TYPE_DISPLAY_DEFINITION = 0x14;

// Page states, as defined by ETSI EN 300 743 Table 3
private static final int PAGE_STATE_NORMAL = 0; // Update. Only changed elements.
// private static final int PAGE_STATE_ACQUISITION = 1; // Refresh. All elements.
// private static final int PAGE_STATE_CHANGE = 2; // New. All elements.

// Region depths, as defined by ETSI EN 300 743 Table 5
// private static final int REGION_DEPTH_2_BIT = 1;
private static final int REGION_DEPTH_4_BIT = 2;
private static final int REGION_DEPTH_8_BIT = 3;

// Object codings, as defined by ETSI EN 300 743 Table 8
private static final int OBJECT_CODING_PIXELS = 0;
private static final int OBJECT_CODING_STRING = 1;

// Pixel-data types, as defined by ETSI EN 300 743 Table 9
private static final int DATA_TYPE_2BP_CODE_STRING = 0x10;
private static final int DATA_TYPE_4BP_CODE_STRING = 0x11;
private static final int DATA_TYPE_8BP_CODE_STRING = 0x12;
private static final int DATA_TYPE_24_TABLE_DATA = 0x20;
private static final int DATA_TYPE_28_TABLE_DATA = 0x21;
private static final int DATA_TYPE_48_TABLE_DATA = 0x22;
private static final int DATA_TYPE_END_LINE = 0xF0;

// Clut mapping tables, as defined by ETSI EN 300 743 10.4, 10.5, 10.6
private static final byte[] defaultMap2To4 = {
    (byte) 0x00, (byte) 0x07, (byte) 0x08, (byte) 0x0F};
private static final byte[] defaultMap2To8 = {
    (byte) 0x00, (byte) 0x77, (byte) 0x88, (byte) 0xFF};
private static final byte[] defaultMap4To8 = {
    (byte) 0x00, (byte) 0x11, (byte) 0x22, (byte) 0x33,
    (byte) 0x44, (byte) 0x55, (byte) 0x66, (byte) 0x77,
    (byte) 0x88, (byte) 0x99, (byte) 0xAA, (byte) 0xBB,
    (byte) 0xCC, (byte) 0xDD, (byte) 0xEE, (byte) 0xFF};

// Paint used to draw subtitle object pixels onto the shared canvas.
private final Paint defaultPaint;
// Paint used to fill region backgrounds (configured with DST_OVER in the constructor).
private final Paint fillRegionPaint;
// Shared canvas onto which regions are rendered before being copied into cue bitmaps.
private final Canvas canvas;
private final DisplayDefinition defaultDisplayDefinition;
private final ClutDefinition defaultClutDefinition;
private final SubtitleService subtitleService;
// Lazily (re)created in decode() to match the current display definition size.
private @MonotonicNonNull Bitmap bitmap;
/**
* Construct an instance for the given subtitle and ancillary page ids.
*
* @param subtitlePageId The id of the subtitle page carrying the subtitle to be parsed.
* @param ancillaryPageId The id of the ancillary page containing additional data.
*/
public DvbParser(int subtitlePageId, int ancillaryPageId) {
defaultPaint = new Paint();
defaultPaint.setStyle(Paint.Style.FILL_AND_STROKE);
defaultPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC));
defaultPaint.setPathEffect(null);
fillRegionPaint = new Paint();
fillRegionPaint.setStyle(Paint.Style.FILL);
fillRegionPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_OVER));
fillRegionPaint.setPathEffect(null);
canvas = new Canvas();
defaultDisplayDefinition = new DisplayDefinition(719, 575, 0, 719, 0, 575);
defaultClutDefinition = new ClutDefinition(0, generateDefault2BitClutEntries(),
generateDefault4BitClutEntries(), generateDefault8BitClutEntries());
subtitleService = new SubtitleService(subtitlePageId, ancillaryPageId);
}
/**
 * Resets the parser, discarding all state accumulated from previously decoded
 * packets.
 */
public void reset() {
  subtitleService.reset();
}
/**
 * Decodes a subtitling packet, returning a list of parsed {@link Cue}s.
 *
 * @param data The subtitling packet data to decode.
 * @param limit The limit in {@code data} at which to stop decoding.
 * @return The parsed {@link Cue}s.
 */
public List<Cue> decode(byte[] data, int limit) {
  // Parse the input data.
  ParsableBitArray dataBitArray = new ParsableBitArray(data, limit);
  while (dataBitArray.bitsLeft() >= 48 // sync_byte (8) + segment header (40)
      && dataBitArray.readBits(8) == 0x0F) {
    parseSubtitlingSegment(dataBitArray, subtitleService);
  }
  @Nullable PageComposition pageComposition = subtitleService.pageComposition;
  if (pageComposition == null) {
    // No page composition received yet; there is nothing to display.
    return Collections.emptyList();
  }
  // Update the canvas bitmap if necessary.
  DisplayDefinition displayDefinition = subtitleService.displayDefinition != null
      ? subtitleService.displayDefinition : defaultDisplayDefinition;
  if (bitmap == null || displayDefinition.width + 1 != bitmap.getWidth()
      || displayDefinition.height + 1 != bitmap.getHeight()) {
    // width/height are maximum addresses, so the bitmap needs one extra pixel each way.
    bitmap = Bitmap.createBitmap(displayDefinition.width + 1, displayDefinition.height + 1,
        Bitmap.Config.ARGB_8888);
    canvas.setBitmap(bitmap);
  }
  // Build the cues.
  List<Cue> cues = new ArrayList<>();
  SparseArray<PageRegion> pageRegions = pageComposition.regions;
  for (int i = 0; i < pageRegions.size(); i++) {
    // Save clean clipping state.
    canvas.save();
    PageRegion pageRegion = pageRegions.valueAt(i);
    int regionId = pageRegions.keyAt(i);
    RegionComposition regionComposition = subtitleService.regions.get(regionId);
    // Clip drawing to the current region and display definition window.
    int baseHorizontalAddress = pageRegion.horizontalAddress
        + displayDefinition.horizontalPositionMinimum;
    int baseVerticalAddress = pageRegion.verticalAddress
        + displayDefinition.verticalPositionMinimum;
    int clipRight =
        min(
            baseHorizontalAddress + regionComposition.width,
            displayDefinition.horizontalPositionMaximum);
    int clipBottom =
        min(
            baseVerticalAddress + regionComposition.height,
            displayDefinition.verticalPositionMaximum);
    canvas.clipRect(baseHorizontalAddress, baseVerticalAddress, clipRight, clipBottom);
    // Look up the region's CLUT, falling back to the ancillary page's CLUT and
    // then the default if it has not been defined.
    ClutDefinition clutDefinition = subtitleService.cluts.get(regionComposition.clutId);
    if (clutDefinition == null) {
      clutDefinition = subtitleService.ancillaryCluts.get(regionComposition.clutId);
      if (clutDefinition == null) {
        clutDefinition = defaultClutDefinition;
      }
    }
    // Draw each object belonging to the region.
    SparseArray<RegionObject> regionObjects = regionComposition.regionObjects;
    for (int j = 0; j < regionObjects.size(); j++) {
      int objectId = regionObjects.keyAt(j);
      RegionObject regionObject = regionObjects.valueAt(j);
      ObjectData objectData = subtitleService.objects.get(objectId);
      if (objectData == null) {
        objectData = subtitleService.ancillaryObjects.get(objectId);
      }
      if (objectData != null) {
        // A null paint suppresses drawing for non-modifying-color objects.
        @Nullable Paint paint = objectData.nonModifyingColorFlag ? null : defaultPaint;
        paintPixelDataSubBlocks(objectData, clutDefinition, regionComposition.depth,
            baseHorizontalAddress + regionObject.horizontalPosition,
            baseVerticalAddress + regionObject.verticalPosition, paint, canvas);
      }
    }
    // Fill the region background if requested (DST_OVER paint draws behind objects).
    if (regionComposition.fillFlag) {
      int color;
      if (regionComposition.depth == REGION_DEPTH_8_BIT) {
        color = clutDefinition.clutEntries8Bit[regionComposition.pixelCode8Bit];
      } else if (regionComposition.depth == REGION_DEPTH_4_BIT) {
        color = clutDefinition.clutEntries4Bit[regionComposition.pixelCode4Bit];
      } else {
        color = clutDefinition.clutEntries2Bit[regionComposition.pixelCode2Bit];
      }
      fillRegionPaint.setColor(color);
      canvas.drawRect(baseHorizontalAddress, baseVerticalAddress,
          baseHorizontalAddress + regionComposition.width,
          baseVerticalAddress + regionComposition.height,
          fillRegionPaint);
    }
    cues.add(
        new Cue.Builder()
            .setBitmap(
                Bitmap.createBitmap(
                    bitmap,
                    baseHorizontalAddress,
                    baseVerticalAddress,
                    regionComposition.width,
                    regionComposition.height))
            .setPosition((float) baseHorizontalAddress / displayDefinition.width)
            .setPositionAnchor(Cue.ANCHOR_TYPE_START)
            .setLine(
                (float) baseVerticalAddress / displayDefinition.height, Cue.LINE_TYPE_FRACTION)
            .setLineAnchor(Cue.ANCHOR_TYPE_START)
            .setSize((float) regionComposition.width / displayDefinition.width)
            .setBitmapHeight((float) regionComposition.height / displayDefinition.height)
            .build());
    // Clear the drawn area so the shared bitmap is clean for the next region.
    canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
    // Restore clean clipping state.
    canvas.restore();
  }
  return Collections.unmodifiableList(cues);
}
// Static parsing.

/**
 * Parses a subtitling segment, as defined by ETSI EN 300 743 7.2
 * <p>
 * The {@link SubtitleService} is updated with the parsed segment data.
 */
private static void parseSubtitlingSegment(ParsableBitArray data, SubtitleService service) {
  int segmentType = data.readBits(8);
  int pageId = data.readBits(16);
  int dataFieldLength = data.readBits(16);
  int dataFieldLimit = data.getBytePosition() + dataFieldLength;
  if ((dataFieldLength * 8) > data.bitsLeft()) {
    // The segment claims more payload than is available; abandon the packet.
    Log.w(TAG, "Data field length exceeds limit");
    // Skip to the very end.
    data.skipBits(data.bitsLeft());
    return;
  }
  switch (segmentType) {
    case SEGMENT_TYPE_DISPLAY_DEFINITION:
      if (pageId == service.subtitlePageId) {
        service.displayDefinition = parseDisplayDefinition(data);
      }
      break;
    case SEGMENT_TYPE_PAGE_COMPOSITION:
      if (pageId == service.subtitlePageId) {
        @Nullable PageComposition current = service.pageComposition;
        PageComposition pageComposition = parsePageComposition(data, dataFieldLength);
        if (pageComposition.state != PAGE_STATE_NORMAL) {
          // Non-normal state (see Table 3): start over with the new page.
          service.pageComposition = pageComposition;
          service.regions.clear();
          service.cluts.clear();
          service.objects.clear();
        } else if (current != null && current.version != pageComposition.version) {
          // Normal update: only adopt the page when its version changed.
          service.pageComposition = pageComposition;
        }
      }
      break;
    case SEGMENT_TYPE_REGION_COMPOSITION:
      @Nullable PageComposition pageComposition = service.pageComposition;
      if (pageId == service.subtitlePageId && pageComposition != null) {
        RegionComposition regionComposition = parseRegionComposition(data, dataFieldLength);
        if (pageComposition.state == PAGE_STATE_NORMAL) {
          // Normal updates carry only changed elements, so merge with any
          // previously received region of the same id.
          @Nullable
          RegionComposition existingRegionComposition = service.regions.get(regionComposition.id);
          if (existingRegionComposition != null) {
            regionComposition.mergeFrom(existingRegionComposition);
          }
        }
        service.regions.put(regionComposition.id, regionComposition);
      }
      break;
    case SEGMENT_TYPE_CLUT_DEFINITION:
      // CLUTs are stored separately depending on which page carried them.
      if (pageId == service.subtitlePageId) {
        ClutDefinition clutDefinition = parseClutDefinition(data, dataFieldLength);
        service.cluts.put(clutDefinition.id, clutDefinition);
      } else if (pageId == service.ancillaryPageId) {
        ClutDefinition clutDefinition = parseClutDefinition(data, dataFieldLength);
        service.ancillaryCluts.put(clutDefinition.id, clutDefinition);
      }
      break;
    case SEGMENT_TYPE_OBJECT_DATA:
      // Objects are likewise stored per carrying page.
      if (pageId == service.subtitlePageId) {
        ObjectData objectData = parseObjectData(data);
        service.objects.put(objectData.id, objectData);
      } else if (pageId == service.ancillaryPageId) {
        ObjectData objectData = parseObjectData(data);
        service.ancillaryObjects.put(objectData.id, objectData);
      }
      break;
    default:
      // Do nothing.
      break;
  }
  // Skip to the next segment.
  data.skipBytes(dataFieldLimit - data.getBytePosition());
}
/**
 * Parses a display definition segment, as defined by ETSI EN 300 743 7.2.1.
 */
private static DisplayDefinition parseDisplayDefinition(ParsableBitArray data) {
  data.skipBits(4); // dds_version_number (4).
  boolean displayWindowFlag = data.readBit();
  data.skipBits(3); // Skip reserved.
  int width = data.readBits(16);
  int height = data.readBits(16);
  int horizontalPositionMinimum;
  int horizontalPositionMaximum;
  int verticalPositionMinimum;
  int verticalPositionMaximum;
  if (displayWindowFlag) {
    // An explicit window restricts rendering to a sub-area of the display.
    horizontalPositionMinimum = data.readBits(16);
    horizontalPositionMaximum = data.readBits(16);
    verticalPositionMinimum = data.readBits(16);
    verticalPositionMaximum = data.readBits(16);
  } else {
    // No window specified: use the full display area.
    horizontalPositionMinimum = 0;
    horizontalPositionMaximum = width;
    verticalPositionMinimum = 0;
    verticalPositionMaximum = height;
  }
  return new DisplayDefinition(width, height, horizontalPositionMinimum,
      horizontalPositionMaximum, verticalPositionMinimum, verticalPositionMaximum);
}
/**
 * Parses a page composition segment, as defined by ETSI EN 300 743 7.2.2.
 */
private static PageComposition parsePageComposition(ParsableBitArray data, int length) {
  int timeoutSecs = data.readBits(8);
  int version = data.readBits(4);
  int state = data.readBits(2);
  data.skipBits(2); // Skip reserved.
  // The fixed header above is 2 bytes; the remainder is a list of 6-byte region entries.
  int remainingLength = length - 2;
  SparseArray<PageRegion> regions = new SparseArray<>();
  while (remainingLength > 0) {
    int regionId = data.readBits(8);
    data.skipBits(8); // Skip reserved.
    int regionHorizontalAddress = data.readBits(16);
    int regionVerticalAddress = data.readBits(16);
    remainingLength -= 6;
    regions.put(regionId, new PageRegion(regionHorizontalAddress, regionVerticalAddress));
  }
  return new PageComposition(timeoutSecs, version, state, regions);
}
/**
 * Parses a region composition segment, as defined by ETSI EN 300 743 7.2.3.
 */
private static RegionComposition parseRegionComposition(ParsableBitArray data, int length) {
  int id = data.readBits(8);
  data.skipBits(4); // Skip region_version_number
  boolean fillFlag = data.readBit();
  data.skipBits(3); // Skip reserved.
  int width = data.readBits(16);
  int height = data.readBits(16);
  int levelOfCompatibility = data.readBits(3);
  int depth = data.readBits(3);
  data.skipBits(2); // Skip reserved.
  int clutId = data.readBits(8);
  int pixelCode8Bit = data.readBits(8);
  int pixelCode4Bit = data.readBits(4);
  int pixelCode2Bit = data.readBits(2);
  data.skipBits(2); // Skip reserved
  // The fixed header above is 10 bytes; the remainder is a list of region objects.
  int remainingLength = length - 10;
  SparseArray<RegionObject> regionObjects = new SparseArray<>();
  while (remainingLength > 0) {
    int objectId = data.readBits(16);
    int objectType = data.readBits(2);
    int objectProvider = data.readBits(2);
    int objectHorizontalPosition = data.readBits(12);
    data.skipBits(4); // Skip reserved.
    int objectVerticalPosition = data.readBits(12);
    remainingLength -= 6;
    int foregroundPixelCode = 0;
    int backgroundPixelCode = 0;
    if (objectType == 0x01 || objectType == 0x02) { // Only seems to affect to char subtitles.
      foregroundPixelCode = data.readBits(8);
      backgroundPixelCode = data.readBits(8);
      remainingLength -= 2;
    }
    regionObjects.put(objectId, new RegionObject(objectType, objectProvider,
        objectHorizontalPosition, objectVerticalPosition, foregroundPixelCode,
        backgroundPixelCode));
  }
  return new RegionComposition(id, fillFlag, width, height, levelOfCompatibility, depth, clutId,
      pixelCode8Bit, pixelCode4Bit, pixelCode2Bit, regionObjects);
}
/**
 * Parses a CLUT definition segment, as defined by ETSI EN 300 743 7.2.4.
 */
private static ClutDefinition parseClutDefinition(ParsableBitArray data, int length) {
  int clutId = data.readBits(8);
  data.skipBits(8); // Skip clut_version_number (4), reserved (4)
  int remainingLength = length - 2;
  // Entries not redefined by this segment keep their default values.
  int[] clutEntries2Bit = generateDefault2BitClutEntries();
  int[] clutEntries4Bit = generateDefault4BitClutEntries();
  int[] clutEntries8Bit = generateDefault8BitClutEntries();
  while (remainingLength > 0) {
    int entryId = data.readBits(8);
    int entryFlags = data.readBits(8);
    remainingLength -= 2;
    // Flag bits select which of the three CLUTs this entry belongs to.
    int[] clutEntries;
    if ((entryFlags & 0x80) != 0) {
      clutEntries = clutEntries2Bit;
    } else if ((entryFlags & 0x40) != 0) {
      clutEntries = clutEntries4Bit;
    } else {
      clutEntries = clutEntries8Bit;
    }
    int y;
    int cr;
    int cb;
    int t;
    if ((entryFlags & 0x01) != 0) {
      // Full-range (8-bit) colour components.
      y = data.readBits(8);
      cr = data.readBits(8);
      cb = data.readBits(8);
      t = data.readBits(8);
      remainingLength -= 4;
    } else {
      // Reduced-range components, scaled up to 8 bits.
      y = data.readBits(6) << 2;
      cr = data.readBits(4) << 4;
      cb = data.readBits(4) << 4;
      t = data.readBits(2) << 6;
      remainingLength -= 2;
    }
    if (y == 0x00) {
      // Zero luma forces a fully transparent entry.
      cr = 0x00;
      cb = 0x00;
      t = 0xFF;
    }
    // The (byte) cast may sign-extend, but only the low 8 bits survive the
    // shift into the alpha position inside getColor, so the result is correct.
    int a = (byte) (0xFF - (t & 0xFF));
    // YCbCr -> RGB conversion using BT.601 coefficients.
    int r = (int) (y + (1.40200 * (cr - 128)));
    int g = (int) (y - (0.34414 * (cb - 128)) - (0.71414 * (cr - 128)));
    int b = (int) (y + (1.77200 * (cb - 128)));
    clutEntries[entryId] = getColor(a, Util.constrainValue(r, 0, 255),
        Util.constrainValue(g, 0, 255), Util.constrainValue(b, 0, 255));
  }
  return new ClutDefinition(clutId, clutEntries2Bit, clutEntries4Bit, clutEntries8Bit);
}
/**
 * Parses an object data segment, as defined by ETSI EN 300 743 7.2.5.
 *
 * @return The parsed object data.
 */
private static ObjectData parseObjectData(ParsableBitArray data) {
  int objectId = data.readBits(16);
  data.skipBits(4); // Skip object_version_number
  int objectCodingMethod = data.readBits(2);
  boolean nonModifyingColorFlag = data.readBit();
  data.skipBits(1); // Skip reserved.
  byte[] topFieldData = Util.EMPTY_BYTE_ARRAY;
  byte[] bottomFieldData = Util.EMPTY_BYTE_ARRAY;
  if (objectCodingMethod == OBJECT_CODING_STRING) {
    int numberOfCodes = data.readBits(8);
    // TODO: Parse and use character_codes.
    data.skipBits(numberOfCodes * 16); // Skip character_codes.
  } else if (objectCodingMethod == OBJECT_CODING_PIXELS) {
    int topFieldDataLength = data.readBits(16);
    int bottomFieldDataLength = data.readBits(16);
    if (topFieldDataLength > 0) {
      topFieldData = new byte[topFieldDataLength];
      data.readBytes(topFieldData, 0, topFieldDataLength);
    }
    if (bottomFieldDataLength > 0) {
      bottomFieldData = new byte[bottomFieldDataLength];
      data.readBytes(bottomFieldData, 0, bottomFieldDataLength);
    } else {
      // No bottom field supplied: reuse the top field data for both fields.
      bottomFieldData = topFieldData;
    }
  }
  return new ObjectData(objectId, nonModifyingColorFlag, topFieldData, bottomFieldData);
}
/**
 * Returns the default 2-bit CLUT entries: transparent, opaque white, opaque
 * black and opaque gray.
 */
private static int[] generateDefault2BitClutEntries() {
  return new int[] {0x00000000, 0xFFFFFFFF, 0xFF000000, 0xFF7F7F7F};
}
/**
 * Returns the default 4-bit CLUT entries: entry 0 is transparent; entries 1-7
 * are full-intensity RGB combinations and entries 8-15 are half-intensity.
 */
private static int[] generateDefault4BitClutEntries() {
  int[] entries = new int[16];
  entries[0] = 0x00000000;
  for (int i = 1; i < entries.length; i++) {
    // Index bits 0..2 select the red/green/blue channels respectively;
    // indices below 8 use full intensity, the rest half intensity.
    int intensity = i < 8 ? 0xFF : 0x7F;
    int red = (i & 0x01) != 0 ? intensity : 0x00;
    int green = (i & 0x02) != 0 ? intensity : 0x00;
    int blue = (i & 0x04) != 0 ? intensity : 0x00;
    entries[i] = getColor(0xFF, red, green, blue);
  }
  return entries;
}
/**
 * Returns the default 8-bit CLUT entries.
 */
private static int[] generateDefault8BitClutEntries() {
  int[] entries = new int[256];
  entries[0] = 0x00000000;
  // NOTE(review): unlike the 4-bit variant, this loop starts at 0, so the
  // transparent value assigned to entries[0] above is immediately overwritten
  // (i = 0 takes the i < 8 branch, producing getColor(0x3F, 0, 0, 0)).
  // Confirm against ETSI EN 300 743 10.6 whether this is intended.
  for (int i = 0; i < entries.length; i++) {
    if (i < 8) {
      // Primary/secondary colours at 0x3F alpha.
      entries[i] = getColor(
          0x3F,
          ((i & 0x01) != 0 ? 0xFF : 0x00),
          ((i & 0x02) != 0 ? 0xFF : 0x00),
          ((i & 0x04) != 0 ? 0xFF : 0x00));
    } else {
      // Bits 0x88 select one of four alpha/offset groups; the remaining index
      // bits contribute weighted amounts to the red/green/blue channels.
      switch (i & 0x88) {
        case 0x00:
          entries[i] = getColor(
              0xFF,
              (((i & 0x01) != 0 ? 0x55 : 0x00) + ((i & 0x10) != 0 ? 0xAA : 0x00)),
              (((i & 0x02) != 0 ? 0x55 : 0x00) + ((i & 0x20) != 0 ? 0xAA : 0x00)),
              (((i & 0x04) != 0 ? 0x55 : 0x00) + ((i & 0x40) != 0 ? 0xAA : 0x00)));
          break;
        case 0x08:
          entries[i] = getColor(
              0x7F,
              (((i & 0x01) != 0 ? 0x55 : 0x00) + ((i & 0x10) != 0 ? 0xAA : 0x00)),
              (((i & 0x02) != 0 ? 0x55 : 0x00) + ((i & 0x20) != 0 ? 0xAA : 0x00)),
              (((i & 0x04) != 0 ? 0x55 : 0x00) + ((i & 0x40) != 0 ? 0xAA : 0x00)));
          break;
        case 0x80:
          entries[i] = getColor(
              0xFF,
              (127 + ((i & 0x01) != 0 ? 0x2B : 0x00) + ((i & 0x10) != 0 ? 0x55 : 0x00)),
              (127 + ((i & 0x02) != 0 ? 0x2B : 0x00) + ((i & 0x20) != 0 ? 0x55 : 0x00)),
              (127 + ((i & 0x04) != 0 ? 0x2B : 0x00) + ((i & 0x40) != 0 ? 0x55 : 0x00)));
          break;
        case 0x88:
          entries[i] = getColor(
              0xFF,
              (((i & 0x01) != 0 ? 0x2B : 0x00) + ((i & 0x10) != 0 ? 0x55 : 0x00)),
              (((i & 0x02) != 0 ? 0x2B : 0x00) + ((i & 0x20) != 0 ? 0x55 : 0x00)),
              (((i & 0x04) != 0 ? 0x2B : 0x00) + ((i & 0x40) != 0 ? 0x55 : 0x00)));
          break;
      }
    }
  }
  return entries;
}
/** Packs alpha, red, green and blue byte components into a single ARGB int. */
private static int getColor(int a, int r, int g, int b) {
  int argb = a << 24;
  argb |= r << 16;
  argb |= g << 8;
  argb |= b;
  return argb;
}
// Static drawing.

/** Draws a pixel data sub-block, as defined by ETSI EN 300 743 7.2.5.1, into a canvas. */
private static void paintPixelDataSubBlocks(
    ObjectData objectData,
    ClutDefinition clutDefinition,
    int regionDepth,
    int horizontalAddress,
    int verticalAddress,
    @Nullable Paint paint,
    Canvas canvas) {
  // Select the CLUT matching the region's colour depth.
  int[] clutEntries;
  switch (regionDepth) {
    case REGION_DEPTH_8_BIT:
      clutEntries = clutDefinition.clutEntries8Bit;
      break;
    case REGION_DEPTH_4_BIT:
      clutEntries = clutDefinition.clutEntries4Bit;
      break;
    default:
      clutEntries = clutDefinition.clutEntries2Bit;
      break;
  }
  // Top and bottom field data cover alternating lines (bottom field starts
  // one line below the top field).
  paintPixelDataSubBlock(objectData.topFieldData, clutEntries, regionDepth, horizontalAddress,
      verticalAddress, paint, canvas);
  paintPixelDataSubBlock(objectData.bottomFieldData, clutEntries, regionDepth, horizontalAddress,
      verticalAddress + 1, paint, canvas);
}
/** Draws a pixel data sub-block, as defined by ETSI EN 300 743 7.2.5.1, into a canvas. */
private static void paintPixelDataSubBlock(
    byte[] pixelData,
    int[] clutEntries,
    int regionDepth,
    int horizontalAddress,
    int verticalAddress,
    @Nullable Paint paint,
    Canvas canvas) {
  ParsableBitArray data = new ParsableBitArray(pixelData);
  int column = horizontalAddress;
  int line = verticalAddress;
  // Optional map tables translate pixel codes of a lower bit depth into
  // indices for the region's (deeper) CLUT. Null until defined by the stream.
  @Nullable byte[] clutMapTable2To4 = null;
  @Nullable byte[] clutMapTable2To8 = null;
  @Nullable byte[] clutMapTable4To8 = null;
  while (data.bitsLeft() != 0) {
    int dataType = data.readBits(8);
    switch (dataType) {
      case DATA_TYPE_2BP_CODE_STRING:
        // Choose the 2-bit map table appropriate for the region depth,
        // falling back to the spec-defined default tables.
        @Nullable byte[] clutMapTable2ToX;
        if (regionDepth == REGION_DEPTH_8_BIT) {
          clutMapTable2ToX = clutMapTable2To8 == null ? defaultMap2To8 : clutMapTable2To8;
        } else if (regionDepth == REGION_DEPTH_4_BIT) {
          clutMapTable2ToX = clutMapTable2To4 == null ? defaultMap2To4 : clutMapTable2To4;
        } else {
          clutMapTable2ToX = null;
        }
        column = paint2BitPixelCodeString(data, clutEntries, clutMapTable2ToX, column, line,
            paint, canvas);
        data.byteAlign(); // Code strings end on a byte boundary.
        break;
      case DATA_TYPE_4BP_CODE_STRING:
        @Nullable byte[] clutMapTable4ToX;
        if (regionDepth == REGION_DEPTH_8_BIT) {
          clutMapTable4ToX = clutMapTable4To8 == null ? defaultMap4To8 : clutMapTable4To8;
        } else {
          clutMapTable4ToX = null;
        }
        column = paint4BitPixelCodeString(data, clutEntries, clutMapTable4ToX, column, line,
            paint, canvas);
        data.byteAlign(); // Code strings end on a byte boundary.
        break;
      case DATA_TYPE_8BP_CODE_STRING:
        column =
            paint8BitPixelCodeString(
                data, clutEntries, /* clutMapTable= */ null, column, line, paint, canvas);
        break;
      case DATA_TYPE_24_TABLE_DATA:
        clutMapTable2To4 = buildClutMapTable(4, 4, data);
        break;
      case DATA_TYPE_28_TABLE_DATA:
        clutMapTable2To8 = buildClutMapTable(4, 8, data);
        break;
      case DATA_TYPE_48_TABLE_DATA:
        clutMapTable4To8 = buildClutMapTable(16, 8, data);
        break;
      case DATA_TYPE_END_LINE:
        // End of a pixel line: return to the left edge and advance two lines
        // (each sub-block covers alternate lines of one field).
        column = horizontalAddress;
        line += 2;
        break;
      default:
        // Do nothing.
        break;
    }
  }
}
/** Paint a 2-bit/pixel code string, as defined by ETSI EN 300 743 7.2.5.2, to a canvas. */
private static int paint2BitPixelCodeString(
    ParsableBitArray data,
    int[] clutEntries,
    @Nullable byte[] clutMapTable,
    int column,
    int line,
    @Nullable Paint paint,
    Canvas canvas) {
  boolean endOfPixelCodeString = false;
  do {
    // Decode one run: a CLUT index plus the number of pixels it repeats for.
    int runLength = 0;
    int clutIndex = 0;
    int peek = data.readBits(2);
    if (peek != 0x00) {
      // A non-zero 2-bit code is a single pixel of that colour.
      runLength = 1;
      clutIndex = peek;
    } else if (data.readBit()) {
      runLength = 3 + data.readBits(3);
      clutIndex = data.readBits(2);
    } else if (data.readBit()) {
      runLength = 1;
    } else {
      switch (data.readBits(2)) {
        case 0x00:
          endOfPixelCodeString = true;
          break;
        case 0x01:
          runLength = 2;
          break;
        case 0x02:
          runLength = 12 + data.readBits(4);
          clutIndex = data.readBits(2);
          break;
        case 0x03:
          runLength = 29 + data.readBits(8);
          clutIndex = data.readBits(2);
          break;
      }
    }
    if (runLength != 0 && paint != null) {
      // Translate through the map table (if any) and draw the run as a 1px-high rect.
      paint.setColor(clutEntries[clutMapTable != null ? clutMapTable[clutIndex] : clutIndex]);
      canvas.drawRect(column, line, column + runLength, line + 1, paint);
    }
    column += runLength;
  } while (!endOfPixelCodeString);
  return column;
}
/** Paint a 4-bit/pixel code string, as defined by ETSI EN 300 743 7.2.5.2, to a canvas. */
private static int paint4BitPixelCodeString(
    ParsableBitArray data,
    int[] clutEntries,
    @Nullable byte[] clutMapTable,
    int column,
    int line,
    @Nullable Paint paint,
    Canvas canvas) {
  boolean endOfPixelCodeString = false;
  do {
    // Decode one run: a CLUT index plus the number of pixels it repeats for.
    int runLength = 0;
    int clutIndex = 0;
    int peek = data.readBits(4);
    if (peek != 0x00) {
      // A non-zero 4-bit code is a single pixel of that colour.
      runLength = 1;
      clutIndex = peek;
    } else if (!data.readBit()) {
      peek = data.readBits(3);
      if (peek != 0x00) {
        // Run of 3-9 pseudo-colour-0 pixels.
        runLength = 2 + peek;
        clutIndex = 0x00;
      } else {
        endOfPixelCodeString = true;
      }
    } else if (!data.readBit()) {
      runLength = 4 + data.readBits(2);
      clutIndex = data.readBits(4);
    } else {
      switch (data.readBits(2)) {
        case 0x00:
          runLength = 1;
          break;
        case 0x01:
          runLength = 2;
          break;
        case 0x02:
          runLength = 9 + data.readBits(4);
          clutIndex = data.readBits(4);
          break;
        case 0x03:
          runLength = 25 + data.readBits(8);
          clutIndex = data.readBits(4);
          break;
      }
    }
    if (runLength != 0 && paint != null) {
      // Translate through the map table (if any) and draw the run as a 1px-high rect.
      paint.setColor(clutEntries[clutMapTable != null ? clutMapTable[clutIndex] : clutIndex]);
      canvas.drawRect(column, line, column + runLength, line + 1, paint);
    }
    column += runLength;
  } while (!endOfPixelCodeString);
  return column;
}
/** Paint an 8-bit/pixel code string, as defined by ETSI EN 300 743 7.2.5.2, to a canvas. */
private static int paint8BitPixelCodeString(
    ParsableBitArray data,
    int[] clutEntries,
    @Nullable byte[] clutMapTable,
    int column,
    int line,
    @Nullable Paint paint,
    Canvas canvas) {
  boolean endOfPixelCodeString = false;
  do {
    // Decode one run: a CLUT index plus the number of pixels it repeats for.
    int runLength = 0;
    int clutIndex = 0;
    int peek = data.readBits(8);
    if (peek != 0x00) {
      // A non-zero 8-bit code is a single pixel of that colour.
      runLength = 1;
      clutIndex = peek;
    } else {
      if (!data.readBit()) {
        peek = data.readBits(7);
        if (peek != 0x00) {
          // Run of 1-127 pseudo-colour-0 pixels.
          runLength = peek;
          clutIndex = 0x00;
        } else {
          endOfPixelCodeString = true;
        }
      } else {
        runLength = data.readBits(7);
        clutIndex = data.readBits(8);
      }
    }
    if (runLength != 0 && paint != null) {
      // Translate through the map table (if any) and draw the run as a 1px-high rect.
      paint.setColor(clutEntries[clutMapTable != null ? clutMapTable[clutIndex] : clutIndex]);
      canvas.drawRect(column, line, column + runLength, line + 1, paint);
    }
    column += runLength;
  } while (!endOfPixelCodeString);
  return column;
}
/** Reads {@code length} consecutive CLUT-map entries of {@code bitsPerEntry} bits each. */
private static byte[] buildClutMapTable(int length, int bitsPerEntry, ParsableBitArray data) {
  byte[] table = new byte[length];
  int index = 0;
  while (index < length) {
    table[index++] = (byte) data.readBits(bitsPerEntry);
  }
  return table;
}
// Private inner classes.
/**
 * Aggregates all state (regions, CLUTs and objects) decoded for a single subtitle service,
 * identified by its subtitle page id and ancillary page id.
 */
private static final class SubtitleService {

  public final int subtitlePageId;
  public final int ancillaryPageId;

  // Decoded state, keyed by their respective ids.
  public final SparseArray<RegionComposition> regions = new SparseArray<>();
  public final SparseArray<ClutDefinition> cluts = new SparseArray<>();
  public final SparseArray<ObjectData> objects = new SparseArray<>();
  public final SparseArray<ClutDefinition> ancillaryCluts = new SparseArray<>();
  public final SparseArray<ObjectData> ancillaryObjects = new SparseArray<>();

  @Nullable public DisplayDefinition displayDefinition;
  @Nullable public PageComposition pageComposition;

  public SubtitleService(int subtitlePageId, int ancillaryPageId) {
    this.subtitlePageId = subtitlePageId;
    this.ancillaryPageId = ancillaryPageId;
  }

  /** Clears all decoded state, returning the service to its initial (empty) condition. */
  public void reset() {
    regions.clear();
    cluts.clear();
    objects.clear();
    ancillaryCluts.clear();
    ancillaryObjects.clear();
    displayDefinition = null;
    pageComposition = null;
  }
}
/**
 * Geometry of the display and of the active subtitle area within it.
 *
 * <p>See ETSI EN 300 743 7.2.1.
 */
private static final class DisplayDefinition {

  public final int width;
  public final int height;
  public final int horizontalPositionMinimum;
  public final int horizontalPositionMaximum;
  public final int verticalPositionMinimum;
  public final int verticalPositionMaximum;

  public DisplayDefinition(
      int width,
      int height,
      int horizontalPositionMinimum,
      int horizontalPositionMaximum,
      int verticalPositionMinimum,
      int verticalPositionMaximum) {
    this.width = width;
    this.height = height;
    this.horizontalPositionMaximum = horizontalPositionMaximum;
    this.horizontalPositionMinimum = horizontalPositionMinimum;
    this.verticalPositionMaximum = verticalPositionMaximum;
    this.verticalPositionMinimum = verticalPositionMinimum;
  }
}
/**
 * Definition and arrangement of the regions that make up the displayed page.
 *
 * <p>See ETSI EN 300 743 7.2.2.
 */
private static final class PageComposition {

  public final int timeOutSecs; // TODO: Use this or remove it.
  public final int version;
  public final int state;
  public final SparseArray<PageRegion> regions;

  public PageComposition(
      int timeOutSecs, int version, int state, SparseArray<PageRegion> regions) {
    this.timeOutSecs = timeOutSecs;
    this.version = version;
    this.state = state;
    this.regions = regions;
  }
}
/**
 * Position of a single region within a {@link PageComposition}.
 *
 * <p>See ETSI EN 300 743 7.2.2.
 */
private static final class PageRegion {

  public final int horizontalAddress;
  public final int verticalAddress;

  public PageRegion(int horizontalAddress, int verticalAddress) {
    this.verticalAddress = verticalAddress;
    this.horizontalAddress = horizontalAddress;
  }
}
/**
 * An area of the page composed of a list of region objects and an associated CLUT.
 *
 * <p>See ETSI EN 300 743 7.2.3.
 */
private static final class RegionComposition {

  public final int id;
  public final boolean fillFlag;
  public final int width;
  public final int height;
  public final int levelOfCompatibility; // TODO: Use this or remove it.
  public final int depth;
  public final int clutId;
  public final int pixelCode8Bit;
  public final int pixelCode4Bit;
  public final int pixelCode2Bit;
  public final SparseArray<RegionObject> regionObjects;

  public RegionComposition(
      int id,
      boolean fillFlag,
      int width,
      int height,
      int levelOfCompatibility,
      int depth,
      int clutId,
      int pixelCode8Bit,
      int pixelCode4Bit,
      int pixelCode2Bit,
      SparseArray<RegionObject> regionObjects) {
    this.id = id;
    this.fillFlag = fillFlag;
    this.width = width;
    this.height = height;
    this.levelOfCompatibility = levelOfCompatibility;
    this.depth = depth;
    this.clutId = clutId;
    this.pixelCode8Bit = pixelCode8Bit;
    this.pixelCode4Bit = pixelCode4Bit;
    this.pixelCode2Bit = pixelCode2Bit;
    this.regionObjects = regionObjects;
  }

  /**
   * Copies every region object from {@code other} into this composition, overwriting existing
   * entries on key collisions.
   */
  public void mergeFrom(RegionComposition other) {
    SparseArray<RegionObject> incoming = other.regionObjects;
    for (int index = 0, count = incoming.size(); index < count; index++) {
      regionObjects.put(incoming.keyAt(index), incoming.valueAt(index));
    }
  }
}
/**
 * A single object placement within a {@link RegionComposition}.
 *
 * <p>See ETSI EN 300 743 7.2.3.
 */
private static final class RegionObject {

  public final int type; // TODO: Use this or remove it.
  public final int provider; // TODO: Use this or remove it.
  public final int horizontalPosition;
  public final int verticalPosition;
  public final int foregroundPixelCode; // TODO: Use this or remove it.
  public final int backgroundPixelCode; // TODO: Use this or remove it.

  public RegionObject(
      int type,
      int provider,
      int horizontalPosition,
      int verticalPosition,
      int foregroundPixelCode,
      int backgroundPixelCode) {
    this.backgroundPixelCode = backgroundPixelCode;
    this.foregroundPixelCode = foregroundPixelCode;
    this.verticalPosition = verticalPosition;
    this.horizontalPosition = horizontalPosition;
    this.provider = provider;
    this.type = type;
  }
}
/**
 * CLUT family definition containing the color tables for the three bit depths defined.
 *
 * <p>See ETSI EN 300 743 7.2.4.
 */
private static final class ClutDefinition {

  public final int id;
  public final int[] clutEntries2Bit;
  public final int[] clutEntries4Bit;
  public final int[] clutEntries8Bit;

  // Parameter renamed from "clutEntries8bit" to "clutEntries8Bit" for consistent casing with
  // its siblings (Java callers are positional, so this is interface-compatible).
  public ClutDefinition(
      int id, int[] clutEntries2Bit, int[] clutEntries4Bit, int[] clutEntries8Bit) {
    this.id = id;
    this.clutEntries2Bit = clutEntries2Bit;
    this.clutEntries4Bit = clutEntries4Bit;
    this.clutEntries8Bit = clutEntries8Bit;
  }
}
/**
 * The textual or graphical representation of an object, split into top- and bottom-field
 * pixel data.
 *
 * <p>See ETSI EN 300 743 7.2.5.
 */
private static final class ObjectData {

  public final int id;
  public final boolean nonModifyingColorFlag;
  public final byte[] topFieldData;
  public final byte[] bottomFieldData;

  public ObjectData(
      int id, boolean nonModifyingColorFlag, byte[] topFieldData, byte[] bottomFieldData) {
    this.bottomFieldData = bottomFieldData;
    this.topFieldData = topFieldData;
    this.nonModifyingColorFlag = nonModifyingColorFlag;
    this.id = id;
  }
}
}
|
|
package controller.admin;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.FilenameUtils;
import com.google.gson.JsonObject;
import model.bean.Vaccine;
import model.bean.VaccineDisease;
import model.bo.VaccineBO;
import model.bo.VaccineDiseaseBO;
/**
* Servlet implementation class CreateNewsServlet
*/
@WebServlet("/UpdateVaccineServlet")
@MultipartConfig
public class UpdateVaccineServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
private static final String UPLOAD_DIRECTORY = "upload";
private static final int THRESHOLD_SIZE = 1024 * 1024 * 3; // 3MB
private static final int MAX_FILE_SIZE = 1024 * 1024 * 40; // 40MB
private static final int MAX_REQUEST_SIZE = 1024 * 1024 * 50; // 50MB
/**
 * Default constructor; no servlet-specific initialization is required.
 *
 * @see HttpServlet#HttpServlet()
 */
public UpdateVaccineServlet() {
    super();
}
/**
 * Delegates GET requests to {@link #doPost}, so the endpoint behaves identically for
 * either HTTP method.
 *
 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse
 *      response)
 */
protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    doPost(request, response);
}
/**
 * Handles a multipart vaccine-update request: optionally stores a replacement image under
 * {@code <webapp>/uploads}, updates the vaccine row and rebuilds its vaccine-disease links,
 * then answers with a JSON document {@code {"status": "success"|"fail"}}.
 *
 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
 *      response)
 */
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    request.setCharacterEncoding("UTF-8");
    response.setContentType("text/plain; charset=utf-8");
    response.setCharacterEncoding("UTF-8");
    VaccineDiseaseBO vaccineDiseaseBO = new VaccineDiseaseBO();
    VaccineBO vaccineBO = new VaccineBO();
    String vaccineId = null, vaccineName = null, manufacturer = null, price = null, numberOfDoses = null,
            sideEffects = null, indication = null, contraindication = null, dosageAndUsage = null, image = null,
            disease = null, oldImage = null;
    ArrayList<String> listDiseases = new ArrayList<>();
    String status = "fail";
    // Reject plain (non-multipart) submissions early.
    if (!ServletFileUpload.isMultipartContent(request)) {
        PrintWriter writer = response.getWriter();
        writer.println("Request does not contain upload data");
        writer.flush();
        return;
    }
    // Configure the upload parser: small items stay in memory, larger ones spill to temp files.
    // NOTE(review): the previous code also created an unused "<webapp>/upload" directory here;
    // uploaded files are actually stored under "/uploads" below, so that dead setup was removed.
    DiskFileItemFactory factory = new DiskFileItemFactory();
    factory.setSizeThreshold(THRESHOLD_SIZE);
    factory.setRepository(new File(System.getProperty("java.io.tmpdir")));
    ServletFileUpload upload = new ServletFileUpload(factory);
    upload.setFileSizeMax(MAX_FILE_SIZE);
    upload.setSizeMax(MAX_REQUEST_SIZE);
    try {
        List<FileItem> formItems = upload.parseRequest(request);
        for (FileItem item : formItems) {
            if (!item.isFormField()) {
                // File part: store the image under <webapp>/uploads with a unique name.
                if (item.getSize() != 0) {
                    String extension = FilenameUtils.getExtension(item.getName());
                    String fileName = "Vaccine-" + System.nanoTime() + "." + extension;
                    String root = getServletContext().getRealPath("/");
                    File path = new File(root + "/uploads");
                    if (!path.exists()) {
                        path.mkdirs();
                    }
                    File uploadedFile = new File(path + "/" + fileName);
                    item.write(uploadedFile);
                    image = fileName;
                }
            } else {
                // Regular form field: capture it into the matching local variable.
                switch (item.getFieldName()) {
                case "vaccineId":
                    vaccineId = item.getString("UTF-8");
                    break;
                case "vaccineName":
                    vaccineName = item.getString("UTF-8");
                    break;
                case "manufacturer":
                    manufacturer = item.getString("UTF-8");
                    break;
                case "price":
                    price = item.getString("UTF-8");
                    break;
                case "numberOfDoses":
                    numberOfDoses = item.getString("UTF-8");
                    break;
                case "sideEffects":
                    sideEffects = item.getString("UTF-8");
                    break;
                case "indication":
                    indication = item.getString("UTF-8");
                    break;
                case "contraindication":
                    contraindication = item.getString("UTF-8");
                    break;
                case "dosageAndUsage":
                    dosageAndUsage = item.getString("UTF-8");
                    break;
                case "nameImage":
                    oldImage = item.getString("UTF-8");
                    break;
                case "disease":
                    disease = item.getString("UTF-8");
                    listDiseases.add(disease);
                    break;
                default:
                    break;
                }
            }
        }
    } catch (Exception ex) {
        // Best effort: a failed upload keeps the previous image. Log so the failure is visible
        // (the original swallowed the exception silently).
        getServletContext().log("UpdateVaccineServlet: failed to process multipart request", ex);
        image = null;
    }
    if (image == null) {
        // No new image uploaded (or upload failed): keep the previously stored one.
        image = oldImage;
    }
    // Validate and parse the numeric fields ONCE. The original re-parsed vaccineId repeatedly
    // and would throw an unhandled NPE/NumberFormatException (HTTP 500) on missing/malformed
    // input instead of returning the JSON "fail" status.
    boolean hasError = vaccineId == null || price == null || numberOfDoses == null;
    int parsedVaccineId = 0;
    double parsedPrice = 0;
    int parsedNumberOfDoses = 0;
    if (!hasError) {
        try {
            parsedVaccineId = Integer.parseInt(vaccineId);
            parsedPrice = Double.parseDouble(price);
            parsedNumberOfDoses = Integer.parseInt(numberOfDoses);
        } catch (NumberFormatException e) {
            hasError = true;
        }
    }
    if (!hasError) {
        Vaccine vaccine = new Vaccine(parsedVaccineId, vaccineName, manufacturer, parsedPrice,
                parsedNumberOfDoses, sideEffects, indication, contraindication, dosageAndUsage, image);
        if (vaccineBO.updateVaccine(vaccine) > 0) {
            ArrayList<VaccineDisease> listVaccineDisease = vaccineDiseaseBO
                    .getVaccineDiseasesByVaccineId(parsedVaccineId);
            if (listVaccineDisease.size() == 0) {
                // No existing links: just create the new ones.
                if (createVaccineDisease(parsedVaccineId, listDiseases) > 0) {
                    status = "success";
                }
            } else {
                // Replace existing links: delete all, then re-create from the submitted list.
                if (vaccineDiseaseBO.deleteVaccineDiseaseByVaccineId(parsedVaccineId) > 0) {
                    if (createVaccineDisease(parsedVaccineId, listDiseases) > 0) {
                        status = "success";
                    }
                }
            }
        }
    }
    // Reply with a small JSON status document.
    JsonObject jsonObj = new JsonObject();
    jsonObj.addProperty("status", status);
    response.setContentType("application/json");
    response.setCharacterEncoding("UTF-8");
    response.getWriter().write(jsonObj.toString());
}
/**
 * Inserts one vaccine-disease link per disease id. Returns 1 when every insert succeeded
 * (including the empty-list case), 0 as soon as any insert fails.
 */
public int createVaccineDisease(int idVaccine, ArrayList<String> listDiseases) {
    VaccineDiseaseBO bo = new VaccineDiseaseBO();
    for (String idDisease : listDiseases) {
        VaccineDisease link = new VaccineDisease(idVaccine, Integer.parseInt(idDisease), "");
        if (bo.insertVaccineDisease(link) <= 0) {
            return 0;
        }
    }
    return 1;
}
}
|
|
// Copyright 2004-present Facebook. All Rights Reserved.
// This is based on ElementalHttpServer.java in the Apache httpcore
// examples.
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package com.facebook.stetho.server;
import android.annotation.SuppressLint;
import android.net.LocalServerSocket;
import android.net.LocalSocket;
import android.util.Log;
import com.facebook.stetho.common.LogUtil;
import com.facebook.stetho.common.ProcessUtil;
import com.facebook.stetho.common.Util;
import org.apache.http.ConnectionClosedException;
import org.apache.http.HttpException;
import org.apache.http.HttpServerConnection;
import org.apache.http.impl.DefaultConnectionReuseStrategy;
import org.apache.http.impl.DefaultHttpResponseFactory;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.CoreConnectionPNames;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.params.HttpParams;
import org.apache.http.protocol.*;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.BindException;
import java.net.SocketException;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Single-threaded accept loop serving the Stetho debugging protocol over an Android
 * {@link LocalServerSocket}. Each accepted connection is handed to a short-lived daemon
 * worker thread that runs the Apache HttpCore request-handling machinery.
 */
public class LocalSocketHttpServer {

  // Fixed typo: was WORKDER_THREAD_NAME_PREFIX (private constant, value unchanged).
  private static final String WORKER_THREAD_NAME_PREFIX = "StethoWorker";
  private static final int MAX_BIND_RETRIES = 2;
  private static final int TIME_BETWEEN_BIND_RETRIES_MS = 1000;
  private static final String SOCKET_NAME_PREFIX = "stetho_";

  /**
   * Convince {@code chrome://inspect/devices} that we're "one of them" :)
   */
  private static final String SOCKET_NAME_SUFFIX = "_devtools_remote";

  private static final AtomicInteger sThreadId = new AtomicInteger();

  private final RegistryInitializer mRegistryInitializer;
  private final String mAddress;
  private Thread mListenerThread;
  private boolean mStopped;
  private LocalServerSocket mServerSocket;

  /**
   * @param registryInitializer lazy initializer for the {@link HttpRequestHandlerRegistry}.
   *     This is only initialized after the first socket has connected, and this determines
   *     what handlers this server uses to process requests.
   */
  public LocalSocketHttpServer(RegistryInitializer registryInitializer) {
    this(registryInitializer, null /* address */);
  }

  /**
   * @param registryInitializer lazy initializer for the {@link HttpRequestHandlerRegistry}.
   *     This is only initialized after the first socket has connected, and this determines
   *     what handlers this server uses to process requests.
   * @param address the local socket address to listen on.
   */
  public LocalSocketHttpServer(RegistryInitializer registryInitializer, String address) {
    mRegistryInitializer = Util.throwIfNull(registryInitializer);
    mAddress = address;
  }

  /**
   * Binds to the address and listens for connections.
   * <p/>
   * If successful, this thread blocks forever or until {@link #stop} is called, whichever
   * happens first.
   *
   * @throws IOException Thrown on failure to bind the socket.
   */
  public void run() throws IOException {
    synchronized (this) {
      if (mStopped) {
        return;
      }
      // Record the listener thread so stop() can interrupt it.
      mListenerThread = Thread.currentThread();
    }
    String address = (mAddress != null) ? mAddress : getDefaultAddress();
    listenOnAddress(address);
  }

  private void listenOnAddress(String address) throws IOException {
    mServerSocket = bindToSocket(address);
    LogUtil.i("Listening on @" + address);

    // HTTP machinery is created lazily, after the first connection is accepted.
    HttpParams params = null;
    HttpService service = null;

    while (!Thread.interrupted()) {
      LocalSocketHttpServerConnection connection = new LocalSocketHttpServerConnection();
      try {
        LocalSocket socket = mServerSocket.accept();

        if (params == null) {
          params = createParams();
        }
        if (service == null) {
          service = createService(params);
        }
        connection.bind(socket, params);

        // Hand the connection to a daemon worker thread so the accept loop stays responsive.
        Thread t = new WorkerThread(service, connection);
        t.setDaemon(true);
        t.start();
      } catch (SocketException se) {
        // accept() typically fails with SocketException when stop() closes the socket.
        // Check the interrupt status WITHOUT clearing it (the original used
        // Thread.interrupted(), which cleared the flag and could prevent the loop
        // condition above from observing the interrupt).
        if (!Thread.currentThread().isInterrupted()) {
          LogUtil.w(se, "I/O error");
        }
      } catch (InterruptedIOException ex) {
        break;
      } catch (IOException e) {
        LogUtil.w(e, "I/O error initialising connection thread");
        break;
      }
    }
  }

  private static String getDefaultAddress() throws IOException {
    return
        SOCKET_NAME_PREFIX +
        ProcessUtil.getProcessName() +
        SOCKET_NAME_SUFFIX;
  }

  private HttpParams createParams() {
    return new BasicHttpParams()
        .setIntParameter(CoreConnectionPNames.SO_TIMEOUT, 5000)
        .setIntParameter(CoreConnectionPNames.SOCKET_BUFFER_SIZE, 8 * 1024)
        .setBooleanParameter(CoreConnectionPNames.STALE_CONNECTION_CHECK, false)
        .setBooleanParameter(CoreConnectionPNames.TCP_NODELAY, true)
        .setParameter(CoreProtocolPNames.ORIGIN_SERVER, "Stetho")
        .setParameter(CoreProtocolPNames.PROTOCOL_VERSION, "HTTP/1.1");
  }

  private HttpService createService(HttpParams params) {
    HttpRequestHandlerRegistry registry = mRegistryInitializer.getRegistry();

    // Standard response interceptors: Date, Server, Content-Length/Type, Connection.
    BasicHttpProcessor httpproc = new BasicHttpProcessor();
    httpproc.addInterceptor(new ResponseDate());
    httpproc.addInterceptor(new ResponseServer());
    httpproc.addInterceptor(new ResponseContent());
    httpproc.addInterceptor(new ResponseConnControl());

    HttpService service = new HttpService(
        httpproc,
        new DefaultConnectionReuseStrategy(),
        new DefaultHttpResponseFactory());
    service.setParams(params);
    service.setHandlerResolver(registry);
    return service;
  }

  /**
   * Stops the listener thread and unbinds the address.
   */
  public void stop() {
    synchronized (this) {
      mStopped = true;
      if (mListenerThread == null) {
        return;
      }
    }

    mListenerThread.interrupt();
    try {
      if (mServerSocket != null) {
        mServerSocket.close();
      }
    } catch (IOException ignored) {
      // Best effort: the listener is already being torn down.
    }
  }

  @Nonnull
  private static LocalServerSocket bindToSocket(String address) throws IOException {
    int retries = MAX_BIND_RETRIES;
    IOException firstException = null;
    do {
      try {
        if (LogUtil.isLoggable(Log.DEBUG)) {
          LogUtil.d("Trying to bind to @" + address);
        }
        return new LocalServerSocket(address);
      } catch (BindException be) {
        LogUtil.w(be, "Binding error, sleep " + TIME_BETWEEN_BIND_RETRIES_MS + " ms...");
        if (firstException == null) {
          firstException = be;
        }
        Util.sleepUninterruptibly(TIME_BETWEEN_BIND_RETRIES_MS);
      }
    } while (retries-- > 0);

    // All retries failed; surface the first (root-cause) bind failure.
    throw firstException;
  }

  /** Serves a single accepted connection and then closes it. */
  private static class WorkerThread extends Thread {

    private final HttpService httpservice;
    private final HttpServerConnection conn;

    public WorkerThread(
        final HttpService httpservice,
        final HttpServerConnection conn) {
      super(WORKER_THREAD_NAME_PREFIX + sThreadId.incrementAndGet());
      this.httpservice = httpservice;
      this.conn = conn;
    }

    @Override
    @SuppressLint("LogMethodNoExceptionInCatch")
    public void run() {
      HttpContext context = new BasicHttpContext(null);
      try {
        if (!Thread.interrupted() && conn.isOpen()) {
          httpservice.handleRequest(conn, context);
        }
      } catch (ConnectionClosedException ex) {
        LogUtil.w("Client closed connection: %s", ex);
      } catch (IOException ex) {
        LogUtil.w("I/O error: %s", ex);
      } catch (HttpException ex) {
        LogUtil.w("Unrecoverable HTTP protocol violation: %s", ex);
      } finally {
        try {
          conn.close();
        } catch (IOException ignore) {
        }
      }
    }
  }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.segment.file;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.collect.Sets.newHashSet;
import static java.io.File.createTempFile;
import static java.nio.ByteBuffer.allocate;
import static java.util.Collections.singleton;
import static org.apache.jackrabbit.oak.plugins.segment.SegmentVersion.V_11;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import com.google.common.collect.ImmutableList;
import org.apache.jackrabbit.oak.plugins.segment.RecordId;
import org.apache.jackrabbit.oak.plugins.segment.Segment;
import org.apache.jackrabbit.oak.plugins.segment.SegmentId;
import org.apache.jackrabbit.oak.plugins.segment.SegmentStore;
import org.apache.jackrabbit.oak.plugins.segment.SegmentTracker;
import org.apache.jackrabbit.oak.plugins.segment.SegmentWriter;
import org.apache.jackrabbit.oak.plugins.segment.file.TarWriterTest.SegmentGraphBuilder.Node;
import org.apache.jackrabbit.oak.plugins.segment.memory.MemoryStore;
import org.junit.Test;
/** Tests for {@code TarWriter#collectReferences}. */
public class TarWriterTest {

    /**
     * Regression test for OAK-2800: ids that are present in the TarWriter must be replaced
     * by their (transitive) references; ids unknown to the TarWriter are left untouched.
     */
    @Test
    public void collectReferences() throws IOException {
        SegmentGraphBuilder graphBuilder = new SegmentGraphBuilder();

        // a -> b -> c
        Node c = graphBuilder.createNode("c");
        Node b = graphBuilder.createNode("b", c);
        Node a = graphBuilder.createNode("a", b);

        // n has no references
        Node n = graphBuilder.createNode("n");

        // y -> z
        Node z = graphBuilder.createNode("z");
        Node y = graphBuilder.createNode("y", z);

        // Sanity-check the constructed reference graph.
        assertEquals(singleton(b), a.getReferences());
        assertEquals(singleton(c), b.getReferences());
        assertTrue(c.getReferences().isEmpty());
        assertEquals(singleton(z), y.getReferences());
        assertTrue(z.getReferences().isEmpty());

        File tar = createTempFile(getClass().getName(), "tar");
        TarWriter tarWriter = new TarWriter(tar);
        try {
            // y, b, a and n are written to the tar file; c and z stay outside it.
            y.write(tarWriter);
            b.write(tarWriter);
            a.write(tarWriter);
            n.write(tarWriter);

            Set<UUID> references = newHashSet();
            references.add(a.getUUID());
            tarWriter.collectReferences(references);
            // Fixed typo: "an transitive" -> "a transitive".
            assertEquals(
                c + " must be in references as " + a + " has a transitive reference to " + c + " through " + b + ", " +
                a + " must not be in references as " + a + " is in the TarWriter, " +
                "no other elements must be in references.",
                singleton(c), toNodes(graphBuilder, references));

            references.clear();
            references.add(b.getUUID());
            tarWriter.collectReferences(references);
            // Fixed message: the expected set is singleton(c) — b is in the TarWriter, so it is
            // replaced by its direct reference c. The old message wrongly claimed b should be
            // in references.
            assertEquals(
                c + " must be in references as " + b + " has a direct reference to " + c + ", " +
                b + " must not be in references as " + b + " is in the TarWriter, " +
                "no other elements must be in references.",
                singleton(c), toNodes(graphBuilder, references));

            references.clear();
            references.add(y.getUUID());
            tarWriter.collectReferences(references);
            assertEquals(
                z + " must be in references as " + y + " has a direct reference to " + z + ", " +
                y + " must not be in references as " + y + " is in the TarWriter, " +
                "no other elements must be in references.",
                singleton(z), toNodes(graphBuilder, references));

            references.clear();
            references.add(c.getUUID());
            tarWriter.collectReferences(references);
            assertEquals(
                c + " must be in references as " + c + " is not in the TarWriter, " +
                "no other elements must be in references.",
                singleton(c), toNodes(graphBuilder, references));

            references.clear();
            references.add(z.getUUID());
            tarWriter.collectReferences(references);
            // Fixed missing comma after "TarWriter".
            assertEquals(
                z + " must be in references as " + z + " is not in the TarWriter, " +
                "no other elements must be in references.",
                singleton(z), toNodes(graphBuilder, references));

            references.clear();
            references.add(n.getUUID());
            tarWriter.collectReferences(references);
            assertTrue(
                "references must be empty as " + n + " has no references " +
                "and " + n + " is in the TarWriter",
                references.isEmpty());
        } finally {
            tarWriter.close();
        }
    }

    /** Maps a set of segment UUIDs back to the graph nodes that produced them. */
    private static Set<Node> toNodes(SegmentGraphBuilder graphBuilder, Set<UUID> uuids) {
        Set<Node> nodes = newHashSet();
        for (UUID uuid : uuids) {
            nodes.add(graphBuilder.getNode(uuid));
        }
        return nodes;
    }

    /** Builds named segments with controlled inter-segment references on top of a MemoryStore. */
    public static class SegmentGraphBuilder {
        private final Map<SegmentId, ByteBuffer> segments = newHashMap();
        private final Map<UUID, Node> nodes = newHashMap();
        private final SegmentStore store = new MemoryStore() {
            @Override
            public void writeSegment(SegmentId id, byte[] data, int offset, int length) {
                super.writeSegment(id, data, offset, length);
                // Keep a private copy of every written segment so nodes can be re-read later.
                ByteBuffer buffer = allocate(length);
                buffer.put(data, offset, length);
                buffer.rewind();
                segments.put(id, buffer);
            }
        };
        private final SegmentTracker tracker = new SegmentTracker(store, V_11);
        private final SegmentWriter writer = new SegmentWriter(store, tracker, V_11);

        private int nextNodeNo;

        /** A named wrapper around a written segment. */
        public class Node {
            final String name;
            final RecordId selfId;
            final byte[] data;
            final Segment segment;

            Node(String name, RecordId selfId, ByteBuffer data) {
                this.name = name;
                this.selfId = selfId;
                this.data = data.array();
                segment = new Segment(tracker, selfId.getSegmentId(), data);
            }

            public void write(TarWriter tarWriter) throws IOException {
                long msb = getSegmentId().getMostSignificantBits();
                long lsb = getSegmentId().getLeastSignificantBits();
                tarWriter.writeEntry(msb, lsb, data, 0, data.length);
            }

            public UUID getUUID() {
                return newUUID(getSegmentId());
            }

            private SegmentId getSegmentId() {
                return selfId.getSegmentId();
            }

            /** Nodes referenced by this node's segment, excluding the node itself. */
            public Set<Node> getReferences() {
                Set<Node> references = newHashSet();
                for (SegmentId segmentId : segment.getReferencedIds()) {
                    references.add(nodes.get(newUUID(segmentId)));
                }
                references.remove(this);
                return references;
            }

            @Override
            public String toString() {
                return name;
            }

            void addReference(SegmentWriter writer) {
                // Need to write a proper list as singleton lists are optimised
                // to just returning the recordId of its single element
                writer.writeList(ImmutableList.of(selfId, selfId));
            }
        }

        public Node createNode(String name, Node... refs) {
            RecordId selfId = writer.writeString("id-" + nextNodeNo++);
            for (Node ref : refs) {
                ref.addReference(writer);
            }
            writer.flush();
            SegmentId segmentId = selfId.getSegmentId();
            Node node = new Node(name, selfId, segments.get(segmentId));
            nodes.put(newUUID(segmentId), node);
            return node;
        }

        public Node getNode(UUID uuid) {
            return nodes.get(uuid);
        }

        private static UUID newUUID(SegmentId segmentId) {
            return new UUID(segmentId.getMostSignificantBits(), segmentId.getLeastSignificantBits());
        }
    }
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.composite;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.elasticsearch.index.mapper.NumberFieldMapper.NumberType.DOUBLE;
import static org.elasticsearch.index.mapper.NumberFieldMapper.NumberType.LONG;
import static org.hamcrest.Matchers.equalTo;
public class CompositeValuesCollectorQueueTests extends AggregatorTestCase {
// Pairs a mapped field type with the Java class of the comparable values it produces.
static class ClassAndName {
    final MappedFieldType fieldType;
    final Class<? extends Comparable<?>> clazz;
    ClassAndName(MappedFieldType fieldType, Class<? extends Comparable<?>> clazz) {
        this.fieldType = fieldType;
        this.clazz = clazz;
    }
}
// Single long-valued source.
public void testRandomLong() throws IOException {
    testRandomCase(new ClassAndName(createNumber("long", LONG) , Long.class));
}
// Single double-valued source.
public void testRandomDouble() throws IOException {
    testRandomCase(new ClassAndName(createNumber("double", DOUBLE) , Double.class));
}
// Composite of a double source followed by a long source.
public void testRandomDoubleAndLong() throws IOException {
    testRandomCase(new ClassAndName(createNumber("double", DOUBLE), Double.class),
        new ClassAndName(createNumber("long", LONG), Long.class));
}
// Composite of a double source followed by a keyword source.
public void testRandomDoubleAndKeyword() throws IOException {
    testRandomCase(new ClassAndName(createNumber("double", DOUBLE), Double.class),
        new ClassAndName(createKeyword("keyword"), BytesRef.class));
}
// Single keyword-valued source.
public void testRandomKeyword() throws IOException {
    testRandomCase(new ClassAndName(createKeyword("keyword"), BytesRef.class));
}
public void testRandomLongAndKeyword() throws IOException {
testRandomCase(new ClassAndName(createNumber("long", LONG), Long.class),
new ClassAndName(createKeyword("keyword"), BytesRef.class));
}
public void testRandomLongAndDouble() throws IOException {
testRandomCase(new ClassAndName(createNumber("long", LONG), Long.class),
new ClassAndName(createNumber("double", DOUBLE) , Double.class));
}
public void testRandomKeywordAndLong() throws IOException {
testRandomCase(new ClassAndName(createKeyword("keyword"), BytesRef.class),
new ClassAndName(createNumber("long", LONG), Long.class));
}
public void testRandomKeywordAndDouble() throws IOException {
testRandomCase(new ClassAndName(createKeyword("keyword"), BytesRef.class),
new ClassAndName(createNumber("double", DOUBLE), Double.class));
}
public void testRandom() throws IOException {
int numTypes = randomIntBetween(3, 8);
ClassAndName[] types = new ClassAndName[numTypes];
for (int i = 0; i < numTypes; i++) {
int rand = randomIntBetween(0, 2);
switch (rand) {
case 0:
types[i] = new ClassAndName(createNumber(Integer.toString(i), LONG), Long.class);
break;
case 1:
types[i] = new ClassAndName(createNumber(Integer.toString(i), DOUBLE), Double.class);
break;
case 2:
types[i] = new ClassAndName(createKeyword(Integer.toString(i)), BytesRef.class);
break;
default:
assert(false);
}
}
testRandomCase(types);
}
private void testRandomCase(ClassAndName... types) throws IOException {
testRandomCase(true, true, types);
testRandomCase(true, false, types);
testRandomCase(false, true, types);
testRandomCase(false, false, types);
}
private void testRandomCase(boolean forceMerge, boolean missingBucket, ClassAndName... types) throws IOException {
final BigArrays bigArrays = BigArrays.NON_RECYCLING_INSTANCE;
int numDocs = randomIntBetween(50, 100);
List<Comparable<?>[]> possibleValues = new ArrayList<>();
for (ClassAndName type : types) {
final Comparable<?>[] values;
int numValues = randomIntBetween(1, numDocs * 2);
values = new Comparable[numValues];
if (type.clazz == Long.class) {
for (int i = 0; i < numValues; i++) {
values[i] = randomLong();
}
} else if (type.clazz == Double.class) {
for (int i = 0; i < numValues; i++) {
values[i] = randomDouble();
}
} else if (type.clazz == BytesRef.class) {
for (int i = 0; i < numValues; i++) {
values[i] = new BytesRef(randomAlphaOfLengthBetween(5, 50));
}
} else {
assert (false);
}
possibleValues.add(values);
}
Set<CompositeKey> keys = new HashSet<>();
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory, new KeywordAnalyzer())) {
for (int i = 0; i < numDocs; i++) {
Document document = new Document();
List<List<Comparable<?>>> docValues = new ArrayList<>();
boolean hasAllField = true;
for (int j = 0; j < types.length; j++) {
int numValues = randomIntBetween(0, 5);
List<Comparable<?>> values = new ArrayList<>();
if (numValues == 0) {
hasAllField = false;
if (missingBucket) {
values.add(null);
}
} else {
for (int k = 0; k < numValues; k++) {
values.add(possibleValues.get(j)[randomIntBetween(0, possibleValues.get(j).length - 1)]);
if (types[j].clazz == Long.class) {
long value = (Long) values.get(k);
document.add(new SortedNumericDocValuesField(types[j].fieldType.name(), value));
document.add(new LongPoint(types[j].fieldType.name(), value));
} else if (types[j].clazz == Double.class) {
document.add(new SortedNumericDocValuesField(types[j].fieldType.name(),
NumericUtils.doubleToSortableLong((Double) values.get(k))));
} else if (types[j].clazz == BytesRef.class) {
BytesRef value = (BytesRef) values.get(k);
document.add(new SortedSetDocValuesField(types[j].fieldType.name(), (BytesRef) values.get(k)));
document.add(new TextField(types[j].fieldType.name(), value.utf8ToString(), Field.Store.NO));
} else {
assert (false);
}
}
}
docValues.add(values);
}
if (hasAllField || missingBucket) {
List<CompositeKey> comb = createListCombinations(docValues);
keys.addAll(comb);
}
indexWriter.addDocument(document);
}
if (forceMerge) {
indexWriter.forceMerge(1);
}
}
IndexReader reader = DirectoryReader.open(directory);
int size = randomIntBetween(1, keys.size());
SingleDimensionValuesSource<?>[] sources = new SingleDimensionValuesSource[types.length];
for (int i = 0; i < types.length; i++) {
final MappedFieldType fieldType = types[i].fieldType;
if (types[i].clazz == Long.class) {
sources[i] = new LongValuesSource(
bigArrays,
fieldType,
context -> DocValues.getSortedNumeric(context.reader(), fieldType.name()),
value -> value,
DocValueFormat.RAW,
missingBucket,
size,
1
);
} else if (types[i].clazz == Double.class) {
sources[i] = new DoubleValuesSource(
bigArrays,
fieldType,
context -> FieldData.sortableLongBitsToDoubles(DocValues.getSortedNumeric(context.reader(), fieldType.name())),
DocValueFormat.RAW,
missingBucket,
size,
1
);
} else if (types[i].clazz == BytesRef.class) {
if (forceMerge) {
// we don't create global ordinals but we test this mode when the reader has a single segment
// since ordinals are global in this case.
sources[i] = new GlobalOrdinalValuesSource(
bigArrays,
fieldType,
context -> DocValues.getSortedSet(context.reader(), fieldType.name()),
DocValueFormat.RAW,
missingBucket,
size,
1
);
} else {
sources[i] = new BinaryValuesSource(
bigArrays,
(b) -> {},
fieldType,
context -> FieldData.toString(DocValues.getSortedSet(context.reader(), fieldType.name())),
DocValueFormat.RAW,
missingBucket,
size,
1
);
}
} else {
assert(false);
}
}
CompositeKey[] expected = keys.toArray(new CompositeKey[0]);
Arrays.sort(expected, (a, b) -> compareKey(a, b));
for (boolean withProducer : new boolean[] {true, false}) {
int pos = 0;
CompositeKey last = null;
while (pos < size) {
final CompositeValuesCollectorQueue queue =
new CompositeValuesCollectorQueue(BigArrays.NON_RECYCLING_INSTANCE, sources, size, last);
final SortedDocsProducer docsProducer = sources[0].createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery());
for (LeafReaderContext leafReaderContext : reader.leaves()) {
final LeafBucketCollector leafCollector = new LeafBucketCollector() {
@Override
public void collect(int doc, long bucket) throws IOException {
queue.addIfCompetitive();
}
};
if (docsProducer != null && withProducer) {
assertEquals(DocIdSet.EMPTY,
docsProducer.processLeaf(new MatchAllDocsQuery(), queue, leafReaderContext, false));
} else {
final LeafBucketCollector queueCollector = queue.getLeafCollector(leafReaderContext, leafCollector);
final Bits liveDocs = leafReaderContext.reader().getLiveDocs();
for (int i = 0; i < leafReaderContext.reader().maxDoc(); i++) {
if (liveDocs == null || liveDocs.get(i)) {
queueCollector.collect(i);
}
}
}
}
assertEquals(size, Math.min(queue.size(), expected.length - pos));
int ptr = pos + (queue.size() - 1);
pos += queue.size();
last = null;
while (queue.size() > pos) {
CompositeKey key = queue.toCompositeKey(queue.pop());
if (last == null) {
last = key;
}
assertThat(key, equalTo(expected[ptr--]));
}
}
}
reader.close();
}
}
private static MappedFieldType createNumber(String name, NumberFieldMapper.NumberType type) {
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(type);
fieldType.setIndexOptions(IndexOptions.DOCS);
fieldType.setName(name);
fieldType.setHasDocValues(true);
fieldType.freeze();
return fieldType;
}
private static MappedFieldType createKeyword(String name) {
MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType();
fieldType.setIndexOptions(IndexOptions.DOCS);
fieldType.setName(name);
fieldType.setHasDocValues(true);
fieldType.freeze();
return fieldType;
}
private static int compareKey(CompositeKey key1, CompositeKey key2) {
assert key1.size() == key2.size();
for (int i = 0; i < key1.size(); i++) {
if (key1.get(i) == null) {
if (key2.get(i) == null) {
continue;
}
return -1;
} else if (key2.get(i) == null) {
return 1;
}
Comparable<Object> cmp1 = (Comparable<Object>) key1.get(i);
int cmp = cmp1.compareTo(key2.get(i));
if (cmp != 0) {
return cmp;
}
}
return 0;
}
private static List<CompositeKey> createListCombinations(List<List<Comparable<?>>> values) {
List<CompositeKey> keys = new ArrayList<>();
createListCombinations(new Comparable[values.size()], values, 0, values.size(), keys);
return keys;
}
private static void createListCombinations(Comparable<?>[] key, List<List<Comparable<?>>> values,
int pos, int maxPos, List<CompositeKey> keys) {
if (pos == maxPos) {
keys.add(new CompositeKey(key.clone()));
} else {
for (Comparable<?> val : values.get(pos)) {
key[pos] = val;
createListCombinations(key, values, pos + 1, maxPos, keys);
}
}
}
}
|
|
/*
* Copyright 2007 Kasper B. Graversen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.supercsv.io;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.supercsv.SuperCsvTestUtils.ADA;
import static org.supercsv.SuperCsvTestUtils.ADA_STRING;
import static org.supercsv.SuperCsvTestUtils.ALICE;
import static org.supercsv.SuperCsvTestUtils.ALICE_STRING;
import static org.supercsv.SuperCsvTestUtils.BILL;
import static org.supercsv.SuperCsvTestUtils.BILL_STRING;
import static org.supercsv.SuperCsvTestUtils.BOB;
import static org.supercsv.SuperCsvTestUtils.BOB_STRING;
import static org.supercsv.SuperCsvTestUtils.CSV_FILE;
import static org.supercsv.SuperCsvTestUtils.CUSTOMERS;
import static org.supercsv.SuperCsvTestUtils.GRACE;
import static org.supercsv.SuperCsvTestUtils.GRACE_STRING;
import static org.supercsv.SuperCsvTestUtils.HEADER;
import static org.supercsv.SuperCsvTestUtils.JOHN;
import static org.supercsv.SuperCsvTestUtils.JOHN_STRING;
import static org.supercsv.SuperCsvTestUtils.LARRY;
import static org.supercsv.SuperCsvTestUtils.LARRY_STRING;
import static org.supercsv.SuperCsvTestUtils.MIRANDA;
import static org.supercsv.SuperCsvTestUtils.MIRANDA_STRING;
import static org.supercsv.SuperCsvTestUtils.PARTIAL_HEADER;
import static org.supercsv.SuperCsvTestUtils.READ_PROCESSORS;
import static org.supercsv.SuperCsvTestUtils.SERGEI;
import static org.supercsv.SuperCsvTestUtils.SERGEI_STRING;
import static org.supercsv.SuperCsvTestUtils.STEVE;
import static org.supercsv.SuperCsvTestUtils.STEVE_STRING;
import static org.supercsv.SuperCsvTestUtils.STRING_CUSTOMERS;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.supercsv.cellprocessor.ift.CellProcessor;
import org.supercsv.exception.SuperCsvReflectionException;
import org.supercsv.mock.Customer;
import org.supercsv.mock.CustomerBean;
import org.supercsv.mock.CustomerStringBean;
import org.supercsv.mock.PersonBean;
import org.supercsv.prefs.CsvPreference;
/**
* Tests the CsvBeanReader class.
*
* @author James Bassett
* @author Pietro Aragona
*/
public class CsvBeanReaderTest {

    private static final CsvPreference PREFS = CsvPreference.STANDARD_PREFERENCE;

    // Shared CSV data source. NOTE(review): both beanReader and tokenizerBeanReader wrap
    // this same StringReader; each test exercises only one of them, so the shared read
    // position is presumably never an issue — confirm if tests are ever combined.
    private Reader reader;

    private CsvBeanReader beanReader;

    // Same behaviour as beanReader, but built via the Tokenizer-based constructor.
    private CsvBeanReader tokenizerBeanReader;

    /**
     * Sets up the reader for the tests.
     */
    @Before
    public void setUp() {
        reader = new StringReader(CSV_FILE);
        beanReader = new CsvBeanReader(reader, PREFS);
        final Tokenizer tokenizer = new Tokenizer(reader, PREFS);
        tokenizerBeanReader = new CsvBeanReader(tokenizer, PREFS);
    }

    /**
     * Closes the readers after the test.
     */
    @After
    public void tearDown() throws IOException {
        beanReader.close();
        tokenizerBeanReader.close();
    }

    /**
     * Tests the read() method using processors.
     * Customers are read in fixture order; a final read returns null at EOF.
     */
    @Test
    public void testReadWithProcessors() throws IOException {
        final String[] header = beanReader.getHeader(true);
        assertArrayEquals(HEADER, header);
        assertEquals(JOHN, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(BOB, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(ALICE, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(BILL, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(MIRANDA, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(STEVE, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(ADA, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(SERGEI, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(LARRY, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertEquals(GRACE, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        assertNull(beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
    }

    /**
     * Tests the read() method using processors, but only mapping a few columns.
     */
    @Test
    public void testPartialReadWithProcessors() throws IOException {
        assertArrayEquals(HEADER, beanReader.getHeader(true));
        // PARTIAL_HEADER maps only first/last name and email; other columns are ignored.
        final String[] header = PARTIAL_HEADER;
        for( CustomerBean fullCustomer : CUSTOMERS ) {
            // create the expected customer (same as full but with only first/last name and email)
            CustomerBean expectedCustomer = new CustomerBean();
            expectedCustomer.setFirstName(fullCustomer.getFirstName());
            expectedCustomer.setLastName(fullCustomer.getLastName());
            expectedCustomer.setEmail(fullCustomer.getEmail());
            assertEquals(expectedCustomer, beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
        }
        assertNull(beanReader.read(CustomerBean.class, header, READ_PROCESSORS));
    }

    /**
     * Tests the read() method with no processors.
     * Without processors every column stays a String, hence the *_STRING fixtures.
     */
    @Test
    public void testRead() throws IOException {
        final String[] header = beanReader.getHeader(true);
        assertArrayEquals(HEADER, header);
        assertEquals(JOHN_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(BOB_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(ALICE_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(BILL_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(MIRANDA_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(STEVE_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(ADA_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(SERGEI_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(LARRY_STRING, beanReader.read(CustomerStringBean.class, header));
        assertEquals(GRACE_STRING, beanReader.read(CustomerStringBean.class, header));
        assertNull(beanReader.read(CustomerStringBean.class, header));
    }

    /**
     * Tests the read() method, but only mapping a few columns.
     */
    @Test
    public void testPartialRead() throws IOException {
        assertArrayEquals(HEADER, beanReader.getHeader(true));
        final String[] header = PARTIAL_HEADER;
        for( CustomerStringBean fullCustomer : STRING_CUSTOMERS ) {
            // create the expected customer (same as full but with only first/last name and email)
            // (a CustomerBean works here because the mapped columns are String-typed setters)
            CustomerBean expectedCustomer = new CustomerBean();
            expectedCustomer.setFirstName(fullCustomer.getFirstName());
            expectedCustomer.setLastName(fullCustomer.getLastName());
            expectedCustomer.setEmail(fullCustomer.getEmail());
            assertEquals(expectedCustomer, beanReader.read(CustomerBean.class, header));
        }
        assertNull(beanReader.read(CustomerBean.class, header));
    }

    /**
     * Tests the read() method with no processors, populating an existing bean.
     */
    @Test
    public void testReadIntoExistingBean() throws IOException {
        final String[] header = beanReader.getHeader(true);
        assertArrayEquals(HEADER, header);
        assertEquals(JOHN_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(BOB_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(ALICE_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(BILL_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(MIRANDA_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(STEVE_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(ADA_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(SERGEI_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(LARRY_STRING, beanReader.read(new CustomerStringBean(), header));
        assertEquals(GRACE_STRING, beanReader.read(new CustomerStringBean(), header));
        assertNull(beanReader.read(new CustomerStringBean(), header));
    }

    /**
     * Tests the read() method using processors, populating an existing bean.
     */
    @Test
    public void testReadIntoExistingBeanWithProcessors() throws IOException {
        final String[] header = beanReader.getHeader(true);
        assertArrayEquals(HEADER, header);
        assertEquals(JOHN, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(BOB, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(ALICE, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(BILL, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(MIRANDA, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(STEVE, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(ADA, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(SERGEI, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(LARRY, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertEquals(GRACE, beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
        assertNull(beanReader.read(new CustomerBean(), header, READ_PROCESSORS));
    }

    /**
     * Tests the read() method with no processors, using the tokenizer version of CsvBeanReader (just to make sure it
     * behaves exactly the same as the reader version).
     */
    @Test
    public void testReadUsingTokenizerReader() throws IOException {
        final String[] header = tokenizerBeanReader.getHeader(true);
        assertArrayEquals(HEADER, header);
        assertEquals(JOHN_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(BOB_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(ALICE_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(BILL_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(MIRANDA_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(STEVE_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(ADA_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(SERGEI_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(LARRY_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertEquals(GRACE_STRING, tokenizerBeanReader.read(CustomerStringBean.class, header));
        assertNull(tokenizerBeanReader.read(CustomerStringBean.class, header));
    }

    /**
     * Tests the read() method with an interface and using processors.
     */
    @Test
    public void testReadWithProcessorsUsingInterface() throws IOException {
        assertArrayEquals(HEADER, beanReader.getHeader(true));
        // only map the fields relevant to the interface
        final String[] header = new String[] { "customerNo", null, null, null, null, "mailingAddress", null, null, null,
            null, "loyaltyPoints" };
        int i = 0;
        Customer customer;
        while( (customer = beanReader.read(Customer.class, header, READ_PROCESSORS)) != null ) {
            assertEquals(CUSTOMERS.get(i).getCustomerNo(), customer.getCustomerNo());
            assertEquals(CUSTOMERS.get(i).getMailingAddress(), customer.getMailingAddress());
            assertEquals(CUSTOMERS.get(i).getLoyaltyPoints(), customer.getLoyaltyPoints());
            i++;
        }
        // +1 accounts for the header row consumed by getHeader(true)
        assertEquals(CUSTOMERS.size() + 1, beanReader.getRowNumber());
    }

    /**
     * Tests the read() method with an class that has no default no-arg constructor.
     */
    @Test(expected = SuperCsvReflectionException.class)
    public void testReadWithNonJavabean() throws IOException {
        beanReader.read(Integer.class, HEADER);
    }

    /**
     * Tests the read() method, with a null bean class.
     */
    @Test(expected = NullPointerException.class)
    public void testReadWithNullBeanClass() throws IOException {
        beanReader.read(null, HEADER);
    }

    /**
     * Tests the read() method, with a null bean.
     */
    @Test(expected = NullPointerException.class)
    public void testReadWithNullBean() throws IOException {
        // cast disambiguates the read(Object, ...) overload from read(Class, ...)
        beanReader.read((Object) null, HEADER);
    }

    /**
     * Tests the read() method, with a null name mapping array.
     */
    @Test(expected = NullPointerException.class)
    public void testReadWithNullNameMapping() throws IOException {
        beanReader.read(PersonBean.class, (String[]) null);
    }

    /**
     * Tests the read() method, with a null name mapping array.
     */
    @Test(expected = NullPointerException.class)
    public void testReadIntoBeanWithNullNameMapping() throws IOException {
        beanReader.read(new PersonBean(), (String[]) null);
    }

    /**
     * Tests the read() method, with a name mapping array that's not the right size.
     */
    @Test(expected = IllegalArgumentException.class)
    public void testReadWithInvalidSizeNameMapping() throws IOException {
        beanReader.getHeader(true);
        beanReader.read(PersonBean.class, new String[] { null, "firstName" });
    }

    /**
     * Tests the read() method, with a name mapping array that's not the right size.
     */
    @Test(expected = IllegalArgumentException.class)
    public void testReadIntoBeanWithInvalidSizeNameMapping() throws IOException {
        beanReader.getHeader(true);
        beanReader.read(new PersonBean(), new String[] { null, "firstName" });
    }

    /**
     * Tests the read() method (with processors), with a null bean class.
     */
    @Test(expected = NullPointerException.class)
    public void testReadProcessorsWithNullBeanClass() throws IOException {
        beanReader.read(null, HEADER, READ_PROCESSORS);
    }

    /**
     * Tests the read() method (with processors), with a null bean.
     */
    @Test(expected = NullPointerException.class)
    public void testReadUsingProcessorsWithNullBean() throws IOException {
        beanReader.read((Object) null, HEADER, READ_PROCESSORS);
    }

    /**
     * Tests the read() method (with processors), with a null name mapping array.
     */
    @Test(expected = NullPointerException.class)
    public void testReadProcessorsWithNullNameMapping() throws IOException {
        beanReader.read(PersonBean.class, (String[]) null, READ_PROCESSORS);
    }

    /**
     * Tests the read() method (with processors), with a null name mapping array.
     */
    @Test(expected = NullPointerException.class)
    public void testReadIntoBeanUsingProcessorsWithNullNameMapping() throws IOException {
        beanReader.read(new PersonBean(), (String[]) null, READ_PROCESSORS);
    }

    /**
     * Tests the read() method (with processors), with a null cell processor array.
     */
    @Test(expected = NullPointerException.class)
    public void testReadProcessorsWithNullProcessors() throws IOException {
        beanReader.read(PersonBean.class, HEADER, (CellProcessor[]) null);
    }

    /**
     * Tests the read() method (with processors), with a null cell processor array.
     */
    @Test(expected = NullPointerException.class)
    public void testReadIntoBeanUsingProcessorsWithNullProcessors() throws IOException {
        beanReader.read(new PersonBean(), HEADER, (CellProcessor[]) null);
    }

    /**
     * Tests the Reader constructor with a null Reader.
     */
    @SuppressWarnings("resource")
    @Test(expected = NullPointerException.class)
    public void testReaderConstructorWithNullReader() {
        new CsvBeanReader((Reader) null, PREFS);
    }

    /**
     * Tests the Reader constructor with a null preference.
     */
    @SuppressWarnings("resource")
    @Test(expected = NullPointerException.class)
    public void testReaderConstructorWithNullPreferences() {
        new CsvBeanReader(reader, null);
    }

    /**
     * Tests the Tokenizer constructor with a null Reader.
     */
    @SuppressWarnings("resource")
    @Test(expected = NullPointerException.class)
    public void testTokenizerConstructorWithNullReader() {
        new CsvBeanReader((Tokenizer) null, PREFS);
    }

    /**
     * Tests the Tokenizer constructor with a null preference.
     */
    @SuppressWarnings("resource")
    @Test(expected = NullPointerException.class)
    public void testTokenizerConstructorWithNullPreferences() {
        new CsvBeanReader(new Tokenizer(reader, PREFS), null);
    }

    /**
     * Tests the read() method when invoking the bean's constructor throws IllegalAccessException.
     */
    @Test(expected = SuperCsvReflectionException.class)
    public void testBeanInstantationThrowingIllegalAccessException() throws IOException {
        beanReader.read(IllegalAccessBean.class, HEADER);
    }

    /**
     * Tests the read() method when invoking a setter throws an Exception.
     */
    @SuppressWarnings("resource")
    @Test(expected = SuperCsvReflectionException.class)
    public void testSetterThrowingException() throws IOException {
        new CsvBeanReader(new StringReader("value"), PREFS).read(ExceptionBean.class, "illegalArgument");
    }

    /**
     * Bean to test exceptions when invoking setters using CsvBeanReader.
     */
    public static class ExceptionBean extends CustomerBean {

        public void setIllegalArgument(String s) {
            throw new IllegalArgumentException("i don't like it!");
        }

    }

    /**
     * Bean to test exceptions when invoking the constructor using CsvBeanWriter.
     */
    public static class IllegalAccessBean extends CustomerBean {

        public IllegalAccessBean() throws IllegalAccessException {
            throw new IllegalAccessException("naughty naughty!");
        }

    }
}
|
|
/*
* Copyright 2007-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Project: JGentleFramework
*/
package org.jgentleframework.utils;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
/**
* Simple utility methods for file and stream copying. All copy methods use a
* block size of 4096 bytes, and close all affected streams when done.
* <p>
* Mainly for use within the framework, but also useful for application code.
*
* @author Quoc Chung - mailto: <a
* href="mailto:[email protected]">[email protected]</a>
* @date Feb 13, 2008
*/
public final class FileObjectUtils {

	/** Copy buffer size, in bytes, used by all stream/reader copy methods. */
	public static final int BUFFER_SIZE = 4096;

	/** Utility class holding only static methods; not instantiable. */
	private FileObjectUtils() {
	}

	/**
	 * Copy the contents of the given input File to the given output File.
	 *
	 * @param in
	 *            the file to copy from
	 * @param out
	 *            the file to copy to
	 * @return the number of bytes copied
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static int copy(File in, File out) throws IOException {
		Assertor.notNull(in, "No input File specified");
		Assertor.notNull(out, "No output File specified");
		// NOTE(review): if opening the output stream throws, the already-opened input
		// stream leaks. Harmless in practice but worth confirming/tightening.
		return copy(new BufferedInputStream(new FileInputStream(in)),
				new BufferedOutputStream(new FileOutputStream(out)));
	}

	/**
	 * Copy the contents of the given byte array to the given output File.
	 *
	 * @param in
	 *            the byte array to copy from
	 * @param out
	 *            the file to copy to
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static void copy(byte[] in, File out) throws IOException {
		Assertor.notNull(in, "No input byte array specified");
		Assertor.notNull(out, "No output File specified");
		ByteArrayInputStream inStream = new ByteArrayInputStream(in);
		OutputStream outStream = new BufferedOutputStream(new FileOutputStream(
				out));
		// copy(InputStream, OutputStream) closes both streams when done.
		copy(inStream, outStream);
	}

	/**
	 * Copy the contents of the given input File into a new byte array.
	 *
	 * @param in
	 *            the file to copy from
	 * @return the new byte array that has been copied to
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static byte[] copyToByteArray(File in) throws IOException {
		Assertor.notNull(in, "No input File specified");
		return copyToByteArray(new BufferedInputStream(new FileInputStream(in)));
	}

	/**
	 * Copy the contents of the given InputStream to the given OutputStream.
	 * Closes both streams when done.
	 *
	 * @param in
	 *            the stream to copy from
	 * @param out
	 *            the stream to copy to
	 * @return the number of bytes copied
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static int copy(InputStream in, OutputStream out) throws IOException {
		Assertor.notNull(in, "No InputStream specified");
		Assertor.notNull(out, "No OutputStream specified");
		try {
			int byteCount = 0;
			byte[] buffer = new byte[BUFFER_SIZE];
			int bytesRead = -1;
			while ((bytesRead = in.read(buffer)) != -1) {
				out.write(buffer, 0, bytesRead);
				byteCount += bytesRead;
			}
			out.flush();
			return byteCount;
		}
		finally {
			// Best-effort cleanup: close failures are deliberately ignored so they
			// never mask an exception thrown by the copy itself.
			try {
				in.close();
			}
			catch (IOException ignored) {
			}
			try {
				out.close();
			}
			catch (IOException ignored) {
			}
		}
	}

	/**
	 * Copy the contents of the given byte array to the given OutputStream.
	 * Closes the stream when done.
	 *
	 * @param in
	 *            the byte array to copy from
	 * @param out
	 *            the OutputStream to copy to
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static void copy(byte[] in, OutputStream out) throws IOException {
		Assertor.notNull(in, "No input byte array specified");
		Assertor.notNull(out, "No OutputStream specified");
		try {
			out.write(in);
		}
		finally {
			// Best-effort close; see copy(InputStream, OutputStream).
			try {
				out.close();
			}
			catch (IOException ignored) {
			}
		}
	}

	/**
	 * Copy the contents of the given InputStream into a new byte array. Closes
	 * the stream when done.
	 *
	 * @param in
	 *            the stream to copy from
	 * @return the new byte array that has been copied to
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static byte[] copyToByteArray(InputStream in) throws IOException {
		ByteArrayOutputStream out = new ByteArrayOutputStream(BUFFER_SIZE);
		copy(in, out);
		return out.toByteArray();
	}

	/**
	 * Copy the contents of the given Reader to the given Writer. Closes both
	 * when done.
	 *
	 * @param in
	 *            the Reader to copy from
	 * @param out
	 *            the Writer to copy to
	 * @return the number of characters copied
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static int copy(Reader in, Writer out) throws IOException {
		Assertor.notNull(in, "No Reader specified");
		Assertor.notNull(out, "No Writer specified");
		try {
			// Character-based copy, so the counters track chars (not bytes).
			int charCount = 0;
			char[] buffer = new char[BUFFER_SIZE];
			int charsRead = -1;
			while ((charsRead = in.read(buffer)) != -1) {
				out.write(buffer, 0, charsRead);
				charCount += charsRead;
			}
			out.flush();
			return charCount;
		}
		finally {
			// Best-effort cleanup; see copy(InputStream, OutputStream).
			try {
				in.close();
			}
			catch (IOException ignored) {
			}
			try {
				out.close();
			}
			catch (IOException ignored) {
			}
		}
	}

	/**
	 * Copy the contents of the given String to the given output Writer. Closes
	 * the writer when done.
	 *
	 * @param in
	 *            the String to copy from
	 * @param out
	 *            the Writer to copy to
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static void copy(String in, Writer out) throws IOException {
		Assertor.notNull(in, "No input String specified");
		Assertor.notNull(out, "No Writer specified");
		try {
			out.write(in);
		}
		finally {
			// Best-effort close; see copy(InputStream, OutputStream).
			try {
				out.close();
			}
			catch (IOException ignored) {
			}
		}
	}

	/**
	 * Copy the contents of the given Reader into a String. Closes the reader
	 * when done.
	 *
	 * @param in
	 *            the reader to copy from
	 * @return the String that has been copied to
	 * @throws IOException
	 *             in case of I/O errors
	 */
	public static String copyToString(Reader in) throws IOException {
		StringWriter out = new StringWriter();
		copy(in, out);
		return out.toString();
	}
}
|
|
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.ShuffleOrder;
import com.google.android.exoplayer2.testutil.FakeMediaSource;
import com.google.android.exoplayer2.testutil.FakeShuffleOrder;
import com.google.android.exoplayer2.util.Util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit test for {@link MediaSourceList}. */
@RunWith(AndroidJUnit4.class)
public class MediaSourceListTest {
// Number of fake sources created by the default helper fixtures below.
private static final int MEDIA_SOURCE_LIST_SIZE = 4;
// Minimal item so that mocked MediaSources can answer getMediaItem().
private static final MediaItem MINIMAL_MEDIA_ITEM =
new MediaItem.Builder().setMediaId("").build();
private MediaSourceList mediaSourceList;
// Fresh list per test, with a mocked refresh listener and no analytics collector.
@Before
public void setUp() {
mediaSourceList =
new MediaSourceList(
mock(MediaSourceList.MediaSourceListInfoRefreshListener.class),
/* analyticsCollector= */ null,
Util.createHandlerForCurrentOrMainLooper());
}
@Test
public void emptyMediaSourceList_expectConstantTimelineInstanceEMPTY() {
ShuffleOrder.DefaultShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 0);
List<MediaSourceList.MediaSourceHolder> fakeHolders = createFakeHolders();
Timeline timeline = mediaSourceList.setMediaSources(fakeHolders, shuffleOrder);
// A non-empty list must yield a timeline other than the shared EMPTY instance.
assertNotSame(timeline, Timeline.EMPTY);
// Remove all media sources.
timeline =
mediaSourceList.removeMediaSourceRange(
/* fromIndex= */ 0, /* toIndex= */ timeline.getWindowCount(), shuffleOrder);
// Once empty, the exact Timeline.EMPTY constant must be returned (identity, not equality).
assertSame(timeline, Timeline.EMPTY);
timeline = mediaSourceList.setMediaSources(fakeHolders, shuffleOrder);
assertNotSame(timeline, Timeline.EMPTY);
// Clear.
timeline = mediaSourceList.clear(shuffleOrder);
assertSame(timeline, Timeline.EMPTY);
}
@Test
public void prepareAndReprepareAfterRelease_expectSourcePreparationAfterMediaSourceListPrepare() {
MediaSource mockMediaSource1 = mock(MediaSource.class);
when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource2 = mock(MediaSource.class);
when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
mediaSourceList.setMediaSources(
createFakeHoldersWithSources(
/* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2),
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 2));
// List not prepared yet: setting sources must not trigger source preparation.
verify(mockMediaSource1, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
verify(mockMediaSource2, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
mediaSourceList.prepare(/* mediaTransferListener= */ null);
assertThat(mediaSourceList.isPrepared()).isTrue();
// Verify prepare is called once on prepare.
verify(mockMediaSource1, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
verify(mockMediaSource2, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
mediaSourceList.release();
mediaSourceList.prepare(/* mediaTransferListener= */ null);
// Verify prepare is called a second time on re-prepare.
// (Mockito call counts are cumulative, hence times(2) here.)
verify(mockMediaSource1, times(2))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
verify(mockMediaSource2, times(2))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
}
@Test
public void setMediaSources_mediaSourceListUnprepared_notUsingLazyPreparation() {
ShuffleOrder.DefaultShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 2);
MediaSource mockMediaSource1 = mock(MediaSource.class);
when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource2 = mock(MediaSource.class);
when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
List<MediaSourceList.MediaSourceHolder> mediaSources =
createFakeHoldersWithSources(
/* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2);
Timeline timeline = mediaSourceList.setMediaSources(mediaSources, shuffleOrder);
assertThat(timeline.getWindowCount()).isEqualTo(2);
assertThat(mediaSourceList.getSize()).isEqualTo(2);
// Assert holder offsets have been set properly
for (int i = 0; i < mediaSources.size(); i++) {
MediaSourceList.MediaSourceHolder mediaSourceHolder = mediaSources.get(i);
assertThat(mediaSourceHolder.isRemoved).isFalse();
assertThat(mediaSourceHolder.firstWindowIndexInChild).isEqualTo(i);
}
// Set media items again. The second holder is re-used.
MediaSource mockMediaSource3 = mock(MediaSource.class);
when(mockMediaSource3.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
List<MediaSourceList.MediaSourceHolder> moreMediaSources =
createFakeHoldersWithSources(/* useLazyPreparation= */ false, mockMediaSource3);
moreMediaSources.add(mediaSources.get(1));
timeline = mediaSourceList.setMediaSources(moreMediaSources, shuffleOrder);
assertThat(mediaSourceList.getSize()).isEqualTo(2);
assertThat(timeline.getWindowCount()).isEqualTo(2);
for (int i = 0; i < moreMediaSources.size(); i++) {
MediaSourceList.MediaSourceHolder mediaSourceHolder = moreMediaSources.get(i);
assertThat(mediaSourceHolder.isRemoved).isFalse();
assertThat(mediaSourceHolder.firstWindowIndexInChild).isEqualTo(i);
}
// Expect removed holders and sources to be removed without releasing
// (the list was never prepared, so there is nothing to release).
verify(mockMediaSource1, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
assertThat(mediaSources.get(0).isRemoved).isTrue();
// Expect re-used holder and source not to be removed.
verify(mockMediaSource2, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
assertThat(mediaSources.get(1).isRemoved).isFalse();
}
@Test
public void setMediaSources_mediaSourceListPrepared_notUsingLazyPreparation() {
ShuffleOrder.DefaultShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 2);
MediaSource mockMediaSource1 = mock(MediaSource.class);
when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource2 = mock(MediaSource.class);
when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
List<MediaSourceList.MediaSourceHolder> mediaSources =
createFakeHoldersWithSources(
/* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2);
mediaSourceList.prepare(/* mediaTransferListener= */ null);
mediaSourceList.setMediaSources(mediaSources, shuffleOrder);
// Verify sources are prepared.
verify(mockMediaSource1, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
verify(mockMediaSource2, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
// Set media items again. The second holder is re-used.
MediaSource mockMediaSource3 = mock(MediaSource.class);
when(mockMediaSource3.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
List<MediaSourceList.MediaSourceHolder> moreMediaSources =
createFakeHoldersWithSources(/* useLazyPreparation= */ false, mockMediaSource3);
moreMediaSources.add(mediaSources.get(1));
mediaSourceList.setMediaSources(moreMediaSources, shuffleOrder);
// Expect removed holders and sources to be removed and released.
verify(mockMediaSource1, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
assertThat(mediaSources.get(0).isRemoved).isTrue();
// Expect re-used holder and source not to be removed but released
// (released on removal from the old list, then prepared again when re-added).
verify(mockMediaSource2, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
assertThat(mediaSources.get(1).isRemoved).isFalse();
verify(mockMediaSource2, times(2))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
}
@Test
public void addMediaSources_mediaSourceListUnprepared_notUsingLazyPreparation_expectUnprepared() {
MediaSource mockMediaSource1 = mock(MediaSource.class);
when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource2 = mock(MediaSource.class);
when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
mediaSourceList.addMediaSources(
/* index= */ 0, mediaSources, new ShuffleOrder.DefaultShuffleOrder(2));
assertThat(mediaSourceList.getSize()).isEqualTo(2);
// Verify lazy initialization does not call prepare on sources.
verify(mockMediaSource1, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
verify(mockMediaSource2, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
for (int i = 0; i < mediaSources.size(); i++) {
assertThat(mediaSources.get(i).firstWindowIndexInChild).isEqualTo(i);
assertThat(mediaSources.get(i).isRemoved).isFalse();
}
// Add four more fake sources in the middle (at index 1).
List<MediaSourceList.MediaSourceHolder> moreMediaSources = createFakeHolders();
mediaSourceList.addMediaSources(
/* index= */ 1, moreMediaSources, new ShuffleOrder.DefaultShuffleOrder(/* length= */ 3));
// Window indices shift: [src1, more0..more3, src2] -> 0, 1..4, 5.
assertThat(mediaSources.get(0).firstWindowIndexInChild).isEqualTo(0);
assertThat(moreMediaSources.get(0).firstWindowIndexInChild).isEqualTo(1);
assertThat(moreMediaSources.get(3).firstWindowIndexInChild).isEqualTo(4);
assertThat(mediaSources.get(1).firstWindowIndexInChild).isEqualTo(5);
}
@Test
public void addMediaSources_mediaSourceListPrepared_notUsingLazyPreparation_expectPrepared() {
MediaSource mockMediaSource1 = mock(MediaSource.class);
when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource2 = mock(MediaSource.class);
when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
mediaSourceList.prepare(/* mediaTransferListener= */ null);
mediaSourceList.addMediaSources(
/* index= */ 0,
createFakeHoldersWithSources(
/* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2),
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 2));
// Verify prepare is called on sources when added.
verify(mockMediaSource1, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
verify(mockMediaSource2, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
}
@Test
public void moveMediaSources() {
ShuffleOrder.DefaultShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 4);
List<MediaSourceList.MediaSourceHolder> holders = createFakeHolders();
mediaSourceList.addMediaSources(/* index= */ 0, holders, shuffleOrder);
assertDefaultFirstWindowInChildIndexOrder(holders);
mediaSourceList.moveMediaSource(/* currentIndex= */ 0, /* newIndex= */ 3, shuffleOrder);
assertFirstWindowInChildIndices(holders, 3, 0, 1, 2);
mediaSourceList.moveMediaSource(/* currentIndex= */ 3, /* newIndex= */ 0, shuffleOrder);
assertDefaultFirstWindowInChildIndexOrder(holders);
mediaSourceList.moveMediaSourceRange(
/* fromIndex= */ 0, /* toIndex= */ 2, /* newFromIndex= */ 2, shuffleOrder);
assertFirstWindowInChildIndices(holders, 2, 3, 0, 1);
mediaSourceList.moveMediaSourceRange(
/* fromIndex= */ 2, /* toIndex= */ 4, /* newFromIndex= */ 0, shuffleOrder);
assertDefaultFirstWindowInChildIndexOrder(holders);
mediaSourceList.moveMediaSourceRange(
/* fromIndex= */ 0, /* toIndex= */ 2, /* newFromIndex= */ 2, shuffleOrder);
assertFirstWindowInChildIndices(holders, 2, 3, 0, 1);
mediaSourceList.moveMediaSourceRange(
/* fromIndex= */ 2, /* toIndex= */ 3, /* newFromIndex= */ 0, shuffleOrder);
assertFirstWindowInChildIndices(holders, 0, 3, 1, 2);
mediaSourceList.moveMediaSourceRange(
/* fromIndex= */ 3, /* toIndex= */ 4, /* newFromIndex= */ 1, shuffleOrder);
assertDefaultFirstWindowInChildIndexOrder(holders);
// No-ops.
mediaSourceList.moveMediaSourceRange(
/* fromIndex= */ 0, /* toIndex= */ 4, /* newFromIndex= */ 0, shuffleOrder);
assertDefaultFirstWindowInChildIndexOrder(holders);
mediaSourceList.moveMediaSourceRange(
/* fromIndex= */ 0, /* toIndex= */ 0, /* newFromIndex= */ 3, shuffleOrder);
assertDefaultFirstWindowInChildIndexOrder(holders);
}
@Test
public void removeMediaSources_whenUnprepared_expectNoRelease() {
MediaSource mockMediaSource1 = mock(MediaSource.class);
when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource2 = mock(MediaSource.class);
when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource3 = mock(MediaSource.class);
when(mockMediaSource3.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource4 = mock(MediaSource.class);
when(mockMediaSource4.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
ShuffleOrder.DefaultShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 4);
List<MediaSourceList.MediaSourceHolder> holders =
createFakeHoldersWithSources(
/* useLazyPreparation= */ false,
mockMediaSource1,
mockMediaSource2,
mockMediaSource3,
mockMediaSource4);
mediaSourceList.addMediaSources(/* index= */ 0, holders, shuffleOrder);
mediaSourceList.removeMediaSourceRange(/* fromIndex= */ 1, /* toIndex= */ 3, shuffleOrder);
assertThat(mediaSourceList.getSize()).isEqualTo(2);
// Mirror the removal in the local list: removing index 1 twice drops sources 2 and 3.
MediaSourceList.MediaSourceHolder removedHolder1 = holders.remove(1);
MediaSourceList.MediaSourceHolder removedHolder2 = holders.remove(1);
assertDefaultFirstWindowInChildIndexOrder(holders);
assertThat(removedHolder1.isRemoved).isTrue();
assertThat(removedHolder2.isRemoved).isTrue();
verify(mockMediaSource1, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
verify(mockMediaSource2, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
verify(mockMediaSource3, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
verify(mockMediaSource4, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
}
@Test
public void removeMediaSources_whenPrepared_expectRelease() {
MediaSource mockMediaSource1 = mock(MediaSource.class);
when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource2 = mock(MediaSource.class);
when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource3 = mock(MediaSource.class);
when(mockMediaSource3.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource4 = mock(MediaSource.class);
when(mockMediaSource4.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
ShuffleOrder.DefaultShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 4);
List<MediaSourceList.MediaSourceHolder> holders =
createFakeHoldersWithSources(
/* useLazyPreparation= */ false,
mockMediaSource1,
mockMediaSource2,
mockMediaSource3,
mockMediaSource4);
mediaSourceList.prepare(/* mediaTransferListener= */ null);
mediaSourceList.addMediaSources(/* index= */ 0, holders, shuffleOrder);
mediaSourceList.removeMediaSourceRange(/* fromIndex= */ 1, /* toIndex= */ 3, shuffleOrder);
assertThat(mediaSourceList.getSize()).isEqualTo(2);
holders.remove(2);
holders.remove(1);
assertDefaultFirstWindowInChildIndexOrder(holders);
// Only the removed (middle) sources are released; the survivors are not.
verify(mockMediaSource1, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
verify(mockMediaSource2, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
verify(mockMediaSource3, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
verify(mockMediaSource4, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
}
@Test
public void release_mediaSourceListUnprepared_expectSourcesNotReleased() {
MediaSource mockMediaSource = mock(MediaSource.class);
when(mockMediaSource.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSourceList.MediaSourceHolder mediaSourceHolder =
new MediaSourceList.MediaSourceHolder(mockMediaSource, /* useLazyPreparation= */ false);
mediaSourceList.setMediaSources(
Collections.singletonList(mediaSourceHolder),
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 1));
verify(mockMediaSource, times(0))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
mediaSourceList.release();
verify(mockMediaSource, times(0)).releaseSource(any(MediaSource.MediaSourceCaller.class));
assertThat(mediaSourceHolder.isRemoved).isFalse();
}
@Test
public void release_mediaSourceListPrepared_expectSourcesReleasedNotRemoved() {
MediaSource mockMediaSource = mock(MediaSource.class);
when(mockMediaSource.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSourceList.MediaSourceHolder mediaSourceHolder =
new MediaSourceList.MediaSourceHolder(mockMediaSource, /* useLazyPreparation= */ false);
mediaSourceList.prepare(/* mediaTransferListener= */ null);
mediaSourceList.setMediaSources(
Collections.singletonList(mediaSourceHolder),
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 1));
verify(mockMediaSource, times(1))
.prepareSource(
any(MediaSource.MediaSourceCaller.class), /* mediaTransferListener= */ isNull());
mediaSourceList.release();
// release() releases the source but keeps the holder in the list (not removed).
verify(mockMediaSource, times(1)).releaseSource(any(MediaSource.MediaSourceCaller.class));
assertThat(mediaSourceHolder.isRemoved).isFalse();
}
@Test
public void clearMediaSourceList_expectSourcesReleasedAndRemoved() {
ShuffleOrder.DefaultShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ 4);
MediaSource mockMediaSource1 = mock(MediaSource.class);
when(mockMediaSource1.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
MediaSource mockMediaSource2 = mock(MediaSource.class);
when(mockMediaSource2.getMediaItem()).thenReturn(MINIMAL_MEDIA_ITEM);
List<MediaSourceList.MediaSourceHolder> holders =
createFakeHoldersWithSources(
/* useLazyPreparation= */ false, mockMediaSource1, mockMediaSource2);
mediaSourceList.setMediaSources(holders, shuffleOrder);
mediaSourceList.prepare(/* mediaTransferListener= */ null);
Timeline timeline = mediaSourceList.clear(shuffleOrder);
assertThat(timeline.isEmpty()).isTrue();
assertThat(holders.get(0).isRemoved).isTrue();
assertThat(holders.get(1).isRemoved).isTrue();
verify(mockMediaSource1, times(1)).releaseSource(any());
verify(mockMediaSource2, times(1)).releaseSource(any());
}
@Test
public void setMediaSources_expectTimelineUsesCustomShuffleOrder() {
Timeline timeline =
mediaSourceList.setMediaSources(createFakeHolders(), new FakeShuffleOrder(/* length=*/ 4));
assertTimelineUsesFakeShuffleOrder(timeline);
}
@Test
public void addMediaSources_expectTimelineUsesCustomShuffleOrder() {
Timeline timeline =
mediaSourceList.addMediaSources(
/* index= */ 0, createFakeHolders(), new FakeShuffleOrder(MEDIA_SOURCE_LIST_SIZE));
assertTimelineUsesFakeShuffleOrder(timeline);
}
@Test
public void moveMediaSources_expectTimelineUsesCustomShuffleOrder() {
ShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ MEDIA_SOURCE_LIST_SIZE);
mediaSourceList.addMediaSources(/* index= */ 0, createFakeHolders(), shuffleOrder);
Timeline timeline =
mediaSourceList.moveMediaSource(
/* currentIndex= */ 0, /* newIndex= */ 1, new FakeShuffleOrder(MEDIA_SOURCE_LIST_SIZE));
assertTimelineUsesFakeShuffleOrder(timeline);
}
@Test
public void moveMediaSourceRange_expectTimelineUsesCustomShuffleOrder() {
ShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ MEDIA_SOURCE_LIST_SIZE);
mediaSourceList.addMediaSources(/* index= */ 0, createFakeHolders(), shuffleOrder);
Timeline timeline =
mediaSourceList.moveMediaSourceRange(
/* fromIndex= */ 0,
/* toIndex= */ 2,
/* newFromIndex= */ 2,
new FakeShuffleOrder(MEDIA_SOURCE_LIST_SIZE));
assertTimelineUsesFakeShuffleOrder(timeline);
}
@Test
public void removeMediaSourceRange_expectTimelineUsesCustomShuffleOrder() {
ShuffleOrder shuffleOrder =
new ShuffleOrder.DefaultShuffleOrder(/* length= */ MEDIA_SOURCE_LIST_SIZE);
mediaSourceList.addMediaSources(/* index= */ 0, createFakeHolders(), shuffleOrder);
Timeline timeline =
mediaSourceList.removeMediaSourceRange(
/* fromIndex= */ 0, /* toIndex= */ 2, new FakeShuffleOrder(/* length= */ 2));
assertTimelineUsesFakeShuffleOrder(timeline);
}
@Test
public void setShuffleOrder_expectTimelineUsesCustomShuffleOrder() {
mediaSourceList.setMediaSources(
createFakeHolders(),
new ShuffleOrder.DefaultShuffleOrder(/* length= */ MEDIA_SOURCE_LIST_SIZE));
assertTimelineUsesFakeShuffleOrder(
mediaSourceList.setShuffleOrder(new FakeShuffleOrder(MEDIA_SOURCE_LIST_SIZE)));
}
// Internal methods.
// FakeShuffleOrder returns -1 for next/previous in shuffle mode, which is how
// we detect that the timeline adopted the custom shuffle order.
private static void assertTimelineUsesFakeShuffleOrder(Timeline timeline) {
assertThat(
timeline.getNextWindowIndex(
/* windowIndex= */ 0, Player.REPEAT_MODE_OFF, /* shuffleModeEnabled= */ true))
.isEqualTo(-1);
assertThat(
timeline.getPreviousWindowIndex(
/* windowIndex= */ timeline.getWindowCount() - 1,
Player.REPEAT_MODE_OFF,
/* shuffleModeEnabled= */ true))
.isEqualTo(-1);
}
// Asserts holders are in their original insertion order (0, 1, 2, ...).
private static void assertDefaultFirstWindowInChildIndexOrder(
List<MediaSourceList.MediaSourceHolder> holders) {
int[] indices = new int[holders.size()];
for (int i = 0; i < indices.length; i++) {
indices[i] = i;
}
assertFirstWindowInChildIndices(holders, indices);
}
// Asserts each holder's first window index matches the expected sequence.
private static void assertFirstWindowInChildIndices(
List<MediaSourceList.MediaSourceHolder> holders, int... firstWindowInChildIndices) {
assertThat(holders).hasSize(firstWindowInChildIndices.length);
for (int i = 0; i < holders.size(); i++) {
assertThat(holders.get(i).firstWindowIndexInChild).isEqualTo(firstWindowInChildIndices[i]);
}
}
// Creates MEDIA_SOURCE_LIST_SIZE holders backed by FakeMediaSource with lazy preparation.
private static List<MediaSourceList.MediaSourceHolder> createFakeHolders() {
List<MediaSourceList.MediaSourceHolder> holders = new ArrayList<>();
for (int i = 0; i < MEDIA_SOURCE_LIST_SIZE; i++) {
holders.add(
new MediaSourceList.MediaSourceHolder(
new FakeMediaSource(), /* useLazyPreparation= */ true));
}
return holders;
}
// Creates one holder per given source with the requested preparation mode.
private static List<MediaSourceList.MediaSourceHolder> createFakeHoldersWithSources(
boolean useLazyPreparation, MediaSource... sources) {
List<MediaSourceList.MediaSourceHolder> holders = new ArrayList<>();
for (MediaSource mediaSource : sources) {
holders.add(
new MediaSourceList.MediaSourceHolder(
mediaSource, /* useLazyPreparation= */ useLazyPreparation));
}
return holders;
}
}
|
|
package qora.transaction;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.json.simple.JSONObject;
import com.google.common.primitives.Bytes;
import com.google.common.primitives.Ints;
import com.google.common.primitives.Longs;
import qora.account.Account;
import qora.account.PrivateKeyAccount;
import qora.account.PublicKeyAccount;
import qora.crypto.Base58;
import qora.crypto.Crypto;
import qora.naming.Name;
import qora.naming.NameSale;
import database.DBSet;
/**
 * Transaction in which {@code buyer} purchases a name that {@code seller} has
 * listed for sale. On processing, the sale amount moves from buyer to seller
 * and ownership of the name transfers to the buyer.
 *
 * NOTE(review): toBytes(), isSignatureValid() and generateSignature() each
 * rebuild the same byte layout by hand; the layouts must stay in lock-step or
 * signatures will break.
 */
public class BuyNameTransaction extends Transaction
{
// Fixed field widths (bytes) of the serialized form.
private static final int BUYER_LENGTH = 32;
private static final int SELLER_LENGTH = 25;
private static final int REFERENCE_LENGTH = 64;
private static final int FEE_LENGTH = 8;
private static final int SIGNATURE_LENGTH = 64;
// Minimum serialized size excluding the variable-length NameSale payload.
private static final int BASE_LENGTH = TIMESTAMP_LENGTH + REFERENCE_LENGTH + BUYER_LENGTH + SELLER_LENGTH + FEE_LENGTH + SIGNATURE_LENGTH;
private PublicKeyAccount buyer;
private NameSale nameSale;
private Account seller;
public BuyNameTransaction(PublicKeyAccount buyer, NameSale nameSale, Account seller, BigDecimal fee, long timestamp, byte[] reference, byte[] signature) {
super(BUY_NAME_TRANSACTION, fee, timestamp, reference, signature);
this.buyer = buyer;
this.nameSale = nameSale;
this.seller = seller;
}
//GETTERS/SETTERS
public PublicKeyAccount getBuyer()
{
return this.buyer;
}
public NameSale getNameSale()
{
return this.nameSale;
}
public Account getSeller()
{
return this.seller;
}
//PARSE CONVERT
/**
 * Deserializes a BuyNameTransaction from raw bytes (excluding the type prefix).
 * NOTE(review): after the variable-length NameSale is read there is no re-check
 * that enough bytes remain for seller/fee/signature — assumes well-formed input.
 */
public static Transaction Parse(byte[] data) throws Exception
{
//CHECK IF DATA MATCHES MINIMUM BLOCK LENGTH
if(data.length < BASE_LENGTH)
{
throw new Exception("Data does not match block length");
}
int position = 0;
//READ TIMESTAMP
byte[] timestampBytes = Arrays.copyOfRange(data, position, position + TIMESTAMP_LENGTH);
long timestamp = Longs.fromByteArray(timestampBytes);
position += TIMESTAMP_LENGTH;
//READ REFERENCE
byte[] reference = Arrays.copyOfRange(data, position, position + REFERENCE_LENGTH);
position += REFERENCE_LENGTH;
//READ BUYER
byte[] buyerBytes = Arrays.copyOfRange(data, position, position + BUYER_LENGTH);
PublicKeyAccount buyer = new PublicKeyAccount(buyerBytes);
position += BUYER_LENGTH;
//READ NAMESALE (variable length; NameSale.Parse consumes only what it needs)
NameSale nameSale = NameSale.Parse(Arrays.copyOfRange(data, position, data.length));
position += nameSale.getDataLength();
//READ SELLER (25 raw address bytes, re-encoded to a Base58 address string)
byte[] recipientBytes = Arrays.copyOfRange(data, position, position + SELLER_LENGTH);
Account seller = new Account(Base58.encode(recipientBytes));
position += SELLER_LENGTH;
//READ FEE (8-byte big-endian unscaled value with 8 decimal places)
byte[] feeBytes = Arrays.copyOfRange(data, position, position + FEE_LENGTH);
BigDecimal fee = new BigDecimal(new BigInteger(feeBytes), 8);
position += FEE_LENGTH;
//READ SIGNATURE
byte[] signatureBytes = Arrays.copyOfRange(data, position, position + SIGNATURE_LENGTH);
return new BuyNameTransaction(buyer, nameSale, seller, fee, timestamp, reference, signatureBytes);
}
@SuppressWarnings("unchecked")
@Override
public JSONObject toJson()
{
//GET BASE
JSONObject transaction = this.getJsonBase();
//ADD BUYER/NAME/AMOUNT/SELLER
transaction.put("buyer", this.buyer.getAddress());
transaction.put("name", this.nameSale.getKey());
transaction.put("amount", this.nameSale.getAmount());
transaction.put("seller", this.seller.getAddress());
return transaction;
}
/**
 * Serializes the full transaction: type + timestamp + reference + buyer +
 * name sale + seller + fee + signature. Field order must match Parse().
 */
@Override
public byte[] toBytes()
{
byte[] data = new byte[0];
//WRITE TYPE
byte[] typeBytes = Ints.toByteArray(BUY_NAME_TRANSACTION);
typeBytes = Bytes.ensureCapacity(typeBytes, TYPE_LENGTH, 0);
data = Bytes.concat(data, typeBytes);
//WRITE TIMESTAMP
byte[] timestampBytes = Longs.toByteArray(this.timestamp);
timestampBytes = Bytes.ensureCapacity(timestampBytes, TIMESTAMP_LENGTH, 0);
data = Bytes.concat(data, timestampBytes);
//WRITE REFERENCE
data = Bytes.concat(data, this.reference);
//WRITE BUYER
data = Bytes.concat(data, this.buyer.getPublicKey());
//WRITE NAME SALE
data = Bytes.concat(data, this.nameSale.toBytes());
//WRITE SELLER (raw 25 address bytes)
data = Bytes.concat(data, Base58.decode(this.seller.getAddress()));
//WRITE FEE (unscaled value left-padded with zeros to FEE_LENGTH)
byte[] feeBytes = this.fee.unscaledValue().toByteArray();
byte[] fill = new byte[FEE_LENGTH - feeBytes.length];
feeBytes = Bytes.concat(fill, feeBytes);
data = Bytes.concat(data, feeBytes);
//SIGNATURE
data = Bytes.concat(data, this.signature);
return data;
}
@Override
public int getDataLength()
{
// Fixed part plus the variable-length NameSale payload.
return TYPE_LENGTH + BASE_LENGTH + this.nameSale.getDataLength();
}
//VALIDATE
/**
 * Rebuilds the signed byte stream (everything except the signature itself)
 * and verifies it against the buyer's public key.
 */
@Override
public boolean isSignatureValid()
{
byte[] data = new byte[0];
//WRITE TYPE
byte[] typeBytes = Ints.toByteArray(BUY_NAME_TRANSACTION);
typeBytes = Bytes.ensureCapacity(typeBytes, TYPE_LENGTH, 0);
data = Bytes.concat(data, typeBytes);
//WRITE TIMESTAMP
byte[] timestampBytes = Longs.toByteArray(this.timestamp);
timestampBytes = Bytes.ensureCapacity(timestampBytes, TIMESTAMP_LENGTH, 0);
data = Bytes.concat(data, timestampBytes);
//WRITE REFERENCE
data = Bytes.concat(data, this.reference);
//WRITE BUYER
data = Bytes.concat(data, this.buyer.getPublicKey());
//WRITE NAME SALE
data = Bytes.concat(data, this.nameSale.toBytes());
//WRITE SELLER
data = Bytes.concat(data, Base58.decode(this.seller.getAddress()));
//WRITE FEE
byte[] feeBytes = this.fee.unscaledValue().toByteArray();
byte[] fill = new byte[FEE_LENGTH - feeBytes.length];
feeBytes = Bytes.concat(fill, feeBytes);
data = Bytes.concat(data, feeBytes);
return Crypto.getInstance().verify(this.buyer.getPublicKey(), this.signature, data);
}
/**
 * Semantic validation against current database state. Returns VALIDATE_OKE
 * or the first failing error code.
 */
@Override
public int isValid(DBSet db)
{
//CHECK NAME LENGTH
int nameLength = this.nameSale.getKey().getBytes(StandardCharsets.UTF_8).length;
if(nameLength > 400 || nameLength < 1)
{
return INVALID_NAME_LENGTH;
}
//CHECK IF NAME EXISTS
Name name = this.nameSale.getName(db);
if(name == null)
{
return NAME_DOES_NOT_EXIST;
}
//CHECK IF BUYER IS ALREADY THE OWNER
if(name.getOwner().getAddress().equals(this.buyer.getAddress()))
{
return BUYER_ALREADY_OWNER;
}
//CHECK IF NAME IS ACTUALLY LISTED FOR SALE
if(!db.getNameExchangeMap().contains(this.nameSale.getKey()))
{
return NAME_NOT_FOR_SALE;
}
//CHECK IF SELLER IS THE CURRENT OWNER
if(!name.getOwner().getAddress().equals(this.seller.getAddress()))
{
return INVALID_SELLER;
}
//CHECK IF BUYER HAS ENOUGH MONEY (amount + fee)
// NOTE(review): balance key 1 appears to be the native currency — confirm.
if(this.buyer.getBalance(1, db).compareTo(this.nameSale.getAmount().add(this.fee)) == -1)
{
return NO_BALANCE;
}
//CHECK IF PRICE MATCHES THE LISTED SALE PRICE
NameSale nameSale = db.getNameExchangeMap().getNameSale(this.nameSale.getKey());
if(!this.nameSale.getAmount().equals(nameSale.getAmount()))
{
return INVALID_AMOUNT;
}
//CHECK IF REFERENCE MATCHES BUYER'S LAST REFERENCE
if(!Arrays.equals(this.buyer.getLastReference(db), this.reference))
{
return INVALID_REFERENCE;
}
//CHECK IF FEE IS POSITIVE (zero fee is also rejected)
if(this.fee.compareTo(BigDecimal.ZERO) <= 0)
{
return NEGATIVE_FEE;
}
return VALIDATE_OKE;
}
//PROCESS/ORPHAN
/** Applies the transaction: moves funds, transfers ownership, delists the sale. */
@Override
public void process(DBSet db)
{
//UPDATE BUYER (debit amount + fee)
this.buyer.setConfirmedBalance(this.buyer.getConfirmedBalance(db).subtract(this.fee).subtract(this.nameSale.getAmount()), db);
//UPDATE SELLER (credit sale amount)
Name name = this.nameSale.getName(db);
this.seller.setConfirmedBalance(this.seller.getConfirmedBalance(db).add(this.nameSale.getAmount()), db);
//UPDATE REFERENCE OF BUYER
this.buyer.setLastReference(this.signature, db);
//UPDATE NAME OWNER (NEW OBJECT FOR PREVENTING CACHE ERRORS)
name = new Name(this.buyer, name.getName(), name.getValue());
db.getNameMap().add(name);
//DELETE NAME SALE FROM DATABASE
db.getNameExchangeMap().delete(this.nameSale.getKey());
}
/** Reverses process(): refunds buyer, debits seller, restores ownership and listing. */
@Override
public void orphan(DBSet db)
{
//UPDATE BUYER (refund amount + fee)
this.buyer.setConfirmedBalance(this.buyer.getConfirmedBalance(db).add(this.fee).add(this.nameSale.getAmount()), db);
//UPDATE SELLER (take back sale amount)
this.seller.setConfirmedBalance(this.seller.getConfirmedBalance(db).subtract(this.nameSale.getAmount()), db);
//UPDATE REFERENCE OF BUYER (restore pre-transaction reference)
this.buyer.setLastReference(this.reference, db);
//UPDATE NAME OWNER (NEW OBJECT FOR PREVENTING CACHE ERRORS)
Name name = this.nameSale.getName(db);
name = new Name(this.seller, name.getName(), name.getValue());
db.getNameMap().add(name);
//RESTORE NAMESALE
db.getNameExchangeMap().add(this.nameSale);
}
@Override
public Account getCreator()
{
return this.buyer;
}
@Override
public List<Account> getInvolvedAccounts()
{
List<Account> accounts = new ArrayList<Account>();
accounts.add(this.buyer);
accounts.add(this.getSeller());
return accounts;
}
@Override
public boolean isInvolved(Account account)
{
String address = account.getAddress();
if(address.equals(this.buyer.getAddress()))
{
return true;
}
if(address.equals(this.getSeller().getAddress()))
{
return true;
}
return false;
}
/**
 * Net balance change for the given account: buyer pays amount + fee, seller
 * receives the amount, everyone else is unaffected.
 */
@Override
public BigDecimal getAmount(Account account)
{
String address = account.getAddress();
if(address.equals(this.buyer.getAddress()))
{
return BigDecimal.ZERO.setScale(8).subtract(this.fee).subtract(this.nameSale.getAmount());
}
if(address.equals(this.getSeller().getAddress()))
{
return this.nameSale.getAmount();
}
return BigDecimal.ZERO.setScale(8);
}
/**
 * Builds the same byte stream as isSignatureValid() (using the buyer's last
 * reference from the database) and signs it with the buyer's private key.
 */
public static byte[] generateSignature(DBSet db, PrivateKeyAccount buyer, NameSale nameSale, Account seller, BigDecimal fee, long timestamp)
{
byte[] data = new byte[0];
//WRITE TYPE
byte[] typeBytes = Ints.toByteArray(BUY_NAME_TRANSACTION);
typeBytes = Bytes.ensureCapacity(typeBytes, TYPE_LENGTH, 0);
data = Bytes.concat(data, typeBytes);
//WRITE TIMESTAMP
byte[] timestampBytes = Longs.toByteArray(timestamp);
timestampBytes = Bytes.ensureCapacity(timestampBytes, TIMESTAMP_LENGTH, 0);
data = Bytes.concat(data, timestampBytes);
//WRITE REFERENCE
data = Bytes.concat(data, buyer.getLastReference(db));
//WRITE BUYER
data = Bytes.concat(data, buyer.getPublicKey());
//WRITE NAME SALE
data = Bytes.concat(data, nameSale.toBytes());
//WRITE SELLER
data = Bytes.concat(data, Base58.decode(seller.getAddress()));
//WRITE FEE
byte[] feeBytes = fee.unscaledValue().toByteArray();
byte[] fill = new byte[FEE_LENGTH - feeBytes.length];
feeBytes = Bytes.concat(fill, feeBytes);
data = Bytes.concat(data, feeBytes);
return Crypto.getInstance().sign(buyer, data);
}
}
|
|
/*
* Copyright 2011-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.http;
import com.amazonaws.AmazonWebServiceClient;
import com.amazonaws.annotation.NotThreadSafe;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.Signer;
import com.amazonaws.handlers.RequestHandler2;
import com.amazonaws.http.timers.client.ClientExecutionAbortTrackerTask;
import com.amazonaws.http.timers.client.NoOpClientExecutionAbortTrackerTask;
import com.amazonaws.internal.auth.NoOpSignerProvider;
import com.amazonaws.internal.auth.SignerProviderContext;
import com.amazonaws.internal.auth.SignerProvider;
import com.amazonaws.retry.internal.AuthErrorRetryStrategy;
import com.amazonaws.util.AWSRequestMetrics;
import com.amazonaws.util.AWSRequestMetricsFullSupport;
import java.net.URI;
import java.util.List;
/**
 * Mutable per-request state shared across a single request's lifecycle
 * (request handlers, metrics, signing, retry bookkeeping).
 *
 * @NotThreadSafe This class should only be accessed by a single thread and be used throughout
 *                a single request lifecycle.
 */
@NotThreadSafe
public class ExecutionContext {

    private final AWSRequestMetrics awsRequestMetrics;
    private final List<RequestHandler2> requestHandler2s;
    private final AmazonWebServiceClient awsClient;
    private final SignerProvider signerProvider;

    /** Set once a retry during this lifecycle consumes retry capacity. */
    private boolean retryCapacityConsumed;

    /**
     * Optional credentials to enable the runtime layer to handle signing requests (and resigning on
     * retries).
     */
    private AWSCredentialsProvider credentialsProvider;

    /**
     * An internal retry strategy for auth errors. This is currently only used by the S3 client for
     * auto-resolving V4-required regions.
     */
    private AuthErrorRetryStrategy authErrorRetryStrategy;

    private ClientExecutionAbortTrackerTask clientExecutionTrackerTask =
            NoOpClientExecutionAbortTrackerTask.INSTANCE;

    /** For testing purposes. */
    public ExecutionContext(boolean isMetricEnabled) {
        this(builder().withUseRequestMetrics(isMetricEnabled)
                      .withSignerProvider(new NoOpSignerProvider()));
    }

    /** For testing purposes. */
    public ExecutionContext() {
        this(builder().withSignerProvider(new NoOpSignerProvider()));
    }

    @Deprecated
    public ExecutionContext(List<RequestHandler2> requestHandler2s, boolean isMetricEnabled,
            AmazonWebServiceClient awsClient) {
        this.requestHandler2s = requestHandler2s;
        if (isMetricEnabled) {
            this.awsRequestMetrics = new AWSRequestMetricsFullSupport();
        } else {
            this.awsRequestMetrics = new AWSRequestMetrics();
        }
        this.awsClient = awsClient;
        // Legacy behavior: resolve the signer from the request URI via the owning client.
        this.signerProvider = new SignerProvider() {
            @Override
            public Signer getSigner(SignerProviderContext context) {
                return getSignerByURI(context.getUri());
            }
        };
    }

    private ExecutionContext(final Builder builder) {
        this.requestHandler2s = builder.requestHandler2s;
        if (builder.useRequestMetrics) {
            this.awsRequestMetrics = new AWSRequestMetricsFullSupport();
        } else {
            this.awsRequestMetrics = new AWSRequestMetrics();
        }
        this.awsClient = builder.awsClient;
        this.signerProvider = builder.signerProvider;
    }

    /** Returns the request handlers to run over this request's lifecycle. */
    public List<RequestHandler2> getRequestHandler2s() {
        return requestHandler2s;
    }

    /** Returns the metrics collector for this request (full support or no-op). */
    public AWSRequestMetrics getAwsRequestMetrics() {
        return awsRequestMetrics;
    }

    /** Returns the client that owns this execution context; may be null. */
    protected AmazonWebServiceClient getAwsClient() {
        return awsClient;
    }

    /**
     * There is in general no need to set the signer in the execution context, since the signer for
     * each request may differ depending on the URI of the request. The exception is S3 where the
     * signer is currently determined only when the S3 client is constructed. Hence the need for
     * this method. We may consider supporting a per request level signer determination for S3 later
     * on.
     */
    @Deprecated
    public void setSigner(Signer signer) {
        // Intentionally a no-op; kept for binary compatibility.
    }

    /**
     * Returns whether retry capacity was consumed during this request lifecycle.
     * This can be inspected to determine whether capacity should be released if a retry succeeds.
     *
     * @return true if retry capacity was consumed
     */
    public boolean retryCapacityConsumed() {
        return retryCapacityConsumed;
    }

    /**
     * Marks that a retry during this request lifecycle has consumed retry capacity. This is
     * inspected when determining if capacity should be released if a retry succeeds.
     */
    public void markRetryCapacityConsumed() {
        this.retryCapacityConsumed = true;
    }

    /**
     * Passes in the provided {@link SignerProviderContext} into a {@link SignerProvider} and
     * returns a {@link Signer} instance.
     */
    public Signer getSigner(SignerProviderContext context) {
        return signerProvider.getSigner(context);
    }

    /**
     * Returns the signer for the given uri. Note S3 in particular overrides this method.
     */
    @Deprecated
    public Signer getSignerByURI(URI uri) {
        if (awsClient == null) {
            return null;
        }
        return awsClient.getSignerByURI(uri);
    }

    /**
     * Sets the credentials provider used for fetching the credentials. The credentials fetched is
     * used for signing the request. If there is no credential provider, then the runtime will not
     * attempt to sign (or resign on retries) requests.
     *
     * @param credentialsProvider
     *            the credentials provider to fetch {@link AWSCredentials}
     */
    public void setCredentialsProvider(AWSCredentialsProvider credentialsProvider) {
        this.credentialsProvider = credentialsProvider;
    }

    /**
     * Returns the credentials provider used for fetching the credentials. The credentials fetched
     * is used for signing the request. If there is no credential provider, then the runtime will
     * not attempt to sign (or resign on retries) requests.
     *
     * @return the credentials provider to fetch {@link AWSCredentials}
     */
    public AWSCredentialsProvider getCredentialsProvider() {
        return this.credentialsProvider;
    }

    /**
     * Returns the retry strategy for auth errors. This is currently only used by the S3 client for
     * auto-resolving sigv4-required regions.
     * <p>
     * Note that this will be checked BEFORE the HTTP client consults the user-specified
     * RetryPolicy. i.e. if the configured AuthErrorRetryStrategy says the request should be
     * retried, the retry will be performed internally and the effect is transparent to the user's
     * RetryPolicy.
     */
    public AuthErrorRetryStrategy getAuthErrorRetryStrategy() {
        return authErrorRetryStrategy;
    }

    /**
     * Sets the optional auth error retry strategy for this request execution.
     *
     * @see #getAuthErrorRetryStrategy()
     */
    public void setAuthErrorRetryStrategy(AuthErrorRetryStrategy authErrorRetryStrategy) {
        this.authErrorRetryStrategy = authErrorRetryStrategy;
    }

    /** Returns the task tracking client-execution-timeout aborts (no-op by default). */
    public ClientExecutionAbortTrackerTask getClientExecutionTrackerTask() {
        return clientExecutionTrackerTask;
    }

    /** Installs the task tracking client-execution-timeout aborts. */
    public void setClientExecutionTrackerTask(ClientExecutionAbortTrackerTask clientExecutionTrackerTask) {
        this.clientExecutionTrackerTask = clientExecutionTrackerTask;
    }

    /** Creates a new builder for {@link ExecutionContext}. */
    public static ExecutionContext.Builder builder() {
        return new ExecutionContext.Builder();
    }

    /** Fluent builder for {@link ExecutionContext}. */
    public static class Builder {

        private boolean useRequestMetrics;
        private List<RequestHandler2> requestHandler2s;
        private AmazonWebServiceClient awsClient;
        private SignerProvider signerProvider = new NoOpSignerProvider();

        private Builder() {
        }

        public boolean useRequestMetrics() {
            return useRequestMetrics;
        }

        public void setUseRequestMetrics(final boolean useRequestMetrics) {
            this.useRequestMetrics = useRequestMetrics;
        }

        public Builder withUseRequestMetrics(final boolean withUseRequestMetrics) {
            this.useRequestMetrics = withUseRequestMetrics;
            return this;
        }

        public List<RequestHandler2> getRequestHandler2s() {
            return requestHandler2s;
        }

        public void setRequestHandler2s(final List<RequestHandler2> requestHandler2s) {
            this.requestHandler2s = requestHandler2s;
        }

        public Builder withRequestHandler2s(final List<RequestHandler2> requestHandler2s) {
            this.requestHandler2s = requestHandler2s;
            return this;
        }

        public AmazonWebServiceClient getAwsClient() {
            return awsClient;
        }

        public void setAwsClient(final AmazonWebServiceClient awsClient) {
            this.awsClient = awsClient;
        }

        public Builder withAwsClient(final AmazonWebServiceClient awsClient) {
            this.awsClient = awsClient;
            return this;
        }

        public SignerProvider getSignerProvider() {
            return signerProvider;
        }

        public void setSignerProvider(final SignerProvider signerProvider) {
            this.signerProvider = signerProvider;
        }

        public Builder withSignerProvider(final SignerProvider signerProvider) {
            this.signerProvider = signerProvider;
            return this;
        }

        public ExecutionContext build() {
            return new ExecutionContext(this);
        }
    }
}
|
|
/*
* Copyright (c) 2005-2013, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.rssmanager.core.manager.adaptor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.ndatasource.common.DataSourceException;
import org.wso2.carbon.ndatasource.core.CarbonDataSource;
import org.wso2.carbon.ndatasource.core.DataSourceMetaInfo;
import org.wso2.carbon.rssmanager.core.authorize.RSSAuthorizationUtils;
import org.wso2.carbon.rssmanager.core.authorize.RSSAuthorizer;
import org.wso2.carbon.rssmanager.core.config.RSSConfigurationManager;
import org.wso2.carbon.rssmanager.core.dao.exception.RSSDatabaseConnectionException;
import org.wso2.carbon.rssmanager.core.dto.DatabaseInfo;
import org.wso2.carbon.rssmanager.core.dto.DatabasePrivilegeSetInfo;
import org.wso2.carbon.rssmanager.core.dto.DatabasePrivilegeTemplateInfo;
import org.wso2.carbon.rssmanager.core.dto.DatabaseUserInfo;
import org.wso2.carbon.rssmanager.core.dto.MySQLPrivilegeSetInfo;
import org.wso2.carbon.rssmanager.core.dto.RSSInstanceInfo;
import org.wso2.carbon.rssmanager.core.dto.UserDatabaseEntryInfo;
import org.wso2.carbon.rssmanager.core.dto.common.DatabasePrivilegeSet;
import org.wso2.carbon.rssmanager.core.dto.common.DatabasePrivilegeTemplate;
import org.wso2.carbon.rssmanager.core.dto.common.DatabasePrivilegeTemplateEntry;
import org.wso2.carbon.rssmanager.core.dto.common.MySQLPrivilegeSet;
import org.wso2.carbon.rssmanager.core.dto.common.UserDatabaseEntry;
import org.wso2.carbon.rssmanager.core.dto.restricted.Database;
import org.wso2.carbon.rssmanager.core.dto.restricted.DatabaseUser;
import org.wso2.carbon.rssmanager.core.dto.restricted.RSSInstance;
import org.wso2.carbon.rssmanager.core.environment.Environment;
import org.wso2.carbon.rssmanager.core.environment.EnvironmentManager;
import org.wso2.carbon.rssmanager.core.exception.RSSManagerException;
import org.wso2.carbon.rssmanager.core.internal.RSSManagerDataHolder;
import org.wso2.carbon.rssmanager.core.service.RSSManagerService;
import org.wso2.carbon.rssmanager.core.util.RSSManagerUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class EnvironmentAdaptor implements RSSManagerService {
private static final Log log = LogFactory.getLog(EnvironmentAdaptor.class);

// Manages the configured RSS environments; every service call is routed through it.
private EnvironmentManager environmentManager;

/**
 * Creates an adaptor that delegates all RSS manager service operations to the
 * given environment manager.
 *
 * @param environmentManager the environment manager to delegate to
 */
public EnvironmentAdaptor(EnvironmentManager environmentManager) {
    this.environmentManager = environmentManager;
}
/**
 * Adds a new RSS instance to the given environment: persists it and registers
 * it with the in-memory environment caches.
 *
 * @see RSSManagerService#addRSSInstance(String, org.wso2.carbon.rssmanager.core.dto.RSSInstanceInfo)
 */
public void addRSSInstance(String environmentName, RSSInstanceInfo rssInstance)
    throws RSSManagerException {
    // Authorization: caller must be allowed to ADD instances of this type.
    String instanceType = RSSManagerUtil.getCleanInstanceType(rssInstance.getInstanceType());
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.RSSINSTANCE_RESOURCE, RSSAuthorizationUtils.ActionResource.ADD.getAction()));
    // Convert the DTO into the persistence entity.
    RSSInstance entity = new RSSInstance();
    RSSManagerUtil.createRSSInstance(rssInstance, entity);
    try {
        entity = this.getEnvironmentManager().addRSSInstance(entity);
    }
    catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error at adding rss instance " + rssInstance.getRssInstanceName() + e.getMessage();
        handleException(msg, e);
    }
    // Register the persisted instance with the environment's datasource wrapper
    // repository and instance cache.
    // NOTE(review): these lookups use rssInstance.getEnvironmentName() rather than
    // the environmentName parameter — confirm the two always match.
    environmentManager.getEnvironment(rssInstance.getEnvironmentName()).getDSWrapperRepository().addRSSInstanceDSWrapper(entity);
    environmentManager.getEnvironment(rssInstance.getEnvironmentName()).addRSSInstance(entity);
}
/**
 * Removes an RSS instance from the given environment, both from persistent
 * storage and from the in-memory environment caches.
 *
 * @see RSSManagerService#removeRSSInstance(String, String, String)
 */
public void removeRSSInstance(String environmentName, String rssInstanceName, String type)
    throws RSSManagerException {
    // Authorization: caller must be allowed to DELETE instances of this type.
    String instanceType = RSSManagerUtil.getCleanInstanceType(type);
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.RSSINSTANCE_RESOURCE, RSSAuthorizationUtils.ActionResource.DELETE.getAction()));
    this.getEnvironmentManager().removeRSSInstance(environmentName, rssInstanceName);
    // Evict the datasource wrapper and the cached instance entry.
    environmentManager.getEnvironment(environmentName).getDSWrapperRepository().removeRSSInstanceDSWrapper(rssInstanceName);
    environmentManager.getEnvironment(environmentName).removeRSSInstance(rssInstanceName);
}
/**
 * Updates an RSS instance definition. Instances declared in the static
 * configuration cannot be edited. On success the old cache entries are
 * replaced with the updated instance.
 *
 * @see RSSManagerService#updateRSSInstance(String, org.wso2.carbon.rssmanager.core.dto.RSSInstanceInfo)
 */
public void updateRSSInstance(String environmentName, RSSInstanceInfo rssInstance)
    throws RSSManagerException {
    // Config-defined instances are read-only.
    if(RSSManagerUtil.isRSSInstanceFromConfig(rssInstance.getRssInstanceName(), environmentName)) {
        throw new RSSManagerException("RSS Instances define in the configuration cannot be edited");
    }
    // Authorization: caller must be allowed to EDIT instances of this type.
    String instanceType = RSSManagerUtil.getCleanInstanceType(rssInstance.getInstanceType());
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.RSSINSTANCE_RESOURCE, RSSAuthorizationUtils.ActionResource.EDIT.getAction()));
    RSSInstance entity = new RSSInstance();
    RSSManagerUtil.createRSSInstance(rssInstance, entity);
    try {
        this.environmentManager.updateRSSInstance(environmentName, entity);
    }
    catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error at updating rss instance" + rssInstance.getRssInstanceName() + e.getMessage();
        handleException(msg, e);
    }
    // Swap the cached entries: remove from the old environment (parameter),
    // re-add under the instance's own environment name.
    // NOTE(review): removal uses environmentName while re-adding uses
    // rssInstance.getEnvironmentName() — confirm this handles environment moves
    // as intended.
    environmentManager.getEnvironment(environmentName).getDSWrapperRepository().removeRSSInstanceDSWrapper(rssInstance.getRssInstanceName());
    environmentManager.getEnvironment(environmentName).removeRSSInstance(rssInstance.getRssInstanceName());
    environmentManager.getEnvironment(rssInstance.getEnvironmentName()).getDSWrapperRepository().addRSSInstanceDSWrapper(entity);
    environmentManager.getEnvironment(rssInstance.getEnvironmentName()).addRSSInstance(entity);
}
/**
 * Looks up a single RSS instance of an environment and returns it as a DTO.
 * Note: the {@code type} parameter is currently unused by the lookup.
 *
 * @see RSSManagerService#getRSSInstance(String, String, String)
 */
public RSSInstanceInfo getRSSInstance(String environmentName, String rssInstanceName, String type)
    throws RSSManagerException {
    RSSInstanceInfo dto = new RSSInstanceInfo();
    RSSInstance instance = this.getEnvironmentManager().getRSSInstance(environmentName, rssInstanceName);
    RSSManagerUtil.createRSSInstanceInfo(dto, instance);
    return dto;
}
/**
 * Returns all RSS instances of the given environment as DTOs, preserving the
 * order reported by the environment manager.
 *
 * @see RSSManagerService#getRSSInstances(String)
 */
public RSSInstanceInfo[] getRSSInstances(String environmentName) throws RSSManagerException {
    RSSInstance[] instances = this.environmentManager.getRSSInstances(environmentName);
    RSSInstanceInfo[] result = new RSSInstanceInfo[instances.length];
    for (int i = 0; i < instances.length; i++) {
        result[i] = new RSSInstanceInfo();
        RSSManagerUtil.createRSSInstanceInfo(result[i], instances[i]);
    }
    return result;
}
/**
 * Returns every RSS instance across all environments as DTOs, preserving the
 * order reported by the environment manager.
 *
 * @see RSSManagerService#getRSSInstancesList()
 */
public RSSInstanceInfo[] getRSSInstancesList() throws RSSManagerException {
    RSSInstance[] instances = this.environmentManager.getRSSInstancesList();
    RSSInstanceInfo[] result = new RSSInstanceInfo[instances.length];
    for (int i = 0; i < instances.length; i++) {
        result[i] = new RSSInstanceInfo();
        RSSManagerUtil.createRSSInstanceInfo(result[i], instances[i]);
    }
    return result;
}
/**
 * Creates a database on the appropriate RSS instance of the environment and
 * returns the input DTO refreshed from the persisted entity.
 *
 * @see RSSManagerService#addDatabase(String, org.wso2.carbon.rssmanager.core.dto.DatabaseInfo)
 */
public DatabaseInfo addDatabase(String environmentName, DatabaseInfo database) throws RSSManagerException {
    // Authorization: caller must be allowed to ADD databases of this type.
    String instanceType = RSSManagerUtil.getCleanInstanceType(database.getType());
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.DATABASE_RESOURCE, RSSAuthorizationUtils.ActionResource.ADD.getAction()));
    Database entity = new Database();
    RSSManagerUtil.createDatabase(database, entity);
    Database returnEntity = this.getRSSManagerAdaptor(environmentName).addDatabase(entity);
    // Copy the persisted state back into the caller's DTO before returning it.
    RSSManagerUtil.createDatabaseInfo(database, returnEntity);
    return database;
}
/**
 * Drops a database from the given RSS instance after an authorization check.
 *
 * @see RSSManagerService#removeDatabase(String, String, String, String)
 */
public void removeDatabase(String environmentName, String rssInstanceName, String databaseName,
    String type) throws RSSManagerException {
    // Authorization: caller must be allowed to DELETE databases of this type.
    String instanceType = RSSManagerUtil.getCleanInstanceType(type);
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.DATABASE_RESOURCE, RSSAuthorizationUtils.ActionResource.DELETE.getAction()));
    this.getRSSManagerAdaptor(environmentName).removeDatabase(rssInstanceName, databaseName, type);
}
/**
 * Returns all databases of the environment as DTOs, preserving the order
 * reported by the RSS manager adaptor.
 *
 * @see RSSManagerService#getDatabases(String)
 */
public DatabaseInfo[] getDatabases(String environmentName) throws RSSManagerException {
    Database[] databases = this.getRSSManagerAdaptor(environmentName).getDatabases();
    DatabaseInfo[] result = new DatabaseInfo[databases.length];
    for (int i = 0; i < databases.length; i++) {
        result[i] = new DatabaseInfo();
        RSSManagerUtil.createDatabaseInfo(result[i], databases[i]);
    }
    return result;
}
/**
 * Looks up a single database on an RSS instance and returns it as a DTO.
 *
 * @see RSSManagerService#getDatabase(String, String, String, String)
 */
public DatabaseInfo getDatabase(String environmentName, String rssInstanceName, String databaseName,
    String type) throws RSSManagerException {
    DatabaseInfo dto = new DatabaseInfo();
    Database database =
            this.getRSSManagerAdaptor(environmentName).getDatabase(rssInstanceName, databaseName, type);
    RSSManagerUtil.createDatabaseInfo(dto, database);
    return dto;
}
/**
 * Checks whether the named database exists on the given RSS instance.
 *
 * @see RSSManagerService#isDatabaseExist(String, String, String, String)
 */
public boolean isDatabaseExist(String environmentName, String rssInstanceName, String databaseName,
    String type) throws RSSManagerException {
    RSSManagerAdaptor adaptor = this.getRSSManagerAdaptor(environmentName);
    return adaptor.isDatabaseExist(rssInstanceName, databaseName, type);
}
/**
 * Checks whether the named database user exists on the given RSS instance.
 *
 * @see RSSManagerService#isDatabaseUserExist(String, String, String, String)
 */
public boolean isDatabaseUserExist(String environmentName, String rssInstanceName, String username,
    String type) throws RSSManagerException {
    RSSManagerAdaptor adaptor = this.getRSSManagerAdaptor(environmentName);
    return adaptor.isDatabaseUserExist(rssInstanceName, username, type);
}
/**
 * Creates a database user on the environment's RSS instance and returns the
 * input DTO refreshed from the persisted entity.
 *
 * @see RSSManagerService#addDatabaseUser(String, org.wso2.carbon.rssmanager.core.dto.DatabaseUserInfo)
 */
public DatabaseUserInfo addDatabaseUser(String environmentName, DatabaseUserInfo user)
    throws RSSManagerException {
    // Authorization: caller must be allowed to ADD database users of this type.
    String instanceType = RSSManagerUtil.getCleanInstanceType(user.getType());
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.DATABASE_USER_RESOURCE, RSSAuthorizationUtils.ActionResource.ADD.getAction()));
    DatabaseUser entity = new DatabaseUser();
    RSSManagerUtil.createDatabaseUser(user, entity);
    entity = this.getRSSManagerAdaptor(environmentName).addDatabaseUser(entity);
    // Copy the persisted state back into the caller's DTO before returning it.
    RSSManagerUtil.createDatabaseUserInfo(user, entity);
    return user;
}
/**
 * Drops a database user from the given RSS instance after an authorization
 * check.
 *
 * @see RSSManagerService#removeDatabaseUser(String, String, String, String)
 */
public void removeDatabaseUser(String environmentName, String rssInstanceName, String username,
    String type) throws RSSManagerException {
    // Authorization: caller must be allowed to DELETE database users of this type.
    String instanceType = RSSManagerUtil.getCleanInstanceType(type);
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.DATABASE_USER_RESOURCE, RSSAuthorizationUtils.ActionResource.DELETE.getAction()));
    this.getRSSManagerAdaptor(environmentName).removeDatabaseUser(rssInstanceName, username, type);
}
/**
 * Updates the privileges a database user holds on a specific database.
 *
 * @see RSSManagerService#updateDatabaseUserPrivileges(String, DatabasePrivilegeSetInfo, DatabaseUserInfo, String)
 */
public void updateDatabaseUserPrivileges(String environmentName, DatabasePrivilegeSetInfo privileges,
    DatabaseUserInfo user, String databaseName)
    throws RSSManagerException {
    DatabaseUser entityUser = new DatabaseUser();
    RSSManagerUtil.createDatabaseUser(user, entityUser);
    // NOTE(review): a MySQLPrivilegeSet is instantiated unconditionally —
    // confirm non-MySQL instance types are handled elsewhere.
    DatabasePrivilegeSet entitySet = new MySQLPrivilegeSet();
    RSSManagerUtil.createDatabasePrivilegeSet(privileges, entitySet);
    this.getRSSManagerAdaptor(environmentName).updateDatabaseUserPrivileges(entitySet, entityUser,
        databaseName);
}
/**
 * Looks up a single database user on an RSS instance and returns it as a DTO.
 *
 * @see RSSManagerService#getDatabaseUser(String, String, String, String)
 */
public DatabaseUserInfo getDatabaseUser(String environmentName, String rssInstanceName, String username,
    String type) throws RSSManagerException {
    DatabaseUserInfo dto = new DatabaseUserInfo();
    DatabaseUser user =
            this.getRSSManagerAdaptor(environmentName).getDatabaseUser(rssInstanceName, username, type);
    RSSManagerUtil.createDatabaseUserInfo(dto, user);
    return dto;
}
/**
 * Returns the database users of the environment as DTOs. A {@link HashSet} is
 * used while collecting, so duplicate DTOs collapse and the returned array
 * order is unspecified (same contract as before).
 *
 * @see RSSManagerService#getDatabaseUsers(String)
 */
public DatabaseUserInfo[] getDatabaseUsers(String environmentName) throws RSSManagerException {
    DatabaseUser[] users = this.getRSSManagerAdaptor(environmentName).getDatabaseUsers();
    Set<DatabaseUserInfo> unique = new HashSet<DatabaseUserInfo>();
    for (DatabaseUser user : users) {
        DatabaseUserInfo dto = new DatabaseUserInfo();
        RSSManagerUtil.createDatabaseUserInfo(dto, user);
        unique.add(dto);
    }
    return unique.toArray(new DatabaseUserInfo[unique.size()]);
}
/**
 * Attaches an existing database user to a database, granting the privileges
 * defined by the named privilege template.
 *
 * @param environmentName environment hosting the RSS instance
 * @param instanceType    cleaned instance type used for authorization
 * @param templateName    name of the privilege template to apply
 * @param username        database user to attach
 * @param databaseName    target database
 * @param rssInstanceName RSS instance hosting the database
 * @throws RSSManagerException on authorization failure, connection problems,
 *                             or when the privilege template does not exist
 * @see RSSManagerService#attachUser(String, String, String, String, String, java.lang.String)
 */
public void attachUser(String environmentName, String instanceType, String templateName, String username, String
    databaseName, String rssInstanceName)
    throws RSSManagerException {
    // Authorization: caller must be allowed to attach users of this type.
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.ATTACH_DATABASE_USER_RESOURCE, RSSAuthorizationUtils.ActionResource.ADD.getAction()));
    // TODO fix this with a proper DatabasePrivilegeTemplate
    DatabasePrivilegeTemplate entity = null;
    try {
        entity = this.getEnvironmentManager()
            .getDatabasePrivilegeTemplate(environmentName, templateName);
    }
    catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error attach database user" + username + e.getMessage();
        handleException(msg, e);
    }
    // Fix: a missing template previously caused a NullPointerException at
    // entity.getEntry(); fail with a descriptive, declared exception instead.
    if (entity == null) {
        throw new RSSManagerException("Database privilege template '" + templateName
            + "' does not exist in environment '" + environmentName + "'");
    }
    DatabasePrivilegeTemplateEntry entry = entity.getEntry();
    UserDatabaseEntry userEntity = new UserDatabaseEntry();
    userEntity.setDatabaseName(databaseName);
    userEntity.setRssInstanceName(rssInstanceName);
    userEntity.setType(instanceType);
    userEntity.setUsername(username);
    this.getRSSManagerAdaptor(environmentName).attachUser(userEntity, entry);
}
/**
 * Detaches a database user from a database, revoking its access.
 *
 * @see RSSManagerService#detachUser(String, String, String, String, java.lang.String)
 */
public void detachUser(String environmentName, String databaseName, String instanceType, String username, String rssInstanceName) throws
    RSSManagerException {
    // Authorization: caller must be allowed to detach users of this type.
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environmentName, instanceType,
        RSSAuthorizationUtils.ATTACH_DATABASE_USER_RESOURCE, RSSAuthorizationUtils.ActionResource.DELETE.getAction()));
    // Build the mapping entry identifying which user/database pair to detach.
    UserDatabaseEntry mapping = new UserDatabaseEntry();
    mapping.setUsername(username);
    mapping.setType(instanceType);
    mapping.setRssInstanceName(rssInstanceName);
    mapping.setDatabaseName(databaseName);
    this.getRSSManagerAdaptor(environmentName).detachUser(mapping);
}
/**
 * Returns the users currently attached to the given database as DTOs,
 * preserving the order reported by the adaptor.
 *
 * @see RSSManagerService#getAttachedUsers(String, String, String, String)
 */
public DatabaseUserInfo[] getAttachedUsers(String environmentName, String rssInstanceName,
    String databaseName, String type) throws RSSManagerException {
    DatabaseUser[] users = this.getRSSManagerAdaptor(environmentName)
            .getAttachedUsers(rssInstanceName, databaseName, type);
    DatabaseUserInfo[] result = new DatabaseUserInfo[users.length];
    for (int i = 0; i < users.length; i++) {
        result[i] = new DatabaseUserInfo();
        RSSManagerUtil.createDatabaseUserInfo(result[i], users[i]);
    }
    return result;
}
/**
 * Returns the users that exist on the RSS instance but are not yet attached
 * to the given database, as DTOs, preserving the adaptor's order.
 *
 * @see RSSManagerService#getAvailableUsers(String, String, String, String)
 */
public DatabaseUserInfo[] getAvailableUsers(String environmentName, String rssInstanceName,
    String databaseName, String type) throws RSSManagerException {
    DatabaseUser[] users = this.getRSSManagerAdaptor(environmentName)
            .getAvailableUsers(rssInstanceName, databaseName, type);
    DatabaseUserInfo[] result = new DatabaseUserInfo[users.length];
    for (int i = 0; i < users.length; i++) {
        result[i] = new DatabaseUserInfo();
        RSSManagerUtil.createDatabaseUserInfo(result[i], users[i]);
    }
    return result;
}
/**
 * Returns the privileges the given user holds on the given database as a DTO.
 *
 * @see RSSManagerService#getUserDatabasePrivileges(String, String, String, String, String)
 */
public DatabasePrivilegeSetInfo getUserDatabasePrivileges(String environmentName, String rssInstanceName,
    String databaseName, String username,
    String type) throws RSSManagerException {
    DatabasePrivilegeSet entity = this.getRSSManagerAdaptor(environmentName)
        .getUserDatabasePrivileges(rssInstanceName, databaseName, username,
            type);
    // NOTE(review): a MySQLPrivilegeSetInfo is instantiated unconditionally —
    // confirm non-MySQL instance types are handled elsewhere.
    DatabasePrivilegeSetInfo info = new MySQLPrivilegeSetInfo();
    RSSManagerUtil.createDatabasePrivilegeSetInfo(info, entity);
    return info;
}
/**
 * Not implemented: always returns an empty array.
 *
 * @see RSSManagerService#getDatabaseForTenant(String, String, String, String, String)
 */
public DatabaseInfo[] getDatabasesForTenant(String environmentName, String tenantDomain)
    throws RSSManagerException {
    return new DatabaseInfo[0];
}
/**
 * Not implemented: intentionally a no-op.
 *
 * @see RSSManagerService#addDatabaseForTenant(String, org.wso2.carbon.rssmanager.core.dto.DatabaseInfo, String)
 */
public void addDatabaseForTenant(String environmentName, DatabaseInfo database, String tenantDomain)
    throws RSSManagerException {
}
/**
 * Not implemented: always returns {@code null}.
 *
 * @see RSSManagerService#getDatabaseForTenant(String, String, String, String, String)
 */
public DatabaseInfo getDatabaseForTenant(String environmentName, String rssInstanceName,
    String databaseName, String tenantDomain, String type)
    throws RSSManagerException {
    return null;
}
/**
 * Checks whether the named database privilege template exists in the
 * environment. Returns {@code false} when a connection error was handled by
 * {@code handleException} without rethrowing.
 *
 * @see RSSManagerService#isDatabasePrivilegeTemplateExist(String, String)
 */
public boolean isDatabasePrivilegeTemplateExist(String environmentName, String templateName)
    throws RSSManagerException {
    try {
        return this.getEnvironmentManager().isDatabasePrivilegeTemplateExist(environmentName, templateName);
    }
    catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error when checking database privilege template existence of" +templateName+ e
            .getMessage();
        handleException(msg, e);
    }
    return false;
}
/**
 * Not implemented: always returns {@code false}.
 *
 * @see RSSManagerService#deleteTenantRSSData(String, String)
 */
public boolean deleteTenantRSSData(String environmentName, String tenantDomain)
    throws RSSManagerException {
    //TODO implement this after finalize the proper approach to do this
    return false;
}
/**
 * Creates a new database privilege template in the environment.
 *
 * @see RSSManagerService#addDatabasePrivilegeTemplate(String, org.wso2.carbon.rssmanager.core.dto.DatabasePrivilegeTemplateInfo)
 */
public void addDatabasePrivilegeTemplate(String environmentName, DatabasePrivilegeTemplateInfo template)
    throws RSSManagerException {
    // Convert the DTO into the persistence entity.
    DatabasePrivilegeTemplate entity = new DatabasePrivilegeTemplate();
    RSSManagerUtil.createDatabasePrivilegeTemplate(template, entity);
    try {
        this.environmentManager.createDatabasePrivilegesTemplate(environmentName, entity);
    }
    catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error when adding privilege template" +template.getName()+ e.getMessage();
        handleException(msg, e);
    }
}
/**
 * Removes the named database privilege template from the environment.
 *
 * @see RSSManagerService#removeDatabasePrivilegeTemplate(String, String)
 */
public void removeDatabasePrivilegeTemplate(String environmentName, String templateName)
    throws RSSManagerException {
    try {
        this.environmentManager.dropDatabasePrivilegesTemplate(environmentName, templateName);
    }
    catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error at removing database privilege template " +templateName+ e.getMessage();
        handleException(msg, e);
    }
}
/**
 * Updates an existing database privilege template in the environment.
 *
 * @see RSSManagerService#updateDatabasePrivilegeTemplate(String, org.wso2.carbon.rssmanager.core.dto.DatabasePrivilegeTemplateInfo)
 */
public void updateDatabasePrivilegeTemplate(String environmentName, DatabasePrivilegeTemplateInfo template)
    throws RSSManagerException {
    // Convert the DTO into the persistence entity.
    DatabasePrivilegeTemplate entity = new DatabasePrivilegeTemplate();
    RSSManagerUtil.createDatabasePrivilegeTemplate(template, entity);
    try {
        this.environmentManager.editDatabasePrivilegesTemplate(environmentName, entity);
    } catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error when updating privilege template" +template.getName()+ e.getMessage();
        handleException(msg, e);
    }
}
/**
 * Returns all database privilege templates of the environment as DTOs. On a
 * handled connection error the (empty) initial array is converted, yielding
 * an empty result.
 *
 * @see RSSManagerService#getDatabasePrivilegeTemplates(String)
 */
public DatabasePrivilegeTemplateInfo[] getDatabasePrivilegeTemplates(String environmentName)
    throws RSSManagerException {
    DatabasePrivilegeTemplate[] templates = new DatabasePrivilegeTemplate[0];
    try {
        templates = this.getEnvironmentManager()
                .getDatabasePrivilegeTemplates(environmentName);
    } catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error at get privilege templates" + e.getMessage();
        handleException(msg, e);
    }
    DatabasePrivilegeTemplateInfo[] result = new DatabasePrivilegeTemplateInfo[templates.length];
    for (int i = 0; i < templates.length; i++) {
        result[i] = new DatabasePrivilegeTemplateInfo();
        RSSManagerUtil.createDatabasePrivilegeTemplateInfo(result[i], templates[i]);
    }
    return result;
}
/**
 * Retrieves the named database privilege template of an environment as a DTO.
 *
 * @see RSSManagerService#getDatabasePrivilegeTemplate(String, String)
 */
public DatabasePrivilegeTemplateInfo getDatabasePrivilegeTemplate(String environmentName,
    String templateName)
    throws RSSManagerException {
    DatabasePrivilegeTemplate entity = null;
    try {
        entity = this.getEnvironmentManager()
            .getDatabasePrivilegeTemplate(environmentName, templateName);
    } catch (RSSDatabaseConnectionException e) {
        String msg = "Database server error when getting privilege template" +templateName+ e.getMessage();
        handleException(msg, e);
    }
    // NOTE(review): when the template does not exist, 'entity' may still be null
    // here and is passed to createDatabasePrivilegeTemplateInfo — confirm the
    // util tolerates a null source.
    DatabasePrivilegeTemplateInfo info = new DatabasePrivilegeTemplateInfo();
    RSSManagerUtil.createDatabasePrivilegeTemplateInfo(info, entity);
    return info;
}
/**
 * Creates a Carbon datasource pointing at the given database/user pair.
 * Fails if a datasource with the same name already exists.
 *
 * @see RSSManagerService#addCarbonDataSource(String, String, org.wso2.carbon.rssmanager.core.dto.UserDatabaseEntryInfo)
 */
public void addCarbonDataSource(String environmentName,
    String dataSourceName, UserDatabaseEntryInfo entry)
    throws RSSManagerException {
    // Resolve the database and its user from the RSS instance named in the entry.
    Database database = this.getRSSManagerAdaptor(environmentName)
        .getDatabase(entry.getRssInstanceName(),
            entry.getDatabaseName(), entry.getType());
    DatabaseUser databaseuserinfo = this.getRSSManagerAdaptor(
        environmentName).getDatabaseUser(entry.getRssInstanceName(),
        entry.getUsername(), entry.getType());
    // Build the datasource metadata (connection URL, credentials) from the DTO.
    DatabaseInfo info = new DatabaseInfo();
    RSSManagerUtil.createDatabaseInfo(info, database);
    DataSourceMetaInfo metaInfo = RSSManagerUtil.createDSMetaInfo(info,
        entry.getUsername(), databaseuserinfo.getPassword(),
        dataSourceName);
    try {
        // Reject duplicate datasource names before registering.
        List<CarbonDataSource> dsList = RSSManagerDataHolder.getInstance()
            .getDataSourceService().getAllDataSources();
        for (CarbonDataSource ds : dsList) {
            if (ds.getDSMInfo().getName().equals(dataSourceName)) {
                String msg = "Datasource already exists by name '"
                    + dataSourceName + "'";
                throw new RSSManagerException(msg,
                    new DataSourceException());
            }
        }
        RSSManagerDataHolder.getInstance().getDataSourceService()
            .addDataSource(metaInfo);
    } catch (DataSourceException e) {
        String msg = "Error occurred while creating carbon datasource for the database '"
            + entry.getDatabaseName() + "'";
        throw new RSSManagerException(msg, e);
    }
}
/**
 * Looks up the RSS manager adaptor configured for the given environment.
 *
 * @param environmentName name of the environment whose adaptor is wanted
 * @return the environment's {@link RSSManagerAdaptor}, never null
 * @throws IllegalArgumentException if no such environment is configured
 * @throws RSSManagerException      if the environment exists but its adaptor
 *                                  was never initialized
 */
private RSSManagerAdaptor getRSSManagerAdaptor(String environmentName) throws RSSManagerException {
    final Environment env = this.getEnvironmentManager().getEnvironment(environmentName);
    if (env == null) {
        throw new IllegalArgumentException("Invalid RSS environment '" + environmentName + "'");
    }
    final RSSManagerAdaptor rssManagerAdaptor = env.getRSSManagerAdaptor();
    if (rssManagerAdaptor == null) {
        throw new RSSManagerException("RSS Manager is not initialized properly and thus, is null");
    }
    return rssManagerAdaptor;
}
/** Accessor for the environment manager this service delegates to. */
private EnvironmentManager getEnvironmentManager() {
    return this.environmentManager;
}
/**
 * Lists the names of all configured RSS environments.
 *
 * @return array of environment names
 * @throws RSSManagerException declared for interface compatibility
 */
public String[] getEnvironments() throws RSSManagerException {
    final EnvironmentManager manager = this.getEnvironmentManager();
    return manager.getEnvironmentNames();
}
/**
 * Edits an existing database user after verifying the caller's EDIT permission
 * on the database-user resource of the given environment.
 *
 * @param environmentName  environment the user belongs to
 * @param databaseUserInfo DTO carrying the updated user attributes
 * @return the same DTO, refreshed from the persisted entity
 * @throws RSSManagerException on authorization or persistence failure
 * @see RSSManagerService#editDatabaseUser(String, org.wso2.carbon.rssmanager.core.dto.DatabaseUserInfo)
 */
public DatabaseUserInfo editDatabaseUser(String environment, DatabaseUserInfo databaseUserInfo) throws RSSManagerException {
    // Authorization check must run before any mutation is attempted.
    final String instanceType = RSSManagerUtil.getCleanInstanceType(databaseUserInfo.getType());
    RSSAuthorizer.isUserAuthorize(RSSAuthorizationUtils.getPermissionResource(environment, instanceType,
            RSSAuthorizationUtils.DATABASE_USER_RESOURCE, RSSAuthorizationUtils.ActionResource.EDIT.getAction()));
    // DTO -> entity, persist the edit, then entity -> DTO so the caller sees
    // whatever the adaptor actually stored.
    DatabaseUser user = new DatabaseUser();
    RSSManagerUtil.createDatabaseUser(databaseUserInfo, user);
    user = this.getRSSManagerAdaptor(environment).editDatabaseUser(user);
    RSSManagerUtil.createDatabaseUserInfo(databaseUserInfo, user);
    return databaseUserInfo;
}
/**
 * Returns the RSS provider name from the currently active RSS configuration.
 *
 * @return configured RSS provider identifier
 * @see RSSManagerService#getRSSProvider()
 */
public String getRSSProvider() {
    return RSSConfigurationManager.getInstance().getCurrentRSSConfig().getRSSProvider();
}
/**
 * Creates a snapshot of the named database by delegating to the environment's
 * RSS manager adaptor.
 *
 * @param environmentName environment the database belongs to
 * @param databaseName    database to snapshot
 * @param type            RSS instance type
 * @throws RSSManagerException if the environment/adaptor lookup or the
 *                             snapshot operation fails
 * @see RSSManagerService#createSnapshot
 */
@Override
public void createSnapshot(String environmentName, String databaseName, String type) throws RSSManagerException {
    this.getRSSManagerAdaptor(environmentName).createSnapshot(databaseName, type);
}
/**
 * Logs the given message/exception and rethrows it wrapped in an
 * {@link RSSManagerException}. Always throws; never returns normally.
 *
 * @param msg context message to log and wrap
 * @param e   original cause, preserved in the wrapper
 * @throws RSSManagerException always
 */
public void handleException(String msg, Exception e) throws RSSManagerException {
    log.error(msg, e);
    throw new RSSManagerException(msg, e);
}
}
|
|
package zone.otto;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
/**
 * <p>
 * The <code>TestSudoku</code> class tests the functionality of the <code>Sudoku</code> class.
 * </p>
 */
public class TestSudoku {
    /**
     * <p>
     * Exception rule, used because JUnit 4's <code>@Test(expected = ...)</code>
     * cannot assert on the exception message, while <code>ExpectedException</code> can.
     * </p>
     */
    @Rule
    public ExpectedException expectedEx = ExpectedException.none();
    /**
     * <p>
     * Path to the test resources (the <code>.dat</code> puzzle fixtures).
     * </p>
     */
    private static final String testResourcePath = "src/test/resources/";
    /**
     * <p>
     * Tests that <code>Sudoku.dataParse()</code> handles invalid filenames.
     * </p>
     */
    @Test
    public void testDataParse_BadFileName() {
        expectedEx.expect(RuntimeException.class);
        expectedEx.expectMessage("ERROR: The file (");
        Sudoku.dataParse("NonExistentFile.dat");
    }
    /**
     * <p>
     * Tests that <code>Sudoku.dataParse()</code> handles invalid input with insufficient lines.
     * </p>
     */
    @Test
    public void testDataParse_MissingLines() {
        expectedEx.expect(RuntimeException.class);
        expectedEx.expectMessage("ERROR: Insufficient number of valid input lines: ");
        Sudoku.dataParse(testResourcePath + "testDataParse_MissingLines.dat");
    }
    /**
     * <p>
     * Tests that <code>Sudoku.dataParse()</code> handles invalid input with invalid lines,
     * specifically invalid characters (i.e. <code>[^_ 1-9]</code>).
     * </p>
     */
    @Test
    public void testDataParse_InvalidCharacter() {
        expectedEx.expect(RuntimeException.class);
        expectedEx.expectMessage("ERROR: Line #");
        Sudoku.dataParse(testResourcePath + "testDataParse_InvalidCharacter.dat");
    }
    /**
     * <p>
     * Tests that <code>Sudoku.dataParse()</code> handles invalid input with invalid lines,
     * specifically duplicate digits.
     * </p>
     */
    @Test
    public void testDataParse_DuplicateDigit() {
        expectedEx.expect(RuntimeException.class);
        expectedEx.expectMessage("ERROR: Line #");
        Sudoku.dataParse(testResourcePath + "testDataParse_DuplicateDigit.dat");
    }
    /**
     * <p>
     * Tests that the backtracking recursive algorithm <code>Sudoku.dataSolve()</code> actually works. :P
     * </p>
     */
    @Test
    public void testDataSolve() {
        final int[][] data = Sudoku.dataParse(testResourcePath + "TestSudoku.dat");
        // Fully solved rendering of the TestSudoku.dat fixture.
        String expected = " 0 1 2 3 4 5 6 7 8\n" +
                " +-------+-------+-------+\n" +
                " 0 | 3 4 7 | 1 8 2 | 5 6 9 |\n" +
                " 1 | 5 1 9 | 6 7 4 | 2 3 8 |\n" +
                " 2 | 2 8 6 | 3 5 9 | 1 4 7 |\n" +
                " +-------+-------+-------+\n" +
                " 3 | 1 2 8 | 7 4 5 | 3 9 6 |\n" +
                " 4 | 4 7 3 | 9 2 6 | 8 5 1 |\n" +
                " 5 | 6 9 5 | 8 3 1 | 7 2 4 |\n" +
                " +-------+-------+-------+\n" +
                " 6 | 7 3 2 | 4 6 8 | 9 1 5 |\n" +
                " 7 | 8 6 1 | 5 9 3 | 4 7 2 |\n" +
                " 8 | 9 5 4 | 2 1 7 | 6 8 3 |\n" +
                " +-------+-------+-------+\n";
        Assert.assertTrue(Sudoku.dataSolve(0, 0, data));
        String actual = Sudoku.dataRender(data);
        Assert.assertEquals(expected, actual);
    }
    /**
     * <p>
     * Tests that <code>Sudoku.getValidSet()</code> eliminates row, column and
     * box values correctly from the <code>FULL_SET</code> (i.e. values 1 to 9).
     * </p><p>
     * EIGHT assertions are made (in four categories):
     * </p><p><ul>
     * <li>
     * A cell with a fully populated ValidSet:
     * <code>[1,2,3,4,5,6,7,8,9]</code>
     * (ONE assertion)
     * </li>
     * <li>
     * Cells with only a few eliminations from the ValidSet
     * (TWO assertions)
     * </li>
     * <li>
     * Cells with a two-entry ValidSet
     * (FOUR assertions)
     * </li>
     * <li>
     * A cell with a single-entry ValidSet
     * (ONE assertion)
     * </li>
     * </ul></p>
     */
    @Test
    public void testGetValidSet() {
        final int[][] data = Sudoku.dataParse(testResourcePath + "TestSudoku.dat");
        int[] actual;
        int[] expected;
        // All cells with fully populated ValidSet.
        actual = Sudoku.getValidSet(0, 8, data).stream().mapToInt(Integer::intValue).sorted().toArray();
        expected = new int[]{1, 2, 3, 4, 5, 6, 7, 8, 9};
        Assert.assertArrayEquals(expected, actual);
        // All cells with only a single elimination from the ValidSet.
        actual = Sudoku.getValidSet(0, 7, data).stream().mapToInt(Integer::intValue).sorted().toArray();
        expected = new int[]{1, 2, 3, 4, 6, 7, 9};
        Assert.assertArrayEquals(expected, actual);
        actual = Sudoku.getValidSet(5, 8, data).stream().mapToInt(Integer::intValue).sorted().toArray();
        expected = new int[]{1, 2, 4, 6, 7, 8, 9};
        Assert.assertArrayEquals(expected, actual);
        // All cells with only two entry ValidSet.
        actual = Sudoku.getValidSet(6, 5, data).stream().mapToInt(Integer::intValue).sorted().toArray();
        expected = new int[]{1, 8};
        Assert.assertArrayEquals(expected, actual);
        actual = Sudoku.getValidSet(7, 0, data).stream().mapToInt(Integer::intValue).sorted().toArray();
        expected = new int[]{1, 8};
        Assert.assertArrayEquals(expected, actual);
        actual = Sudoku.getValidSet(7, 1, data).stream().mapToInt(Integer::intValue).sorted().toArray();
        expected = new int[]{6, 8};
        Assert.assertArrayEquals(expected, actual);
        actual = Sudoku.getValidSet(8, 4, data).stream().mapToInt(Integer::intValue).sorted().toArray();
        expected = new int[]{1, 2};
        Assert.assertArrayEquals(expected, actual);
        // All cells with single entry ValidSet.
        actual = Sudoku.getValidSet(7, 2, data).stream().mapToInt(Integer::intValue).sorted().toArray();
        expected = new int[]{1};
        Assert.assertArrayEquals(expected, actual);
    }
    /**
     * <p>
     * Test that the output of <code>Sudoku.dataRender()</code> is correctly "Sudokufied", i.e.:
     * </p><p><ul>
     * <li>
     * Data cell 0s replaced with " ".
     * </li>
     * <li>
     * Data cells grouped into 3 x 3 boxes.
     * </li>
     * </ul></p>
     */
    @Test
    public void testDataRender() {
        final int[][] data = Sudoku.dataParse(testResourcePath + "TestSudoku.dat");
        // Unsolved rendering of the TestSudoku.dat fixture (blanks for 0s).
        String expected = " 0 1 2 3 4 5 6 7 8\n" +
                " +-------+-------+-------+\n" +
                " 0 | | | |\n" +
                " 1 | 1 | 6 4 | |\n" +
                " 2 | 2 6 | 5 | |\n" +
                " +-------+-------+-------+\n" +
                " 3 | 8 | 4 | |\n" +
                " 4 | 4 | 9 | 5 |\n" +
                " 5 | 5 | 3 | |\n" +
                " +-------+-------+-------+\n" +
                " 6 | 7 2 | 6 | |\n" +
                " 7 | 6 | 5 9 3 | 4 |\n" +
                " 8 | | 7 | 8 |\n" +
                " +-------+-------+-------+\n";
        String actual = Sudoku.dataRender(data);
        Assert.assertEquals(expected, actual);
    }
    /**
     * <p>
     * Test that <code>Sudoku.renderUsage()</code> renders the usage screen correctly.
     * </p>
     */
    @Test
    public void testRenderUsage() {
        String expected = "\nUSAGE:\n" +
                "\n java Sudoku <filename>\n" +
                "\n OR\n" +
                "\n cat <filename> | java Sudoku\n" +
                "\n";
        String actual = Sudoku.renderUsage();
        Assert.assertEquals(expected, actual);
    }
}
|
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lexmodelbuilding.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lex-models-2017-04-19/GetBotVersions" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetBotVersionsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
    /**
     * <p>
     * An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     * <code>$LATEST</code> version.
     * </p>
     */
    private java.util.List<BotMetadata> bots;
    /**
     * <p>
     * A pagination token for fetching the next page of bot versions. If the response to this call is truncated, Amazon
     * Lex returns a pagination token in the response. To fetch the next page of versions, specify the pagination token
     * in the next request.
     * </p>
     */
    private String nextToken;

    /**
     * <p>
     * An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     * <code>$LATEST</code> version.
     * </p>
     *
     * @return An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     *         <code>$LATEST</code> version.
     */
    public java.util.List<BotMetadata> getBots() {
        return bots;
    }

    /**
     * <p>
     * An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     * <code>$LATEST</code> version.
     * </p>
     *
     * @param bots
     *        An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     *        <code>$LATEST</code> version.
     */
    public void setBots(java.util.Collection<BotMetadata> bots) {
        if (bots == null) {
            this.bots = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection has no effect.
        this.bots = new java.util.ArrayList<BotMetadata>(bots);
    }

    /**
     * <p>
     * An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     * <code>$LATEST</code> version.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setBots(java.util.Collection)} or {@link #withBots(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param bots
     *        An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     *        <code>$LATEST</code> version.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetBotVersionsResult withBots(BotMetadata... bots) {
        if (this.bots == null) {
            // Presize to the varargs length; elements are appended below.
            setBots(new java.util.ArrayList<BotMetadata>(bots.length));
        }
        for (BotMetadata ele : bots) {
            this.bots.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     * <code>$LATEST</code> version.
     * </p>
     *
     * @param bots
     *        An array of <code>BotMetadata</code> objects, one for each numbered version of the bot plus one for the
     *        <code>$LATEST</code> version.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetBotVersionsResult withBots(java.util.Collection<BotMetadata> bots) {
        setBots(bots);
        return this;
    }

    /**
     * <p>
     * A pagination token for fetching the next page of bot versions. If the response to this call is truncated, Amazon
     * Lex returns a pagination token in the response. To fetch the next page of versions, specify the pagination token
     * in the next request.
     * </p>
     *
     * @param nextToken
     *        A pagination token for fetching the next page of bot versions. If the response to this call is truncated,
     *        Amazon Lex returns a pagination token in the response. To fetch the next page of versions, specify the
     *        pagination token in the next request.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * A pagination token for fetching the next page of bot versions. If the response to this call is truncated, Amazon
     * Lex returns a pagination token in the response. To fetch the next page of versions, specify the pagination token
     * in the next request.
     * </p>
     *
     * @return A pagination token for fetching the next page of bot versions. If the response to this call is truncated,
     *         Amazon Lex returns a pagination token in the response. To fetch the next page of versions, specify the
     *         pagination token in the next request.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * A pagination token for fetching the next page of bot versions. If the response to this call is truncated, Amazon
     * Lex returns a pagination token in the response. To fetch the next page of versions, specify the pagination token
     * in the next request.
     * </p>
     *
     * @param nextToken
     *        A pagination token for fetching the next page of bot versions. If the response to this call is truncated,
     *        Amazon Lex returns a pagination token in the response. To fetch the next page of versions, specify the
     *        pagination token in the next request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetBotVersionsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getBots() != null)
            sb.append("Bots: ").append(getBots()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, covering the null check as well.
        if (!(obj instanceof GetBotVersionsResult))
            return false;
        GetBotVersionsResult other = (GetBotVersionsResult) obj;
        return java.util.Objects.equals(other.getBots(), this.getBots())
                && java.util.Objects.equals(other.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        // Same 31-based formula (and therefore same values) as the generated code.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + java.util.Objects.hashCode(getBots());
        hashCode = prime * hashCode + java.util.Objects.hashCode(getNextToken());
        return hashCode;
    }

    @Override
    public GetBotVersionsResult clone() {
        try {
            return (GetBotVersionsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
|
|
package org.aries.ui.tag;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
import javax.el.ELException;
import javax.el.ValueExpression;
import javax.el.VariableMapper;
import javax.faces.FacesException;
import javax.faces.component.UIComponent;
import javax.faces.view.facelets.FaceletContext;
import javax.faces.view.facelets.FaceletException;
import javax.faces.view.facelets.TagAttribute;
import javax.faces.view.facelets.TagConfig;
import javax.faces.view.facelets.TagHandler;
import org.jboss.el.ValueExpressionImpl;
/**
 * Facelets tag handler that publishes the tag's attributes as EL variables for
 * the nested content, and re-publishes a fixed set of well-known template
 * parameters (manager, action, title, ...) as request-scoped attributes unless
 * a "param" attribute on the tag explicitly claims them.
 */
public class ValueBindingTagHandler extends TagHandler {

    /**
     * The well-known template parameter names. When one of these appears in a
     * comma-separated "param" attribute, its default propagation (below) is
     * suppressed; any other name is copied straight into the facelet context.
     */
    private static final String[] KNOWN_PARAMS = {
            "manager", "enabled", "action", "event", "domain", "section", "dialog",
            "logo", "title", "heading", "message", "render", "value"};

    private FaceletContext faceletContext;

    /** All attributes declared on the tag; each becomes a variable in the new mapper. */
    protected final TagAttribute[] vars;

    public ValueBindingTagHandler(TagConfig config) {
        super(config);
        vars = tag.getAttributes().getAll();
    }

    /**
     * Installs a {@link CustomVariableMapper} exposing the tag's attributes,
     * propagates the well-known parameters as attributes, then applies the
     * nested handlers with the new mapper in place.
     */
    public void apply(FaceletContext faceletContext, UIComponent parent) throws IOException, FacesException, FaceletException, ELException {
        this.faceletContext = faceletContext;
        VariableMapper originalMapper = faceletContext.getVariableMapper();
        VariableMapper newMapper = new CustomVariableMapper(originalMapper);
        // Resolve the well-known variables from the enclosing scope up front.
        ValueExpression enabled = originalMapper.resolveVariable("enabled");
        ValueExpression manager = originalMapper.resolveVariable("manager");
        ValueExpression action = originalMapper.resolveVariable("action");
        ValueExpression event = originalMapper.resolveVariable("event");
        ValueExpression domain = originalMapper.resolveVariable("domain");
        ValueExpression section = originalMapper.resolveVariable("section");
        ValueExpression dialog = originalMapper.resolveVariable("dialog");
        ValueExpression logo = originalMapper.resolveVariable("logo");
        ValueExpression title = originalMapper.resolveVariable("title");
        ValueExpression heading = originalMapper.resolveVariable("heading");
        ValueExpression message = originalMapper.resolveVariable("message");
        ValueExpression render = originalMapper.resolveVariable("render");
        ValueExpression value = originalMapper.resolveVariable("value");
        // Well-known names claimed by a "param" attribute; these are NOT
        // propagated by the default logic at the end of this method.
        Set<String> claimed = new HashSet<String>();
        for (TagAttribute tagAttribute : this.vars) {
            String localName = tagAttribute.getLocalName();
            ValueExpression valueExpression = tagAttribute.getValueExpression(faceletContext, Object.class);
            newMapper.setVariable(localName, valueExpression);
            if (localName.contains("param")) {
                // A "param" attribute's value is a comma-separated name list.
                String values = (String) valueExpression.getValue(faceletContext);
                StringTokenizer stringTokenizer = new StringTokenizer(values, ",");
                while (stringTokenizer.hasMoreElements()) {
                    String paramName = ((String) stringTokenizer.nextElement()).trim();
                    if (isKnownParam(paramName)) {
                        claimed.add(paramName);
                        continue;
                    }
                    // Unknown name: copy its current value (if any) into the context.
                    ValueExpression paramExpression = originalMapper.resolveVariable(paramName);
                    if (paramExpression != null) {
                        Object paramValue = paramExpression.getValue(faceletContext);
                        if (paramValue != null)
                            faceletContext.setAttribute(paramName, paramValue);
                    }
                }
            }
        }
        // Propagate each well-known variable that exists and was not claimed.
        if (enabled != null && !claimed.contains("enabled"))
            faceletContext.setAttribute("enabled", enabled.getValue(faceletContext));
        if (manager != null && !claimed.contains("manager"))
            faceletContext.setAttribute("manager", manager.getValue(faceletContext));
        if (action != null && !claimed.contains("action"))
            faceletContext.setAttribute("action", action.getValue(faceletContext));
        if (event != null && !claimed.contains("event"))
            faceletContext.setAttribute("event", event.getValue(faceletContext));
        if (domain != null && !claimed.contains("domain"))
            faceletContext.setAttribute("domain", domain.getValue(faceletContext));
        if (section != null && !claimed.contains("section"))
            faceletContext.setAttribute("section", section.getValue(faceletContext));
        if (dialog != null && !claimed.contains("dialog"))
            faceletContext.setAttribute("dialog", dialog.getValue(faceletContext));
        if (logo != null && !claimed.contains("logo"))
            faceletContext.setAttribute("logo", logo.getValue(faceletContext));
        if (title != null && !claimed.contains("title"))
            faceletContext.setAttribute("title", title.getValue(faceletContext));
        if (heading != null && !claimed.contains("heading"))
            faceletContext.setAttribute("heading", heading.getValue(faceletContext));
        if (message != null && !claimed.contains("message"))
            faceletContext.setAttribute("message", message.getValue(faceletContext));
        if (render != null && !claimed.contains("render"))
            faceletContext.setAttribute("render", render.getValue(faceletContext));
        if (value != null && !claimed.contains("value")) {
            // NOTE(review): unlike the other parameters, "value" publishes the
            // ValueExpression itself rather than its evaluated value. Preserved
            // as found — confirm this asymmetry is intentional.
            faceletContext.setAttribute("value", value);
        }
        faceletContext.setVariableMapper(newMapper);
        this.nextHandler.apply(faceletContext, parent);
    }

    /** Returns true if the given name is one of the well-known parameters. */
    private static boolean isKnownParam(String name) {
        for (String candidate : KNOWN_PARAMS) {
            if (candidate.equals(name)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Variable mapper that first consults its own variables (set from the tag's
     * attributes) and falls back to the parent mapper, wrapping inherited
     * expressions so they are not re-wrapped on subsequent lookups.
     */
    public class CustomVariableMapper extends VariableMapper {
        private final VariableMapper originalMapper;
        private Map<String, ValueExpression> vars;

        public CustomVariableMapper(VariableMapper originalMapper) {
            this.originalMapper = originalMapper;
        }

        public ValueExpression resolveVariable(String variable) {
            ValueExpression ve = null;
            try {
                if (this.vars != null) {
                    ve = this.vars.get(variable);
                }
                if (ve == null) {
                    ValueExpression fromParentVE = this.originalMapper.resolveVariable(variable);
                    if (fromParentVE != null) {
                        if (fromParentVE instanceof FromParentValueExpression) {
                            // Already wrapped by an outer handler: stop the chain here.
                            ve = null;
                        } else {
                            ve = new FromParentValueExpression(fromParentVE);
                        }
                    }
                }
                return ve;
            } catch (StackOverflowError e) {
                // Cyclic variable definitions manifest as infinite recursion.
                throw new ELException("Could not Resolve Variable [Overflow]: " + variable, e);
            }
        }

        public ValueExpression setVariable(String variable, ValueExpression expression) {
            if (vars == null) {
                vars = new HashMap<String, ValueExpression>();
            }
            return vars.put(variable, expression);
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.kstream.internals.CacheFlushListener;
import org.apache.kafka.streams.processor.ProcessorContext;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.internals.InternalProcessorContext;
import org.apache.kafka.streams.processor.internals.ProcessorRecordContext;
import org.apache.kafka.streams.processor.internals.ProcessorStateManager;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.StateSerdes;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.streams.state.WindowStoreIterator;
import java.util.List;
class CachingWindowStore<K, V> extends WrappedStateStore.AbstractStateStore implements WindowStore<Bytes, byte[]>, CachedStateStore<Windowed<K>, V> {
// The wrapped store that ultimately persists entries flushed from the cache.
private final WindowStore<Bytes, byte[]> underlying;
// Key/value serdes; may be null and are then taken from the context in initInternal().
private final Serde<K> keySerde;
private final Serde<V> valueSerde;
// Window size used to reconstruct Windowed keys from binary store keys.
private final long windowSize;
private final SegmentedBytesStore.KeySchema keySchema = new WindowKeySchema();
// Cache namespace: "<taskId>-<storeName>", set in initInternal().
private String name;
private ThreadCache cache;
// Whether the flush listener should also receive the previous value.
private boolean sendOldValues;
private StateSerdes<K, V> serdes;
private InternalProcessorContext context;
private StateSerdes<Bytes, byte[]> bytesSerdes;
private CacheFlushListener<Windowed<K>, V> flushListener;
// Maps store keys to segmented cache keys and back.
private final SegmentedCacheFunction cacheFunction;
/**
 * Wraps the given window store with a write-back cache.
 *
 * @param underlying      store that receives entries evicted/flushed from the cache
 * @param keySerde        key serde; null means use the context's default serde
 * @param valueSerde      value serde; null means use the context's default serde
 * @param windowSize      window size for reconstructing windowed keys
 * @param segmentInterval segment interval for the segmented cache-key function
 */
CachingWindowStore(final WindowStore<Bytes, byte[]> underlying,
                   final Serde<K> keySerde,
                   final Serde<V> valueSerde,
                   final long windowSize,
                   final long segmentInterval) {
    super(underlying);
    this.underlying = underlying;
    this.keySerde = keySerde;
    this.valueSerde = valueSerde;
    this.windowSize = windowSize;
    this.cacheFunction = new SegmentedCacheFunction(keySchema, segmentInterval);
}
/**
 * Initializes the cache/serdes, then the wrapped store, then the key schema.
 */
@Override
public void init(final ProcessorContext context, final StateStore root) {
    initInternal(context);
    underlying.init(context, root);
    keySchema.init(context.applicationId());
}
/**
 * Resolves serdes (falling back to the context defaults when none were given),
 * derives the cache namespace from the task id and store name, and registers a
 * listener that, on cache eviction/flush, forwards each dirty entry downstream
 * and writes it through to the underlying store.
 */
@SuppressWarnings("unchecked")
private void initInternal(final ProcessorContext context) {
    this.context = (InternalProcessorContext) context;
    // Serdes are keyed by the store's changelog topic name.
    final String topic = ProcessorStateManager.storeChangelogTopic(context.applicationId(), underlying.name());
    serdes = new StateSerdes<>(topic,
                               keySerde == null ? (Serde<K>) context.keySerde() : keySerde,
                               valueSerde == null ? (Serde<V>) context.valueSerde() : valueSerde);
    bytesSerdes = new StateSerdes<>(topic,
                                    Serdes.Bytes(),
                                    Serdes.ByteArray());
    // Cache namespace is per task + store.
    name = context.taskId() + "-" + underlying.name();
    cache = this.context.getCache();
    cache.addDirtyEntryFlushListener(name, new ThreadCache.DirtyEntryFlushListener() {
        @Override
        public void apply(final List<ThreadCache.DirtyEntry> entries) {
            for (final ThreadCache.DirtyEntry entry : entries) {
                // Strip the segment prefix, then decode timestamp and key parts
                // from the binary window key.
                final byte[] binaryWindowKey = cacheFunction.key(entry.key()).get();
                final long timestamp = WindowKeySchema.extractStoreTimestamp(binaryWindowKey);
                final Windowed<K> windowedKey = WindowKeySchema.fromStoreKey(binaryWindowKey, windowSize, serdes);
                final Bytes key = Bytes.wrap(WindowKeySchema.extractStoreKeyBytes(binaryWindowKey));
                // Forward downstream (if a listener is set) before writing through.
                maybeForward(entry, key, windowedKey, (InternalProcessorContext) context);
                underlying.put(key, entry.newValue(), timestamp);
            }
        }
    });
}
/**
 * Forwards a flushed cache entry to the registered flush listener, if any,
 * while temporarily restoring the record context that was current when the
 * entry was originally cached.
 */
private void maybeForward(final ThreadCache.DirtyEntry entry,
                          final Bytes key,
                          final Windowed<K> windowedKey,
                          final InternalProcessorContext context) {
    if (flushListener != null) {
        // Swap in the entry's originating record context for the callback.
        final ProcessorRecordContext current = context.recordContext();
        context.setRecordContext(entry.recordContext());
        try {
            // Fetch the old value only when downstream asked for it.
            final V oldValue = sendOldValues ? fetchPrevious(key, windowedKey.window().start()) : null;
            flushListener.apply(windowedKey, serdes.valueFrom(entry.newValue()), oldValue);
        } finally {
            // Always restore the caller's record context.
            context.setRecordContext(current);
        }
    }
}
/**
 * Registers the listener invoked for each entry flushed from the cache.
 *
 * @param flushListener callback receiving (windowedKey, newValue, oldValue)
 * @param sendOldValues whether the previous value should be fetched and passed
 */
public void setFlushListener(final CacheFlushListener<Windowed<K>, V> flushListener,
                             final boolean sendOldValues) {
    this.flushListener = flushListener;
    this.sendOldValues = sendOldValues;
}
/**
 * Flushes the cache first (which writes dirty entries through via the
 * registered listener) and then flushes the underlying store.
 */
@Override
public synchronized void flush() {
    cache.flush(name);
    underlying.flush();
}
/**
 * Flushes pending entries, then closes the cache namespace and the
 * underlying store.
 */
@Override
public void close() {
    flush();
    cache.close(name);
    underlying.close();
}
/**
 * Puts a value using the current record timestamp as the window start.
 */
@Override
public synchronized void put(final Bytes key, final byte[] value) {
    put(key, value, context.timestamp());
}
/**
 * Caches the value under a binary window key (sequence number 0), capturing
 * the current record context so it can be restored when the entry is flushed.
 */
@Override
public synchronized void put(final Bytes key, final byte[] value, final long windowStartTimestamp) {
    // since this function may not access the underlying inner store, we need to validate
    // if store is open outside as well.
    validateStoreOpen();
    final Bytes keyBytes = WindowKeySchema.toStoreKeyBinary(key, windowStartTimestamp, 0);
    final LRUCacheEntry entry =
        new LRUCacheEntry(
            value,
            context.headers(),
            true,
            context.offset(),
            context.timestamp(),
            context.partition(),
            context.topic());
    cache.put(name, cacheFunction.cacheKey(keyBytes), entry);
}
/**
 * Fetches the value for the given key and window-start timestamp, consulting
 * the cache first and falling back to the underlying store on a miss.
 *
 * @param key       record key
 * @param timestamp window start timestamp
 * @return cached or stored value, or null if absent
 */
@Override
public byte[] fetch(final Bytes key, final long timestamp) {
    validateStoreOpen();
    if (cache == null) {
        // No cache configured: read straight through, skipping key construction.
        return underlying.fetch(key, timestamp);
    }
    final Bytes bytesKey = WindowKeySchema.toStoreKeyBinary(key, timestamp, 0);
    final Bytes cacheKey = cacheFunction.cacheKey(bytesKey);
    final LRUCacheEntry entry = cache.get(name, cacheKey);
    if (entry == null) {
        return underlying.fetch(key, timestamp);
    }
    return entry.value();
}
/**
 * Returns an iterator over all values for {@code key} whose window start lies in
 * [timeFrom, timeTo], merging cached (dirty) entries with the underlying store's records.
 */
@Override
public synchronized WindowStoreIterator<byte[]> fetch(final Bytes key, final long timeFrom, final long timeTo) {
    // This call may be answered partly from the cache, so the open-check must
    // happen here rather than relying on the inner store to perform it.
    validateStoreOpen();
    final WindowStoreIterator<byte[]> storeIterator = underlying.fetch(key, timeFrom, timeTo);
    if (cache == null) {
        // No cache (e.g. standby task): results come solely from the underlying store.
        return storeIterator;
    }
    final Bytes rangeStart = cacheFunction.cacheKey(keySchema.lowerRangeFixedSize(key, timeFrom));
    final Bytes rangeEnd = cacheFunction.cacheKey(keySchema.upperRangeFixedSize(key, timeTo));
    final ThreadCache.MemoryLRUCacheBytesIterator cachedRange = cache.range(name, rangeStart, rangeEnd);
    // The raw cache range may contain keys outside the requested window; filter them out.
    final HasNextCondition withinBounds = keySchema.hasNextCondition(key, key, timeFrom, timeTo);
    final PeekingKeyValueIterator<Bytes, LRUCacheEntry> filteredCachedRange =
        new FilteredCacheIterator(cachedRange, withinBounds, cacheFunction);
    return new MergedSortedCacheWindowStoreIterator(filteredCachedRange, storeIterator);
}
/**
 * Returns an iterator over all entries with key in [from, to] and window start in
 * [timeFrom, timeTo], merging cached (dirty) entries with the underlying store's records.
 */
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> fetch(final Bytes from, final Bytes to, final long timeFrom, final long timeTo) {
    // This call may be answered partly from the cache, so the open-check must
    // happen here rather than relying on the inner store to perform it.
    validateStoreOpen();
    final KeyValueIterator<Windowed<Bytes>, byte[]> storeIterator = underlying.fetch(from, to, timeFrom, timeTo);
    if (cache == null) {
        // No cache (e.g. standby task): results come solely from the underlying store.
        return storeIterator;
    }
    final Bytes rangeStart = cacheFunction.cacheKey(keySchema.lowerRange(from, timeFrom));
    final Bytes rangeEnd = cacheFunction.cacheKey(keySchema.upperRange(to, timeTo));
    final ThreadCache.MemoryLRUCacheBytesIterator cachedRange = cache.range(name, rangeStart, rangeEnd);
    // The raw cache range may contain keys outside the requested bounds; filter them out.
    final HasNextCondition withinBounds = keySchema.hasNextCondition(from, to, timeFrom, timeTo);
    final PeekingKeyValueIterator<Bytes, LRUCacheEntry> filteredCachedRange =
        new FilteredCacheIterator(cachedRange, withinBounds, cacheFunction);
    return new MergedSortedCacheWindowStoreKeyValueIterator(
        filteredCachedRange,
        storeIterator,
        bytesSerdes,
        windowSize,
        cacheFunction
    );
}
/**
 * Looks up the prior value for {@code key} at window start {@code timestamp} from the
 * underlying store and deserializes it.
 *
 * @return the deserialized previous value, or {@code null} when absent
 */
private V fetchPrevious(final Bytes key, final long timestamp) {
    final byte[] rawValue = underlying.fetch(key, timestamp);
    return rawValue == null ? null : serdes.valueFrom(rawValue);
}
/**
 * Returns an iterator over every window entry in the store, merging cached entries
 * with the underlying store's records.
 */
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> all() {
    validateStoreOpen();
    final KeyValueIterator<Windowed<Bytes>, byte[]> underlyingIterator = underlying.all();
    // Guard against a missing cache for consistency with the fetch() methods, which
    // all early-out on cache == null; without this the call below would NPE.
    if (cache == null) {
        return underlyingIterator;
    }
    final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.all(name);
    return new MergedSortedCacheWindowStoreKeyValueIterator(
        cacheIterator,
        underlyingIterator,
        bytesSerdes,
        windowSize,
        cacheFunction
    );
}
/**
 * Returns an iterator over every entry whose window start lies in [timeFrom, timeTo],
 * merging cached entries with the underlying store's records.
 */
@Override
public KeyValueIterator<Windowed<Bytes>, byte[]> fetchAll(final long timeFrom, final long timeTo) {
    validateStoreOpen();
    final KeyValueIterator<Windowed<Bytes>, byte[]> underlyingIterator = underlying.fetchAll(timeFrom, timeTo);
    // Guard against a missing cache for consistency with the fetch() methods, which
    // all early-out on cache == null; without this the call below would NPE.
    if (cache == null) {
        return underlyingIterator;
    }
    final ThreadCache.MemoryLRUCacheBytesIterator cacheIterator = cache.all(name);
    // Keys are unbounded here (null, null); only the time range is constrained.
    final HasNextCondition hasNextCondition = keySchema.hasNextCondition(null, null, timeFrom, timeTo);
    final PeekingKeyValueIterator<Bytes, LRUCacheEntry> filteredCacheIterator =
        new FilteredCacheIterator(cacheIterator, hasNextCondition, cacheFunction);
    return new MergedSortedCacheWindowStoreKeyValueIterator(
        filteredCacheIterator,
        underlyingIterator,
        bytesSerdes,
        windowSize,
        cacheFunction
    );
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.appender.mom;
import java.io.Serializable;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Session;
import javax.naming.NamingException;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractManager;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.apache.logging.log4j.core.appender.ManagerFactory;
import org.apache.logging.log4j.core.net.JndiManager;
import org.apache.logging.log4j.core.util.Log4jThread;
import org.apache.logging.log4j.status.StatusLogger;
import org.apache.logging.log4j.util.BiConsumer;
/**
* Consider this class <b>private</b>; it is only <b>public</b> for access by integration tests.
*
* <p>
* JMS connection and session manager. Can be used to access MessageProducer, MessageConsumer, and Message objects
* involving a configured ConnectionFactory and Destination.
* </p>
*/
public class JmsManager extends AbstractManager {

    /**
     * Immutable holder for all settings needed to (re)establish a JMS connection.
     */
    public static class JmsManagerConfiguration {
        private final Properties jndiProperties;
        private final String connectionFactoryName;
        private final String destinationName;
        private final String userName;
        private final char[] password;
        private final boolean immediateFail;
        // Derived flag: reconnection is attempted iff reconnectIntervalMillis > 0.
        private final boolean retry;
        private final long reconnectIntervalMillis;

        JmsManagerConfiguration(final Properties jndiProperties, final String connectionFactoryName,
                final String destinationName, final String userName, final char[] password, final boolean immediateFail,
                final long reconnectIntervalMillis) {
            this.jndiProperties = jndiProperties;
            this.connectionFactoryName = connectionFactoryName;
            this.destinationName = destinationName;
            this.userName = userName;
            this.password = password;
            this.immediateFail = immediateFail;
            this.reconnectIntervalMillis = reconnectIntervalMillis;
            this.retry = reconnectIntervalMillis > 0;
        }

        public String getConnectionFactoryName() {
            return connectionFactoryName;
        }

        public String getDestinationName() {
            return destinationName;
        }

        /** Looks up (and possibly creates) a JndiManager for the configured JNDI properties on every call. */
        public JndiManager getJndiManager() {
            return JndiManager.getJndiManager(getJndiProperties());
        }

        public Properties getJndiProperties() {
            return jndiProperties;
        }

        public char[] getPassword() {
            return password;
        }

        public long getReconnectIntervalMillis() {
            return reconnectIntervalMillis;
        }

        public String getUserName() {
            return userName;
        }

        public boolean isImmediateFail() {
            return immediateFail;
        }

        public boolean isRetry() {
            return retry;
        }
    }

    private static class JmsManagerFactory implements ManagerFactory<JmsManager, JmsManagerConfiguration> {

        @Override
        public JmsManager createManager(final String name, final JmsManagerConfiguration data) {
            try {
                return new JmsManager(name, data);
            } catch (final Exception e) {
                // Creation failures are logged and reported as null so getManager() can fail gracefully.
                LOGGER.error("Error creating JmsManager using JmsManagerConfiguration [{}]", data, e);
                return null;
            }
        }
    }

    /**
     * Handles reconnecting to JMS on a background thread.
     */
    private class Reconnector extends Log4jThread {

        private final CountDownLatch latch = new CountDownLatch(1);

        private volatile boolean shutdown = false;

        // Monitor guarding the owning manager's connection state during swap-over.
        private final Object owner;

        public Reconnector(final Object owner) {
            super("JmsManager-Reconnector");
            this.owner = owner;
        }

        /** Blocks the caller until the first reconnection attempt has completed (success or failure). */
        public void latch() {
            try {
                latch.await();
            } catch (final InterruptedException ex) {
                // Ignore the exception.
            }
        }

        void reconnect() throws NamingException, JMSException {
            // Build the complete replacement JMS plumbing first...
            final JndiManager jndiManager2 = getJndiManager();
            final Connection connection2 = createConnection(jndiManager2);
            final Session session2 = createSession(connection2);
            final Destination destination2 = createDestination(jndiManager2);
            final MessageProducer messageProducer2 = createMessageProducer(session2, destination2);
            connection2.start();
            // ...then atomically swap it into the owning manager and stop this reconnector.
            synchronized (owner) {
                jndiManager = jndiManager2;
                connection = connection2;
                session = session2;
                destination = destination2;
                messageProducer = messageProducer2;
                reconnector = null;
                shutdown = true;
            }
            LOGGER.debug("Connection reestablished to {}", configuration);
        }

        @Override
        public void run() {
            while (!shutdown) {
                try {
                    sleep(configuration.getReconnectIntervalMillis());
                    reconnect();
                } catch (final InterruptedException | JMSException | NamingException e) {
                    LOGGER.debug("Cannot reestablish JMS connection to {}: {}", configuration, e.getLocalizedMessage(),
                            e);
                } finally {
                    // Release any thread waiting in latch() after each attempt.
                    latch.countDown();
                }
            }
        }

        public void shutdown() {
            shutdown = true;
        }
    }

    private static final Logger LOGGER = StatusLogger.getLogger();

    static final JmsManagerFactory FACTORY = new JmsManagerFactory();

    /**
     * Gets a JmsManager using the specified configuration parameters.
     *
     * @param name
     *            The name to use for this JmsManager.
     * @param jndiProperties
     *            The JNDI environment properties used to look up JMS objects.
     * @param connectionFactoryName
     *            The binding name for the {@link javax.jms.ConnectionFactory}.
     * @param destinationName
     *            The binding name for the {@link javax.jms.Destination}.
     * @param userName
     *            The userName to connect with or {@code null} for no authentication.
     * @param password
     *            The password to use with the given userName or {@code null} for no authentication.
     * @param immediateFail
     *            Whether or not to fail immediately with a {@link AppenderLoggingException} when connecting to JMS
     *            fails.
     * @param reconnectIntervalMillis
     *            How long to sleep, in milliseconds, before trying to reconnect to JMS; {@code 0} or less disables
     *            reconnection.
     * @return The JmsManager as configured.
     */
    public static JmsManager getJmsManager(final String name, final Properties jndiProperties,
            final String connectionFactoryName, final String destinationName, final String userName,
            final char[] password, final boolean immediateFail, final long reconnectIntervalMillis) {
        final JmsManagerConfiguration configuration = new JmsManagerConfiguration(jndiProperties, connectionFactoryName,
                destinationName, userName, password, immediateFail, reconnectIntervalMillis);
        return getManager(name, FACTORY, configuration);
    }

    private final JmsManagerConfiguration configuration;

    // All connection state is volatile because the Reconnector thread replaces it asynchronously.
    private volatile Reconnector reconnector;
    private volatile JndiManager jndiManager;
    private volatile Connection connection;
    private volatile Session session;
    private volatile Destination destination;
    private volatile MessageProducer messageProducer;

    private JmsManager(final String name, final JmsManagerConfiguration configuration) {
        super(null, name);
        this.configuration = configuration;
        this.jndiManager = configuration.getJndiManager();
        try {
            this.connection = createConnection(this.jndiManager);
            this.session = createSession(this.connection);
            this.destination = createDestination(this.jndiManager);
            this.messageProducer = createMessageProducer(this.session, this.destination);
            this.connection.start();
        } catch (NamingException | JMSException e) {
            // Initial connection failed: start the reconnector, which will retry in the background.
            this.reconnector = createReconnector();
            this.reconnector.start();
        }
    }

    /** Closes the current Connection, if any. Returns true when nothing needed closing or closing succeeded. */
    private boolean closeConnection() {
        if (connection == null) {
            return true;
        }
        final Connection temp = connection;
        connection = null;
        try {
            temp.close();
            return true;
        } catch (final JMSException e) {
            StatusLogger.getLogger().debug(
                    "Caught exception closing JMS Connection: {} ({}); continuing JMS manager shutdown",
                    e.getLocalizedMessage(), temp, e);
            return false;
        }
    }

    /** Releases the current JndiManager, if any. Always reports success. */
    private boolean closeJndiManager() {
        if (jndiManager == null) {
            return true;
        }
        final JndiManager tmp = jndiManager;
        jndiManager = null;
        tmp.close();
        return true;
    }

    /** Closes the current MessageProducer, if any. Returns true when nothing needed closing or closing succeeded. */
    private boolean closeMessageProducer() {
        if (messageProducer == null) {
            return true;
        }
        final MessageProducer temp = messageProducer;
        messageProducer = null;
        try {
            temp.close();
            return true;
        } catch (final JMSException e) {
            StatusLogger.getLogger().debug(
                    "Caught exception closing JMS MessageProducer: {} ({}); continuing JMS manager shutdown",
                    e.getLocalizedMessage(), temp, e);
            return false;
        }
    }

    /** Closes the current Session, if any. Returns true when nothing needed closing or closing succeeded. */
    private boolean closeSession() {
        if (session == null) {
            return true;
        }
        final Session temp = session;
        session = null;
        try {
            temp.close();
            return true;
        } catch (final JMSException e) {
            StatusLogger.getLogger().debug(
                    "Caught exception closing JMS Session: {} ({}); continuing JMS manager shutdown",
                    e.getLocalizedMessage(), temp, e);
            return false;
        }
    }

    /**
     * Looks up the configured ConnectionFactory and creates a Connection, authenticated
     * when both a user name and a password are configured.
     */
    private Connection createConnection(final JndiManager jndiManager) throws NamingException, JMSException {
        final ConnectionFactory connectionFactory = jndiManager.lookup(configuration.getConnectionFactoryName());
        if (configuration.getUserName() != null && configuration.getPassword() != null) {
            // Both credentials are known non-null here, so the password can be converted directly.
            return connectionFactory.createConnection(configuration.getUserName(),
                    String.valueOf(configuration.getPassword()));
        }
        return connectionFactory.createConnection();
    }

    private Destination createDestination(final JndiManager jndiManager) throws NamingException {
        return jndiManager.lookup(configuration.getDestinationName());
    }

    /**
     * Creates a TextMessage, MapMessage, or ObjectMessage from a Serializable object.
     * <p>
     * For instance, when using a text-based {@link org.apache.logging.log4j.core.Layout} such as
     * {@link org.apache.logging.log4j.core.layout.PatternLayout}, the {@link org.apache.logging.log4j.core.LogEvent}
     * message will be serialized to a String.
     * </p>
     * <p>
     * When using a layout such as {@link org.apache.logging.log4j.core.layout.SerializedLayout}, the LogEvent message
     * will be serialized as a Java object.
     * </p>
     * <p>
     * When using a layout such as {@link org.apache.logging.log4j.core.layout.MessageLayout} and the LogEvent message
     * is a Log4j MapMessage, the message will be serialized as a JMS MapMessage.
     * </p>
     *
     * @param object
     *            The LogEvent or String message to wrap.
     * @return A new JMS message containing the provided object.
     * @throws JMSException if the current Session cannot create the message
     */
    public Message createMessage(final Serializable object) throws JMSException {
        if (object instanceof String) {
            return this.session.createTextMessage((String) object);
        } else if (object instanceof org.apache.logging.log4j.message.MapMessage) {
            return map((org.apache.logging.log4j.message.MapMessage<?, ?>) object, this.session.createMapMessage());
        }
        return this.session.createObjectMessage(object);
    }

    /** Wraps the payload in a JMS message stamped with the event's time and sends it. */
    private void createMessageAndSend(final LogEvent event, final Serializable serializable) throws JMSException {
        final Message message = createMessage(serializable);
        message.setJMSTimestamp(event.getTimeMillis());
        messageProducer.send(message);
    }

    /**
     * Creates a MessageConsumer on this Destination using the current Session.
     *
     * @return A MessageConsumer on this Destination.
     * @throws JMSException if the consumer cannot be created
     */
    public MessageConsumer createMessageConsumer() throws JMSException {
        return this.session.createConsumer(this.destination);
    }

    /**
     * Creates a MessageProducer on this Destination using the current Session.
     *
     * @param session
     *            The JMS Session to use to create the MessageProducer
     * @param destination
     *            The JMS Destination for the MessageProducer
     * @return A MessageProducer on this Destination.
     * @throws JMSException if the producer cannot be created
     */
    public MessageProducer createMessageProducer(final Session session, final Destination destination) throws JMSException {
        return session.createProducer(destination);
    }

    /** Creates (but does not start) a daemon, minimum-priority Reconnector owned by this manager. */
    private Reconnector createReconnector() {
        final Reconnector recon = new Reconnector(this);
        recon.setDaemon(true);
        recon.setPriority(Thread.MIN_PRIORITY);
        return recon;
    }

    private Session createSession(final Connection connection) throws JMSException {
        // Non-transacted, auto-acknowledged session: appropriate for fire-and-forget logging.
        return connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
    }

    public JmsManagerConfiguration getJmsManagerConfiguration() {
        return configuration;
    }

    JndiManager getJndiManager() {
        return configuration.getJndiManager();
    }

    <T> T lookup(final String destinationName) throws NamingException {
        return this.jndiManager.lookup(destinationName);
    }

    /** Copies a Log4j MapMessage's entries into a JMS MapMessage and returns the latter. */
    private MapMessage map(final org.apache.logging.log4j.message.MapMessage<?, ?> log4jMapMessage,
            final MapMessage jmsMapMessage) {
        // Map without calling org.apache.logging.log4j.message.MapMessage#getData() which makes a copy of the map.
        log4jMapMessage.forEach(new BiConsumer<String, Object>() {
            @Override
            public void accept(final String key, final Object value) {
                try {
                    jmsMapMessage.setObject(key, value);
                } catch (final JMSException e) {
                    throw new IllegalArgumentException(String.format("%s mapping key '%s' to value '%s': %s",
                            e.getClass(), key, value, e.getLocalizedMessage()), e);
                }
            }
        });
        return jmsMapMessage;
    }

    @Override
    protected boolean releaseSub(final long timeout, final TimeUnit timeUnit) {
        if (reconnector != null) {
            reconnector.shutdown();
            reconnector.interrupt();
            reconnector = null;
        }
        // Capture the JNDI manager before closeJndiManager() nulls the field: the original code
        // dereferenced the (by then null) field at the bottom, which would NPE. It also
        // initialized 'closed' to false, so the '&=' accumulation could never report success.
        final JndiManager jndiManagerToStop = this.jndiManager;
        boolean closed = true;
        closed &= closeJndiManager();
        closed &= closeMessageProducer();
        closed &= closeSession();
        closed &= closeConnection();
        return closed && (jndiManagerToStop == null || jndiManagerToStop.stop(timeout, timeUnit));
    }

    /**
     * Sends the given event payload, waiting for an in-flight reconnection when allowed,
     * and triggering a reconnection attempt on failure when retry is configured.
     *
     * @throws AppenderLoggingException when no producer is available or sending fails
     */
    void send(final LogEvent event, final Serializable serializable) {
        if (messageProducer == null) {
            // Optionally wait for the reconnector's first attempt before giving up.
            if (reconnector != null && !configuration.isImmediateFail()) {
                reconnector.latch();
            }
            if (messageProducer == null) {
                throw new AppenderLoggingException(
                        "Error sending to JMS Manager '" + getName() + "': JMS message producer not available");
            }
        }
        synchronized (this) {
            try {
                createMessageAndSend(event, serializable);
            } catch (final JMSException causeEx) {
                if (configuration.isRetry() && reconnector == null) {
                    reconnector = createReconnector();
                    try {
                        // Try one synchronous reconnect before handing off to the background thread.
                        closeJndiManager();
                        reconnector.reconnect();
                    } catch (NamingException | JMSException reconnEx) {
                        LOGGER.debug("Cannot reestablish JMS connection to {}: {}; starting reconnector thread {}",
                                configuration, reconnEx.getLocalizedMessage(), reconnector.getName(), reconnEx);
                        reconnector.start();
                        throw new AppenderLoggingException(
                                String.format("Error sending to %s for %s", getName(), configuration), causeEx);
                    }
                    try {
                        createMessageAndSend(event, serializable);
                    } catch (final JMSException e) {
                        // Report the original cause: it triggered the whole recovery attempt.
                        throw new AppenderLoggingException(
                                String.format("Error sending to %s after reestablishing connection for %s", getName(),
                                        configuration),
                                causeEx);
                    }
                }
            }
        }
    }
}
|
|
package org.jgroups.protocols;
import org.jgroups.Address;
import org.jgroups.MergeView;
import org.jgroups.Message;
import org.jgroups.View;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.annotations.Property;
import org.jgroups.util.*;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.stream.Stream;
/**
* Implementation of a locking protocol which acquires locks by asking the coordinator.<br/>
* Because the coordinator maintains all locks, no total ordering of requests is required.<br/>
* CENTRAL_LOCK2 has all members send lock and unlock requests to the current coordinator. The coordinator has a queue
* for incoming requests, and grants locks based on order of arrival.<br/>
* Contrary to {@link CENTRAL_LOCK}, CENTRAL_LOCK2 has no members who act as backups for lock information. Instead,
* when the coord leaves or on a merge, the new coordinator runs a <em>reconciliation</em> protocol in which it fetches
* information from all members about acquired locks and pending lock and unlock requests, and then creates its lock
* table accordingly. During this phase, all regular request handling is paused.<br/>
* This protocol requires less traffic than {@link CENTRAL_LOCK} (each request also has to be sent to the backup(s)),
* but introduces communication between the new coord and all members (and thus a small pause) on coord change.
* <br/>
* The JIRA issue is https://issues.jboss.org/browse/JGRP-2249.
* @author Bela Ban
* @since 4.0.13
* @see Locking
* @see CENTRAL_LOCK
*/
public class CENTRAL_LOCK2 extends Locking {

    @Property(description="By default, a lock owner is address:thread-id. If false, we only use the node's address. " +
      "See https://issues.jboss.org/browse/JGRP-1886 for details")
    protected boolean use_thread_id_for_lock_owner=true;

    @Property(description="Max time (in ms) to wait for lock info responses from members in a lock reconciliation phase")
    protected long lock_reconciliation_timeout=10_000;

    // The current coordinator; all lock/unlock requests are sent to it
    protected Address coord;

    // collect information about held locks and pending lock requests from all members during a reconciliation round
    protected final ResponseCollector<LockInfoResponse> lock_info_responses=new ResponseCollector<>();

    // Queue to hold requests, typically only at the coordinator. Processed by RequestHandler
    protected final BlockingQueue<Request> req_queue=new LinkedBlockingQueue<>();

    // Thread which processes requests in req-queue (running only on coord)
    protected final Runner req_handler;

    public CENTRAL_LOCK2() {
        // The runner drains req_queue via processQueue() and clears the queue when stopped
        req_handler=new Runner(new DefaultThreadFactory("lock-handler", true, true),
                               "lock-handler", this::processQueue, req_queue::clear);
    }

    @ManagedAttribute public boolean isCoord()               {return Objects.equals(local_addr, coord);}
    @ManagedAttribute public String  getCoordinator()        {return coord != null? coord.toString() : "n/a";}
    @ManagedAttribute public boolean isRequestHandlerRunning() {return req_handler.isRunning();}
    @ManagedAttribute public int     requestQueueSize()      {return req_queue.size();}

    /**
     * Tracks coordinator changes. A newly elected coordinator (or a coordinator after a merge)
     * runs the reconciliation protocol and starts the request handler; a member that stops
     * being coordinator shuts the handler down and discards its server lock table.
     */
    @Override
    public void handleView(View v) {
        Address old_coord=this.view != null? this.view.getCoord() : null;
        super.handleView(v);
        if(v.size() > 0) {
            coord=v.getCoord();
            log.debug("%s: coord=%s, is_coord=%b", local_addr, coord, isCoord());
        }
        if(Objects.equals(local_addr, coord)) {
            if(v instanceof MergeView || !Objects.equals(local_addr, old_coord)) {
                // I'm the new coord: run reconciliation to find all existing locks (and pending lock/unlock requests)
                runReconciliation();
                req_handler.start();
            }
        }
        else {
            if(Objects.equals(local_addr, old_coord)) {
                log.debug("%s: not coordinator anymore; stopping the request handler", local_addr);
                req_handler.stop(); // clears the req-queue
                server_locks.clear();
            }
        }
    }

    /**
     * Dispatches an incoming request: coordinator-side requests are queued for the request
     * handler; client-side requests/responses are handled directly on the caller's thread.
     */
    @Override
    protected void requestReceived(Request req) {
        if(req == null) return;
        switch(req.type) {
            // requests to be handled by the coord:
            case GRANT_LOCK:
            case RELEASE_LOCK:
            case CREATE_LOCK:
            case DELETE_LOCK:
            case COND_SIG:
            case COND_SIG_ALL:
            case LOCK_AWAIT:
            case DELETE_LOCK_AWAIT:
            case CREATE_AWAITER:
            case DELETE_AWAITER:
                req_queue.add(req);
                break;

            // requests/responses to be handled by clients
            case LOCK_GRANTED:
            case RELEASE_LOCK_OK:
            case LOCK_DENIED:
            case SIG_RET:
            case LOCK_INFO_REQ:
            case LOCK_INFO_RSP:
            case LOCK_REVOKED:
                if(log.isTraceEnabled())
                    log.trace("%s <-- %s: %s", local_addr, req.sender, req);
                handleRequest(req);
                break;
            default:
                log.error("%s: request of type %s not known", local_addr, req.type);
                break;
        }
    }

    /** Runner body: takes the next queued request (blocking) and handles it; never lets an error kill the loop. */
    protected void processQueue() {
        Request req=null;
        try {
            req=req_queue.take();
        }
        catch(InterruptedException e) {
            // Interrupt is used by Runner.stop() to unblock take(); fall through with req == null
        }
        try {
            if(req != null && log.isTraceEnabled())
                log.trace("%s <-- %s: %s", local_addr, req.sender, req);
            handleRequest(req);
        }
        catch(Throwable t) {
            log.error("%s: failed handling request %s: %s", local_addr, req, t);
        }
    }

    /**
     * Replies to a LOCK_INFO_REQ with this member's lock information. Receiving such a request
     * also implies the requester is the (new) coordinator, so the local coord field is updated.
     */
    protected void handleLockInfoRequest(Address requester) {
        if(requester != null && !Objects.equals(coord, requester)) {
            log.trace("%s: changed coord from %s to %s as a result of getting a LOCK_INFO_REQ",
                      local_addr, coord, requester);
            coord=requester;
        }
        LockInfoResponse response=createLockInfoResponse();
        if(log.isTraceEnabled())
            log.trace("%s --> %s LOCK-INFO-RSP:\n%s", local_addr, requester, response.printDetails());
        send(requester, new Request(Type.LOCK_INFO_RSP).infoRsp(response));
    }

    @Override
    protected void handleLockInfoResponse(Address sender, Request rsp) {
        lock_info_responses.add(sender, rsp.info_rsp);
    }

    @Override
    protected void handleLockRevoked(Request rsp) {
        notifyLockRevoked(rsp.lock_name, rsp.owner);
    }

    /** Grabs information about locks held and pending lock/unlock requests from all members */
    @ManagedOperation(description="Runs the reconciliation protocol to fetch information about owned locks and pending " +
      "lock/unlock requests from each member to establish the server lock table. Only run by a coordinator.")
    public void runReconciliation() {
        if(!isCoord()) {
            log.warn("%s: reconciliation protocol is not run as I'm not the coordinator (%s is)",
                     local_addr, getCoordinator());
            return;
        }
        Request lock_info_req=new Request(Type.LOCK_INFO_REQ);
        Address[] mbrs=view.getMembersRaw();
        log.debug("%s: running reconciliation protocol on %d members", local_addr, mbrs != null? mbrs.length : 0);
        lock_info_responses.reset(mbrs);
        // Account for our own state directly instead of sending ourselves a request
        lock_info_responses.add(local_addr, createLockInfoResponse());
        log.trace("%s --> ALL: %s", local_addr, lock_info_req);
        // we cannot use a multicast as this may happen as a result of a MergeView and not everybody may have the view yet
        sendLockInfoRequestTo(Util.streamableToBuffer(lock_info_req), mbrs, local_addr);
        if(!lock_info_responses.waitForAllResponses(lock_reconciliation_timeout)) {
            List<Address> missing=lock_info_responses.getMissing();
            log.warn("%s: failed getting lock information from all members, missing responses: %d (from %s)",
                     local_addr, missing.size(), missing);
        }

        // 1. Add all existing locks to the server lock table
        Collection<LockInfoResponse> responses=lock_info_responses.getResults().values();
        responses.stream().filter(rsp -> rsp != null && rsp.existing_locks != null)
          .map(rsp -> rsp.existing_locks).flatMap(Collection::stream)
          .forEach(t -> {
              String lock_name=t.getVal1();
              Owner owner=t.getVal2();
              ServerLock srv_lock=new ServerLock(lock_name, owner);
              ServerLock ret=server_locks.putIfAbsent(lock_name, srv_lock);
              if(ret != null) {
                  // Conflicting claim: keep the existing holder and revoke the duplicate
                  if(!Objects.equals(owner, ret.owner)) {
                      log.warn("%s: lock %s requested by %s is already present: %s", local_addr, lock_name, owner, ret);
                      send(owner.getAddress(), new Request(Type.LOCK_REVOKED, lock_name, ret.owner, 0));
                  }
              }
              else {
                  notifyLockCreated(lock_name);
                  log.trace("%s: added lock %s", local_addr, lock_name);
              }
          });

        // 2. Process all pending requests
        responses.stream().filter(rsp -> rsp != null && rsp.pending_requests != null && !rsp.pending_requests.isEmpty())
          .map(rsp -> rsp.pending_requests).flatMap(Collection::stream)
          .forEach(req -> {
              try {
                  if(log.isTraceEnabled())
                      log.trace("%s: processing request %s", local_addr, req);
                  handleRequest(req);
              }
              catch(Throwable t) {
                  log.error("%s: failed handling request %s: %s", local_addr, req, t);
              }
          });
    }

    /** Unicasts the serialized LOCK_INFO_REQ to each member except {@code exclude} (ourselves). */
    protected void sendLockInfoRequestTo(Buffer buf, Address[] mbrs, Address exclude) {
        Stream.of(mbrs).filter(m -> m != null && !Objects.equals(m, exclude)).forEach(dest -> {
            Message msg=new Message(dest, buf).putHeader(id, new LockingHeader());
            if(bypass_bundling)
                msg.setFlag(Message.Flag.DONT_BUNDLE);
            try {
                down_prot.down(msg);
            }
            catch(Throwable t) {
                log.error("%s: failed sending LOCK_INFO_REQ to %s: %s", local_addr, dest, t);
            }
        });
    }

    // @Override added for consistency with the other overridden Locking methods in this class
    @Override
    protected Owner getOwner() {
        return use_thread_id_for_lock_owner? super.getOwner(): new Owner(local_addr, -1);
    }

    @Override
    protected void sendGrantLockRequest(String lock_name, int lock_id, Owner owner, long timeout, boolean is_trylock) {
        Address dest=coord;
        if(dest == null)
            throw new IllegalStateException("No coordinator available, cannot send GRANT-LOCK request");
        sendRequest(dest, Type.GRANT_LOCK, lock_name, lock_id, owner, timeout, is_trylock);
    }

    @Override
    protected void sendReleaseLockRequest(String lock_name, int lock_id, Owner owner) {
        Address dest=coord;
        if(dest == null)
            throw new IllegalStateException("No coordinator available, cannot send RELEASE-LOCK request");
        sendRequest(dest, Type.RELEASE_LOCK, lock_name, lock_id, owner, 0, false);
    }

    @Override
    protected void sendAwaitConditionRequest(String lock_name, Owner owner) {
        sendRequest(coord, Type.LOCK_AWAIT, lock_name, owner, 0, false);
    }

    @Override
    protected void sendSignalConditionRequest(String lock_name, boolean all) {
        sendRequest(coord, all ? Type.COND_SIG_ALL : Type.COND_SIG, lock_name, null, 0, false);
    }

    @Override
    protected void sendDeleteAwaitConditionRequest(String lock_name, Owner owner) {
        sendRequest(coord, Type.DELETE_LOCK_AWAIT, lock_name, owner, 0, false);
    }

    /** Snapshot of this member's client-side state: acquired locks plus pending lock/unlock requests. */
    protected LockInfoResponse createLockInfoResponse() {
        LockInfoResponse rsp=new LockInfoResponse();
        List<Tuple<String,Owner>> locks=client_lock_table.getLockInfo(); // successfully acquired locks
        for(Tuple<String,Owner> t: locks)
            rsp.add(t);
        List<Request> pending_reqs=client_lock_table.getPendingRequests(local_addr); // pending lock/unlock requests
        if(pending_reqs != null && !pending_reqs.isEmpty())
            rsp.pending_requests=pending_reqs;
        return rsp;
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.zeppelin.jdbc;
import static java.lang.String.format;
import static org.junit.Assert.assertEquals;
import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_KEY;
import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_DRIVER;
import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_PASSWORD;
import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_USER;
import static org.apache.zeppelin.jdbc.JDBCInterpreter.DEFAULT_URL;
import static org.apache.zeppelin.jdbc.JDBCInterpreter.COMMON_MAX_LINE;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.sql.*;
import java.util.Properties;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.jdbc.JDBCInterpreter;
import org.apache.zeppelin.scheduler.FIFOScheduler;
import org.apache.zeppelin.scheduler.ParallelScheduler;
import org.apache.zeppelin.scheduler.Scheduler;
import org.junit.Before;
import org.junit.Test;
import com.mockrunner.jdbc.BasicJDBCTestCaseAdapter;
/**
* JDBC interpreter unit tests
*/
public class JDBCInterpreterTest extends BasicJDBCTestCaseAdapter {
// Shared JDBC URL for the per-test-run H2 database; lazily initialized in getJdbcConnection().
static String jdbcConnection;
/**
 * Lazily builds (once per JVM) a JDBC URL pointing at an H2 database located in a fresh
 * temporary directory that is scheduled for removal on JVM exit.
 */
private static String getJdbcConnection() throws IOException {
    if (jdbcConnection != null) {
        return jdbcConnection;
    }
    final Path tempDir = Files.createTempDirectory("h2-test-");
    tempDir.toFile().deleteOnExit();
    jdbcConnection = format("jdbc:h2:%s", tempDir);
    return jdbcConnection;
}
/**
 * Returns a canned set of interpreter properties pointing at a local PostgreSQL/Greenplum
 * instance, used by tests that exercise property handling (not an actual connection).
 */
public static Properties getJDBCTestProperties() {
    final Properties props = new Properties();
    props.setProperty("common.max_count", "1000");
    props.setProperty("default.url", "jdbc:postgresql://localhost:5432/");
    props.setProperty("default.driver", "org.postgresql.Driver");
    props.setProperty("default.password", "");
    props.setProperty("default.user", "gpadmin");
    return props;
}
/**
 * (Re)creates the H2 test_table before each test with three rows: ('a','a_name'),
 * ('b','b_name') and ('c', NULL).
 */
@Before
public void setUp() throws Exception {
    Class.forName("org.h2.Driver");
    // try-with-resources closes the connection and statements deterministically;
    // the original leaked all three (Connection, Statement, PreparedStatement).
    try (Connection connection = DriverManager.getConnection(getJdbcConnection());
         Statement statement = connection.createStatement()) {
        statement.execute(
            "DROP TABLE IF EXISTS test_table; " +
            "CREATE TABLE test_table(id varchar(255), name varchar(255));");
        // Prepare the insert only after the table exists.
        try (PreparedStatement insertStatement = connection.prepareStatement(
                "insert into test_table(id, name) values ('a', 'a_name'),('b', 'b_name'),('c', ?);")) {
            insertStatement.setString(1, null);
            insertStatement.execute();
        }
    }
}
@Test
public void testForParsePropertyKey() throws IOException {
JDBCInterpreter t = new JDBCInterpreter(new Properties());
assertEquals(t.getPropertyKey("(fake) select max(cant) from test_table where id >= 2452640"),
"fake");
assertEquals(t.getPropertyKey("() select max(cant) from test_table where id >= 2452640"),
"");
assertEquals(t.getPropertyKey(")fake( select max(cant) from test_table where id >= 2452640"),
"default");
// when you use a %jdbc(prefix1), prefix1 is the propertyKey as form part of the cmd string
assertEquals(t.getPropertyKey("(prefix1)\n select max(cant) from test_table where id >= 2452640"),
"prefix1");
assertEquals(t.getPropertyKey("(prefix2) select max(cant) from test_table where id >= 2452640"),
"prefix2");
// when you use a %jdbc, prefix is the default
assertEquals(t.getPropertyKey("select max(cant) from test_table where id >= 2452640"),
"default");
}
@Test
public void testForMapPrefix() throws SQLException, IOException {
Properties properties = new Properties();
properties.setProperty("common.max_count", "1000");
properties.setProperty("common.max_retry", "3");
properties.setProperty("default.driver", "org.h2.Driver");
properties.setProperty("default.url", getJdbcConnection());
properties.setProperty("default.user", "");
properties.setProperty("default.password", "");
JDBCInterpreter t = new JDBCInterpreter(properties);
t.open();
String sqlQuery = "(fake) select * from test_table";
InterpreterResult interpreterResult = t.interpret(sqlQuery, new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null));
// if prefix not found return ERROR and Prefix not found.
assertEquals(InterpreterResult.Code.ERROR, interpreterResult.code());
assertEquals("Prefix not found.", interpreterResult.message());
}
@Test
public void testDefaultProperties() throws SQLException {
JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(getJDBCTestProperties());
assertEquals("org.postgresql.Driver", jdbcInterpreter.getProperty(DEFAULT_DRIVER));
assertEquals("jdbc:postgresql://localhost:5432/", jdbcInterpreter.getProperty(DEFAULT_URL));
assertEquals("gpadmin", jdbcInterpreter.getProperty(DEFAULT_USER));
assertEquals("", jdbcInterpreter.getProperty(DEFAULT_PASSWORD));
assertEquals("1000", jdbcInterpreter.getProperty(COMMON_MAX_LINE));
}
@Test
public void testSelectQuery() throws SQLException, IOException {
Properties properties = new Properties();
properties.setProperty("common.max_count", "1000");
properties.setProperty("common.max_retry", "3");
properties.setProperty("default.driver", "org.h2.Driver");
properties.setProperty("default.url", getJdbcConnection());
properties.setProperty("default.user", "");
properties.setProperty("default.password", "");
JDBCInterpreter t = new JDBCInterpreter(properties);
t.open();
String sqlQuery = "select * from test_table WHERE ID in ('a', 'b')";
InterpreterResult interpreterResult = t.interpret(sqlQuery, new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null));
assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
assertEquals("ID\tNAME\na\ta_name\nb\tb_name\n", interpreterResult.message());
}
@Test
public void testSelectQueryWithNull() throws SQLException, IOException {
Properties properties = new Properties();
properties.setProperty("common.max_count", "1000");
properties.setProperty("common.max_retry", "3");
properties.setProperty("default.driver", "org.h2.Driver");
properties.setProperty("default.url", getJdbcConnection());
properties.setProperty("default.user", "");
properties.setProperty("default.password", "");
JDBCInterpreter t = new JDBCInterpreter(properties);
t.open();
String sqlQuery = "select * from test_table WHERE ID = 'c'";
InterpreterResult interpreterResult = t.interpret(sqlQuery, new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null));
assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
assertEquals("ID\tNAME\nc\tnull\n", interpreterResult.message());
}
@Test
public void testSelectQueryMaxResult() throws SQLException, IOException {
Properties properties = new Properties();
properties.setProperty("common.max_count", "1");
properties.setProperty("common.max_retry", "3");
properties.setProperty("default.driver", "org.h2.Driver");
properties.setProperty("default.url", getJdbcConnection());
properties.setProperty("default.user", "");
properties.setProperty("default.password", "");
JDBCInterpreter t = new JDBCInterpreter(properties);
t.open();
String sqlQuery = "select * from test_table";
InterpreterResult interpreterResult = t.interpret(sqlQuery, new InterpreterContext("", "1", "", "", null, null, null, null, null, null, null));
assertEquals(InterpreterResult.Code.SUCCESS, interpreterResult.code());
assertEquals(InterpreterResult.Type.TABLE, interpreterResult.type());
assertEquals("ID\tNAME\na\ta_name\n", interpreterResult.message());
}
@Test
public void concurrentSettingTest() {
Properties properties = new Properties();
properties.setProperty("zeppelin.jdbc.concurrent.use", "true");
properties.setProperty("zeppelin.jdbc.concurrent.max_connection", "10");
JDBCInterpreter jdbcInterpreter = new JDBCInterpreter(properties);
assertTrue(jdbcInterpreter.isConcurrentExecution());
assertEquals(10, jdbcInterpreter.getMaxConcurrentConnection());
Scheduler scheduler = jdbcInterpreter.getScheduler();
assertTrue(scheduler instanceof ParallelScheduler);
properties.clear();
properties.setProperty("zeppelin.jdbc.concurrent.use", "false");
jdbcInterpreter = new JDBCInterpreter(properties);
assertFalse(jdbcInterpreter.isConcurrentExecution());
scheduler = jdbcInterpreter.getScheduler();
assertTrue(scheduler instanceof FIFOScheduler);
}
}
|
|
/*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.model.impl.scripting;
import com.evolveum.midpoint.model.api.ScriptExecutionException;
import com.evolveum.midpoint.model.impl.scripting.expressions.SearchEvaluator;
import com.evolveum.midpoint.model.impl.scripting.expressions.SelectEvaluator;
import com.evolveum.midpoint.model.impl.scripting.helpers.JaxbHelper;
import com.evolveum.midpoint.prism.Item;
import com.evolveum.midpoint.prism.PrismContext;
import com.evolveum.midpoint.prism.parser.QueryConvertor;
import com.evolveum.midpoint.prism.query.ObjectFilter;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.task.api.TaskManager;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ActionExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ExecuteScriptType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ExpressionPipelineType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ExpressionSequenceType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.FilterExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ForeachExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ObjectFactory;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.ScriptingExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.SearchExpressionType;
import com.evolveum.midpoint.xml.ns._public.model.scripting_3.SelectExpressionType;
import com.evolveum.prism.xml.ns._public.types_3.RawType;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.Validate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.xml.bind.JAXBElement;
import javax.xml.namespace.QName;
import java.util.HashMap;
import java.util.Map;
/**
 * Main entry point for evaluating scripting expressions.
 *
 * Dispatches each {@link ScriptingExpressionType} subtype to the appropriate
 * evaluator (search/select) or executor (actions registered via
 * {@link #registerActionExecutor(String, ActionExecutor)}).
 *
 * @author mederly
 */
@Component
public class ScriptingExpressionEvaluator {

    private static final Trace LOGGER = TraceManager.getTrace(ScriptingExpressionEvaluator.class);

    // Bug fix: the original used "ScriptingExpressionEvaluator.class + \".\"", which
    // stringifies the Class object as "class com.evolveum...Evaluator." — use getName()
    // so operation result names get the intended fully-qualified prefix.
    private static final String DOT_CLASS = ScriptingExpressionEvaluator.class.getName() + ".";

    @Autowired
    private TaskManager taskManager;

    @Autowired
    private SearchEvaluator searchEvaluator;

    @Autowired
    private SelectEvaluator selectEvaluator;

    @Autowired
    private JaxbHelper jaxbHelper;

    @Autowired
    private PrismContext prismContext;

    private ObjectFactory objectFactory = new ObjectFactory();

    // Registry of action name -> executor; populated via registerActionExecutor().
    private Map<String, ActionExecutor> actionExecutors = new HashMap<>();

    /**
     * Asynchronously executes simple scripting expressions, consisting of one search command and one action.
     *
     * @param objectType Object type to search (e.g. c:UserType)
     * @param filter Filter to be applied (ObjectFilter)
     * @param actionName Action to be executed on objects found (e.g. "disable", "delete", "recompute", etc).
     * @param task Task in context of which the script should execute. The task should be "clean", i.e.
     *             (1) transient, (2) without any handler. This method puts the task into background,
     *             and assigns ScriptExecutionTaskHandler to it, to execute the script.
     * @param parentResult
     * @throws SchemaException
     */
    public void evaluateExpressionInBackground(QName objectType, ObjectFilter filter, String actionName, Task task, OperationResult parentResult) throws SchemaException {
        Validate.notNull(objectType);
        Validate.notNull(actionName);
        Validate.notNull(task);

        // Build "search objectType [filter] | action" and delegate to the generic entry point.
        SearchExpressionType search = new SearchExpressionType();
        search.setType(objectType);
        if (filter != null) {
            search.setSearchFilter(QueryConvertor.createSearchFilterType(filter, prismContext));
        }
        ActionExpressionType action = new ActionExpressionType();
        action.setType(actionName);
        search.setScriptingExpression(objectFactory.createAction(action));
        evaluateExpressionInBackground(search, task, parentResult);
    }

    /**
     * Asynchronously executes any scripting expression.
     *
     * @param expression Expression to be executed.
     * @param task Task in context of which the script should execute. The task should be "clean", i.e.
     *             (1) transient, (2) without any handler. This method puts the task into background,
     *             and assigns ScriptExecutionTaskHandler to it, to execute the script.
     * @param parentResult
     * @throws SchemaException
     */
    public void evaluateExpressionInBackground(ScriptingExpressionType expression, Task task, OperationResult parentResult) throws SchemaException {
        OperationResult result = parentResult.createSubresult(DOT_CLASS + "evaluateExpressionInBackground");
        // Refuse tasks that were already persisted or already have a handler: we must own both.
        if (!task.isTransient()) {
            throw new IllegalStateException("Task must be transient");
        }
        if (task.getHandlerUri() != null) {
            throw new IllegalStateException("Task must not have a handler");
        }
        ExecuteScriptType executeScriptType = new ExecuteScriptType();
        executeScriptType.setScriptingExpression(jaxbHelper.toJaxbElement(expression));
        task.setExtensionPropertyValue(SchemaConstants.SE_EXECUTE_SCRIPT, executeScriptType);
        task.setHandlerUri(ScriptExecutionTaskHandler.HANDLER_URI);
        taskManager.switchToBackground(task, result);
        result.computeStatus();
    }

    /**
     * Entry point to _synchronous_ script execution, with no input data.
     *
     * @param expression Scripting expression to execute.
     * @param task Task in context of which the script should execute (in foreground!)
     * @param result Operation result
     * @return ExecutionContext, from which the caller can retrieve the output data via getFinalOutput() method,
     *         and the console output via getConsoleOutput() method.
     * @throws com.evolveum.midpoint.model.api.ScriptExecutionException
     */
    public ExecutionContext evaluateExpression(ScriptingExpressionType expression, Task task, OperationResult result) throws ScriptExecutionException {
        ExecutionContext context = new ExecutionContext(task);
        Data output;
        try {
            output = evaluateExpression(expression, Data.createEmpty(), context, result);
        } catch (RuntimeException e) {
            // Wrap unexpected runtime failures so callers only deal with ScriptExecutionException.
            result.recordFatalError("Couldn't execute script", e);
            throw new ScriptExecutionException("Couldn't execute script: " + e.getMessage(), e);
        }
        result.computeStatusIfUnknown();
        context.setFinalOutput(output);
        return context;
    }

    /** Convenience overload: unwraps the expression from an ExecuteScriptType envelope. */
    public ExecutionContext evaluateExpression(ExecuteScriptType executeScript, Task task, OperationResult result) throws ScriptExecutionException {
        return evaluateExpression(executeScript.getScriptingExpression().getValue(), task, result);
    }

    /** Convenience overload: runs the expression under a freshly created task instance. */
    public ExecutionContext evaluateExpression(ScriptingExpressionType expression, OperationResult result) throws ScriptExecutionException {
        Task task = taskManager.createTaskInstance();
        return evaluateExpression(expression, task, result);
    }

    /** Convenience overload: unwraps a JAXB-wrapped expression. */
    public Data evaluateExpression(JAXBElement<? extends ScriptingExpressionType> expression, Data input, ExecutionContext context, OperationResult parentResult) throws ScriptExecutionException {
        return evaluateExpression((ScriptingExpressionType) expression.getValue(), input, context, parentResult);
    }

    /**
     * Core dispatcher: routes the expression to the matching evaluator/executor based on
     * its concrete subtype. All other evaluateExpression overloads funnel into this one.
     */
    public Data evaluateExpression(ScriptingExpressionType value, Data input, ExecutionContext context, OperationResult parentResult) throws ScriptExecutionException {
        OperationResult result = parentResult.createMinorSubresult(DOT_CLASS + "evaluateExpression");
        Data output;
        if (value instanceof ExpressionPipelineType) {
            output = executePipeline((ExpressionPipelineType) value, input, context, result);
        } else if (value instanceof ExpressionSequenceType) {
            output = executeSequence((ExpressionSequenceType) value, input, context, result);
        } else if (value instanceof ForeachExpressionType) {
            output = executeForEach((ForeachExpressionType) value, input, context, result);
        } else if (value instanceof SelectExpressionType) {
            output = selectEvaluator.evaluate((SelectExpressionType) value, input, context, result);
        } else if (value instanceof FilterExpressionType) {
            output = executeFilter((FilterExpressionType) value, input, context, result);
        } else if (value instanceof SearchExpressionType) {
            output = searchEvaluator.evaluate((SearchExpressionType) value, input, context, result);
        } else if (value instanceof ActionExpressionType) {
            output = executeAction((ActionExpressionType) value, input, context, result);
        } else {
            throw new IllegalArgumentException("Unsupported expression type: " + (value==null?"(null)":value.getClass()));
        }
        result.computeStatusIfUnknown();
        return output;
    }

    /** Looks up and invokes the registered executor for the given action type. */
    private Data executeAction(ActionExpressionType command, Data input, ExecutionContext context, OperationResult result) throws ScriptExecutionException {
        Validate.notNull(command, "command");
        Validate.notNull(command.getType(), "command.actionType");

        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("Executing action {} on {}", command.getType(), input.debugDump());
        } else if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Executing action {}", command.getType());
        }
        ActionExecutor executor = actionExecutors.get(command.getType());
        if (executor == null) {
            throw new IllegalStateException("Unsupported action type: " + command.getType());
        } else {
            Data retval = executor.execute(command, input, context, result);
            result.setSummarizeSuccesses(true);
            result.summarize();
            return retval;
        }
    }

    private Data executeFilter(FilterExpressionType command, Data input, ExecutionContext context, OperationResult result) {
        // Filter expressions are not supported yet.
        throw new NotImplementedException();
    }

    private Data executeForEach(ForeachExpressionType command, Data input, ExecutionContext context, OperationResult result) {
        // NOTE(review): not implemented — silently returns null (no data). Callers of
        // foreach expressions get an empty result rather than an error; confirm intended.
        return null;
    }

    /** Runs the pipeline: the output of each expression becomes the input of the next. */
    private Data executePipeline(ExpressionPipelineType pipeline, Data data, ExecutionContext context, OperationResult result) throws ScriptExecutionException {
        for (ScriptingExpressionType expressionType : pipeline.getExpression()) {
            data = evaluateExpression(expressionType, data, context, result);
        }
        return data;
    }

    /** Runs the sequence: every expression gets the same input; the last output wins. */
    private Data executeSequence(ExpressionSequenceType sequence, Data input, ExecutionContext context, OperationResult result) throws ScriptExecutionException {
        Data lastOutput = null;
        for (ScriptingExpressionType expressionType : sequence.getExpression()) {
            lastOutput = evaluateExpression(expressionType, input, context, result);
        }
        return lastOutput;
    }

    /** Parses a raw (constant) value into Data — as an Item if possible, else as a property. */
    public Data evaluateConstantExpression(RawType constant, ExecutionContext context, OperationResult result) throws ScriptExecutionException {
        try {
            Object value = prismContext.getXnodeProcessor().parseAnyData(constant.getXnode());
            if (value instanceof Item) {
                return Data.create((Item) value);
            } else {
                return Data.createProperty(value, prismContext);
            }
        } catch (SchemaException e) {
            throw new ScriptExecutionException(e.getMessage(), e);
        }
    }

    /** Registers an executor for the given action name (called by the executors themselves). */
    public void registerActionExecutor(String actionName, ActionExecutor executor) {
        actionExecutors.put(actionName, executor);
    }
}
|
|
/*
* Copyright 2006 The Apache Software Foundation or its licensors, as applicable
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Oleg V. Oleinik
* @version $Revision: 1.1 $
*/
package org.apache.harmony.test.reliability.api.nio.channels.filechannel;
import org.apache.harmony.test.reliability.share.Test;
import java.io.File;
import java.io.IOException;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
/**
* Goal: check FileChannel write operation for thread safety.
*
* Idea: Several threads write into a FileChannel equal size chunks of bytes each starting
* and ending with MARK byte. If write operations are thread unsafe, then chunks
* will be mixed.
*
* The test does:
*
* 1. Parses parameters:
* param[0] - a path to an existing directory where the test can store its files.
*
* 2. Creates a file with the name param[0]/fileName.
*
* 3. Creates FileInputStream, FileOutputStream and associated channels.
*
* 4. Starts N_OF_THREADS threads, each thread:
*
* a. Creates N_OF_BUFFERS_PER_WRITE byte arrays of (CHUNK_SIZE + 2) size,
* each wrapped into direct ByteBuffer.
*
* b. Writes the chunks.
*
* c. Repeats the operation N_OF_WRITES_PER_THREAD times.
*
* 6. Checks the content of the file: reads bytes from the file by (CHUNK_SIZE + 2)
* chunks, checks that first and last bytes of each read chunk is MARK.
*
* 7. Closes channels.
*
*/
public class FileChannelThrSafetyTest extends Test {

    // each written chunk has size (CHUNK_SIZE + 2)
    // and content {MARK, <random bytes>, MARK}
    static final int CHUNK_SIZE = 2000;

    // Each thread repeats writing into a channel N_OF_WRITES_PER_THREAD times
    static final int N_OF_WRITES_PER_THREAD = 20;

    // Each write operation is called with N_OF_BUFFERS_PER_WRITE ByteBuffers
    static final int N_OF_BUFFERS_PER_WRITE = 20;

    // Mark-up byte value
    static final byte MARK = 0x22;

    // Number of byte-writing threads to run
    static final int N_OF_THREADS = 10;

    // Set to false by writer threads on failure; checked after all threads join.
    boolean passed = true;

    String workFileName = "";
    String fileName = "FileChannelThrSafetyTest.file";
    String outputDir = "";

    FileChannel outChannel = null, inChannel = null;

    public static void main(String[] args) {
        System.exit(new FileChannelThrSafetyTest().test(args));
    }

    /** Test entry point: creates the work file, runs writer threads, verifies the content. */
    public int test(String[] params) {
        File f = null;
        passed = true;

        try {
            parseParams(params);
            f = Utils.createWorkFile(workFileName);
            outChannel = new FileOutputStream(f).getChannel();
            inChannel = new FileInputStream(f).getChannel();
            runThreads();
            if (!passed || !checkWrittenContent()) {
                return fail("Failed");
            }
        } catch (Throwable t) {
            t.printStackTrace();
            return fail("Exception thrown: " + t);
        } finally {
            // Best-effort cleanup; closing a FileChannel also closes its originating stream.
            try {
                if (outChannel != null) {
                    outChannel.close();
                }
                if (inChannel != null) {
                    inChannel.close();
                }
            } catch (Throwable t) {
            }
            if (f != null) {
                f.delete();
            }
        }
        return pass("OK");
    }

    // Reads from inChannel by (CHUNK_SIZE + 2) chunks, checks the first and last
    // bytes are MARK bytes.
    boolean checkWrittenContent() throws Exception {
        int steps = N_OF_WRITES_PER_THREAD * N_OF_THREADS * N_OF_BUFFERS_PER_WRITE;
        int size = (CHUNK_SIZE + 2);
        ByteBuffer bb = ByteBuffer.allocate(size);

        for (int i = 0; i < steps; ++i) {
            bb.clear();

            // Bug fix: FileChannel.read() may return fewer bytes than requested, which would
            // misalign every subsequent chunk check. Loop until the chunk-sized buffer is
            // full; a premature EOF means chunks are missing and is a failure as well.
            while (bb.hasRemaining()) {
                if (inChannel.read(bb) < 0) {
                    log.add("Unexpected end of file while reading chunk # " + i);
                    return false;
                }
            }

            byte first = bb.get(0);
            byte last = bb.get(bb.capacity() - 1);

            if (first != MARK || last != MARK) {
                log.add("Channel writing is thread-unsafe? - Chunk # " + i + ": first byte is " +
                        first + ", last byte is " + last + ", While " + MARK +
                        " byte is expected for each.");
                return false;
            }
        }
        return true;
    }

    /** Starts N_OF_THREADS ChunkWriter threads and waits for all of them to finish. */
    void runThreads() throws Exception {
        Thread[] t = new Thread[N_OF_THREADS];

        for (int i = 0; i < t.length; ++i) {
            t[i] = new ChunkWriter(this);
            t[i].start();
        }

        for (int i = 0; i < t.length; ++i) {
            t[i].join();
        }
    }

    public void parseParams(String[] params) throws Exception {
        // NOTE: outputDir must exist prior to running this test
        if (params.length >= 1) {
            outputDir = params[0];
        } else {
            throw new Exception("output directory is not specified. Usage: Test <existing output dir name>");
        }
        workFileName = outputDir + File.separator + fileName;
    }
}
/**
 * Writer thread: repeatedly performs gathering writes of MARK-delimited random
 * chunks into the shared output channel. Any failure flips {@code base.passed}.
 */
class ChunkWriter extends Thread {

    FileChannel outChannel = null;
    FileChannelThrSafetyTest base = null;

    ChunkWriter(FileChannelThrSafetyTest base) {
        this.outChannel = base.outChannel;
        this.base = base;
    }

    public void run () {
        try {
            for (int i = 0; i < FileChannelThrSafetyTest.N_OF_WRITES_PER_THREAD; ++i) {
                long written = outChannel.write(createByteChunks());
                // base.log.add("Bytes written " + written);
                // yield + sleep to encourage thread interleaving
                Thread.yield();
                Thread.sleep(10);
            }
            outChannel.force(false);
        } catch (Exception e){
            base.log.add("Thread " + Thread.currentThread().getId() + ": Exception \"" + e +
                    "\", while writing ByteBuffer[] into channel");
            base.passed = false;
        }
    }

    /** Builds the ByteBuffer[] handed to a single gathering write. */
    ByteBuffer[] createByteChunks() {
        ByteBuffer[] bb_arr = new ByteBuffer[FileChannelThrSafetyTest.N_OF_BUFFERS_PER_WRITE];
        for (int i = 0; i < bb_arr.length; ++i) {
            bb_arr[i] = createByteChunk();
        }
        return bb_arr;
    }

    /** Creates one direct buffer of (CHUNK_SIZE + 2) bytes: {MARK, <random non-MARK bytes>, MARK}. */
    ByteBuffer createByteChunk() {
        byte[] exclusion_bytes = new byte[] {FileChannelThrSafetyTest.MARK, FileChannelThrSafetyTest.MARK};
        byte[] b = Utils.createRndBytes(FileChannelThrSafetyTest.CHUNK_SIZE + 2, exclusion_bytes);
        b[0] = FileChannelThrSafetyTest.MARK;
        b[b.length - 1] = b[0];
        // Bug fix: the original did "ByteBuffer.allocateDirect(b.length).wrap(b)".
        // wrap() is a STATIC method, so the freshly allocated direct buffer was
        // discarded and a heap buffer wrapping b was returned instead. Copy the
        // bytes into the direct buffer so the test really writes direct ByteBuffers
        // as the class documentation promises.
        ByteBuffer bb = ByteBuffer.allocateDirect(b.length);
        bb.put(b);
        bb.flip();
        return bb;
    }
}
|
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
/**
 * Metadata mapper for the {@code _source} field: when enabled, the original document
 * source bytes are stored as a single stored (non-indexed) Lucene field, optionally
 * filtered through include/exclude path patterns first.
 */
public class SourceFieldMapper extends MetadataFieldMapper {

    public static final String NAME = "_source";
    public static final String CONTENT_TYPE = "_source";

    // Filter applied to the parsed source map when includes/excludes are configured;
    // null when the source is stored as-is (or not stored at all).
    private final Function<Map<String, ?>, Map<String, Object>> filter;

    /** Default field type: stored only, never indexed, keyword-analyzed, frozen. */
    public static class Defaults {
        public static final String NAME = SourceFieldMapper.NAME;
        public static final boolean ENABLED = true;

        public static final MappedFieldType FIELD_TYPE = new SourceFieldType();

        static {
            FIELD_TYPE.setIndexOptions(IndexOptions.NONE); // not indexed
            FIELD_TYPE.setStored(true);
            FIELD_TYPE.setOmitNorms(true);
            FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
            FIELD_TYPE.setName(NAME);
            FIELD_TYPE.freeze();
        }
    }

    /** Builder for {@link SourceFieldMapper}: enabled flag plus include/exclude filters. */
    public static class Builder extends MetadataFieldMapper.Builder<Builder, SourceFieldMapper> {

        private boolean enabled = Defaults.ENABLED;

        private String[] includes = null;
        private String[] excludes = null;

        public Builder() {
            super(Defaults.NAME, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE);
        }

        public Builder enabled(boolean enabled) {
            this.enabled = enabled;
            return this;
        }

        public Builder includes(String[] includes) {
            this.includes = includes;
            return this;
        }

        public Builder excludes(String[] excludes) {
            this.excludes = excludes;
            return this;
        }

        @Override
        public SourceFieldMapper build(BuilderContext context) {
            return new SourceFieldMapper(enabled, includes, excludes, context.indexSettings());
        }
    }

    /** Parses the {@code _source} section of a mapping into a {@link Builder}. */
    public static class TypeParser implements MetadataFieldMapper.TypeParser {
        @Override
        public MetadataFieldMapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            Builder builder = new Builder();

            // Entries we understand are removed from the node; anything left over
            // is rejected by the caller.
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String fieldName = entry.getKey();
                Object fieldNode = entry.getValue();
                if (fieldName.equals("enabled")) {
                    builder.enabled(lenientNodeBooleanValue(fieldNode));
                    iterator.remove();
                } else if ("format".equals(fieldName) && parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) {
                    // ignore on old indices, reject on and after 5.0
                    iterator.remove();
                } else if (fieldName.equals("includes")) {
                    List<Object> values = (List<Object>) fieldNode;
                    String[] includes = new String[values.size()];
                    for (int i = 0; i < includes.length; i++) {
                        includes[i] = values.get(i).toString();
                    }
                    builder.includes(includes);
                    iterator.remove();
                } else if (fieldName.equals("excludes")) {
                    List<Object> values = (List<Object>) fieldNode;
                    String[] excludes = new String[values.size()];
                    for (int i = 0; i < excludes.length; i++) {
                        excludes[i] = values.get(i).toString();
                    }
                    builder.excludes(excludes);
                    iterator.remove();
                }
            }
            return builder;
        }

        @Override
        public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext context) {
            final Settings indexSettings = context.mapperService().getIndexSettings().getSettings();
            return new SourceFieldMapper(indexSettings);
        }
    }

    // Field type for _source: stored-only; term queries against it are rejected.
    static final class SourceFieldType extends MappedFieldType {

        public SourceFieldType() {}

        protected SourceFieldType(SourceFieldType ref) {
            super(ref);
        }

        @Override
        public MappedFieldType clone() {
            return new SourceFieldType(this);
        }

        @Override
        public String typeName() {
            return CONTENT_TYPE;
        }

        @Override
        public Query termQuery(Object value, QueryShardContext context) {
            throw new QueryShardException(context, "The _source field is not searchable");
        }
    }

    private final boolean enabled;

    /** indicates whether the source will always exist and be complete, for use by features like the update API */
    private final boolean complete;

    private final String[] includes;
    private final String[] excludes;

    private SourceFieldMapper(Settings indexSettings) {
        this(Defaults.ENABLED, null, null, indexSettings);
    }

    private SourceFieldMapper(boolean enabled, String[] includes, String[] excludes, Settings indexSettings) {
        super(NAME, Defaults.FIELD_TYPE.clone(), Defaults.FIELD_TYPE, indexSettings); // Only stored.
        this.enabled = enabled;
        this.includes = includes;
        this.excludes = excludes;
        // Only build a filter when filtering is configured and the field is stored.
        final boolean filtered = (includes != null && includes.length > 0) || (excludes != null && excludes.length > 0);
        this.filter = enabled && filtered && fieldType().stored() ? XContentMapValues.filter(includes, excludes) : null;
        // A filtered or disabled source is incomplete by definition.
        this.complete = enabled && includes == null && excludes == null;
    }

    public boolean enabled() {
        return enabled;
    }

    public String[] excludes() {
        return this.excludes != null ? this.excludes : Strings.EMPTY_ARRAY;
    }

    public String[] includes() {
        return this.includes != null ? this.includes : Strings.EMPTY_ARRAY;
    }

    public boolean isComplete() {
        return complete;
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
        // _source is captured before the document fields are parsed.
        super.parse(context);
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
    }

    @Override
    public Mapper parse(ParseContext context) throws IOException {
        // nothing to do here, we will call it in pre parse
        return null;
    }

    @Override
    protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException {
        if (!enabled) {
            return;
        }
        if (!fieldType().stored()) {
            return;
        }
        BytesReference source = context.sourceToParse().source();
        // Percolate and tv APIs may not set the source and that is ok, because these APIs will not index any data
        if (source == null) {
            return;
        }

        if (filter != null) {
            // we don't update the context source if we filter, we want to keep it as is...
            Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(source, true);
            Map<String, Object> filteredSource = filter.apply(mapTuple.v2());
            BytesStreamOutput bStream = new BytesStreamOutput();
            XContentType contentType = mapTuple.v1();
            XContentBuilder builder = XContentFactory.contentBuilder(contentType, bStream).map(filteredSource);
            builder.close();
            source = bStream.bytes();
        }
        BytesRef ref = source.toBytesRef();
        fields.add(new StoredField(fieldType().name(), ref.bytes, ref.offset, ref.length));
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);

        // all are defaults, no need to write it at all
        if (!includeDefaults && enabled == Defaults.ENABLED && includes == null && excludes == null) {
            return builder;
        }
        builder.startObject(contentType());
        if (includeDefaults || enabled != Defaults.ENABLED) {
            builder.field("enabled", enabled);
        }

        if (includes != null) {
            builder.array("includes", includes);
        } else if (includeDefaults) {
            builder.array("includes", Strings.EMPTY_ARRAY);
        }

        if (excludes != null) {
            builder.array("excludes", excludes);
        } else if (includeDefaults) {
            builder.array("excludes", Strings.EMPTY_ARRAY);
        }

        builder.endObject();
        return builder;
    }

    @Override
    protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
        // enabled/includes/excludes cannot change on a live index; collect all
        // conflicts before failing so the user sees every problem at once.
        SourceFieldMapper sourceMergeWith = (SourceFieldMapper) mergeWith;
        List<String> conflicts = new ArrayList<>();
        if (this.enabled != sourceMergeWith.enabled) {
            conflicts.add("Cannot update enabled setting for [_source]");
        }
        if (Arrays.equals(includes(), sourceMergeWith.includes()) == false) {
            conflicts.add("Cannot update includes setting for [_source]");
        }
        if (Arrays.equals(excludes(), sourceMergeWith.excludes()) == false) {
            conflicts.add("Cannot update excludes setting for [_source]");
        }
        if (conflicts.isEmpty() == false) {
            throw new IllegalArgumentException("Can't merge because of conflicts: " + conflicts);
        }
    }
}
|
|
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.execution.manager.standalone;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.streamsets.datacollector.event.handler.remote.RemoteDataCollector;
import com.streamsets.datacollector.execution.EventListenerManager;
import com.streamsets.datacollector.execution.Manager;
import com.streamsets.datacollector.execution.PipelineState;
import com.streamsets.datacollector.execution.PipelineStateStore;
import com.streamsets.datacollector.execution.PipelineStatus;
import com.streamsets.datacollector.execution.PreviewStatus;
import com.streamsets.datacollector.execution.Previewer;
import com.streamsets.datacollector.execution.PreviewerListener;
import com.streamsets.datacollector.execution.Runner;
import com.streamsets.datacollector.execution.StateEventListener;
import com.streamsets.datacollector.execution.StatsCollectorRunner;
import com.streamsets.datacollector.execution.manager.PipelineManagerException;
import com.streamsets.datacollector.execution.manager.PreviewerProvider;
import com.streamsets.datacollector.execution.manager.RunnerProvider;
import com.streamsets.datacollector.main.RuntimeInfo;
import com.streamsets.datacollector.metrics.MetricsCache;
import com.streamsets.datacollector.metrics.MetricsConfigurator;
import com.streamsets.datacollector.security.GroupsInScope;
import com.streamsets.datacollector.stagelibrary.StageLibraryTask;
import com.streamsets.datacollector.store.PipelineInfo;
import com.streamsets.datacollector.store.PipelineStoreException;
import com.streamsets.datacollector.store.PipelineStoreTask;
import com.streamsets.datacollector.task.AbstractTask;
import com.streamsets.datacollector.usagestats.StatsCollector;
import com.streamsets.datacollector.util.Configuration;
import com.streamsets.datacollector.util.ContainerError;
import com.streamsets.datacollector.util.PipelineException;
import com.streamsets.datacollector.validation.ValidationError;
import com.streamsets.dc.execution.manager.standalone.ResourceManager;
import com.streamsets.pipeline.api.ExecutionMode;
import com.streamsets.pipeline.api.impl.Utils;
import com.streamsets.pipeline.lib.executor.SafeScheduledExecutorService;
import com.streamsets.pipeline.lib.util.ExceptionUtils;
import dagger.ObjectGraph;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Named;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
/**
 * {@link Manager} implementation for standalone and cluster execution modes.
 * Creates {@link Runner} and {@link Previewer} instances on demand, caches
 * them (runners keyed by "name::rev", previewers by id), restores runners for
 * pipelines that were active when the data collector was shut down, and
 * periodically evicts runners whose pipelines are no longer active.
 */
public class StandaloneAndClusterPipelineManager extends AbstractTask implements Manager, PreviewerListener {
  private static final Logger LOG = LoggerFactory.getLogger(StandaloneAndClusterPipelineManager.class);
  private static final String PIPELINE_MANAGER = "PipelineManager";
  private final ObjectGraph objectGraph;
  @Inject RuntimeInfo runtimeInfo;
  @Inject Configuration configuration;
  @Inject PipelineStoreTask pipelineStore;
  @Inject PipelineStateStore pipelineStateStore;
  @Inject StageLibraryTask stageLibrary;
  @Inject @Named("previewExecutor") SafeScheduledExecutorService previewExecutor;
  @Inject @Named("runnerExecutor") SafeScheduledExecutorService runnerExecutor;
  @Inject @Named("managerExecutor") SafeScheduledExecutorService managerExecutor;
  @Inject RunnerProvider runnerProvider;
  @Inject PreviewerProvider previewerProvider;
  @Inject ResourceManager resourceManager;
  @Inject EventListenerManager eventListenerManager;
  @Inject StatsCollector statsCollector;
  // Both caches are created in runTask(), not the constructor; methods that
  // touch them assume the task has been started.
  private Cache<String, RunnerInfo> runnerCache;
  private Cache<String, Previewer> previewerCache;
  static final long DEFAULT_RUNNER_EXPIRY_INTERVAL = 5*60*1000;
  static final String RUNNER_EXPIRY_INTERVAL = "runner.expiry.interval";
  static final long DEFAULT_RUNNER_EXPIRY_INITIAL_DELAY = 5*60*1000;
  static final String RUNNER_EXPIRY_INITIAL_DELAY = "runner.expiry.initial.delay";
  static final boolean DEFAULT_RUNNER_RESTART_PIPELINES = true;
  static final String RUNNER_RESTART_PIPELINES = "runner.boot.pipeline.restart";
  private final long runnerExpiryInterval;
  private final long runnerExpiryInitialDelay;
  private ScheduledFuture<?> runnerExpiryFuture;
  private static final String NAME_AND_REV_SEPARATOR = "::";
  /**
   * Creates the manager, injects its dependencies from the given object graph
   * and reads the runner-expiry configuration.
   */
  public StandaloneAndClusterPipelineManager(ObjectGraph objectGraph) {
    super(PIPELINE_MANAGER);
    this.objectGraph = objectGraph;
    this.objectGraph.inject(this);
    runnerExpiryInterval = this.configuration.get(RUNNER_EXPIRY_INTERVAL, DEFAULT_RUNNER_EXPIRY_INTERVAL);
    runnerExpiryInitialDelay = configuration.get(RUNNER_EXPIRY_INITIAL_DELAY, DEFAULT_RUNNER_EXPIRY_INITIAL_DELAY);
    // The resource manager tracks pipeline state transitions.
    eventListenerManager.addStateEventListener(resourceManager);
    MetricsConfigurator.registerJmxMetrics(runtimeInfo.getMetrics());
  }
  @Override
  public void addStateEventListener(StateEventListener listener) {
    eventListenerManager.addStateEventListener(listener);
  }
  /**
   * Creates a new previewer for the given pipeline and registers it in the
   * previewer cache under its id.
   *
   * @throws PipelineStoreException if the pipeline does not exist
   */
  @Override
  public Previewer createPreviewer(String user, String name, String rev) throws PipelineException {
    if (!pipelineStore.hasPipeline(name)) {
      throw new PipelineStoreException(ContainerError.CONTAINER_0200, name);
    }
    Previewer previewer = previewerProvider.createPreviewer(user, name, rev, this, objectGraph);
    previewerCache.put(previewer.getId(), previewer);
    return previewer;
  }
  /**
   * Returns the cached previewer for the given id, or null (with a warning)
   * if it has been evicted or never existed.
   */
  @Override
  public Previewer getPreviewer(String previewerId) {
    Utils.checkNotNull(previewerId, "previewerId");
    Previewer previewer = previewerCache.getIfPresent(previewerId);
    if (previewer == null) {
      LOG.warn("Cannot find the previewer in cache for id: '{}'", previewerId);
    }
    return previewer;
  }
  /**
   * Returns the (cached) runner for the given pipeline, creating one on
   * demand. If the execution mode persisted in the state store no longer
   * matches the cached runner's mode, an inactive runner is evicted and a
   * fresh one is created recursively; an active runner with a stale mode is
   * reported as a conflict.
   */
  @Override
  @SuppressWarnings("deprecation")
  public Runner getRunner(final String name, final String rev) throws PipelineException {
    if (!pipelineStore.hasPipeline(name)) {
      throw new PipelineStoreException(ContainerError.CONTAINER_0200, name);
    }
    final String nameAndRevString = getNameAndRevString(name, rev);
    RunnerInfo runnerInfo;
    try {
      runnerInfo = runnerCache.get(nameAndRevString, () -> {
        ExecutionMode executionMode = pipelineStateStore.getState(name, rev).getExecutionMode();
        Runner runner = getRunner(name, rev, executionMode);
        return new RunnerInfo(runner, executionMode);
      });
      ExecutionMode cachedExecutionMode = runnerInfo.executionMode;
      ExecutionMode persistentExecutionMode = pipelineStateStore.getState(name, rev).getExecutionMode();
      // A cached legacy CLUSTER mode is upgraded in place to whatever
      // (cluster) mode is now persisted in the state store.
      if (cachedExecutionMode == ExecutionMode.CLUSTER) {
        LOG.info("Upgrading execution mode from " + ExecutionMode.CLUSTER + " to " + persistentExecutionMode);
        runnerInfo.executionMode = persistentExecutionMode;
      }
      if (runnerInfo.executionMode != pipelineStateStore.getState(name, rev).getExecutionMode()) {
        LOG.info(Utils.format("Invalidate the existing runner for pipeline '{}::{}' as execution mode has changed",
            name, rev));
        if (!removeRunnerIfNotActive(runnerInfo.runner)) {
          throw new PipelineManagerException(ValidationError.VALIDATION_0082, pipelineStateStore.getState(name, rev).getExecutionMode(),
              runnerInfo.executionMode);
        } else {
          // Recurse: the stale runner was evicted, so this re-creates it
          // with the up-to-date execution mode.
          return getRunner(name, rev);
        }
      }
    } catch (ExecutionException ex) {
      // Unwrap exceptions thrown by the cache loader lambda above.
      if (ex.getCause() instanceof RuntimeException) {
        throw (RuntimeException) ex.getCause();
      } else if (ex.getCause() instanceof PipelineStoreException) {
        throw (PipelineStoreException) ex.getCause();
      } else {
        throw new PipelineStoreException(ContainerError.CONTAINER_0114, ex.toString(), ex);
      }
    }
    return runnerInfo.runner;
  }
  /**
   * Returns the current state of every pipeline in the store.
   */
  @Override
  public List<PipelineState> getPipelines() throws PipelineStoreException {
    List<PipelineInfo> pipelineInfoList = pipelineStore.getPipelines();
    List<PipelineState> pipelineStateList = new ArrayList<>();
    for (PipelineInfo pipelineInfo : pipelineInfoList) {
      String name = pipelineInfo.getPipelineId();
      String rev = pipelineInfo.getLastRev();
      PipelineState pipelineState = pipelineStateStore.getState(name, rev);
      Utils.checkState(pipelineState != null, Utils.format("State for pipeline: '{}::{}' doesn't exist", name, rev));
      pipelineStateList.add(pipelineState);
    }
    return pipelineStateList;
  }
  @Override
  public PipelineState getPipelineState(String name, String rev) throws PipelineStoreException {
    return pipelineStateStore.getState(name, rev);
  }
  /**
   * A pipeline is active only when a cached runner exists for it AND that
   * runner's status is an active one.
   */
  @Override
  public boolean isPipelineActive(String name, String rev) throws PipelineException {
    if (!pipelineStore.hasPipeline(name)) {
      throw new PipelineStoreException(ContainerError.CONTAINER_0200, name);
    }
    RunnerInfo runnerInfo = runnerCache.getIfPresent(getNameAndRevString(name, rev));
    return runnerInfo != null && runnerInfo.runner.getState().getStatus().isActive();
  }
  /**
   * Task startup: builds the previewer/runner caches, restores runners for
   * pipelines that were active at shutdown (optionally restarting
   * DISCONNECTED ones), and schedules the periodic runner-expiry job.
   */
  @Override
  public void runTask() {
    // Previewers are evicted after 30 minutes of inactivity.
    previewerCache = new MetricsCache<>(
        runtimeInfo.getMetrics(),
        "manager-previewer-cache",
        CacheBuilder.newBuilder()
            .expireAfterAccess(30, TimeUnit.MINUTES).removalListener((RemovalListener<String, Previewer>) removal -> {
          Previewer previewer = removal.getValue();
          LOG.warn("Evicting idle previewer '{}::{}'::'{}' in status '{}'",
              previewer.getName(), previewer.getRev(), previewer.getId(), previewer.getStatus());
        }).build()
    );
    // Runners never expire by time; the scheduled job below evicts inactive ones.
    runnerCache = new MetricsCache<>(
        runtimeInfo.getMetrics(),
        "manager-runner-cache",
        CacheBuilder.newBuilder().build())
    ;
    // On SDC start up we will try by default start all pipelines that were running at the time SDC was shut down. This
    // can however be disabled via sdc.properties config. Especially helpful when starting all pipeline at once could
    // lead to troubles.
    boolean restartPipelines = configuration.get(RUNNER_RESTART_PIPELINES, DEFAULT_RUNNER_RESTART_PIPELINES);
    List<PipelineInfo> pipelineInfoList;
    try {
      pipelineInfoList = pipelineStore.getPipelines();
    } catch (PipelineStoreException ex) {
      throw new RuntimeException("Cannot load the list of pipelines from StateStore", ex);
    }
    for (PipelineInfo pipelineInfo : pipelineInfoList) {
      String name = pipelineInfo.getPipelineId();
      String rev = pipelineInfo.getLastRev();
      try {
        // Remotely-managed pipelines are only resumed when DPM is enabled.
        if (isRemotePipeline(name, rev) && !runtimeInfo.isDPMEnabled()) {
          LOG.info(Utils.format("Not activating remote pipeline'{}:{}' as DPM is disabled ", name, rev));
          continue;
        }
        PipelineState pipelineState = pipelineStateStore.getState(name, rev);
        // Create runner if active
        if (pipelineState.getStatus().isActive()) {
          ExecutionMode executionMode = pipelineState.getExecutionMode();
          Runner runner = getRunner(name, rev, executionMode);
          runner.prepareForDataCollectorStart(pipelineState.getUser());
          if (restartPipelines && runner.getState().getStatus() == PipelineStatus.DISCONNECTED) {
            runnerCache.put(getNameAndRevString(name, rev), new RunnerInfo(runner, executionMode));
            try {
              String user = pipelineState.getUser();
              // we need to skip enforcement user groups in scope.
              GroupsInScope.executeIgnoreGroups(() -> {
                runner.onDataCollectorStart(user);
                return null;
              });
            } catch (Exception ex) {
              ExceptionUtils.throwUndeclared(ex.getCause());
            }
          }
        }
      } catch (Exception ex) {
        // A failure in one pipeline must not prevent restoring the others.
        LOG.error(Utils.format("Error while processing pipeline '{}::{}'", name, rev), ex);
      }
    }
    // Periodic sweep: evict (and close) runners whose pipelines went inactive.
    runnerExpiryFuture = managerExecutor.scheduleAtFixedRate(new Runnable() {
      @Override
      public void run() {
        for (RunnerInfo runnerInfo : runnerCache.asMap().values()) {
          Runner runner = runnerInfo.runner;
          try {
            LOG.debug("Runner for pipeline '{}::{}' is in status: '{}'", runner.getName(), runner.getRev(),
                runner.getState());
            removeRunnerIfNotActive(runner);
          } catch (PipelineStoreException ex) {
            if (ex.getErrorCode() == ContainerError.CONTAINER_0209) {
              // NOTE(review): the message has only two '{}' placeholders but four
              // arguments; SLF4J uses the trailing Throwable and silently drops
              // ex.toString() — confirm this is intended.
              LOG.debug(
                  "Pipeline state file for pipeline: '{}::{}' was already deleted; removing runner from cache",
                  runner.getName(),
                  runner.getRev(),
                  ex.toString(),
                  ex
              );
              runnerCache.invalidate(getNameAndRevString(runner.getName(), runner.getRev()));
            } else {
              LOG.warn(
                  "Cannot remove runner for pipeline: '{}::{}' due to '{}'; memory leak is possible",
                  runner.getName(),
                  runner.getRev(),
                  ex.toString(),
                  ex
              );
            }
          }
        }
      }
    }, runnerExpiryInitialDelay, runnerExpiryInterval, TimeUnit.MILLISECONDS);
  }
  @VisibleForTesting
  boolean isRunnerPresent(String name, String rev) {
    return runnerCache.getIfPresent(getNameAndRevString(name, rev)) != null;
  }
  /**
   * Evicts and closes the runner if its pipeline is not active.
   *
   * @return true when the runner was removed, false when it is still active
   */
  private boolean removeRunnerIfNotActive(Runner runner) throws PipelineStoreException {
    if (!runner.getState().getStatus().isActive()) {
      // first invalidate the cache and then close the runner, so a closed runner can never
      // sit in cache
      runnerCache.invalidate(getNameAndRevString(runner.getName(), runner.getRev()));
      runner.close();
      // NOTE(review): quote placement in this message looks like a typo
      // ("'{}::'{}'" vs "'{}::{}'") — cosmetic only.
      LOG.info("Removing runner for pipeline '{}::'{}'", runner.getName(), runner.getRev());
      return true;
    } else {
      return false;
    }
  }
  /**
   * Task shutdown: closes every cached runner (notifying it of the data
   * collector stop), stops every cached previewer, clears both caches and
   * cancels the runner-expiry job. Per-runner failures are logged, not
   * propagated, so shutdown always completes.
   */
  @Override
  public void stopTask() {
    if(runnerCache != null) {
      for (RunnerInfo runnerInfo : runnerCache.asMap().values()) {
        Runner runner = runnerInfo.runner;
        try {
          runner.close();
          PipelineState pipelineState = pipelineStateStore.getState(runner.getName(), runner.getRev());
          runner.onDataCollectorStop(pipelineState.getUser());
        } catch (Exception e) {
          LOG.warn("Failed to stop the runner for pipeline: {} and rev: {} due to: {}", runner.getName(),
              runner.getRev(), e.toString(), e);
        }
      }
      runnerCache.invalidateAll();
      for (Previewer previewer : previewerCache.asMap().values()) {
        try {
          previewer.stop();
        } catch (Exception e) {
          LOG.warn("Failed to stop the previewer: {}::{}::{} due to: {}", previewer.getName(),
              previewer.getRev(), previewer.getId(), e.toString(), e);
        }
      }
    }
    if(previewerCache != null) {
      previewerCache.invalidateAll();
    }
    if(runnerExpiryFuture != null) {
      runnerExpiryFuture.cancel(true);
    }
    LOG.info("Stopped Production Pipeline Manager");
  }
  @Override
  public void statusChange(String id, PreviewStatus status) {
    LOG.debug("Status of previewer with id: '{}' changed to status: '{}'", id, status);
  }
  /**
   * PreviewerListener callback: once the preview output has been fetched the
   * previewer is no longer needed and is dropped from the cache.
   */
  @Override
  public void outputRetrieved(String id) {
    LOG.debug("Removing previewer with id: '{}' from cache as output is retrieved", id);
    previewerCache.invalidate(id);
  }
  @Override
  public boolean isRemotePipeline(String name, String rev) throws PipelineStoreException {
    Object isRemote = pipelineStateStore.getState(name, rev).getAttributes().get(RemoteDataCollector.IS_REMOTE_PIPELINE);
    // remote attribute will be null for pipelines with version earlier than 1.3
    return (isRemote == null) ? false : (boolean) isRemote;
  }
  /**
   * Creates a new runner (wrapped for usage-stats collection) for the given
   * pipeline; a null execution mode defaults to STANDALONE.
   */
  private Runner getRunner(String name, String rev, ExecutionMode executionMode) throws PipelineStoreException {
    if(executionMode == null) {
      executionMode = ExecutionMode.STANDALONE;
    }
    Runner runner = runnerProvider.createRunner(name, rev, objectGraph, executionMode);
    return new StatsCollectorRunner(runner, statsCollector);
  }
  // Builds the "name::rev" key used by the runner cache.
  private String getNameAndRevString(String name, String rev) {
    return name + NAME_AND_REV_SEPARATOR + rev;
  }
  /**
   * Cache entry pairing a runner with the execution mode it was created for;
   * the mode is mutable so legacy CLUSTER entries can be upgraded in place.
   */
  private static class RunnerInfo {
    private final Runner runner;
    private ExecutionMode executionMode;
    private RunnerInfo(Runner runner, ExecutionMode executionMode) {
      this.runner = runner;
      this.executionMode = executionMode;
    }
  }
}
|
|
/**
*/
package serviceexample.util;
import java.util.Map;
import org.eclipse.emf.common.util.Diagnostic;
import org.eclipse.emf.common.util.DiagnosticChain;
import org.eclipse.emf.common.util.ResourceLocator;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.util.EObjectValidator;
import serviceexample.*;
/**
* <!-- begin-user-doc -->
* The <b>Validator</b> for the model.
* <!-- end-user-doc -->
* @see serviceexample.ServiceexamplePackage
* @generated
*/
// NOTE(review): EMF-generated code. Members marked @generated are overwritten
// on regeneration; to hand-edit one, change its tag to "@generated NOT".
public class ServiceexampleValidator extends EObjectValidator {
	/**
	 * The cached model package
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public static final ServiceexampleValidator INSTANCE = new ServiceexampleValidator();
	/**
	 * A constant for the {@link org.eclipse.emf.common.util.Diagnostic#getSource() source} of diagnostic {@link org.eclipse.emf.common.util.Diagnostic#getCode() codes} from this package.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see org.eclipse.emf.common.util.Diagnostic#getSource()
	 * @see org.eclipse.emf.common.util.Diagnostic#getCode()
	 * @generated
	 */
	public static final String DIAGNOSTIC_SOURCE = "serviceexample";
	/**
	 * A constant with a fixed name that can be used as the base value for additional hand written constants.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	private static final int GENERATED_DIAGNOSTIC_CODE_COUNT = 0;
	/**
	 * A constant with a fixed name that can be used as the base value for additional hand written constants in a derived class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static final int DIAGNOSTIC_CODE_COUNT = GENERATED_DIAGNOSTIC_CODE_COUNT;
	/**
	 * Creates an instance of the switch.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ServiceexampleValidator() {
		super();
	}
	/**
	 * Returns the package of this validator switch.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EPackage getEPackage() {
		return ServiceexamplePackage.eINSTANCE;
	}
	/**
	 * Calls <code>validateXXX</code> for the corresponding classifier of the model.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected boolean validate(int classifierID, Object value, DiagnosticChain diagnostics, Map<Object, Object> context) {
		// Dispatches on the classifier id; unknown classifiers are considered valid.
		switch (classifierID) {
			case ServiceexamplePackage.CLUSTER:
				return validateCluster((Cluster)value, diagnostics, context);
			case ServiceexamplePackage.SERVICE:
				return validateService((Service)value, diagnostics, context);
			case ServiceexamplePackage.SERVER:
				return validateServer((Server)value, diagnostics, context);
			case ServiceexamplePackage.CONTAINER:
				return validateContainer((Container)value, diagnostics, context);
			case ServiceexamplePackage.NAMED_THING:
				return validateNamedThing((NamedThing)value, diagnostics, context);
			case ServiceexamplePackage.SERVICE_TYPE:
				return validateServiceType((ServiceType)value, diagnostics, context);
			default:
				return true;
		}
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateCluster(Cluster cluster, DiagnosticChain diagnostics, Map<Object, Object> context) {
		if (!validate_NoCircularContainment(cluster, diagnostics, context)) return false;
		// "result || diagnostics != null": keep evaluating remaining constraints
		// when a diagnostic chain is collecting, so every violation is reported.
		boolean result = validate_EveryMultiplicityConforms(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryDataValueConforms(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryReferenceIsContained(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryBidirectionalReferenceIsPaired(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryProxyResolves(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validate_UniqueID(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryKeyUnique(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryMapEntryUnique(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validateCluster_sameServics(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validateCluster_goodSpeed(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validateCluster_onlyOneImportant(cluster, diagnostics, context);
		if (result || diagnostics != null) result &= validateCluster_backupDifferent(cluster, diagnostics, context);
		return result;
	}
	/**
	 * The cached validation expression for the sameServics constraint of '<em>Cluster</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static final String CLUSTER__SAME_SERVICS__EEXPRESSION = "backup = null or backup.services->includesAll(services)";
	/**
	 * Validates the sameServics constraint of '<em>Cluster</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	// NOTE(review): "sameServics" (sic) is the constraint name as defined in the
	// model; the spelling must match the model annotation, so do not "fix" it here.
	public boolean validateCluster_sameServics(Cluster cluster, DiagnosticChain diagnostics, Map<Object, Object> context) {
		return
			validate
				(ServiceexamplePackage.Literals.CLUSTER,
				 cluster,
				 diagnostics,
				 context,
				 "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot",
				 "sameServics",
				 CLUSTER__SAME_SERVICS__EEXPRESSION,
				 Diagnostic.ERROR,
				 DIAGNOSTIC_SOURCE,
				 0);
	}
	/**
	 * The cached validation expression for the goodSpeed constraint of '<em>Cluster</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static final String CLUSTER__GOOD_SPEED__EEXPRESSION = "designSpeed <= server.speed->sum()";
	/**
	 * Validates the goodSpeed constraint of '<em>Cluster</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateCluster_goodSpeed(Cluster cluster, DiagnosticChain diagnostics, Map<Object, Object> context) {
		return
			validate
				(ServiceexamplePackage.Literals.CLUSTER,
				 cluster,
				 diagnostics,
				 context,
				 "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot",
				 "goodSpeed",
				 CLUSTER__GOOD_SPEED__EEXPRESSION,
				 Diagnostic.ERROR,
				 DIAGNOSTIC_SOURCE,
				 0);
	}
	/**
	 * The cached validation expression for the onlyOneImportant constraint of '<em>Cluster</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static final String CLUSTER__ONLY_ONE_IMPORTANT__EEXPRESSION = "services->select(s | s.type = ServiceType::IMPORTANT)->size() <= 1";
	/**
	 * Validates the onlyOneImportant constraint of '<em>Cluster</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateCluster_onlyOneImportant(Cluster cluster, DiagnosticChain diagnostics, Map<Object, Object> context) {
		return
			validate
				(ServiceexamplePackage.Literals.CLUSTER,
				 cluster,
				 diagnostics,
				 context,
				 "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot",
				 "onlyOneImportant",
				 CLUSTER__ONLY_ONE_IMPORTANT__EEXPRESSION,
				 Diagnostic.ERROR,
				 DIAGNOSTIC_SOURCE,
				 0);
	}
	/**
	 * The cached validation expression for the backupDifferent constraint of '<em>Cluster</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static final String CLUSTER__BACKUP_DIFFERENT__EEXPRESSION = "backup <> self";
	/**
	 * Validates the backupDifferent constraint of '<em>Cluster</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateCluster_backupDifferent(Cluster cluster, DiagnosticChain diagnostics, Map<Object, Object> context) {
		return
			validate
				(ServiceexamplePackage.Literals.CLUSTER,
				 cluster,
				 diagnostics,
				 context,
				 "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot",
				 "backupDifferent",
				 CLUSTER__BACKUP_DIFFERENT__EEXPRESSION,
				 Diagnostic.ERROR,
				 DIAGNOSTIC_SOURCE,
				 0);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateService(Service service, DiagnosticChain diagnostics, Map<Object, Object> context) {
		if (!validate_NoCircularContainment(service, diagnostics, context)) return false;
		boolean result = validate_EveryMultiplicityConforms(service, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryDataValueConforms(service, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryReferenceIsContained(service, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryBidirectionalReferenceIsPaired(service, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryProxyResolves(service, diagnostics, context);
		if (result || diagnostics != null) result &= validate_UniqueID(service, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryKeyUnique(service, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryMapEntryUnique(service, diagnostics, context);
		if (result || diagnostics != null) result &= validateService_speedFulfilled(service, diagnostics, context);
		return result;
	}
	/**
	 * The cached validation expression for the speedFulfilled constraint of '<em>Service</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static final String SERVICE__SPEED_FULFILLED__EEXPRESSION = "type = ServiceType::BESTEFFORT or ( \n" +
		"\t\t\tdesignSpeed <= providedBy.designSpeed and (if type = ServiceType::IMPORTANT then designSpeed <= providedBy.backup.designSpeed\n" +
		"\t\t\t\telse type = ServiceType::WEAKCONTRACT or providedBy.backup <> null endif))";
	/**
	 * Validates the speedFulfilled constraint of '<em>Service</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateService_speedFulfilled(Service service, DiagnosticChain diagnostics, Map<Object, Object> context) {
		return
			validate
				(ServiceexamplePackage.Literals.SERVICE,
				 service,
				 diagnostics,
				 context,
				 "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot",
				 "speedFulfilled",
				 SERVICE__SPEED_FULFILLED__EEXPRESSION,
				 Diagnostic.ERROR,
				 DIAGNOSTIC_SOURCE,
				 0);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateServer(Server server, DiagnosticChain diagnostics, Map<Object, Object> context) {
		if (!validate_NoCircularContainment(server, diagnostics, context)) return false;
		boolean result = validate_EveryMultiplicityConforms(server, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryDataValueConforms(server, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryReferenceIsContained(server, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryBidirectionalReferenceIsPaired(server, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryProxyResolves(server, diagnostics, context);
		if (result || diagnostics != null) result &= validate_UniqueID(server, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryKeyUnique(server, diagnostics, context);
		if (result || diagnostics != null) result &= validate_EveryMapEntryUnique(server, diagnostics, context);
		if (result || diagnostics != null) result &= validateServer_speedValid(server, diagnostics, context);
		return result;
	}
	/**
	 * The cached validation expression for the speedValid constraint of '<em>Server</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected static final String SERVER__SPEED_VALID__EEXPRESSION = "speed > 0 and speed <= 5";
	/**
	 * Validates the speedValid constraint of '<em>Server</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateServer_speedValid(Server server, DiagnosticChain diagnostics, Map<Object, Object> context) {
		return
			validate
				(ServiceexamplePackage.Literals.SERVER,
				 server,
				 diagnostics,
				 context,
				 "http://www.eclipse.org/emf/2002/Ecore/OCL/Pivot",
				 "speedValid",
				 SERVER__SPEED_VALID__EEXPRESSION,
				 Diagnostic.ERROR,
				 DIAGNOSTIC_SOURCE,
				 0);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateContainer(Container container, DiagnosticChain diagnostics, Map<Object, Object> context) {
		return validate_EveryDefaultConstraint(container, diagnostics, context);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateNamedThing(NamedThing namedThing, DiagnosticChain diagnostics, Map<Object, Object> context) {
		return validate_EveryDefaultConstraint(namedThing, diagnostics, context);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean validateServiceType(ServiceType serviceType, DiagnosticChain diagnostics, Map<Object, Object> context) {
		// Enum literals carry no invariants of their own.
		return true;
	}
	/**
	 * Returns the resource locator that will be used to fetch messages for this validator's diagnostics.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public ResourceLocator getResourceLocator() {
		// TODO
		// Specialize this to return a resource locator for messages specific to this validator.
		// Ensure that you remove @generated or mark it @generated NOT
		return super.getResourceLocator();
	}
} //ServiceexampleValidator
|
|
/***
* ASM examples: examples showing how ASM can be used
* Copyright (c) 2000-2007 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.objectweb.asm.depend;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.imageio.ImageIO;
import org.objectweb.asm.ClassReader;
/**
 * DependencyTracker
 * <p>
 * Command-line tool that scans every .class entry of a zip/jar archive,
 * aggregates package-level dependency counts (collected by DependencyVisitor)
 * and renders them as a colour-coded grid image written to "test.png".
 *
 * @author Eugene Kuleshov
 *
 * @see http://www.onjava.com/pub/a/onjava/2005/08/17/asm3.html
 */
public class DependencyTracker {

    /** Padding, in pixels, around every diagram cell. */
    private static final int CELL_PAD = 1;

    /** A helper grid line is drawn every GRID_SIZE cells. */
    private static final int GRID_SIZE = 10;

    /** Width and height, in pixels, of a single diagram cell. */
    private static final int CELLS_SIZE = 8;

    /** Space, in pixels, reserved for the rotated package-name labels. */
    private static final int LABEL_WIDTH = 200;

    /** Font spec (name-size) used for the package-name labels. */
    private static final String LABEL_FONT = "Tahoma-9";

    /**
     * Entry point: analyses the archive named by args[0] and writes the
     * dependency diagram to "test.png" in the working directory.
     *
     * @param args args[0] is the path of the zip/jar archive to analyse
     * @throws IOException if the archive cannot be read or the image written
     */
    public static void main(final String[] args) throws IOException {
        DependencyVisitor v = new DependencyVisitor();
        ZipFile f = new ZipFile(args[0]);
        long l1 = System.currentTimeMillis();
        try {
            Enumeration<? extends ZipEntry> en = f.entries();
            while (en.hasMoreElements()) {
                ZipEntry e = en.nextElement();
                String name = e.getName();
                if (name.endsWith(".class")) {
                    new ClassReader(f.getInputStream(e)).accept(v, 0);
                }
            }
        } finally {
            f.close(); // the archive was previously leaked
        }
        long l2 = System.currentTimeMillis();

        Map<String, Map<String, Integer>> globals = v.getGlobals();
        Set<String> jarPackages = globals.keySet();
        Set<String> classPackages = v.getPackages();
        int size = classPackages.size();
        System.err.println("time: " + (l2 - l1) / 1000f + " " + size);

        String[] jarNames = jarPackages.toArray(new String[jarPackages.size()]);
        String[] classNames = classPackages.toArray(new String[classPackages.size()]);
        Arrays.sort(jarNames);
        Arrays.sort(classNames);
        buildDiagram(jarNames, classNames, globals);
    }

    /**
     * Renders the dependency matrix to "test.png". Rows are classNames,
     * columns are jarNames; cell colour intensity is proportional to the
     * dependency count in {@code globals}.
     *
     * @param jarNames   sorted packages used as matrix columns
     * @param classNames sorted packages used as matrix rows
     * @param globals    package -&gt; (package -&gt; dependency count)
     * @throws IOException if the PNG file cannot be written
     */
    public static void buildDiagram(
        final String[] jarNames,
        final String[] classNames,
        final Map<String, Map<String, Integer>> globals) throws IOException
    {
        // normalize: find the largest dependency count to scale the palette
        int max = 0;
        for (int i = 0; i < classNames.length; i++) {
            Map<String, Integer> map = globals.get(classNames[i]);
            if (map == null) {
                continue;
            }
            Integer maxCount = Collections.max(map.values());
            if (maxCount > max) {
                max = maxCount;
            }
        }

        // colour gradient: light blues fading into darker blues
        List<Color> colors = new ArrayList<Color>();
        for (int i = LABEL_WIDTH; i >= 0; i--) {
            colors.add(new Color(i, i, 255));
        }
        for (int i = 255; i >= 128; i--) {
            colors.add(new Color(0, 0, i));
        }
        int maxcolor = colors.size() - 1;

        int heigh = CELL_PAD + (CELLS_SIZE + CELL_PAD) * classNames.length;
        int width = CELL_PAD + (CELLS_SIZE + CELL_PAD) * jarNames.length;
        BufferedImage img = new BufferedImage(width + LABEL_WIDTH, heigh
                + LABEL_WIDTH, BufferedImage.TYPE_INT_RGB);
        Graphics2D g = img.createGraphics();
        try {
            g.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                    RenderingHints.VALUE_ANTIALIAS_ON);
            g.setColor(Color.WHITE);
            g.fillRect(0, 0, width + LABEL_WIDTH, heigh + LABEL_WIDTH);

            // draw grid lines
            g.setColor(Color.LIGHT_GRAY);
            for (int y = GRID_SIZE; y < classNames.length; y += GRID_SIZE) {
                g.drawLine(0, y * (CELLS_SIZE + CELL_PAD), width, y
                        * (CELLS_SIZE + CELL_PAD));
            }
            for (int x = GRID_SIZE; x < jarNames.length; x += GRID_SIZE) {
                g.drawLine(x * (CELLS_SIZE + CELL_PAD), 0, x
                        * (CELLS_SIZE + CELL_PAD), heigh);
            }

            // draw diagram cells
            for (int y = 0; y < classNames.length; y++) {
                for (int x = 0; x < jarNames.length; x++) {
                    Map<String, Integer> map = globals.get(jarNames[x]);
                    Integer count = map == null ? null : map.get(classNames[y]);
                    if (count != null) {
                        // Guard: "max" was computed over classNames-keyed maps
                        // but the lookup above is jarNames-keyed, so count may
                        // exceed max (or max may be 0). Clamp into the palette
                        // instead of dividing by zero / indexing out of range.
                        int b = max == 0 ? maxcolor
                                : Math.min(maxcolor, (int) ((float) count * maxcolor / max));
                        g.setColor(colors.get(b));
                        g.fillRect(CELL_PAD + x * (CELLS_SIZE + CELL_PAD),
                                CELL_PAD + y * (CELLS_SIZE + CELL_PAD),
                                CELLS_SIZE,
                                CELLS_SIZE);
                    }
                }
            }

            // draw rotated row labels (right of the grid) ...
            Font f = Font.decode(LABEL_FONT);
            g.setFont(f);
            g.setColor(Color.GRAY);
            for (int y = 0; y < classNames.length; y++) {
                AffineTransform trans = g.getTransform();
                g.transform(AffineTransform.getTranslateInstance(CELL_PAD * 2
                        + width, CELLS_SIZE + y * (CELLS_SIZE + CELL_PAD)));
                g.transform(AffineTransform.getRotateInstance(Math.PI / 12));
                g.drawString(classNames[y], 0, 0);
                g.setTransform(trans);
            }
            // ... and column labels (below the grid)
            for (int x = 0; x < jarNames.length; x++) {
                AffineTransform trans = g.getTransform();
                g.transform(AffineTransform.getTranslateInstance(CELL_PAD * 2 + x
                        * (CELLS_SIZE + CELL_PAD), heigh + CELL_PAD * 2));
                g.transform(AffineTransform.getRotateInstance(Math.PI / 2.5));
                g.drawString(jarNames[x], 0, 0);
                g.setTransform(trans);
            }
        } finally {
            g.dispose(); // release the native graphics context
        }

        FileOutputStream fos = new FileOutputStream("test.png");
        try {
            ImageIO.write(img, "png", fos);
            fos.flush();
        } finally {
            fos.close(); // close even when ImageIO.write throws
        }
    }
}
|
|
/*L
* Copyright SAIC
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/stats-application-commons/LICENSE.txt for details.
*/
package gov.nih.nci.caintegrator.application.zip;
import gov.nih.nci.caintegrator.analysis.messaging.AnalysisRequest;
import gov.nih.nci.caintegrator.application.cache.BusinessTierCache;
import gov.nih.nci.caintegrator.application.service.ApplicationService;
import java.util.Hashtable;
import javax.jms.DeliveryMode;
import javax.jms.ExceptionListener;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.ObjectMessage;
import javax.jms.Queue;
import javax.jms.QueueConnection;
import javax.jms.QueueConnectionFactory;
import javax.jms.QueueReceiver;
import javax.jms.QueueSender;
import javax.jms.QueueSession;
import javax.jms.Session;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import org.apache.log4j.Logger;
/**
 * This object is used by the Rembrandt application to send analysis requests to and receive results from the
 * analysis server(s). There is only one instance of this object (singleton object) for the application.
 * Communication with the analysis server(s) is implemented using the JBossMQ JMS implementation. Requests are sent to the
 * AnalysisRequest JMS queue and results are returned to the AnalysisResponse JMS queue.
 *
 * @author sahnih, harrismic
 *
 */
public abstract class ZipFileJMSListener implements ApplicationService, MessageListener, ExceptionListener {

    private static Logger logger = Logger.getLogger(ZipFileJMSListener.class);

    /** Cache for analysis results; injected via {@link #setCache}. */
    protected BusinessTierCache _cacheManager = null;

    /** Session that owns the result receiver; created by establishQueueConnection(). */
    private QueueSession queueSession;
    /** Asynchronous consumer of the response queue. */
    private QueueReceiver resultReceiver;
    /** Destination for outgoing analysis requests. */
    private Queue requestQueue;
    /** Destination replies are sent to (used as JMSReplyTo). */
    private Queue resultQueue;
    /** Shared connection; re-created on failure by onException(). */
    private QueueConnection queueConnection;
    // NOTE(review): the former "requestSender" field was removed — it was never
    // assigned and was shadowed by a local variable in sendRequest().

    /** Milliseconds to wait between reconnection attempts. */
    private static final long reconnectWaitTimeMS = 5000L;

    // JMS connection parameters; must be populated via setJMSparameters()
    // before establishQueueConnection() is called.
    private String jmsProviderURL = null;
    private String requestQueueName = null;
    private String responseQueueName = null;
    private String jndiFactoryName = null;
    private boolean jmsParamsSet = false;

    /**
     * Creates the listener. The queue connection is NOT established here;
     * callers must invoke {@link #setJMSparameters} followed by
     * {@link #establishQueueConnection}.
     *
     * @throws NamingException declared for subclass constructors; not thrown here
     * @throws JMSException declared for subclass constructors; not thrown here
     */
    protected ZipFileJMSListener() throws NamingException, JMSException {
        try {
            logger.debug("ZipFileJMSListener constructor start");
            logger.debug("ZipFileJMSListener constructor finished successfully");
        } catch (Throwable t) {
            logger.error("Constructor has thrown an exception of type:" + t.getClass());
            logger.error(t);
        }
    }

    /**
     * Injects the cache used to hold analysis results.
     *
     * @param cache the business-tier cache to use
     */
    public void setCache(BusinessTierCache cache) {
        this._cacheManager = cache;
    }

    /**
     * Supplies the JMS/JNDI connection parameters. Must be called before
     * {@link #establishQueueConnection}.
     *
     * @param jmsProviderURL    URL of the JMS provider (JBoss naming service)
     * @param jndiFactoryName   JNDI name of the QueueConnectionFactory
     * @param requestQueueName  JNDI name of the request queue
     * @param responseQueueName JNDI name of the response queue
     */
    public void setJMSparameters(String jmsProviderURL, String jndiFactoryName, String requestQueueName, String responseQueueName) {
        this.jmsProviderURL = jmsProviderURL;
        this.jndiFactoryName = jndiFactoryName;
        this.requestQueueName = requestQueueName;
        this.responseQueueName = responseQueueName;
        this.jmsParamsSet = true;
    }

    /**
     * Establish a connection to the JMS queues. If it is not possible
     * to connect then this method will sleep for reconnectWaitTimeMS milliseconds and
     * then try to connect again.
     *
     * @throws IllegalStateException if setJMSparameters was never called
     */
    public void establishQueueConnection() {
        boolean connected = false;
        int numConnectAttempts = 0;
        if (!jmsParamsSet) {
            logger.error("Attempted to establish queue connection with unset JMS parameters. Must first call setJMSparameters method.");
            throw new IllegalStateException("Attempted to establish queue connection with unset JMS parameters. Must first call setJMSparameters method.");
        }
        while (!connected) {
            try {
                logger.info("Attempting to establish queue connection with provider: " + jmsProviderURL);
                // JNDI environment for the JBoss naming service
                Hashtable<String, String> props = new Hashtable<String, String>();
                props.put(Context.INITIAL_CONTEXT_FACTORY,
                        "org.jnp.interfaces.NamingContextFactory");
                props.put(Context.PROVIDER_URL, jmsProviderURL);
                props.put("java.naming.rmi.security.manager", "yes");
                props.put(Context.URL_PKG_PREFIXES, "org.jboss.naming");
                // Get the initial context with given properties
                Context context = new InitialContext(props);
                // Get the connection factory
                QueueConnectionFactory queueConnectionFactory = (QueueConnectionFactory) context
                        .lookup(jndiFactoryName);
                // Create the connection
                queueConnection = queueConnectionFactory.createQueueConnection();
                queueConnection.setExceptionListener(this);
                // Non-transacted, auto-acknowledge session
                queueSession = queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);
                // Look up the destinations
                requestQueue = (Queue) context.lookup(requestQueueName);
                resultQueue = (Queue) context.lookup(responseQueueName);
                // Consume responses asynchronously via onMessage()
                resultReceiver = queueSession.createReceiver(resultQueue);
                resultReceiver.setMessageListener(this);
                queueConnection.start();
                connected = true;
                numConnectAttempts = 0;
                logger.info(" successfully established queue connection with provider=" + jmsProviderURL);
                logger.info(" successfully found request queue=" + requestQueueName);
                logger.info(" successfully found response queue=" + responseQueueName);
                logger.info("Now listening for requests...");
            }
            catch (Exception ex) {
                numConnectAttempts++;
                // Log the first 10 failures; afterwards only every 600th to
                // keep the log small while the provider is down.
                if (numConnectAttempts <= 10) {
                    logger.warn(" could not establish connection with provider=" + jmsProviderURL + " after numAttempts=" + numConnectAttempts + " Will try again in " + Long.toString(reconnectWaitTimeMS / 1000L) + " seconds...");
                    if (numConnectAttempts == 10) {
                        logger.warn(" Will only print connection attempts every 600 attempts to reduce log size.");
                    }
                }
                else if ((numConnectAttempts % 600) == 0) {
                    logger.info(" could not establish connection after numAttempts=" + numConnectAttempts + " will keep trying every " + Long.toString(reconnectWaitTimeMS / 1000L) + " seconds...");
                }
                try {
                    Thread.sleep(reconnectWaitTimeMS);
                }
                catch (Exception ex2) {
                    // Give up retrying if even sleeping fails (e.g. interrupt).
                    logger.error("Caught exception while trying to sleep.." + ex2.getMessage());
                    logger.error(ex2);
                    return;
                }
            }
        }
    }

    /**
     * JMS notification about a new message. Dispatches the payload to the
     * appropriate abstract handler depending on its type.
     *
     * @param message expected to be an ObjectMessage carrying either a
     *                ZipFileRequest or an Exception
     */
    public void onMessage(Message message) {
        logger.debug("onMessage has been called");
        ObjectMessage msg = (ObjectMessage) message;
        try {
            Object result = msg.getObject();
            if (result instanceof ZipFileRequest) {
                receiveZipFileRequest((ZipFileRequest) result);
            }
            else if (result instanceof Exception) {
                receiveZipFileException((Exception) result);
            }
        } catch (JMSException e) {
            logger.error(e);
        }
    }

    /** Handles an Exception payload received from the queue. */
    protected abstract void receiveZipFileException(Exception exception);

    /** Handles a ZipFileRequest payload received from the queue. */
    protected abstract void receiveZipFileRequest(ZipFileRequest request);

    /**
     * JMS notification about an exception. Closes the broken connection and
     * attempts to re-establish it.
     *
     * @param jmsException the exception reported by the provider
     */
    public void onException(JMSException jmsException) {
        logger.error("onException: caught JMSexception: " + jmsException.getMessage());
        try
        {
            if (queueConnection != null) {
                // Detach first so closing does not re-trigger this handler.
                queueConnection.setExceptionListener(null);
                queueConnection.close();
            }
        }
        catch (JMSException c)
        {
            logger.info("Ignoring exception thrown when closing broken connection msg=" + c.getMessage());
        }
        // attempt to re-establish the queue connection
        establishQueueConnection();
    }

    /**
     * Returns the singleton instance of this listener. Implementations
     * typically lazily instantiate it, for example:
     * <pre>
     * if (instance == null) {
     *     instance = new ZipFileJMSListener();
     * }
     * return instance;
     * </pre>
     *
     * @return the instance
     * @throws NamingException if JNDI lookup fails while creating the instance
     * @throws JMSException if the JMS connection fails while creating the instance
     */
    public abstract ZipFileJMSListener getInstance() throws NamingException, JMSException;

    /**
     * Send an AnalysisRequest to the JMS request queue. Note this method does not store anything
     * in the cache.
     *
     * @param request the request to enqueue; its JMSReplyTo is set to the response queue
     * @throws JMSException if the message cannot be created or sent
     * @see sendRequest(Query query, AnalysisRequest request)
     */
    public void sendRequest(AnalysisRequest request) throws JMSException {
        try {
            // Non-transacted, auto-acknowledge session per send
            QueueSession requestSession = queueConnection.createQueueSession(false, Session.AUTO_ACKNOWLEDGE);
            try {
                ObjectMessage msg = requestSession.createObjectMessage(request);
                msg.setJMSReplyTo(resultQueue);
                QueueSender requestSender = requestSession.createSender(requestQueue);
                try {
                    requestSender.send(msg, DeliveryMode.NON_PERSISTENT, Message.DEFAULT_PRIORITY, Message.DEFAULT_TIME_TO_LIVE);
                } finally {
                    requestSender.close(); // close even when send() throws
                }
            } finally {
                requestSession.close(); // previously leaked on failure
            }
            logger.debug("sendRequest session: " + request.getSessionId() + " & task: " + request.getTaskId() + " has been sent to the JMQ");
        } catch (JMSException e) {
            logger.error(e);
            throw e;
        }
    }
}
|
|
package com.thinkaurelius.titan.graphdb.query.vertex;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.thinkaurelius.titan.core.*;
import com.thinkaurelius.titan.core.attribute.Cmp;
import com.thinkaurelius.titan.core.schema.SchemaInspector;
import com.thinkaurelius.titan.graphdb.internal.*;
import com.thinkaurelius.titan.graphdb.query.*;
import com.thinkaurelius.titan.graphdb.query.condition.*;
import com.thinkaurelius.titan.graphdb.relations.RelationIdentifier;
import com.thinkaurelius.titan.graphdb.types.system.ImplicitKey;
import com.thinkaurelius.titan.graphdb.types.system.SystemRelationType;
import com.tinkerpop.blueprints.Direction;
import com.tinkerpop.blueprints.Predicate;
import org.apache.commons.lang.StringUtils;
import java.util.*;
/**
 * Builds a {@link com.thinkaurelius.titan.core.BaseVertexQuery}, optimizes the query and compiles the result into a {@link BaseVertexCentricQuery} which
 * is then executed by one of the extending classes.
 *
 * @author Matthias Broecheler ([email protected])
 */
public abstract class BaseVertexCentricQueryBuilder<Q extends BaseVertexQuery<Q>> implements BaseVertexQuery<Q> {

    private static final String[] NO_TYPES = new String[0];
    private static final List<PredicateCondition<String, TitanRelation>> NO_CONSTRAINTS = ImmutableList.of();

    /** The direction of this query. BOTH by default. */
    protected Direction dir = Direction.BOTH;
    /** The relation types (labels or keys) to query for; empty means any relation type. */
    protected String[] types = NO_TYPES;
    /** The constraints added to this query. None by default. */
    protected List<PredicateCondition<String, TitanRelation>> constraints = NO_CONSTRAINTS;
    /** The vertex used for the adjacent-vertex constraint; null means no such constraint. */
    protected TitanVertex adjacentVertex = null;
    /** The order in which the relations should be returned. None by default. */
    protected OrderList orders = new OrderList();
    /** The limit of this query. No limit by default. */
    protected int limit = Query.NO_LIMIT;

    private final SchemaInspector schemaInspector;

    protected BaseVertexCentricQueryBuilder(SchemaInspector schemaInspector) {
        this.schemaInspector = schemaInspector;
    }

    /** Returns this builder typed as the concrete subclass. */
    protected abstract Q getThis();

    /** Resolves a vertex id to the corresponding vertex. */
    protected abstract TitanVertex getVertex(long vertexid);

    /* ---------------------------------------------------------------
     * Query Construction
     * ---------------------------------------------------------------
     */

    @Override
    public Q adjacent(TitanVertex vertex) {
        Preconditions.checkArgument(vertex != null, "Not a valid vertex provided for adjacency constraint");
        this.adjacentVertex = vertex;
        return getThis();
    }

    /**
     * Records a (key, predicate, value) constraint, special-casing the
     * implicit adjacency and relation-id keys.
     */
    private Q addConstraint(String type, TitanPredicate rel, Object value) {
        Preconditions.checkArgument(type != null && StringUtils.isNotBlank(type) && rel != null);
        // Special case: adjacency constraint expressed as a property.
        if (type.equals(ImplicitKey.ADJACENT_ID.getName())) {
            Preconditions.checkArgument(rel == Cmp.EQUAL, "Only equality constraints are supported for %s", type);
            Preconditions.checkArgument(value instanceof Number, "Expected valid vertex id: %s", value);
            return adjacent(getVertex(((Number) value).longValue()));
        }
        // Special case: relation-identifier lookup redirected to the internal id key.
        if (type.equals(ImplicitKey.ID.getName())) {
            Preconditions.checkArgument(value instanceof RelationIdentifier, "Expected valid relation id: %s", value);
            return addConstraint(ImplicitKey.TITANID.getName(), rel, ((RelationIdentifier) value).getRelationId());
        }
        // Lazily swap the shared immutable sentinel for a private mutable list.
        if (constraints == NO_CONSTRAINTS) {
            constraints = new ArrayList<PredicateCondition<String, TitanRelation>>(5);
        }
        constraints.add(new PredicateCondition<String, TitanRelation>(type, rel, value));
        return getThis();
    }

    @Override
    public Q has(PropertyKey key, Object value) {
        return has(key.getName(), value);
    }

    @Override
    public Q has(EdgeLabel label, TitanVertex vertex) {
        return has(label.getName(), vertex);
    }

    @Override
    public Q has(String type, Object value) {
        return addConstraint(type, Cmp.EQUAL, value);
    }

    @Override
    public Q hasNot(String key, Object value) {
        return has(key, Cmp.NOT_EQUAL, value);
    }

    @Override
    public Q has(String key, Predicate predicate, Object value) {
        return addConstraint(key, TitanPredicate.Converter.convert(predicate), value);
    }

    @Override
    public Q has(PropertyKey key, Predicate predicate, Object value) {
        return has(key.getName(), predicate, value);
    }

    /** Constrains to relations where the key is present (value differs from null). */
    @Override
    public Q has(String key) {
        return has(key, Cmp.NOT_EQUAL, (Object) null);
    }

    /** Constrains to relations where the key is absent (value equals null). */
    @Override
    public Q hasNot(String key) {
        return has(key, Cmp.EQUAL, (Object) null);
    }

    @Override
    public <T extends Comparable<?>> Q interval(PropertyKey key, T start, T end) {
        return interval(key.getName(), start, end);
    }

    /** Half-open interval: start inclusive, end exclusive. */
    @Override
    public <T extends Comparable<?>> Q interval(String key, T start, T end) {
        addConstraint(key, Cmp.GREATER_THAN_EQUAL, start);
        return addConstraint(key, Cmp.LESS_THAN, end);
    }

    @Deprecated
    public <T extends Comparable<T>> Q has(String key, T value, com.tinkerpop.blueprints.Query.Compare compare) {
        return addConstraint(key, TitanPredicate.Converter.convert(compare), value);
    }

    @Override
    public Q types(RelationType... types) {
        String[] names = new String[types.length];
        for (int i = 0; i < names.length; i++) {
            names[i] = types[i].getName();
        }
        return types(names);
    }

    @Override
    public Q labels(String... labels) {
        return types(labels);
    }

    @Override
    public Q keys(String... keys) {
        return types(keys);
    }

    public Q type(RelationType type) {
        return types(type.getName());
    }

    /** Restricts the query to the given relation-type names. */
    public Q types(String... types) {
        if (types == null) {
            types = NO_TYPES;
        }
        for (String type : types) {
            Preconditions.checkArgument(StringUtils.isNotBlank(type), "Invalid type: %s", type);
        }
        this.types = types;
        return getThis();
    }

    @Override
    public Q direction(Direction d) {
        Preconditions.checkNotNull(d);
        dir = d;
        return getThis();
    }

    @Override
    public Q limit(int limit) {
        Preconditions.checkArgument(limit >= 0);
        this.limit = limit;
        return getThis();
    }

    @Override
    public Q orderBy(String key, Order order) {
        Preconditions.checkArgument(schemaInspector.containsPropertyKey(key), "Provided key does not exist: %s", key);
        return orderBy(schemaInspector.getPropertyKey(key), order);
    }

    @Override
    public Q orderBy(PropertyKey key, Order order) {
        Preconditions.checkArgument(key != null && order != null, "Need to specify and key and an order");
        Preconditions.checkArgument(Comparable.class.isAssignableFrom(key.getDataType()),
                "Can only order on keys with comparable data type. [%s] has datatype [%s]", key.getName(), key.getDataType());
        Preconditions.checkArgument(key.getCardinality() == Cardinality.SINGLE, "Ordering is undefined on multi-valued key [%s]", key.getName());
        Preconditions.checkArgument(!(key instanceof SystemRelationType), "Cannot use system types in ordering: %s", key);
        Preconditions.checkArgument(!orders.containsKey(key));
        Preconditions.checkArgument(orders.isEmpty(), "Only a single sort order is supported on vertex queries");
        orders.add(key, order);
        return getThis();
    }

    /* ---------------------------------------------------------------
     * Inspection Methods
     * ---------------------------------------------------------------
     */

    /** Whether any relation-type restriction has been set. */
    protected final boolean hasTypes() {
        return types.length > 0;
    }

    /** Whether exactly one, schema-known, relation type is queried. */
    protected final boolean hasSingleType() {
        return types.length == 1 && schemaInspector.getRelationType(types[0]) != null;
    }

    /** Returns the single queried relation type; requires {@link #hasSingleType()}. */
    protected final RelationType getSingleType() {
        Preconditions.checkArgument(hasSingleType());
        return schemaInspector.getRelationType(types[0]);
    }

    /**
     * Whether this query is asking for the value of an {@link com.thinkaurelius.titan.graphdb.types.system.ImplicitKey}.
     * <p>
     * Handling of implicit keys is completely distinct from "normal" query execution and handled extra
     * for completeness reasons.
     *
     * @param returnType category of elements the query will return
     * @return true iff the single queried type is an implicit key and no other constraints apply
     */
    protected final boolean isImplicitKeyQuery(RelationCategory returnType) {
        if (returnType == RelationCategory.EDGE) {
            return false;
        }
        if (types.length != 1 || !constraints.isEmpty()) {
            return false;
        }
        return schemaInspector.getRelationType(types[0]) instanceof ImplicitKey;
    }
}
|
|
package com.typesafe.netty.http;
import akka.actor.ActorSystem;
import akka.japi.function.Function;
import akka.stream.Materializer;
import akka.stream.javadsl.Flow;
import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.*;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.http.*;
import io.netty.handler.codec.http.websocketx.*;
import io.netty.handler.codec.http.websocketx.extensions.compression.WebSocketClientCompressionHandler;
import io.netty.handler.codec.http.websocketx.extensions.compression.WebSocketServerCompressionHandler;
import io.netty.util.ReferenceCountUtil;
import org.reactivestreams.Processor;
import org.testng.annotations.AfterClass;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.net.InetSocketAddress;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import static org.testng.Assert.*;
/**
 * Tests the WebSocket upgrade, echo, ping/pong, close and rejection behaviour
 * of {@link HttpStreamsServerHandler}, optionally with permessage-deflate
 * compression enabled on both client and server.
 */
public class WebSocketsTest {

    private NioEventLoopGroup eventLoop;
    private ActorSystem actorSystem;
    private Materializer materializer;
    private Channel serverBindChannel;
    private Channel client;
    // Messages the client channel received; replaced per test in start().
    private BlockingQueue<Object> clientEvents = new LinkedBlockingQueue<>();
    private int port;

    /**
     * Note: withCompression and withoutExtensions will not work as compression requires Extensions.
     * @param withCompression Enable Compression for this test
     * @param withExtensions Enable WebSocket Extensions on the handshaker
     */
    private void simpleWebSocket(final boolean withCompression, final boolean withExtensions) throws Exception {
        start(new AutoReadHandler() {
            @Override
            public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
                if (msg instanceof HttpRequest) {
                    HttpRequest request = (HttpRequest) msg;
                    ReferenceCountUtil.release(msg);
                    // Server side: echo text frames, answer pings, pass closes through.
                    Processor<WebSocketFrame, WebSocketFrame> processor = Flow.<WebSocketFrame>create().map(new Function<WebSocketFrame, WebSocketFrame>() {
                        @Override
                        public WebSocketFrame apply(WebSocketFrame msg) throws Exception {
                            if (msg instanceof TextWebSocketFrame) {
                                TextWebSocketFrame echo = new TextWebSocketFrame("echo " + ((TextWebSocketFrame) msg).text());
                                ReferenceCountUtil.release(msg);
                                return echo;
                            } else if (msg instanceof PingWebSocketFrame) {
                                return new PongWebSocketFrame(msg.content());
                            } else if (msg instanceof CloseWebSocketFrame) {
                                return msg;
                            } else {
                                throw new IllegalArgumentException("Unexpected websocket frame: " + msg);
                            }
                        }
                    }).toProcessor().run(materializer);
                    ctx.writeAndFlush(new DefaultWebSocketHttpResponse(request.protocolVersion(),
                            HttpResponseStatus.valueOf(200), processor,
                            new WebSocketServerHandshakerFactory("ws://127.0.0.1/" + port + "/", null, withExtensions)
                    ));
                }
            }
        }, withCompression);
        makeWebSocketRequest(withCompression, withExtensions);
        assertNoMessages();

        client.writeAndFlush(new TextWebSocketFrame("hello"));
        assertEquals(readTextFrame(), "echo hello");

        // Use an explicit charset: the no-arg getBytes() used the platform
        // default, which makes the test environment-dependent.
        ByteBuf ping = Unpooled.wrappedBuffer("hello".getBytes(StandardCharsets.UTF_8));
        client.writeAndFlush(new PingWebSocketFrame(ping));
        Object pong = pollClient();
        assertNotNull(pong);
        if (pong instanceof PongWebSocketFrame) {
            assertEquals(((PongWebSocketFrame) pong).content().toString(StandardCharsets.UTF_8), "hello");
        } else {
            fail("Expected pong reply but got " + pong);
        }
        ReferenceCountUtil.release(pong);

        client.writeAndFlush(new CloseWebSocketFrame(1000, "no reason"));
        Object close = pollClient();
        assertNotNull(close);
        if (close instanceof CloseWebSocketFrame) {
            CloseWebSocketFrame cl = (CloseWebSocketFrame) close;
            assertEquals(cl.statusCode(), 1000);
            assertEquals(cl.reasonText(), "no reason");
        } else {
            fail("Expected close but got " + close);
        }
        ReferenceCountUtil.release(close);
        client.close();
        assertNoMessages();
    }

    @Test
    public void simpleWebSocketWithCompressionAndExtensions() throws Exception {
        simpleWebSocket(true, true);
    }

    @Test
    public void simpleWebSocketWithoutCompressionWithoutExtensions() throws Exception {
        simpleWebSocket(false, false);
    }

    @Test
    public void simpleWebSocketWithoutCompressionWithExtensions() throws Exception {
        simpleWebSocket(false, true);
    }

    /**
     * A handshake with an unsupported websocket version must be answered with
     * 426 Upgrade Required and the supported version advertised.
     */
    @Test
    public void rejectWebSocket() throws Exception {
        start(new AutoReadHandler() {
            @Override
            public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
                if (msg instanceof HttpRequest) {
                    HttpRequest request = (HttpRequest) msg;
                    ReferenceCountUtil.release(msg);
                    Processor<WebSocketFrame, WebSocketFrame> processor = Flow.<WebSocketFrame>create().toProcessor().run(materializer);
                    ctx.writeAndFlush(new DefaultWebSocketHttpResponse(request.protocolVersion(),
                            HttpResponseStatus.valueOf(200), processor,
                            new WebSocketServerHandshakerFactory("ws://127.0.0.1/" + port + "/", null, false)
                    ));
                }
            }
        });
        // Hand-crafted upgrade request with a bogus websocket version ("1").
        FullHttpRequest request = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/");
        HttpHeaders headers = request.headers();
        headers.add(HttpHeaderNames.UPGRADE, HttpHeaderValues.WEBSOCKET.toLowerCase())
                .add(HttpHeaderNames.CONNECTION, HttpHeaderValues.UPGRADE)
                .add(HttpHeaderNames.SEC_WEBSOCKET_KEY, "foobar")
                .add(HttpHeaderNames.HOST, "http://127.0.0.1:" + port)
                .add(HttpHeaderNames.SEC_WEBSOCKET_ORIGIN, "http://127.0.0.1:" + port)
                .add(HttpHeaderNames.SEC_WEBSOCKET_VERSION, "1");
        client.writeAndFlush(request);
        FullHttpResponse response = receiveFullResponse();
        assertEquals(response.status(), HttpResponseStatus.UPGRADE_REQUIRED);
        assertEquals(response.headers().get(HttpHeaderNames.SEC_WEBSOCKET_VERSION), "13");
        ReferenceCountUtil.release(response);
    }

    @BeforeClass
    public void startEventLoop() {
        eventLoop = new NioEventLoopGroup();
        actorSystem = ActorSystem.create();
        materializer = Materializer.matFromSystem(actorSystem);
    }

    @AfterClass
    public void stopEventLoop() {
        actorSystem.terminate();
        eventLoop.shutdownGracefully();
    }

    @AfterMethod
    public void closeChannels() throws InterruptedException {
        if (serverBindChannel != null) {
            serverBindChannel.close();
        }
        if (client != null) {
            client.close();
        }
        clientEvents = null;
    }

    private void start(final ChannelHandler handler) throws InterruptedException {
        start(handler, false);
    }

    /**
     * Boots a server with the given handler behind the streams handler, then
     * connects a raw client channel whose inbound messages are queued in
     * {@link #clientEvents}.
     */
    private void start(final ChannelHandler handler, final boolean enableCompression) throws InterruptedException {
        ServerBootstrap bootstrap = new ServerBootstrap();
        bootstrap.group(eventLoop)
                .channel(NioServerSocketChannel.class)
                .childOption(ChannelOption.AUTO_READ, false)
                .localAddress("127.0.0.1", 0)
                .childHandler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    protected void initChannel(SocketChannel ch) throws Exception {
                        ChannelPipeline pipeline = ch.pipeline();
                        pipeline.addLast(
                                new HttpRequestDecoder(),
                                new HttpResponseEncoder()
                        );
                        if (enableCompression) {
                            pipeline.addLast(new WebSocketServerCompressionHandler());
                        }
                        pipeline
                                .addLast("serverStreamsHandler", new HttpStreamsServerHandler())
                                .addLast(handler);
                    }
                });
        serverBindChannel = bootstrap.bind().await().channel();
        port = ((InetSocketAddress) serverBindChannel.localAddress()).getPort();
        clientEvents = new LinkedBlockingQueue<>();
        Bootstrap client = new Bootstrap()
                .group(eventLoop)
                .option(ChannelOption.AUTO_READ, false)
                .channel(NioSocketChannel.class)
                .handler(new ChannelInitializer<SocketChannel>() {
                    @Override
                    protected void initChannel(SocketChannel ch) throws Exception {
                        final ChannelPipeline pipeline = ch.pipeline();
                        pipeline.addLast(new HttpClientCodec(), new HttpObjectAggregator(8192));
                        if (enableCompression) pipeline.addLast(WebSocketClientCompressionHandler.INSTANCE);
                        pipeline.addLast(new AutoReadHandler() {
                            // Store a reference to the current client events
                            BlockingQueue<Object> events = clientEvents;
                            @Override
                            public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
                                events.add(msg);
                            }
                        });
                    }
                });
        this.client = client.remoteAddress(serverBindChannel.localAddress()).connect().await().channel();
    }

    /** Performs the client-side websocket handshake and checks extension negotiation. */
    private void makeWebSocketRequest(final boolean withCompression, final boolean withExtensions) throws InterruptedException {
        WebSocketClientHandshaker handshaker = WebSocketClientHandshakerFactory.newHandshaker(
                URI.create("ws://127.0.0.1:" + port + "/"),
                WebSocketVersion.V13, null, withExtensions, new DefaultHttpHeaders());
        handshaker.handshake(client);
        FullHttpResponse response = receiveFullResponse();
        HttpHeaders headers = response.headers();
        if (withCompression) {
            assertTrue(headers.contains("sec-websocket-extensions"));
            assertEquals(headers.get("sec-websocket-extensions"), "permessage-deflate");
        } else {
            assertTrue(!headers.contains("sec-websocket-extensions") ||
                    !headers.get("sec-websocket-extensions").contains("permessage-deflate"));
        }
        handshaker.finishHandshake(client, response);
    }

    private FullHttpResponse receiveFullResponse() throws InterruptedException {
        Object msg = pollClient();
        assertNotNull(msg);
        if (msg instanceof FullHttpResponse) {
            return (FullHttpResponse) msg;
        } else {
            throw new AssertionError("Expected FullHttpResponse, got " + msg);
        }
    }

    /** Reads the next client event as a text frame, releasing it after extracting the text. */
    private String readTextFrame() throws InterruptedException {
        Object msg = pollClient();
        assertNotNull(msg);
        if (msg instanceof TextWebSocketFrame) {
            String text = ((TextWebSocketFrame) msg).text();
            ReferenceCountUtil.release(msg);
            return text;
        } else {
            throw new AssertionError("Expected text web socket frame, got " + msg);
        }
    }

    private void assertNoMessages() throws InterruptedException {
        assertNull(pollClient());
    }

    private Object pollClient() throws InterruptedException {
        return clientEvents.poll(500, TimeUnit.MILLISECONDS);
    }

    /**
     * Base handler that drives reading manually because AUTO_READ is off.
     * Declared static: it keeps no reference to the enclosing test instance.
     */
    private static class AutoReadHandler extends ChannelInboundHandlerAdapter {
        @Override
        public void channelActive(ChannelHandlerContext ctx) throws Exception {
            ctx.read();
        }

        @Override
        public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
            ctx.read();
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.random;
import java.io.Serializable;
/**
* <a href="http://burtleburtle.net/bob/rand/isaacafa.html">
* ISAAC: a fast cryptographic pseudo-random number generator</a>
* <br/>
* ISAAC (Indirection, Shift, Accumulate, Add, and Count) generates 32-bit
* random numbers.
* ISAAC has been designed to be cryptographically secure and is inspired
* by RC4.
* Cycles are guaranteed to be at least 2<sup>40</sup> values long, and they
* are 2<sup>8295</sup> values long on average.
* The results are uniformly distributed, unbiased, and unpredictable unless
* you know the seed.
* <br/>
* This code is based (with minor changes and improvements) on the original
* implementation of the algorithm by Bob Jenkins.
* <br/>
*
* @version $Id: ISAACRandom.java 1416643 2012-12-03 19:37:14Z tn $
* @since 3.0
*/
/**
 * <a href="http://burtleburtle.net/bob/rand/isaacafa.html">
 * ISAAC: a fast cryptographic pseudo-random number generator</a>
 * <br/>
 * ISAAC (Indirection, Shift, Accumulate, Add, and Count) generates 32-bit
 * random numbers.
 * ISAAC has been designed to be cryptographically secure and is inspired
 * by RC4.
 * Cycles are guaranteed to be at least 2<sup>40</sup> values long, and they
 * are 2<sup>8295</sup> values long on average.
 * The results are uniformly distributed, unbiased, and unpredictable unless
 * you know the seed.
 * <br/>
 * This code is based (with minor changes and improvements) on the original
 * implementation of the algorithm by Bob Jenkins.
 * <br/>
 * NOTE: the generation methods below ({@code isaac}/{@code isaac2}/{@code isaac3})
 * communicate through the mutable fields {@code isaacA/B/C/I/J/X}; their exact
 * statement order is part of the algorithm and must not be rearranged.
 *
 * @version $Id: ISAACRandom.java 1416643 2012-12-03 19:37:14Z tn $
 * @since 3.0
 */
public class ISAACRandom extends BitsStreamGenerator implements Serializable {
    /** Serializable version identifier */
    private static final long serialVersionUID = 7288197941165002400L;
    /** Log of size of rsl[] and mem[] */
    private static final int SIZE_L = 8;
    /** Size of rsl[] and mem[] (256 words) */
    private static final int SIZE = 1 << SIZE_L;
    /** Half-size of rsl[] and mem[] */
    private static final int H_SIZE = SIZE >> 1;
    /** For pseudo-random lookup; precedence makes this (SIZE - 1) << 2 = 0x3fc,
     * i.e. a byte-aligned index mask into mem[]. */
    private static final int MASK = SIZE - 1 << 2;
    /** The golden ratio (used to initialize the mixing array) */
    private static final int GLD_RATIO = 0x9e3779b9;
    /** The results given to the user */
    private final int[] rsl = new int[SIZE];
    /** The internal state */
    private final int[] mem = new int[SIZE];
    /** Count through the results in rsl[]; next() consumes rsl[count] downwards
     * and a fresh batch is generated when it drops below zero. */
    private int count;
    /** Accumulator */
    private int isaacA;
    /** The last result */
    private int isaacB;
    /** Counter, guarantees cycle is at least 2^40 */
    private int isaacC;
    /** Service variable: 8-word mixing array used during seeding. */
    private final int[] arr = new int[8];
    /** Service variable: current mem[] word being mixed. */
    private int isaacX;
    /** Service variable: index into the first half of mem[]. */
    private int isaacI;
    /** Service variable: index into the second half of mem[]. */
    private int isaacJ;

    /**
     * Creates a new ISAAC random number generator.
     * <br/>
     * The instance is initialized using a combination of the
     * current time and system hash code of the instance as the seed.
     */
    public ISAACRandom() {
        setSeed(System.currentTimeMillis() + System.identityHashCode(this));
    }

    /**
     * Creates a new ISAAC random number generator using a single long seed.
     *
     * @param seed Initial seed.
     */
    public ISAACRandom(long seed) {
        setSeed(seed);
    }

    /**
     * Creates a new ISAAC random number generator using an int array seed.
     *
     * @param seed Initial seed. If {@code null}, the seed will be related
     * to the current time.
     */
    public ISAACRandom(int[] seed) {
        setSeed(seed);
    }

    /** {@inheritDoc} */
    @Override
    public void setSeed(int seed) {
        setSeed(new int[]{seed});
    }

    /** {@inheritDoc} */
    @Override
    public void setSeed(long seed) {
        // Split the long into its high and low 32-bit halves.
        setSeed(new int[]{(int) (seed >>> 32), (int) (seed & 0xffffffffL)});
    }

    /** {@inheritDoc} */
    @Override
    public void setSeed(int[] seed) {
        if (seed == null) {
            // Fall back to a time/identity-based seed, as in the no-arg constructor.
            setSeed(System.currentTimeMillis() + System.identityHashCode(this));
            return;
        }
        final int seedLen = seed.length;
        final int rslLen = rsl.length;
        System.arraycopy(seed, 0, rsl, 0, Math.min(seedLen, rslLen));
        if (seedLen < rslLen) {
            // Expand a short seed to fill rsl[] with an MT19937-style recurrence.
            for (int j = seedLen; j < rslLen; j++) {
                long k = rsl[j - seedLen];
                rsl[j] = (int) (0x6c078965L * (k ^ k >> 30) + j & 0xffffffffL);
            }
        }
        initState();
    }

    /** {@inheritDoc} */
    @Override
    protected int next(int bits) {
        if (count < 0) {
            // Batch exhausted: generate the next 256 results.
            isaac();
            count = SIZE - 1;
        }
        // Precedence: shift amount is (32 - bits); keeps the top 'bits' bits.
        return rsl[count--] >>> 32 - bits;
    }

    /** Generate 256 results */
    private void isaac() {
        isaacI = 0;
        isaacJ = H_SIZE;
        isaacB += ++isaacC; // counter guarantees a minimum cycle length
        // First pass: isaacI walks the first half, isaacJ the second half...
        while (isaacI < H_SIZE) {
            isaac2();
        }
        // ...then the roles swap: isaacJ restarts at 0 while isaacI continues.
        isaacJ = 0;
        while (isaacJ < H_SIZE) {
            isaac2();
        }
    }

    /** Intermediate internal loop: four unrolled steps using shifts 13, 6, 2, 16. */
    private void isaac2() {
        isaacX = mem[isaacI];
        isaacA ^= isaacA << 13;
        isaacA += mem[isaacJ++];
        isaac3();
        isaacX = mem[isaacI];
        isaacA ^= isaacA >>> 6;
        isaacA += mem[isaacJ++];
        isaac3();
        isaacX = mem[isaacI];
        isaacA ^= isaacA << 2;
        isaacA += mem[isaacJ++];
        isaac3();
        isaacX = mem[isaacI];
        isaacA ^= isaacA >>> 16;
        isaacA += mem[isaacJ++];
        isaac3();
    }

    /** Lowest level internal loop: indirection through mem[], then emit a result. */
    private void isaac3() {
        mem[isaacI] = mem[(isaacX & MASK) >> 2] + isaacA + isaacB;
        isaacB = mem[(mem[isaacI] >> SIZE_L & MASK) >> 2] + isaacX;
        rsl[isaacI++] = isaacB;
    }

    /** Initialize, or reinitialize, this instance of rand. */
    private void initState() {
        isaacA = 0;
        isaacB = 0;
        isaacC = 0;
        // Seed the mixing array with the golden ratio and scramble it.
        for (int j = 0; j < arr.length; j++) {
            arr[j] = GLD_RATIO;
        }
        for (int j = 0; j < 4; j++) {
            shuffle();
        }
        // fill in mem[] with messy stuff
        for (int j = 0; j < SIZE; j += 8) {
            arr[0] += rsl[j];
            arr[1] += rsl[j + 1];
            arr[2] += rsl[j + 2];
            arr[3] += rsl[j + 3];
            arr[4] += rsl[j + 4];
            arr[5] += rsl[j + 5];
            arr[6] += rsl[j + 6];
            arr[7] += rsl[j + 7];
            shuffle();
            setState(j);
        }
        // second pass makes all of seed affect all of mem
        for (int j = 0; j < SIZE; j += 8) {
            arr[0] += mem[j];
            arr[1] += mem[j + 1];
            arr[2] += mem[j + 2];
            arr[3] += mem[j + 3];
            arr[4] += mem[j + 4];
            arr[5] += mem[j + 5];
            arr[6] += mem[j + 6];
            arr[7] += mem[j + 7];
            shuffle();
            setState(j);
        }
        // Produce the first batch of results and reset the consumption counter.
        isaac();
        count = SIZE - 1;
        clear();
    }

    /** Shuffle array: Bob Jenkins' 8-word mixing round (order is significant). */
    private void shuffle() {
        arr[0] ^= arr[1] << 11;
        arr[3] += arr[0];
        arr[1] += arr[2];
        arr[1] ^= arr[2] >>> 2;
        arr[4] += arr[1];
        arr[2] += arr[3];
        arr[2] ^= arr[3] << 8;
        arr[5] += arr[2];
        arr[3] += arr[4];
        arr[3] ^= arr[4] >>> 16;
        arr[6] += arr[3];
        arr[4] += arr[5];
        arr[4] ^= arr[5] << 10;
        arr[7] += arr[4];
        arr[5] += arr[6];
        arr[5] ^= arr[6] >>> 4;
        arr[0] += arr[5];
        arr[6] += arr[7];
        arr[6] ^= arr[7] << 8;
        arr[1] += arr[6];
        arr[7] += arr[0];
        arr[7] ^= arr[0] >>> 9;
        arr[2] += arr[7];
        arr[0] += arr[1];
    }

    /** Set the state by copying the internal arrays.
     *
     * @param start First index into {@link #mem} array.
     */
    private void setState(int start) {
        mem[start] = arr[0];
        mem[start + 1] = arr[1];
        mem[start + 2] = arr[2];
        mem[start + 3] = arr[3];
        mem[start + 4] = arr[4];
        mem[start + 5] = arr[5];
        mem[start + 6] = arr[6];
        mem[start + 7] = arr[7];
    }
}
|
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.daemon;
import org.apache.commons.lang.StringUtils;
import org.apache.storm.Config;
import org.apache.storm.Constants;
import org.apache.storm.Thrift;
import org.apache.storm.cluster.IStormClusterState;
import org.apache.storm.generated.Bolt;
import org.apache.storm.generated.ComponentCommon;
import org.apache.storm.generated.GlobalStreamId;
import org.apache.storm.generated.Grouping;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.generated.NodeInfo;
import org.apache.storm.generated.SpoutSpec;
import org.apache.storm.generated.StateSpoutSpec;
import org.apache.storm.generated.StormBase;
import org.apache.storm.generated.StormTopology;
import org.apache.storm.generated.StreamInfo;
import org.apache.storm.metric.EventLoggerBolt;
import org.apache.storm.metric.MetricsConsumerBolt;
import org.apache.storm.metric.SystemBolt;
import org.apache.storm.metric.filter.FilterByMetricName;
import org.apache.storm.metric.util.DataPointExpander;
import org.apache.storm.security.auth.IAuthorizer;
import org.apache.storm.task.IBolt;
import org.apache.storm.task.WorkerTopologyContext;
import org.apache.storm.tuple.Fields;
import org.apache.storm.utils.ConfigUtils;
import org.apache.storm.utils.IPredicate;
import org.apache.storm.utils.ThriftTopologyUtils;
import org.apache.storm.utils.Utils;
import org.json.simple.JSONValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeMap;
/**
 * Shared topology-manipulation utilities used by the Storm daemons: topology
 * validation, injection of system components (acker, event logger, metrics
 * consumers, system bolt), and task/executor bookkeeping helpers.
 *
 * Fixes in this revision:
 * - removed a stray {@code +} inside the "non-existent fields" error message;
 * - {@code containsKey} instead of {@code keySet().contains};
 * - {@code !isEmpty()} instead of {@code size() != 0};
 * - dropped two always-true null guards in {@code mkAuthorizationHandlerImpl}
 *   ({@code Class.forName} and {@code newInstance} never return {@code null}).
 */
public class StormCommon {
    // A singleton instance allows us to mock delegated static methods in our
    // tests by subclassing.
    private static StormCommon _instance = new StormCommon();

    /**
     * Provide an instance of this class for delegates to use. To mock out
     * delegated methods, provide an instance of a subclass that overrides the
     * implementation of the delegated method.
     *
     * @param common a StormCommon instance
     * @return the previously set instance
     */
    public static StormCommon setInstance(StormCommon common) {
        StormCommon oldInstance = _instance;
        _instance = common;
        return oldInstance;
    }

    private static final Logger LOG = LoggerFactory.getLogger(StormCommon.class);

    public static final String SYSTEM_STREAM_ID = "__system";
    public static final String EVENTLOGGER_COMPONENT_ID = "__eventlogger";
    public static final String EVENTLOGGER_STREAM_ID = "__eventlog";

    // Keys of the per-consumer maps under Config.TOPOLOGY_METRICS_CONSUMER_REGISTER.
    public static final String TOPOLOGY_METRICS_CONSUMER_CLASS = "class";
    public static final String TOPOLOGY_METRICS_CONSUMER_ARGUMENT = "argument";
    public static final String TOPOLOGY_METRICS_CONSUMER_MAX_RETAIN_METRIC_TUPLES = "max.retain.metric.tuples";
    public static final String TOPOLOGY_METRICS_CONSUMER_PARALLELISM_HINT = "parallelism.hint";
    public static final String TOPOLOGY_METRICS_CONSUMER_WHITELIST = "whitelist";
    public static final String TOPOLOGY_METRICS_CONSUMER_BLACKLIST = "blacklist";
    public static final String TOPOLOGY_METRICS_CONSUMER_EXPAND_MAP_TYPE = "expandMapType";
    public static final String TOPOLOGY_METRICS_CONSUMER_METRIC_NAME_SEPARATOR = "metricNameSeparator";

    /**
     * Looks up the topology id registered under the given name.
     * NOTE(review): {@code Optional.get()} throws if the name is unknown —
     * presumably callers only pass names of running topologies; verify at call sites.
     *
     * @deprecated use the cluster-state API directly
     */
    @Deprecated
    public static String getStormId(final IStormClusterState stormClusterState, final String topologyName) {
        return stormClusterState.getTopoId(topologyName).get();
    }

    /** Rejects daemon startup when the configuration says we are in local mode. */
    public static void validateDistributedMode(Map conf) {
        if (ConfigUtils.isLocalMode(conf)) {
            throw new IllegalArgumentException("Cannot start server in local mode!");
        }
    }

    /**
     * Validates that no component or stream id uses a reserved (system) id and
     * that component ids are unique across spouts/bolts/state-spouts.
     */
    @SuppressWarnings("unchecked")
    private static void validateIds(StormTopology topology) throws InvalidTopologyException {
        List<String> componentIds = new ArrayList<>();
        for (StormTopology._Fields field : Thrift.getTopologyFields()) {
            if (!ThriftTopologyUtils.isWorkerHook(field) && !ThriftTopologyUtils.isDependencies(field)) {
                Object value = topology.getFieldValue(field);
                Map<String, Object> componentMap = (Map<String, Object>) value;
                componentIds.addAll(componentMap.keySet());
                for (String id : componentMap.keySet()) {
                    if (Utils.isSystemId(id)) {
                        throw new InvalidTopologyException(id + " is not a valid component id.");
                    }
                }
                for (Object componentObj : componentMap.values()) {
                    ComponentCommon common = getComponentCommon(componentObj);
                    Set<String> streamIds = common.get_streams().keySet();
                    for (String id : streamIds) {
                        if (Utils.isSystemId(id)) {
                            throw new InvalidTopologyException(id + " is not a valid stream id.");
                        }
                    }
                }
            }
        }
        // Duplicates across the spout/bolt/state-spout maps are also invalid.
        List<String> offending = Utils.getRepeat(componentIds);
        if (!offending.isEmpty()) {
            throw new InvalidTopologyException("Duplicate component ids: " + offending);
        }
    }

    /** @return true when the component declares no inputs (null or empty map). */
    private static boolean isEmptyInputs(ComponentCommon common) {
        if (common.get_inputs() == null) {
            return true;
        } else {
            return common.get_inputs().isEmpty();
        }
    }

    /**
     * Collects every component (spouts, bolts, state spouts) of the topology
     * into a single id-to-component map.
     */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> allComponents(StormTopology topology) {
        Map<String, Object> components = new HashMap<>();
        List<StormTopology._Fields> topologyFields = Arrays.asList(Thrift.getTopologyFields());
        for (StormTopology._Fields field : topologyFields) {
            if (!ThriftTopologyUtils.isWorkerHook(field) && !ThriftTopologyUtils.isDependencies(field)) {
                components.putAll(((Map) topology.getFieldValue(field)));
            }
        }
        return components;
    }

    /**
     * Parses the component's JSON configuration into a map; returns an empty
     * map when no JSON config is set.
     */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> componentConf(Object component) {
        try {
            Map<String, Object> conf = new HashMap<>();
            ComponentCommon common = getComponentCommon(component);
            String jconf = common.get_json_conf();
            if (jconf != null) {
                conf.putAll((Map<String, Object>) JSONValue.parseWithException(jconf));
            }
            return conf;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Basic topology validation: ids are legal and unique, spouts declare no
     * inputs, and TOPOLOGY_TASKS > 0 implies a positive parallelism hint.
     */
    @SuppressWarnings("unchecked")
    public static void validateBasic(StormTopology topology) throws InvalidTopologyException {
        validateIds(topology);

        for (StormTopology._Fields field : Thrift.getSpoutFields()) {
            Map<String, Object> spoutComponents = (Map<String, Object>) topology.getFieldValue(field);
            if (spoutComponents != null) {
                for (Object obj : spoutComponents.values()) {
                    ComponentCommon common = getComponentCommon(obj);
                    if (!isEmptyInputs(common)) {
                        throw new InvalidTopologyException("May not declare inputs for a spout");
                    }
                }
            }
        }

        Map<String, Object> componentMap = allComponents(topology);
        for (Object componentObj : componentMap.values()) {
            Map conf = componentConf(componentObj);
            ComponentCommon common = getComponentCommon(componentObj);
            int parallelismHintNum = Thrift.getParallelismHint(common);
            Integer taskNum = Utils.getInt(conf.get(Config.TOPOLOGY_TASKS), 0);
            if (taskNum > 0 && parallelismHintNum <= 0) {
                throw new InvalidTopologyException("Number of executors must be greater than 0 when number of tasks is greater than 0");
            }
        }
    }

    /** Union of all output fields declared over the given streams. */
    private static Set<String> getStreamOutputFields(Map<String, StreamInfo> streams) {
        Set<String> outputFields = new HashSet<>();
        for (StreamInfo streamInfo : streams.values()) {
            outputFields.addAll(streamInfo.get_output_fields());
        }
        return outputFields;
    }

    /**
     * Structural validation: every subscription must reference an existing
     * component and stream, and fields groupings must name existing fields.
     */
    public static void validateStructure(StormTopology topology) throws InvalidTopologyException {
        Map<String, Object> componentMap = allComponents(topology);
        for (Map.Entry<String, Object> entry : componentMap.entrySet()) {
            String componentId = entry.getKey();
            ComponentCommon common = getComponentCommon(entry.getValue());
            Map<GlobalStreamId, Grouping> inputs = common.get_inputs();
            for (Map.Entry<GlobalStreamId, Grouping> input : inputs.entrySet()) {
                String sourceStreamId = input.getKey().get_streamId();
                String sourceComponentId = input.getKey().get_componentId();
                // containsKey instead of keySet().contains (same semantics, direct lookup)
                if (!componentMap.containsKey(sourceComponentId)) {
                    throw new InvalidTopologyException("Component: [" + componentId +
                            "] subscribes from non-existent component [" + sourceComponentId + "]");
                }

                ComponentCommon sourceComponent = getComponentCommon(componentMap.get(sourceComponentId));
                if (!sourceComponent.get_streams().containsKey(sourceStreamId)) {
                    throw new InvalidTopologyException("Component: [" + componentId +
                            "] subscribes from non-existent stream: " +
                            "[" + sourceStreamId + "] of component [" + sourceComponentId + "]");
                }

                Grouping grouping = input.getValue();
                if (Thrift.groupingType(grouping) == Grouping._Fields.FIELDS) {
                    List<String> fields = new ArrayList<>(grouping.get_fields());
                    Map<String, StreamInfo> streams = sourceComponent.get_streams();
                    Set<String> sourceOutputFields = getStreamOutputFields(streams);
                    fields.removeAll(sourceOutputFields);
                    if (!fields.isEmpty()) {
                        // Fixed message: removed a stray "+" that leaked into the text.
                        throw new InvalidTopologyException("Component: [" + componentId +
                                "] subscribes from stream: [" + sourceStreamId + "] of component " +
                                "[" + sourceComponentId + "] with non-existent fields: " + fields);
                    }
                }
            }
        }
    }

    /**
     * Builds the acker's input map: init tuples from every spout, ack/fail/
     * reset-timeout tuples from every bolt, all fields-grouped on "id" so a
     * given tuple tree always reaches the same acker task.
     */
    public static Map<GlobalStreamId, Grouping> ackerInputs(StormTopology topology) {
        Map<GlobalStreamId, Grouping> inputs = new HashMap<>();
        Set<String> boltIds = topology.get_bolts().keySet();
        Set<String> spoutIds = topology.get_spouts().keySet();

        for (String id : spoutIds) {
            inputs.put(Utils.getGlobalStreamId(id, Acker.ACKER_INIT_STREAM_ID),
                    Thrift.prepareFieldsGrouping(Arrays.asList("id")));
        }

        for (String id : boltIds) {
            inputs.put(Utils.getGlobalStreamId(id, Acker.ACKER_ACK_STREAM_ID),
                    Thrift.prepareFieldsGrouping(Arrays.asList("id")));
            inputs.put(Utils.getGlobalStreamId(id, Acker.ACKER_FAIL_STREAM_ID),
                    Thrift.prepareFieldsGrouping(Arrays.asList("id")));
            inputs.put(Utils.getGlobalStreamId(id, Acker.ACKER_RESET_TIMEOUT_STREAM_ID),
                    Thrift.prepareFieldsGrouping(Arrays.asList("id")));
        }
        return inputs;
    }

    /** Delegates to the singleton so tests can mock acker-bolt creation. */
    public static IBolt makeAckerBolt() {
        return _instance.makeAckerBoltImpl();
    }

    public IBolt makeAckerBoltImpl() {
        return new Acker();
    }

    /**
     * Injects the acker bolt into the topology and wires every spout and bolt
     * to it (streams on the components, direct-grouped inputs on the spouts).
     */
    @SuppressWarnings("unchecked")
    public static void addAcker(Map conf, StormTopology topology) {
        // Default number of ackers is the number of workers.
        int ackerNum = Utils.getInt(conf.get(Config.TOPOLOGY_ACKER_EXECUTORS), Utils.getInt(conf.get(Config.TOPOLOGY_WORKERS)));
        Map<GlobalStreamId, Grouping> inputs = ackerInputs(topology);

        Map<String, StreamInfo> outputStreams = new HashMap<String, StreamInfo>();
        outputStreams.put(Acker.ACKER_ACK_STREAM_ID, Thrift.directOutputFields(Arrays.asList("id", "time-delta-ms")));
        outputStreams.put(Acker.ACKER_FAIL_STREAM_ID, Thrift.directOutputFields(Arrays.asList("id", "time-delta-ms")));
        outputStreams.put(Acker.ACKER_RESET_TIMEOUT_STREAM_ID, Thrift.directOutputFields(Arrays.asList("id", "time-delta-ms")));

        Map<String, Object> ackerConf = new HashMap<>();
        ackerConf.put(Config.TOPOLOGY_TASKS, ackerNum);
        ackerConf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, Utils.getInt(conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)));

        Bolt acker = Thrift.prepareSerializedBoltDetails(inputs, makeAckerBolt(), outputStreams, ackerNum, ackerConf);

        // Every bolt emits ack/fail/reset-timeout info toward the acker.
        for (Bolt bolt : topology.get_bolts().values()) {
            ComponentCommon common = bolt.get_common();
            common.put_to_streams(Acker.ACKER_ACK_STREAM_ID, Thrift.outputFields(Arrays.asList("id", "ack-val")));
            common.put_to_streams(Acker.ACKER_FAIL_STREAM_ID, Thrift.outputFields(Arrays.asList("id")));
            common.put_to_streams(Acker.ACKER_RESET_TIMEOUT_STREAM_ID, Thrift.outputFields(Arrays.asList("id")));
        }

        // Spouts emit init tuples and receive direct acker feedback; they also
        // get a tick-tuple frequency equal to the message timeout.
        for (SpoutSpec spout : topology.get_spouts().values()) {
            ComponentCommon common = spout.get_common();
            Map spoutConf = componentConf(spout);
            spoutConf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS,
                    Utils.getInt(conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)));
            common.set_json_conf(JSONValue.toJSONString(spoutConf));
            common.put_to_streams(Acker.ACKER_INIT_STREAM_ID,
                    Thrift.outputFields(Arrays.asList("id", "init-val", "spout-task")));
            common.put_to_inputs(Utils.getGlobalStreamId(Acker.ACKER_COMPONENT_ID, Acker.ACKER_ACK_STREAM_ID),
                    Thrift.prepareDirectGrouping());
            common.put_to_inputs(Utils.getGlobalStreamId(Acker.ACKER_COMPONENT_ID, Acker.ACKER_FAIL_STREAM_ID),
                    Thrift.prepareDirectGrouping());
            common.put_to_inputs(Utils.getGlobalStreamId(Acker.ACKER_COMPONENT_ID, Acker.ACKER_RESET_TIMEOUT_STREAM_ID),
                    Thrift.prepareDirectGrouping());
        }

        topology.put_to_bolts(Acker.ACKER_COMPONENT_ID, acker);
    }

    /**
     * Extracts the {@link ComponentCommon} from any of the three component
     * kinds; returns {@code null} for an unrecognized object.
     */
    public static ComponentCommon getComponentCommon(Object component) {
        ComponentCommon common = null;
        if (component instanceof StateSpoutSpec) {
            common = ((StateSpoutSpec) component).get_common();
        } else if (component instanceof SpoutSpec) {
            common = ((SpoutSpec) component).get_common();
        } else if (component instanceof Bolt) {
            common = ((Bolt) component).get_common();
        }
        return common;
    }

    /** Adds the metrics stream declaration to every component. */
    public static void addMetricStreams(StormTopology topology) {
        for (Object component : allComponents(topology).values()) {
            ComponentCommon common = getComponentCommon(component);
            StreamInfo streamInfo = Thrift.outputFields(Arrays.asList("task-info", "data-points"));
            common.put_to_streams(Constants.METRICS_STREAM_ID, streamInfo);
        }
    }

    /** Adds the system stream declaration to every component. */
    public static void addSystemStreams(StormTopology topology) {
        for (Object component : allComponents(topology).values()) {
            ComponentCommon common = getComponentCommon(component);
            StreamInfo streamInfo = Thrift.outputFields(Arrays.asList("event"));
            common.put_to_streams(SYSTEM_STREAM_ID, streamInfo);
        }
    }

    /** Output fields of the event-logger stream. */
    public static List<String> eventLoggerBoltFields() {
        return Arrays.asList(EventLoggerBolt.FIELD_COMPONENT_ID, EventLoggerBolt.FIELD_MESSAGE_ID,
                EventLoggerBolt.FIELD_TS, EventLoggerBolt.FIELD_VALUES);
    }

    /**
     * Builds the event-logger's input map: the event stream of every spout and
     * bolt, fields-grouped on "component-id".
     */
    public static Map<GlobalStreamId, Grouping> eventLoggerInputs(StormTopology topology) {
        Map<GlobalStreamId, Grouping> inputs = new HashMap<GlobalStreamId, Grouping>();
        Set<String> allIds = new HashSet<String>();
        allIds.addAll(topology.get_bolts().keySet());
        allIds.addAll(topology.get_spouts().keySet());

        for (String id : allIds) {
            inputs.put(Utils.getGlobalStreamId(id, EVENTLOGGER_STREAM_ID),
                    Thrift.prepareFieldsGrouping(Arrays.asList("component-id")));
        }
        return inputs;
    }

    /** Injects the event-logger bolt and declares its stream on every component. */
    public static void addEventLogger(Map conf, StormTopology topology) {
        Integer numExecutors = Utils.getInt(conf.get(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS),
                Utils.getInt(conf.get(Config.TOPOLOGY_WORKERS)));
        HashMap<String, Object> componentConf = new HashMap<>();
        componentConf.put(Config.TOPOLOGY_TASKS, numExecutors);
        componentConf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, Utils.getInt(conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)));
        Bolt eventLoggerBolt = Thrift.prepareSerializedBoltDetails(
                eventLoggerInputs(topology), new EventLoggerBolt(), null, numExecutors, componentConf);

        for (Object component : allComponents(topology).values()) {
            ComponentCommon common = getComponentCommon(component);
            common.put_to_streams(EVENTLOGGER_STREAM_ID, Thrift.outputFields(eventLoggerBoltFields()));
        }
        topology.put_to_bolts(EVENTLOGGER_COMPONENT_ID, eventLoggerBolt);
    }

    /**
     * Builds one metrics-consumer bolt per entry registered under
     * {@code TOPOLOGY_METRICS_CONSUMER_REGISTER}. Repeated consumer classes get
     * a "#n" suffix so bolt ids stay unique.
     */
    @SuppressWarnings("unchecked")
    public static Map<String, Bolt> metricsConsumerBoltSpecs(Map conf, StormTopology topology) {
        Map<String, Bolt> metricsConsumerBolts = new HashMap<>();

        Set<String> componentIdsEmitMetrics = new HashSet<>();
        componentIdsEmitMetrics.addAll(allComponents(topology).keySet());
        componentIdsEmitMetrics.add(Constants.SYSTEM_COMPONENT_ID);

        Map<GlobalStreamId, Grouping> inputs = new HashMap<>();
        for (String componentId : componentIdsEmitMetrics) {
            inputs.put(Utils.getGlobalStreamId(componentId, Constants.METRICS_STREAM_ID), Thrift.prepareShuffleGrouping());
        }

        List<Map<String, Object>> registerInfo = (List<Map<String, Object>>) conf.get(Config.TOPOLOGY_METRICS_CONSUMER_REGISTER);
        if (registerInfo != null) {
            Map<String, Integer> classOccurrencesMap = new HashMap<String, Integer>();
            for (Map<String, Object> info : registerInfo) {
                String className = (String) info.get(TOPOLOGY_METRICS_CONSUMER_CLASS);
                Object argument = info.get(TOPOLOGY_METRICS_CONSUMER_ARGUMENT);
                Integer maxRetainMetricTuples = Utils.getInt(info.get(
                        TOPOLOGY_METRICS_CONSUMER_MAX_RETAIN_METRIC_TUPLES), 100);
                Integer phintNum = Utils.getInt(info.get(TOPOLOGY_METRICS_CONSUMER_PARALLELISM_HINT), 1);
                Map<String, Object> metricsConsumerConf = new HashMap<String, Object>();
                metricsConsumerConf.put(Config.TOPOLOGY_TASKS, phintNum);
                List<String> whitelist = (List<String>) info.get(
                        TOPOLOGY_METRICS_CONSUMER_WHITELIST);
                List<String> blacklist = (List<String>) info.get(
                        TOPOLOGY_METRICS_CONSUMER_BLACKLIST);
                FilterByMetricName filterPredicate = new FilterByMetricName(whitelist, blacklist);
                Boolean expandMapType = Utils.getBoolean(info.get(
                        TOPOLOGY_METRICS_CONSUMER_EXPAND_MAP_TYPE), false);
                String metricNameSeparator = Utils.getString(info.get(
                        TOPOLOGY_METRICS_CONSUMER_METRIC_NAME_SEPARATOR), ".");
                DataPointExpander expander = new DataPointExpander(expandMapType, metricNameSeparator);
                MetricsConsumerBolt boltInstance = new MetricsConsumerBolt(className, argument,
                        maxRetainMetricTuples, filterPredicate, expander);
                Bolt metricsConsumerBolt = Thrift.prepareSerializedBoltDetails(inputs,
                        boltInstance, null, phintNum, metricsConsumerConf);

                String id = className;
                if (classOccurrencesMap.containsKey(className)) {
                    // e.g. [\"a\", \"b\", \"a\"]) => [\"a\", \"b\", \"a#2\"]"
                    int occurrenceNum = classOccurrencesMap.get(className);
                    occurrenceNum++;
                    classOccurrencesMap.put(className, occurrenceNum);
                    id = Constants.METRICS_COMPONENT_ID_PREFIX + className + "#" + occurrenceNum;
                } else {
                    classOccurrencesMap.put(className, 1);
                }
                metricsConsumerBolts.put(id, metricsConsumerBolt);
            }
        }
        return metricsConsumerBolts;
    }

    /** Injects all configured metrics-consumer bolts into the topology. */
    public static void addMetricComponents(Map conf, StormTopology topology) {
        Map<String, Bolt> metricsConsumerBolts = metricsConsumerBoltSpecs(conf, topology);
        for (Map.Entry<String, Bolt> entry : metricsConsumerBolts.entrySet()) {
            topology.put_to_bolts(entry.getKey(), entry.getValue());
        }
    }

    /** Injects the system bolt (tick, metrics-tick and credentials streams). */
    @SuppressWarnings("unused")
    public static void addSystemComponents(Map conf, StormTopology topology) {
        Map<String, StreamInfo> outputStreams = new HashMap<>();
        outputStreams.put(Constants.SYSTEM_TICK_STREAM_ID, Thrift.outputFields(Arrays.asList("rate_secs")));
        outputStreams.put(Constants.METRICS_TICK_STREAM_ID, Thrift.outputFields(Arrays.asList("interval")));
        outputStreams.put(Constants.CREDENTIALS_CHANGED_STREAM_ID, Thrift.outputFields(Arrays.asList("creds")));

        Map<String, Object> boltConf = new HashMap<>();
        boltConf.put(Config.TOPOLOGY_TASKS, 0); // system bolt runs per-worker, not as tasks

        Bolt systemBoltSpec = Thrift.prepareSerializedBoltDetails(null, new SystemBolt(), outputStreams, 0, boltConf);
        topology.put_to_bolts(Constants.SYSTEM_COMPONENT_ID, systemBoltSpec);
    }

    /** Delegates to the singleton so tests can mock system-topology creation. */
    public static StormTopology systemTopology(Map stormConf, StormTopology topology) throws InvalidTopologyException {
        return _instance.systemTopologyImpl(stormConf, topology);
    }

    /**
     * Produces the "system topology": a deep copy of the user topology with the
     * acker, event logger (if enabled), metrics consumers, system bolt and
     * system/metric streams injected, then structurally validated.
     */
    protected StormTopology systemTopologyImpl(Map stormConf, StormTopology topology) throws InvalidTopologyException {
        validateBasic(topology);

        StormTopology ret = topology.deepCopy();
        addAcker(stormConf, ret);
        if (hasEventLoggers(stormConf)) {
            addEventLogger(stormConf, ret);
        }
        addMetricComponents(stormConf, ret);
        addSystemComponents(stormConf, ret);
        addMetricStreams(ret);
        addSystemStreams(ret);

        validateStructure(ret);
        return ret;
    }

    /** Ackers are on by default; an explicit executor count of 0 disables them. */
    public static boolean hasAckers(Map stormConf) {
        Object ackerNum = stormConf.get(Config.TOPOLOGY_ACKER_EXECUTORS);
        return ackerNum == null || Utils.getInt(ackerNum) > 0;
    }

    /** Event loggers are on by default; an explicit executor count of 0 disables them. */
    public static boolean hasEventLoggers(Map stormConf) {
        Object eventLoggerNum = stormConf.get(Config.TOPOLOGY_EVENTLOGGER_EXECUTORS);
        return eventLoggerNum == null || Utils.getInt(eventLoggerNum) > 0;
    }

    /** Number of executors a component starts with (its parallelism hint). */
    public static int numStartExecutors(Object component) throws InvalidTopologyException {
        ComponentCommon common = getComponentCommon(component);
        return Thrift.getParallelismHint(common);
    }

    /** Delegates to the singleton so tests can mock task-info computation. */
    public static Map<Integer, String> stormTaskInfo(StormTopology userTopology, Map stormConf) throws InvalidTopologyException {
        return _instance.stormTaskInfoImpl(userTopology, stormConf);
    }

    /*
     * Returns map from task -> componentId
     */
    protected Map<Integer, String> stormTaskInfoImpl(StormTopology userTopology, Map stormConf) throws InvalidTopologyException {
        Map<Integer, String> taskIdToComponentId = new HashMap<>();

        StormTopology systemTopology = systemTopology(stormConf, userTopology);
        Map<String, Object> components = allComponents(systemTopology);
        // TreeMap keeps component ids sorted so task ids are assigned deterministically.
        Map<String, Integer> componentIdToTaskNum = new TreeMap<>();
        for (Map.Entry<String, Object> entry : components.entrySet()) {
            Map conf = componentConf(entry.getValue());
            Object taskNum = conf.get(Config.TOPOLOGY_TASKS);
            componentIdToTaskNum.put(entry.getKey(), Utils.getInt(taskNum));
        }

        // Task ids are 1-based and assigned contiguously per component.
        int taskId = 1;
        for (Map.Entry<String, Integer> entry : componentIdToTaskNum.entrySet()) {
            String componentId = entry.getKey();
            Integer taskNum = entry.getValue();
            while (taskNum > 0) {
                taskIdToComponentId.put(taskId, componentId);
                taskNum--;
                taskId++;
            }
        }
        return taskIdToComponentId;
    }

    /**
     * Expands an executor id (inclusive [startTask, endTask] pair) into the
     * list of task ids it covers.
     */
    public static List<Integer> executorIdToTasks(List<Long> executorId) {
        List<Integer> taskIds = new ArrayList<>();
        int taskId = executorId.get(0).intValue();
        while (taskId <= executorId.get(1).intValue()) {
            taskIds.add(taskId);
            taskId++;
        }
        return taskIds;
    }

    /** Flattens an executor-to-node/port map into a task-to-node/port map. */
    public static Map<Integer, NodeInfo> taskToNodeport(Map<List<Long>, NodeInfo> executorToNodePort) {
        Map<Integer, NodeInfo> tasksToNodePort = new HashMap<>();
        for (Map.Entry<List<Long>, NodeInfo> entry : executorToNodePort.entrySet()) {
            List<Integer> taskIds = executorIdToTasks(entry.getKey());
            for (Integer taskId : taskIds) {
                tasksToNodePort.put(taskId, entry.getValue());
            }
        }
        return tasksToNodePort;
    }

    /** Delegates to the singleton so tests can mock authorizer construction. */
    public static IAuthorizer mkAuthorizationHandler(String klassName, Map conf)
            throws IllegalAccessException, InstantiationException, ClassNotFoundException {
        return _instance.mkAuthorizationHandlerImpl(klassName, conf);
    }

    /**
     * Instantiates and prepares the named {@link IAuthorizer}; returns
     * {@code null} when no class name is configured.
     */
    protected IAuthorizer mkAuthorizationHandlerImpl(String klassName, Map conf)
            throws ClassNotFoundException, IllegalAccessException, InstantiationException {
        IAuthorizer aznHandler = null;
        if (StringUtils.isNotBlank(klassName)) {
            // Class.forName and newInstance throw rather than return null, so
            // the former null guards here were dead code.
            Class<?> aznClass = Class.forName(klassName);
            aznHandler = (IAuthorizer) aznClass.newInstance();
            aznHandler.prepare(conf);
            LOG.debug("authorization class name:{}, class:{}, handler:{}", klassName, aznClass, aznHandler);
        }
        return aznHandler;
    }

    /**
     * Reconstructs a {@link WorkerTopologyContext} from the worker-data map
     * populated at worker startup.
     */
    @SuppressWarnings("unchecked")
    public static WorkerTopologyContext makeWorkerContext(Map<String, Object> workerData) {
        try {
            StormTopology stormTopology = (StormTopology) workerData.get(Constants.SYSTEM_TOPOLOGY);
            Map stormConf = (Map) workerData.get(Constants.STORM_CONF);
            Map<Integer, String> taskToComponent = (Map<Integer, String>) workerData.get(Constants.TASK_TO_COMPONENT);
            Map<String, List<Integer>> componentToSortedTasks =
                    (Map<String, List<Integer>>) workerData.get(Constants.COMPONENT_TO_SORTED_TASKS);
            Map<String, Map<String, Fields>> componentToStreamToFields =
                    (Map<String, Map<String, Fields>>) workerData.get(Constants.COMPONENT_TO_STREAM_TO_FIELDS);
            String stormId = (String) workerData.get(Constants.STORM_ID);
            Map conf = (Map) workerData.get(Constants.CONF);
            Integer port = (Integer) workerData.get(Constants.PORT);
            String codeDir = ConfigUtils.supervisorStormResourcesPath(ConfigUtils.supervisorStormDistRoot(conf, stormId));
            String pidDir = ConfigUtils.workerPidsRoot(conf, stormId);
            List<Integer> workerTasks = (List<Integer>) workerData.get(Constants.TASK_IDS);
            Map<String, Object> defaultResources = (Map<String, Object>) workerData.get(Constants.DEFAULT_SHARED_RESOURCES);
            Map<String, Object> userResources = (Map<String, Object>) workerData.get(Constants.USER_SHARED_RESOURCES);
            return new WorkerTopologyContext(stormTopology, stormConf, taskToComponent, componentToSortedTasks,
                    componentToStreamToFields, stormId, codeDir, pidDir, port, workerTasks, defaultResources, userResources);
        } catch (IOException e) {
            throw Utils.wrapInRuntime(e);
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.state.internals;
import org.apache.kafka.common.Metric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.streams.state.WindowStore;
import org.apache.kafka.test.InternalMockProcessorContext;
import org.apache.kafka.test.MockRecordCollector;
import org.apache.kafka.test.StreamsTestUtils;
import org.apache.kafka.test.TestUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static java.time.Instant.ofEpochMilli;
import static java.util.Collections.singletonMap;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.ROLLUP_VALUE;
import static org.apache.kafka.test.StreamsTestUtils.getMetricByNameFilterByTags;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createNiceMock;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
/**
 * Tests for {@code MeteredWindowStore}: verifies that the metered wrapper
 * registers the expected store-level metrics/MBeans and records counts for
 * init (restore), put, fetch and flush, and that metrics are removed on close.
 * Parameterized over both built-in metrics versions
 * ({@code StreamsConfig.METRICS_LATEST} and {@code StreamsConfig.METRICS_0100_TO_24}),
 * whose metric group and thread tag names differ.
 */
@RunWith(Parameterized.class)
public class MeteredWindowStoreTest {
// Group/tag naming differs between the legacy (0.100-2.4) and latest metrics versions.
private static final String STORE_TYPE = "scope";
private static final String STORE_LEVEL_GROUP_FROM_0100_TO_24 = "stream-" + STORE_TYPE + "-state-metrics";
private static final String STORE_LEVEL_GROUP = "stream-state-metrics";
private static final String THREAD_ID_TAG_KEY_FROM_0100_TO_24 = "client-id";
private static final String THREAD_ID_TAG_KEY = "thread-id";
private static final String STORE_NAME = "mocked-store";
private final String threadId = Thread.currentThread().getName();
private InternalMockProcessorContext context;
@SuppressWarnings("unchecked")
private final WindowStore<Bytes, byte[]> innerStoreMock = createNiceMock(WindowStore.class);
// Store under test: wraps the EasyMock inner store with metrics recording.
private final MeteredWindowStore<String, String> store = new MeteredWindowStore<>(
innerStoreMock,
10L, // any size
STORE_TYPE,
new MockTime(),
Serdes.String(),
new SerdeThatDoesntHandleNull()
);
// DEBUG recording level so that debug-level sensors are registered as well.
private final Metrics metrics = new Metrics(new MetricConfig().recordLevel(Sensor.RecordingLevel.DEBUG));
// Resolved in setUp() according to the metrics version under test.
private String storeLevelGroup;
private String threadIdTagKey;
private Map<String, String> tags;
// Instance initializer: the metered wrapper queries the inner store's name,
// so stub name() before each test calls replay().
{
expect(innerStoreMock.name()).andReturn(STORE_NAME).anyTimes();
}
// Runs every test once per built-in metrics version.
@Parameters(name = "{0}")
public static Collection<Object[]> data() {
return Arrays.asList(new Object[][] {
{StreamsConfig.METRICS_LATEST},
{StreamsConfig.METRICS_0100_TO_24}
});
}
@Parameter
public String builtInMetricsVersion;
// Builds the mock processor context and resolves the version-dependent
// metric group / tag key used by the assertions below.
@Before
public void setUp() {
final StreamsMetricsImpl streamsMetrics =
new StreamsMetricsImpl(metrics, "test", builtInMetricsVersion);
context = new InternalMockProcessorContext(
TestUtils.tempDirectory(),
Serdes.String(),
Serdes.Long(),
streamsMetrics,
new StreamsConfig(StreamsTestUtils.getStreamsConfig()),
MockRecordCollector::new,
new ThreadCache(new LogContext("testCache "), 0, streamsMetrics)
);
storeLevelGroup =
StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? STORE_LEVEL_GROUP_FROM_0100_TO_24 : STORE_LEVEL_GROUP;
threadIdTagKey =
StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion) ? THREAD_ID_TAG_KEY_FROM_0100_TO_24 : THREAD_ID_TAG_KEY;
tags = mkMap(
mkEntry(threadIdTagKey, threadId),
mkEntry("task-id", context.taskId().toString()),
mkEntry(STORE_TYPE + "-state-id", STORE_NAME)
);
}
// init() should register the per-store MBean; the legacy version additionally
// registers a rollup ("all") MBean.
@Test
public void testMetrics() {
replay(innerStoreMock);
store.init(context, store);
final JmxReporter reporter = new JmxReporter("kafka.streams");
metrics.addReporter(reporter);
assertTrue(reporter.containsMbean(String.format(
"kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
storeLevelGroup,
threadIdTagKey,
threadId,
context.taskId().toString(),
STORE_TYPE,
STORE_NAME
)));
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
assertTrue(reporter.containsMbean(String.format(
"kafka.streams:type=%s,%s=%s,task-id=%s,%s-state-id=%s",
storeLevelGroup,
threadIdTagKey,
threadId,
context.taskId().toString(),
STORE_TYPE,
ROLLUP_VALUE
)));
}
}
// init() counts as one restore on the legacy metrics version.
@Test
public void shouldRecordRestoreLatencyOnInit() {
innerStoreMock.init(context, store);
expectLastCall();
replay(innerStoreMock);
store.init(context, store);
final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"restore-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", STORE_NAME)
).metricValue());
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"restore-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", ROLLUP_VALUE)
).metricValue());
}
}
// put() must delegate (with the serialized key and context timestamp) and
// bump put-total on the legacy metrics version.
@Test
@SuppressWarnings("deprecation")
public void shouldRecordPutLatency() {
final byte[] bytes = "a".getBytes();
innerStoreMock.put(eq(Bytes.wrap(bytes)), anyObject(), eq(context.timestamp()));
expectLastCall();
replay(innerStoreMock);
store.init(context, store);
store.put("a", "a");
final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"put-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", STORE_NAME)
).metricValue());
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"put-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", ROLLUP_VALUE)
).metricValue());
}
verify(innerStoreMock);
}
// Single-key fetch: the metric is recorded when the returned iterator closes.
@Test
public void shouldRecordFetchLatency() {
expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), 1, 1)).andReturn(KeyValueIterators.<byte[]>emptyWindowStoreIterator());
replay(innerStoreMock);
store.init(context, store);
store.fetch("a", ofEpochMilli(1), ofEpochMilli(1)).close(); // recorded on close;
final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"fetch-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", STORE_NAME)
).metricValue());
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"fetch-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", ROLLUP_VALUE)
).metricValue());
}
verify(innerStoreMock);
}
// Range fetch: same fetch-total sensor as the single-key variant.
@Test
public void shouldRecordFetchRangeLatency() {
expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), Bytes.wrap("b".getBytes()), 1, 1)).andReturn(KeyValueIterators.<Windowed<Bytes>, byte[]>emptyIterator());
replay(innerStoreMock);
store.init(context, store);
store.fetch("a", "b", ofEpochMilli(1), ofEpochMilli(1)).close(); // recorded on close;
final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"fetch-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", STORE_NAME)
).metricValue());
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"fetch-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", ROLLUP_VALUE)
).metricValue());
}
verify(innerStoreMock);
}
// flush() must delegate and bump flush-total on the legacy metrics version.
@Test
public void shouldRecordFlushLatency() {
innerStoreMock.flush();
expectLastCall();
replay(innerStoreMock);
store.init(context, store);
store.flush();
final Map<MetricName, ? extends Metric> metrics = context.metrics().metrics();
if (StreamsConfig.METRICS_0100_TO_24.equals(builtInMetricsVersion)) {
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"flush-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", STORE_NAME)
).metricValue());
assertEquals(1.0, getMetricByNameFilterByTags(
metrics,
"flush-total",
storeLevelGroup,
singletonMap(STORE_TYPE + "-state-id", ROLLUP_VALUE)
).metricValue());
}
verify(innerStoreMock);
}
// A null from the inner store must propagate as null, not NPE during deserialization.
@Test
public void shouldNotThrowNullPointerExceptionIfFetchReturnsNull() {
expect(innerStoreMock.fetch(Bytes.wrap("a".getBytes()), 0)).andReturn(null);
replay(innerStoreMock);
store.init(context, store);
assertNull(store.fetch("a", 0));
}
// Combines the two roles needed to test flush-listener forwarding.
private interface CachedWindowStore extends WindowStore<Bytes, byte[]>, CachedStateStore<byte[], byte[]> { }
// setFlushListener must be forwarded when the wrapped store supports caching...
@SuppressWarnings("unchecked")
@Test
public void shouldSetFlushListenerOnWrappedCachingStore() {
final CachedWindowStore cachedWindowStore = mock(CachedWindowStore.class);
expect(cachedWindowStore.setFlushListener(anyObject(CacheFlushListener.class), eq(false))).andReturn(true);
replay(cachedWindowStore);
final MeteredWindowStore<String, String> metered = new MeteredWindowStore<>(
cachedWindowStore,
10L, // any size
STORE_TYPE,
new MockTime(),
Serdes.String(),
new SerdeThatDoesntHandleNull()
);
assertTrue(metered.setFlushListener(null, false));
verify(cachedWindowStore);
}
// ...and must report false when it does not.
@Test
public void shouldNotSetFlushListenerOnWrappedNoneCachingStore() {
assertFalse(store.setFlushListener(null, false));
}
// close() must delegate to the wrapped store.
@Test
public void shouldCloseUnderlyingStore() {
innerStoreMock.close();
expectLastCall();
replay(innerStoreMock);
store.init(context, store);
store.close();
verify(innerStoreMock);
}
// close() must deregister all store-level metrics.
@Test
public void shouldRemoveMetricsOnClose() {
innerStoreMock.close();
expectLastCall();
replay(innerStoreMock);
store.init(context, store);
assertThat(storeMetrics(), not(empty()));
store.close();
assertThat(storeMetrics(), empty());
verify(innerStoreMock);
}
// Metrics must be removed even when the wrapped store's close() throws.
@Test
public void shouldRemoveMetricsEvenIfWrappedStoreThrowsOnClose() {
innerStoreMock.close();
expectLastCall().andThrow(new RuntimeException("Oops!"));
replay(innerStoreMock);
store.init(context, store);
// There's always a "count" metric registered
assertThat(storeMetrics(), not(empty()));
assertThrows(RuntimeException.class, store::close);
assertThat(storeMetrics(), empty());
verify(innerStoreMock);
}
// All currently-registered metric names belonging to this store
// (matched by the version-specific group and the tag set built in setUp()).
private List<MetricName> storeMetrics() {
return metrics.metrics()
.keySet()
.stream()
.filter(name -> name.group().equals(storeLevelGroup) && name.tags().equals(tags))
.collect(Collectors.toList());
}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.glaf.core.base;
import com.alibaba.fastjson.JSONObject;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.glaf.core.domain.util.ConnectionDefinitionJsonFactory;
/**
 * Describes a database connection: provider, JDBC driver, URL, credentials
 * and any extra driver properties.
 *
 * <p>Identity contract: {@link #equals(Object)} and {@link #hashCode()} are
 * based solely on {@code name} — two definitions with the same name are
 * treated as the same connection, regardless of their other fields.</p>
 */
public class ConnectionDefinition implements java.io.Serializable, JSONable {
    private static final long serialVersionUID = 1L;

    protected String provider;
    protected String type;
    protected String name;
    protected String subject;
    protected String datasource;
    protected String database;
    protected String host;
    protected int port;
    protected String driver;
    protected String url;
    protected String user;
    protected String password;
    protected String attribute;
    protected boolean autoCommit;
    // Extra driver-specific settings passed through to the JDBC driver.
    protected java.util.Properties properties;

    public ConnectionDefinition() {
    }

    /**
     * Equality is based on {@code name} only (null-safe); all other fields
     * are ignored. Consistent with {@link #hashCode()}.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        ConnectionDefinition other = (ConnectionDefinition) obj;
        // Null-safe comparison of the single identity field.
        return name == null ? other.name == null : name.equals(other.name);
    }

    public String getAttribute() {
        return attribute;
    }

    public String getDatabase() {
        return database;
    }

    public String getDatasource() {
        return datasource;
    }

    public String getDriver() {
        return driver;
    }

    public String getHost() {
        return host;
    }

    public String getName() {
        return name;
    }

    public String getPassword() {
        return password;
    }

    public int getPort() {
        return port;
    }

    public java.util.Properties getProperties() {
        return properties;
    }

    public String getProvider() {
        return provider;
    }

    public String getSubject() {
        return subject;
    }

    public String getType() {
        return type;
    }

    public String getUrl() {
        return url;
    }

    public String getUser() {
        return user;
    }

    /**
     * Hash code derived from {@code name} only, matching
     * {@link #equals(Object)}.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((name == null) ? 0 : name.hashCode());
        return result;
    }

    public boolean isAutoCommit() {
        return autoCommit;
    }

    /** Builds a definition from its JSON representation (delegates to the factory). */
    public ConnectionDefinition jsonToObject(JSONObject jsonObject) {
        return ConnectionDefinitionJsonFactory.jsonToObject(jsonObject);
    }

    public void setAttribute(String attribute) {
        this.attribute = attribute;
    }

    public void setAutoCommit(boolean autoCommit) {
        this.autoCommit = autoCommit;
    }

    public void setDatabase(String database) {
        this.database = database;
    }

    public void setDatasource(String datasource) {
        this.datasource = datasource;
    }

    public void setDriver(String driver) {
        this.driver = driver;
    }

    public void setHost(String host) {
        this.host = host;
    }

    public void setName(String name) {
        this.name = name;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public void setPort(int port) {
        this.port = port;
    }

    public void setProperties(java.util.Properties properties) {
        this.properties = properties;
    }

    public void setProvider(String provider) {
        this.provider = provider;
    }

    public void setSubject(String subject) {
        this.subject = subject;
    }

    public void setType(String type) {
        this.type = type;
    }

    public void setUrl(String url) {
        this.url = url;
    }

    public void setUser(String user) {
        this.user = user;
    }

    /** Serializes this definition to fastjson (delegates to the factory). */
    public JSONObject toJsonObject() {
        return ConnectionDefinitionJsonFactory.toJsonObject(this);
    }

    /** Serializes this definition to a Jackson tree (delegates to the factory). */
    public ObjectNode toObjectNode() {
        return ConnectionDefinitionJsonFactory.toObjectNode(this);
    }

    /**
     * Human-readable summary. Deliberately excludes the password so the
     * definition can be logged without leaking credentials.
     */
    @Override
    public String toString() {
        return "ConnectionDefinition [name=" + name + ", driver=" + driver
                + ", url=" + url + ", user=" + user + "]";
    }
}
|
|
package water.parser;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.io.orc.OrcFile;
import org.apache.hadoop.hive.ql.io.orc.Reader;
import org.apache.hadoop.hive.ql.io.orc.RecordReader;
import org.apache.hadoop.hive.ql.io.orc.StripeInformation;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.joda.time.DateTime;
import org.junit.Ignore;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.Log;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static water.parser.orc.OrcUtil.isSupportedSchema;
/**
 * ORC testing support methods.
 *
 * Note: Separate ORC-specific logic from tests.
 * This is necessary to avoid classloading of ORC-classes during loading ORC tests.
 */
@Ignore("Support for ORC tests, but no actual tests here")
public class OrcTestUtils {
    /** Tolerance used when comparing floating point values. */
    private static final double EPSILON = 1e-9;
    /** Error margin (in ms) when comparing timestamps. */
    private static final long ERRORMARGIN = 1000L;
    /** Milliseconds in one day. */
    static final int DAY_TO_MS = 24 * 3600 * 1000;
    /** Fixed offset (8 h in ms) added when converting ORC day counts to timestamps. */
    static final int ADD_OFFSET = 8 * 3600 * 1000;
    /** One hour in ms, used to round away leap-second/timezone artifacts. */
    static final int HOUR_OFFSET = 3600000;

    /**
     * Reads {@code f} both with the native ORC reader and with H2O's parser
     * and compares the two results.
     *
     * @param fileName    name used for failure reporting
     * @param f           the ORC file on disk
     * @param failedFiles collector of file names that had at least one failure
     * @return number of failed per-stripe comparisons
     * @throws IOException if the ORC reader cannot open the file
     */
    static int compareOrcAndH2OFrame(String fileName, File f, Set<String> failedFiles) throws IOException {
        Frame h2oFrame = null;
        try {
            Configuration conf = new Configuration();
            Path p = new Path(f.toString());
            Reader orcFileReader = OrcFile.createReader(p, OrcFile.readerOptions(conf));
            h2oFrame = water.TestUtil.parse_test_file(f.toString());
            return compareH2OFrame(fileName, failedFiles, h2oFrame, orcFileReader);
        } finally {
            // Always release the H2O frame, even when a comparison throws.
            if (h2oFrame != null) h2oFrame.delete();
        }
    }

    /**
     * This method will take one H2O frame generated by the Orc parser and the fileName of the Orc file
     * and attempt to compare the content of the Orc file to the H2O frame. In particular, the following
     * are compared:
     * - column names;
     * - number of columns and rows;
     * - content of each row.
     *
     * If all comparisons pass, the test will pass. Otherwise, the test will fail.
     *
     * @param h2oFrame  frame produced by the H2O ORC parser
     * @param orcReader native ORC reader for the same file
     * @return number of failed per-stripe comparisons
     */
    static int compareH2OFrame(String fileName, Set<String> failedFiles, Frame h2oFrame, Reader orcReader) {
        // grab column names, column and row numbers
        StructObjectInspector insp = (StructObjectInspector) orcReader.getObjectInspector();
        List<StructField> allColInfo = (List<StructField>) insp.getAllStructFieldRefs(); // get info of all cols
        // compare number of columns and rows
        int allColNumber = allColInfo.size(); // get and check column number
        // toInclude is offset by one: index 0 is reserved by the ORC RecordReader include mask.
        boolean[] toInclude = new boolean[allColNumber + 1];
        int colNumber = 0;
        int index1 = 0;
        for (StructField oneField : allColInfo) {
            String colType = oneField.getFieldObjectInspector().getTypeName();
            if (colType.toLowerCase().contains("decimal"))
                colType = "decimal"; // strip precision/scale, e.g. "decimal(10,2)"
            if (isSupportedSchema(colType)) {
                toInclude[index1 + 1] = true;
                colNumber++;
            }
            index1++;
        }
        assertEquals("Number of columns need to be the same: ", colNumber, h2oFrame.numCols());
        // compare column names
        String[] colNames = new String[colNumber];
        String[] colTypes = new String[colNumber];
        int colIndex = 0;
        for (int index = 0; index < allColNumber; index++) { // get and check column names
            String typeName = allColInfo.get(index).getFieldObjectInspector().getTypeName();
            if (typeName.toLowerCase().contains("decimal"))
                typeName = "decimal";
            if (isSupportedSchema(typeName)) {
                colNames[colIndex] = allColInfo.get(index).getFieldName();
                colTypes[colIndex] = typeName;
                colIndex++;
            }
        }
        assertArrayEquals("Column names need to be the same: ", colNames, h2oFrame._names);
        // compare one column at a time of the whole row?
        int failed = compareFrameContents(fileName, failedFiles, h2oFrame, orcReader, colTypes, colNames, toInclude);
        Long totalRowNumber = orcReader.getNumberOfRows(); // get and check row number
        assertEquals("Number of rows need to be the same: ", totalRowNumber, (Long) h2oFrame.numRows());
        return failed;
    }

    /**
     * Walks every stripe of the ORC file batch-by-batch and compares each
     * supported column against the corresponding H2O vector.
     *
     * <p>A throwing stripe is recorded in {@code failedFiles} and counted,
     * but does not abort the remaining stripes.</p>
     *
     * @return number of stripes whose comparison threw
     */
    static int compareFrameContents(String fileName, Set<String> failedFiles, Frame h2oFrame, Reader orcReader,
                                    String[] colTypes, String[] colNames, boolean[] toInclude) {
        List<StripeInformation> stripesInfo = orcReader.getStripes(); // get all stripe info
        int wrongTests = 0;
        if (stripesInfo.size() == 0) { // Orc file contains no data
            assertEquals("Orc file is empty. H2O frame row number should be zero: ", 0, h2oFrame.numRows());
        } else {
            Long startRowIndex = 0L; // row index into H2O frame
            for (StripeInformation oneStripe : stripesInfo) {
                try {
                    RecordReader
                        perStripe = orcReader.rows(oneStripe.getOffset(), oneStripe.getDataLength(), toInclude, null,
                            colNames);
                    VectorizedRowBatch batch = perStripe.nextBatch(null); // read orc file stripes in vectorizedRowBatch
                    boolean done = false;
                    Long rowCounts = 0L;
                    Long rowNumber = oneStripe.getNumberOfRows(); // row number of current stripe
                    while (!done) {
                        long currentBatchRow = batch.count(); // row number of current batch
                        ColumnVector[] dataVectors = batch.cols;
                        int colIndex = 0;
                        for (int cIdx = 0; cIdx < batch.numCols; cIdx++) { // read one column at a time;
                            if (toInclude[cIdx + 1]) { // include mask is 1-based
                                compare1Cloumn(dataVectors[cIdx], colTypes[colIndex].toLowerCase(), colIndex, currentBatchRow,
                                    h2oFrame.vec(colNames[colIndex]), startRowIndex);
                                colIndex++;
                            }
                        }
                        rowCounts = rowCounts + currentBatchRow; // record number of rows of data actually read
                        startRowIndex = startRowIndex + currentBatchRow;
                        if (rowCounts >= rowNumber) // read all rows of the stripe already.
                            done = true;
                        if (!done) // not done yet, get next batch
                            batch = perStripe.nextBatch(batch);
                    }
                    perStripe.close();
                } catch (Throwable e) {
                    // Record the failure but keep checking the remaining stripes.
                    failedFiles.add(fileName);
                    e.printStackTrace();
                    wrongTests += 1;
                }
            }
        }
        return wrongTests;
    }

    /**
     * Dispatches a single ORC column to the type-specific comparison helper.
     * Binary columns are skipped entirely (known retrieval problem).
     */
    static void compare1Cloumn(ColumnVector oneColumn, String columnType, int cIdx, long currentBatchRow,
                               Vec h2oColumn, Long startRowIndex) {
        if (columnType.contains("binary")) // binary retrieval problem. Tomas
            return;
        switch (columnType) {
            case "boolean":
            case "bigint": // FIXME: not working right now
            case "int":
            case "smallint":
            case "tinyint":
                CompareLongcolumn(oneColumn, oneColumn.isNull, currentBatchRow, h2oColumn, startRowIndex);
                break;
            case "float":
            case "double":
                compareDoublecolumn(oneColumn, oneColumn.isNull, currentBatchRow, h2oColumn, startRowIndex);
                break;
            case "string": //FIXME: not working right now
            case "varchar":
            case "char":
                // NOTE: "binary" is filtered out by the early return above, so no case label for it here.
                compareStringcolumn(oneColumn, oneColumn.isNull, currentBatchRow, h2oColumn, startRowIndex, columnType);
                break;
            case "timestamp":
            case "date":
                compareTimecolumn(oneColumn, columnType, oneColumn.isNull, currentBatchRow, h2oColumn, startRowIndex);
                break;
            case "decimal":
                compareDecimalcolumn(oneColumn, oneColumn.isNull, currentBatchRow, h2oColumn, startRowIndex);
                break;
            default:
                Log.warn("String, bigint are not tested. H2O frame is built for them but cannot be verified.");
        }
    }

    /** Compares a decimal ORC column (as doubles, within {@link #EPSILON}) against an H2O vector. */
    static void compareDecimalcolumn(ColumnVector oneDecimalColumn, boolean[] isNull,
                                     long currentBatchRow, Vec h2oFrame, Long startRowIndex) {
        HiveDecimalWritable[] oneColumn = ((DecimalColumnVector) oneDecimalColumn).vector;
        long frameRowIndex = startRowIndex;
        for (int rowIndex = 0; rowIndex < currentBatchRow; rowIndex++) {
            if (isNull[rowIndex])
                assertEquals("Na is found: ", true, h2oFrame.isNA(frameRowIndex));
            else
                assertEquals("Decimal elements should equal: ", Double.parseDouble(oneColumn[rowIndex].toString()),
                    h2oFrame.at(frameRowIndex), EPSILON);
            frameRowIndex++;
        }
    }

    /**
     * Compares a timestamp/date ORC column against an H2O vector.
     * Timestamps are divided by 1e6 before comparing — presumably the vector
     * holds nanoseconds and H2O milliseconds (TODO confirm units);
     * dates go through {@link #correctTimeStamp(long)}.
     */
    static void compareTimecolumn(ColumnVector oneTSColumn, String columnType, boolean[] isNull, long currentBatchRow,
                                  Vec h2oFrame, Long startRowIndex) {
        long[] oneColumn = ((LongColumnVector) oneTSColumn).vector;
        long frameRowIndex = startRowIndex;
        for (int rowIndex = 0; rowIndex < currentBatchRow; rowIndex++) {
            if (isNull[rowIndex])
                assertEquals("Na is found: ", true, h2oFrame.isNA(frameRowIndex));
            else {
                if (columnType.contains("timestamp"))
                    assertEquals("Numerical elements should equal: ", oneColumn[rowIndex] / 1000000, h2oFrame.at8(frameRowIndex),
                        ERRORMARGIN);
                else
                    assertEquals("Numerical elements should equal: ", correctTimeStamp(oneColumn[rowIndex]),
                        h2oFrame.at8(frameRowIndex), ERRORMARGIN);
            }
            frameRowIndex++;
        }
    }

    /**
     * Compares a string/char/varchar ORC column against an H2O vector.
     * Honors {@code isRepeating}: a repeating vector stores the shared value
     * only at row 0. The {@code columnType} parameter is currently unused.
     */
    static void compareStringcolumn(ColumnVector oneStringColumn, boolean[] isNull,
                                    long currentBatchRow, Vec h2oFrame, Long startRowIndex, String columnType) {
        byte[][] oneColumn = ((BytesColumnVector) oneStringColumn).vector;
        int[] stringLength = ((BytesColumnVector) oneStringColumn).length;
        int[] stringStart = ((BytesColumnVector) oneStringColumn).start;
        long frameRowIndex = startRowIndex;
        BufferedString tempH2o = new BufferedString();
        BufferedString tempOrc = new BufferedString();
        for (int rowIndex = 0; rowIndex < currentBatchRow; rowIndex++) {
            if (isNull[rowIndex])
                assertEquals("Na is found: ", true, h2oFrame.isNA(frameRowIndex));
            else {
                if (!oneStringColumn.isRepeating || rowIndex == 0)
                    tempOrc.set(oneColumn[rowIndex], stringStart[rowIndex], stringLength[rowIndex]);
                h2oFrame.atStr(tempH2o, frameRowIndex);
                assertEquals("isRepeating = " + oneStringColumn.isRepeating + " String/char elements should equal: ", true, tempOrc.equals(tempH2o));
            }
            frameRowIndex++;
        }
    }

    /** Compares a float/double ORC column against an H2O vector within {@link #EPSILON}. */
    static void compareDoublecolumn(ColumnVector oneDoubleColumn, boolean[] isNull,
                                    long currentBatchRow, Vec h2oFrame, Long startRowIndex) {
        double[] oneColumn = ((DoubleColumnVector) oneDoubleColumn).vector;
        long frameRowIndex = startRowIndex;
        for (int rowIndex = 0; rowIndex < currentBatchRow; rowIndex++) {
            if (isNull[rowIndex])
                assertEquals("Na is found: ", true, h2oFrame.isNA(frameRowIndex));
            else
                assertEquals("Numerical elements should equal: ", oneColumn[rowIndex], h2oFrame.at(frameRowIndex), EPSILON);
            frameRowIndex++;
        }
    }

    /**
     * Compares an integral ORC column against an H2O vector.
     * Rows the H2O frame could not parse (NA) are skipped rather than failed.
     */
    static void CompareLongcolumn(ColumnVector oneLongColumn, boolean[] isNull,
                                  long currentBatchRow, Vec h2oFrame, Long startRowIndex) {
        long[] oneColumn = ((LongColumnVector) oneLongColumn).vector;
        long frameRowIndex = startRowIndex;
        for (int rowIndex = 0; rowIndex < currentBatchRow; rowIndex++) {
            if (isNull[rowIndex])
                assertEquals("Na is found: ", true, h2oFrame.isNA(frameRowIndex));
            else {
                if (h2oFrame.isNA(frameRowIndex))
                    continue;
                else
                    assertEquals("Numerical elements should equal: ", oneColumn[rowIndex], h2oFrame.at8(frameRowIndex));
            }
            frameRowIndex++;
        }
    }

    /**
     * Converts an ORC date (days since epoch) to a millisecond timestamp,
     * snapping a non-midnight result back to midnight to cancel the
     * timezone/leap artifacts introduced by {@link #ADD_OFFSET}.
     */
    static long correctTimeStamp(long daysSinceEpoch) {
        long timestamp = (daysSinceEpoch * DAY_TO_MS + ADD_OFFSET);
        DateTime date = new DateTime(timestamp);
        int hour = date.hourOfDay().get();
        if (hour == 0)
            return timestamp;
        else
            return (timestamp - hour * HOUR_OFFSET);
    }
}
|
|
package com.mindoo.domino.jna.xsp.internal.mime;
import java.io.Closeable;
import java.io.File;
import java.io.FilePermission;
import java.io.IOException;
import java.io.InputStream;
import java.net.JarURLConnection;
import java.net.SocketPermission;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLConnection;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.CodeSigner;
import java.security.CodeSource;
import java.security.Permission;
import java.security.PermissionCollection;
import java.security.PrivilegedAction;
import java.security.PrivilegedExceptionAction;
import java.security.ProtectionDomain;
import java.security.SecureClassLoader;
import java.util.Enumeration;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.WeakHashMap;
import java.util.jar.Attributes;
import java.util.jar.Attributes.Name;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import sun.misc.Resource;
import sun.misc.URLClassPath;
import sun.net.www.ParseUtil;
import sun.security.util.SecurityConstants;
/**
* Funtionality similar to the JDK's {@link URLClassLoader}, but we pass
* our own {@link ProtectionDomain} to the loaded classes.
*/
public class CustomURLClassLoader extends SecureClassLoader implements Closeable {
/* The search path for classes and resources */
private final URLClassPath ucp;
/* The context to be used when loading classes and resources */
private final AccessControlContext acc;
private final ProtectionDomain pd;
/**
 * Creates the loader with the given search path, parent and access-control
 * context, capturing this class's own {@link ProtectionDomain} for later use.
 */
public CustomURLClassLoader(URL[] urls, ClassLoader parent,
        AccessControlContext acc) {
    super(parent);
    // Creating a class loader is privileged; the check sits here (not in a
    // helper) so the stack depth matches what the 1.1 security manager expects.
    final SecurityManager sm = System.getSecurityManager();
    if (sm != null) {
        sm.checkCreateClassLoader();
    }
    this.ucp = new URLClassPath(urls);
    this.acc = acc;
    this.pd = getClass().getProtectionDomain();
}
/* A map (used as a set) to keep track of closeable local resources
* (either JarFiles or FileInputStreams). We don't care about
* Http resources since they don't need to be closed.
*
* If the resource is coming from a jar file
* we keep a (weak) reference to the JarFile object which can
* be closed if URLClassLoader.close() called. Due to jar file
* caching there will typically be only one JarFile object
* per underlying jar file.
*
* For file resources, which is probably a less common situation
* we have to keep a weak reference to each stream.
*/
private WeakHashMap<Closeable,Void>
closeables = new WeakHashMap<>();
/**
 * Opens an input stream for the named resource, registering any local
 * (jar or file) handle it opens so that {@link #close()} can release it.
 *
 * <p>The search order is the one documented for
 * {@link #getResource(String)}.</p>
 *
 * @param name the resource name
 * @return a stream for the resource, or {@code null} if it could not be
 *         found or opened
 *
 * @since 1.7
 */
public InputStream getResourceAsStream(String name) {
    final URL url = getResource(name);
    if (url == null) {
        return null;
    }
    try {
        final URLConnection conn = url.openConnection();
        final InputStream stream = conn.getInputStream();
        if (conn instanceof JarURLConnection) {
            // Remember the backing JarFile; jar-file caching usually yields
            // a single JarFile per underlying archive.
            final JarFile jarFile = ((JarURLConnection) conn).getJarFile();
            synchronized (closeables) {
                if (!closeables.containsKey(jarFile)) {
                    closeables.put(jarFile, null);
                }
            }
        } else if (conn instanceof sun.net.www.protocol.file.FileURLConnection) {
            // Plain file streams must each be tracked individually.
            synchronized (closeables) {
                closeables.put(stream, null);
            }
        }
        return stream;
    } catch (IOException e) {
        // Mirror URLClassLoader: lookup/open failures surface as null.
        return null;
    }
}
/**
 * Closes this loader so it can no longer define new classes or resources.
 * Classes and resources already loaded, or reachable through parent
 * loaders, remain accessible.
 *
 * <p>For jar: and file: URLs this also closes any files the loader opened.
 * The result of a class load that is in flight when {@code close} is
 * invoked is undefined. Calling close on an already closed loader has no
 * effect.</p>
 *
 * <p>All opened files are closed on a best-effort basis: {@link IOException}s
 * are collected, the first one is re-thrown with any further ones attached
 * as suppressed exceptions.</p>
 *
 * @exception IOException if closing any file opened by this class loader
 * resulted in an IOException
 * @exception SecurityException if a security manager is set and denies
 * {@link RuntimePermission}{@code ("closeClassLoader")}
 *
 * @since 1.7
 */
public void close() throws IOException {
    final SecurityManager sm = System.getSecurityManager();
    if (sm != null) {
        sm.checkPermission(new RuntimePermission("closeClassLoader"));
    }
    final List<IOException> failures = ucp.closeLoaders();
    // Close any streams/jar files recorded by getResourceAsStream().
    synchronized (closeables) {
        for (Closeable resource : closeables.keySet()) {
            try {
                resource.close();
            } catch (IOException ioex) {
                failures.add(ioex);
            }
        }
        closeables.clear();
    }
    if (!failures.isEmpty()) {
        final IOException primary = failures.remove(0);
        for (IOException suppressed : failures) {
            primary.addSuppressed(suppressed);
        }
        throw primary;
    }
}
/**
* Appends the specified URL to the list of URLs to search for
* classes and resources.
* <p>
* If the URL specified is {@code null} or is already in the
* list of URLs, or if this loader is closed, then invoking this
* method has no effect.
*
* @param url the URL to be added to the search path of URLs
*/
protected void addURL(URL url) {
// Delegates straight to the class path; the null/duplicate/closed
// handling described above is performed by URLClassPath itself.
ucp.addURL(url);
}
/**
* Returns the search path of URLs for loading classes and resources.
* This includes the original list of URLs specified to the constructor,
* along with any URLs subsequently appended by the addURL() method.
* @return the search path of URLs for loading classes and resources.
*/
public URL[] getURLs() {
// The URLClassPath is the single source of truth for the search path.
return ucp.getURLs();
}
/**
* Finds and loads the class with the specified name from the URL search
* path. Any URLs referring to JAR files are loaded and opened as needed
* until the class is found.
*
* @param name the name of the class
* @return the resulting class
* @exception ClassNotFoundException if the class could not be found,
* or if the loader is closed.
* @exception NullPointerException if {@code name} is {@code null}.
*/
protected Class<?> findClass(final String name)
    throws ClassNotFoundException
{
    final Class<?> result;
    try {
        // Perform the lookup under the AccessControlContext captured at
        // construction time (acc), not the caller's context.
        result = AccessController.doPrivileged(
            new PrivilegedExceptionAction<Class<?>>() {
                public Class<?> run() throws ClassNotFoundException {
                    // Map the binary class name to a resource path,
                    // e.g. "a.b.C" -> "a/b/C.class".
                    String path = name.replace('.', '/').concat(".class");
                    Resource res = ucp.getResource(path, false);
                    if (res != null) {
                        try {
                            return defineClass(name, res);
                        } catch (IOException e) {
                            // Failure reading class bytes is reported as
                            // ClassNotFoundException with the cause kept.
                            throw new ClassNotFoundException(name, e);
                        }
                    } else {
                        // Not on the search path (or loader closed).
                        return null;
                    }
                }
            }, acc);
    } catch (java.security.PrivilegedActionException pae) {
        // run() can only throw ClassNotFoundException as a checked
        // exception, so this cast is safe.
        throw (ClassNotFoundException) pae.getException();
    }
    if (result == null) {
        throw new ClassNotFoundException(name);
    }
    return result;
}
/*
* Retrieve the package using the specified package name.
* If non-null, verify the package using the specified code
* source and manifest.
*/
/*
 * Looks up the Package for the given name and, when it already exists,
 * enforces the sealing rules against the supplied manifest and code
 * source URL. Returns null when the package has not been defined yet.
 */
private Package getAndVerifyPackage(String pkgname,
                                    Manifest man, URL url) {
    Package existing = getPackage(pkgname);
    if (existing == null) {
        return null;
    }
    if (existing.isSealed()) {
        // A sealed package may only accept classes coming from the very
        // code source URL it was sealed against.
        if (!existing.isSealed(url)) {
            throw new SecurityException(
                "sealing violation: package " + pkgname + " is sealed");
        }
        return existing;
    }
    // The package is open: refuse any attempt to (re)seal it from this
    // manifest now that classes have already been loaded into it.
    if (man != null && isSealed(pkgname, man)) {
        throw new SecurityException(
            "sealing violation: can't seal package " + pkgname +
            ": already loaded");
    }
    return existing;
}
// Also called by VM to define Package for classes loaded from the CDS
// archive
private void definePackageInternal(String pkgname, Manifest man, URL url)
{
    // Only define the package if it is not yet present; when it is
    // present, getAndVerifyPackage has already enforced sealing rules.
    if (getAndVerifyPackage(pkgname, man, url) == null) {
        try {
            if (man != null) {
                // Use manifest attributes for version/sealing info.
                definePackage(pkgname, man, url);
            } else {
                definePackage(pkgname, null, null, null, null, null, null, null);
            }
        } catch (IllegalArgumentException iae) {
            // parallel-capable class loaders: re-verify in case of a
            // race condition
            if (getAndVerifyPackage(pkgname, man, url) == null) {
                // Should never happen
                throw new AssertionError("Cannot find package " +
                                         pkgname);
            }
        }
    }
}
/*
* Defines a Class using the class bytes obtained from the specified
* Resource. The resulting Class must be resolved before it can be
* used.
*/
/*
 * Reads the class bytes from the given Resource, ensures the enclosing
 * package is defined/verified, and defines the class. The resulting
 * Class must be resolved before use.
 */
private Class<?> defineClass(String name, Resource res) throws IOException {
    long t0 = System.nanoTime();
    int i = name.lastIndexOf('.');
    URL url = res.getCodeSourceURL();
    if (i != -1) {
        String pkgname = name.substring(0, i);
        // Check if package already loaded.
        Manifest man = res.getManifest();
        definePackageInternal(pkgname, man, url);
    }
    // Now read the class bytes and define the class
    java.nio.ByteBuffer bb = res.getByteBuffer();
    if (bb != null) {
        // Use (direct) ByteBuffer:
        CodeSigner[] signers = res.getCodeSigners();
        // NOTE(review): 'cs' is computed but unused since the FIXED KL
        // change below routes definition through the custom
        // ProtectionDomain 'pd' — confirm the code-signer information
        // is intentionally discarded.
        CodeSource cs = new CodeSource(url, signers);
        sun.misc.PerfCounter.getReadClassBytesTime().addElapsedTimeFrom(t0);
        //FIXED KL: load class with our ProtectionDomain
        //return defineClass(name, bb, cs);
        return defineClass(name, bb, pd);
    } else {
        byte[] b = res.getBytes();
        // must read certificates AFTER reading bytes.
        CodeSigner[] signers = res.getCodeSigners();
        // NOTE(review): 'cs' unused here as well — see note above in the
        // ByteBuffer branch.
        CodeSource cs = new CodeSource(url, signers);
        sun.misc.PerfCounter.getReadClassBytesTime().addElapsedTimeFrom(t0);
        //FIXED KL: load class with our ProtectionDomain
        //return defineClass(name, b, 0, b.length, cs);
        return defineClass(name, b, 0, b.length, pd);
    }
}
/**
* Defines a new package by name in this ClassLoader. The attributes
* contained in the specified Manifest will be used to obtain package
* version and sealing information. For sealed packages, the additional
* URL specifies the code source URL from which the package was loaded.
*
* @param name the package name
* @param man the Manifest containing package version and sealing
* information
* @param url the code source url for the package, or null if none
* @exception IllegalArgumentException if the package name duplicates
* an existing package either in this class loader or one
* of its ancestors
* @return the newly defined Package object
*/
/*
 * Defines a package from manifest metadata: each attribute is taken from
 * the per-package manifest section when present, falling back to the
 * manifest's main attributes. The package is sealed against the given
 * URL only when the effective "Sealed" attribute is "true".
 */
protected Package definePackage(String name, Manifest man, URL url)
    throws IllegalArgumentException
{
    // Per-package attributes live under the directory-style manifest
    // key "foo/bar/" for package foo.bar.
    String entry = name.replace('.', '/').concat("/");
    Attributes pkgAttr = man.getAttributes(entry);
    Attributes mainAttr = man.getMainAttributes();

    String specTitle   = manifestValue(pkgAttr, mainAttr, Name.SPECIFICATION_TITLE);
    String specVersion = manifestValue(pkgAttr, mainAttr, Name.SPECIFICATION_VERSION);
    String specVendor  = manifestValue(pkgAttr, mainAttr, Name.SPECIFICATION_VENDOR);
    String implTitle   = manifestValue(pkgAttr, mainAttr, Name.IMPLEMENTATION_TITLE);
    String implVersion = manifestValue(pkgAttr, mainAttr, Name.IMPLEMENTATION_VERSION);
    String implVendor  = manifestValue(pkgAttr, mainAttr, Name.IMPLEMENTATION_VENDOR);
    String sealed      = manifestValue(pkgAttr, mainAttr, Name.SEALED);

    URL sealBase = "true".equalsIgnoreCase(sealed) ? url : null;
    return definePackage(name, specTitle, specVersion, specVendor,
                         implTitle, implVersion, implVendor, sealBase);
}

/*
 * Reads one manifest attribute, preferring the per-package section over
 * the main attributes; returns null when neither defines it.
 */
private static String manifestValue(Attributes pkgAttr, Attributes mainAttr,
                                    Name key) {
    String value = (pkgAttr != null) ? pkgAttr.getValue(key) : null;
    if (value == null && mainAttr != null) {
        value = mainAttr.getValue(key);
    }
    return value;
}
/*
* Returns true if the specified package name is sealed according to the
* given manifest.
*/
/*
 * Reports whether the manifest declares the named package sealed,
 * consulting the per-package section first and the main attributes as a
 * fallback.
 */
private boolean isSealed(String name, Manifest man) {
    Attributes section = man.getAttributes(name.replace('.', '/').concat("/"));
    String sealedValue = (section != null) ? section.getValue(Name.SEALED) : null;
    if (sealedValue == null) {
        Attributes main = man.getMainAttributes();
        if (main != null) {
            sealedValue = main.getValue(Name.SEALED);
        }
    }
    return "true".equalsIgnoreCase(sealedValue);
}
/**
* Finds the resource with the specified name on the URL search path.
*
* @param name the name of the resource
* @return a {@code URL} for the resource, or {@code null}
* if the resource could not be found, or if the loader is closed.
*/
public URL findResource(final String name) {
    /*
     * The same restriction to finding classes applies to resources
     */
    // Look up the resource under the AccessControlContext captured at
    // construction time.
    URL url = AccessController.doPrivileged(
        new PrivilegedAction<URL>() {
            public URL run() {
                return ucp.findResource(name, true);
            }
        }, acc);
    // checkURL re-validates access to the URL; it yields null when the
    // current security policy rejects it.
    return url != null ? ucp.checkURL(url) : null;
}
/**
* Returns an Enumeration of URLs representing all of the resources
* on the URL search path having the specified name.
*
* @param name the resource name
* @exception IOException if an I/O exception occurs
* @return an {@code Enumeration} of {@code URL}s
* If the loader is closed, the Enumeration will be empty.
*/
public Enumeration<URL> findResources(final String name)
    throws IOException
{
    // The underlying enumeration is created up front, but each element
    // is security-checked lazily as it is consumed.
    final Enumeration<URL> e = ucp.findResources(name, true);
    return new Enumeration<URL>() {
        // One-element lookahead buffer holding the next vetted URL.
        private URL url = null;
        // Advances to the next URL that passes checkURL; returns true
        // when one is buffered in 'url'.
        private boolean next() {
            if (url != null) {
                return true;
            }
            do {
                // Pull the next candidate under the saved access
                // control context.
                URL u = AccessController.doPrivileged(
                    new PrivilegedAction<URL>() {
                        public URL run() {
                            if (!e.hasMoreElements())
                                return null;
                            return e.nextElement();
                        }
                    }, acc);
                if (u == null)
                    break;
                // checkURL returns null for URLs the policy rejects;
                // such candidates are skipped.
                url = ucp.checkURL(u);
            } while (url == null);
            return url != null;
        }
        public URL nextElement() {
            if (!next()) {
                throw new NoSuchElementException();
            }
            // Hand out the buffered URL and clear the lookahead.
            URL u = url;
            url = null;
            return u;
        }
        public boolean hasMoreElements() {
            return next();
        }
    };
}
/**
* Returns the permissions for the given codesource object.
* The implementation of this method first calls super.getPermissions
* and then adds permissions based on the URL of the codesource.
* <p>
* If the protocol of this URL is "jar", then the permission granted
* is based on the permission that is required by the URL of the Jar
* file.
* <p>
* If the protocol is "file" and there is an authority component, then
* permission to connect to and accept connections from that authority
* may be granted. If the protocol is "file"
* and the path specifies a file, then permission to read that
* file is granted. If protocol is "file" and the path is
* a directory, permission is granted to read all files
* and (recursively) all files and subdirectories contained in
* that directory.
* <p>
* If the protocol is not "file", then permission
* to connect to and accept connections from the URL's host is granted.
* @param codesource the codesource
* @exception NullPointerException if {@code codesource} is {@code null}.
* @return the permissions granted to the codesource
*/
protected PermissionCollection getPermissions(CodeSource codesource)
{
    // Start with whatever the superclass/policy grants, then add a
    // permission derived from the code source URL itself.
    PermissionCollection perms = super.getPermissions(codesource);
    URL url = codesource.getLocation();
    Permission p;
    URLConnection urlConnection;
    try {
        urlConnection = url.openConnection();
        p = urlConnection.getPermission();
    } catch (java.io.IOException ioe) {
        // Could not open a connection; fall through to the protocol
        // based handling below.
        p = null;
        urlConnection = null;
    }
    if (p instanceof FilePermission) {
        // if the permission has a separator char on the end,
        // it means the codebase is a directory, and we need
        // to add an additional permission to read recursively
        String path = p.getName();
        if (path.endsWith(File.separator)) {
            path += "-";
            p = new FilePermission(path, SecurityConstants.FILE_READ_ACTION);
        }
    } else if ((p == null) && (url.getProtocol().equals("file"))) {
        // No connection-derived permission but a file: URL — grant read
        // access to the file, or recursively to the directory.
        String path = url.getFile().replace('/', File.separatorChar);
        path = ParseUtil.decode(path);
        if (path.endsWith(File.separator))
            path += "-";
        p = new FilePermission(path, SecurityConstants.FILE_READ_ACTION);
    } else {
        /**
         * Not loading from a 'file:' URL so we want to give the class
         * permission to connect to and accept from the remote host
         * after we've made sure the host is the correct one and is valid.
         */
        URL locUrl = url;
        if (urlConnection instanceof JarURLConnection) {
            // For jar: URLs the relevant host is that of the enclosing
            // JAR file URL.
            locUrl = ((JarURLConnection)urlConnection).getJarFileURL();
        }
        String host = locUrl.getHost();
        if (host != null && (host.length() > 0))
            p = new SocketPermission(host,
                                     SecurityConstants.SOCKET_CONNECT_ACCEPT_ACTION);
    }
    // make sure the person that created this class loader
    // would have this permission
    if (p != null) {
        final SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            final Permission fp = p;
            // The check runs under the creator's saved context (acc), so
            // the grant never exceeds what the creator could do.
            AccessController.doPrivileged(new PrivilegedAction<Void>() {
                public Void run() throws SecurityException {
                    sm.checkPermission(fp);
                    return null;
                }
            }, acc);
        }
        perms.add(p);
    }
    return perms;
}
static {
    // Allow concurrent loading of classes with distinct names.
    ClassLoader.registerAsParallelCapable();
}
}
|
|
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.actions.searcheverywhere;
import com.intellij.codeInsight.navigation.NavigationUtil;
import com.intellij.ide.DataManager;
import com.intellij.ide.actions.GotoActionBase;
import com.intellij.ide.actions.QualifiedNameProviderUtil;
import com.intellij.ide.actions.SearchEverywhereClassifier;
import com.intellij.ide.actions.SearchEverywherePsiRenderer;
import com.intellij.ide.plugins.DynamicPluginListener;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.ide.util.EditSourceUtil;
import com.intellij.ide.util.ElementsChooser;
import com.intellij.ide.util.gotoByName.*;
import com.intellij.ide.util.scopeChooser.ScopeChooserCombo;
import com.intellij.ide.util.scopeChooser.ScopeDescriptor;
import com.intellij.lang.LangBundle;
import com.intellij.navigation.NavigationItem;
import com.intellij.navigation.PsiElementNavigationItem;
import com.intellij.openapi.MnemonicHelper;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.actionSystem.ex.ActionUtil;
import com.intellij.openapi.actionSystem.ex.CustomComponentAction;
import com.intellij.openapi.actionSystem.impl.ActionButtonWithText;
import com.intellij.openapi.actionSystem.impl.SimpleDataContext;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.util.ProgressIndicatorUtils;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.popup.PopupStep;
import com.intellij.openapi.ui.popup.util.BaseListPopupStep;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.SmartPointerManager;
import com.intellij.psi.SmartPsiElementPointer;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.ComponentUtil;
import com.intellij.ui.OffsetIcon;
import com.intellij.ui.TitledSeparator;
import com.intellij.ui.components.JBList;
import com.intellij.ui.popup.list.ListPopupImpl;
import com.intellij.util.CommonProcessors;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.JBIterable;
import com.intellij.util.indexing.FindSymbolParameters;
import com.intellij.util.ui.JBUI;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.InputEvent;
import java.awt.event.KeyEvent;
import java.util.List;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public abstract class AbstractGotoSEContributor implements WeightedSearchEverywhereContributor<Object>, ScopeSupporting {
private static final Logger LOG = Logger.getInstance(AbstractGotoSEContributor.class);
private static final Key<Map<String, String>> SE_SELECTED_SCOPES = Key.create("SE_SELECTED_SCOPES");
private static final Pattern ourPatternToDetectLinesAndColumns = Pattern.compile(
"(.+?)" + // name, non-greedy matching
"(?::|@|,| |#|#L|\\?l=| on line | at line |:?\\(|:?\\[)" + // separator
"(\\d+)?(?:\\W(\\d+)?)?" + // line + column
"[)\\]]?" // possible closing paren/brace
);
protected final Project myProject;
protected boolean myEverywhere;
protected ScopeDescriptor myScopeDescriptor;
private final GlobalSearchScope myEverywhereScope;
private final GlobalSearchScope myProjectScope;
protected final SmartPsiElementPointer<PsiElement> myPsiContext;
protected AbstractGotoSEContributor(@NotNull AnActionEvent event) {
myProject = event.getRequiredData(CommonDataKeys.PROJECT);
PsiElement context = GotoActionBase.getPsiContext(event);
myPsiContext = context != null ? SmartPointerManager.getInstance(myProject).createSmartPsiElementPointer(context) : null;
GlobalSearchScope everywhereScope = SearchEverywhereClassifier.EP_Manager.getEverywhereScope(myProject);
if (everywhereScope == null) {
everywhereScope = GlobalSearchScope.everythingScope(myProject);
}
myEverywhereScope = everywhereScope;
List<ScopeDescriptor> scopeDescriptors = createScopes();
GlobalSearchScope projectScope = SearchEverywhereClassifier.EP_Manager.getProjectScope(myProject);
if (projectScope == null) {
projectScope = GlobalSearchScope.projectScope(myProject);
if (myEverywhereScope.equals(projectScope)) {
// just get the second scope, i.e. Attached Directories in DataGrip
ScopeDescriptor secondScope = JBIterable.from(scopeDescriptors)
.filter(o -> !o.scopeEquals(this.myEverywhereScope) && !o.scopeEquals(null))
.first();
projectScope = secondScope != null ? (GlobalSearchScope) secondScope.getScope() : this.myEverywhereScope;
}
}
myProjectScope = projectScope;
myScopeDescriptor = getInitialSelectedScope(scopeDescriptors);
myProject.getMessageBus().connect(this).subscribe(DynamicPluginListener.TOPIC, new DynamicPluginListener() {
@Override
public void pluginUnloaded(@NotNull IdeaPluginDescriptor pluginDescriptor, boolean isUpdate) {
myScopeDescriptor = getInitialSelectedScope(createScopes());
}
});
}
private List<ScopeDescriptor> createScopes() {
DataContext context = createContext();
List<ScopeDescriptor> res = new ArrayList<>();
ScopeChooserCombo.processScopes(
myProject, context,
ScopeChooserCombo.OPT_LIBRARIES | ScopeChooserCombo.OPT_EMPTY_SCOPES,
new CommonProcessors.CollectProcessor<>(res));
return res;
}
private DataContext createContext() {
DataContext parentContext = myProject == null ? null : SimpleDataContext.getProjectContext(myProject);
PsiElement context = myPsiContext != null ? myPsiContext.getElement() : null;
PsiFile file = context == null ? null : context.getContainingFile();
return SimpleDataContext.builder()
.setParent(parentContext)
.add(CommonDataKeys.PSI_ELEMENT, context)
.add(CommonDataKeys.PSI_FILE, file)
.build();
}
@NotNull
@Override
public String getSearchProviderId() {
return getClass().getSimpleName();
}
@Override
public boolean isShownInSeparateTab() {
return true;
}
/** @deprecated override {@link #doGetActions(PersistentSearchEverywhereContributorFilter, ElementsChooser.StatisticsCollector, Runnable)} instead**/
@ApiStatus.ScheduledForRemoval(inVersion = "2022.1")
@Deprecated
@NotNull
protected List<AnAction> doGetActions(@NotNull @NlsContexts.Checkbox String ignored,
@Nullable PersistentSearchEverywhereContributorFilter<?> filter,
@NotNull Runnable onChanged) {
return doGetActions(filter, null, onChanged);
}
@NotNull
protected <T> List<AnAction> doGetActions(@Nullable PersistentSearchEverywhereContributorFilter<T> filter,
@Nullable ElementsChooser.StatisticsCollector<T> statisticsCollector,
@NotNull Runnable onChanged) {
if (myProject == null || filter == null) return Collections.emptyList();
ArrayList<AnAction> result = new ArrayList<>();
result.add(new ScopeChooserAction() {
final boolean canToggleEverywhere = !myEverywhereScope.equals(myProjectScope);
@Override
void onScopeSelected(@NotNull ScopeDescriptor o) {
setSelectedScope(o);
onChanged.run();
}
@NotNull
@Override
ScopeDescriptor getSelectedScope() {
return myScopeDescriptor;
}
@Override
void onProjectScopeToggled() {
setEverywhere(!myScopeDescriptor.scopeEquals(myEverywhereScope));
}
@Override
boolean processScopes(@NotNull Processor<? super ScopeDescriptor> processor) {
return ContainerUtil.process(createScopes(), processor);
}
@Override
public boolean isEverywhere() {
return myScopeDescriptor.scopeEquals(myEverywhereScope);
}
@Override
public void setEverywhere(boolean everywhere) {
setSelectedScope(new ScopeDescriptor(everywhere ? myEverywhereScope : myProjectScope));
onChanged.run();
}
@Override
public boolean canToggleEverywhere() {
if (!canToggleEverywhere) return false;
return myScopeDescriptor.scopeEquals(myEverywhereScope) ||
myScopeDescriptor.scopeEquals(myProjectScope);
}
});
result.add(new SearchEverywhereFiltersAction<>(filter, onChanged, statisticsCollector));
return result;
}
@NotNull
private ScopeDescriptor getInitialSelectedScope(List<? extends ScopeDescriptor> scopeDescriptors) {
String selectedScope = myProject == null ? null : getSelectedScopes(myProject).get(getClass().getSimpleName());
if (StringUtil.isNotEmpty(selectedScope)) {
for (ScopeDescriptor descriptor : scopeDescriptors) {
if (!selectedScope.equals(descriptor.getDisplayName()) || descriptor.scopeEquals(null)) continue;
return descriptor;
}
}
return new ScopeDescriptor(myProjectScope);
}
private void setSelectedScope(@NotNull ScopeDescriptor o) {
myScopeDescriptor = o;
getSelectedScopes(myProject).put(
getClass().getSimpleName(),
o.scopeEquals(myEverywhereScope) || o.scopeEquals(myProjectScope) ? null : o.getDisplayName());
}
@NotNull
private static Map<String, String> getSelectedScopes(@NotNull Project project) {
Map<String, String> map = SE_SELECTED_SCOPES.get(project);
if (map == null) SE_SELECTED_SCOPES.set(project, map = new HashMap<>(3));
return map;
}
@Override
public void fetchWeightedElements(@NotNull String rawPattern,
@NotNull ProgressIndicator progressIndicator,
@NotNull Processor<? super FoundItemDescriptor<Object>> consumer) {
if (myProject == null) return; //nowhere to search
String pattern = removeCommandFromPattern(rawPattern);
if (!isEmptyPatternSupported() && pattern.isEmpty()) return;
Runnable fetchRunnable = () -> {
if (!isDumbAware() && DumbService.isDumb(myProject)) return;
FilteringGotoByModel<?> model = createModel(myProject);
if (progressIndicator.isCanceled()) return;
PsiElement context = myPsiContext != null ? myPsiContext.getElement() : null;
ChooseByNameItemProvider provider = ChooseByNameModelEx.getItemProvider(model, context);
GlobalSearchScope scope = (GlobalSearchScope)Objects.requireNonNull(myScopeDescriptor.getScope());
boolean everywhere = scope.isSearchInLibraries();
ChooseByNameViewModel viewModel = new MyViewModel(myProject, model);
if (provider instanceof ChooseByNameInScopeItemProvider) {
FindSymbolParameters parameters = FindSymbolParameters.wrap(pattern, scope);
((ChooseByNameInScopeItemProvider)provider).filterElementsWithWeights(viewModel, parameters, progressIndicator,
item -> processElement(progressIndicator, consumer, model,
item.getItem(), item.getWeight())
);
}
else if (provider instanceof ChooseByNameWeightedItemProvider) {
((ChooseByNameWeightedItemProvider)provider).filterElementsWithWeights(viewModel, pattern, everywhere, progressIndicator,
item -> processElement(progressIndicator, consumer, model,
item.getItem(), item.getWeight())
);
}
else {
provider.filterElements(viewModel, pattern, everywhere, progressIndicator,
element -> processElement(progressIndicator, consumer, model, element,
getElementPriority(element, pattern))
);
}
};
Application application = ApplicationManager.getApplication();
if (application.isUnitTestMode() && application.isDispatchThread()) {
fetchRunnable.run();
}
else {
ProgressIndicatorUtils.yieldToPendingWriteActions();
ProgressIndicatorUtils.runInReadActionWithWriteActionPriority(fetchRunnable, progressIndicator);
}
}
private boolean processElement(@NotNull ProgressIndicator progressIndicator,
@NotNull Processor<? super FoundItemDescriptor<Object>> consumer,
FilteringGotoByModel<?> model, Object element, int degree) {
if (progressIndicator.isCanceled()) return false;
if (element == null) {
LOG.error("Null returned from " + model + " in " + this);
return true;
}
return consumer.process(new FoundItemDescriptor<>(element, degree));
}
@Override
public ScopeDescriptor getScope() {
return myScopeDescriptor;
}
@Override
public void setScope(ScopeDescriptor scope) {
setSelectedScope(scope);
}
@Override
public List<ScopeDescriptor> getSupportedScopes() {
return createScopes();
}
@Override
public @NotNull List<SearchEverywhereCommandInfo> getSupportedCommands() {
if (Registry.is("search.everywhere.group.contributors.by.type")) {
SearchEverywhereCommandInfo command = getFilterCommand();
return command == null ? Collections.emptyList() : Collections.singletonList(command);
}
return Collections.emptyList();
}
@NotNull
protected abstract FilteringGotoByModel<?> createModel(@NotNull Project project);
@Nullable
protected SearchEverywhereCommandInfo getFilterCommand() {
return null;
}
@NotNull
@Override
public String filterControlSymbols(@NotNull String pattern) {
pattern = removeCommandFromPattern(pattern);
if (StringUtil.containsAnyChar(pattern, ":,;@[( #") ||
pattern.contains(" line ") ||
pattern.contains("?l=")) { // quick test if reg exp should be used
return applyPatternFilter(pattern, ourPatternToDetectLinesAndColumns);
}
return pattern;
}
private String removeCommandFromPattern(@NotNull String pattern) {
SearchEverywhereCommandInfo command = getFilterCommand();
if (command != null && pattern.startsWith(command.getCommandWithPrefix())) {
pattern = pattern.substring(command.getCommandWithPrefix().length()).stripLeading();
}
return pattern;
}
protected static String applyPatternFilter(String str, Pattern regex) {
Matcher matcher = regex.matcher(str);
if (matcher.matches()) {
return matcher.group(1);
}
return str;
}
@Override
public boolean showInFindResults() {
return true;
}
@Override
public boolean processSelectedItem(@NotNull Object selected, int modifiers, @NotNull String searchText) {
if (selected instanceof PsiElement) {
if (!((PsiElement)selected).isValid()) {
LOG.warn("Cannot navigate to invalid PsiElement");
return true;
}
PsiElement psiElement = preparePsi((PsiElement)selected, modifiers, searchText);
Navigatable extNavigatable = createExtendedNavigatable(psiElement, searchText, modifiers);
if (extNavigatable != null && extNavigatable.canNavigate()) {
extNavigatable.navigate(true);
return true;
}
NavigationUtil.activateFileWithPsiElement(psiElement, true);
}
else {
EditSourceUtil.navigate(((NavigationItem)selected), true, false);
}
return true;
}
@Override
public Object getDataForItem(@NotNull Object element, @NotNull String dataId) {
if (CommonDataKeys.PSI_ELEMENT.is(dataId)) {
if (element instanceof PsiElement) {
return element;
}
if (element instanceof DataProvider) {
return ((DataProvider)element).getData(dataId);
}
if (element instanceof PsiElementNavigationItem) {
return ((PsiElementNavigationItem)element).getTargetElement();
}
}
if (SearchEverywhereDataKeys.ITEM_STRING_DESCRIPTION.is(dataId) && element instanceof PsiElement) {
return QualifiedNameProviderUtil.getQualifiedName((PsiElement)element);
}
return null;
}
@Override
public boolean isMultiSelectionSupported() {
return true;
}
@Override
public boolean isDumbAware() {
return DumbService.isDumbAware(createModel(myProject));
}
@NotNull
@Override
public ListCellRenderer<Object> getElementsRenderer() {
return new SearchEverywherePsiRenderer(this);
}
@Override
public int getElementPriority(@NotNull Object element, @NotNull String searchPattern) {
return 50;
}
@Nullable
protected Navigatable createExtendedNavigatable(PsiElement psi, String searchText, int modifiers) {
VirtualFile file = PsiUtilCore.getVirtualFile(psi);
Pair<Integer, Integer> position = getLineAndColumn(searchText);
boolean positionSpecified = position.first >= 0 || position.second >= 0;
if (file != null && positionSpecified) {
return new OpenFileDescriptor(psi.getProject(), file, position.first, position.second);
}
return null;
}
protected PsiElement preparePsi(PsiElement psiElement, int modifiers, String searchText) {
return psiElement.getNavigationElement();
}
protected static Pair<Integer, Integer> getLineAndColumn(String text) {
int line = getLineAndColumnRegexpGroup(text, 2);
int column = getLineAndColumnRegexpGroup(text, 3);
if (line == -1 && column != -1) {
line = 0;
}
return new Pair<>(line, column);
}
private static int getLineAndColumnRegexpGroup(String text, int groupNumber) {
final Matcher matcher = ourPatternToDetectLinesAndColumns.matcher(text);
if (matcher.matches()) {
try {
if (groupNumber <= matcher.groupCount()) {
final String group = matcher.group(groupNumber);
if (group != null) return Integer.parseInt(group) - 1;
}
}
catch (NumberFormatException ignored) {
}
}
return -1;
}
abstract static class ScopeChooserAction extends ActionGroup
implements CustomComponentAction, DumbAware, SearchEverywhereToggleAction {
static final char CHOOSE = 'O';
static final char TOGGLE = 'P';
static final String TOGGLE_ACTION_NAME = "toggleProjectScope";
abstract void onScopeSelected(@NotNull ScopeDescriptor o);
@NotNull
abstract ScopeDescriptor getSelectedScope();
abstract void onProjectScopeToggled();
abstract boolean processScopes(@NotNull Processor<? super ScopeDescriptor> processor);
@Override public boolean canBePerformed(@NotNull DataContext context) { return true; }
@Override public boolean isPopup() { return true; }
@Override public AnAction @NotNull [] getChildren(@Nullable AnActionEvent e) { return EMPTY_ARRAY; }
@NotNull @Override
public JComponent createCustomComponent(@NotNull Presentation presentation, @NotNull String place) {
JComponent component = new ActionButtonWithText(this, presentation, place, ActionToolbar.DEFAULT_MINIMUM_BUTTON_SIZE);
ComponentUtil.putClientProperty(component, MnemonicHelper.MNEMONIC_CHECKER, keyCode ->
KeyEvent.getExtendedKeyCodeForChar(TOGGLE) == keyCode ||
KeyEvent.getExtendedKeyCodeForChar(CHOOSE) == keyCode);
MnemonicHelper.registerMnemonicAction(component, CHOOSE);
InputMap map = component.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW);
int mask = MnemonicHelper.getFocusAcceleratorKeyMask();
map.put(KeyStroke.getKeyStroke(TOGGLE, mask, false), TOGGLE_ACTION_NAME);
component.getActionMap().put(TOGGLE_ACTION_NAME, new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
// mimic AnAction event invocation to trigger myEverywhereAutoSet=false logic
DataContext dataContext = DataManager.getInstance().getDataContext(component);
KeyEvent inputEvent = new KeyEvent(
component, KeyEvent.KEY_PRESSED, e.getWhen(), MnemonicHelper.getFocusAcceleratorKeyMask(),
KeyEvent.getExtendedKeyCodeForChar(TOGGLE), TOGGLE);
AnActionEvent event = AnActionEvent.createFromAnAction(
ScopeChooserAction.this, inputEvent, ActionPlaces.TOOLBAR, dataContext);
ActionUtil.performDumbAwareWithCallbacks(ScopeChooserAction.this, event, ScopeChooserAction.this::onProjectScopeToggled);
}
});
return component;
}
@Override
public void update(@NotNull AnActionEvent e) {
ScopeDescriptor selection = getSelectedScope();
String name = StringUtil.trimMiddle(StringUtil.notNullize(selection.getDisplayName()), 30);
String text = StringUtil.escapeMnemonics(name).replaceFirst("(?i)([" + TOGGLE + CHOOSE + "])", "_$1");
e.getPresentation().setText(text);
e.getPresentation().setIcon(OffsetIcon.getOriginalIcon(selection.getIcon()));
String shortcutText = KeymapUtil.getKeystrokeText(KeyStroke.getKeyStroke(
CHOOSE, MnemonicHelper.getFocusAcceleratorKeyMask(), true));
String shortcutText2 = KeymapUtil.getKeystrokeText(KeyStroke.getKeyStroke(
TOGGLE, MnemonicHelper.getFocusAcceleratorKeyMask(), true));
e.getPresentation().setDescription(LangBundle.message("action.choose.scope.p.toggle.scope.description", shortcutText, shortcutText2));
JComponent button = e.getPresentation().getClientProperty(CustomComponentAction.COMPONENT_KEY);
if (button != null) {
button.setBackground(selection.getColor());
}
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
    // This action is rendered as a custom toolbar component; without that
    // component there is nothing to anchor the popup to, so bail out early.
    JComponent button = e.getPresentation().getClientProperty(CustomComponentAction.COMPONENT_KEY);
    if (button == null || !button.isValid()) return;
    // Renderer shared by the "probe" pass below and by the popup list itself.
    ListCellRenderer<ScopeDescriptor> renderer = new ListCellRenderer<>() {
        final ListCellRenderer<ScopeDescriptor> delegate = ScopeChooserCombo.createDefaultRenderer();
        @Override
        public Component getListCellRendererComponent(JList<? extends ScopeDescriptor> list,
                                                      ScopeDescriptor value,
                                                      int index,
                                                      boolean isSelected,
                                                      boolean cellHasFocus) {
            // copied from DarculaJBPopupComboPopup.customizeListRendererComponent()
            Component component = delegate.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
            if (component instanceof JComponent &&
                !(component instanceof JSeparator || component instanceof TitledSeparator)) {
                ((JComponent)component).setBorder(JBUI.Borders.empty(2, 8));
            }
            return component;
        }
    };
    // Collect the items to show: separator rows are kept for visual grouping,
    // and otherwise only descriptors with a non-null GlobalSearchScope pass.
    List<ScopeDescriptor> items = new ArrayList<>();
    JList<ScopeDescriptor> fakeList = new JBList<>();
    processScopes(o -> {
        // Render each descriptor once into a throwaway list to detect whether
        // it produces a separator component instead of a regular row.
        Component c = renderer.getListCellRendererComponent(fakeList, o, -1, false, false);
        if (c instanceof JSeparator || c instanceof TitledSeparator ||
            !o.scopeEquals(null) && o.getScope() instanceof GlobalSearchScope) {
            items.add(o);
        }
        return true;
    });
    BaseListPopupStep<ScopeDescriptor> step = new BaseListPopupStep<>("", items) {
        @Nullable
        @Override
        public PopupStep<?> onChosen(ScopeDescriptor selectedValue, boolean finalChoice) {
            onScopeSelected(selectedValue);
            // Refresh the toolbar immediately so the button text/color reflect
            // the newly selected scope without waiting for the next update pass.
            ActionToolbar toolbar = ActionToolbar.findToolbarBy(button);
            if (toolbar != null) toolbar.updateActionsImmediately();
            return FINAL_CHOICE;
        }
        @Override
        public boolean isSpeedSearchEnabled() {
            return true;
        }
        @NotNull
        @Override
        public String getTextFor(ScopeDescriptor value) {
            // Separator rows (no GlobalSearchScope) contribute no speed-search text.
            return value.getScope() instanceof GlobalSearchScope ? StringUtil.notNullize(value.getDisplayName()) : "";
        }
        @Override
        public boolean isSelectable(ScopeDescriptor value) {
            // Separators remain visible but cannot be chosen.
            return value.getScope() instanceof GlobalSearchScope;
        }
    };
    // Pre-select the row matching the current scope, matched by display name.
    ScopeDescriptor selection = getSelectedScope();
    step.setDefaultOptionIndex(ContainerUtil.indexOf(items, o -> Objects.equals(o.getDisplayName(), selection.getDisplayName())));
    ListPopupImpl popup = new ListPopupImpl(e.getProject(), step);
    popup.setMaxRowCount(10);
    //noinspection unchecked
    popup.getList().setCellRenderer(renderer);
    popup.showUnderneathOf(button);
}
}
/**
 * Thin adapter that exposes a {@link ChooseByNameModel} through the
 * {@link ChooseByNameViewModel} interface with fixed policy choices:
 * no list for an empty pattern and no size limit (0).
 */
private static final class MyViewModel implements ChooseByNameViewModel {
    private final Project project;
    private final ChooseByNameModel model;

    private MyViewModel(Project project, ChooseByNameModel model) {
        this.project = project;
        this.model = model;
    }

    @Override
    public Project getProject() {
        return project;
    }

    @Override
    public @NotNull ChooseByNameModel getModel() {
        return model;
    }

    @Override
    public boolean isSearchInAnyPlace() {
        // Delegate the middle-matching policy to the wrapped model.
        return model.useMiddleMatching();
    }

    @Override
    public @NotNull String transformPattern(@NotNull String pattern) {
        return ChooseByNamePopup.getTransformedPattern(pattern, model);
    }

    @Override
    public boolean canShowListForEmptyPattern() {
        return false;
    }

    @Override
    public int getMaximumListSizeLimit() {
        return 0;
    }
}
}
|
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.impala.analysis;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import org.apache.impala.analysis.Path.PathType;
import org.apache.impala.catalog.Table;
import org.apache.impala.catalog.TableLoadingException;
import org.apache.impala.catalog.Type;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.thrift.TExprNode;
import org.apache.impala.thrift.TExprNodeType;
import org.apache.impala.thrift.TSlotRef;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
/**
 * Reference to a slot (column or nested field). Can be constructed from a raw
 * path written in the query, from an alias only ("dummy" ref used in
 * substitution maps), or directly from an already-analyzed SlotDescriptor.
 */
public class SlotRef extends Expr {
    private final static Logger LOG = LoggerFactory.getLogger(SlotRef.class);

    // Raw, unresolved path as written in the query. Null for "dummy" refs
    // (alias-only c'tor) and for pre-analyzed refs over non-scan slots.
    private final List<String> rawPath_;
    private final String label_; // printed in toSql()

    // Results of analysis.
    private SlotDescriptor desc_;

    public SlotRef(ArrayList<String> rawPath) {
        super();
        rawPath_ = rawPath;
        label_ = ToSqlUtils.getPathSql(rawPath_);
    }

    /**
     * C'tor for a "dummy" SlotRef used in substitution maps.
     */
    public SlotRef(String alias) {
        super();
        rawPath_ = null;
        // Relies on the label_ being compared in equals().
        label_ = ToSqlUtils.getIdentSql(alias.toLowerCase());
    }

    /**
     * C'tor for a "pre-analyzed" ref to a slot. Marks the expr analyzed and
     * copies type/cost/NDV stats straight from the descriptor.
     */
    public SlotRef(SlotDescriptor desc) {
        super();
        if (desc.isScanSlot()) {
            rawPath_ = desc.getPath().getRawPath();
        } else {
            rawPath_ = null;
        }
        isAnalyzed_ = true;
        desc_ = desc;
        type_ = desc.getType();
        evalCost_ = SLOT_REF_COST;
        String alias = desc.getParent().getAlias();
        label_ = (alias != null ? alias + "." : "") + desc.getLabel();
        numDistinctValues_ = desc.getStats().getNumDistinctValues();
    }

    /**
     * C'tor for cloning.
     */
    private SlotRef(SlotRef other) {
        super(other);
        rawPath_ = other.rawPath_;
        label_ = other.label_;
        desc_ = other.desc_;
        type_ = other.type_;
        isAnalyzed_ = other.isAnalyzed_;
    }

    /**
     * Resolves the raw path against registered aliases, registers the slot,
     * and populates type/cost/NDV. Rejects unsupported or unparseable types.
     * @throws AnalysisException on unresolved paths or unsupported types.
     */
    @Override
    public void analyze(Analyzer analyzer) throws AnalysisException {
        if (isAnalyzed_) return;
        super.analyze(analyzer);
        Path resolvedPath = null;
        try {
            resolvedPath = analyzer.resolvePath(rawPath_, PathType.SLOT_REF);
        } catch (TableLoadingException e) {
            // Should never happen because we only check registered table aliases.
            Preconditions.checkState(false);
        }
        Preconditions.checkNotNull(resolvedPath);
        desc_ = analyzer.registerSlotRef(resolvedPath);
        type_ = desc_.getType();
        if (!type_.isSupported()) {
            throw new AnalysisException("Unsupported type '"
                + type_.toSql() + "' in '" + toSql() + "'.");
        }
        if (type_.isInvalid()) {
            // In this case, the metastore contained a string we can't parse at all
            // e.g. map. We could report a better error if we stored the original
            // HMS string.
            throw new AnalysisException("Unsupported type in '" + toSql() + "'.");
        }
        evalCost_ = SLOT_REF_COST;
        numDistinctValues_ = desc_.getStats().getNumDistinctValues();
        Table rootTable = resolvedPath.getRootTable();
        if (rootTable != null && rootTable.getNumRows() > 0) {
            // The NDV cannot exceed the #rows in the table.
            numDistinctValues_ = Math.min(numDistinctValues_, rootTable.getNumRows());
        }
        isAnalyzed_ = true;
    }

    @Override
    public boolean isConstant() { return false; }

    public SlotDescriptor getDesc() {
        Preconditions.checkState(isAnalyzed_);
        Preconditions.checkNotNull(desc_);
        return desc_;
    }

    public SlotId getSlotId() {
        Preconditions.checkState(isAnalyzed_);
        Preconditions.checkNotNull(desc_);
        return desc_.getId();
    }

    public Path getResolvedPath() {
        Preconditions.checkState(isAnalyzed_);
        return desc_.getPath();
    }

    @Override
    public String toSqlImpl() {
        if (label_ != null) return label_;
        if (rawPath_ != null) return ToSqlUtils.getPathSql(rawPath_);
        return "<slot " + Integer.toString(desc_.getId().asInt()) + ">";
    }

    @Override
    protected void toThrift(TExprNode msg) {
        msg.node_type = TExprNodeType.SLOT_REF;
        msg.slot_ref = new TSlotRef(desc_.getId().asInt());
        // we shouldn't be sending exprs over non-materialized slots
        Preconditions.checkState(desc_.isMaterialized(), String.format(
            "Illegal reference to non-materialized slot: tid=%s sid=%s",
            desc_.getParent().getId(), desc_.getId()));
        // check that the tuples associated with this slot are executable
        desc_.getParent().checkIsExecutable();
        if (desc_.getItemTupleDesc() != null) desc_.getItemTupleDesc().checkIsExecutable();
    }

    @Override
    public String debugString() {
        Objects.ToStringHelper toStrHelper = Objects.toStringHelper(this);
        if (rawPath_ != null) toStrHelper.add("path", Joiner.on('.').join(rawPath_));
        toStrHelper.add("type", type_.toSql());
        String idStr = (desc_ == null ? "null" : Integer.toString(desc_.getId().asInt()));
        toStrHelper.add("id", idStr);
        return toStrHelper.toString();
    }

    @Override
    public int hashCode() {
        if (desc_ != null) return desc_.getId().hashCode();
        if (rawPath_ != null) {
            return Objects.hashCode(Joiner.on('.').join(rawPath_).toLowerCase());
        }
        // Fix: "dummy" refs (alias-only c'tor) have a null rawPath_; joining it
        // threw an NPE. Fall back to the label, lower-cased to stay consistent
        // with the case-insensitive label comparison in equals().
        return Objects.hashCode(label_ == null ? null : label_.toLowerCase());
    }

    @Override
    public boolean equals(Object obj) {
        if (!super.equals(obj)) return false;
        SlotRef other = (SlotRef) obj;
        // check slot ids first; if they're both set we only need to compare those
        // (regardless of how the ref was constructed)
        if (desc_ != null && other.desc_ != null) {
            return desc_.getId().equals(other.desc_.getId());
        }
        if ((label_ == null) != (other.label_ == null)) return false;
        // Fix: guard the dereference — when both labels are null the old code
        // called label_.equalsIgnoreCase(null-receiver) and threw an NPE.
        if (label_ != null && !label_.equalsIgnoreCase(other.label_)) return false;
        return true;
    }

    @Override
    public boolean isBoundByTupleIds(List<TupleId> tids) {
        Preconditions.checkState(desc_ != null);
        for (TupleId tid: tids) {
            if (tid.equals(desc_.getParent().getId())) return true;
        }
        return false;
    }

    @Override
    public boolean isBoundBySlotIds(List<SlotId> slotIds) {
        Preconditions.checkState(isAnalyzed_);
        return slotIds.contains(desc_.getId());
    }

    @Override
    public void getIdsHelper(Set<TupleId> tupleIds, Set<SlotId> slotIds) {
        Preconditions.checkState(type_.isValid());
        Preconditions.checkState(desc_ != null);
        if (slotIds != null) slotIds.add(desc_.getId());
        if (tupleIds != null) tupleIds.add(desc_.getParent().getId());
    }

    @Override
    public Expr clone() { return new SlotRef(this); }

    @Override
    public String toString() {
        if (desc_ != null) {
            return "tid=" + desc_.getParent().getId() + " sid=" + desc_.getId();
        }
        return "no desc set";
    }

    @Override
    protected Expr uncheckedCastTo(Type targetType) throws AnalysisException {
        if (type_.isNull()) {
            // Hack to prevent null SlotRefs in the BE
            return NullLiteral.create(targetType);
        } else {
            return super.uncheckedCastTo(targetType);
        }
    }
}
|
|
/*
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.cas.ticket.registry;
import de.flapdoodle.embed.memcached.Command;
import de.flapdoodle.embed.memcached.MemcachedExecutable;
import de.flapdoodle.embed.memcached.MemcachedProcess;
import de.flapdoodle.embed.memcached.MemcachedStarter;
import de.flapdoodle.embed.memcached.config.ArtifactStoreBuilder;
import de.flapdoodle.embed.memcached.config.DownloadConfigBuilder;
import de.flapdoodle.embed.memcached.config.MemcachedConfig;
import de.flapdoodle.embed.memcached.config.RuntimeConfigBuilder;
import de.flapdoodle.embed.memcached.distribution.Version;
import de.flapdoodle.embed.process.config.store.IDownloadConfig;
import de.flapdoodle.embed.process.io.progress.StandardConsoleProgressListener;
import org.jasig.cas.TestUtils;
import org.jasig.cas.ticket.ServiceTicket;
import org.jasig.cas.ticket.TicketGrantingTicket;
import org.jasig.cas.ticket.TicketGrantingTicketImpl;
import org.jasig.cas.ticket.support.NeverExpiresExpirationPolicy;
import org.jasig.cas.authentication.principal.Service;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import java.io.IOException;
import java.net.Socket;
import java.util.Arrays;
import java.util.Collection;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.slf4j.LoggerFactory.*;
/**
* Unit test for MemCacheTicketRegistry class.
*
* @author Middleware Services
* @since 3.0.0
*/
@RunWith(Parameterized.class)
public class MemCacheTicketRegistryTests {
private static final Logger LOGGER = getLogger(MemCacheTicketRegistryTests.class);
private static final int PORT = 11211;
private static MemcachedExecutable MEMCACHED_EXECUTABLE;
private static MemcachedProcess MEMCACHED;
private MemCacheTicketRegistry registry;
private final String registryBean;
private final boolean binaryProtocol;
public MemCacheTicketRegistryTests(final String beanName, final boolean binary) {
registryBean = beanName;
binaryProtocol = binary;
}
@Parameterized.Parameters
public static Collection<Object[]> getTestParameters() throws Exception {
return Arrays.asList(new Object[] {"testCase1", false}, new Object[] {"testCase2", true});
}
@BeforeClass
public static void beforeClass() throws IOException {
try {
final MemcachedStarter runtime = MemcachedStarter.getInstance(
new CasRuntimeConfigBuilder().defaults(Command.MemcacheD).build());
MEMCACHED_EXECUTABLE = runtime.prepare(new MemcachedConfig(Version.V1_4_22, PORT));
MEMCACHED = MEMCACHED_EXECUTABLE.start();
} catch (final Exception e) {
LOGGER.warn("Aborting since no memcached server could be started.", e);
}
}
@AfterClass
public static void afterClass() throws Exception {
if (MEMCACHED != null && MEMCACHED.isProcessRunning()) {
MEMCACHED.stop();
}
if (MEMCACHED_EXECUTABLE != null) {
MEMCACHED_EXECUTABLE.stop();
}
}
@Before
public void setUp() throws IOException {
// Abort tests if there is no memcached server available on localhost:11211.
final boolean environmentOk = isMemcachedListening();
if (!environmentOk) {
LOGGER.warn("Aborting test since no memcached server is available on localhost.");
}
Assume.assumeTrue(environmentOk);
final ApplicationContext context = new ClassPathXmlApplicationContext("/ticketRegistry-test.xml");
registry = context.getBean(registryBean, MemCacheTicketRegistry.class);
}
@Test
public void verifyWriteGetDelete() throws Exception {
final String id = "ST-1234567890ABCDEFGHIJKL-crud";
final ServiceTicket ticket = mock(ServiceTicket.class, withSettings().serializable());
when(ticket.getId()).thenReturn(id);
registry.addTicket(ticket);
final ServiceTicket ticketFromRegistry = (ServiceTicket) registry.getTicket(id);
Assert.assertNotNull(ticketFromRegistry);
Assert.assertEquals(id, ticketFromRegistry.getId());
registry.deleteTicket(id);
Assert.assertNull(registry.getTicket(id));
}
@Test
public void verifyExpiration() throws Exception {
final String id = "ST-1234567890ABCDEFGHIJKL-exp";
final ServiceTicket ticket = mock(ServiceTicket.class, withSettings().serializable());
when(ticket.getId()).thenReturn(id);
registry.addTicket(ticket);
Assert.assertNotNull(registry.getTicket(id, ServiceTicket.class));
// Sleep a little longer than service ticket expiry defined in Spring context
Thread.sleep(2100);
Assert.assertNull(registry.getTicket(id, ServiceTicket.class));
}
@Test
public void verifyDeleteTicketWithChildren() throws Exception {
this.registry.addTicket(new TicketGrantingTicketImpl(
"TGT", TestUtils.getAuthentication(), new NeverExpiresExpirationPolicy()));
final TicketGrantingTicket tgt = this.registry.getTicket(
"TGT", TicketGrantingTicket.class);
final Service service = TestUtils.getService("TGT_DELETE_TEST");
final ServiceTicket st1 = tgt.grantServiceTicket(
"ST1", service, new NeverExpiresExpirationPolicy(), true);
final ServiceTicket st2 = tgt.grantServiceTicket(
"ST2", service, new NeverExpiresExpirationPolicy(), true);
final ServiceTicket st3 = tgt.grantServiceTicket(
"ST3", service, new NeverExpiresExpirationPolicy(), true);
this.registry.addTicket(st1);
this.registry.addTicket(st2);
this.registry.addTicket(st3);
assertNotNull(this.registry.getTicket("TGT", TicketGrantingTicket.class));
assertNotNull(this.registry.getTicket("ST1", ServiceTicket.class));
assertNotNull(this.registry.getTicket("ST2", ServiceTicket.class));
assertNotNull(this.registry.getTicket("ST3", ServiceTicket.class));
this.registry.deleteTicket(tgt.getId());
assertNull(this.registry.getTicket("TGT", TicketGrantingTicket.class));
assertNull(this.registry.getTicket("ST1", ServiceTicket.class));
assertNull(this.registry.getTicket("ST2", ServiceTicket.class));
assertNull(this.registry.getTicket("ST3", ServiceTicket.class));
}
private boolean isMemcachedListening() {
try (final Socket socket = new Socket("127.0.0.1", PORT)) {
return true;
} catch (final Exception e) {
return false;
}
}
private static class CasRuntimeConfigBuilder extends RuntimeConfigBuilder {
@Override
public RuntimeConfigBuilder defaults(final Command command) {
final RuntimeConfigBuilder builder = super.defaults(command);
final IDownloadConfig downloadConfig = new CasDownloadConfigBuilder()
.defaultsForCommand(command)
.progressListener(new StandardConsoleProgressListener())
.build();
this.artifactStore().overwriteDefault(new ArtifactStoreBuilder()
.defaults(command).download(downloadConfig).build());
return builder;
}
}
/**
* Download an embedded memcached instance based on environment.
*/
private static class CasDownloadConfigBuilder extends DownloadConfigBuilder {
@Override
public DownloadConfigBuilder defaults() {
final DownloadConfigBuilder bldr = super.defaults();
bldr.downloadPath("http://heli0s.darktech.org/memcached/");
return bldr;
}
}
}
|
|
package org.json;
/*
Copyright (c) 2002 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import java.io.IOException;
import java.io.Writer;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
/**
* A JSONArray is an ordered sequence of values. Its external text form is a
* string wrapped in square brackets with commas separating the values. The
* internal form is an object having <code>get</code> and <code>opt</code>
* methods for accessing the values by index, and <code>put</code> methods for
* adding or replacing values. The values can be any of these types:
* <code>Boolean</code>, <code>JSONArray</code>, <code>JSONObject</code>,
* <code>Number</code>, <code>String</code>, or the
* <code>JSONObject.NULL object</code>.
* <p>
* The constructor can convert a JSON text into a Java object. The
* <code>toString</code> method converts to JSON text.
* <p>
* A <code>get</code> method returns a value if one can be found, and throws an
* exception if one cannot be found. An <code>opt</code> method returns a
* default value instead of throwing an exception, and so is useful for
* obtaining optional values.
* <p>
* The generic <code>get()</code> and <code>opt()</code> methods return an
* object which you can cast or query for type. There are also typed
* <code>get</code> and <code>opt</code> methods that do type checking and type
* coercion for you.
* <p>
* The texts produced by the <code>toString</code> methods strictly conform to
* JSON syntax rules. The constructors are more forgiving in the texts they will
* accept:
* <ul>
* <li>An extra <code>,</code> <small>(comma)</small> may appear just
* before the closing bracket.</li>
* <li>The <code>null</code> value will be inserted when there
* is <code>,</code> <small>(comma)</small> elision.</li>
* <li>Strings may be quoted with <code>'</code> <small>(single
* quote)</small>.</li>
* <li>Strings do not need to be quoted at all if they do not begin with a quote
* or single quote, and if they do not contain leading or trailing spaces,
* and if they do not contain any of these characters:
* <code>{ } [ ] / \ : , = ; #</code> and if they do not look like numbers
* and if they are not the reserved words <code>true</code>,
* <code>false</code>, or <code>null</code>.</li>
* <li>Values can be separated by <code>;</code> <small>(semicolon)</small> as
* well as by <code>,</code> <small>(comma)</small>.</li>
* </ul>
* @author JSON.org
* @version 2011-11-24
*/
public class JSONArray {
/**
 * The arrayList where the JSONArray's properties are kept.
 * Raw type is deliberate (pre-generics codebase); elements may be any JSON
 * value type, including JSONObject.NULL.
 */
private final ArrayList myArrayList;
/**
 * Construct an empty JSONArray.
 */
public JSONArray() {
    this.myArrayList = new ArrayList();
}
/**
 * Construct a JSONArray from a JSONTokener. Consumes tokens up to and
 * including the closing ']'.
 * @param x A JSONTokener
 * @throws JSONException If there is a syntax error.
 */
public JSONArray(JSONTokener x) throws JSONException {
    this();
    if (x.nextClean() != '[') {
        throw x.syntaxError("A JSONArray text must start with '['");
    }
    // An immediate ']' means the empty array; otherwise push the char back
    // and parse value/delimiter pairs.
    if (x.nextClean() != ']') {
        x.back();
        for (;;) {
            if (x.nextClean() == ',') {
                // Comma elision ("[,1]" or "1,,2"): a missing value is
                // recorded as JSON null.
                x.back();
                this.myArrayList.add(JSONObject.NULL);
            } else {
                x.back();
                this.myArrayList.add(x.nextValue());
            }
            // After each value expect a separator (',' or the lenient ';')
            // or the closing bracket.
            switch (x.nextClean()) {
            case ';':
            case ',':
                if (x.nextClean() == ']') {
                    // Trailing comma before ']' is tolerated.
                    return;
                }
                x.back();
                break;
            case ']':
                return;
            default:
                throw x.syntaxError("Expected a ',' or ']'");
            }
        }
    }
}
/**
 * Construct a JSONArray from a source JSON text. Delegates to the
 * JSONTokener constructor.
 * @param source A string that begins with
 * <code>[</code> <small>(left bracket)</small>
 * and ends with <code>]</code> <small>(right bracket)</small>.
 * @throws JSONException If there is a syntax error.
 */
public JSONArray(String source) throws JSONException {
    this(new JSONTokener(source));
}
/**
 * Construct a JSONArray from a Collection. Each element is wrapped via
 * JSONObject.wrap(). A null collection yields an empty array.
 * @param collection A Collection.
 */
public JSONArray(Collection collection) {
    this.myArrayList = new ArrayList();
    if (collection != null) {
        for (Object element : collection) {
            this.myArrayList.add(JSONObject.wrap(element));
        }
    }
}
/**
 * Construct a JSONArray from a Java array, wrapping each element.
 * @param array An object that must be a Java array.
 * @throws JSONException If not an array.
 */
public JSONArray(Object array) throws JSONException {
    this();
    // Guard first: anything that is not a reflective array is rejected.
    if (!array.getClass().isArray()) {
        throw new JSONException(
            "JSONArray initial value should be a string or collection or array.");
    }
    int length = Array.getLength(array);
    for (int i = 0; i < length; i += 1) {
        this.put(JSONObject.wrap(Array.get(array, i)));
    }
}
/**
 * Get the object value associated with an index.
 * @param index
 *  The index must be between 0 and length() - 1.
 * @return An object value.
 * @throws JSONException If there is no value for the index.
 */
public Object get(int index) throws JSONException {
    Object value = this.opt(index);
    if (value != null) {
        return value;
    }
    throw new JSONException("JSONArray[" + index + "] not found.");
}
/**
 * Get the boolean value associated with an index.
 * The string values "true" and "false" (any case) are converted to boolean.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The truth.
 * @throws JSONException If there is no value for the index or if the
 *  value is not convertible to boolean.
 */
public boolean getBoolean(int index) throws JSONException {
    Object value = this.get(index);
    if (Boolean.FALSE.equals(value)
            || (value instanceof String
                && "false".equalsIgnoreCase((String) value))) {
        return false;
    }
    if (Boolean.TRUE.equals(value)
            || (value instanceof String
                && "true".equalsIgnoreCase((String) value))) {
        return true;
    }
    throw new JSONException("JSONArray[" + index + "] is not a boolean.");
}
/**
 * Get the double value associated with an index.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 * @throws JSONException If the key is not found or if the value cannot
 *  be converted to a number.
 */
public double getDouble(int index) throws JSONException {
    Object value = this.get(index);
    try {
        if (value instanceof Number) {
            return ((Number) value).doubleValue();
        }
        // Non-numbers fall through to String parsing; a ClassCastException or
        // NumberFormatException is translated into a JSONException below.
        return Double.parseDouble((String) value);
    } catch (Exception ignored) {
        throw new JSONException("JSONArray[" + index + "] is not a number.");
    }
}
/**
 * Get the int value associated with an index.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 * @throws JSONException If the key is not found or if the value is not a number.
 */
public int getInt(int index) throws JSONException {
    Object value = this.get(index);
    try {
        if (value instanceof Number) {
            return ((Number) value).intValue();
        }
        return Integer.parseInt((String) value);
    } catch (Exception ignored) {
        throw new JSONException("JSONArray[" + index + "] is not a number.");
    }
}
/**
 * Get the JSONArray associated with an index.
 * @param index The index must be between 0 and length() - 1.
 * @return A JSONArray value.
 * @throws JSONException If there is no value for the index. or if the
 *  value is not a JSONArray
 */
public JSONArray getJSONArray(int index) throws JSONException {
    Object value = this.get(index);
    if (!(value instanceof JSONArray)) {
        throw new JSONException("JSONArray[" + index + "] is not a JSONArray.");
    }
    return (JSONArray) value;
}
/**
 * Get the JSONObject associated with an index.
 * @param index subscript
 * @return A JSONObject value.
 * @throws JSONException If there is no value for the index or if the
 *  value is not a JSONObject
 */
public JSONObject getJSONObject(int index) throws JSONException {
    Object value = this.get(index);
    if (!(value instanceof JSONObject)) {
        throw new JSONException("JSONArray[" + index + "] is not a JSONObject.");
    }
    return (JSONObject) value;
}
/**
 * Get the long value associated with an index.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 * @throws JSONException If the key is not found or if the value cannot
 *  be converted to a number.
 */
public long getLong(int index) throws JSONException {
    Object value = this.get(index);
    try {
        if (value instanceof Number) {
            return ((Number) value).longValue();
        }
        return Long.parseLong((String) value);
    } catch (Exception ignored) {
        throw new JSONException("JSONArray[" + index + "] is not a number.");
    }
}
/**
 * Get the string associated with an index. No coercion is performed; use
 * optString for a lenient variant.
 * @param index The index must be between 0 and length() - 1.
 * @return A string value.
 * @throws JSONException If there is no string value for the index.
 */
public String getString(int index) throws JSONException {
    Object value = this.get(index);
    if (!(value instanceof String)) {
        throw new JSONException("JSONArray[" + index + "] not a string.");
    }
    return (String) value;
}
/**
 * Determine if the value is null.
 * @param index The index must be between 0 and length() - 1.
 * @return true if the value at the index is null, or if there is no value.
 */
public boolean isNull(int index) {
    Object value = this.opt(index);
    return JSONObject.NULL.equals(value);
}
/**
 * Make a string from the contents of this JSONArray. The
 * <code>separator</code> string is inserted between each element.
 * Warning: This method assumes that the data structure is acyclical.
 * @param separator A string that will be inserted between the elements.
 * @return a string.
 * @throws JSONException If the array contains an invalid number.
 */
public String join(String separator) throws JSONException {
    StringBuffer buffer = new StringBuffer();
    int count = this.length();
    for (int i = 0; i < count; i += 1) {
        // Separator goes before every element except the first.
        if (i > 0) {
            buffer.append(separator);
        }
        buffer.append(JSONObject.valueToString(this.myArrayList.get(i)));
    }
    return buffer.toString();
}
/**
 * Get the number of elements in the JSONArray, included nulls.
 *
 * @return The length (or size).
 */
public int length() {
    return this.myArrayList.size();
}
/**
 * Get the optional object value associated with an index.
 * @param index The index must be between 0 and length() - 1.
 * @return An object value, or null if there is no
 * object at that index.
 */
public Object opt(int index) {
    if (index < 0 || index >= this.length()) {
        // Out-of-range indexes are not an error here; the get() family layers
        // the exception on top of this null.
        return null;
    }
    return this.myArrayList.get(index);
}
/**
 * Get the optional boolean value associated with an index.
 * It returns false if there is no value at that index,
 * or if the value is not Boolean.TRUE or the String "true".
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The truth.
 */
public boolean optBoolean(int index) {
    // Delegate with false as the fallback default.
    return this.optBoolean(index, false);
}
/**
 * Get the optional boolean value associated with an index.
 * It returns the defaultValue if there is no value at that index or if
 * it is not a Boolean or the String "true" or "false" (case insensitive).
 *
 * @param index The index must be between 0 and length() - 1.
 * @param defaultValue A boolean default.
 * @return The truth.
 */
public boolean optBoolean(int index, boolean defaultValue) {
    try {
        return this.getBoolean(index);
    } catch (Exception ignored) {
        // Missing or non-boolean value: fall back to the caller's default.
        return defaultValue;
    }
}
/**
 * Get the optional double value associated with an index.
 * NaN is returned if there is no value for the index,
 * or if the value is not a number and cannot be converted to a number.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 */
public double optDouble(int index) {
    // NaN is the conventional "absent" marker for doubles here.
    return this.optDouble(index, Double.NaN);
}
/**
 * Get the optional double value associated with an index.
 * The defaultValue is returned if there is no value for the index,
 * or if the value is not a number and cannot be converted to a number.
 *
 * @param index subscript
 * @param defaultValue The default value.
 * @return The value.
 */
public double optDouble(int index, double defaultValue) {
    try {
        return this.getDouble(index);
    } catch (Exception ignored) {
        // Missing or non-numeric value: use the caller's default.
        return defaultValue;
    }
}
/**
 * Get the optional int value associated with an index.
 * Zero is returned if there is no value for the index,
 * or if the value is not a number and cannot be converted to a number.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 */
public int optInt(int index) {
    // Zero is the fallback default for ints.
    return this.optInt(index, 0);
}
/**
 * Get the optional int value associated with an index.
 * The defaultValue is returned if there is no value for the index,
 * or if the value is not a number and cannot be converted to a number.
 * @param index The index must be between 0 and length() - 1.
 * @param defaultValue The default value.
 * @return The value.
 */
public int optInt(int index, int defaultValue) {
    try {
        return this.getInt(index);
    } catch (Exception ignored) {
        // Missing or non-numeric value: use the caller's default.
        return defaultValue;
    }
}
/**
 * Get the optional JSONArray associated with an index.
 * @param index subscript
 * @return A JSONArray value, or null if the index has no value,
 * or if the value is not a JSONArray.
 */
public JSONArray optJSONArray(int index) {
    Object value = this.opt(index);
    if (value instanceof JSONArray) {
        return (JSONArray) value;
    }
    return null;
}
/**
 * Get the optional JSONObject associated with an index.
 * Null is returned if the key is not found, or null if the index has
 * no value, or if the value is not a JSONObject.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return A JSONObject value.
 */
public JSONObject optJSONObject(int index) {
    Object value = this.opt(index);
    if (value instanceof JSONObject) {
        return (JSONObject) value;
    }
    return null;
}
/**
 * Get the optional long value associated with an index.
 * Zero is returned if there is no value for the index,
 * or if the value is not a number and cannot be converted to a number.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return The value.
 */
public long optLong(int index) {
    // Zero is the fallback default for longs.
    return this.optLong(index, 0);
}
/**
 * Get the optional long value associated with an index.
 * The defaultValue is returned if there is no value for the index,
 * or if the value is not a number and cannot be converted to a number.
 * @param index The index must be between 0 and length() - 1.
 * @param defaultValue The default value.
 * @return The value.
 */
public long optLong(int index, long defaultValue) {
    try {
        return this.getLong(index);
    } catch (Exception ignored) {
        // Missing or non-numeric value: use the caller's default.
        return defaultValue;
    }
}
/**
 * Get the optional string value associated with an index. It returns an
 * empty string if there is no value at that index. If the value
 * is not a string and is not null, then it is coverted to a string.
 *
 * @param index The index must be between 0 and length() - 1.
 * @return A String value.
 */
public String optString(int index) {
    // Empty string is the fallback default.
    return this.optString(index, "");
}
/**
 * Get the optional string associated with an index.
 * The defaultValue is returned if there is no value at that index or the
 * value is JSON null; any other value is converted with toString().
 *
 * @param index The index must be between 0 and length() - 1.
 * @param defaultValue The default value.
 * @return A String value.
 */
public String optString(int index, String defaultValue) {
    Object object = this.opt(index);
    // Bug fix: the ternary branches were inverted. Previously a missing index
    // (opt() returns null, which JSONObject.NULL.equals() accepts) hit
    // object.toString() and threw an NPE, while real values returned the
    // default. Absent/null must yield defaultValue; real values stringify.
    return JSONObject.NULL.equals(object)
        ? defaultValue
        : object.toString();
}
/**
 * Append a boolean value. This increases the array's length by one.
 *
 * @param value A boolean value.
 * @return this.
 */
public JSONArray put(boolean value) {
    // Boolean.valueOf returns the same cached TRUE/FALSE instances.
    this.put(Boolean.valueOf(value));
    return this;
}
/**
 * Put a value in the JSONArray, where the value will be a
 * JSONArray which is produced from a Collection.
 * @param value A Collection value.
 * @return this.
 */
public JSONArray put(Collection value) {
    // Wrap the collection and delegate to put(Object), which returns this.
    return this.put(new JSONArray(value));
}
/**
 * Append a double value. This increases the array's length by one.
 *
 * @param value A double value.
 * @throws JSONException if the value is not finite (NaN or infinite).
 * @return this.
 */
public JSONArray put(double value) throws JSONException {
    // Double.valueOf instead of the deprecated Double(double) constructor.
    Double boxed = Double.valueOf(value);
    // Reject NaN/infinite values before storing.
    JSONObject.testValidity(boxed);
    this.put(boxed);
    return this;
}
/**
 * Append an int value. This increases the array's length by one.
 *
 * @param value An int value.
 * @return this.
 */
public JSONArray put(int value) {
    // Integer.valueOf instead of the deprecated Integer(int) constructor;
    // it also uses the small-value cache.
    this.put(Integer.valueOf(value));
    return this;
}
/**
 * Append a long value. This increases the array's length by one.
 *
 * @param value A long value.
 * @return this.
 */
public JSONArray put(long value) {
    // Long.valueOf instead of the deprecated Long(long) constructor.
    this.put(Long.valueOf(value));
    return this;
}
/**
 * Put a value in the JSONArray, where the value will be a
 * JSONObject which is produced from a Map.
 * @param value A Map value.
 * @return this.
 */
public JSONArray put(Map value) {
    // Wrap the map and delegate to put(Object), which returns this.
    return this.put(new JSONObject(value));
}
/**
 * Append an object value. This increases the array's length by one.
 * @param value An object value. The value should be a
 * Boolean, Double, Integer, JSONArray, JSONObject, Long, or String, or the
 * JSONObject.NULL object. No validity check is performed here; overloads
 * that need one (e.g. put(double)) validate before delegating.
 * @return this.
 */
public JSONArray put(Object value) {
    this.myArrayList.add(value);
    return this;
}
/**
 * Put or replace a boolean value in the JSONArray. If the index is greater
 * than the length of the JSONArray, then null elements will be added as
 * necessary to pad it out.
 * @param index The subscript.
 * @param value A boolean value.
 * @return this.
 * @throws JSONException If the index is negative.
 */
public JSONArray put(int index, boolean value) throws JSONException {
    // Box via the cached Boolean constants and delegate to put(int, Object).
    return this.put(index, Boolean.valueOf(value));
}
/**
 * Put a value in the JSONArray, where the value will be a
 * JSONArray which is produced from a Collection.
 * @param index The subscript.
 * @param value A Collection value.
 * @return this.
 * @throws JSONException If the index is negative or if the value is
 *  not finite.
 */
public JSONArray put(int index, Collection value) throws JSONException {
    // Wrap the collection and delegate to put(int, Object), which returns this.
    return this.put(index, new JSONArray(value));
}
/**
 * Put or replace a double value. If the index is greater than the length of
 * the JSONArray, then null elements will be added as necessary to pad
 * it out.
 * @param index The subscript.
 * @param value A double value.
 * @return this.
 * @throws JSONException If the index is negative or if the value is
 *  not finite (put(int, Object) runs testValidity on the boxed value).
 */
public JSONArray put(int index, double value) throws JSONException {
    // Double.valueOf instead of the deprecated Double(double) constructor.
    this.put(index, Double.valueOf(value));
    return this;
}
/**
 * Put or replace an int value. If the index is greater than the length of
 * the JSONArray, then null elements will be added as necessary to pad
 * it out.
 * @param index The subscript.
 * @param value An int value.
 * @return this.
 * @throws JSONException If the index is negative.
 */
public JSONArray put(int index, int value) throws JSONException {
    // Integer.valueOf instead of the deprecated Integer(int) constructor.
    this.put(index, Integer.valueOf(value));
    return this;
}
/**
 * Put or replace a long value. If the index is greater than the length of
 * the JSONArray, then null elements will be added as necessary to pad
 * it out.
 * @param index The subscript.
 * @param value A long value.
 * @return this.
 * @throws JSONException If the index is negative.
 */
public JSONArray put(int index, long value) throws JSONException {
    // Long.valueOf instead of the deprecated Long(long) constructor.
    this.put(index, Long.valueOf(value));
    return this;
}
/**
 * Put a value in the JSONArray, where the value will be a
 * JSONObject that is produced from a Map.
 * @param index The subscript.
 * @param value The Map value.
 * @return this.
 * @throws JSONException If the index is negative or if the the value is
 *  an invalid number.
 */
public JSONArray put(int index, Map value) throws JSONException {
    // Wrap the map and delegate to put(int, Object), which returns this.
    return this.put(index, new JSONObject(value));
}
/**
 * Put or replace an object value in the JSONArray. If the index is greater
 * than the length of the JSONArray, then null elements will be added as
 * necessary to pad it out.
 * @param index The subscript.
 * @param value The value to put into the array. The value should be a
 * Boolean, Double, Integer, JSONArray, JSONObject, Long, or String, or the
 * JSONObject.NULL object.
 * @return this.
 * @throws JSONException If the index is negative or if the the value is
 * an invalid number.
 */
public JSONArray put(int index, Object value) throws JSONException {
    JSONObject.testValidity(value);
    if (index < 0) {
        throw new JSONException("JSONArray[" + index + "] not found.");
    }
    if (index < this.length()) {
        // In-range: replace in place.
        this.myArrayList.set(index, value);
        return this;
    }
    // Past the end: pad with JSONObject.NULL up to the index, then append.
    for (int fill = this.length(); fill < index; fill += 1) {
        this.put(JSONObject.NULL);
    }
    this.put(value);
    return this;
}
/**
 * Remove an index and close the hole.
 * @param index The index of the element to be removed.
 * @return The value that was associated with the index,
 * or null if there was no value.
 */
public Object remove(int index) {
    // Guard the range so an out-of-bounds index returns null, as the
    // contract above documents, instead of letting ArrayList.remove throw
    // IndexOutOfBoundsException (the previous behavior).
    if (index < 0 || index >= this.length()) {
        return null;
    }
    Object removed = this.opt(index);
    this.myArrayList.remove(index);
    return removed;
}
/**
 * Produce a JSONObject by combining a JSONArray of names with the values
 * of this JSONArray.
 * @param names A JSONArray containing a list of key strings. These will be
 * paired with the values.
 * @return A JSONObject, or null if there are no names or if this JSONArray
 * has no values.
 * @throws JSONException If any of the names are null.
 */
public JSONObject toJSONObject(JSONArray names) throws JSONException {
    if (names == null || names.length() == 0 || this.length() == 0) {
        return null;
    }
    JSONObject result = new JSONObject();
    int count = names.length();
    // Pair each name with the value at the same position; positions past
    // this array's end map to null via opt().
    for (int position = 0; position < count; position++) {
        result.put(names.getString(position), this.opt(position));
    }
    return result;
}
/**
 * Make a JSON text of this JSONArray. For compactness, no
 * unnecessary whitespace is added. If it is not possible to produce a
 * syntactically correct JSON text then null will be returned instead. This
 * could occur if the array contains an invalid number.
 * <p>
 * Warning: This method assumes that the data structure is acyclical.
 *
 * @return a printable, displayable, transmittable
 *  representation of the array.
 */
public String toString() {
    try {
        return new StringBuilder()
            .append('[')
            .append(this.join(","))
            .append(']')
            .toString();
    } catch (Exception ignored) {
        // Contract: return null rather than propagate serialization errors.
        return null;
    }
}
/**
 * Make a prettyprinted JSON text of this JSONArray.
 * Warning: This method assumes that the data structure is acyclical.
 * @param indentFactor The number of spaces to add to each level of
 *  indentation.
 * @return a printable, displayable, transmittable
 *  representation of the object, beginning
 *  with <code>[</code> <small>(left bracket)</small> and ending
 *  with <code>]</code> <small>(right bracket)</small>.
 * @throws JSONException if a value cannot be rendered as JSON text.
 */
public String toString(int indentFactor) throws JSONException {
    // Delegate to the package-private overload with a top-level indent of 0.
    return this.toString(indentFactor, 0);
}
/**
 * Make a prettyprinted JSON text of this JSONArray.
 * Warning: This method assumes that the data structure is acyclical.
 * @param indentFactor The number of spaces to add to each level of
 *  indentation.
 * @param indent The indention of the top level.
 * @return a printable, displayable, transmittable
 *  representation of the array.
 * @throws JSONException if a value cannot be rendered as JSON text.
 */
String toString(int indentFactor, int indent) throws JSONException {
    int len = this.length();
    if (len == 0) {
        return "[]";
    }
    int i;
    StringBuffer sb = new StringBuffer("[");
    if (len == 1) {
        // Single element: keep it on the same line as the brackets.
        sb.append(JSONObject.valueToString(this.myArrayList.get(0),
                indentFactor, indent));
    } else {
        // Multiple elements: one per line, each indented one level deeper
        // than the enclosing bracket.
        int newindent = indent + indentFactor;
        sb.append('\n');
        for (i = 0; i < len; i += 1) {
            if (i > 0) {
                sb.append(",\n");
            }
            for (int j = 0; j < newindent; j += 1) {
                sb.append(' ');
            }
            sb.append(JSONObject.valueToString(this.myArrayList.get(i),
                    indentFactor, newindent));
        }
        sb.append('\n');
        // Align the closing bracket with the enclosing level.
        for (i = 0; i < indent; i += 1) {
            sb.append(' ');
        }
    }
    sb.append(']');
    return sb.toString();
}
/**
 * Write the contents of the JSONArray as JSON text to a writer.
 * For compactness, no whitespace is added.
 * <p>
 * Warning: This method assumes that the data structure is acyclical.
 *
 * @param writer the destination writer (not closed or flushed here).
 * @return The writer.
 * @throws JSONException if an I/O error occurs or a value cannot be
 *  serialized.
 */
public Writer write(Writer writer) throws JSONException {
    try {
        boolean b = false;  // true once the first element has been written
        int len = this.length();
        writer.write('[');
        for (int i = 0; i < len; i += 1) {
            if (b) {
                // Comma before every element except the first.
                writer.write(',');
            }
            Object v = this.myArrayList.get(i);
            // Nested containers stream themselves; everything else goes
            // through valueToString.
            if (v instanceof JSONObject) {
                ((JSONObject)v).write(writer);
            } else if (v instanceof JSONArray) {
                ((JSONArray)v).write(writer);
            } else {
                writer.write(JSONObject.valueToString(v));
            }
            b = true;
        }
        writer.write(']');
        return writer;
    } catch (IOException e) {
        // Wrap the I/O failure in the API's checked exception type.
        throw new JSONException(e);
    }
}
@Override
public boolean equals(Object o) {
    // Two JSONArrays are equal when their backing lists are equal.
    return (o instanceof JSONArray)
        && myArrayList.equals(((JSONArray) o).myArrayList);
}
@Override
public int hashCode() {
    // Consistent with equals(): both delegate to the backing list.
    return myArrayList.hashCode();
}
}
|
|
/*
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2007 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.powermock.api.mockito.repackaged.asm.tree.analysis;
import org.powermock.api.mockito.repackaged.asm.Type;
import java.util.List;
/**
* An extended {@link BasicVerifier} that performs more precise verifications.
* This verifier computes exact class types, instead of using a single "object
* reference" type (as done in the {@link BasicVerifier}).
*
* @author Eric Bruneton
* @author Bing Ran
*/
public class SimpleVerifier extends BasicVerifier {
    /**
     * The class that is verified.
     */
    private final Type currentClass;
    /**
     * The super class of the class that is verified.
     */
    private final Type currentSuperClass;
    /**
     * The interfaces implemented by the class that is verified.
     */
    private final List currentClassInterfaces;
    /**
     * If the class that is verified is an interface.
     */
    private final boolean isInterface;
    /**
     * Constructs a new {@link SimpleVerifier}.
     */
    public SimpleVerifier() {
        this(null, null, false);
    }
    /**
     * Constructs a new {@link SimpleVerifier} to verify a specific class. This
     * class will not be loaded into the JVM since it may be incorrect.
     *
     * @param currentClass the class that is verified.
     * @param currentSuperClass the super class of the class that is verified.
     * @param isInterface if the class that is verified is an interface.
     */
    public SimpleVerifier(
        final Type currentClass,
        final Type currentSuperClass,
        final boolean isInterface)
    {
        this(currentClass, currentSuperClass, null, isInterface);
    }
    /**
     * Constructs a new {@link SimpleVerifier} to verify a specific class. This
     * class will not be loaded into the JVM since it may be incorrect.
     *
     * @param currentClass the class that is verified.
     * @param currentSuperClass the super class of the class that is verified.
     * @param currentClassInterfaces the interfaces implemented by the class
     *        that is verified.
     * @param isInterface if the class that is verified is an interface.
     */
    public SimpleVerifier(
        final Type currentClass,
        final Type currentSuperClass,
        final List currentClassInterfaces,
        final boolean isInterface)
    {
        this.currentClass = currentClass;
        this.currentSuperClass = currentSuperClass;
        this.currentClassInterfaces = currentClassInterfaces;
        this.isInterface = isInterface;
    }
    /**
     * Creates the symbolic value for the given type. Unlike BasicVerifier,
     * reference and array types are modelled with their exact Type instead of
     * collapsing to the generic REFERENCE_VALUE.
     */
    public Value newValue(final Type type) {
        if (type == null) {
            return BasicValue.UNINITIALIZED_VALUE;
        }
        boolean isArray = type.getSort() == Type.ARRAY;
        if (isArray) {
            switch (type.getElementType().getSort()) {
            case Type.BOOLEAN:
            case Type.CHAR:
            case Type.BYTE:
            case Type.SHORT:
                // Keep the exact array type: boolean[]/char[]/byte[]/short[]
                // are distinct at runtime and must not be widened.
                return new BasicValue(type);
            }
        }
        Value v = super.newValue(type);
        if (v == BasicValue.REFERENCE_VALUE) {
            if (isArray) {
                // Refine the element value first, then rebuild the array
                // descriptor by prepending one '[' per dimension.
                v = newValue(type.getElementType());
                String desc = ((BasicValue) v).getType().getDescriptor();
                for (int i = 0; i < type.getDimensions(); ++i) {
                    desc = '[' + desc;
                }
                v = new BasicValue(Type.getType(desc));
            } else {
                v = new BasicValue(type);
            }
        }
        return v;
    }
    /**
     * Returns true for array values and for the pseudo-type "Lnull;", which
     * this verifier uses to represent the null constant.
     */
    protected boolean isArrayValue(final Value value) {
        Type t = ((BasicValue) value).getType();
        return t != null
            && ("Lnull;".equals(t.getDescriptor()) || t.getSort() == Type.ARRAY);
    }
    /**
     * Returns the value of an element loaded from the given array value:
     * the component type for a real array, or the null value itself.
     */
    protected Value getElementValue(final Value objectArrayValue)
        throws AnalyzerException
    {
        Type arrayType = ((BasicValue) objectArrayValue).getType();
        if (arrayType != null) {
            if (arrayType.getSort() == Type.ARRAY) {
                // Strip one leading '[' from the descriptor to obtain the
                // component type.
                return newValue(Type.getType(arrayType.getDescriptor()
                    .substring(1)));
            } else if ("Lnull;".equals(arrayType.getDescriptor())) {
                return objectArrayValue;
            }
        }
        throw new Error("Internal error");
    }
    /**
     * Tests whether value may be used where expected is required: primitives
     * must match exactly, "null" fits any reference type, and other
     * reference/array types are checked with isAssignableFrom.
     */
    protected boolean isSubTypeOf(final Value value, final Value expected) {
        Type expectedType = ((BasicValue) expected).getType();
        Type type = ((BasicValue) value).getType();
        switch (expectedType.getSort()) {
        case Type.INT:
        case Type.FLOAT:
        case Type.LONG:
        case Type.DOUBLE:
            return type == expectedType;
        case Type.ARRAY:
        case Type.OBJECT:
            if ("Lnull;".equals(type.getDescriptor())) {
                return true;
            } else if (type.getSort() == Type.OBJECT
                || type.getSort() == Type.ARRAY)
            {
                return isAssignableFrom(expectedType, type);
            } else {
                return false;
            }
        default:
            throw new Error("Internal error");
        }
    }
    /**
     * Merges two values at a control-flow join. For reference types the
     * result is the closest common super class, found by walking up the
     * hierarchy of v; interfaces collapse to the generic REFERENCE_VALUE.
     */
    public Value merge(final Value v, final Value w) {
        if (!v.equals(w)) {
            Type t = ((BasicValue) v).getType();
            Type u = ((BasicValue) w).getType();
            if (t != null
                && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY))
            {
                if (u != null
                    && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY))
                {
                    // The null pseudo-type merges to whichever side is real.
                    if ("Lnull;".equals(t.getDescriptor())) {
                        return w;
                    }
                    if ("Lnull;".equals(u.getDescriptor())) {
                        return v;
                    }
                    // If one side already subsumes the other, keep it.
                    if (isAssignableFrom(t, u)) {
                        return v;
                    }
                    if (isAssignableFrom(u, t)) {
                        return w;
                    }
                    // TODO case of array classes of the same dimension
                    // TODO should we look also for a common super interface?
                    // problem: there may be several possible common super
                    // interfaces
                    do {
                        if (t == null || isInterface(t)) {
                            return BasicValue.REFERENCE_VALUE;
                        }
                        t = getSuperClass(t);
                        if (isAssignableFrom(t, u)) {
                            return newValue(t);
                        }
                    } while (true);
                }
            }
            // Incompatible kinds (e.g. int vs reference) cannot be merged.
            return BasicValue.UNINITIALIZED_VALUE;
        }
        return v;
    }
    /**
     * Tells whether t is an interface. The class under verification is
     * answered from the constructor data so it is never loaded into the JVM.
     */
    protected boolean isInterface(final Type t) {
        if (currentClass != null && t.equals(currentClass)) {
            return isInterface;
        }
        return getClass(t).isInterface();
    }
    /**
     * Returns the super class of t, or null when t has none
     * (java.lang.Object).
     */
    protected Type getSuperClass(final Type t) {
        if (currentClass != null && t.equals(currentClass)) {
            return currentSuperClass;
        }
        Class c = getClass(t).getSuperclass();
        return c == null ? null : Type.getType(c);
    }
    /**
     * Tests whether u can be assigned to t. The class being verified is
     * handled symbolically, via its declared super class and interface list,
     * so that it is never loaded; everything else is resolved reflectively.
     */
    protected boolean isAssignableFrom(final Type t, final Type u) {
        if (t.equals(u)) {
            return true;
        }
        if (currentClass != null && t.equals(currentClass)) {
            // Only a (transitive) subclass of the verified class can be
            // assigned to it; walk up u's super chain until it hits
            // currentClass (via the t.equals(u) check) or runs out.
            if (getSuperClass(u) == null) {
                return false;
            } else {
                return isAssignableFrom(t, getSuperClass(u));
            }
        }
        if (currentClass != null && u.equals(currentClass)) {
            // Check the verified class's declared super class and interfaces.
            if (isAssignableFrom(t, currentSuperClass)) {
                return true;
            }
            if (currentClassInterfaces != null) {
                for (int i = 0; i < currentClassInterfaces.size(); ++i) {
                    Type v = (Type) currentClassInterfaces.get(i);
                    if (isAssignableFrom(t, v)) {
                        return true;
                    }
                }
            }
            return false;
        }
        return getClass(t).isAssignableFrom(getClass(u));
    }
    /**
     * Loads the Class for t: arrays via their dotted descriptor form
     * (e.g. "[Ljava.lang.String;"), plain classes via their binary name.
     */
    protected Class getClass(final Type t) {
        try {
            if (t.getSort() == Type.ARRAY) {
                return Class.forName(t.getDescriptor().replace('/', '.'));
            }
            return Class.forName(t.getClassName());
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e.toString());
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.scan.executor.util;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import org.apache.carbondata.core.cache.Cache;
import org.apache.carbondata.core.cache.CacheProvider;
import org.apache.carbondata.core.cache.CacheType;
import org.apache.carbondata.core.cache.dictionary.Dictionary;
import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datastore.compression.Compressor;
import org.apache.carbondata.core.datastore.compression.CompressorFactory;
import org.apache.carbondata.core.keygenerator.KeyGenException;
import org.apache.carbondata.core.keygenerator.KeyGenerator;
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
import org.apache.carbondata.core.metadata.CarbonMetadata;
import org.apache.carbondata.core.metadata.ColumnIdentifier;
import org.apache.carbondata.core.metadata.datatype.DataType;
import org.apache.carbondata.core.metadata.datatype.DataTypes;
import org.apache.carbondata.core.metadata.encoder.Encoding;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.metadata.schema.table.RelationIdentifier;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
import org.apache.carbondata.core.scan.complextypes.ArrayQueryType;
import org.apache.carbondata.core.scan.complextypes.PrimitiveQueryType;
import org.apache.carbondata.core.scan.complextypes.StructQueryType;
import org.apache.carbondata.core.scan.expression.ColumnExpression;
import org.apache.carbondata.core.scan.expression.Expression;
import org.apache.carbondata.core.scan.filter.GenericQueryType;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.scan.model.ProjectionDimension;
import org.apache.carbondata.core.scan.model.ProjectionMeasure;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.commons.lang3.ArrayUtils;
/**
* Utility class for query execution
*/
public class QueryUtil {
/**
 * Below method will be used to get the masked byte range based on the query
 * dimension. It will give the range in the mdkey. This will be used to get
 * the actual key array from masked mdkey.
 *
 * @param queryDimensions query dimension selected in query
 * @param keyGenerator key generator
 * @return ascending, de-duplicated array of masked byte offsets
 */
public static int[] getMaskedByteRange(List<ProjectionDimension> queryDimensions,
    KeyGenerator keyGenerator) {
    // TreeSet keeps the offsets sorted and unique.
    Set<Integer> offsets = new TreeSet<Integer>();
    for (ProjectionDimension projection : queryDimensions) {
        int keyOrdinal = projection.getDimension().getKeyOrdinal();
        // No-dictionary and complex columns are not part of the mdkey;
        // they carry key ordinal -1 and are skipped.
        if (keyOrdinal == -1) {
            continue;
        }
        // Collect every byte offset covered by this dimension in the mdkey.
        int[] range = keyGenerator.getKeyByteOffsets(keyOrdinal);
        for (int offset = range[0]; offset <= range[1]; offset++) {
            offsets.add(offset);
        }
    }
    int[] maskedByteRange = new int[offsets.size()];
    int index = 0;
    for (Integer offset : offsets) {
        maskedByteRange[index++] = offset;
    }
    return maskedByteRange;
}
/**
 * Computes the sorted, de-duplicated mdkey byte offsets covered by the
 * given key ordinals.
 *
 * @param ordinals key ordinals to resolve
 * @param keyGenerator key generator describing the mdkey layout
 * @return ascending array of masked byte offsets
 */
public static int[] getMaskedByteRangeBasedOrdinal(List<Integer> ordinals,
    KeyGenerator keyGenerator) {
    // TreeSet keeps the offsets sorted and unique.
    Set<Integer> offsets = new TreeSet<Integer>();
    for (Integer ordinal : ordinals) {
        int[] range = keyGenerator.getKeyByteOffsets(ordinal);
        for (int offset = range[0]; offset <= range[1]; offset++) {
            offsets.add(offset);
        }
    }
    int[] maskedByteRange = new int[offsets.size()];
    int index = 0;
    for (Integer offset : offsets) {
        maskedByteRange[index++] = offset;
    }
    return maskedByteRange;
}
/**
 * Below method will return the max key based on the dimension ordinal:
 * every listed ordinal gets all bits set, everything else stays zero.
 *
 * @param keyOrdinalList ordinals selected in the query
 * @param generator key generator
 * @return generated max key
 * @throws KeyGenException if the key cannot be generated
 */
public static byte[] getMaxKeyBasedOnOrinal(List<Integer> keyOrdinalList, KeyGenerator generator)
    throws KeyGenException {
    // new long[] is already zero-filled; only queried ordinals are raised.
    long[] max = new long[generator.getDimCount()];
    for (Integer ordinal : keyOrdinalList) {
        max[ordinal] = Long.MAX_VALUE;
    }
    return generator.generateKey(max);
}
/**
 * To get the max key based on dimensions. i.e. all other dimensions will be
 * set to 0 bits and the required query dimension will be masked with all
 * LONG.MAX so that we can mask key and then compare while aggregating. This
 * can be useful during filter query when only few dimensions were selected
 * out of row group.
 *
 * @param queryDimensions dimension selected in query
 * @param generator key generator
 * @return max key for dimension
 * @throws KeyGenException if any problem while generating the key
 */
public static byte[] getMaxKeyBasedOnDimensions(List<ProjectionDimension> queryDimensions,
    KeyGenerator generator) throws KeyGenException {
    // new long[] is already zero-filled; only queried ordinals are raised.
    long[] max = new long[generator.getDimCount()];
    for (ProjectionDimension projection : queryDimensions) {
        int keyOrdinal = projection.getDimension().getKeyOrdinal();
        // No-dictionary and complex columns are not part of the mdkey;
        // they carry key ordinal -1 and are skipped.
        if (keyOrdinal == -1) {
            continue;
        }
        max[keyOrdinal] = Long.MAX_VALUE;
    }
    return generator.generateKey(max);
}
/**
 * Below method will be used to get the masked key for query: an array of
 * size keySize where each selected byte position holds its index within
 * the masked ranges and every other position holds -1.
 *
 * @param keySize size of the masked key
 * @param maskedKeyRanges masked byte range
 * @return masked bytes
 */
public static int[] getMaskedByte(int keySize, int[] maskedKeyRanges) {
    int[] maskedKey = new int[keySize];
    // Positions of non-selected dimensions are marked with -1.
    Arrays.fill(maskedKey, -1);
    for (int position = 0; position < maskedKeyRanges.length; position++) {
        maskedKey[maskedKeyRanges[position]] = position;
    }
    return maskedKey;
}
/**
 * Below method will be used to get the dimension block index in file based
 * on query dimension. Chunks whose dimension is already covered by a filter
 * column are excluded from the returned array (they are read by the filter
 * path); implicit columns are skipped entirely.
 *
 * @param queryDimensions query dimension
 * @param dimensionOrdinalToChunkMapping mapping of dimension block in file to query dimension
 * @param filterDimensions dimensions already present in the filter
 * @param allProjectionListDimensionIndexes out-parameter collecting every projected chunk index
 * @return sorted block indexes of file
 */
public static int[] getDimensionChunkIndexes(List<ProjectionDimension> queryDimensions,
    Map<Integer, Integer> dimensionOrdinalToChunkMapping,
    Set<CarbonDimension> filterDimensions,
    Set<Integer> allProjectionListDimensionIndexes) {
    // using set as in row group columns will point to same block
    Set<Integer> dimensionChunkIndex = new HashSet<Integer>();
    Set<Integer> filterDimensionOrdinal = getFilterDimensionOrdinal(filterDimensions);
    int chunkIndex = 0;
    for (int i = 0; i < queryDimensions.size(); i++) {
        // Implicit columns (e.g. tuple id) have no physical chunk to read.
        if (queryDimensions.get(i).getDimension().hasEncoding(Encoding.IMPLICIT)) {
            continue;
        }
        Integer dimensionOrdinal = queryDimensions.get(i).getDimension().getOrdinal();
        allProjectionListDimensionIndexes.add(dimensionOrdinalToChunkMapping.get(dimensionOrdinal));
        if (queryDimensions.get(i).getDimension().getNumberOfChild() > 0) {
            // Complex dimension: also register its children.
            // NOTE(review): addChildrenBlockIndex adds child ordinals, not
            // mapped chunk indexes — assumes they coincide for complex
            // children; confirm against the chunk mapping builder.
            addChildrenBlockIndex(allProjectionListDimensionIndexes,
                queryDimensions.get(i).getDimension());
        }
        if (!filterDimensionOrdinal.contains(dimensionOrdinal)) {
            // Not handled by the filter: this chunk must be read here.
            chunkIndex = dimensionOrdinalToChunkMapping.get(dimensionOrdinal);
            dimensionChunkIndex.add(chunkIndex);
            if (queryDimensions.get(i).getDimension().getNumberOfChild() > 0) {
                addChildrenBlockIndex(dimensionChunkIndex, queryDimensions.get(i).getDimension());
            }
        }
    }
    // Unbox to int[] and sort so chunks are read in file order.
    int[] dimensionIndex = ArrayUtils
        .toPrimitive(dimensionChunkIndex.toArray(new Integer[dimensionChunkIndex.size()]));
    Arrays.sort(dimensionIndex);
    return dimensionIndex;
}
/**
 * Below method will be used to add the children block index;
 * this will be basically for complex dimension which will have children.
 * Recurses depth-first so nested complex children are covered too.
 *
 * @param blockIndexes block indexes
 * @param dimension parent dimension
 */
private static void addChildrenBlockIndex(Set<Integer> blockIndexes, CarbonDimension dimension) {
    int childCount = dimension.getNumberOfChild();
    for (int childIndex = 0; childIndex < childCount; childIndex++) {
        CarbonDimension child = dimension.getListOfChildDimensions().get(childIndex);
        // Recurse first, then record this child's own ordinal.
        addChildrenBlockIndex(blockIndexes, child);
        blockIndexes.add(child.getOrdinal());
    }
}
/**
 * Below method will be used to get the dictionary mapping for all the
 * dictionary encoded dimension present in the query.
 *
 * @param queryDimensions query dimension present in the query this will be used to
 *                        convert the result from surrogate key to actual data
 * @param filterComplexDimensions complex dimensions referenced by the filter
 * @param carbonTable table whose dictionaries are resolved
 * @return dimension unique id to its dictionary map
 * @throws IOException if the dictionary cache cannot be loaded
 */
public static Map<String, Dictionary> getDimensionDictionaryDetail(
    List<ProjectionDimension> queryDimensions, Set<CarbonDimension> filterComplexDimensions,
    CarbonTable carbonTable) throws IOException {
    // to store complex dimension and its child id unique column id list, this is required as
    // dimension can be present in projection and filter
    // so we need to get only one instance of dictionary
    // direct dictionary skip is done only for the dictionary lookup
    Set<String> dictionaryDimensionFromQuery = new HashSet<String>();
    for (int i = 0; i < queryDimensions.size(); i++) {
        List<Encoding> encodingList = queryDimensions.get(i).getDimension().getEncoder();
        // TODO need to remove the data type check for parent column in complex type no need to
        // write encoding dictionary
        // NOTE(review): only dictionary-encoded (non-direct, non-implicit)
        // dimensions that additionally have children are collected here, so
        // plain dictionary primitives are excluded — confirm this is the
        // intended scope of this lookup.
        if (CarbonUtil.hasEncoding(encodingList, Encoding.DICTIONARY) && !CarbonUtil
            .hasEncoding(encodingList, Encoding.DIRECT_DICTIONARY) && !CarbonUtil
            .hasEncoding(encodingList, Encoding.IMPLICIT)
            && queryDimensions.get(i).getDimension().getNumberOfChild() > 0) {
            getChildDimensionDictionaryDetail(queryDimensions.get(i).getDimension(),
                dictionaryDimensionFromQuery);
        }
    }
    // Complex dimensions from the filter contribute their children as well;
    // the shared Set de-duplicates columns present in both projection and filter.
    Iterator<CarbonDimension> iterator = filterComplexDimensions.iterator();
    while (iterator.hasNext()) {
        CarbonDimension filterDim = iterator.next();
        // only to add complex dimension
        if (filterDim.getNumberOfChild() > 0) {
            getChildDimensionDictionaryDetail(filterDim, dictionaryDimensionFromQuery);
        }
    }
    // converting to list as api exposed needed list which i think
    // is not correct
    List<String> dictionaryColumnIdList =
        new ArrayList<String>(dictionaryDimensionFromQuery.size());
    dictionaryColumnIdList.addAll(dictionaryDimensionFromQuery);
    return getDictionaryMap(dictionaryColumnIdList, carbonTable);
}
/**
 * Below method will be used to fill the children dimension column ids
 * of a complex dimension into the given set, recursing through nested
 * complex children and skipping direct-dictionary leaves.
 *
 * @param queryDimensions query dimension (complex parent)
 * @param dictionaryDimensionFromQuery dictionary dimension column ids for the query
 */
private static void getChildDimensionDictionaryDetail(CarbonDimension queryDimensions,
    Set<String> dictionaryDimensionFromQuery) {
    for (int j = 0; j < queryDimensions.getNumberOfChild(); j++) {
        CarbonDimension child = queryDimensions.getListOfChildDimensions().get(j);
        if (child.getNumberOfChild() > 0) {
            // Nested complex type: descend into its children.
            getChildDimensionDictionaryDetail(child, dictionaryDimensionFromQuery);
        } else if (!CarbonUtil.hasEncoding(child.getEncoder(), Encoding.DIRECT_DICTIONARY)) {
            // Leaf child backed by a real (non-direct) dictionary.
            dictionaryDimensionFromQuery.add(child.getColumnId());
        }
    }
}
/**
 * Below method will be used to get the column id to its dictionary mapping,
 * resolving all dictionaries in one bulk call through the forward
 * dictionary cache.
 *
 * @param dictionaryColumnIdList dictionary column list
 * @param carbonTable table the columns belong to
 * @return dictionary mapping (column id -> Dictionary)
 * @throws IOException if the dictionary cache cannot be loaded
 */
private static Map<String, Dictionary> getDictionaryMap(List<String> dictionaryColumnIdList,
    CarbonTable carbonTable) throws IOException {
    // if any complex dimension not present in query then return the empty map
    if (dictionaryColumnIdList.size() == 0) {
        return new HashMap<>();
    }
    // this for dictionary unique identifier
    List<DictionaryColumnUniqueIdentifier> dictionaryColumnUniqueIdentifiers =
        getDictionaryColumnUniqueIdentifierList(dictionaryColumnIdList, carbonTable);
    CacheProvider cacheProvider = CacheProvider.getInstance();
    Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache = cacheProvider
        .createCache(CacheType.FORWARD_DICTIONARY);
    // Bulk lookup: getAll preserves the order of the identifier list, which
    // the zip loop below relies on.
    List<Dictionary> columnDictionaryList =
        forwardDictionaryCache.getAll(dictionaryColumnUniqueIdentifiers);
    Map<String, Dictionary> columnDictionaryMap = new HashMap<>(columnDictionaryList.size());
    for (int i = 0; i < dictionaryColumnUniqueIdentifiers.size(); i++) {
        // TODO: null check for column dictionary, if cache size is less it
        // might return null here, in that case throw exception
        columnDictionaryMap
            .put(dictionaryColumnUniqueIdentifiers.get(i).getColumnIdentifier().getColumnId(),
                columnDictionaryList.get(i));
    }
    return columnDictionaryMap;
}
/**
 * Below method will be used to get the dictionary column unique identifiers
 * for the given column ids. Columns inherited from a parent table (child
 * datamap/aggregate tables) resolve to the parent table's dictionary;
 * column ids that no longer map to a dimension are silently skipped, so the
 * result may be shorter than the input list.
 *
 * @param dictionaryColumnIdList dictionary column ids
 * @param carbonTable table the columns belong to
 * @return identifiers usable as forward-dictionary cache keys
 * @throws IOException declared for callers; not thrown directly here
 */
private static List<DictionaryColumnUniqueIdentifier> getDictionaryColumnUniqueIdentifierList(
    List<String> dictionaryColumnIdList, CarbonTable carbonTable) throws IOException {
    List<DictionaryColumnUniqueIdentifier> dictionaryColumnUniqueIdentifiers =
        new ArrayList<>(dictionaryColumnIdList.size());
    for (String columnId : dictionaryColumnIdList) {
        CarbonDimension dimension = CarbonMetadata.getInstance()
            .getCarbonDimensionBasedOnColIdentifier(carbonTable, columnId);
        if (dimension != null) {
            AbsoluteTableIdentifier dictionarySourceAbsoluteTableIdentifier;
            ColumnIdentifier columnIdentifier;
            // A non-empty parent relation means this column inherits its
            // dictionary from the parent table's column.
            if (null != dimension.getColumnSchema().getParentColumnTableRelations() && !dimension
                .getColumnSchema().getParentColumnTableRelations().isEmpty()) {
                dictionarySourceAbsoluteTableIdentifier =
                    getTableIdentifierForColumn(dimension);
                columnIdentifier = new ColumnIdentifier(
                    dimension.getColumnSchema().getParentColumnTableRelations().get(0).getColumnId(),
                    dimension.getColumnProperties(), dimension.getDataType());
            } else {
                // Plain column: dictionary lives with this table.
                dictionarySourceAbsoluteTableIdentifier = carbonTable.getAbsoluteTableIdentifier();
                columnIdentifier = dimension.getColumnIdentifier();
            }
            // Optional user-configured external dictionary location.
            String dictionaryPath = carbonTable.getTableInfo().getFactTable().getTableProperties()
                .get(CarbonCommonConstants.DICTIONARY_PATH);
            dictionaryColumnUniqueIdentifiers.add(
                new DictionaryColumnUniqueIdentifier(dictionarySourceAbsoluteTableIdentifier,
                    columnIdentifier, dimension.getDataType(), dictionaryPath));
        }
    }
    return dictionaryColumnUniqueIdentifiers;
}
/**
 * Resolves the absolute identifier of the parent (source) table for a child
 * dimension, i.e. a column whose dictionary is inherited from a parent table's
 * column via its first parent-column table relation.
 *
 * @param carbonDimension dimension having at least one parent-column table relation
 * @return absolute identifier of the parent table
 */
public static AbsoluteTableIdentifier getTableIdentifierForColumn(
    CarbonDimension carbonDimension) {
  // The first parent relation identifies the source table. The original code
  // fetched the same RelationIdentifier twice into two locals; reuse one.
  RelationIdentifier parentRelationIdentifier =
      carbonDimension.getColumnSchema().getParentColumnTableRelations().get(0)
          .getRelationIdentifier();
  String parentTablePath = CarbonMetadata.getInstance()
      .getCarbonTable(parentRelationIdentifier.getDatabaseName(),
          parentRelationIdentifier.getTableName()).getTablePath();
  return AbsoluteTableIdentifier.from(parentTablePath,
      parentRelationIdentifier.getDatabaseName(),
      parentRelationIdentifier.getTableName(),
      parentRelationIdentifier.getTableId());
}
/**
 * Computes the measure chunk (block) indexes that must be read from the file
 * for the given projection and expression measures. Measures that are already
 * read for filtering are excluded from the projection set, while every
 * projected measure ordinal is still recorded in the out-parameter list.
 *
 * @param queryMeasures                  measures projected by the query
 * @param expressionMeasure              measures referenced inside expressions
 * @param ordinalToBlockIndexMapping     measure ordinal to chunk index mapping
 * @param filterMeasures                 measures present in the filter
 * @param allProjectionListMeasureIdexes out-param collecting every projected measure ordinal
 * @return sorted, de-duplicated chunk indexes to read
 */
public static int[] getMeasureChunkIndexes(List<ProjectionMeasure> queryMeasures,
    List<CarbonMeasure> expressionMeasure, Map<Integer, Integer> ordinalToBlockIndexMapping,
    Set<CarbonMeasure> filterMeasures, List<Integer> allProjectionListMeasureIdexes) {
  Set<Integer> chunkIndexes = new HashSet<Integer>();
  Set<Integer> filterOrdinals = getFilterMeasureOrdinal(filterMeasures);
  for (ProjectionMeasure projection : queryMeasures) {
    Integer ordinal = projection.getMeasure().getOrdinal();
    allProjectionListMeasureIdexes.add(ordinal);
    // Measures already fetched by the filter need not be read again here.
    if (!filterOrdinals.contains(ordinal)) {
      chunkIndexes.add(ordinalToBlockIndexMapping.get(ordinal));
    }
  }
  for (CarbonMeasure measure : expressionMeasure) {
    chunkIndexes.add(ordinalToBlockIndexMapping.get(measure.getOrdinal()));
  }
  int[] measureIndexes =
      ArrayUtils.toPrimitive(chunkIndexes.toArray(new Integer[chunkIndexes.size()]));
  Arrays.sort(measureIndexes);
  return measureIndexes;
}
/**
 * Marks, for every projected dimension, whether it also appears in the
 * order-by (sort) dimension list.
 *
 * @param sortedDimensions sort dimensions present in the order-by clause
 * @param queryDimensions  projected query dimensions
 * @return byte flags parallel to queryDimensions: 1 = sorted, 0 = not sorted
 */
public static byte[] getSortDimensionIndexes(List<ProjectionDimension> sortedDimensions,
    List<ProjectionDimension> queryDimensions) {
  byte[] sortedDims = new byte[queryDimensions.size()];
  for (int i = 0; i < sortedDims.length; i++) {
    // contains() states the intent directly; the original computed indexOf
    // into a method-scoped local but only ever used it as a membership test.
    if (sortedDimensions.contains(queryDimensions.get(i))) {
      sortedDims[i] = 1;
    }
  }
  return sortedDims;
}
/**
 * Checks whether {@code key} occurs anywhere in {@code data} using a linear
 * scan (the array is not required to be sorted).
 *
 * @param data array to scan
 * @param key  value to look for
 * @return true if the key is present in the array, false otherwise
 */
public static boolean searchInArray(int[] data, int key) {
  for (int value : data) {
    if (value == key) {
      return true;
    }
  }
  return false;
}
/**
 * Builds a masked key: for each valid entry of {@code maskByteRanges}, ANDs
 * the corresponding data byte with the max-key byte at the same offset and
 * appends it to the result.
 *
 * @param data           actual key bytes
 * @param maxKey         per-position mask bytes
 * @param maskByteRanges byte offsets to pick; entries of -1 are skipped
 * @param byteCount      number of mask-range entries to consume (also the result length;
 *                       positions left unwritten because of -1 entries stay 0)
 * @return masked key bytes
 */
public static byte[] getMaskedKey(byte[] data, byte[] maxKey, int[] maskByteRanges,
    int byteCount) {
  byte[] masked = new byte[byteCount];
  int outIndex = 0;
  for (int i = 0; i < byteCount; i++) {
    int range = maskByteRanges[i];
    if (range == -1) {
      continue;
    }
    masked[outIndex++] = (byte) (data[range] & maxKey[range]);
  }
  return masked;
}
/**
 * Splits the projected dimensions into dictionary and no-dictionary chunk
 * indexes in a single pass, so both output collections are filled in one
 * iteration. Complex (child-bearing) columns are skipped entirely, and
 * implicit columns are excluded from the no-dictionary list.
 *
 * @param projectDimensions                dimensions present in the query
 * @param columnOrdinalToChunkIndexMapping column ordinal to chunk index mapping
 * @param dictionaryDimensionChunkIndex    receives chunk indexes of dictionary columns
 * @param noDictionaryDimensionChunkIndex  receives chunk indexes of no-dictionary columns
 */
public static void fillQueryDimensionChunkIndexes(
    List<ProjectionDimension> projectDimensions,
    Map<Integer, Integer> columnOrdinalToChunkIndexMapping,
    Set<Integer> dictionaryDimensionChunkIndex,
    List<Integer> noDictionaryDimensionChunkIndex) {
  for (ProjectionDimension projection : projectDimensions) {
    CarbonDimension dim = projection.getDimension();
    if (dim.getNumberOfChild() != 0) {
      // complex parent columns are handled by the complex-type path
      continue;
    }
    if (CarbonUtil.hasEncoding(dim.getEncoder(), Encoding.DICTIONARY)) {
      dictionaryDimensionChunkIndex
          .add(columnOrdinalToChunkIndexMapping.get(dim.getOrdinal()));
    } else if (!CarbonUtil.hasEncoding(dim.getEncoder(), Encoding.IMPLICIT)) {
      noDictionaryDimensionChunkIndex
          .add(columnOrdinalToChunkIndexMapping.get(dim.getOrdinal()));
    }
  }
}
/**
 * Builds a map from complex-dimension ordinal to its {@link GenericQueryType},
 * used during query execution to reassemble complex (array/struct) values.
 * Covers both projected complex dimensions and complex filter dimensions.
 *
 * @param queryDimensions            projected dimensions of the query
 * @param dimensionToBlockIndexMap   dimension ordinal to block index in file map
 * @param eachComplexColumnValueSize value size per complex column
 * @param columnIdToDictionaryMap    column id to dictionary mapping
 * @param filterDimensions           dimensions used in the filter, may be null
 * @return ordinal -&gt; query type map for all complex dimensions involved
 */
public static Map<Integer, GenericQueryType> getComplexDimensionsMap(
    List<ProjectionDimension> queryDimensions, Map<Integer, Integer> dimensionToBlockIndexMap,
    int[] eachComplexColumnValueSize, Map<String, Dictionary> columnIdToDictionaryMap,
    Set<CarbonDimension> filterDimensions) {
  Map<Integer, GenericQueryType> complexTypeMap = new HashMap<Integer, GenericQueryType>();
  for (ProjectionDimension dimension : queryDimensions) {
    CarbonDimension actualDimension;
    CarbonDimension complexDimension = null;
    if (null != dimension.getDimension().getComplexParentDimension()) {
      // get the parent dimension column.
      actualDimension = dimension.getParentDimension();
      if (dimension.getDimension().isComplex()) {
        complexDimension = dimension.getDimension();
      }
    } else {
      actualDimension = dimension.getDimension();
    }
    // primitive (childless) dimensions need no complex-type handling
    if (actualDimension.getNumberOfChild() == 0) {
      continue;
    }
    if (complexDimension != null) {
      fillParentDetails(dimensionToBlockIndexMap, complexDimension, complexTypeMap,
          eachComplexColumnValueSize, columnIdToDictionaryMap);
    }
    fillParentDetails(dimensionToBlockIndexMap, actualDimension, complexTypeMap,
        eachComplexColumnValueSize, columnIdToDictionaryMap);
  }
  if (null != filterDimensions) {
    for (CarbonDimension filterDimension : filterDimensions) {
      // do not fill any details for implicit dimension type
      if (filterDimension.hasEncoding(Encoding.IMPLICIT)
          || filterDimension.getNumberOfChild() == 0) {
        continue;
      }
      fillParentDetails(dimensionToBlockIndexMap, filterDimension, complexTypeMap,
          eachComplexColumnValueSize, columnIdToDictionaryMap);
    }
  }
  return complexTypeMap;
}
/**
 * Creates the parent {@link GenericQueryType} for one complex (array/struct)
 * dimension, registers it in the complex-type map, and recursively fills in
 * its children.
 *
 * @param dimensionToBlockIndexMap   dimension ordinal to block index mapping
 * @param dimension                  complex dimension to process
 * @param complexTypeMap             receives the ordinal -&gt; query type entry
 * @param eachComplexColumnValueSize value size per complex column
 * @param columnIdToDictionaryMap    column id to dictionary mapping
 * @throws UnsupportedOperationException for complex types other than array/struct
 */
private static void fillParentDetails(Map<Integer, Integer> dimensionToBlockIndexMap,
    CarbonDimension dimension, Map<Integer, GenericQueryType> complexTypeMap,
    int[] eachComplexColumnValueSize, Map<String, Dictionary> columnIdToDictionaryMap) {
  int parentBlockIndex = dimensionToBlockIndexMap.get(dimension.getOrdinal());
  GenericQueryType parentQueryType;
  if (DataTypes.isArrayType(dimension.getDataType())) {
    parentQueryType =
        new ArrayQueryType(dimension.getColName(), dimension.getColName(), parentBlockIndex);
  } else if (DataTypes.isStructType(dimension.getDataType())) {
    // Reuse the block index already looked up above; the original re-queried
    // the map for the same ordinal in this branch.
    parentQueryType =
        new StructQueryType(dimension.getColName(), dimension.getColName(), parentBlockIndex);
  } else {
    throw new UnsupportedOperationException(dimension.getDataType().getName() +
        " is not supported");
  }
  complexTypeMap.put(dimension.getOrdinal(), parentQueryType);
  fillChildrenDetails(eachComplexColumnValueSize, columnIdToDictionaryMap, parentBlockIndex,
      dimension, parentQueryType);
}
/**
 * Recursively adds child query types for a complex dimension. The block index
 * is incremented depth-first so every child column maps to its own chunk.
 *
 * @param eachComplexColumnValueSize value size per complex column
 * @param columnIdToDictionaryMap    column id to dictionary mapping
 * @param parentBlockIndex           block index of the parent column
 * @param dimension                  parent dimension whose children are added
 * @param parentQueryType            query type receiving the children
 * @return the last block index consumed by this subtree
 */
private static int fillChildrenDetails(int[] eachComplexColumnValueSize,
    Map<String, Dictionary> columnIdToDictionaryMap, int parentBlockIndex,
    CarbonDimension dimension, GenericQueryType parentQueryType) {
  for (int i = 0; i < dimension.getNumberOfChild(); i++) {
    // Hoist the child lookup; the original re-fetched
    // getListOfChildDimensions().get(i) up to eight times per iteration.
    CarbonDimension child = dimension.getListOfChildDimensions().get(i);
    DataType dataType = child.getDataType();
    if (DataTypes.isArrayType(dataType)) {
      parentQueryType.addChildren(
          new ArrayQueryType(child.getColName(), dimension.getColName(), ++parentBlockIndex));
    } else if (DataTypes.isStructType(dataType)) {
      parentQueryType.addChildren(
          new StructQueryType(child.getColName(), dimension.getColName(), ++parentBlockIndex));
    } else {
      boolean isDirectDictionary =
          CarbonUtil.hasEncoding(child.getEncoder(), Encoding.DIRECT_DICTIONARY);
      // NOTE(review): the original also computed a DICTIONARY-encoding flag
      // here but never used it; that unused local has been removed.
      parentQueryType.addChildren(
          new PrimitiveQueryType(child.getColName(), dimension.getColName(), ++parentBlockIndex,
              child.getDataType(),
              eachComplexColumnValueSize[child.getComplexTypeOrdinal()],
              columnIdToDictionaryMap.get(child.getColumnId()),
              isDirectDictionary));
    }
    if (child.getNumberOfChild() > 0) {
      parentBlockIndex = fillChildrenDetails(eachComplexColumnValueSize, columnIdToDictionaryMap,
          parentBlockIndex, child, parentQueryType);
    }
  }
  return parentBlockIndex;
}
/**
 * Collects every dimension and measure referenced by the filter expression
 * tree into the given output sets. A null resolver tree is a no-op.
 *
 * @param filterResolverTree resolved filter tree, may be null
 * @param filterDimensions   receives dimensions used in the filter
 * @param filterMeasure      receives measures used in the filter
 */
public static void getAllFilterDimensions(FilterResolverIntf filterResolverTree,
    Set<CarbonDimension> filterDimensions, Set<CarbonMeasure> filterMeasure) {
  if (null == filterResolverTree) {
    return;
  }
  Expression filterExpression = filterResolverTree.getFilterExpression();
  addColumnDimensions(filterExpression, filterDimensions, filterMeasure);
  // NOTE(review): the original declared a local List<ColumnExpression> that
  // was never populated and then iterated over it — a dead loop; removed.
}
/**
 * Recursively walks an expression tree and records every column reference it
 * contains: dimension columns go into {@code filterDimensions}, measure
 * columns into {@code filterMeasure}.
 */
private static void addColumnDimensions(Expression expression,
    Set<CarbonDimension> filterDimensions, Set<CarbonMeasure> filterMeasure) {
  if (expression == null) {
    return;
  }
  if (expression instanceof ColumnExpression) {
    // Leaf node: classify the referenced column and stop descending.
    ColumnExpression columnExpression = (ColumnExpression) expression;
    if (columnExpression.isDimension()) {
      filterDimensions.add(columnExpression.getDimension());
    } else {
      filterMeasure.add((CarbonMeasure) columnExpression.getCarbonColumn());
    }
    return;
  }
  for (Expression child : expression.getChildren()) {
    addColumnDimensions(child, filterDimensions, filterMeasure);
  }
}
/** Extracts the ordinal of every filter measure into a set. */
private static Set<Integer> getFilterMeasureOrdinal(Set<CarbonMeasure> filterMeasures) {
  Set<Integer> ordinals = new HashSet<>();
  for (CarbonMeasure measure : filterMeasures) {
    ordinals.add(measure.getOrdinal());
  }
  return ordinals;
}
/**
 * Collects the ordinals of the given filter dimensions, including the
 * ordinals of their child dimensions (see getChildDimensionOrdinal for
 * which children are included).
 */
private static Set<Integer> getFilterDimensionOrdinal(Set<CarbonDimension> filterDimensions) {
  Set<Integer> filterDimensionsOrdinal = new HashSet<>();
  for (CarbonDimension filterDimension : filterDimensions) {
    filterDimensionsOrdinal.add(filterDimension.getOrdinal());
    // complex dimensions contribute their children's ordinals as well
    getChildDimensionOrdinal(filterDimension, filterDimensionsOrdinal);
  }
  return filterDimensionsOrdinal;
}
/**
 * Recursively collects child-dimension ordinals. Nested complex children are
 * descended into; leaf children are added unless they are direct-dictionary
 * encoded.
 */
private static void getChildDimensionOrdinal(CarbonDimension queryDimensions,
    Set<Integer> filterDimensionsOrdinal) {
  for (int j = 0; j < queryDimensions.getNumberOfChild(); j++) {
    CarbonDimension child = queryDimensions.getListOfChildDimensions().get(j);
    if (child.getNumberOfChild() > 0) {
      getChildDimensionOrdinal(child, filterDimensionsOrdinal);
    } else if (!CarbonUtil.hasEncoding(child.getEncoder(), Encoding.DIRECT_DICTIONARY)) {
      filterDimensionsOrdinal.add(child.getOrdinal());
    }
  }
}
/**
 * Converts the thrift presence meta's compressed null-bitmap into a wrapper
 * {@link BitSet}.
 *
 * @param presentMetadataThrift thrift presence metadata
 * @return decompressed null bitset, or an empty one-bit set when no stream is present
 */
public static BitSet getNullBitSet(
    org.apache.carbondata.format.PresenceMeta presentMetadataThrift) {
  Compressor compressor = CompressorFactory.getInstance().getCompressor();
  // Local renamed from snake_case (present_bit_stream) to Java camelCase;
  // the thrift-generated getter name is unchanged.
  final byte[] presentBitStream = presentMetadataThrift.getPresent_bit_stream();
  if (presentBitStream == null) {
    return new BitSet(1);
  }
  return BitSet.valueOf(compressor.unCompressByte(presentBitStream));
}
}
|
|
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.codeInspection.javaDoc;
import com.intellij.codeInsight.daemon.QuickFixBundle;
import com.intellij.codeInspection.CommonQuickFixBundle;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.java.JavaBundle;
import com.intellij.openapi.project.Project;
import com.intellij.pom.Navigatable;
import com.intellij.profile.codeInspection.ProjectInspectionProfileManager;
import com.intellij.psi.*;
import com.intellij.psi.javadoc.PsiDocComment;
import com.intellij.psi.javadoc.PsiDocTag;
import com.intellij.psi.javadoc.PsiDocTagValue;
import com.intellij.psi.javadoc.PsiInlineDocTag;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Container for javadoc-related quick fixes used by the javadoc inspections.
 * Not instantiable; every fix is a static nested class implementing
 * {@link LocalQuickFix}.
 */
public class JavaDocFixes {
  // Utility holder: private constructor prevents instantiation.
  private JavaDocFixes(){
  }

  /**
   * Inserts a missing javadoc tag ("@tag value") into the enclosing doc
   * comment, before the anchor supplied by {@link #getAnchor} (or appended
   * at the end when there is no anchor).
   */
  public static class AddMissingTagFix implements LocalQuickFix {
    private final String myTag;   // tag name without the leading '@'
    private final String myValue; // tag value text

    AddMissingTagFix(@NotNull String tag, @NotNull String value) {
      myTag = tag;
      myValue = value;
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiDocComment docComment = PsiTreeUtil.getParentOfType(descriptor.getEndElement(), PsiDocComment.class);
      if (docComment != null) {
        PsiDocTag tag = JavaPsiFacade.getElementFactory(project).createDocTagFromText("@" + myTag + " " + myValue);
        PsiElement addedTag;
        // Subclasses may supply an anchor so the tag lands in the right position.
        PsiElement anchor = getAnchor(descriptor);
        if (anchor != null) {
          addedTag = docComment.addBefore(tag, anchor);
        }
        else {
          addedTag = docComment.add(tag);
        }
        moveCaretAfter(addedTag);
      }
    }

    /** Element the new tag should be inserted before; null means append at the end. */
    @Nullable
    protected PsiElement getAnchor(ProblemDescriptor descriptor) {
      return null;
    }

    private static void moveCaretAfter(PsiElement newCaretPosition) {
      PsiElement sibling = newCaretPosition.getNextSibling();
      // NOTE(review): unconditional cast — assumes the next sibling is always
      // Navigatable at this point; confirm a ClassCastException cannot occur.
      if (sibling != null) {
        ((Navigatable)sibling).navigate(true);
      }
    }

    @Override
    @NotNull
    public String getName() {
      return JavaBundle.message("inspection.javadoc.problem.add.tag", myTag, myValue);
    }

    @Override
    @NotNull
    public String getFamilyName() {
      return JavaBundle.message("inspection.javadoc.problem.add.tag.family");
    }
  }

  /**
   * Adds a missing {@code @param} tag for a specific method parameter,
   * choosing the insertion point so existing {@code @param} tags stay in
   * parameter-declaration order.
   */
  public static class AddMissingParamTagFix extends AddMissingTagFix {
    private final String myName; // name of the parameter to document

    AddMissingParamTagFix(String name) {
      super("param", name);
      myName = name;
    }

    @NotNull
    @Override
    public String getFamilyName() {
      return JavaBundle.message("inspection.javadoc.problem.add.param.tag.family");
    }

    /**
     * Finds the tag before which the new @param should be inserted: the tag of
     * the nearest following parameter, or the tag after the nearest preceding
     * parameter's tag; falls back to the first tag when no @param tags exist.
     */
    @Override
    @Nullable
    protected PsiElement getAnchor(ProblemDescriptor descriptor) {
      PsiElement element = descriptor.getPsiElement();
      PsiElement parent = element == null ? null : element.getParent();
      if (!(parent instanceof PsiDocComment)) return null;
      final PsiDocComment docComment = (PsiDocComment)parent;
      final PsiJavaDocumentedElement owner = docComment.getOwner();
      if (!(owner instanceof PsiMethod)) return null;
      PsiParameter[] parameters = ((PsiMethod)owner).getParameterList().getParameters();
      PsiParameter myParam = ContainerUtil.find(parameters, psiParameter -> myName.equals(psiParameter.getName()));
      if (myParam == null) return null;
      PsiDocTag[] tags = docComment.findTagsByName("param");
      if (tags.length == 0) { //insert as first tag or append to description
        tags = docComment.getTags();
        if (tags.length == 0) return null;
        return tags[0];
      }
      // Look forward: insert before the tag of the next documented parameter.
      PsiParameter nextParam = PsiTreeUtil.getNextSiblingOfType(myParam, PsiParameter.class);
      while (nextParam != null) {
        for (PsiDocTag tag : tags) {
          if (matches(nextParam, tag)) {
            return tag;
          }
        }
        nextParam = PsiTreeUtil.getNextSiblingOfType(nextParam, PsiParameter.class);
      }
      // Look backward: insert after the tag of the previous documented parameter.
      PsiParameter prevParam = PsiTreeUtil.getPrevSiblingOfType(myParam, PsiParameter.class);
      while (prevParam != null) {
        for (PsiDocTag tag : tags) {
          if (matches(prevParam, tag)) {
            return PsiTreeUtil.getNextSiblingOfType(tag, PsiDocTag.class);
          }
        }
        prevParam = PsiTreeUtil.getPrevSiblingOfType(prevParam, PsiParameter.class);
      }
      return null;
    }

    // True when the tag's value text starts with this parameter's name.
    private static boolean matches(PsiParameter param, PsiDocTag tag) {
      PsiDocTagValue valueElement = tag.getValueElement();
      String name = param.getName();
      return valueElement != null && valueElement.getText().trim().startsWith(name);
    }

    @Override
    @NotNull
    public String getName() {
      return JavaBundle.message("inspection.javadoc.problem.add.param.tag", myName);
    }
  }

  /**
   * Registers an unknown javadoc tag as an additional custom tag on the
   * inspection profile, so it is no longer reported.
   */
  public static class AddUnknownTagToCustoms implements LocalQuickFix {
    private final JavadocDeclarationInspection myInspection;
    private final String myTag;

    AddUnknownTagToCustoms(@NotNull JavadocDeclarationInspection inspection, @NotNull String tag) {
      myInspection = inspection;
      myTag = tag;
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      myInspection.registerAdditionalTag(myTag);
      ProjectInspectionProfileManager.getInstance(project).fireProfileChanged();
    }

    @Override
    @NotNull
    public String getName() {
      return QuickFixBundle.message("add.docTag.to.custom.tags", myTag);
    }

    // Profile mutation, not a document edit — no write action needed.
    @Override
    public boolean startInWriteAction() {
      return false;
    }

    @Override
    @NotNull
    public String getFamilyName() {
      //noinspection DialogTitleCapitalization
      return QuickFixBundle.message("fix.javadoc.family");
    }
  }

  /** Deletes the javadoc tag the problem element belongs to. */
  public static class RemoveTagFix implements LocalQuickFix {
    private final String myTagName;

    RemoveTagFix(String tagName) {
      myTagName = tagName;
    }

    @NotNull
    @Override
    public String getName() {
      return JavaBundle.message("quickfix.text.remove.javadoc.0", myTagName);
    }

    @NotNull
    @Override
    public String getFamilyName() {
      return JavaBundle.message("quickfix.family.remove.javadoc.tag");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiDocTag tag = PsiTreeUtil.getParentOfType(descriptor.getPsiElement(), PsiDocTag.class);
      if (tag != null) {
        tag.delete();
      }
    }
  }

  /**
   * Base fix that replaces the problem element inside its enclosing tag with
   * an element extracted from a freshly built "donor" javadoc comment, keeping
   * the surrounding siblings and trailing whitespace intact.
   */
  private static abstract class AbstractUnknownTagFix implements LocalQuickFix {
    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      final PsiElement element = descriptor.getPsiElement();
      if (element == null) return;
      final PsiElement enclosingTag = element.getParent();
      if (enclosingTag == null) return;
      final PsiElement javadoc = enclosingTag.getParent();
      if (javadoc == null) return;
      final PsiDocComment donorJavadoc = createDonorJavadoc(element);
      final PsiElement codeTag = extractElement(donorJavadoc);
      if (codeTag == null) return;
      // Re-emit: siblings before the element, the replacement, siblings after,
      // then the tag's trailing whitespace — finally drop the old tag.
      for (var e = enclosingTag.getFirstChild(); e != element && e != null; e = e.getNextSibling()) {
        javadoc.addBefore(e, enclosingTag);
      }
      javadoc.addBefore(codeTag, enclosingTag);
      for (var e = element.getNextSibling(); e != null; e = e.getNextSibling()) {
        javadoc.addBefore(e, enclosingTag);
      }
      final PsiElement sibling = enclosingTag.getNextSibling();
      if (sibling != null && sibling.getNode().getElementType() == TokenType.WHITE_SPACE) {
        javadoc.addBefore(sibling, enclosingTag);
      }
      enclosingTag.delete();
    }

    /** Builds a throwaway javadoc comment containing the replacement element. */
    protected abstract @NotNull PsiDocComment createDonorJavadoc(@NotNull PsiElement element);

    /** Pulls the replacement element out of the donor javadoc; null aborts the fix. */
    protected abstract @Nullable PsiElement extractElement(@Nullable PsiDocComment donorJavadoc);
  }

  /** Wraps an unknown inline tag's text in a {@code {@code ...}} tag. */
  public static class EncloseWithCodeFix extends AbstractUnknownTagFix {
    private final String myName;

    public EncloseWithCodeFix(String name) {
      myName = name;
    }

    @Override
    public @NotNull String getFamilyName() {
      return CommonQuickFixBundle.message("fix.replace.x.with.y", myName, "{@code " + myName + "}");
    }

    @Override
    protected @NotNull PsiDocComment createDonorJavadoc(@NotNull PsiElement element) {
      final PsiElementFactory instance = PsiElementFactory.getInstance(element.getProject());
      return instance.createDocCommentFromText(String.format("/** {@code %s} */", element.getText()));
    }

    @Override
    protected @Nullable PsiElement extractElement(@Nullable PsiDocComment donorJavadoc) {
      return PsiTreeUtil.findChildOfType(donorJavadoc, PsiInlineDocTag.class);
    }
  }

  /** Escapes a doubled '@' prefix, e.g. turns "@@tag" text into plain "@tag". */
  public static class EscapeAtQuickFix extends AbstractUnknownTagFix {
    private final String myName;

    public EscapeAtQuickFix(String name) {
      myName = name;
    }

    @Override
    public @NotNull String getFamilyName() {
      return CommonQuickFixBundle.message("fix.replace.x.with.y", myName, "@" + myName.substring(1));
    }

    @Override
    protected @NotNull PsiDocComment createDonorJavadoc(@NotNull PsiElement element) {
      final PsiElementFactory instance = PsiElementFactory.getInstance(element.getProject());
      return instance.createDocCommentFromText("/** @" + element.getText().substring(1) + " */");
    }

    @Override
    protected @Nullable PsiElement extractElement(@Nullable PsiDocComment donorJavadoc) {
      if (donorJavadoc == null) return null;
      // NOTE(review): relies on children[2] being the inserted element in the
      // donor comment's fixed structure — verify against the PSI tree shape.
      return donorJavadoc.getChildren()[2];
    }
  }
}
|
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andres Almiray
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kordamp.ikonli.material2;
import org.kordamp.ikonli.Ikon;
/**
* @author Andres Almiray
*/
public enum Material2RoundMZ implements Ikon {
MAIL("mdrmz-mail", '\ue3de'),
MAIL_OUTLINE("mdrmz-mail_outline", '\ue3e0'),
MAP("mdrmz-map", '\ue3e1'),
MAPS_UGC("mdrmz-maps_ugc", '\ue7c8'),
MARK_CHAT_READ("mdrmz-mark_chat_read", '\ue7ca'),
MARK_CHAT_UNREAD("mdrmz-mark_chat_unread", '\ue7cc'),
MARK_EMAIL_READ("mdrmz-mark_email_read", '\ue7ce'),
MARK_EMAIL_UNREAD("mdrmz-mark_email_unread", '\ue7d0'),
MARKUNREAD("mdrmz-markunread", '\ue3e3'),
MARKUNREAD_MAILBOX("mdrmz-markunread_mailbox", '\ue3e5'),
MASKS("mdrmz-masks", '\ue8ab'),
MAXIMIZE("mdrmz-maximize", '\ue3e7'),
MEDIATION("mdrmz-mediation", '\ue7d2'),
MEDICAL_SERVICES("mdrmz-medical_services", '\ue7d3'),
MEETING_ROOM("mdrmz-meeting_room", '\ue3e8'),
MEMORY("mdrmz-memory", '\ue3ea'),
MENU("mdrmz-menu", '\ue3ec'),
MENU_BOOK("mdrmz-menu_book", '\ue3ed'),
MENU_OPEN("mdrmz-menu_open", '\ue3ef'),
MERGE_TYPE("mdrmz-merge_type", '\ue3f0'),
MESSAGE("mdrmz-message", '\ue3f1'),
MIC("mdrmz-mic", '\ue3f3'),
MIC_NONE("mdrmz-mic_none", '\ue3f5'),
MIC_OFF("mdrmz-mic_off", '\ue3f7'),
MICROWAVE("mdrmz-microwave", '\ue879'),
MILITARY_TECH("mdrmz-military_tech", '\ue7d5'),
MINIMIZE("mdrmz-minimize", '\ue3f9'),
MINUS("mdrmz-minus", '\ue3fa'),
MISCELLANEOUS_SERVICES("mdrmz-miscellaneous_services", '\ue7d7'),
MISSED_VIDEO_CALL("mdrmz-missed_video_call", '\ue3fb'),
MMS("mdrmz-mms", '\ue3fd'),
MOBILE_FRIENDLY("mdrmz-mobile_friendly", '\ue3ff'),
MOBILE_OFF("mdrmz-mobile_off", '\ue400'),
MOBILE_SCREEN_SHARE("mdrmz-mobile_screen_share", '\ue401'),
MODE_COMMENT("mdrmz-mode_comment", '\ue403'),
MODEL_TRAINING("mdrmz-model_training", '\ue7d8'),
MONETIZATION_ON("mdrmz-monetization_on", '\ue405'),
MONEY("mdrmz-money", '\ue407'),
MONEY_OFF("mdrmz-money_off", '\ue409'),
MONOCHROME_PHOTOS("mdrmz-monochrome_photos", '\ue40a'),
MOOD("mdrmz-mood", '\ue40c'),
MOOD_BAD("mdrmz-mood_bad", '\ue40e'),
MOPED("mdrmz-moped", '\ue7d9'),
MORE("mdrmz-more", '\ue410'),
MORE_HORIZ("mdrmz-more_horiz", '\ue412'),
MORE_TIME("mdrmz-more_time", '\ue7db'),
MORE_VERT("mdrmz-more_vert", '\ue413'),
MOTION_PHOTOS_ON("mdrmz-motion_photos_on", '\ue8ad'),
MOTION_PHOTOS_PAUSE("mdrmz-motion_photos_pause", '\ue8bd'),
MOTION_PHOTOS_PAUSED("mdrmz-motion_photos_paused", '\ue8ae'),
MOTORCYCLE("mdrmz-motorcycle", '\ue414'),
MOUSE("mdrmz-mouse", '\ue416'),
MOVE_TO_INBOX("mdrmz-move_to_inbox", '\ue418'),
MOVIE("mdrmz-movie", '\ue41a'),
MOVIE_CREATION("mdrmz-movie_creation", '\ue41c'),
MOVIE_FILTER("mdrmz-movie_filter", '\ue41e'),
MULTILINE_CHART("mdrmz-multiline_chart", '\ue420'),
MULTIPLE_STOP("mdrmz-multiple_stop", '\ue7dc'),
MUSEUM("mdrmz-museum", '\ue421'),
MUSIC_NOTE("mdrmz-music_note", '\ue423'),
MUSIC_OFF("mdrmz-music_off", '\ue425'),
MUSIC_VIDEO("mdrmz-music_video", '\ue427'),
MY_LOCATION("mdrmz-my_location", '\ue429'),
NAT("mdrmz-nat", '\ue7dd'),
NATURE("mdrmz-nature", '\ue42b'),
NATURE_PEOPLE("mdrmz-nature_people", '\ue42d'),
NAVIGATE_BEFORE("mdrmz-navigate_before", '\ue42f'),
NAVIGATE_NEXT("mdrmz-navigate_next", '\ue430'),
NAVIGATION("mdrmz-navigation", '\ue431'),
NEAR_ME("mdrmz-near_me", '\ue433'),
NEAR_ME_DISABLED("mdrmz-near_me_disabled", '\ue87b'),
NETWORK_CELL("mdrmz-network_cell", '\ue435'),
NETWORK_CHECK("mdrmz-network_check", '\ue437'),
NETWORK_LOCKED("mdrmz-network_locked", '\ue438'),
NETWORK_WIFI("mdrmz-network_wifi", '\ue439'),
NEW_RELEASES("mdrmz-new_releases", '\ue43b'),
NEXT_PLAN("mdrmz-next_plan", '\ue7df'),
NEXT_WEEK("mdrmz-next_week", '\ue43d'),
NFC("mdrmz-nfc", '\ue43f'),
NIGHT_SHELTER("mdrmz-night_shelter", '\ue87d'),
NIGHTS_STAY("mdrmz-nights_stay", '\ue440'),
NO_BACKPACK("mdrmz-no_backpack", '\ue8be'),
NO_CELL("mdrmz-no_cell", '\ue7e1'),
NO_DRINKS("mdrmz-no_drinks", '\ue7e3'),
NO_ENCRYPTION("mdrmz-no_encryption", '\ue442'),
NO_FLASH("mdrmz-no_flash", '\ue7e5'),
NO_FOOD("mdrmz-no_food", '\ue7e7'),
NO_LUGGAGE("mdrmz-no_luggage", '\ue8c0'),
NO_MEALS("mdrmz-no_meals", '\ue87f'),
NO_MEETING_ROOM("mdrmz-no_meeting_room", '\ue444'),
NO_PHOTOGRAPHY("mdrmz-no_photography", '\ue7e9'),
NO_SIM("mdrmz-no_sim", '\ue446'),
NO_STROLLER("mdrmz-no_stroller", '\ue7eb'),
NO_TRANSFER("mdrmz-no_transfer", '\ue880'),
NORTH("mdrmz-north", '\ue882'),
NORTH_EAST("mdrmz-north_east", '\ue883'),
NORTH_WEST("mdrmz-north_west", '\ue884'),
NOT_ACCESSIBLE("mdrmz-not_accessible", '\ue7ed'),
NOT_EQUAL("mdrmz-not_equal", '\ue448'),
NOT_INTERESTED("mdrmz-not_interested", '\ue449'),
NOT_LISTED_LOCATION("mdrmz-not_listed_location", '\ue44a'),
NOT_STARTED("mdrmz-not_started", '\ue7ee'),
NOTE("mdrmz-note", '\ue44c'),
NOTE_ADD("mdrmz-note_add", '\ue44e'),
NOTES("mdrmz-notes", '\ue450'),
NOTIFICATION_IMPORTANT("mdrmz-notification_important", '\ue451'),
NOTIFICATIONS("mdrmz-notifications", '\ue453'),
NOTIFICATIONS_ACTIVE("mdrmz-notifications_active", '\ue455'),
NOTIFICATIONS_NONE("mdrmz-notifications_none", '\ue457'),
NOTIFICATIONS_OFF("mdrmz-notifications_off", '\ue459'),
NOTIFICATIONS_PAUSED("mdrmz-notifications_paused", '\ue45b'),
OFFLINE_BOLT("mdrmz-offline_bolt", '\ue45d'),
OFFLINE_PIN("mdrmz-offline_pin", '\ue45f'),
ONDEMAND_VIDEO("mdrmz-ondemand_video", '\ue461'),
ONLINE_PREDICTION("mdrmz-online_prediction", '\ue7f0'),
OPACITY("mdrmz-opacity", '\ue463'),
OPEN_IN_BROWSER("mdrmz-open_in_browser", '\ue465'),
OPEN_IN_FULL("mdrmz-open_in_full", '\ue7f1'),
OPEN_IN_NEW("mdrmz-open_in_new", '\ue466'),
OPEN_WITH("mdrmz-open_with", '\ue467'),
OUTBOND("mdrmz-outbond", '\ue8c2'),
OUTDOOR_GRILL("mdrmz-outdoor_grill", '\ue468'),
OUTLET("mdrmz-outlet", '\ue7f2'),
OUTLINED_FLAG("mdrmz-outlined_flag", '\ue46a'),
PAGES("mdrmz-pages", '\ue46b'),
PAGEVIEW("mdrmz-pageview", '\ue46d'),
PALETTE("mdrmz-palette", '\ue46f'),
PAN_TOOL("mdrmz-pan_tool", '\ue471'),
PANORAMA("mdrmz-panorama", '\ue473'),
PANORAMA_FISH_EYE("mdrmz-panorama_fish_eye", '\ue475'),
PANORAMA_HORIZONTAL("mdrmz-panorama_horizontal", '\ue477'),
PANORAMA_VERTICAL("mdrmz-panorama_vertical", '\ue479'),
PANORAMA_WIDE_ANGLE("mdrmz-panorama_wide_angle", '\ue47b'),
PARTY_MODE("mdrmz-party_mode", '\ue47d'),
PAUSE("mdrmz-pause", '\ue47f'),
PAUSE_CIRCLE_FILLED("mdrmz-pause_circle_filled", '\ue480'),
PAUSE_CIRCLE_OUTLINE("mdrmz-pause_circle_outline", '\ue482'),
PAUSE_PRESENTATION("mdrmz-pause_presentation", '\ue483'),
PAYMENT("mdrmz-payment", '\ue485'),
PAYMENTS("mdrmz-payments", '\ue7f4'),
PEDAL_BIKE("mdrmz-pedal_bike", '\ue7f6'),
PENDING("mdrmz-pending", '\ue7f7'),
PENDING_ACTIONS("mdrmz-pending_actions", '\ue7f9'),
PEOPLE("mdrmz-people", '\ue487'),
PEOPLE_ALT("mdrmz-people_alt", '\ue489'),
PEOPLE_OUTLINE("mdrmz-people_outline", '\ue48b'),
PERCENTAGE("mdrmz-percentage", '\ue48d'),
PERM_CAMERA_MIC("mdrmz-perm_camera_mic", '\ue48f'),
PERM_CONTACT_CALENDAR("mdrmz-perm_contact_calendar", '\ue491'),
PERM_DATA_SETTING("mdrmz-perm_data_setting", '\ue493'),
PERM_DEVICE_INFORMATION("mdrmz-perm_device_information", '\ue494'),
PERM_IDENTITY("mdrmz-perm_identity", '\ue496'),
PERM_MEDIA("mdrmz-perm_media", '\ue498'),
PERM_PHONE_MSG("mdrmz-perm_phone_msg", '\ue49a'),
PERM_SCAN_WIFI("mdrmz-perm_scan_wifi", '\ue49c'),
PERSON("mdrmz-person", '\ue49e'),
PERSON_ADD("mdrmz-person_add", '\ue4a0'),
PERSON_ADD_ALT_1("mdrmz-person_add_alt_1", '\ue7fb'),
PERSON_ADD_DISABLED("mdrmz-person_add_disabled", '\ue4a2'),
PERSON_OUTLINE("mdrmz-person_outline", '\ue4a4'),
PERSON_PIN("mdrmz-person_pin", '\ue4a6'),
PERSON_PIN_CIRCLE("mdrmz-person_pin_circle", '\ue4a8'),
PERSON_REMOVE("mdrmz-person_remove", '\ue7fd'),
PERSON_REMOVE_ALT_1("mdrmz-person_remove_alt_1", '\ue7ff'),
PERSON_SEARCH("mdrmz-person_search", '\ue801'),
PERSONAL_VIDEO("mdrmz-personal_video", '\ue4aa'),
PEST_CONTROL("mdrmz-pest_control", '\ue803'),
PEST_CONTROL_RODENT("mdrmz-pest_control_rodent", '\ue805'),
PETS("mdrmz-pets", '\ue4ac'),
PHONE("mdrmz-phone", '\ue4ad'),
PHONE_ANDROID("mdrmz-phone_android", '\ue4af'),
PHONE_BLUETOOTH_SPEAKER("mdrmz-phone_bluetooth_speaker", '\ue4b1'),
PHONE_CALLBACK("mdrmz-phone_callback", '\ue4b3'),
PHONE_DISABLED("mdrmz-phone_disabled", '\ue4b5'),
PHONE_ENABLED("mdrmz-phone_enabled", '\ue4b6'),
PHONE_FORWARDED("mdrmz-phone_forwarded", '\ue4b7'),
PHONE_IN_TALK("mdrmz-phone_in_talk", '\ue4b9'),
PHONE_IPHONE("mdrmz-phone_iphone", '\ue4bb'),
PHONE_LOCKED("mdrmz-phone_locked", '\ue4bd'),
PHONE_MISSED("mdrmz-phone_missed", '\ue4bf'),
PHONE_PAUSED("mdrmz-phone_paused", '\ue4c1'),
PHONELINK("mdrmz-phonelink", '\ue4c3'),
PHONELINK_ERASE("mdrmz-phonelink_erase", '\ue4c5'),
PHONELINK_LOCK("mdrmz-phonelink_lock", '\ue4c6'),
PHONELINK_OFF("mdrmz-phonelink_off", '\ue4c7'),
PHONELINK_RING("mdrmz-phonelink_ring", '\ue4c9'),
PHONELINK_SETUP("mdrmz-phonelink_setup", '\ue4cb'),
PHOTO("mdrmz-photo", '\ue4cc'),
PHOTO_ALBUM("mdrmz-photo_album", '\ue4ce'),
PHOTO_CAMERA("mdrmz-photo_camera", '\ue4d0'),
PHOTO_FILTER("mdrmz-photo_filter", '\ue4d2'),
PHOTO_LIBRARY("mdrmz-photo_library", '\ue4d3'),
PHOTO_SIZE_SELECT_ACTUAL("mdrmz-photo_size_select_actual", '\ue4d5'),
PHOTO_SIZE_SELECT_LARGE("mdrmz-photo_size_select_large", '\ue4d7'),
PHOTO_SIZE_SELECT_SMALL("mdrmz-photo_size_select_small", '\ue4d8'),
PICTURE_AS_PDF("mdrmz-picture_as_pdf", '\ue4d9'),
PICTURE_IN_PICTURE("mdrmz-picture_in_picture", '\ue4db'),
PICTURE_IN_PICTURE_ALT("mdrmz-picture_in_picture_alt", '\ue4dd'),
PIE_CHART("mdrmz-pie_chart", '\ue4df'),
PIN("mdrmz-pin", '\ue4e1'),
PIN_DROP("mdrmz-pin_drop", '\ue4e3'),
PIN_OFF("mdrmz-pin_off", '\ue4e5'),
PLACE("mdrmz-place", '\ue4e7'),
PLAGIARISM("mdrmz-plagiarism", '\ue807'),
PLAY_ARROW("mdrmz-play_arrow", '\ue4e9'),
PLAY_CIRCLE_FILLED("mdrmz-play_circle_filled", '\ue4eb'),
PLAY_CIRCLE_FILLED_WHITE("mdrmz-play_circle_filled_white", '\ue4ed'),
PLAY_CIRCLE_OUTLINE("mdrmz-play_circle_outline", '\ue4ef'),
PLAY_FOR_WORK("mdrmz-play_for_work", '\ue4f0'),
PLAYLIST_ADD("mdrmz-playlist_add", '\ue4f1'),
PLAYLIST_ADD_CHECK("mdrmz-playlist_add_check", '\ue4f2'),
PLAYLIST_PLAY("mdrmz-playlist_play", '\ue4f3'),
PLUMBING("mdrmz-plumbing", '\ue809'),
PLUS("mdrmz-plus", '\ue4f4'),
PLUS_MINUS("mdrmz-plus_minus", '\ue4f5'),
PLUS_MINUS_ALT("mdrmz-plus_minus_alt", '\ue4f6'),
PLUS_ONE("mdrmz-plus_one", '\ue4f7'),
POINT_OF_SALE("mdrmz-point_of_sale", '\ue80a'),
POLICY("mdrmz-policy", '\ue4f8'),
POLL("mdrmz-poll", '\ue4fa'),
POLYMER("mdrmz-polymer", '\ue4fc'),
POOL("mdrmz-pool", '\ue4fd'),
PORTABLE_WIFI_OFF("mdrmz-portable_wifi_off", '\ue4ff'),
PORTRAIT("mdrmz-portrait", '\ue500'),
POST_ADD("mdrmz-post_add", '\ue502'),
POWER("mdrmz-power", '\ue503'),
POWER_INPUT("mdrmz-power_input", '\ue505'),
POWER_OFF("mdrmz-power_off", '\ue506'),
POWER_SETTINGS_NEW("mdrmz-power_settings_new", '\ue508'),
PREGNANT_WOMAN("mdrmz-pregnant_woman", '\ue509'),
PRESENT_TO_ALL("mdrmz-present_to_all", '\ue50a'),
PREVIEW("mdrmz-preview", '\ue80c'),
PRINT("mdrmz-print", '\ue50c'),
PRINT_DISABLED("mdrmz-print_disabled", '\ue50e'),
PRIORITY_HIGH("mdrmz-priority_high", '\ue510'),
PRIVACY_TIP("mdrmz-privacy_tip", '\ue80e'),
PSYCHOLOGY("mdrmz-psychology", '\ue810'),
PUBLIC("mdrmz-public", '\ue511'),
PUBLIC_OFF("mdrmz-public_off", '\ue812'),
PUBLISH("mdrmz-publish", '\ue513'),
PUBLISHED_WITH_CHANGES("mdrmz-published_with_changes", '\ue8c4'),
PUSH_PIN("mdrmz-push_pin", '\ue814'),
QR_CODE("mdrmz-qr_code", '\ue816'),
QR_CODE_2("mdrmz-qr_code_2", '\ue8d1'),
QR_CODE_SCANNER("mdrmz-qr_code_scanner", '\ue885'),
QRCODE("mdrmz-qrcode", '\ue515'),
QUERY_BUILDER("mdrmz-query_builder", '\ue517'),
QUESTION_ANSWER("mdrmz-question_answer", '\ue519'),
QUEUE("mdrmz-queue", '\ue51b'),
QUEUE_MUSIC("mdrmz-queue_music", '\ue51d'),
QUEUE_PLAY_NEXT("mdrmz-queue_play_next", '\ue51f'),
QUICKREPLY("mdrmz-quickreply", '\ue818'),
RADIO("mdrmz-radio", '\ue520'),
RADIO_BUTTON_CHECKED("mdrmz-radio_button_checked", '\ue522'),
RADIO_BUTTON_UNCHECKED("mdrmz-radio_button_unchecked", '\ue523'),
RATE_REVIEW("mdrmz-rate_review", '\ue524'),
READ_MORE("mdrmz-read_more", '\ue81a'),
RECEIPT("mdrmz-receipt", '\ue526'),
RECEIPT_LONG("mdrmz-receipt_long", '\ue81b'),
RECENT_ACTORS("mdrmz-recent_actors", '\ue528'),
RECORD_VOICE_OVER("mdrmz-record_voice_over", '\ue52a'),
REDEEM("mdrmz-redeem", '\ue52c'),
REDO("mdrmz-redo", '\ue52e'),
REDUCE_CAPACITY("mdrmz-reduce_capacity", '\ue8af'),
REFRESH("mdrmz-refresh", '\ue52f'),
REMOVE("mdrmz-remove", '\ue530'),
REMOVE_CIRCLE("mdrmz-remove_circle", '\ue531'),
REMOVE_CIRCLE_OUTLINE("mdrmz-remove_circle_outline", '\ue533'),
REMOVE_FROM_QUEUE("mdrmz-remove_from_queue", '\ue534'),
REMOVE_RED_EYE("mdrmz-remove_red_eye", '\ue536'),
REMOVE_SHOPPING_CART("mdrmz-remove_shopping_cart", '\ue538'),
REORDER("mdrmz-reorder", '\ue53a'),
REPEAT("mdrmz-repeat", '\ue53b'),
REPEAT_ONE("mdrmz-repeat_one", '\ue53c'),
REPLAY("mdrmz-replay", '\ue53d'),
REPLAY_10("mdrmz-replay_10", '\ue53e'),
REPLAY_30("mdrmz-replay_30", '\ue53f'),
REPLAY_5("mdrmz-replay_5", '\ue540'),
REPLY("mdrmz-reply", '\ue541'),
REPLY_ALL("mdrmz-reply_all", '\ue542'),
REPORT("mdrmz-report", '\ue543'),
REPORT_OFF("mdrmz-report_off", '\ue545'),
REPORT_PROBLEM("mdrmz-report_problem", '\ue547'),
REQUEST_PAGE("mdrmz-request_page", '\ue8c5'),
REQUEST_QUOTE("mdrmz-request_quote", '\ue81d'),
RESTAURANT("mdrmz-restaurant", '\ue549'),
RESTAURANT_MENU("mdrmz-restaurant_menu", '\ue54a'),
RESTORE("mdrmz-restore", '\ue54b'),
RESTORE_FROM_TRASH("mdrmz-restore_from_trash", '\ue54c'),
RESTORE_PAGE("mdrmz-restore_page", '\ue54e'),
RICE_BOWL("mdrmz-rice_bowl", '\ue886'),
RING_VOLUME("mdrmz-ring_volume", '\ue550'),
ROCKET("mdrmz-rocket", '\ue552'),
ROOFING("mdrmz-roofing", '\ue888'),
ROOM("mdrmz-room", '\ue554'),
ROOM_PREFERENCES("mdrmz-room_preferences", '\ue81f'),
ROOM_SERVICE("mdrmz-room_service", '\ue556'),
ROTATE_90_DEGREES_CCW("mdrmz-rotate_90_degrees_ccw", '\ue558'),
ROTATE_LEFT("mdrmz-rotate_left", '\ue55a'),
ROTATE_RIGHT("mdrmz-rotate_right", '\ue55b'),
ROUNDED_CORNER("mdrmz-rounded_corner", '\ue55c'),
ROUTER("mdrmz-router", '\ue55d'),
ROWING("mdrmz-rowing", '\ue55f'),
RSS_FEED("mdrmz-rss_feed", '\ue560'),
RULE("mdrmz-rule", '\ue821'),
RULE_FOLDER("mdrmz-rule_folder", '\ue822'),
RUN_CIRCLE("mdrmz-run_circle", '\ue824'),
RV_HOOKUP("mdrmz-rv_hookup", '\ue561'),
SANITIZER("mdrmz-sanitizer", '\ue8b0'),
SATELLITE("mdrmz-satellite", '\ue563'),
SAVE("mdrmz-save", '\ue565'),
SAVE_ALT("mdrmz-save_alt", '\ue567'),
SCANNER("mdrmz-scanner", '\ue568'),
SCATTER_PLOT("mdrmz-scatter_plot", '\ue56a'),
SCHEDULE("mdrmz-schedule", '\ue56c'),
SCHOOL("mdrmz-school", '\ue56e'),
SCIENCE("mdrmz-science", '\ue826'),
SCORE("mdrmz-score", '\ue570'),
SCREEN_LOCK_LANDSCAPE("mdrmz-screen_lock_landscape", '\ue572'),
SCREEN_LOCK_PORTRAIT("mdrmz-screen_lock_portrait", '\ue574'),
SCREEN_LOCK_ROTATION("mdrmz-screen_lock_rotation", '\ue576'),
SCREEN_ROTATION("mdrmz-screen_rotation", '\ue577'),
SCREEN_SHARE("mdrmz-screen_share", '\ue579'),
SD_CARD("mdrmz-sd_card", '\ue57b'),
SD_STORAGE("mdrmz-sd_storage", '\ue57d'),
SEARCH("mdrmz-search", '\ue57f'),
SEARCH_OFF("mdrmz-search_off", '\ue828'),
SECURITY("mdrmz-security", '\ue580'),
SELECT_ALL("mdrmz-select_all", '\ue582'),
SELF_IMPROVEMENT("mdrmz-self_improvement", '\ue829'),
SEND("mdrmz-send", '\ue583'),
SENSOR_DOOR("mdrmz-sensor_door", '\ue82a'),
SENSOR_WINDOW("mdrmz-sensor_window", '\ue82c'),
SENTIMENT_DISSATISFIED("mdrmz-sentiment_dissatisfied", '\ue585'),
SENTIMENT_NEUTRAL("mdrmz-sentiment_neutral", '\ue587'),
SENTIMENT_SATISFIED("mdrmz-sentiment_satisfied", '\ue589'),
SENTIMENT_SATISFIED_ALT("mdrmz-sentiment_satisfied_alt", '\ue58b'),
SENTIMENT_SLIGHTLY_DISSATISFIED("mdrmz-sentiment_slightly_dissatisfied", '\ue58d'),
SENTIMENT_VERY_DISSATISFIED("mdrmz-sentiment_very_dissatisfied", '\ue58f'),
SENTIMENT_VERY_SATISFIED("mdrmz-sentiment_very_satisfied", '\ue591'),
SET_MEAL("mdrmz-set_meal", '\ue88a'),
SETTINGS("mdrmz-settings", '\ue593'),
SETTINGS_APPLICATIONS("mdrmz-settings_applications", '\ue595'),
SETTINGS_BACKUP_RESTORE("mdrmz-settings_backup_restore", '\ue597'),
SETTINGS_BLUETOOTH("mdrmz-settings_bluetooth", '\ue598'),
SETTINGS_BRIGHTNESS("mdrmz-settings_brightness", '\ue599'),
SETTINGS_CELL("mdrmz-settings_cell", '\ue59b'),
SETTINGS_ETHERNET("mdrmz-settings_ethernet", '\ue59d'),
SETTINGS_INPUT_ANTENNA("mdrmz-settings_input_antenna", '\ue59e'),
SETTINGS_INPUT_COMPONENT("mdrmz-settings_input_component", '\ue59f'),
SETTINGS_INPUT_COMPOSITE("mdrmz-settings_input_composite", '\ue5a1'),
SETTINGS_INPUT_HDMI("mdrmz-settings_input_hdmi", '\ue5a3'),
SETTINGS_INPUT_SVIDEO("mdrmz-settings_input_svideo", '\ue5a5'),
SETTINGS_OVERSCAN("mdrmz-settings_overscan", '\ue5a7'),
SETTINGS_PHONE("mdrmz-settings_phone", '\ue5a9'),
SETTINGS_POWER("mdrmz-settings_power", '\ue5ab'),
SETTINGS_REMOTE("mdrmz-settings_remote", '\ue5ac'),
SETTINGS_SYSTEM_DAYDREAM("mdrmz-settings_system_daydream", '\ue5ae'),
SETTINGS_VOICE("mdrmz-settings_voice", '\ue5b0'),
SHARE("mdrmz-share", '\ue5b2'),
SHOP("mdrmz-shop", '\ue5b4'),
SHOP_TWO("mdrmz-shop_two", '\ue5b6'),
SHOPPING_BAG("mdrmz-shopping_bag", '\ue82e'),
SHOPPING_BASKET("mdrmz-shopping_basket", '\ue5b8'),
SHOPPING_CART("mdrmz-shopping_cart", '\ue5ba'),
SHORT_TEXT("mdrmz-short_text", '\ue5bc'),
SHOW_CHART("mdrmz-show_chart", '\ue5bd'),
SHUFFLE("mdrmz-shuffle", '\ue5be'),
SHUTTER_SPEED("mdrmz-shutter_speed", '\ue5bf'),
SICK("mdrmz-sick", '\ue8b2'),
SIGNAL_CELLULAR_0_BAR("mdrmz-signal_cellular_0_bar", '\ue5c1'),
SIGNAL_CELLULAR_1_BAR("mdrmz-signal_cellular_1_bar", '\ue5c3'),
SIGNAL_CELLULAR_2_BAR("mdrmz-signal_cellular_2_bar", '\ue5c5'),
SIGNAL_CELLULAR_3_BAR("mdrmz-signal_cellular_3_bar", '\ue5c7'),
SIGNAL_CELLULAR_4_BAR("mdrmz-signal_cellular_4_bar", '\ue5c9'),
SIGNAL_CELLULAR_ALT("mdrmz-signal_cellular_alt", '\ue5ca'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_0_BAR("mdrmz-signal_cellular_connected_no_internet_0_bar", '\ue5cb'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_1_BAR("mdrmz-signal_cellular_connected_no_internet_1_bar", '\ue5cd'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_2_BAR("mdrmz-signal_cellular_connected_no_internet_2_bar", '\ue5cf'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_3_BAR("mdrmz-signal_cellular_connected_no_internet_3_bar", '\ue5d1'),
SIGNAL_CELLULAR_CONNECTED_NO_INTERNET_4_BAR("mdrmz-signal_cellular_connected_no_internet_4_bar", '\ue5d3'),
SIGNAL_CELLULAR_NO_SIM("mdrmz-signal_cellular_no_sim", '\ue5d4'),
SIGNAL_CELLULAR_NULL("mdrmz-signal_cellular_null", '\ue5d6'),
SIGNAL_CELLULAR_OFF("mdrmz-signal_cellular_off", '\ue5d7'),
SIGNAL_WIFI_0_BAR("mdrmz-signal_wifi_0_bar", '\ue5d8'),
SIGNAL_WIFI_1_BAR("mdrmz-signal_wifi_1_bar", '\ue5da'),
SIGNAL_WIFI_1_BAR_LOCK("mdrmz-signal_wifi_1_bar_lock", '\ue5dc'),
SIGNAL_WIFI_2_BAR("mdrmz-signal_wifi_2_bar", '\ue5de'),
SIGNAL_WIFI_2_BAR_LOCK("mdrmz-signal_wifi_2_bar_lock", '\ue5e0'),
SIGNAL_WIFI_3_BAR("mdrmz-signal_wifi_3_bar", '\ue5e2'),
SIGNAL_WIFI_3_BAR_LOCK("mdrmz-signal_wifi_3_bar_lock", '\ue5e4'),
SIGNAL_WIFI_4_BAR("mdrmz-signal_wifi_4_bar", '\ue5e6'),
SIGNAL_WIFI_4_BAR_LOCK("mdrmz-signal_wifi_4_bar_lock", '\ue5e7'),
SIGNAL_WIFI_OFF("mdrmz-signal_wifi_off", '\ue5e8'),
SIM_CARD("mdrmz-sim_card", '\ue5e9'),
SIM_CARD_ALERT("mdrmz-sim_card_alert", '\ue5eb'),
SINGLE_BED("mdrmz-single_bed", '\ue5ed'),
SKIP_NEXT("mdrmz-skip_next", '\ue5ef'),
SKIP_PREVIOUS("mdrmz-skip_previous", '\ue5f1'),
SLIDESHOW("mdrmz-slideshow", '\ue5f3'),
SLOW_MOTION_VIDEO("mdrmz-slow_motion_video", '\ue5f5'),
SMART_BUTTON("mdrmz-smart_button", '\ue830'),
SMARTPHONE("mdrmz-smartphone", '\ue5f6'),
SMOKE_FREE("mdrmz-smoke_free", '\ue5f8'),
SMOKING_ROOMS("mdrmz-smoking_rooms", '\ue5f9'),
SMS("mdrmz-sms", '\ue5fb'),
SMS_FAILED("mdrmz-sms_failed", '\ue5fd'),
SNIPPET_FOLDER("mdrmz-snippet_folder", '\ue831'),
SNOOZE("mdrmz-snooze", '\ue5ff'),
SOAP("mdrmz-soap", '\ue833'),
SORT("mdrmz-sort", '\ue600'),
SORT_BY_ALPHA("mdrmz-sort_by_alpha", '\ue601'),
SOURCE("mdrmz-source", '\ue835'),
SOUTH("mdrmz-south", '\ue88c'),
SOUTH_EAST("mdrmz-south_east", '\ue88d'),
SOUTH_WEST("mdrmz-south_west", '\ue88e'),
SPA("mdrmz-spa", '\ue602'),
SPACE_BAR("mdrmz-space_bar", '\ue604'),
SPEAKER("mdrmz-speaker", '\ue605'),
SPEAKER_GROUP("mdrmz-speaker_group", '\ue607'),
SPEAKER_NOTES("mdrmz-speaker_notes", '\ue609'),
SPEAKER_NOTES_OFF("mdrmz-speaker_notes_off", '\ue60b'),
SPEAKER_PHONE("mdrmz-speaker_phone", '\ue60d'),
SPEED("mdrmz-speed", '\ue60f'),
SPELLCHECK("mdrmz-spellcheck", '\ue610'),
SPORTS("mdrmz-sports", '\ue611'),
SPORTS_BAR("mdrmz-sports_bar", '\ue88f'),
SPORTS_BASEBALL("mdrmz-sports_baseball", '\ue612'),
SPORTS_BASKETBALL("mdrmz-sports_basketball", '\ue614'),
SPORTS_CRICKET("mdrmz-sports_cricket", '\ue616'),
SPORTS_ESPORTS("mdrmz-sports_esports", '\ue618'),
SPORTS_FOOTBALL("mdrmz-sports_football", '\ue61a'),
SPORTS_GOLF("mdrmz-sports_golf", '\ue61c'),
SPORTS_HANDBALL("mdrmz-sports_handball", '\ue61e'),
SPORTS_HOCKEY("mdrmz-sports_hockey", '\ue61f'),
SPORTS_KABADDI("mdrmz-sports_kabaddi", '\ue620'),
SPORTS_MMA("mdrmz-sports_mma", '\ue621'),
SPORTS_MOTORSPORTS("mdrmz-sports_motorsports", '\ue623'),
SPORTS_RUGBY("mdrmz-sports_rugby", '\ue625'),
SPORTS_SOCCER("mdrmz-sports_soccer", '\ue627'),
SPORTS_TENNIS("mdrmz-sports_tennis", '\ue629'),
SPORTS_VOLLEYBALL("mdrmz-sports_volleyball", '\ue62a'),
SQUARE_FOOT("mdrmz-square_foot", '\ue62c'),
STACKED_LINE_CHART("mdrmz-stacked_line_chart", '\ue8c7'),
STAIRS("mdrmz-stairs", '\ue837'),
STAR("mdrmz-star", '\ue62e'),
STAR_BORDER("mdrmz-star_border", '\ue630'),
STAR_HALF("mdrmz-star_half", '\ue631'),
STAR_OUTLINE("mdrmz-star_outline", '\ue748'),
STAR_RATE("mdrmz-star_rate", '\ue632'),
STARS("mdrmz-stars", '\ue633'),
STAY_CURRENT_LANDSCAPE("mdrmz-stay_current_landscape", '\ue635'),
STAY_CURRENT_PORTRAIT("mdrmz-stay_current_portrait", '\ue637'),
STAY_PRIMARY_LANDSCAPE("mdrmz-stay_primary_landscape", '\ue639'),
STAY_PRIMARY_PORTRAIT("mdrmz-stay_primary_portrait", '\ue63b'),
STICKY_NOTE_2("mdrmz-sticky_note_2", '\ue891'),
STOP("mdrmz-stop", '\ue63d'),
STOP_CIRCLE("mdrmz-stop_circle", '\ue63f'),
STOP_SCREEN_SHARE("mdrmz-stop_screen_share", '\ue641'),
STORAGE("mdrmz-storage", '\ue643'),
STORE("mdrmz-store", '\ue644'),
STORE_MALL_DIRECTORY("mdrmz-store_mall_directory", '\ue646'),
STOREFRONT("mdrmz-storefront", '\ue648'),
STRAIGHTEN("mdrmz-straighten", '\ue64a'),
STREETVIEW("mdrmz-streetview", '\ue64c'),
STRIKETHROUGH_S("mdrmz-strikethrough_s", '\ue64d'),
STROLLER("mdrmz-stroller", '\ue839'),
STYLE("mdrmz-style", '\ue64e'),
SUBDIRECTORY_ARROW_LEFT("mdrmz-subdirectory_arrow_left", '\ue650'),
SUBDIRECTORY_ARROW_RIGHT("mdrmz-subdirectory_arrow_right", '\ue651'),
SUBJECT("mdrmz-subject", '\ue652'),
SUBSCRIPT("mdrmz-subscript", '\ue83b'),
SUBSCRIPTIONS("mdrmz-subscriptions", '\ue653'),
SUBTITLES("mdrmz-subtitles", '\ue655'),
SUBTITLES_OFF("mdrmz-subtitles_off", '\ue83c'),
SUBWAY("mdrmz-subway", '\ue657'),
SUPERSCRIPT("mdrmz-superscript", '\ue83e'),
SUPERVISED_USER_CIRCLE("mdrmz-supervised_user_circle", '\ue659'),
SUPERVISOR_ACCOUNT("mdrmz-supervisor_account", '\ue65b'),
SUPPORT("mdrmz-support", '\ue83f'),
SUPPORT_AGENT("mdrmz-support_agent", '\ue841'),
SURROUND_SOUND("mdrmz-surround_sound", '\ue65d'),
SWAP_CALLS("mdrmz-swap_calls", '\ue65f'),
SWAP_HORIZ("mdrmz-swap_horiz", '\ue660'),
SWAP_HORIZONTAL_CIRCLE("mdrmz-swap_horizontal_circle", '\ue661'),
SWAP_VERT("mdrmz-swap_vert", '\ue663'),
SWAP_VERTICAL_CIRCLE("mdrmz-swap_vertical_circle", '\ue664'),
SWITCH_CAMERA("mdrmz-switch_camera", '\ue666'),
SWITCH_LEFT("mdrmz-switch_left", '\ue842'),
SWITCH_RIGHT("mdrmz-switch_right", '\ue844'),
SWITCH_VIDEO("mdrmz-switch_video", '\ue668'),
SYNC("mdrmz-sync", '\ue66a'),
SYNC_ALT("mdrmz-sync_alt", '\ue66b'),
SYNC_DISABLED("mdrmz-sync_disabled", '\ue66c'),
SYNC_PROBLEM("mdrmz-sync_problem", '\ue66d'),
SYSTEM_UPDATE("mdrmz-system_update", '\ue66e'),
SYSTEM_UPDATE_ALT("mdrmz-system_update_alt", '\ue670'),
TAB("mdrmz-tab", '\ue671'),
TAB_UNSELECTED("mdrmz-tab_unselected", '\ue672'),
TABLE_CHART("mdrmz-table_chart", '\ue673'),
TABLE_ROWS("mdrmz-table_rows", '\ue846'),
TABLE_VIEW("mdrmz-table_view", '\ue848'),
TABLET("mdrmz-tablet", '\ue675'),
TABLET_ANDROID("mdrmz-tablet_android", '\ue677'),
TABLET_MAC("mdrmz-tablet_mac", '\ue679'),
TAG_FACES("mdrmz-tag_faces", '\ue67b'),
TAP_AND_PLAY("mdrmz-tap_and_play", '\ue67d'),
TAPAS("mdrmz-tapas", '\ue893'),
TERRAIN("mdrmz-terrain", '\ue67e'),
TEXT_FIELDS("mdrmz-text_fields", '\ue680'),
TEXT_FORMAT("mdrmz-text_format", '\ue681'),
TEXT_ROTATE_UP("mdrmz-text_rotate_up", '\ue682'),
TEXT_ROTATE_VERTICAL("mdrmz-text_rotate_vertical", '\ue683'),
TEXT_ROTATION_ANGLEDOWN("mdrmz-text_rotation_angledown", '\ue684'),
TEXT_ROTATION_ANGLEUP("mdrmz-text_rotation_angleup", '\ue685'),
TEXT_ROTATION_DOWN("mdrmz-text_rotation_down", '\ue686'),
TEXT_ROTATION_NONE("mdrmz-text_rotation_none", '\ue687'),
TEXT_SNIPPET("mdrmz-text_snippet", '\ue84a'),
TEXTSMS("mdrmz-textsms", '\ue688'),
TEXTURE("mdrmz-texture", '\ue68a'),
THEATERS("mdrmz-theaters", '\ue68b'),
THUMB_DOWN("mdrmz-thumb_down", '\ue68d'),
THUMB_DOWN_ALT("mdrmz-thumb_down_alt", '\ue68f'),
THUMB_UP("mdrmz-thumb_up", '\ue691'),
THUMB_UP_ALT("mdrmz-thumb_up_alt", '\ue693'),
THUMBS_UP_DOWN("mdrmz-thumbs_up_down", '\ue695'),
TIME_TO_LEAVE("mdrmz-time_to_leave", '\ue697'),
TIMELAPSE("mdrmz-timelapse", '\ue699'),
TIMELINE("mdrmz-timeline", '\ue69b'),
TIMER("mdrmz-timer", '\ue69c'),
TIMER_10("mdrmz-timer_10", '\ue69e'),
TIMER_3("mdrmz-timer_3", '\ue69f'),
TIMER_OFF("mdrmz-timer_off", '\ue6a0'),
TITLE("mdrmz-title", '\ue6a2'),
TOC("mdrmz-toc", '\ue6a3'),
TODAY("mdrmz-today", '\ue6a4'),
TOGGLE_OFF("mdrmz-toggle_off", '\ue6a6'),
TOGGLE_ON("mdrmz-toggle_on", '\ue6a8'),
TOLL("mdrmz-toll", '\ue6aa'),
TONALITY("mdrmz-tonality", '\ue6ac'),
TOPIC("mdrmz-topic", '\ue84c'),
TOUCH_APP("mdrmz-touch_app", '\ue6ae'),
TOUR("mdrmz-tour", '\ue84e'),
TOYS("mdrmz-toys", '\ue6b0'),
TRACK_CHANGES("mdrmz-track_changes", '\ue6b2'),
TRAFFIC("mdrmz-traffic", '\ue6b3'),
TRAIN("mdrmz-train", '\ue6b5'),
TRAM("mdrmz-tram", '\ue6b7'),
TRANSFER_WITHIN_A_STATION("mdrmz-transfer_within_a_station", '\ue6b9'),
TRANSFORM("mdrmz-transform", '\ue6ba'),
TRANSIT_ENTEREXIT("mdrmz-transit_enterexit", '\ue6bb'),
TRANSLATE("mdrmz-translate", '\ue6bc'),
TRENDING_DOWN("mdrmz-trending_down", '\ue6bd'),
TRENDING_FLAT("mdrmz-trending_flat", '\ue6be'),
TRENDING_UP("mdrmz-trending_up", '\ue6bf'),
TRIP_ORIGIN("mdrmz-trip_origin", '\ue6c0'),
TTY("mdrmz-tty", '\ue850'),
TUNE("mdrmz-tune", '\ue6c1'),
TURNED_IN("mdrmz-turned_in", '\ue6c2'),
TURNED_IN_NOT("mdrmz-turned_in_not", '\ue6c4'),
TV("mdrmz-tv", '\ue6c5'),
TV_OFF("mdrmz-tv_off", '\ue6c7'),
TWO_WHEELER("mdrmz-two_wheeler", '\ue749'),
UMBRELLA("mdrmz-umbrella", '\ue852'),
UNARCHIVE("mdrmz-unarchive", '\ue6c9'),
UNDO("mdrmz-undo", '\ue6cb'),
UNFOLD_LESS("mdrmz-unfold_less", '\ue6cc'),
UNFOLD_MORE("mdrmz-unfold_more", '\ue6cd'),
UNPUBLISHED("mdrmz-unpublished", '\ue8c8'),
UNSUBSCRIBE("mdrmz-unsubscribe", '\ue6ce'),
UPDATE("mdrmz-update", '\ue6d0'),
UPDATE_DISABLED("mdrmz-update_disabled", '\ue8d2'),
UPGRADE("mdrmz-upgrade", '\ue854'),
USB("mdrmz-usb", '\ue6d1'),
VERIFIED("mdrmz-verified", '\ue855'),
VERIFIED_USER("mdrmz-verified_user", '\ue6d2'),
VERTICAL_ALIGN_BOTTOM("mdrmz-vertical_align_bottom", '\ue6d4'),
VERTICAL_ALIGN_CENTER("mdrmz-vertical_align_center", '\ue6d5'),
VERTICAL_ALIGN_TOP("mdrmz-vertical_align_top", '\ue6d6'),
VERTICAL_DISTRIBUTE("mdrmz-vertical_distribute", '\ue8d3'),
VERTICAL_SPLIT("mdrmz-vertical_split", '\ue6d7'),
VIBRATION("mdrmz-vibration", '\ue6d9'),
VIDEO_CALL("mdrmz-video_call", '\ue6db'),
VIDEO_LABEL("mdrmz-video_label", '\ue6dd'),
VIDEO_LIBRARY("mdrmz-video_library", '\ue6df'),
VIDEO_SETTINGS("mdrmz-video_settings", '\ue857'),
VIDEOCAM("mdrmz-videocam", '\ue6e1'),
VIDEOCAM_OFF("mdrmz-videocam_off", '\ue6e3'),
VIDEOGAME_ASSET("mdrmz-videogame_asset", '\ue6e5'),
VIEW_AGENDA("mdrmz-view_agenda", '\ue6e7'),
VIEW_ARRAY("mdrmz-view_array", '\ue6e9'),
VIEW_CAROUSEL("mdrmz-view_carousel", '\ue6eb'),
VIEW_COLUMN("mdrmz-view_column", '\ue6ed'),
VIEW_COMFY("mdrmz-view_comfy", '\ue6ef'),
VIEW_COMPACT("mdrmz-view_compact", '\ue6f1'),
VIEW_DAY("mdrmz-view_day", '\ue6f3'),
VIEW_HEADLINE("mdrmz-view_headline", '\ue6f5'),
VIEW_LIST("mdrmz-view_list", '\ue6f6'),
VIEW_MODULE("mdrmz-view_module", '\ue6f8'),
VIEW_QUILT("mdrmz-view_quilt", '\ue6fa'),
VIEW_SIDEBAR("mdrmz-view_sidebar", '\ue858'),
VIEW_STREAM("mdrmz-view_stream", '\ue6fc'),
VIEW_WEEK("mdrmz-view_week", '\ue6fe'),
VIGNETTE("mdrmz-vignette", '\ue700'),
VISIBILITY("mdrmz-visibility", '\ue702'),
VISIBILITY_OFF("mdrmz-visibility_off", '\ue704'),
VOICE_CHAT("mdrmz-voice_chat", '\ue706'),
VOICE_OVER_OFF("mdrmz-voice_over_off", '\ue708'),
VOICEMAIL("mdrmz-voicemail", '\ue70a'),
VOLUME_DOWN("mdrmz-volume_down", '\ue70b'),
VOLUME_MUTE("mdrmz-volume_mute", '\ue70d'),
VOLUME_OFF("mdrmz-volume_off", '\ue70f'),
VOLUME_UP("mdrmz-volume_up", '\ue711'),
VPN_KEY("mdrmz-vpn_key", '\ue713'),
VPN_LOCK("mdrmz-vpn_lock", '\ue715'),
WALLPAPER("mdrmz-wallpaper", '\ue717'),
WARNING("mdrmz-warning", '\ue718'),
WASH("mdrmz-wash", '\ue85a'),
WATCH("mdrmz-watch", '\ue71a'),
WATCH_LATER("mdrmz-watch_later", '\ue71c'),
WATER_DAMAGE("mdrmz-water_damage", '\ue895'),
WAVES("mdrmz-waves", '\ue71e'),
WB_AUTO("mdrmz-wb_auto", '\ue71f'),
WB_CLOUDY("mdrmz-wb_cloudy", '\ue721'),
WB_INCANDESCENT("mdrmz-wb_incandescent", '\ue723'),
WB_IRIDESCENT("mdrmz-wb_iridescent", '\ue725'),
WB_SUNNY("mdrmz-wb_sunny", '\ue727'),
WC("mdrmz-wc", '\ue729'),
WEB("mdrmz-web", '\ue72a'),
WEB_ASSET("mdrmz-web_asset", '\ue72c'),
WEEKEND("mdrmz-weekend", '\ue72e'),
WEST("mdrmz-west", '\ue897'),
WHATSHOT("mdrmz-whatshot", '\ue730'),
WHEELCHAIR_PICKUP("mdrmz-wheelchair_pickup", '\ue85c'),
WHERE_TO_VOTE("mdrmz-where_to_vote", '\ue732'),
WIDGETS("mdrmz-widgets", '\ue734'),
WIFI("mdrmz-wifi", '\ue736'),
WIFI_CALLING("mdrmz-wifi_calling", '\ue85d'),
WIFI_LOCK("mdrmz-wifi_lock", '\ue737'),
WIFI_OFF("mdrmz-wifi_off", '\ue738'),
WIFI_PROTECTED_SETUP("mdrmz-wifi_protected_setup", '\ue85f'),
WIFI_TETHERING("mdrmz-wifi_tethering", '\ue739'),
WINE_BAR("mdrmz-wine_bar", '\ue898'),
WORK("mdrmz-work", '\ue73a'),
WORK_OFF("mdrmz-work_off", '\ue73c'),
WORK_OUTLINE("mdrmz-work_outline", '\ue73e'),
WRAP_TEXT("mdrmz-wrap_text", '\ue73f'),
WRONG_LOCATION("mdrmz-wrong_location", '\ue860'),
WYSIWYG("mdrmz-wysiwyg", '\ue861'),
YOUTUBE_SEARCHED_FOR("mdrmz-youtube_searched_for", '\ue740'),
ZOOM_IN("mdrmz-zoom_in", '\ue741'),
ZOOM_OUT("mdrmz-zoom_out", '\ue742'),
ZOOM_OUT_MAP("mdrmz-zoom_out_map", '\ue743');
public static Material2RoundMZ findByDescription(String description) {
for (Material2RoundMZ font : values()) {
if (font.getDescription().equals(description)) {
return font;
}
}
throw new IllegalArgumentException("Icon description '" + description + "' is invalid!");
}
private String description;
private int code;
Material2RoundMZ(String description, int code) {
this.description = description;
this.code = code;
}
    /** Returns this icon's description key (the {@code mdrmz-*} identifier). */
    @Override
    public String getDescription() {
        return description;
    }
    /** Returns the font code point used to render this icon's glyph. */
    @Override
    public int getCode() {
        return code;
    }
}
|
|
package brooklyn.entity.container.policy;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import java.util.List;
import java.util.Map;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import brooklyn.entity.BrooklynAppUnitTestSupport;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.BasicStartableImpl;
import brooklyn.entity.basic.DynamicGroup;
import brooklyn.entity.basic.EntityInternal;
import brooklyn.entity.container.docker.DockerInfrastructure;
import brooklyn.entity.group.DynamicCluster;
import brooklyn.entity.proxying.EntitySpec;
import brooklyn.event.SensorEvent;
import brooklyn.event.SensorEventListener;
import brooklyn.event.basic.BasicNotificationSensor;
import brooklyn.location.docker.DockerLocation;
import brooklyn.location.docker.strategy.MaxContainersPlacementStrategy;
import brooklyn.policy.EnricherSpec;
import brooklyn.test.Asserts;
import brooklyn.test.EntityTestUtils;
import brooklyn.util.time.Duration;
import brooklyn.util.time.Time;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
/**
 * Tests for {@code ContainerHeadroomEnricher}: verifies that hot/cold/ok notification
 * sensor events are emitted as the relationship between host capacity and container
 * count changes. Capacity is hostCount * 8, per the max-containers config in setUp.
 */
public class ContainerHeadroomEnricherTest extends BrooklynAppUnitTestSupport {

    // Common flags for eventually/continually assertions: bound each wait to one second.
    private final Map<String, Duration> assertMap = ImmutableMap.of("timeout", Duration.ONE_SECOND);

    private EntityInternal entity;
    private RecordingSensorEventListener listener;

    @BeforeMethod(alwaysRun=true)
    @Override
    public void setUp() throws Exception {
        super.setUp();
        // Simulated infrastructure with max 8 containers per host.
        entity = (EntityInternal) app.createAndManageChild(EntitySpec.create(DockerInfrastructure.class)
                .impl(DockerInfrastructureSimulated.class)
                .configure(MaxContainersPlacementStrategy.DOCKER_CONTAINER_CLUSTER_MAX_SIZE, 8));
        listener = new RecordingSensorEventListener();
        // Capture every temperature notification published against the infrastructure.
        app.subscribe(entity, ContainerHeadroomEnricher.DOCKER_CONTAINER_CLUSTER_HOT, listener);
        app.subscribe(entity, ContainerHeadroomEnricher.DOCKER_CONTAINER_CLUSTER_COLD, listener);
        app.subscribe(entity, ContainerHeadroomEnricher.DOCKER_CONTAINER_CLUSTER_OK, listener);
    }

    @Test
    public void testNoEventsWhenAllOk() throws Exception {
        entity.addEnricher(EnricherSpec.create(ContainerHeadroomEnricher.class)
                .configure(ContainerHeadroomEnricher.CONTAINER_HEADROOM, 4));

        // 2 hosts => capacity 16; 8 containers leaves headroom 8 >= 4, so no event expected.
        entity.setAttribute(DockerInfrastructure.DOCKER_HOST_COUNT, 2);
        entity.setAttribute(DockerInfrastructure.DOCKER_CONTAINER_COUNT, 8);

        assertNoEventsContinually();
    }

    // Integration because takes over a second, and because time-sensitive:
    // If we initially get two events with the second arriving more than one
    // second later then our subsequent assertion will fail.
    @Test(groups="integration")
    public void testTooHotWhenHeadroomExceeded() throws Exception {
        entity.addEnricher(EnricherSpec.create(ContainerHeadroomEnricher.class)
                .configure(ContainerHeadroomEnricher.CONTAINER_HEADROOM, 4));

        // Too hot: headroom insufficient by one container
        // Note we can get either 1 or 2 events for this (if the hostcount event is
        // processed after containerCount attribute has been set, then we'll get a too-hot
        // for that as well; otherwise it will ignore the event). Hence we use
        // clearEventsContinually below.
        entity.setAttribute(DockerInfrastructure.DOCKER_HOST_COUNT, 2);
        entity.setAttribute(DockerInfrastructure.DOCKER_CONTAINER_COUNT, 13);
        assertTooHot(new CurrentStatus()
                .hostCount(2)
                .needed(13 - (16 - 4)) // 1
                .utilization(13d/16) // 0.8125
                .lowThreshold((16d - (4 + 8)) / 16) // 0.25
                .highThreshold(12d/16)); // 0.75

        // Too hot - 28 containers would require 4 hosts (leaving headroom of 4)
        listener.clearEventsContinually();
        entity.setAttribute(DockerInfrastructure.DOCKER_CONTAINER_COUNT, 28);
        assertTooHot(new CurrentStatus()
                .hostCount(2)
                .needed(28 - (16 - 4)) // 16
                .utilization(28d/16) // 1.75
                .lowThreshold((16d - (4 + 8)) / 16) // 0.25
                .highThreshold(12d/16)); // 0.75

        // Make everything ok again
        listener.clearEvents();
        entity.setAttribute(DockerInfrastructure.DOCKER_CONTAINER_COUNT, 8);
        assertOk(new CurrentStatus()
                .hostCount(2)
                .needed(8 - (16 - 4)) // -4 (surplus headroom)
                .utilization(8d/16) // 0.5
                .lowThreshold((16d - (4 + 8)) / 16) // 0.25
                .highThreshold(12d/16)); // 0.75

        // Expect not to get repeated "ok"
        listener.clearEvents();
        entity.setAttribute(DockerInfrastructure.DOCKER_CONTAINER_COUNT, 9);
        assertNoEventsContinually();
    }

    // Integration because takes over a second, and because time-sensitive:
    // See comment on testTooHotWhenHeadroomExceeded.
    @Test(groups="integration")
    public void testTooColdThenOk() throws Exception {
        entity.addEnricher(EnricherSpec.create(ContainerHeadroomEnricher.class)
                .configure(ContainerHeadroomEnricher.CONTAINER_HEADROOM, 4));

        // Too cold - only need one host rather than 10
        entity.setAttribute(DockerInfrastructure.DOCKER_HOST_COUNT, 10);
        entity.setAttribute(DockerInfrastructure.DOCKER_CONTAINER_COUNT, 1);
        assertTooCold(new CurrentStatus()
                .hostCount(10)
                .needed(1 - (80 - 4)) // -75
                .utilization(1d/80) // 0.0125
                .lowThreshold((80d - (4 + 8)) / 80) // 0.85
                .highThreshold(76d/80)); // 0.95

        // Still too cold - only need one host rather than 2
        listener.clearEventsContinually();
        entity.setAttribute(DockerInfrastructure.DOCKER_HOST_COUNT, 2);
        assertTooCold(new CurrentStatus()
                .hostCount(2)
                .needed(1 - (16 - 4)) // -11
                .utilization(1d/16) // 0.0625
                .lowThreshold((16d - (4 + 8)) / 16) // 0.25
                .highThreshold(12d/16)); // 0.75

        // Make everything ok again
        listener.clearEvents();
        entity.setAttribute(DockerInfrastructure.DOCKER_CONTAINER_COUNT, 8);
        assertOk(new CurrentStatus()
                .hostCount(2)
                .needed(8 - 16 + 4) // -4 (surplus headroom)
                .utilization(8d/16) // 0.5
                .lowThreshold((16d - (4 + 8)) / 16) // 0.25
                .highThreshold(12d/16)); // 0.75

        // Expect not to get repeated "ok"
        listener.clearEvents();
        entity.setAttribute(DockerInfrastructure.DOCKER_CONTAINER_COUNT, 9);
        assertNoEventsContinually();
    }

    // Asserts no sensor events arrive for (at least) the assertMap timeout window.
    private void assertNoEventsContinually() {
        Asserts.succeedsContinually(new Runnable() {
            public void run() {
                assertEquals(listener.getEvents(), ImmutableList.of());
            }});
    }

    private void assertTooHot(final CurrentStatus status) {
        assertTemperatureEvent(status, ContainerHeadroomEnricher.DOCKER_CONTAINER_CLUSTER_HOT);
    }

    private void assertTooCold(final CurrentStatus status) {
        assertTemperatureEvent(status, ContainerHeadroomEnricher.DOCKER_CONTAINER_CLUSTER_COLD);
    }

    private void assertOk(final CurrentStatus status) {
        assertTemperatureEvent(status, ContainerHeadroomEnricher.DOCKER_CONTAINER_CLUSTER_OK);
    }

    /**
     * Asserts that the enricher's derived attributes match {@code status}, and that a
     * single event of {@code eventType} (or an identical duplicate) was received with
     * the expected pool.* payload map.
     */
    private void assertTemperatureEvent(final CurrentStatus status, final BasicNotificationSensor<Map> eventType) {
        EntityTestUtils.assertAttributeEqualsEventually(assertMap, entity, ContainerHeadroomEnricher.CONTAINERS_NEEDED, status.needed);
        EntityTestUtils.assertAttributeEqualsEventually(assertMap, entity, ContainerHeadroomEnricher.DOCKER_CONTAINER_UTILISATION, status.utilization);

        Asserts.succeedsEventually(assertMap, new Runnable() {
            public void run() {
                List<SensorEvent<Object>> events = listener.getEvents();
                // Accept up to 2 duplicates - could be responding to rapid succession of setting hostCount + containerCount
                assertTrue(events.size() == 1 || events.size() == 2, "events="+events);
                if (events.size() == 2) {
                    assertEquals(events.get(0).getSensor(), events.get(1).getSensor());
                    assertEquals(events.get(0).getValue(), events.get(1).getValue());
                }
                assertEquals(events.get(0).getSensor(), eventType);
                assertEquals(events.get(0).getValue(), ImmutableMap.of(
                        "pool.current.size", status.hostCount,
                        "pool.current.workrate", status.utilization,
                        "pool.low.threshold", status.lowThreshold,
                        "pool.high.threshold", status.highThreshold));
            }});
    }

    // Fluent holder for the values expected in a single temperature event.
    private static class CurrentStatus {
        int hostCount;
        int needed;
        double utilization;
        double lowThreshold;
        double highThreshold;

        CurrentStatus hostCount(int val) {
            hostCount = val; return this;
        }
        CurrentStatus needed(int val) {
            needed = val; return this;
        }
        CurrentStatus utilization(double val) {
            utilization = val; return this;
        }
        CurrentStatus lowThreshold(double val) {
            lowThreshold = val; return this;
        }
        CurrentStatus highThreshold(double val) {
            highThreshold = val; return this;
        }
    }

    /**
     * Minimal {@link DockerInfrastructure} stand-in: records resize requests and stubs
     * out everything else so tests can drive sensors without real Docker hosts.
     */
    public static class DockerInfrastructureSimulated extends BasicStartableImpl implements DockerInfrastructure {
        private int currentSize = 0;

        @Override
        public Integer resize(Integer desiredSize) {
            currentSize = desiredSize;
            return currentSize;
        }
        @Override
        public Integer getCurrentSize() {
            return currentSize;
        }
        @Override
        public boolean isLocationAvailable() {
            return false;
        }
        @Override
        public void deleteLocation() {
            // no-op
        }
        @Override
        public List<Entity> getDockerHostList() {
            return ImmutableList.<Entity>of();
        }
        @Override
        public List<Entity> getDockerContainerList() {
            return ImmutableList.<Entity>of();
        }
        @Override
        public DockerLocation getDynamicLocation() {
            throw new UnsupportedOperationException();
        }
        @Override
        public DockerLocation createLocation(Map<String, ?> flags) {
            throw new UnsupportedOperationException();
        }
        @Override
        public DynamicCluster getDockerHostCluster() {
            throw new UnsupportedOperationException();
        }
        @Override
        public DynamicGroup getContainerFabric() {
            throw new UnsupportedOperationException();
        }
    }

    // Collects received sensor events for later inspection; copy-on-write list keeps
    // concurrent add/read safe while assertions iterate.
    public static class RecordingSensorEventListener implements SensorEventListener<Object> {
        List<SensorEvent<Object>> events = Lists.newCopyOnWriteArrayList();

        @Override
        public void onEvent(SensorEvent<Object> event) {
            events.add(event);
        }
        public List<SensorEvent<Object>> getEvents() {
            return events;
        }
        public void clearEvents() {
            events.clear();
        }
        // Sleeps one second first, so any in-flight duplicate events arrive and are cleared too.
        public void clearEventsContinually() {
            Time.sleep(Duration.ONE_SECOND);
            clearEvents();
        }
    }
}
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.packaging.impl.artifacts;
import com.intellij.compiler.CompilerConfiguration;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.CompilerProjectExtension;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Trinity;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.packaging.artifacts.*;
import com.intellij.packaging.elements.*;
import com.intellij.packaging.impl.elements.*;
import com.intellij.util.PathUtil;
import com.intellij.util.Processor;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.FList;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
* @author nik
*/
public class ArtifactUtil {
    // Static utility class; private constructor prevents instantiation.
    private ArtifactUtil() {
    }
public static CompositePackagingElement<?> copyFromRoot(@NotNull CompositePackagingElement<?> oldRoot, @NotNull Project project) {
final CompositePackagingElement<?> newRoot = (CompositePackagingElement<?>)copyElement(oldRoot, project);
copyChildren(oldRoot, newRoot, project);
return newRoot;
}
public static void copyChildren(CompositePackagingElement<?> oldParent, CompositePackagingElement<?> newParent, @NotNull Project project) {
for (PackagingElement<?> child : oldParent.getChildren()) {
newParent.addOrFindChild(copyWithChildren(child, project));
}
}
@NotNull
public static <S> PackagingElement<S> copyWithChildren(@NotNull PackagingElement<S> element, @NotNull Project project) {
final PackagingElement<S> copy = copyElement(element, project);
if (element instanceof CompositePackagingElement<?>) {
copyChildren((CompositePackagingElement<?>)element, (CompositePackagingElement<?>)copy, project);
}
return copy;
}
@NotNull
private static <S> PackagingElement<S> copyElement(@NotNull PackagingElement<S> element, @NotNull Project project) {
//noinspection unchecked
final PackagingElement<S> copy = (PackagingElement<S>)element.getType().createEmpty(project);
copy.loadState(element.getState());
return copy;
}
public static <E extends PackagingElement<?>> boolean processPackagingElements(@NotNull Artifact artifact, @Nullable PackagingElementType<E> type,
@NotNull final Processor<? super E> processor,
final @NotNull PackagingElementResolvingContext resolvingContext,
final boolean processSubstitutions) {
return processPackagingElements(artifact, type, new PackagingElementProcessor<E>() {
@Override
public boolean process(@NotNull E e, @NotNull PackagingElementPath path) {
return processor.process(e);
}
}, resolvingContext, processSubstitutions);
}
public static <E extends PackagingElement<?>> boolean processPackagingElements(@NotNull Artifact artifact, @Nullable PackagingElementType<E> type,
@NotNull PackagingElementProcessor<? super E> processor,
final @NotNull PackagingElementResolvingContext resolvingContext,
final boolean processSubstitutions) {
return processPackagingElements(artifact.getRootElement(), type, processor, resolvingContext, processSubstitutions, artifact.getArtifactType());
}
public static <E extends PackagingElement<?>> boolean processPackagingElements(final PackagingElement<?> rootElement, @Nullable PackagingElementType<E> type,
@NotNull PackagingElementProcessor<? super E> processor,
final @NotNull PackagingElementResolvingContext resolvingContext,
final boolean processSubstitutions,
final ArtifactType artifactType) {
return processElementRecursively(rootElement, type, processor, resolvingContext, processSubstitutions, artifactType,
PackagingElementPath.EMPTY, new HashSet<>());
}
private static <E extends PackagingElement<?>> boolean processElementsRecursively(final List<? extends PackagingElement<?>> elements,
@Nullable PackagingElementType<E> type,
@NotNull PackagingElementProcessor<? super E> processor,
final @NotNull PackagingElementResolvingContext resolvingContext,
final boolean processSubstitutions, ArtifactType artifactType,
@NotNull PackagingElementPath path,
Set<PackagingElement<?>> processed) {
for (PackagingElement<?> element : elements) {
if (!processElementRecursively(element, type, processor, resolvingContext, processSubstitutions, artifactType, path, processed)) {
return false;
}
}
return true;
}
public static void processRecursivelySkippingIncludedArtifacts(Artifact artifact,
final Processor<PackagingElement<?>> processor,
PackagingElementResolvingContext context) {
processPackagingElements(artifact.getRootElement(), null, new PackagingElementProcessor<PackagingElement<?>>() {
@Override
public boolean process(@NotNull PackagingElement<?> element, @NotNull PackagingElementPath path) {
return processor.process(element);
}
@Override
public boolean shouldProcessSubstitution(ComplexPackagingElement<?> element) {
return !(element instanceof ArtifactPackagingElement);
}
}, context, true, artifact.getArtifactType());
}
private static <E extends PackagingElement<?>> boolean processElementRecursively(@NotNull PackagingElement<?> element, @Nullable PackagingElementType<E> type,
@NotNull PackagingElementProcessor<? super E> processor,
@NotNull PackagingElementResolvingContext resolvingContext,
final boolean processSubstitutions,
ArtifactType artifactType,
@NotNull PackagingElementPath path, Set<PackagingElement<?>> processed) {
if (!processor.shouldProcess(element) || !processed.add(element)) {
return true;
}
if (type == null || element.getType().equals(type)) {
if (!processor.process((E)element, path)) {
return false;
}
}
if (element instanceof CompositePackagingElement<?>) {
final CompositePackagingElement<?> composite = (CompositePackagingElement<?>)element;
return processElementsRecursively(composite.getChildren(), type, processor, resolvingContext, processSubstitutions, artifactType,
path.appendComposite(composite), processed);
}
else if (element instanceof ComplexPackagingElement<?> && processSubstitutions) {
final ComplexPackagingElement<?> complexElement = (ComplexPackagingElement<?>)element;
if (processor.shouldProcessSubstitution(complexElement)) {
final List<? extends PackagingElement<?>> substitution = complexElement.getSubstitution(resolvingContext, artifactType);
if (substitution != null) {
return processElementsRecursively(substitution, type, processor, resolvingContext, processSubstitutions, artifactType,
path.appendComplex(complexElement), processed);
}
}
}
return true;
}
public static void removeDuplicates(@NotNull CompositePackagingElement<?> parent) {
List<PackagingElement<?>> prevChildren = new ArrayList<>();
List<PackagingElement<?>> toRemove = new ArrayList<>();
for (PackagingElement<?> child : parent.getChildren()) {
if (child instanceof CompositePackagingElement<?>) {
removeDuplicates((CompositePackagingElement<?>)child);
}
boolean merged = false;
for (PackagingElement<?> prevChild : prevChildren) {
if (child.isEqualTo(prevChild)) {
if (child instanceof CompositePackagingElement<?>) {
for (PackagingElement<?> childElement : ((CompositePackagingElement<?>)child).getChildren()) {
((CompositePackagingElement<?>)prevChild).addOrFindChild(childElement);
}
}
merged = true;
break;
}
}
if (merged) {
toRemove.add(child);
}
else {
prevChildren.add(child);
}
}
for (PackagingElement<?> child : toRemove) {
parent.removeChild(child);
}
}
public static <S> void copyProperties(ArtifactProperties<?> from, ArtifactProperties<S> to) {
//noinspection unchecked
to.loadState((S)from.getState());
}
@Nullable
public static String getDefaultArtifactOutputPath(@NotNull String artifactName, final @NotNull Project project) {
final CompilerProjectExtension extension = CompilerProjectExtension.getInstance(project);
if (extension == null) return null;
String outputUrl = extension.getCompilerOutputUrl();
if (outputUrl == null || outputUrl.length() == 0) {
final VirtualFile baseDir = project.getBaseDir();
if (baseDir == null) return null;
outputUrl = baseDir.getUrl() + "/out";
}
return VfsUtilCore.urlToPath(outputUrl) + "/artifacts/" + FileUtil.sanitizeFileName(artifactName);
}
public static <E extends PackagingElement<?>> boolean processElementsWithSubstitutions(@NotNull List<? extends PackagingElement<?>> elements,
@NotNull PackagingElementResolvingContext context,
@NotNull ArtifactType artifactType,
@NotNull PackagingElementPath parentPath,
@NotNull PackagingElementProcessor<E> processor) {
return processElementsWithSubstitutions(elements, context, artifactType, parentPath, processor, new THashSet<>());
}
private static <E extends PackagingElement<?>> boolean processElementsWithSubstitutions(@NotNull List<? extends PackagingElement<?>> elements,
@NotNull PackagingElementResolvingContext context,
@NotNull ArtifactType artifactType,
@NotNull PackagingElementPath parentPath,
@NotNull PackagingElementProcessor<E> processor,
final Set<PackagingElement<?>> processed) {
for (PackagingElement<?> element : elements) {
if (!processed.add(element)) {
continue;
}
if (element instanceof ComplexPackagingElement<?> && processor.shouldProcessSubstitution((ComplexPackagingElement)element)) {
final ComplexPackagingElement<?> complexElement = (ComplexPackagingElement<?>)element;
final List<? extends PackagingElement<?>> substitution = complexElement.getSubstitution(context, artifactType);
if (substitution != null &&
!processElementsWithSubstitutions(substitution, context, artifactType, parentPath.appendComplex(complexElement), processor, processed)) {
return false;
}
}
else if (!processor.process((E)element, parentPath)) {
return false;
}
}
return true;
}
public static List<PackagingElement<?>> findByRelativePath(@NotNull CompositePackagingElement<?> parent, @NotNull String relativePath,
@NotNull PackagingElementResolvingContext context, @NotNull ArtifactType artifactType) {
final List<PackagingElement<?>> result = new ArrayList<>();
processElementsByRelativePath(parent, relativePath, context, artifactType, PackagingElementPath.EMPTY, new PackagingElementProcessor<PackagingElement<?>>() {
@Override
public boolean process(@NotNull PackagingElement<?> packagingElement, @NotNull PackagingElementPath path) {
result.add(packagingElement);
return true;
}
});
return result;
}
public static boolean processElementsByRelativePath(@NotNull final CompositePackagingElement<?> parent, @NotNull String relativePath,
@NotNull final PackagingElementResolvingContext context, @NotNull final ArtifactType artifactType,
@NotNull PackagingElementPath parentPath,
@NotNull final PackagingElementProcessor<PackagingElement<?>> processor) {
relativePath = StringUtil.trimStart(relativePath, "/");
if (relativePath.isEmpty()) {
return true;
}
int i = relativePath.indexOf('/');
final String firstName = i != -1 ? relativePath.substring(0, i) : relativePath;
final String tail = i != -1 ? relativePath.substring(i+1) : "";
return processElementsWithSubstitutions(parent.getChildren(), context, artifactType, parentPath.appendComposite(parent), new PackagingElementProcessor<PackagingElement<?>>() {
@Override
public boolean process(@NotNull PackagingElement<?> element, @NotNull PackagingElementPath path) {
boolean process = false;
if (element instanceof CompositePackagingElement && firstName.equals(((CompositePackagingElement<?>)element).getName())) {
process = true;
}
else if (element instanceof FileCopyPackagingElement) {
final FileCopyPackagingElement fileCopy = (FileCopyPackagingElement)element;
if (firstName.equals(fileCopy.getOutputFileName())) {
process = true;
}
}
if (process) {
if (tail.length() == 0) {
if (!processor.process(element, path)) return false;
}
else if (element instanceof CompositePackagingElement<?>) {
return processElementsByRelativePath((CompositePackagingElement)element, tail, context, artifactType, path, processor);
}
}
return true;
}
});
}
public static boolean processDirectoryChildren(@NotNull CompositePackagingElement<?> parent,
@NotNull PackagingElementPath pathToParent,
@NotNull String relativePath,
@NotNull final PackagingElementResolvingContext context,
@NotNull final ArtifactType artifactType,
@NotNull final PackagingElementProcessor<PackagingElement<?>> processor) {
return processElementsByRelativePath(parent, relativePath, context, artifactType, pathToParent, new PackagingElementProcessor<PackagingElement<?>>() {
@Override
public boolean process(@NotNull PackagingElement<?> element, @NotNull PackagingElementPath path) {
if (element instanceof DirectoryPackagingElement) {
final List<PackagingElement<?>> children = ((DirectoryPackagingElement)element).getChildren();
if (!processElementsWithSubstitutions(children, context, artifactType, path.appendComposite((DirectoryPackagingElement)element), processor)) {
return false;
}
}
return true;
}
});
}
public static void processFileOrDirectoryCopyElements(Artifact artifact,
PackagingElementProcessor<FileOrDirectoryCopyPackagingElement<?>> processor,
PackagingElementResolvingContext context,
boolean processSubstitutions) {
processPackagingElements(artifact, PackagingElementFactoryImpl.FILE_COPY_ELEMENT_TYPE, processor, context, processSubstitutions);
processPackagingElements(artifact, PackagingElementFactoryImpl.DIRECTORY_COPY_ELEMENT_TYPE, processor, context, processSubstitutions);
processPackagingElements(artifact, PackagingElementFactoryImpl.EXTRACTED_DIRECTORY_ELEMENT_TYPE, processor, context, processSubstitutions);
}
public static Collection<Trinity<Artifact, PackagingElementPath, String>> findContainingArtifactsWithOutputPaths(@NotNull final VirtualFile file,
@NotNull Project project,
final Artifact[] artifacts) {
final boolean isResourceFile = CompilerConfiguration.getInstance(project).isResourceFile(file);
final List<Trinity<Artifact, PackagingElementPath, String>> result = new ArrayList<>();
final PackagingElementResolvingContext context = ArtifactManager.getInstance(project).getResolvingContext();
for (final Artifact artifact : artifacts) {
processPackagingElements(artifact, null, new PackagingElementProcessor<PackagingElement<?>>() {
@Override
public boolean process(@NotNull PackagingElement<?> element, @NotNull PackagingElementPath path) {
if (element instanceof FileOrDirectoryCopyPackagingElement<?>) {
final VirtualFile root = ((FileOrDirectoryCopyPackagingElement)element).findFile();
if (root != null && VfsUtilCore.isAncestor(root, file, false)) {
final String relativePath;
if (root.equals(file) && element instanceof FileCopyPackagingElement) {
relativePath = ((FileCopyPackagingElement)element).getOutputFileName();
}
else {
relativePath = VfsUtilCore.getRelativePath(file, root, '/');
}
result.add(Trinity.create(artifact, path, relativePath));
return false;
}
}
else if (isResourceFile && element instanceof ModuleOutputPackagingElement) {
final String relativePath = getRelativePathInSources(file, (ModuleOutputPackagingElement)element, context);
if (relativePath != null) {
result.add(Trinity.create(artifact, path, relativePath));
return false;
}
}
return true;
}
}, context, true);
}
return result;
}
@Nullable
private static String getRelativePathInSources(@NotNull VirtualFile file, final @NotNull ModuleOutputPackagingElement moduleElement,
@NotNull PackagingElementResolvingContext context) {
for (VirtualFile sourceRoot : moduleElement.getSourceRoots(context)) {
if (VfsUtilCore.isAncestor(sourceRoot, file, true)) {
return VfsUtilCore.getRelativePath(file, sourceRoot, '/');
}
}
return null;
}
@Nullable
public static VirtualFile findSourceFileByOutputPath(Artifact artifact, String outputPath, PackagingElementResolvingContext context) {
final List<VirtualFile> files = findSourceFilesByOutputPath(artifact.getRootElement(), outputPath, context, artifact.getArtifactType());
return files.isEmpty() ? null : files.get(0);
}
@Nullable
public static VirtualFile findSourceFileByOutputPath(CompositePackagingElement<?> parent, String outputPath,
PackagingElementResolvingContext context, ArtifactType artifactType) {
final List<VirtualFile> files = findSourceFilesByOutputPath(parent, outputPath, context, artifactType);
return files.isEmpty() ? null : files.get(0);
}
public static List<VirtualFile> findSourceFilesByOutputPath(CompositePackagingElement<?> parent, final String outputPath,
final PackagingElementResolvingContext context, final ArtifactType artifactType) {
final String path = StringUtil.trimStart(outputPath, "/");
if (path.isEmpty()) {
return Collections.emptyList();
}
int i = path.indexOf('/');
final String firstName = i != -1 ? path.substring(0, i) : path;
final String tail = i != -1 ? path.substring(i+1) : "";
final List<VirtualFile> result = new SmartList<>();
processElementsWithSubstitutions(parent.getChildren(), context, artifactType, PackagingElementPath.EMPTY, new PackagingElementProcessor<PackagingElement<?>>() {
@Override
public boolean process(@NotNull PackagingElement<?> element, @NotNull PackagingElementPath elementPath) {
//todo[nik] replace by method findSourceFile() in PackagingElement
if (element instanceof CompositePackagingElement) {
final CompositePackagingElement<?> compositeElement = (CompositePackagingElement<?>)element;
if (firstName.equals(compositeElement.getName())) {
result.addAll(findSourceFilesByOutputPath(compositeElement, tail, context, artifactType));
}
}
else if (element instanceof FileCopyPackagingElement) {
final FileCopyPackagingElement fileCopyElement = (FileCopyPackagingElement)element;
if (firstName.equals(fileCopyElement.getOutputFileName()) && tail.isEmpty()) {
ContainerUtil.addIfNotNull(result, fileCopyElement.findFile());
}
}
else if (element instanceof DirectoryCopyPackagingElement || element instanceof ExtractedDirectoryPackagingElement) {
final VirtualFile sourceRoot = ((FileOrDirectoryCopyPackagingElement<?>)element).findFile();
if (sourceRoot != null) {
ContainerUtil.addIfNotNull(result, sourceRoot.findFileByRelativePath(path));
}
}
else if (element instanceof ModuleOutputPackagingElement) {
final CompilerConfiguration compilerConfiguration = CompilerConfiguration.getInstance(context.getProject());
for (VirtualFile sourceRoot : ((ModuleOutputPackagingElement)element).getSourceRoots(context)) {
final VirtualFile sourceFile = sourceRoot.findFileByRelativePath(path);
if (sourceFile != null && compilerConfiguration.isResourceFile(sourceFile)) {
result.add(sourceFile);
}
}
}
return true;
}
});
return result;
}
public static boolean processParents(@NotNull Artifact artifact,
@NotNull PackagingElementResolvingContext context,
@NotNull ParentElementProcessor processor,
int maxLevel) {
return processParents(artifact, context, processor, FList.emptyList(), maxLevel,
new THashSet<>());
}
private static boolean processParents(@NotNull final Artifact artifact, @NotNull final PackagingElementResolvingContext context,
@NotNull final ParentElementProcessor processor, FList<Pair<Artifact, CompositePackagingElement<?>>> pathToElement,
final int maxLevel, final Set<Artifact> processed) {
if (!processed.add(artifact)) return true;
final FList<Pair<Artifact, CompositePackagingElement<?>>> pathFromRoot;
final CompositePackagingElement<?> rootElement = artifact.getRootElement();
if (rootElement instanceof ArtifactRootElement<?>) {
pathFromRoot = pathToElement;
}
else {
if (!processor.process(rootElement, pathToElement, artifact)) {
return false;
}
pathFromRoot = pathToElement.prepend(new Pair<>(artifact, rootElement));
}
if (pathFromRoot.size() > maxLevel) return true;
for (final Artifact anArtifact : context.getArtifactModel().getArtifacts()) {
if (processed.contains(anArtifact)) continue;
final PackagingElementProcessor<ArtifactPackagingElement> elementProcessor =
new PackagingElementProcessor<ArtifactPackagingElement>() {
@Override
public boolean shouldProcessSubstitution(ComplexPackagingElement<?> element) {
return !(element instanceof ArtifactPackagingElement);
}
@Override
public boolean process(@NotNull ArtifactPackagingElement element, @NotNull PackagingElementPath path) {
if (artifact.getName().equals(element.getArtifactName())) {
FList<Pair<Artifact, CompositePackagingElement<?>>> currentPath = pathFromRoot;
final List<CompositePackagingElement<?>> parents = path.getParents();
for (int i = 0, parentsSize = parents.size(); i < parentsSize - 1; i++) {
CompositePackagingElement<?> parent = parents.get(i);
if (!processor.process(parent, currentPath, anArtifact)) {
return false;
}
currentPath = currentPath.prepend(new Pair<>(anArtifact, parent));
if (currentPath.size() > maxLevel) {
return true;
}
}
if (!parents.isEmpty()) {
CompositePackagingElement<?> lastParent = parents.get(parents.size() - 1);
if (lastParent instanceof ArtifactRootElement<?> && !processor.process(lastParent, currentPath, anArtifact)) {
return false;
}
}
return processParents(anArtifact, context, processor, currentPath, maxLevel, processed);
}
return true;
}
};
if (!processPackagingElements(anArtifact, ArtifactElementType.ARTIFACT_ELEMENT_TYPE, elementProcessor, context, true)) {
return false;
}
}
return true;
}
public static void removeChildrenRecursively(@NotNull CompositePackagingElement<?> element, @NotNull Condition<PackagingElement<?>> condition) {
List<PackagingElement<?>> toRemove = new ArrayList<>();
for (PackagingElement<?> child : element.getChildren()) {
if (child instanceof CompositePackagingElement<?>) {
final CompositePackagingElement<?> compositeChild = (CompositePackagingElement<?>)child;
removeChildrenRecursively(compositeChild, condition);
if (compositeChild.getChildren().isEmpty()) {
toRemove.add(child);
}
}
else if (condition.value(child)) {
toRemove.add(child);
}
}
element.removeChildren(toRemove);
}
public static boolean shouldClearArtifactOutputBeforeRebuild(Artifact artifact) {
final String outputPath = artifact.getOutputPath();
return !StringUtil.isEmpty(outputPath) && artifact.getRootElement() instanceof ArtifactRootElement<?>;
}
public static Set<Module> getModulesIncludedInArtifacts(final @NotNull Collection<? extends Artifact> artifacts, final @NotNull Project project) {
final Set<Module> modules = new THashSet<>();
final PackagingElementResolvingContext resolvingContext = ArtifactManager.getInstance(project).getResolvingContext();
for (Artifact artifact : artifacts) {
processPackagingElements(artifact, null, element -> {
if (element instanceof ModuleOutputPackagingElement) {
ContainerUtil.addIfNotNull(modules, ((ModuleOutputPackagingElement)element).findModule(resolvingContext));
}
return true;
}, resolvingContext, true);
}
return modules;
}
public static Collection<Artifact> getArtifactsContainingModuleOutput(@NotNull final Module module) {
ArtifactManager artifactManager = ArtifactManager.getInstance(module.getProject());
final PackagingElementResolvingContext context = artifactManager.getResolvingContext();
final Set<Artifact> result = new HashSet<>();
Processor<PackagingElement<?>> processor = element -> {
if (element instanceof ProductionModuleOutputPackagingElement
&& module.equals(((ProductionModuleOutputPackagingElement)element).findModule(context))) {
return false;
}
if (element instanceof ArtifactPackagingElement && result.contains(((ArtifactPackagingElement)element).findArtifact(context))) {
return false;
}
return true;
};
for (Artifact artifact : artifactManager.getSortedArtifacts()) {
boolean contains = !processPackagingElements(artifact, null, processor, context, true);
if (contains) {
result.add(artifact);
}
}
return result;
}
public static List<Artifact> getArtifactWithOutputPaths(Project project) {
final List<Artifact> result = new ArrayList<>();
for (Artifact artifact : ArtifactManager.getInstance(project).getSortedArtifacts()) {
if (!StringUtil.isEmpty(artifact.getOutputPath())) {
result.add(artifact);
}
}
return result;
}
public static String suggestArtifactFileName(String artifactName) {
return PathUtil.suggestFileName(artifactName, true, true);
}
@Nullable
public static Artifact addArtifact(@NotNull ModifiableArtifactModel artifactModel,
@NotNull ArtifactType type,
@NotNull ArtifactTemplate artifactTemplate) {
final ArtifactTemplate.NewArtifactConfiguration configuration = artifactTemplate.createArtifact();
if (configuration == null) {
return null;
}
final String baseName = configuration.getArtifactName();
String name = baseName;
int i = 2;
while (artifactModel.findArtifact(name) != null) {
name = baseName + i;
i++;
}
ArtifactType actualType = configuration.getArtifactType();
if (actualType == null) {
actualType = type;
}
final ModifiableArtifact artifact = artifactModel.addArtifact(name, actualType, configuration.getRootElement());
artifactTemplate.setUpArtifact(artifact, configuration);
return artifact;
}
}
|
|
package org.ovirt.engine.ui.webadmin.section.main.view.popup.storage;
import org.ovirt.engine.ui.common.CommonApplicationConstants;
import org.ovirt.engine.ui.common.CommonApplicationResources;
import org.ovirt.engine.ui.common.CommonApplicationTemplates;
import org.ovirt.engine.ui.common.idhandler.ElementIdHandler;
import org.ovirt.engine.ui.common.idhandler.WithElementId;
import org.ovirt.engine.ui.common.widget.dialog.AdvancedParametersExpander;
import org.ovirt.engine.ui.common.widget.editor.EntityModelTextBoxOnlyEditor;
import org.ovirt.engine.ui.common.widget.editor.ListModelListBoxOnlyEditor;
import org.ovirt.engine.ui.common.widget.uicommon.storage.AbstractStorageView;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.storage.NfsStorageModel;
import org.ovirt.engine.ui.webadmin.ApplicationConstants;
import org.ovirt.engine.ui.webadmin.gin.ClientGinjectorProvider;
import com.google.gwt.core.client.GWT;
import com.google.gwt.dom.client.Style;
import com.google.gwt.dom.client.TableElement;
import com.google.gwt.editor.client.SimpleBeanEditorDriver;
import com.google.gwt.resources.client.CssResource;
import com.google.gwt.text.shared.AbstractRenderer;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.UIObject;
import com.google.gwt.user.client.ui.ValueBox;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
/**
 * Storage-popup view for configuring an NFS storage domain. Binds an
 * {@link NfsStorageModel} to the UI via the GWT Editor framework
 * (field-to-model wiring is driven by the {@code @Path} annotations and a
 * generated {@link SimpleBeanEditorDriver}); advanced NFS options
 * (version, retransmissions, timeout) live behind an expander.
 */
public class NfsStorageView extends AbstractStorageView<NfsStorageModel> {
// GWT-generated editor driver that moves data between the model and the
// annotated @Path fields below.
interface Driver extends SimpleBeanEditorDriver<NfsStorageModel, NfsStorageView> {
Driver driver = GWT.create(Driver.class);
}
// GWT-generated binder for the companion *.ui.xml template.
interface ViewUiBinder extends UiBinder<Widget, NfsStorageView> {
ViewUiBinder uiBinder = GWT.create(ViewUiBinder.class);
}
// GWT-generated handler that assigns DOM element ids (for testing/automation).
interface ViewIdHandler extends ElementIdHandler<NfsStorageView> {
ViewIdHandler idHandler = GWT.create(ViewIdHandler.class);
}
@UiField
WidgetStyle style;
// Bound to NfsStorageModel.path by the editor driver.
@UiField
@WithElementId
@Path(value = "path.entity")
EntityModelTextBoxOnlyEditor pathEditor;
@UiField
@Ignore
Label pathLabel;
@UiField
@Ignore
Label pathHintLabel;
@UiField
@Ignore
AdvancedParametersExpander expander;
@UiField
@Ignore
Label warningLabel;
// Table holding the advanced options; shown/hidden by the expander.
@UiField
@Ignore
TableElement expanderContent;
// provided = true: created in initEditors() before UiBinder runs, since it
// needs a custom renderer.
@UiField(provided = true)
@WithElementId
@Path(value = "version.selectedItem")
ListModelListBoxOnlyEditor<Object> versionEditor;
// Read-only twin of versionEditor, shown when the version is not editable.
@UiField
@Ignore
EntityModelTextBoxOnlyEditor versionReadOnlyEditor;
@UiField
@Ignore
Label versionLabel;
@UiField
@WithElementId
@Path(value = "retransmissions.entity")
EntityModelTextBoxOnlyEditor retransmissionsEditor;
@UiField
@Ignore
Label retransmissionsLabel;
@UiField
@WithElementId
@Path(value = "timeout.entity")
EntityModelTextBoxOnlyEditor timeoutEditor;
@UiField
@Ignore
Label timeoutLabel;
@UiField
Label message;
protected static CommonApplicationConstants constants = GWT.create(CommonApplicationConstants.class);
protected static CommonApplicationResources resources = GWT.create(CommonApplicationResources.class);
protected static CommonApplicationTemplates templates = GWT.create(CommonApplicationTemplates.class);
/**
 * Builds the view: provided editors first (UiBinder needs them), then the
 * template, localized labels, the expander, element ids, styles, and
 * finally the editor driver.
 */
@Inject
public NfsStorageView() {
initEditors();
initWidget(ViewUiBinder.uiBinder.createAndBindUi(this));
localize(ClientGinjectorProvider.instance().getApplicationConstants());
initExpander();
ViewIdHandler.idHandler.generateAndSetIds(this);
addStyles();
Driver.driver.initialize(this);
}
private void initExpander() {
expander.initWithContent(expanderContent);
}
void addStyles() {
pathEditor.addContentWidgetStyleName(style.pathEditorContent());
expanderContent.setClassName(style.expanderContent());
}
// Creates the version list box with a renderer that shows each item's title.
void initEditors() {
versionEditor = new ListModelListBoxOnlyEditor<Object>(new AbstractRenderer<Object>() {
@Override
public String render(Object object) {
EntityModel model = (EntityModel) object;
return model.getTitle();
}
});
}
// Sets the localized label texts; called once from the constructor.
void localize(ApplicationConstants constants) {
pathLabel.setText(constants.storagePopupNfsPathLabel());
pathHintLabel.setText(constants.storagePopupNfsPathHintLabel());
warningLabel.setText(constants.advancedOptionsLabel());
versionLabel.setText(constants.storagePopupNfsVersionLabel());
retransmissionsLabel.setText(constants.storagePopupNfsRetransmissionsLabel());
timeoutLabel.setText(constants.storagePopupNfsTimeoutLabel());
}
/**
 * Pushes the model into the editors and adjusts visibility/read-only
 * styling according to each model field's availability and changeability.
 */
@Override
public void edit(NfsStorageModel object) {
Driver.driver.edit(object);
EntityModel version = (EntityModel) object.getVersion().getSelectedItem();
versionReadOnlyEditor.asValueBox().setValue(version != null ? version.getTitle() : null);
pathHintLabel.setVisible(object.getPath().getIsAvailable() && object.getPath().getIsChangable());
styleTextBoxEditor(pathEditor, object.getPath().getIsChangable());
styleTextBoxEditor(timeoutEditor, object.getTimeout().getIsChangable());
styleTextBoxEditor(retransmissionsEditor, object.getRetransmissions().getIsChangable());
styleTextBoxEditor(versionReadOnlyEditor, object.getVersion().getIsChangable());
// Editable list box and read-only text box are mutually exclusive.
setElementVisibility(versionEditor, object.getVersion().getIsChangable() && object.getVersion().getIsAvailable());
setElementVisibility(versionReadOnlyEditor, !object.getVersion().getIsChangable() || !object.getVersion().getIsAvailable());
setElementVisibility(versionLabel, object.getVersion().getIsAvailable());
setElementVisibility(retransmissionsLabel, object.getRetransmissions().getIsAvailable());
setElementVisibility(timeoutLabel, object.getTimeout().getIsAvailable());
// When all advanced fields are unavailable - hide the expander.
boolean anyField = object.getVersion().getIsAvailable()
|| object.getRetransmissions().getIsAvailable()
|| object.getTimeout().getIsAvailable();
expander.getElement().getStyle().setVisibility(anyField ? Style.Visibility.VISIBLE : Style.Visibility.HIDDEN);
}
/** Reads the edited values back into the model. */
@Override
public NfsStorageModel flush() {
return Driver.driver.flush();
}
interface WidgetStyle extends CssResource {
String pathEditorContent();
String expanderContent();
}
@Override
public void focus() {
pathEditor.setFocus(true);
}
/*
 * Makes a provided editor look like label (enabled, read-only textbox).
 */
private void styleTextBoxEditor(EntityModelTextBoxOnlyEditor editor, boolean enabled) {
// Only applies when the field is NOT changeable: keep the widget enabled
// (so text stays selectable) but read-only and borderless, like a label.
if (!enabled) {
editor.setEnabled(true);
ValueBox<Object> valueBox = editor.asValueBox();
valueBox.setReadOnly(true);
valueBox.getElement().getStyle().setBorderWidth(0, Style.Unit.PX);
}
}
// Toggles between display:block and display:none.
private void setElementVisibility(UIObject object, boolean value) {
object.getElement().getStyle().setDisplay(value ? Style.Display.BLOCK : Style.Display.NONE);
}
}
|
|
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.03.10 at 01:55:24 PM CST
//
package net.wisedream.ezhc.bean;
import javax.xml.bind.annotation.*;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <all>
* <element name="UserAgent" minOccurs="0">
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <whiteSpace value="collapse"/>
* </restriction>
* </simpleType>
* </element>
* <element name="Proxy" type="{http://www.wisedream.net/XML-HTTPCLIENT/0.2}host" minOccurs="0"/>
* <element name="ConnectTimeout" type="{http://www.w3.org/2001/XMLSchema}integer"/>
* <element name="ConnRequestTimeout" type="{http://www.w3.org/2001/XMLSchema}integer"/>
* <element name="SocketTimeout" type="{http://www.w3.org/2001/XMLSchema}integer"/>
* <element name="Credentials" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="Credential" maxOccurs="unbounded" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence maxOccurs="unbounded">
* <element name="Scope" type="{http://www.wisedream.net/XML-HTTPCLIENT/0.2}host"/>
* </sequence>
* <attribute name="username" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="password" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="CookieStore" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence maxOccurs="unbounded" minOccurs="0">
* <element name="Cookie">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attribute name="domain" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="path" type="{http://www.w3.org/2001/XMLSchema}string" default="/" />
* <attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* <attribute name="value" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="DefaultHeaders" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence maxOccurs="unbounded" minOccurs="0">
* <element name="Header" type="{http://www.wisedream.net/XML-HTTPCLIENT/0.2}header"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* <element name="SpecificHeaders" minOccurs="0">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence maxOccurs="unbounded" minOccurs="0">
* <element name="Header">
* <complexType>
* <complexContent>
* <extension base="{http://www.wisedream.net/XML-HTTPCLIENT/0.2}header">
* <attribute name="urlMatches" use="required" type="{http://www.w3.org/2001/XMLSchema}string" />
* </extension>
* </complexContent>
* </complexType>
* </element>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </all>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
})
@XmlRootElement(name = "HttpClient")
public class HttpClient {

    // NOTE(review): this class is JAXB-generated (see the generator notice at
    // the top of the file). Do not hand-edit -- changes will be lost when the
    // bindings are regenerated; modify the source schema instead.

    @XmlElement(name = "UserAgent")
    protected String userAgent;
    @XmlElement(name = "Proxy")
    protected Host proxy;
    // The three timeouts default to "-1" per the schema -- presumably meaning
    // "no timeout / library default"; confirm against the consuming client code.
    @XmlElement(name = "ConnectTimeout", required = true, defaultValue = "-1")
    protected BigInteger connectTimeout;
    @XmlElement(name = "ConnRequestTimeout", required = true, defaultValue = "-1")
    protected BigInteger connRequestTimeout;
    @XmlElement(name = "SocketTimeout", required = true, defaultValue = "-1")
    protected BigInteger socketTimeout;
    @XmlElement(name = "Credentials")
    protected Credentials credentials;
    @XmlElement(name = "CookieStore")
    protected CookieStore cookieStore;
    @XmlElement(name = "DefaultHeaders")
    protected DefaultHeaders defaultHeaders;
    @XmlElement(name = "SpecificHeaders")
    protected SpecificHeaders specificHeaders;

    /**
     * Gets the value of the userAgent property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getUserAgent() {
        return userAgent;
    }

    /**
     * Sets the value of the userAgent property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setUserAgent(String value) {
        this.userAgent = value;
    }

    /**
     * Gets the value of the proxy property.
     *
     * @return
     *     possible object is
     *     {@link net.wisedream.ezhc.bean.Host }
     *
     */
    public Host getProxy() {
        return proxy;
    }

    /**
     * Sets the value of the proxy property.
     *
     * @param value
     *     allowed object is
     *     {@link net.wisedream.ezhc.bean.Host }
     *
     */
    public void setProxy(Host value) {
        this.proxy = value;
    }

    /**
     * Gets the value of the connectTimeout property.
     *
     * @return
     *     possible object is
     *     {@link java.math.BigInteger }
     *
     */
    public BigInteger getConnectTimeout() {
        return connectTimeout;
    }

    /**
     * Sets the value of the connectTimeout property.
     *
     * @param value
     *     allowed object is
     *     {@link java.math.BigInteger }
     *
     */
    public void setConnectTimeout(BigInteger value) {
        this.connectTimeout = value;
    }

    /**
     * Gets the value of the connRequestTimeout property.
     *
     * @return
     *     possible object is
     *     {@link java.math.BigInteger }
     *
     */
    public BigInteger getConnRequestTimeout() {
        return connRequestTimeout;
    }

    /**
     * Sets the value of the connRequestTimeout property.
     *
     * @param value
     *     allowed object is
     *     {@link java.math.BigInteger }
     *
     */
    public void setConnRequestTimeout(BigInteger value) {
        this.connRequestTimeout = value;
    }

    /**
     * Gets the value of the socketTimeout property.
     *
     * @return
     *     possible object is
     *     {@link java.math.BigInteger }
     *
     */
    public BigInteger getSocketTimeout() {
        return socketTimeout;
    }

    /**
     * Sets the value of the socketTimeout property.
     *
     * @param value
     *     allowed object is
     *     {@link java.math.BigInteger }
     *
     */
    public void setSocketTimeout(BigInteger value) {
        this.socketTimeout = value;
    }

    /**
     * Gets the value of the credentials property.
     *
     * @return
     *     possible object is
     *     {@link net.wisedream.ezhc.bean.HttpClient.Credentials }
     *
     */
    public Credentials getCredentials() {
        return credentials;
    }

    /**
     * Sets the value of the credentials property.
     *
     * @param value
     *     allowed object is
     *     {@link net.wisedream.ezhc.bean.HttpClient.Credentials }
     *
     */
    public void setCredentials(Credentials value) {
        this.credentials = value;
    }

    /**
     * Gets the value of the cookieStore property.
     *
     * @return
     *     possible object is
     *     {@link net.wisedream.ezhc.bean.HttpClient.CookieStore }
     *
     */
    public CookieStore getCookieStore() {
        return cookieStore;
    }

    /**
     * Sets the value of the cookieStore property.
     *
     * @param value
     *     allowed object is
     *     {@link net.wisedream.ezhc.bean.HttpClient.CookieStore }
     *
     */
    public void setCookieStore(CookieStore value) {
        this.cookieStore = value;
    }

    /**
     * Gets the value of the defaultHeaders property.
     *
     * @return
     *     possible object is
     *     {@link net.wisedream.ezhc.bean.HttpClient.DefaultHeaders }
     *
     */
    public DefaultHeaders getDefaultHeaders() {
        return defaultHeaders;
    }

    /**
     * Sets the value of the defaultHeaders property.
     *
     * @param value
     *     allowed object is
     *     {@link net.wisedream.ezhc.bean.HttpClient.DefaultHeaders }
     *
     */
    public void setDefaultHeaders(DefaultHeaders value) {
        this.defaultHeaders = value;
    }

    /**
     * Gets the value of the specificHeaders property.
     *
     * @return
     *     possible object is
     *     {@link net.wisedream.ezhc.bean.HttpClient.SpecificHeaders }
     *
     */
    public SpecificHeaders getSpecificHeaders() {
        return specificHeaders;
    }

    /**
     * Sets the value of the specificHeaders property.
     *
     * @param value
     *     allowed object is
     *     {@link net.wisedream.ezhc.bean.HttpClient.SpecificHeaders }
     *
     */
    public void setSpecificHeaders(SpecificHeaders value) {
        this.specificHeaders = value;
    }

    /**
     * JAXB binding for the optional CookieStore element: a flat list of
     * Cookie entries.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "cookie"
    })
    public static class CookieStore {

        @XmlElement(name = "Cookie")
        protected List<Cookie> cookie;

        /**
         * Gets the value of the cookie property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the cookie property.
         *
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getCookie().add(newItem);
         * </pre>
         *
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link net.wisedream.ezhc.bean.HttpClient.CookieStore.Cookie }
         *
         */
        public List<Cookie> getCookie() {
            // Lazily created so JAXB can populate it; never returns null.
            if (cookie == null) {
                cookie = new ArrayList<Cookie>();
            }
            return this.cookie;
        }

        /**
         * JAXB binding for a single Cookie element: domain, path (schema
         * default "/"), name and value attributes.
         */
        @XmlAccessorType(XmlAccessType.FIELD)
        @XmlType(name = "")
        public static class Cookie {

            @XmlAttribute(name = "domain", required = true)
            protected String domain;
            @XmlAttribute(name = "path")
            protected String path;
            @XmlAttribute(name = "name", required = true)
            protected String name;
            @XmlAttribute(name = "value", required = true)
            protected String value;

            /**
             * Gets the value of the domain property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getDomain() {
                return domain;
            }

            /**
             * Sets the value of the domain property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setDomain(String value) {
                this.domain = value;
            }

            /**
             * Gets the value of the path property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getPath() {
                // Applies the schema default ("/") when the attribute is absent.
                if (path == null) {
                    return "/";
                } else {
                    return path;
                }
            }

            /**
             * Sets the value of the path property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setPath(String value) {
                this.path = value;
            }

            /**
             * Gets the value of the name property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getName() {
                return name;
            }

            /**
             * Sets the value of the name property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setName(String value) {
                this.name = value;
            }

            /**
             * Gets the value of the value property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getValue() {
                return value;
            }

            /**
             * Sets the value of the value property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setValue(String value) {
                this.value = value;
            }

        }

    }

    /**
     * JAXB binding for the optional Credentials element: a list of
     * Credential entries (username/password plus one or more host Scopes).
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "credential"
    })
    public static class Credentials {

        @XmlElement(name = "Credential")
        protected List<Credential> credential;

        /**
         * Gets the value of the credential property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the credential property.
         *
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getCredential().add(newItem);
         * </pre>
         *
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link net.wisedream.ezhc.bean.HttpClient.Credentials.Credential }
         *
         */
        public List<Credential> getCredential() {
            // Lazily created so JAXB can populate it; never returns null.
            if (credential == null) {
                credential = new ArrayList<Credential>();
            }
            return this.credential;
        }

        /**
         * JAXB binding for a single Credential element: required username and
         * password attributes and one or more Scope (host) children.
         */
        @XmlAccessorType(XmlAccessType.FIELD)
        @XmlType(name = "", propOrder = {
            "scope"
        })
        public static class Credential {

            @XmlElement(name = "Scope", required = true)
            protected List<Host> scope;
            @XmlAttribute(name = "username", required = true)
            protected String username;
            @XmlAttribute(name = "password", required = true)
            protected String password;

            /**
             * Gets the value of the scope property.
             *
             * <p>
             * This accessor method returns a reference to the live list,
             * not a snapshot. Therefore any modification you make to the
             * returned list will be present inside the JAXB object.
             * This is why there is not a <CODE>set</CODE> method for the scope property.
             *
             * <p>
             * For example, to add a new item, do as follows:
             * <pre>
             *    getScope().add(newItem);
             * </pre>
             *
             * <p>
             * Objects of the following type(s) are allowed in the list
             * {@link net.wisedream.ezhc.bean.Host }
             *
             */
            public List<Host> getScope() {
                // Lazily created so JAXB can populate it; never returns null.
                if (scope == null) {
                    scope = new ArrayList<Host>();
                }
                return this.scope;
            }

            /**
             * Gets the value of the username property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getUsername() {
                return username;
            }

            /**
             * Sets the value of the username property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setUsername(String value) {
                this.username = value;
            }

            /**
             * Gets the value of the password property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getPassword() {
                return password;
            }

            /**
             * Sets the value of the password property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setPassword(String value) {
                this.password = value;
            }

        }

    }

    /**
     * JAXB binding for the optional DefaultHeaders element: headers applied
     * to every request.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "header"
    })
    public static class DefaultHeaders {

        @XmlElement(name = "Header")
        protected List<Header> header;

        /**
         * Gets the value of the header property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the header property.
         *
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getHeader().add(newItem);
         * </pre>
         *
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link Header }
         *
         */
        public List<Header> getHeader() {
            // Lazily created so JAXB can populate it; never returns null.
            if (header == null) {
                header = new ArrayList<Header>();
            }
            return this.header;
        }

    }

    /**
     * JAXB binding for the optional SpecificHeaders element: headers applied
     * only to requests whose URL matches each header's urlMatches attribute.
     */
    @XmlAccessorType(XmlAccessType.FIELD)
    @XmlType(name = "", propOrder = {
        "header"
    })
    public static class SpecificHeaders {

        @XmlElement(name = "Header")
        protected List<Header> header;

        /**
         * Gets the value of the header property.
         *
         * <p>
         * This accessor method returns a reference to the live list,
         * not a snapshot. Therefore any modification you make to the
         * returned list will be present inside the JAXB object.
         * This is why there is not a <CODE>set</CODE> method for the header property.
         *
         * <p>
         * For example, to add a new item, do as follows:
         * <pre>
         *    getHeader().add(newItem);
         * </pre>
         *
         * <p>
         * Objects of the following type(s) are allowed in the list
         * {@link net.wisedream.ezhc.bean.HttpClient.SpecificHeaders.Header }
         *
         */
        public List<Header> getHeader() {
            // Lazily created so JAXB can populate it; never returns null.
            if (header == null) {
                header = new ArrayList<Header>();
            }
            return this.header;
        }

        /**
         * JAXB binding for a Header element that extends the plain header
         * type with a required urlMatches attribute (presumably a regex or
         * match pattern applied to the request URL -- confirm in the consumer).
         */
        @XmlAccessorType(XmlAccessType.FIELD)
        @XmlType(name = "")
        public static class Header
            extends net.wisedream.ezhc.bean.Header
        {

            @XmlAttribute(name = "urlMatches", required = true)
            protected String urlMatches;

            /**
             * Gets the value of the urlMatches property.
             *
             * @return
             *     possible object is
             *     {@link String }
             *
             */
            public String getUrlMatches() {
                return urlMatches;
            }

            /**
             * Sets the value of the urlMatches property.
             *
             * @param value
             *     allowed object is
             *     {@link String }
             *
             */
            public void setUrlMatches(String value) {
                this.urlMatches = value;
            }

        }

    }

}
|
|
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx;
/**
* An object representing a notification sent to an {@link Observable}.
*/
public final class Notification<T> {

    /** Single shared instance backing every {@code OnCompleted} notification (it carries no state). */
    private static final Notification<Void> ON_COMPLETED = new Notification<Void>(Kind.OnCompleted, null, null);

    private final Kind kind;
    private final T value;
    private final Throwable throwable;

    /** The variety of a notification: an item, an error, or completion. */
    public enum Kind {
        OnNext, OnError, OnCompleted
    }

    private Notification(Kind kind, T value, Throwable e) {
        this.kind = kind;
        this.value = value;
        this.throwable = e;
    }

    /**
     * Creates and returns a {@code Notification} of variety {@code Kind.OnNext}, and assigns it a value.
     *
     * @param t
     *            the item to assign to the notification as its value
     * @return an {@code OnNext} variety of {@code Notification}
     */
    public static <T> Notification<T> createOnNext(T t) {
        return new Notification<T>(Kind.OnNext, t, null);
    }

    /**
     * Creates and returns a {@code Notification} of variety {@code Kind.OnError}, and assigns it an exception.
     *
     * @param e
     *            the exception to assign to the notification
     * @return an {@code OnError} variety of {@code Notification}
     */
    public static <T> Notification<T> createOnError(Throwable e) {
        return new Notification<T>(Kind.OnError, null, e);
    }

    /**
     * Creates and returns a {@code Notification} of variety {@code Kind.OnCompleted}.
     *
     * @return an {@code OnCompleted} variety of {@code Notification}
     */
    @SuppressWarnings("unchecked")
    public static <T> Notification<T> createOnCompleted() {
        // Safe: the shared instance holds neither a value nor an error.
        return (Notification<T>) ON_COMPLETED;
    }

    /**
     * Creates and returns a {@code Notification} of variety {@code Kind.OnCompleted}.
     *
     * @param type
     *            witness used only to drive type inference at the call site
     * @return an {@code OnCompleted} variety of {@code Notification}
     */
    @SuppressWarnings("unchecked")
    public static <T> Notification<T> createOnCompleted(Class<T> type) {
        return (Notification<T>) ON_COMPLETED;
    }

    /**
     * Retrieves the exception associated with this (onError) notification.
     *
     * @return the Throwable associated with this (onError) notification
     */
    public Throwable getThrowable() {
        return throwable;
    }

    /**
     * Retrieves the item associated with this (onNext) notification.
     *
     * @return the item associated with this (onNext) notification
     */
    public T getValue() {
        return value;
    }

    /**
     * Indicates whether this notification has an item associated with it.
     * Note that an {@code OnNext} notification carrying a {@code null} item
     * reports {@code false} here.
     *
     * @return {@code true} iff this is an {@code OnNext} with a non-null item
     */
    public boolean hasValue() {
        return kind == Kind.OnNext && value != null;
    }

    /**
     * Indicates whether this notification has an exception associated with it.
     *
     * @return {@code true} iff this is an {@code OnError} with a non-null Throwable
     */
    public boolean hasThrowable() {
        return kind == Kind.OnError && throwable != null;
    }

    /**
     * Retrieves the kind of this notification: {@code OnNext}, {@code OnError}, or {@code OnCompleted}
     *
     * @return the kind of the notification
     */
    public Kind getKind() {
        return kind;
    }

    /** @return whether this notification represents an {@code onError} event */
    public boolean isOnError() {
        return kind == Kind.OnError;
    }

    /** @return whether this notification represents an {@code onCompleted} event */
    public boolean isOnCompleted() {
        return kind == Kind.OnCompleted;
    }

    /** @return whether this notification represents an {@code onNext} event */
    public boolean isOnNext() {
        return kind == Kind.OnNext;
    }

    /**
     * Forwards this notification on to a specified {@link Observer}, invoking
     * the callback matching this notification's kind.
     */
    public void accept(Observer<? super T> observer) {
        switch (kind) {
        case OnNext:
            observer.onNext(getValue());
            break;
        case OnCompleted:
            observer.onCompleted();
            break;
        case OnError:
            observer.onError(getThrowable());
            break;
        }
    }

    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("[").append(super.toString()).append(' ').append(kind);
        if (hasValue()) {
            text.append(' ').append(getValue());
        }
        if (hasThrowable()) {
            text.append(' ').append(getThrowable().getMessage());
        }
        return text.append(']').toString();
    }

    @Override
    public int hashCode() {
        int result = kind.hashCode();
        if (hasValue()) {
            result = result * 31 + value.hashCode();
        }
        if (hasThrowable()) {
            result = result * 31 + throwable.hashCode();
        }
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || obj.getClass() != getClass()) {
            return false;
        }
        Notification<?> other = (Notification<?>) obj;
        if (other.getKind() != kind) {
            return false;
        }
        if (hasValue()) {
            return value.equals(other.getValue());
        }
        if (hasThrowable()) {
            return throwable.equals(other.getThrowable());
        }
        // Neither an item nor an error on this side: equal only when the
        // other side also carries neither.
        return !other.hasValue() && !other.hasThrowable();
    }
}
|
|
package engine;
import java.util.*;
import data.*;
// PortGA - java version
// This class should implement the basic calculations and generation methods of a
// portfolio, so that I can use both GA and GP genomes to generate
// the same portfolio.
// The portfolio is composed of three parts: A vector of names, which contains
// The name of the composing assets, a vector of weights, and a vector of
// stock. The input in the portfolio might be either the weights, or the
// number of each stock.
/* Todo:
* Portfolio Operation:
* -- Portfolio Adding
* -- Portfolio subtracting
* -- Portfolio weight updating from changed stock value
* -- rounding off lots based on stock price
*/
public class Portfolio {

	/** Orders portfolio entries by ascending asset index (used by Merge). */
	static final Comparator<PortWeight> CINDEX =
		new Comparator<PortWeight>()
		{
			public int compare(PortWeight w1, PortWeight w2)
			{
				if (w1.index < w2.index)
					return -1;
				if (w1.index > w2.index)
					return 1;
				return 0;
			};
		};

	// Portfolio entries: asset name, market index and weight.
	Vector<PortWeight> pwlist;
	// Shared market singleton providing asset names and prices.
	Market mkt;

	public Portfolio()
	{
		pwlist = new Vector<PortWeight>();
		mkt = Market.getInstance();
	}

	/** Returns a deep copy of this portfolio (entries are duplicated). */
	public Portfolio copy()
	{
		Portfolio cp = new Portfolio();
		for (int i = 0; i < pwlist.size(); i++)
		{
			PortWeight pw = pwlist.get(i);
			cp.pwlist.add(new PortWeight(pw.name, pw.index, pw.weight));
		}
		return cp;
	}

	/**
	 * Clears the portfolio and sets new weights. wgt[i] is the weight of
	 * the i-th market asset; entries with weight <= 0 are dropped.
	 */
	public void setWeights(double[] wgt)
	{
		pwlist.clear();
		for (int i = 0; i < wgt.length; i++)
		{
			if (wgt[i] > 0)
				pwlist.add(new PortWeight(mkt.assets.get(i).name, i, wgt[i]));
		}
	}

	/** Boxed variant of {@link #setWeights(double[])}. */
	public void setWeights(Double[] wgt)
	{
		// Unbox and delegate so both overloads share one implementation.
		double[] raw = new double[wgt.length];
		for (int i = 0; i < wgt.length; i++)
			raw[i] = wgt[i];
		setWeights(raw);
	}

	/**
	 * Weight of the asset with market index n, or 0 when the asset is not
	 * in the portfolio. Linear scan; entry order is not guaranteed.
	 */
	public double getWeightByIndex(int n)
	{
		for (int i = 0; i < pwlist.size(); i++)
		{
			PortWeight p = pwlist.get(i);
			if (p.index == n)
				return p.weight;
		}
		return 0;
	}

	/** Weight of the n-th entry (positional; order not guaranteed). */
	public double getWeightByPos(int n)
	{
		return pwlist.get(n).weight;
	}

	/** Market index of the n-th entry (positional; order not guaranteed). */
	public int getIndexByPos(int n)
	{
		return pwlist.get(n).index;
	}

	/**
	 * Market index of the heaviest asset. Sorts pwlist by PortWeight's
	 * natural ordering as a side effect (presumably descending weight --
	 * confirm PortWeight.compareTo). Throws if the portfolio is empty.
	 */
	public int getMaxWeightIndex()
	{
		Collections.sort(pwlist); // This should order the assets by weight
		return pwlist.get(0).index;
	}

	/** Number of assets currently held in the portfolio. */
	public int getAssetSize()
	{
		return pwlist.size();
	}

	/**
	 * Sets the weight of the asset with market index idx. A weight of 0
	 * removes the entry; a positive weight on a missing asset adds it.
	 */
	public void setWeight(int idx, double wgt)
	{
		for (int i = 0; i < pwlist.size(); i++)
			if (pwlist.get(i).index == idx)
			{
				if (wgt == 0)
					pwlist.remove(i);
				else
					pwlist.get(i).weight = wgt;
				return;
			}
		if (wgt > 0)
			pwlist.add(new PortWeight(mkt.assets.get(idx).name, idx, wgt));
		return;
	}

	/**
	 * Compose this portfolio as a merge of the two portfolios given:
	 * pleft contributes with weight wgt, pright with (1 - wgt). Assets
	 * held by both are blended. Both inputs are sorted by index in place.
	 */
	public void Merge(Portfolio pleft, Portfolio pright, double wgt)
	{
		Collections.sort(pleft.pwlist, CINDEX);
		Collections.sort(pright.pwlist, CINDEX);
		int i = 0;
		int j = 0;
		Vector<PortWeight> newplist = new Vector<PortWeight>();
		// Classic sorted-merge: advance whichever side has the smaller index.
		while (i < pleft.pwlist.size() && j < pright.pwlist.size())
		{
			PortWeight mypw = pleft.pwlist.get(i);
			PortWeight p2pw = pright.pwlist.get(j);
			if (mypw.index == p2pw.index)
			{
				newplist.add(new PortWeight(mypw.name, mypw.index, (mypw.weight * wgt) + (p2pw.weight * (1 - wgt))));
				i++;
				j++;
			}
			else if (mypw.index > p2pw.index)
			{
				newplist.add(new PortWeight(p2pw.name, p2pw.index, p2pw.weight * (1 - wgt)));
				j++;
			}
			else // mypw.index < p2pw.index (the three cases are exhaustive)
			{
				newplist.add(new PortWeight(mypw.name, mypw.index, mypw.weight * wgt));
				i++;
			}
		}
		// Drain whichever side still has entries.
		while (i < pleft.pwlist.size())
		{
			PortWeight mypw = pleft.pwlist.get(i);
			newplist.add(new PortWeight(mypw.name, mypw.index, mypw.weight * wgt));
			i++;
		}
		while (j < pright.pwlist.size())
		{
			PortWeight p2pw = pright.pwlist.get(j);
			newplist.add(new PortWeight(p2pw.name, p2pw.index, p2pw.weight * (1 - wgt)));
			j++;
		}
		pwlist = newplist;
	}

	/**
	 * Rescales weights so they sum to 1. Does not change lots -- the
	 * portfolio must be made internally consistent after using this.
	 * A no-op when the weight sum is 0 (empty portfolio), which previously
	 * produced NaN weights through division by zero.
	 */
	public void normalizeWeight()
	{
		double sum = 0;
		for (int i = 0; i < pwlist.size(); i++)
		{
			sum += pwlist.get(i).weight;
		}
		if (sum == 0)
			return;
		for (int i = 0; i < pwlist.size(); i++)
		{
			pwlist.get(i).weight = pwlist.get(i).weight / sum;
		}
	}

	/*
	 * Returns the total value of the portfolio at a given date. It requires
	 * the number of lots to be set.
	 */
	public Double totalValue(Date d)
	{
		double val = 0.0;
		Market mkt = Market.getInstance();
		// FIXME: Add real lots later (100 is a hard-coded placeholder lot size)
		for (int i = 0; i < pwlist.size(); i++)
		{
			PortWeight pw = pwlist.get(i);
			val += pw.weight * 100 * mkt.assets.get(pw.index).getPriceByDate(d);
		}
		return val;
	}

	/**
	 * Generates a String which lists, in descending order, the component
	 * assets of this Portfolio.
	 *
	 * The parameter tresh is the weight threshold for an asset in the
	 * portfolio to be included in the string. To include all assets (even
	 * those with weight 0), set tresh to a negative value.
	 *
	 * Sorts pwlist by PortWeight's natural ordering as a side effect.
	 *
	 * @param tresh minimum (exclusive) weight for inclusion
	 * @return one "name:\tweight\n" line per included asset
	 */
	public String dump(Double tresh)
	{
		// StringBuilder avoids the O(n^2) cost of String concatenation in a loop.
		StringBuilder result = new StringBuilder();
		Collections.sort(pwlist);
		int i = 0;
		while (i < pwlist.size() && pwlist.get(i).weight > tresh)
		{
			result.append(pwlist.get(i).name).append(":\t").append(pwlist.get(i).weight).append("\n");
			i++;
		}
		return result.toString();
	}
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.alert.siddhi;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.apache.eagle.alert.config.AbstractPolicyDefinition;
import org.apache.eagle.alert.entity.AlertStreamSchemaEntity;
import org.apache.eagle.alert.common.AlertConstants;
import org.apache.eagle.alert.policy.PolicyManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.siddhi.core.ExecutionPlanRuntime;
import org.wso2.siddhi.core.SiddhiManager;
import org.wso2.siddhi.core.query.output.callback.QueryCallback;
import org.wso2.siddhi.core.stream.input.InputHandler;
import org.wso2.siddhi.query.api.execution.query.Query;
import org.wso2.siddhi.query.api.execution.query.selection.OutputAttribute;
import org.apache.eagle.alert.entity.AlertAPIEntity;
import org.apache.eagle.alert.entity.AlertDefinitionAPIEntity;
import org.apache.eagle.alert.policy.PolicyEvaluator;
import org.apache.eagle.dataproc.core.JsonSerDeserUtils;
import org.apache.eagle.dataproc.core.ValuesArray;
import com.typesafe.config.Config;
/**
 * Siddhi-CEP backed {@link PolicyEvaluator}.
 *
 * When a policy is updated or deleted, the old execution plan runtime must be
 * shut down to release resources; during that window the retiring
 * {@link SiddhiRuntime} object is locked (see {@link #onPolicyUpdate} and
 * {@link #onPolicyDelete}), so synchronization is important.
 */
public class SiddhiPolicyEvaluator implements PolicyEvaluator{
    private final static Logger LOG = LoggerFactory.getLogger(SiddhiPolicyEvaluator.class);
    public static final int DEFAULT_QUEUE_SIZE = 1000;
    // NOTE(review): this queue is never read or written anywhere in this
    // class - looks like dead state; confirm before removing.
    private final BlockingQueue<AlertAPIEntity> queue = new ArrayBlockingQueue<AlertAPIEntity>(DEFAULT_QUEUE_SIZE);
    // volatile: onPolicyUpdate() replaces the whole runtime object while
    // evaluate() may be running concurrently on other threads.
    private volatile SiddhiRuntime siddhiRuntime;
    private String[] sourceStreams;
    // When true, each incoming event's key set is checked against the stream
    // metadata schema before being fed to Siddhi (expensive; see evaluate()).
    private boolean needValidation;
    private String policyId;
    private Config config;
    // Doubles as the @info(name=...) of the Siddhi query AND the name of the
    // private "query" field of QueryCallback read via reflection in
    // createSiddhiRuntime().
    private final static String EXECUTION_PLAN_NAME = "query";
    /**
     * Everything dependent on policyDef is bundled here so it can be swapped
     * atomically at runtime (via the volatile siddhiRuntime field).
     */
    public static class SiddhiRuntime{
        QueryCallback callback;
        Map<String, InputHandler> siddhiInputHandlers;
        SiddhiManager siddhiManager;
        SiddhiPolicyDefinition policyDef;
        List<String> outputFields;
        String executionPlanName;
    }
    /** Convenience constructor with runtime event validation disabled. */
    public SiddhiPolicyEvaluator(Config config, String policyName, AbstractPolicyDefinition policyDef, String[] sourceStreams){
        this(config, policyName, policyDef, sourceStreams, false);
    }
    /**
     * @param config         application configuration passed to the query callback
     * @param policyId       id reported in getAdditionalContext()
     * @param policyDef      must actually be a SiddhiPolicyDefinition (cast in init)
     * @param sourceStreams  names of the input streams this policy consumes
     * @param needValidation whether to schema-check each incoming event
     */
    public SiddhiPolicyEvaluator(Config config, String policyId, AbstractPolicyDefinition policyDef, String[] sourceStreams, boolean needValidation){
        this.config = config;
        this.policyId = policyId;
        this.needValidation = needValidation;
        this.sourceStreams = sourceStreams;
        init(policyDef);
    }
    /** Builds (or rebuilds) the Siddhi runtime from the given definition. */
    public void init(AbstractPolicyDefinition policyDef){
        siddhiRuntime = createSiddhiRuntime((SiddhiPolicyDefinition)policyDef);
    }
    /**
     * Rewrites "select fieldA, fieldB" into
     * "select eagleAlertContext, fieldA, fieldB" so the alert context is
     * always the first output attribute; a "select *" expression is returned
     * unchanged.
     *
     * NOTE(review): assumes the expression contains "select " - if not,
     * indexOf returns -1 and pos becomes 6, silently corrupting the
     * expression. Confirm upstream guarantees this substring is present.
     */
    public static String addContextFieldIfNotExist(String expression) {
        // select fieldA, fieldB --> select eagleAlertContext, fieldA, fieldB
        int pos = expression.indexOf("select ") + 7;
        int index = pos;
        boolean isSelectStarPattern = true;
        // Scan past whitespace after "select "; '*' means select-star.
        while(index < expression.length()) {
            if (expression.charAt(index) == ' ') index++;
            else if (expression.charAt(index) == '*') break;
            else {
                isSelectStarPattern = false;
                break;
            }
        }
        if (isSelectStarPattern) return expression;
        StringBuilder sb = new StringBuilder();
        sb.append(expression.substring(0, pos));
        sb.append(SiddhiStreamMetadataUtils.EAGLE_ALERT_CONTEXT_FIELD + ",");
        sb.append(expression.substring(pos, expression.length()));
        return sb.toString();
    }
    /**
     * Assembles the stream definitions + query into one execution plan,
     * starts it, and captures the query's output field names.
     */
    private SiddhiRuntime createSiddhiRuntime(SiddhiPolicyDefinition policyDef){
        SiddhiManager siddhiManager = new SiddhiManager();
        Map<String, InputHandler> siddhiInputHandlers = new HashMap<String, InputHandler>();
        StringBuilder sb = new StringBuilder();
        // Prepend one stream definition per source stream.
        for(String sourceStream : sourceStreams){
            String streamDef = SiddhiStreamMetadataUtils.convertToStreamDef(sourceStream);
            LOG.info("Siddhi stream definition : " + streamDef);
            sb.append(streamDef);
        }
        String expression = addContextFieldIfNotExist(policyDef.getExpression());
        // @info(name=...) names the query so addCallback() can target it.
        String executionPlan = sb.toString() + " @info(name = '" + EXECUTION_PLAN_NAME + "') " + expression;
        ExecutionPlanRuntime executionPlanRuntime = siddhiManager.createExecutionPlanRuntime(executionPlan);
        for(String sourceStream : sourceStreams){
            siddhiInputHandlers.put(sourceStream, executionPlanRuntime.getInputHandler(sourceStream));
        }
        executionPlanRuntime.start();
        QueryCallback callback = new SiddhiQueryCallbackImpl(config, this);
        LOG.info("Siddhi query: " + expression);
        executionPlanRuntime.addCallback(EXECUTION_PLAN_NAME, callback);
        List<String> outputFields = new ArrayList<String>();
        // Reflection hack: QueryCallback keeps the parsed Query in a private
        // field (named "query", same text as EXECUTION_PLAN_NAME) with no
        // public accessor; we read it to learn the selected output fields.
        try {
            Field field = QueryCallback.class.getDeclaredField(EXECUTION_PLAN_NAME);
            field.setAccessible(true);
            Query query = (Query)field.get(callback);
            List<OutputAttribute> list = query.getSelector().getSelectionList();
            for (OutputAttribute output : list) {
                outputFields.add(output.getRename());
            }
        }
        catch (Exception ex) {
            // Non-fatal: evaluation still works, only outputFields stays empty.
            LOG.error("Got an Exception when initial outputFields ", ex);
        }
        SiddhiRuntime runtime = new SiddhiRuntime();
        runtime.siddhiInputHandlers = siddhiInputHandlers;
        runtime.siddhiManager = siddhiManager;
        runtime.callback = callback;
        runtime.policyDef = policyDef;
        runtime.outputFields = outputFields;
        runtime.executionPlanName = executionPlanRuntime.getName();
        return runtime;
    }
    /**
     * 1. input has 3 fields: first is the siddhi alert context, second is the
     *    streamName, the last one is a SortedMap of attribute name/value
     * 2. runtime check for input data (this is very expensive, so it is
     *    skipped unless needValidation is set):
     *    the size of the input map should equal the size of the attributes the
     *    stream metadata defines, the attribute names should match, and no
     *    input field may be null
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Override
    public void evaluate(ValuesArray data) throws Exception {
        if(LOG.isDebugEnabled()) LOG.debug("Siddhi policy evaluator consumers data :" + data);
        Object siddhiAlertContext = data.get(0);
        String streamName = (String)data.get(1);
        SortedMap map = (SortedMap)data.get(2);
        validateEventInRuntime(streamName, map);
        // Lock the runtime so onPolicyUpdate/onPolicyDelete cannot shut the
        // execution plan down while an event is in flight.
        synchronized(siddhiRuntime){
            //insert siddhiAlertContext into the first field
            List<Object> input = new ArrayList<>();
            input.add(siddhiAlertContext);
            putAttrsIntoInputStream(input, streamName, map);
            siddhiRuntime.siddhiInputHandlers.get(streamName).send(input.toArray(new Object[0]));
        }
    }
    /**
     * Schema-checks the incoming event's key set against the stream metadata.
     * This is a heavy operation, we should avoid using it (no-op unless
     * needValidation is set).
     * @param sourceStream stream whose schema to check against
     * @param data         incoming attribute name/value map
     */
    private void validateEventInRuntime(String sourceStream, SortedMap data){
        if(!needValidation)
            return;
        SortedMap<String, AlertStreamSchemaEntity> map = StreamMetadataManager.getInstance().getMetadataEntityMapForStream(sourceStream);
        if(!map.keySet().equals(data.keySet()))
            throw new IllegalStateException("incoming data schema is different from supported data schema, incoming data: " + data.keySet() + ", schema: " + map.keySet());
    }
    /**
     * Appends the event's attribute values to the Siddhi input row. With
     * validation enabled, null values are replaced by the schema's default
     * for that attribute.
     */
    private void putAttrsIntoInputStream(List<Object> input, String streamName, SortedMap map) {
        if(!needValidation) {
            // Fast path: trust the SortedMap's natural key order.
            input.addAll(map.values());
            return;
        }
        for (Object key : map.keySet()) {
            Object value = map.get(key);
            if (value == null) {
                input.add(SiddhiStreamMetadataUtils.getAttrDefaultValue(streamName, (String)key));
            }
            else input.add(value);
        }
    }
    /**
     * Hot-swaps the runtime: build the new one first, publish it via the
     * volatile field, then shut the old plan down under its own lock so any
     * in-flight evaluate() call finishes first.
     */
    @Override
    public void onPolicyUpdate(AlertDefinitionAPIEntity newAlertDef) {
        AbstractPolicyDefinition policyDef = null;
        try {
            policyDef = JsonSerDeserUtils.deserialize(newAlertDef.getPolicyDef(),
                    AbstractPolicyDefinition.class, PolicyManager.getInstance().getPolicyModules(newAlertDef.getTags().get(AlertConstants.POLICY_TYPE)));
        }
        catch (Exception ex) {
            // NOTE(review): policyDef stays null here and the cast below will
            // produce a runtime with a null definition - confirm intent.
            LOG.error("Initial policy def error, ", ex);
        }
        SiddhiRuntime previous = siddhiRuntime;
        siddhiRuntime = createSiddhiRuntime((SiddhiPolicyDefinition)policyDef);
        synchronized(previous){
            previous.siddhiManager.getExecutionPlanRuntime(previous.executionPlanName).shutdown();
        }
    }
    /** Shuts down the current execution plan, releasing Siddhi resources. */
    @Override
    public void onPolicyDelete(){
        synchronized(siddhiRuntime){
            LOG.info("Going to shutdown siddhi execution plan, planName: " + siddhiRuntime.executionPlanName);
            siddhiRuntime.siddhiManager.getExecutionPlanRuntime(siddhiRuntime.executionPlanName).shutdown();
            LOG.info("Siddhi execution plan " + siddhiRuntime.executionPlanName + " is successfully shutdown ");
        }
    }
    @Override
    public String toString(){
        // show the policyDef
        return siddhiRuntime.policyDef.toString();
    }
    /** @return the source stream names this evaluator consumes */
    public String[] getStreamNames() {
        return sourceStreams;
    }
    /**
     * @return alert context map: comma-joined source stream names plus the
     *         policy id
     */
    public Map<String, String> getAdditionalContext() {
        Map<String, String> context = new HashMap<String, String>();
        StringBuilder sourceStreams = new StringBuilder();
        for (String streamName : getStreamNames()) {
            sourceStreams.append(streamName + ",");
        }
        // Drop the trailing comma.
        if (sourceStreams.length() > 0) {
            sourceStreams.deleteCharAt(sourceStreams.length() - 1);
        }
        context.put(AlertConstants.SOURCE_STREAMS, sourceStreams.toString());
        context.put(AlertConstants.POLICY_ID, policyId);
        return context;
    }
    /** @return output attribute names captured from the query selector */
    public List<String> getOutputStreamAttrNameList() {
        return siddhiRuntime.outputFields;
    }
}
|
|
/*
* Copyright 2013-2014 must-be.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.consulo.compiler.server.application;
import com.intellij.core.CoreFileTypeRegistry;
import com.intellij.ide.StartupProgress;
import com.intellij.ide.plugins.IdeaPluginDescriptor;
import com.intellij.ide.plugins.PluginManagerCore;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.*;
import com.intellij.openapi.application.ex.ApplicationEx2;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.components.StateStorageException;
import com.intellij.openapi.components.impl.ApplicationPathMacroManager;
import com.intellij.openapi.components.impl.ComponentManagerImpl;
import com.intellij.openapi.components.impl.stores.ApplicationStoreImpl;
import com.intellij.openapi.components.impl.stores.IApplicationStore;
import com.intellij.openapi.components.impl.stores.IComponentStore;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.FileTypeRegistry;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ex.ProjectManagerEx;
import com.intellij.openapi.project.impl.ProjectManagerImpl;
import com.intellij.openapi.util.*;
import com.intellij.util.io.storage.HeavyProcessLatch;
import org.consulo.lombok.annotations.Logger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.mustbe.consulo.RequiredDispatchThread;
import org.mustbe.consulo.RequiredReadAction;
import org.mustbe.consulo.RequiredWriteAction;
import org.picocontainer.MutablePicoContainer;
import javax.swing.*;
import java.awt.*;
import java.io.IOException;
import java.util.concurrent.*;
/**
 * Headless Application implementation for the Consulo compiler server.
 *
 * There is no event-dispatch thread and no real read/write lock here: every
 * run*Action / invokeLater variant simply executes the runnable inline on the
 * calling thread, and all thread-assertion methods are no-ops.
 *
 * The @Logger annotation (org.consulo.lombok) generates the static LOGGER
 * used throughout this class.
 *
 * @author VISTALL
 * @since 11:26/12.08.13
 */
@Logger
public class CompilerServerApplication extends ComponentManagerImpl implements ApplicationEx2 {
    // Initialization-on-demand holder: the pool is created only when
    // executeOnPooledThread is first used.
    private static class ExecutorServiceHolder {
        private static final ExecutorService ourThreadExecutorsService = createServiceImpl();
        private static ThreadPoolExecutor createServiceImpl() {
            // SynchronousQueue + unbounded max: each submitted task is handed
            // straight to a free (or new) thread; idle threads die after 60s.
            return new ThreadPoolExecutor(10, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(), new ThreadFactory() {
                @NotNull
                @Override
                @SuppressWarnings({"HardCodedStringLiteral"})
                public Thread newThread(@NotNull Runnable r) {
                    return new Thread(r, "CompilerServerApplication pooled thread");
                }
            });
        }
    }
    /**
     * Creates the application instance and registers it with
     * ApplicationManager together with a core file-type registry.
     */
    public static CompilerServerApplication createApplication() {
        final CompilerServerApplication app = new CompilerServerApplication();
        ApplicationManager.setApplication(app, new Getter<FileTypeRegistry>() {
            @Override
            public FileTypeRegistry get() {
                return new CoreFileTypeRegistry();
            }
        }, app
        );
        return app;
    }
    // Set while disposeSelf() is running; also consulted by the shutdown hook.
    private boolean myDisposeInProgress;
    public CompilerServerApplication() {
        super(null);
        ApplicationManager.setApplication(this, Disposer.newDisposable());
        getPicoContainer().registerComponentInstance(Application.class, this);
        loadApplicationComponents();
        registerShutdownHook();
    }
    /** Initializes plugins (with a no-op progress) and loads their app components. */
    private void loadApplicationComponents() {
        PluginManagerCore.initPlugins(new StartupProgress() {
            @Override
            public void showProgress(String message, float progress) {
            }
        });
        final IdeaPluginDescriptor[] plugins = PluginManagerCore.getPlugins();
        for (IdeaPluginDescriptor plugin : plugins) {
            if (PluginManagerCore.shouldSkipPlugin(plugin)) continue;
            loadComponentsConfiguration(plugin.getAppComponents(), plugin, false);
        }
    }
    @Override
    protected void bootstrapPicoContainer(@NotNull String name) {
        super.bootstrapPicoContainer(name);
        // Register the application-level settings store and path macros.
        getPicoContainer().registerComponentImplementation(IComponentStore.class, ApplicationStoreImpl.class);
        getPicoContainer().registerComponentImplementation(ApplicationPathMacroManager.class);
    }
    @Override
    public void initializeComponent(Object component, boolean service) {
        getStateStore().initComponent(component, service);
    }
    @NotNull
    @Override
    public IApplicationStore getStateStore() {
        return (IApplicationStore)getPicoContainer().getComponentInstance(IComponentStore.class);
    }
    @NotNull
    @Override
    protected MutablePicoContainer createPicoContainer() {
        // Reuse the root extension area's container instead of creating a new one.
        return Extensions.getRootArea().getPicoContainer();
    }
    @Override
    public synchronized void dispose() {
        // Wait for stopper threads, tear down components, then kill the pool.
        ShutDownTracker.getInstance().ensureStopperThreadsFinished();
        disposeComponents();
        ExecutorServiceHolder.ourThreadExecutorsService.shutdownNow();
        super.dispose();
    }
    /** Registers a JVM shutdown task that disposes this application once. */
    private void registerShutdownHook() {
        ShutDownTracker.getInstance(); // Necessary to avoid creating an instance while already shutting down.
        ShutDownTracker.getInstance().registerShutdownTask(new Runnable() {
            @Override
            public void run() {
                if (isDisposed() || isDisposeInProgress()) {
                    return;
                }
                ShutDownTracker.invokeAndWait(isUnitTestMode(), true, new Runnable() {
                    @Override
                    public void run() {
                        // Skip if another application instance replaced us.
                        if (ApplicationManager.getApplication() != CompilerServerApplication.this) return;
                        myDisposeInProgress = true;
                        if (!disposeSelf(true)) {
                            myDisposeInProgress = false;
                        }
                    }
                });
            }
        });
    }
    /**
     * Closes every open project (optionally letting them veto), then disposes
     * this application.
     *
     * @return false if any project refused to close
     */
    private boolean disposeSelf(final boolean checkCanCloseProject) {
        final CommandProcessor commandProcessor = CommandProcessor.getInstance();
        final boolean[] canClose = {true};
        for (final Project project : ProjectManagerEx.getInstanceEx().getOpenProjects()) {
            try {
                commandProcessor.executeCommand(project, new Runnable() {
                    @Override
                    public void run() {
                        final ProjectManagerImpl manager = (ProjectManagerImpl)ProjectManagerEx.getInstanceEx();
                        if (!manager.closeProject(project, true, true, checkCanCloseProject)) {
                            canClose[0] = false;
                        }
                    }
                }, ApplicationBundle.message("command.exit"), null);
            }
            catch (Throwable e) {
                LOGGER.error(e);
            }
            if (!canClose[0]) {
                return false;
            }
        }
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
            @Override
            public void run() {
                Disposer.dispose(CompilerServerApplication.this);
            }
        });
        Disposer.assertIsEmpty();
        return true;
    }
    @Override
    public boolean isInternal() {
        return false;
    }
    @Override
    public boolean isEAP() {
        return false;
    }
    // --- read/write actions: executed inline; exceptions are logged, and the
    // --- Computable variants return null on failure.
    @Override
    public void runReadAction(@NotNull Runnable action) {
        try {
            action.run();
        }
        catch (Exception e) {
            LOGGER.error(e);
        }
    }
    @Override
    public <T> T runReadAction(@NotNull Computable<T> computation) {
        try {
            return computation.compute();
        }
        catch (Exception e) {
            LOGGER.error(e);
            return null;
        }
    }
    @Override
    public <T, E extends Throwable> T runReadAction(@NotNull ThrowableComputable<T, E> computation) throws E {
        return computation.compute();
    }
    @RequiredDispatchThread
    @Override
    public void runWriteAction(@NotNull Runnable action) {
        try {
            action.run();
        }
        catch (Exception e) {
            LOGGER.error(e);
        }
    }
    @RequiredDispatchThread
    @Override
    public <T> T runWriteAction(@NotNull Computable<T> computation) {
        try {
            return computation.compute();
        }
        catch (Exception e) {
            LOGGER.error(e);
            return null;
        }
    }
    @RequiredDispatchThread
    @Override
    public <T, E extends Throwable> T runWriteAction(@NotNull ThrowableComputable<T, E> computation) throws E {
        return computation.compute();
    }
    @RequiredDispatchThread
    @Override
    public boolean hasWriteAction(@Nullable Class<?> actionClass) {
        return true;
    }
    // --- thread assertions: no-ops, since every thread may read and write here.
    @RequiredReadAction
    @Override
    public void assertReadAccessAllowed() {
    }
    @RequiredWriteAction
    @Override
    public void assertWriteAccessAllowed() {
    }
    @RequiredDispatchThread
    @Override
    public void assertIsDispatchThread() {
    }
    // --- application listeners are ignored in this headless implementation.
    @Override
    public void addApplicationListener(@NotNull ApplicationListener listener) {
    }
    @Override
    public void addApplicationListener(@NotNull ApplicationListener listener, @NotNull Disposable parent) {
    }
    @Override
    public void removeApplicationListener(@NotNull ApplicationListener listener) {
    }
    // --- persistence is disabled (see isDoNotSave()).
    @Override
    public void saveAll() {
    }
    @Override
    public void saveSettings() {
    }
    @Override
    public void exit() {
    }
    @Override
    public boolean isWriteAccessAllowed() {
        return true;
    }
    @Override
    public boolean isReadAccessAllowed() {
        return true;
    }
    @Override
    public boolean isDispatchThread() {
        // Every thread is treated as the dispatch thread (no EDT exists).
        return true;
    }
    @NotNull
    @Override
    public ModalityInvokator getInvokator() {
        // NOTE(review): returns null despite @NotNull - callers that use the
        // invokator will NPE; confirm it is never requested in server mode.
        return null;
    }
    // --- invokeLater/invokeAndWait: executed synchronously on the calling
    // --- thread; modality state and expiration conditions are ignored.
    @Override
    public void invokeLater(@NotNull Runnable runnable) {
        try {
            runnable.run();
        }
        catch (Exception e) {
            LOGGER.error(e);
        }
    }
    @Override
    public void invokeLater(@NotNull Runnable runnable, @NotNull Condition expired) {
        try {
            runnable.run();
        }
        catch (Exception e) {
            LOGGER.error(e);
        }
    }
    @Override
    public void invokeLater(@NotNull Runnable runnable, @NotNull ModalityState state) {
        try {
            runnable.run();
        }
        catch (Exception e) {
            LOGGER.error(e);
        }
    }
    @Override
    public void invokeLater(@NotNull Runnable runnable, @NotNull ModalityState state, @NotNull Condition expired) {
        try {
            runnable.run();
        }
        catch (Exception e) {
            LOGGER.error(e);
        }
    }
    @Override
    public void invokeAndWait(@NotNull Runnable runnable, @NotNull ModalityState modalityState) {
        try {
            runnable.run();
        }
        catch (Exception e) {
            LOGGER.error(e);
        }
    }
    // --- modality: always NON_MODAL in a headless server.
    @NotNull
    @Override
    public ModalityState getCurrentModalityState() {
        return ModalityState.NON_MODAL;
    }
    @NotNull
    @Override
    public ModalityState getModalityStateForComponent(@NotNull Component c) {
        return ModalityState.NON_MODAL;
    }
    @NotNull
    @Override
    public ModalityState getDefaultModalityState() {
        return ModalityState.NON_MODAL;
    }
    @NotNull
    @Override
    public ModalityState getNoneModalityState() {
        return ModalityState.NON_MODAL;
    }
    @NotNull
    @Override
    public ModalityState getAnyModalityState() {
        return ModalityState.NON_MODAL;
    }
    @Override
    public long getStartTime() {
        return 0;
    }
    @RequiredDispatchThread
    @Override
    public long getIdleTime() {
        return 0;
    }
    @Override
    public boolean isUnitTestMode() {
        return false;
    }
    @Override
    public boolean isHeadlessEnvironment() {
        return true;
    }
    @Override
    public boolean isCompilerServerMode() {
        return true;
    }
    @Override
    public boolean isCommandLine() {
        return false;
    }
    @NotNull
    @Override
    public Future<?> executeOnPooledThread(@NotNull Runnable action) {
        return ExecutorServiceHolder.ourThreadExecutorsService.submit(action);
    }
    @NotNull
    @Override
    public <T> Future<T> executeOnPooledThread(@NotNull Callable<T> action) {
        return ExecutorServiceHolder.ourThreadExecutorsService.submit(action);
    }
    @Override
    public boolean isDisposeInProgress() {
        return myDisposeInProgress || ShutDownTracker.isShutdownHookRunning();
    }
    @Override
    public boolean isRestartCapable() {
        return false;
    }
    @Override
    public void restart() {
    }
    @Override
    public boolean isActive() {
        return true;
    }
    // --- locks are fake: both tokens are always the shared EMPTY token.
    @NotNull
    @Override
    public AccessToken acquireReadActionLock() {
        return AccessToken.EMPTY_ACCESS_TOKEN;
    }
    @RequiredDispatchThread
    @NotNull
    @Override
    public AccessToken acquireWriteActionLock(@Nullable Class marker) {
        return AccessToken.EMPTY_ACCESS_TOKEN;
    }
    /**
     * Loads the application settings store from the given options path,
     * wrapping storage failures in IOException.
     */
    @Override
    public void load(String path) throws IOException {
        getStateStore().setOptionsPath(path);
        getStateStore().setConfigPath(PathManager.getConfigPath());
        // Mark the load as a heavy process for the duration.
        AccessToken accessToken = HeavyProcessLatch.INSTANCE.processStarted("app store load");
        try {
            getStateStore().load();
        }
        catch (StateStorageException e) {
            throw new IOException(e.getMessage());
        }
        finally {
            accessToken.finish();
        }
    }
    @Override
    public boolean isLoaded() {
        return true;
    }
    @NotNull
    @Override
    public String getName() {
        return "idea";
    }
    @Override
    public boolean holdsReadLock() {
        return false;
    }
    @Override
    public boolean isWriteActionInProgress() {
        return false;
    }
    @Override
    public boolean isWriteActionPending() {
        return false;
    }
    @Override
    public void doNotSave() {
    }
    @Override
    public void doNotSave(boolean value) {
    }
    @Override
    public boolean isDoNotSave() {
        return true;
    }
    @Override
    public void exit(boolean force, boolean exitConfirmed) {
    }
    @Override
    public void restart(boolean force) {
    }
    // --- "progress" variants just run the process inline and report success.
    @RequiredDispatchThread
    @Override
    public boolean runProcessWithProgressSynchronously(@NotNull Runnable process,
                                                       @NotNull String progressTitle,
                                                       boolean canBeCanceled,
                                                       Project project) {
        process.run();
        return true;
    }
    @RequiredDispatchThread
    @Override
    public boolean runProcessWithProgressSynchronously(@NotNull Runnable process,
                                                       @NotNull String progressTitle,
                                                       boolean canBeCanceled,
                                                       @Nullable Project project,
                                                       JComponent parentComponent) {
        process.run();
        return true;
    }
    @RequiredDispatchThread
    @Override
    public boolean runProcessWithProgressSynchronously(@NotNull Runnable process,
                                                       @NotNull String progressTitle,
                                                       boolean canBeCanceled,
                                                       @Nullable Project project,
                                                       JComponent parentComponent,
                                                       String cancelText) {
        process.run();
        return true;
    }
    @RequiredDispatchThread
    @Override
    public void assertIsDispatchThread(@Nullable JComponent component) {
    }
    @Override
    public void assertTimeConsuming() {
    }
    @Override
    public void runEdtSafeAction(@NotNull Runnable runnable) {
        runnable.run();
    }
    @Override
    public boolean tryRunReadAction(@NotNull Runnable action) {
        action.run();
        return true;
    }
    @NotNull
    @Override
    public <T> T[] getExtensions(final ExtensionPointName<T> extensionPointName) {
        return Extensions.getRootArea().getExtensionPoint(extensionPointName).getExtensions();
    }
}
|
|
package funcoes;
import atributos.Fornecedor;
import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Data-access object for supplier ("fornecedor") records.
 *
 * All statements use PreparedStatement parameter binding instead of the
 * string-concatenated SQL of the previous version (which was open to SQL
 * injection), and JDBC resources are released via try-with-resources.
 */
public class FornecedorDAO {
    /**
     * Inserts a new supplier and returns the generated primary key.
     *
     * @param fornecedor supplier to persist (user id, contact id, name)
     * @return the auto-generated id, or 0 if the driver returned none
     * @throws RuntimeException wrapping any SQLException
     */
    public static int CadFornecedor(Fornecedor fornecedor){
        String sql = "INSERT INTO tabfornecedor(tabusuario_id_usuario, tabContato_id_contato,fornecedor) VALUES(?,?,?)";
        int id = 0;
        // FIX: Statement.RETURN_GENERATED_KEYS is required here; without it
        // getGeneratedKeys() yields no rows and the method always returned 0.
        try (PreparedStatement stmt = Conexao.getConnection()
                .prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) {
            stmt.setInt(1, fornecedor.getCodUsuario());
            stmt.setInt(2, fornecedor.getCodContato());
            stmt.setString(3, fornecedor.getFornecedor());
            stmt.executeUpdate();
            try (ResultSet rs = stmt.getGeneratedKeys()) {
                if (rs.next()) {
                    id = rs.getInt(1);
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(FornecedorDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException("Erro ao Cadastrar Fornecedor: ",ex);
        }
        return id;
    }
    /**
     * Loads the supplier(s) matching the given id from the vw_fornecedores view.
     *
     * @param id supplier id (id_forn)
     * @return list of matching suppliers (empty if none)
     * @throws RuntimeException wrapping any SQLException
     */
    public static ArrayList<Fornecedor> CarregaFornecedor(int id) {
        ArrayList<Fornecedor> fornecedor = new ArrayList<Fornecedor>();
        String sql = "select * from vw_fornecedores where id_forn = ?";
        try (PreparedStatement stmt = Conexao.getConnection().prepareStatement(sql)) {
            stmt.setInt(1, id);
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    Fornecedor f = new Fornecedor();
                    f.setIdForn(rs.getInt("id_forn"));
                    f.setFornecedor(rs.getString("fornecedor"));
                    f.setCodContato(rs.getInt("tabContato_id_contato"));
                    fornecedor.add(f);
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(FornecedorDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException("Erro ao carregar os dados do fornecedor: ", ex);
        }
        return fornecedor;
    }
    /**
     * Looks up the contact id (tabContato_id_contato) for a supplier.
     *
     * @param id supplier id (id_forn)
     * @return the contact id, or the Fornecedor default when no row matches
     * @throws RuntimeException wrapping any SQLException
     */
    public static int idContato(int id) {
        Fornecedor f = new Fornecedor();
        String sql = "SELECT tabContato_id_contato FROM tabfornecedor WHERE id_forn = ?";
        try (PreparedStatement stmt = Conexao.getConnection().prepareStatement(sql)) {
            stmt.setInt(1, id);
            try (ResultSet rs = stmt.executeQuery()) {
                while (rs.next()) {
                    f.setCodContato(rs.getInt("tabContato_id_contato"));
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(FornecedorDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException(ex);
        }
        return f.getCodContato();
    }
    /**
     * Deletes a supplier via the ExcluirFornecedor stored procedure.
     *
     * @param id supplier id (id_forn)
     * @throws RuntimeException wrapping any SQLException
     */
    public static void ExcluirFornecedor(int id) {
        try (CallableStatement stmt = Conexao.getConnection()
                .prepareCall("{call ExcluirFornecedor(?)}")) {
            stmt.setInt(1, id);
            stmt.execute();
        } catch (SQLException ex) {
            Logger.getLogger(FornecedorDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException("Erro ao excluir os dados do fornecedor: ",ex);
        }
    }
    /**
     * Updates a supplier's name.
     *
     * @param forn supplier carrying the new name
     * @param id   supplier id (id_forn) to update
     * @throws RuntimeException wrapping any SQLException
     */
    public static void UpdateFornecedor(Fornecedor forn, int id){
        String sql = "UPDATE tabfornecedor SET fornecedor = ? WHERE id_forn = ?";
        try (PreparedStatement stmt = Conexao.getConnection().prepareStatement(sql)) {
            stmt.setString(1, forn.getFornecedor());
            stmt.setInt(2, id);
            stmt.executeUpdate();
        } catch (SQLException ex) {
            Logger.getLogger(FornecedorDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException("Erro ao Alterar os dados do fornecedor: ",ex);
        }
    }
    /**
     * Lists all suppliers (id and name only).
     *
     * @return all suppliers in tabfornecedor (empty list if none)
     * @throws RuntimeException wrapping any SQLException
     */
    public static ArrayList<Fornecedor> ListarFornecedor(){
        ArrayList<Fornecedor> fornecedores = new ArrayList<Fornecedor>();
        String sql = "SELECT * FROM tabfornecedor";
        try (PreparedStatement stmt = Conexao.getConnection().prepareStatement(sql);
             ResultSet rs = stmt.executeQuery()) {
            while (rs.next()) {
                Fornecedor f = new Fornecedor();
                f.setIdForn(rs.getInt("id_forn"));
                f.setFornecedor(rs.getString("fornecedor"));
                fornecedores.add(f);
            }
        } catch (SQLException ex) {
            Logger.getLogger(FornecedorDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException("Erro ao Listar os dados do Clientes: ",ex);
        }
        return fornecedores;
    }
    /**
     * Checks whether a supplier with the given name already exists.
     *
     * @param fornecedor supplier name to look for
     * @return true if at least one row matches
     * @throws RuntimeException wrapping any SQLException
     */
    public static boolean VerificarFornecedor(String fornecedor) {
        boolean achou = false;
        String sql = "SELECT COUNT(0) FROM tabfornecedor WHERE fornecedor = ?";
        try (PreparedStatement stmt = Conexao.getConnection().prepareStatement(sql)) {
            stmt.setString(1, fornecedor);
            try (ResultSet rs = stmt.executeQuery()) {
                // COUNT always yields exactly one row; the previous
                // rs.first()/do-while dance (which needs a scrollable
                // ResultSet) is unnecessary.
                if (rs.next()) {
                    achou = rs.getInt(1) > 0;
                }
            }
        } catch (SQLException ex) {
            Logger.getLogger(FornecedorDAO.class.getName()).log(Level.SEVERE, null, ex);
            throw new RuntimeException("Erro ao Carregar os dados do fornecedor: ", ex);
        }
        return achou;
    }
}
|
|
/****************************************************************************
* Copyright (C) 2012 ecsec GmbH.
* All rights reserved.
* Contact: ecsec GmbH ([email protected])
*
* This file is part of the Open eCard App.
*
* GNU General Public License Usage
* This file may be used under the terms of the GNU General Public
* License version 3.0 as published by the Free Software Foundation
* and appearing in the file LICENSE.GPL included in the packaging of
* this file. Please review the following information to ensure the
* GNU General Public License version 3.0 requirements will be met:
* http://www.gnu.org/copyleft/gpl.html.
*
* Other Usage
* Alternatively, this file may be used in accordance with the terms
* and conditions contained in a signed written agreement between
* you and ecsec GmbH.
*
***************************************************************************/
package org.openecard.crypto.common.asn1.cvc;
import java.io.IOException;
import java.security.cert.CertificateException;
import java.util.ArrayList;
import java.util.Enumeration;
import org.openecard.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.openecard.bouncycastle.asn1.ASN1Sequence;
import org.openecard.bouncycastle.asn1.ASN1Set;
import org.openecard.bouncycastle.asn1.ASN1String;
import org.openecard.bouncycastle.asn1.ASN1TaggedObject;
import org.openecard.bouncycastle.asn1.DERIA5String;
import org.openecard.bouncycastle.asn1.DEROctetString;
import org.openecard.bouncycastle.asn1.DERSet;
import org.openecard.bouncycastle.asn1.DERTaggedObject;
import org.openecard.crypto.common.asn1.eac.oid.CVCertificatesObjectIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* See BSI-TR-03110, version 2.10, part 3, section C.
*
* <pre>
* CertificateDescription ::= SEQUENCE {
* descriptionType OBJECT IDENTIFIER,
* issuerName [1] UTF8String,
* issuerURL [2] PrintableString OPTIONAL,
* subjectName [3] UTF8String,
* subjectURL [4] PrintableString OPTIONAL,
* termsOfUsage [5] ANY DEFINED BY descriptionType,
* redirectURL [6] PrintableString OPTIONAL,
* commCertificates [7] SET OF OCTET STRING OPTIONAL
* }
* </pre>
*
* @author Moritz Horsch <[email protected]>
*/
public class CertificateDescription {
private static final Logger _logger = LoggerFactory.getLogger(CertificateDescription.class);
private String descriptionType;
private String issuerName;
private String issuerURL;
private String subjectName;
private String subjectURL;
private Object termsOfUsage;
private String redirectURL;
private ArrayList<byte[]> commCertificates;
private byte[] encoded;
/**
* Creates a new CertificateDescription.
*
* @param obj Encoded CertificateDescription
* @return CertificateDescription
*/
public static CertificateDescription getInstance(Object obj) throws CertificateException {
if (obj instanceof CertificateDescription) {
return (CertificateDescription) obj;
} else if (obj instanceof ASN1Set) {
return new CertificateDescription((ASN1Sequence) obj);
} else if (obj instanceof byte[]) {
try {
return new CertificateDescription((ASN1Sequence) ASN1Sequence.fromByteArray((byte[]) obj));
} catch (IOException e) {
_logger.error("Cannot parse CertificateDescription", e);
throw new IllegalArgumentException("Cannot parse CertificateDescription");
}
}
throw new IllegalArgumentException("Unknown object in factory: " + obj.getClass());
}
/**
* Creates a new CertificateDescription.
*
* @param seq Encoded CertificateDescription
*/
private CertificateDescription(ASN1Sequence seq) throws CertificateException {
try {
encoded = seq.getEncoded();
Enumeration elements = seq.getObjects();
descriptionType = ASN1ObjectIdentifier.getInstance(elements.nextElement()).toString();
while (elements.hasMoreElements()) {
ASN1TaggedObject taggedObject = DERTaggedObject.getInstance(elements.nextElement());
int tag = taggedObject.getTagNo();
switch (tag) {
case 1:
issuerName = ((ASN1String) taggedObject.getObject()).getString();
break;
case 2:
issuerURL = ((ASN1String) taggedObject.getObject()).getString();
break;
case 3:
subjectName = ((ASN1String) taggedObject.getObject()).getString();
break;
case 4:
subjectURL = ((ASN1String) taggedObject.getObject()).getString();
break;
case 5:
if (descriptionType.equals(CVCertificatesObjectIdentifier.id_plainFormat)) {
termsOfUsage = ((ASN1String) taggedObject.getObject()).getString();
} else if (descriptionType.equals(CVCertificatesObjectIdentifier.id_htmlFormat)) {
termsOfUsage = ((DERIA5String) taggedObject.getObject()).getString();
} else if (descriptionType.equals(CVCertificatesObjectIdentifier.id_pdfFormat)) {
termsOfUsage = ((DEROctetString) taggedObject.getObject()).getEncoded();
}
break;
case 6:
redirectURL = ((ASN1String) taggedObject.getObject()).getString();
break;
case 7:
Enumeration commCerts = ((DERSet) taggedObject.getObject()).getObjects();
commCertificates = new ArrayList<byte[]>();
while (commCerts.hasMoreElements()) {
commCertificates.add(((DEROctetString) commCerts.nextElement()).getEncoded());
}
break;
default:
throw new IllegalArgumentException("Unknown object in CertificateDescription");
}
}
} catch (IOException e) {
_logger.error("Cannot parse CertificateDescription", e);
throw new CertificateException("Cannot parse CertificateDescription");
}
}
/**
* Returns DescriptionType.
*
* @return DescriptionType
*/
public String getDescriptionType() {
return descriptionType;
}
/**
* Returns the IssuerName.
*
* @return IssuerName
*/
public String getIssuerName() {
return issuerName;
}
/**
* Returns the IssuerURL.
*
* @return IssuerURL
*/
public String getIssuerURL() {
return issuerURL;
}
/**
* Returns the SubjectName.
*
* @return SubjectName
*/
public String getSubjectName() {
return subjectName;
}
/**
* Returns the SubjectURL.
*
* @return SubjectURL
*/
public String getSubjectURL() {
return subjectURL;
}
/**
 * Getter for the terms of usage (tag 5 of the description).
 * <p>
 * The concrete runtime type depends on the description format: a {@link String}
 * for the plain-text and HTML formats, or a {@code byte[]} holding the encoded
 * document for the PDF format.
 *
 * @return the TermsOfUsage value
 */
public Object getTermsOfUsage() {
    return this.termsOfUsage;
}
/**
 * Getter for the redirect URL (tag 6 of the description).
 *
 * @return the RedirectURL value; may be null if the optional field was not present
 */
public String getRedirectURL() {
    return this.redirectURL;
}
/**
 * Getter for the encoded communication certificates (tag 7 of the description).
 * <p>
 * NOTE(review): this hands out the internal list without a defensive copy, so
 * caller-side mutations are visible to this object.
 *
 * @return the CommCertificates value
 */
public ArrayList<byte[]> getCommCertificates() {
    return this.commCertificates;
}
/**
 * Getter for the raw encoding of the whole certificate description.
 * <p>
 * NOTE(review): the internal array is returned without a defensive copy.
 *
 * @return certificate description as a byte array
 */
public byte[] getEncoded() {
    return this.encoded;
}
}
|
|
package org.jfrog.hudson.plugins.artifactory.generic;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Action;
import hudson.model.BuildListener;
import hudson.model.FreeStyleProject;
import hudson.model.Hudson;
import hudson.model.Result;
import hudson.tasks.BuildWrapper;
import hudson.tasks.BuildWrapperDescriptor;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.jfrog.build.api.Artifact;
import org.jfrog.build.api.Dependency;
import org.jfrog.build.api.dependency.BuildDependency;
import org.jfrog.build.client.ProxyConfiguration;
import org.jfrog.build.extractor.clientConfiguration.client.ArtifactoryBuildInfoClient;
import org.jfrog.build.extractor.clientConfiguration.client.ArtifactoryDependenciesClient;
import org.jfrog.hudson.plugins.artifactory.ArtifactoryBuilder;
import org.jfrog.hudson.plugins.artifactory.BuildInfoAwareConfigurator;
import org.jfrog.hudson.plugins.artifactory.DeployerOverrider;
import org.jfrog.hudson.plugins.artifactory.action.ActionableHelper;
import org.jfrog.hudson.plugins.artifactory.action.BuildInfoResultAction;
import org.jfrog.hudson.plugins.artifactory.config.ArtifactoryServer;
import org.jfrog.hudson.plugins.artifactory.config.Credentials;
import org.jfrog.hudson.plugins.artifactory.config.ServerDetails;
import org.jfrog.hudson.plugins.artifactory.util.IncludesExcludes;
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.StaplerRequest;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
/**
 * Freestyle Generic configurator
 * <p>
 * Jenkins {@link BuildWrapper} for free-style projects: {@link #setUp} resolves
 * dependencies from Artifactory before the build runs, and the returned
 * {@link Environment}'s tearDown deploys the collected artifacts (and,
 * optionally, a build-info record) once the build has finished.
 *
 * @author Shay Yaakov
 */
public class ArtifactoryGenericConfigurator extends BuildWrapper implements DeployerOverrider,
BuildInfoAwareConfigurator {
// Target Artifactory server + repository as selected in the job configuration.
private final ServerDetails details;
// Job-level credentials overriding the globally configured deployer credentials (may be null).
private final Credentials overridingDeployerCredentials;
// Ant-style pattern of workspace files to deploy after the build.
private final String deployPattern;
// Ant-style pattern of artifacts to resolve from Artifactory before the build.
private final String resolvePattern;
// Properties attached to deployed artifacts.
private final String matrixParams;
// Whether to publish a build-info record in addition to the artifacts.
private final boolean deployBuildInfo;
/**
 * Include environment variables in the generated build info
 */
private final boolean includeEnvVars;
private final IncludesExcludes envVarsPatterns;
private final boolean discardOldBuilds;
private final boolean discardBuildArtifacts;
// Filled in by setUp() and read back in tearDown(); transient because this
// wrapper is persisted with the project configuration, not per build.
private transient List<Dependency> publishedDependencies;
private transient List<BuildDependency> buildDependencies;
@DataBoundConstructor
public ArtifactoryGenericConfigurator(ServerDetails details, Credentials overridingDeployerCredentials,
String deployPattern, String resolvePattern, String matrixParams, boolean deployBuildInfo,
boolean includeEnvVars, IncludesExcludes envVarsPatterns, boolean discardOldBuilds,
boolean discardBuildArtifacts) {
this.details = details;
this.overridingDeployerCredentials = overridingDeployerCredentials;
this.deployPattern = deployPattern;
this.resolvePattern = resolvePattern;
this.matrixParams = matrixParams;
this.deployBuildInfo = deployBuildInfo;
this.includeEnvVars = includeEnvVars;
this.envVarsPatterns = envVarsPatterns;
this.discardOldBuilds = discardOldBuilds;
this.discardBuildArtifacts = discardBuildArtifacts;
}
// Name of the configured Artifactory server, or null when no details are bound.
public String getArtifactoryName() {
return details != null ? details.artifactoryName : null;
}
public String getArtifactoryUrl() {
return details != null ? details.getArtifactoryUrl() : null;
}
// DeployerOverrider: true when job-level deployer credentials were supplied.
public boolean isOverridingDefaultDeployer() {
return getOverridingDeployerCredentials() != null;
}
public String getRepositoryKey() {
return details.repositoryKey;
}
public Credentials getOverridingDeployerCredentials() {
return overridingDeployerCredentials;
}
public String getDeployPattern() {
return deployPattern;
}
public String getResolvePattern() {
return resolvePattern;
}
public String getMatrixParams() {
return matrixParams;
}
public boolean isDeployBuildInfo() {
return deployBuildInfo;
}
public boolean isIncludeEnvVars() {
return includeEnvVars;
}
public IncludesExcludes getEnvVarsPatterns() {
return envVarsPatterns;
}
public boolean isRunChecks() {
// There is no use of license checks in a generic build
return false;
}
// The following BuildInfoAwareConfigurator properties do not apply to generic
// (freestyle) builds and therefore return fixed values.
public String getViolationRecipients() {
return null;
}
public boolean isIncludePublishArtifacts() {
return false;
}
public String getScopes() {
return null;
}
public boolean isLicenseAutoDiscovery() {
return false;
}
public boolean isDiscardOldBuilds() {
return discardOldBuilds;
}
public boolean isDiscardBuildArtifacts() {
return discardBuildArtifacts;
}
public boolean isEnableIssueTrackerIntegration() {
return false;
}
public boolean isAggregateBuildIssues() {
return false;
}
public String getAggregationBuildStatus() {
return null;
}
/**
 * Looks up the globally configured server whose name matches
 * {@link #getArtifactoryName()}.
 *
 * @return the matching server, or null when none is configured under that name
 */
public ArtifactoryServer getArtifactoryServer() {
List<ArtifactoryServer> servers = getDescriptor().getArtifactoryServers();
for (ArtifactoryServer server : servers) {
if (server.getName().equals(getArtifactoryName())) {
return server;
}
}
return null;
}
@Override
public Collection<? extends Action> getProjectActions(AbstractProject project) {
return ActionableHelper.getArtifactoryProjectAction(details.getArtifactoryUrl(), project);
}
/**
 * Resolves the configured dependency pattern from Artifactory before the build
 * starts. Returning null (after logging the exception) tells Jenkins that the
 * setup step failed.
 */
@Override
public Environment setUp(final AbstractBuild build, Launcher launcher, BuildListener listener)
throws IOException, InterruptedException {
final String artifactoryServerName = getArtifactoryName();
if (StringUtils.isBlank(artifactoryServerName)) {
// No server selected for this job: behave as a no-op wrapper.
return super.setUp(build, launcher, listener);
}
final ArtifactoryServer artifactoryServer = getArtifactoryServer();
if (artifactoryServer == null) {
listener.getLogger().format("[JFROG] No Artifactory server configured for %s. " +
"Please check your configuration.", artifactoryServerName).println();
build.setResult(Result.FAILURE);
throw new IllegalArgumentException("No Artifactory server configured for " + artifactoryServerName);
}
Credentials preferredDeployer;
ArtifactoryServer server = getArtifactoryServer();
if (isOverridingDefaultDeployer()) {
preferredDeployer = getOverridingDeployerCredentials();
} else {
preferredDeployer = server.getResolvingCredentials();
}
// Mirror the Jenkins-wide proxy settings into the Artifactory client configuration.
hudson.ProxyConfiguration proxy = Hudson.getInstance().proxy;
ProxyConfiguration proxyConfiguration = null;
if (proxy != null && proxy.getName() != null) {
proxyConfiguration = new ProxyConfiguration();
proxyConfiguration.host = proxy.name;
proxyConfiguration.port = proxy.port;
proxyConfiguration.username = proxy.getUserName();
proxyConfiguration.password = proxy.getPassword();
}
ArtifactoryDependenciesClient dependenciesClient = server.createArtifactoryDependenciesClient(
preferredDeployer.getUsername(), preferredDeployer.getPassword(), proxyConfiguration,
listener);
try {
GenericArtifactsResolver artifactsResolver = new GenericArtifactsResolver(build, listener,
dependenciesClient, getResolvePattern());
publishedDependencies = artifactsResolver.retrievePublishedDependencies();
buildDependencies = artifactsResolver.retrieveBuildDependencies();
return createEnvironmentOnSuccessfulSetup();
} catch (Exception e) {
e.printStackTrace(listener.error(e.getMessage()));
} finally {
dependenciesClient.shutdown();
}
// Reached only on resolution failure: a null Environment aborts the build.
return null;
}
/**
 * Builds the Environment whose tearDown deploys the matched artifacts (and a
 * build-info record, when enabled) after a successful build.
 */
private Environment createEnvironmentOnSuccessfulSetup() {
return new Environment() {
@Override
public boolean tearDown(AbstractBuild build, BuildListener listener)
throws IOException, InterruptedException {
Result result = build.getResult();
if (result != null && result.isWorseThan(Result.SUCCESS)) {
return true; // build failed. Don't publish
}
Credentials preferredDeployer;
ArtifactoryServer server = getArtifactoryServer();
if (isOverridingDefaultDeployer()) {
preferredDeployer = getOverridingDeployerCredentials();
} else {
// NOTE(review): this path performs a *deployment* yet falls back to the
// server's resolving credentials - confirm this is intentional.
preferredDeployer = server.getResolvingCredentials();
}
ArtifactoryBuildInfoClient client = server.createArtifactoryClient(preferredDeployer.getUsername(),
preferredDeployer.getPassword(), server.createProxyConfiguration(Hudson.getInstance().proxy));
try {
GenericArtifactsDeployer artifactsDeployer = new GenericArtifactsDeployer(build,
ArtifactoryGenericConfigurator.this, listener, preferredDeployer);
artifactsDeployer.deploy();
List<Artifact> deployedArtifacts = artifactsDeployer.getDeployedArtifacts();
if (deployBuildInfo) {
new GenericBuildInfoDeployer(ArtifactoryGenericConfigurator.this, client, build,
listener, deployedArtifacts, buildDependencies, publishedDependencies).deploy();
// add the result action (prefer always the same index)
build.getActions().add(0, new BuildInfoResultAction(getArtifactoryUrl(), build));
}
return true;
} catch (Exception e) {
e.printStackTrace(listener.error(e.getMessage()));
} finally {
client.shutdown();
}
// failed
build.setResult(Result.FAILURE);
return true;
}
};
}
@Override
public DescriptorImpl getDescriptor() {
return (DescriptorImpl) super.getDescriptor();
}
@Extension(optional = true)
public static class DescriptorImpl extends BuildWrapperDescriptor {
public DescriptorImpl() {
super(ArtifactoryGenericConfigurator.class);
load();
}
// NOTE(review): this accepts item only when its class is FreeStyleProject (or a
// superclass of it) and rejects FreeStyleProject *subclasses*; verify whether
// "FreeStyleProject.class.isAssignableFrom(item.getClass())" was intended.
@Override
public boolean isApplicable(AbstractProject<?, ?> item) {
return item.getClass().isAssignableFrom(FreeStyleProject.class);
}
@Override
public String getDisplayName() {
return "Generic-Artifactory Integration";
}
@Override
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
req.bindParameters(this, "generic");
save();
return true;
}
/**
 * Returns the list of {@link ArtifactoryServer} instances configured globally.
 *
 * @return can be empty but never null.
 */
public List<ArtifactoryServer> getArtifactoryServers() {
ArtifactoryBuilder.DescriptorImpl descriptor = (ArtifactoryBuilder.DescriptorImpl)
Hudson.getInstance().getDescriptor(ArtifactoryBuilder.class);
return descriptor.getArtifactoryServers();
}
}
}
|
|
/*-
* -\-\-
* styx-client
* --
* Copyright (C) 2016 - 2017 Spotify AB
* --
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -/-/-
*/
package com.spotify.styx.client;
import static com.spotify.styx.client.GrpcContextKey.AUTHORIZATION_KEY;
import static com.spotify.styx.client.FutureOkHttpClient.forUri;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.auth.oauth2.GoogleCredentials;
import com.spotify.styx.api.BackfillPayload;
import com.spotify.styx.api.BackfillsPayload;
import com.spotify.styx.api.ResourcesPayload;
import com.spotify.styx.api.RunStateDataPayload;
import com.spotify.styx.api.TestServiceAccountUsageAuthorizationRequest;
import com.spotify.styx.api.TestServiceAccountUsageAuthorizationRequestBuilder;
import com.spotify.styx.api.TestServiceAccountUsageAuthorizationResponse;
import com.spotify.styx.model.Backfill;
import com.spotify.styx.model.BackfillInput;
import com.spotify.styx.model.EditableBackfillInput;
import com.spotify.styx.model.Event;
import com.spotify.styx.model.Resource;
import com.spotify.styx.model.TriggerParameters;
import com.spotify.styx.model.TriggerRequest;
import com.spotify.styx.model.Workflow;
import com.spotify.styx.model.WorkflowConfiguration;
import com.spotify.styx.model.WorkflowId;
import com.spotify.styx.model.WorkflowInstance;
import com.spotify.styx.model.WorkflowState;
import com.spotify.styx.model.WorkflowWithState;
import com.spotify.styx.model.data.EventInfo;
import com.spotify.styx.model.data.WorkflowInstanceExecutionData;
import com.spotify.styx.serialization.Json;
import com.spotify.styx.util.EventUtil;
import java.io.IOException;
import java.net.URI;
import java.security.GeneralSecurityException;
import java.time.Duration;
import java.time.Instant;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import okhttp3.HttpUrl;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.ResponseBody;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Styx OkHttp Client Implementation. In case of API errors, the {@link Throwable} in the returned
 * {@link CompletionStage} will be of kind {@link ApiErrorException}. Other errors will be treated
 * as {@link RuntimeException} instead.
 */
class StyxOkHttpClient implements StyxClient {
private static final Logger LOG = LoggerFactory.getLogger(StyxOkHttpClient.class);
// Timeouts applied when the caller does not supply a pre-configured OkHttpClient.
private static final Duration DEFAULT_CONNECT_TIMEOUT = Duration.ofSeconds(10);
private static final Duration DEFAULT_READ_TIMEOUT = Duration.ofSeconds(90);
private static final Duration DEFAULT_WRITE_TIMEOUT = Duration.ofSeconds(90);
// Every request is issued under the "/api/<version>" path prefix (see urlBuilder()).
static final String STYX_API_VERSION = "v3";
// Sent as the User-Agent header on every request (see decorateRequest()).
private static final String STYX_CLIENT_VERSION =
"Styx Client " + StyxOkHttpClient.class.getPackage().getImplementationVersion();
private final URI apiHost;
private final FutureOkHttpClient client;
private final GoogleIdTokenAuth auth;
private StyxOkHttpClient(String apiHost, FutureOkHttpClient client, GoogleIdTokenAuth auth) {
// Accept either a full URI or a bare host name; a bare host defaults to https.
if (apiHost.contains("://")) {
this.apiHost = URI.create(apiHost);
} else {
this.apiHost = URI.create("https://" + apiHost);
}
this.client = Objects.requireNonNull(client, "client");
this.auth = Objects.requireNonNull(auth, "auth");
}
// OkHttpClient with the default Styx timeouts, used by the create(...) factories.
static OkHttpClient defaultOkHttpClient() {
return new OkHttpClient.Builder()
.connectTimeout(DEFAULT_CONNECT_TIMEOUT.getSeconds(), TimeUnit.SECONDS)
.readTimeout(DEFAULT_READ_TIMEOUT.getSeconds(), TimeUnit.SECONDS)
.writeTimeout(DEFAULT_WRITE_TIMEOUT.getSeconds(), TimeUnit.SECONDS)
.build();
}
// Factory overloads: callers may supply their own OkHttpClient and/or Google credentials;
// otherwise defaults are used.
public static StyxClient create(String apiHost) {
return create(apiHost, FutureOkHttpClient.create(defaultOkHttpClient()), GoogleIdTokenAuth.ofDefaultCredential());
}
public static StyxClient create(String apiHost, GoogleCredentials credentials) {
return create(apiHost, FutureOkHttpClient.create(defaultOkHttpClient()), GoogleIdTokenAuth.of(credentials));
}
public static StyxClient create(String apiHost, OkHttpClient client) {
return create(apiHost, FutureOkHttpClient.create(client), GoogleIdTokenAuth.ofDefaultCredential());
}
public static StyxClient create(String apiHost, OkHttpClient client, GoogleCredentials credentials) {
return create(apiHost, FutureOkHttpClient.create(client), GoogleIdTokenAuth.of(credentials));
}
static StyxClient create(String apiHost, FutureOkHttpClient client, GoogleIdTokenAuth auth) {
return new StyxOkHttpClient(apiHost, client, auth);
}
@Override
public CompletionStage<RunStateDataPayload> activeStates(Optional<String> componentId) {
var url = urlBuilder("status", "activeStates");
componentId.ifPresent(id -> url.addQueryParameter("component", id));
return execute(forUri(url), RunStateDataPayload.class);
}
// Fetches the raw events JSON and converts each {timestamp, event} entry into an
// EventInfo; unknown event types fall back to the "@type" field of the raw JSON.
@Override
public CompletionStage<List<EventInfo>> eventsForWorkflowInstance(String componentId,
String workflowId,
String parameter) {
return execute(forUri(urlBuilder("status", "events", componentId, workflowId, parameter)))
.thenApply(response -> {
final JsonNode jsonNode;
try (final ResponseBody responseBody = response.body()) {
assert responseBody != null;
jsonNode = Json.OBJECT_MAPPER.readTree(responseBody.bytes());
} catch (IOException e) {
throw new RuntimeException("Invalid json returned from API", e);
}
if (!jsonNode.isObject()) {
throw new RuntimeException("Unexpected json returned from API");
}
final ArrayNode events = ((ObjectNode) jsonNode).withArray("events");
return StreamSupport.stream(events.spliterator(), false)
.map(eventWithTimestamp -> {
final long ts = eventWithTimestamp.get("timestamp").asLong();
final JsonNode event = eventWithTimestamp.get("event");
try {
final Event typedEvent = Json.OBJECT_MAPPER.convertValue(event, Event.class);
return EventInfo.create(ts, EventUtil.name(typedEvent), EventUtil.info(typedEvent));
} catch (IllegalArgumentException e) {
// fall back to just inspecting the json
return EventInfo.create(ts, event.get("@type").asText(), "");
}
})
.collect(Collectors.toList());
});
}
@Override
public CompletionStage<TestServiceAccountUsageAuthorizationResponse> testServiceAccountUsageAuthorization(
String serviceAccountEmail, String principalEmail) {
final TestServiceAccountUsageAuthorizationRequest request = new TestServiceAccountUsageAuthorizationRequestBuilder()
.serviceAccount(serviceAccountEmail)
.principal(principalEmail).build();
return execute(forUri(urlBuilder("status", "testServiceAccountUsageAuthorization"), "POST", request),
TestServiceAccountUsageAuthorizationResponse.class);
}
@Override
public CompletionStage<Workflow> workflow(String componentId, String workflowId) {
return execute(forUri(urlBuilder("workflows", componentId, workflowId)), Workflow.class);
}
@Override
public CompletionStage<List<Workflow>> workflows(String componentId) {
return execute(forUri(urlBuilder("workflows", componentId)), Workflow[].class)
.thenApply(Arrays::asList);
}
@Override
public CompletionStage<List<Workflow>> workflows() {
return execute(forUri(urlBuilder("workflows")), Workflow[].class)
.thenApply(Arrays::asList);
}
@Override
public CompletionStage<Workflow> createOrUpdateWorkflow(String componentId, WorkflowConfiguration workflowConfig) {
return execute(forUri(urlBuilder("workflows", componentId), "POST", workflowConfig),
Workflow.class);
}
@Override
public CompletionStage<Void> deleteWorkflow(String componentId, String workflowId) {
return execute(forUri(urlBuilder("workflows", componentId, workflowId), "DELETE"))
.thenApply(response -> null);
}
@Override
public CompletionStage<WorkflowState> workflowState(String componentId, String workflowId) {
return execute(forUri(urlBuilder("workflows", componentId, workflowId, "state")),
WorkflowState.class);
}
@Override
public CompletionStage<WorkflowWithState> workflowWithState(String componentId, String workflowId) {
return execute(forUri(urlBuilder("workflows", componentId, workflowId, "full")), WorkflowWithState.class);
}
@Override
public CompletionStage<WorkflowInstanceExecutionData> workflowInstanceExecutions(String componentId,
String workflowId,
String parameter) {
return execute(forUri(urlBuilder("workflows", componentId, workflowId, "instances", parameter)),
WorkflowInstanceExecutionData.class);
}
@Override
public CompletionStage<WorkflowState> updateWorkflowState(String componentId, String workflowId,
WorkflowState workflowState) {
return execute(forUri(urlBuilder("workflows", componentId, workflowId, "state"), "PATCH", workflowState),
WorkflowState.class);
}
// Convenience overload: trigger with no parameters and no future-date allowance.
@Override
public CompletionStage<Void> triggerWorkflowInstance(String componentId, String workflowId,
String parameter) {
return triggerWorkflowInstance(componentId, workflowId, parameter, TriggerParameters.zero());
}
@Override
public CompletionStage<Void> triggerWorkflowInstance(String componentId,
String workflowId,
String parameter,
TriggerParameters triggerParameters) {
return triggerWorkflowInstance(componentId, workflowId, parameter, triggerParameters, false);
}
@Override
public CompletionStage<Void> triggerWorkflowInstance(String componentId,
String workflowId,
String parameter,
TriggerParameters triggerParameters,
boolean allowFuture) {
final TriggerRequest triggerRequest =
TriggerRequest.of(WorkflowId.create(componentId, workflowId), parameter, triggerParameters);
return execute(
forUri(urlBuilder("scheduler", "trigger")
.addQueryParameter("allowFuture", String.valueOf(allowFuture)), "POST", triggerRequest))
.thenApply(response -> null);
}
@Override
public CompletionStage<Void> haltWorkflowInstance(String componentId,
String workflowId,
String parameter) {
var url = urlBuilder("scheduler", "halt");
var workflowInstance = WorkflowInstance.create(
WorkflowId.create(componentId, workflowId),
parameter);
return execute(forUri(url, "POST", workflowInstance))
.thenApply(response -> null);
}
@Override
public CompletionStage<Void> retryWorkflowInstance(String componentId,
String workflowId,
String parameter) {
var url = urlBuilder("scheduler", "retry");
var workflowInstance = WorkflowInstance.create(
WorkflowId.create(componentId, workflowId),
parameter);
return execute(forUri(url, "POST", workflowInstance))
.thenApply(response -> null);
}
@Override
public CompletionStage<Resource> resourceCreate(String resourceId, int concurrency) {
final Resource resource = Resource.create(resourceId, concurrency);
return execute(forUri(urlBuilder("resources"), "POST", resource),
Resource.class);
}
@Override
public CompletionStage<Resource> resourceEdit(String resourceId, int concurrency) {
final Resource resource = Resource.create(resourceId, concurrency);
return execute(forUri(urlBuilder("resources", resourceId), "PUT", resource),
Resource.class);
}
@Override
public CompletionStage<Resource> resource(String resourceId) {
var url = urlBuilder("resources", resourceId);
return execute(forUri(url), Resource.class);
}
@Override
public CompletionStage<ResourcesPayload> resourceList() {
var url = urlBuilder("resources");
return execute(forUri(url), ResourcesPayload.class);
}
@Override
public CompletionStage<Backfill> backfillCreate(String componentId, String workflowId,
String start, String end,
int concurrency) {
return backfillCreate(componentId, workflowId, start, end, concurrency, null);
}
// start/end are parsed with Instant.parse, i.e. ISO-8601 instants are expected.
@Override
public CompletionStage<Backfill> backfillCreate(String componentId, String workflowId,
String start, String end,
int concurrency,
String description) {
final BackfillInput backfill = BackfillInput.newBuilder()
.start(Instant.parse(start))
.end(Instant.parse(end))
.component(componentId)
.workflow(workflowId)
.concurrency(concurrency)
.description(Optional.ofNullable(description))
.build();
return backfillCreate(backfill);
}
@Override
public CompletionStage<Backfill> backfillCreate(BackfillInput backfill) {
return backfillCreate(backfill, false);
}
@Override
public CompletionStage<Backfill> backfillCreate(BackfillInput backfill, boolean allowFuture) {
return execute(forUri(
urlBuilder("backfills")
.addQueryParameter("allowFuture", String.valueOf(allowFuture)),
"POST", backfill), Backfill.class);
}
@Override
public CompletionStage<Backfill> backfillEditConcurrency(String backfillId, int concurrency) {
final EditableBackfillInput editableBackfillInput = EditableBackfillInput.newBuilder()
.id(backfillId)
.concurrency(concurrency)
.build();
var url = urlBuilder("backfills", backfillId);
return execute(forUri(url, "PUT", editableBackfillInput), Backfill.class);
}
@Override
public CompletionStage<Void> backfillHalt(String backfillId) {
return backfillHalt(backfillId, false);
}
@Override
public CompletionStage<Void> backfillHalt(String backfillId, boolean graceful) {
var url = urlBuilder("backfills", backfillId);
url.addQueryParameter("graceful", Boolean.toString(graceful));
return execute(forUri(url, "DELETE")).thenApply(response -> null);
}
@Override
public CompletionStage<BackfillPayload> backfill(String backfillId, boolean includeStatus) {
var url = urlBuilder("backfills", backfillId);
url.addQueryParameter("status", Boolean.toString(includeStatus));
return execute(forUri(url), BackfillPayload.class);
}
@Override
public CompletionStage<BackfillsPayload> backfillList(Optional<String> componentId,
Optional<String> workflowId,
boolean showAll,
boolean includeStatus) {
var url = urlBuilder("backfills");
componentId.ifPresent(c -> url.addQueryParameter("component", c));
workflowId.ifPresent(w -> url.addQueryParameter("workflow", w));
url.addQueryParameter("showAll", Boolean.toString(showAll));
url.addQueryParameter("status", Boolean.toString(includeStatus));
return execute(forUri(url), BackfillsPayload.class);
}
/**
 * Executes the request and deserializes the JSON response body into {@code tClass}.
 */
private <T> CompletionStage<T> execute(Request request, Class<T> tClass) {
return execute(request).thenApply(response -> {
try (final ResponseBody responseBody = response.body()) {
assert responseBody != null;
return Json.OBJECT_MAPPER.readValue(responseBody.bytes(), tClass);
} catch (IOException e) {
throw new RuntimeException("Error while reading the received payload: " + e.getMessage(), e);
}
});
}
/**
 * Executes the request with an auth token (from the gRPC context when present,
 * otherwise freshly obtained) and a generated X-Request-Id header. Transport
 * failures surface as {@link ClientErrorException}; non-2xx responses as
 * {@link ApiErrorException}.
 */
private CompletionStage<Response> execute(Request request) {
var authToken = Optional
.ofNullable(AUTHORIZATION_KEY.get())
.or(() -> {
try {
return auth.getToken(apiHost.toString());
} catch (IOException | GeneralSecurityException e) {
// Credential probably invalid, configured wrongly or the token request failed.
throw new ClientErrorException("Authentication failure: " + e.getMessage(), e);
}
});
final String requestId = UUID.randomUUID().toString().replace("-", ""); // UUID with no dashes, easier to deal with
return client.send(decorateRequest(request, requestId, authToken)).handle((response, e) -> {
if (e != null) {
throw new ClientErrorException("Request failed: " + request.method() + " " + request.url(), e);
} else {
final String effectiveRequestId;
final String responseRequestId = response.headers().get("X-Request-Id");
if (responseRequestId != null && !responseRequestId.equals(requestId)) {
// If some proxy etc dropped our request ID header, we might get another one back.
effectiveRequestId = responseRequestId;
LOG.warn("Request ID mismatch: '{}' != '{}'", requestId, responseRequestId);
} else {
effectiveRequestId = requestId;
}
if (!response.isSuccessful()) {
throw new ApiErrorException(response.code() + " " + response.message(), response.code(),
authToken.isPresent(), effectiveRequestId);
}
return response;
}
});
}
// Adds the standard headers: User-Agent, X-Request-Id and, when a token is
// available, an Authorization bearer token.
private Request decorateRequest(Request request, String requestId, Optional<String> authToken) {
var builder = request
.newBuilder()
.addHeader("User-Agent", STYX_CLIENT_VERSION)
.addHeader("X-Request-Id", requestId);
authToken.ifPresent(t -> builder.addHeader("Authorization", "Bearer " + t));
return builder.build();
}
// Builds "<scheme>://<host>[:port]/api/<STYX_API_VERSION>/<pathSegments...>",
// copying the scheme, host and (if explicit) port from apiHost.
private HttpUrl.Builder urlBuilder(String... pathSegments) {
var builder = new HttpUrl.Builder()
.scheme(apiHost.getScheme())
.host(apiHost.getHost())
.addPathSegment("api")
.addPathSegment(STYX_API_VERSION);
Arrays.stream(pathSegments).forEach(builder::addPathSegment);
if (apiHost.getPort() != -1) {
builder.port(apiHost.getPort());
}
return builder;
}
@Override
public void close() {
client.close();
}
}
|
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.apigateway.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Represents a method response of a given HTTP status code returned to the client. The method response is passed from
* the back end through the associated integration response that can be transformed using a mapping template.
* </p>
* <div class="remarks">
* <p/>
* <h4>Example: A <b>MethodResponse</b> instance of an API</h4>
* <h5>Request</h5>
* <p>
* The example request retrieves a <b>MethodResponse</b> of the 200 status code.
* </p>
*
* <pre>
* <code>GET /restapis/fugvjdxtri/resources/3kzxbg5sa2/methods/GET/responses/200 HTTP/1.1 Content-Type: application/json Host: apigateway.us-east-1.amazonaws.com X-Amz-Date: 20160603T222952Z Authorization: AWS4-HMAC-SHA256 Credential={access_key_ID}/20160603/us-east-1/apigateway/aws4_request, SignedHeaders=content-type;host;x-amz-date, Signature={sig4_hash}</code>
* </pre>
*
* <h5>Response</h5>
* <p>
* The successful response returns <code>200 OK</code> status and a payload as follows:
* </p>
*
* <pre>
* <code>{ "_links": { "curies": { "href": "https://docs.aws.amazon.com/apigateway/latest/developerguide/restapi-method-response-{rel}.html", "name": "methodresponse", "templated": true }, "self": { "href": "/restapis/fugvjdxtri/resources/3kzxbg5sa2/methods/GET/responses/200", "title": "200" }, "methodresponse:delete": { "href": "/restapis/fugvjdxtri/resources/3kzxbg5sa2/methods/GET/responses/200" }, "methodresponse:update": { "href": "/restapis/fugvjdxtri/resources/3kzxbg5sa2/methods/GET/responses/200" } }, "responseModels": { "application/json": "Empty" }, "responseParameters": { "method.response.header.Content-Type": false }, "statusCode": "200" }</code>
* </pre>
* <p/>
* </div> <div class="seeAlso"> <a>Method</a>, <a>IntegrationResponse</a>, <a>Integration</a> <a
* href="https://docs.aws.amazon.com/apigateway/latest/developerguide/how-to-create-api.html">Creating an API</a> </div>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class MethodResponse implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The method response's status code.
* </p>
*/
private String statusCode;
/**
* <p>
* A key-value map specifying required or optional response parameters that API Gateway can send back to the caller.
* A key defines a method response header and the value specifies whether the associated method response header is
* required or not. The expression of the key must match the pattern <code>method.response.header.{name}</code>,
* where <code>name</code> is a valid and unique header name. API Gateway passes certain integration response data
* to the method response headers specified here according to the mapping you prescribe in the API's
* <a>IntegrationResponse</a>. The integration response data that can be mapped include an integration response
* header expressed in <code>integration.response.header.{name}</code>, a static value enclosed within a pair of
* single quotes (e.g., <code>'application/json'</code>), or a JSON expression from the back-end response payload in
* the form of <code>integration.response.body.{JSON-expression}</code>, where <code>JSON-expression</code> is a
* valid JSON expression without the <code>$</code> prefix.)
* </p>
*/
private java.util.Map<String, Boolean> responseParameters;
/**
* <p>
* Specifies the <a>Model</a> resources used for the response's content-type. Response models are represented as a
* key/value map, with a content-type as the key and a <a>Model</a> name as the value.
* </p>
*/
private java.util.Map<String, String> responseModels;
/**
 * Sets the HTTP status code of this method response.
 *
 * @param value
 *        the method response's status code
 */
public void setStatusCode(String value) {
    this.statusCode = value;
}
/**
 * Returns the HTTP status code of this method response.
 *
 * @return the method response's status code
 */
public String getStatusCode() {
    return statusCode;
}
/**
 * Fluent variant of {@link #setStatusCode(String)}.
 *
 * @param statusCode
 *        the method response's status code
 * @return this object, so that method calls can be chained together
 */
public MethodResponse withStatusCode(String statusCode) {
    setStatusCode(statusCode);
    return this;
}
/**
* <p>
* A key-value map specifying required or optional response parameters that API Gateway can send back to the caller.
* A key defines a method response header and the value specifies whether the associated method response header is
* required or not. The expression of the key must match the pattern <code>method.response.header.{name}</code>,
* where <code>name</code> is a valid and unique header name. API Gateway passes certain integration response data
* to the method response headers specified here according to the mapping you prescribe in the API's
* <a>IntegrationResponse</a>. The integration response data that can be mapped include an integration response
* header expressed in <code>integration.response.header.{name}</code>, a static value enclosed within a pair of
* single quotes (e.g., <code>'application/json'</code>), or a JSON expression from the back-end response payload in
* the form of <code>integration.response.body.{JSON-expression}</code>, where <code>JSON-expression</code> is a
* valid JSON expression without the <code>$</code> prefix.)
* </p>
*
* @return A key-value map specifying required or optional response parameters that API Gateway can send back to the
* caller. A key defines a method response header and the value specifies whether the associated method
* response header is required or not. The expression of the key must match the pattern
* <code>method.response.header.{name}</code>, where <code>name</code> is a valid and unique header name.
* API Gateway passes certain integration response data to the method response headers specified here
* according to the mapping you prescribe in the API's <a>IntegrationResponse</a>. The integration response
* data that can be mapped include an integration response header expressed in
* <code>integration.response.header.{name}</code>, a static value enclosed within a pair of single quotes
* (e.g., <code>'application/json'</code>), or a JSON expression from the back-end response payload in the
* form of <code>integration.response.body.{JSON-expression}</code>, where <code>JSON-expression</code> is a
* valid JSON expression without the <code>$</code> prefix.)
*/
public java.util.Map<String, Boolean> getResponseParameters() {
return responseParameters;
}
    /**
     * <p>
     * Sets the key-value map of response parameters that API Gateway can send back to the caller. Each key names a
     * method response header and must match the pattern <code>method.response.header.{name}</code>, where
     * <code>name</code> is a valid and unique header name; each value specifies whether the header is required.
     * API Gateway maps integration response data to these headers as prescribed in the API's
     * <a>IntegrationResponse</a>: an integration response header (<code>integration.response.header.{name}</code>),
     * a static value enclosed within single quotes (e.g., <code>'application/json'</code>), or a JSON expression
     * from the back-end response payload (<code>integration.response.body.{JSON-expression}</code>, where
     * <code>JSON-expression</code> is a valid JSON expression without the <code>$</code> prefix).
     * </p>
     *
     * @param responseParameters
     *        The response-parameter map; stored as-is (not copied).
     */
    public void setResponseParameters(java.util.Map<String, Boolean> responseParameters) {
        this.responseParameters = responseParameters;
    }
    /**
     * <p>
     * Fluent variant of {@link #setResponseParameters(java.util.Map)}. See that method for the semantics of the map's
     * keys (<code>method.response.header.{name}</code>) and values (whether the header is required).
     * </p>
     *
     * @param responseParameters
     *        The response-parameter map; stored as-is (not copied).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public MethodResponse withResponseParameters(java.util.Map<String, Boolean> responseParameters) {
        setResponseParameters(responseParameters);
        return this;
    }
/**
* Add a single ResponseParameters entry
*
* @see MethodResponse#withResponseParameters
* @returns a reference to this object so that method calls can be chained together.
*/
public MethodResponse addResponseParametersEntry(String key, Boolean value) {
if (null == this.responseParameters) {
this.responseParameters = new java.util.HashMap<String, Boolean>();
}
if (this.responseParameters.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.responseParameters.put(key, value);
return this;
}
    /**
     * Removes all the entries added into ResponseParameters.
     * <p>
     * Resets the backing map to <code>null</code>; a subsequent
     * {@link #addResponseParametersEntry} allocates a fresh map.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public MethodResponse clearResponseParametersEntries() {
        this.responseParameters = null;
        return this;
    }
/**
* <p>
* Specifies the <a>Model</a> resources used for the response's content-type. Response models are represented as a
* key/value map, with a content-type as the key and a <a>Model</a> name as the value.
* </p>
*
* @return Specifies the <a>Model</a> resources used for the response's content-type. Response models are
* represented as a key/value map, with a content-type as the key and a <a>Model</a> name as the value.
*/
public java.util.Map<String, String> getResponseModels() {
return responseModels;
}
    /**
     * <p>
     * Sets the <a>Model</a> resources used for the response's content-type. Response models are represented as a
     * key/value map, with a content-type as the key and a <a>Model</a> name as the value.
     * </p>
     *
     * @param responseModels
     *        The response-model map; stored as-is (not copied).
     */
    public void setResponseModels(java.util.Map<String, String> responseModels) {
        this.responseModels = responseModels;
    }
    /**
     * <p>
     * Fluent variant of {@link #setResponseModels(java.util.Map)}: a key/value map with a content-type as the key and
     * a <a>Model</a> name as the value.
     * </p>
     *
     * @param responseModels
     *        The response-model map; stored as-is (not copied).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public MethodResponse withResponseModels(java.util.Map<String, String> responseModels) {
        setResponseModels(responseModels);
        return this;
    }
/**
* Add a single ResponseModels entry
*
* @see MethodResponse#withResponseModels
* @returns a reference to this object so that method calls can be chained together.
*/
public MethodResponse addResponseModelsEntry(String key, String value) {
if (null == this.responseModels) {
this.responseModels = new java.util.HashMap<String, String>();
}
if (this.responseModels.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.responseModels.put(key, value);
return this;
}
    /**
     * Removes all the entries added into ResponseModels.
     * <p>
     * Resets the backing map to <code>null</code>; a subsequent
     * {@link #addResponseModelsEntry} allocates a fresh map.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public MethodResponse clearResponseModelsEntries() {
        this.responseModels = null;
        return this;
    }
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getStatusCode() != null)
sb.append("StatusCode: ").append(getStatusCode()).append(",");
if (getResponseParameters() != null)
sb.append("ResponseParameters: ").append(getResponseParameters()).append(",");
if (getResponseModels() != null)
sb.append("ResponseModels: ").append(getResponseModels());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof MethodResponse == false)
return false;
MethodResponse other = (MethodResponse) obj;
if (other.getStatusCode() == null ^ this.getStatusCode() == null)
return false;
if (other.getStatusCode() != null && other.getStatusCode().equals(this.getStatusCode()) == false)
return false;
if (other.getResponseParameters() == null ^ this.getResponseParameters() == null)
return false;
if (other.getResponseParameters() != null && other.getResponseParameters().equals(this.getResponseParameters()) == false)
return false;
if (other.getResponseModels() == null ^ this.getResponseModels() == null)
return false;
if (other.getResponseModels() != null && other.getResponseModels().equals(this.getResponseModels()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getStatusCode() == null) ? 0 : getStatusCode().hashCode());
hashCode = prime * hashCode + ((getResponseParameters() == null) ? 0 : getResponseParameters().hashCode());
hashCode = prime * hashCode + ((getResponseModels() == null) ? 0 : getResponseModels().hashCode());
return hashCode;
}
@Override
public MethodResponse clone() {
try {
return (MethodResponse) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
    /**
     * Marshals this object's state via the generated MethodResponseMarshaller.
     * Internal SDK API — not intended for use by SDK clients.
     */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.apigateway.model.transform.MethodResponseMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
|
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.route53.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* The input for a ListResourceRecordSets request.
* </p>
*/
public class ListResourceRecordSetsRequest extends AmazonWebServiceRequest
implements Serializable, Cloneable {
/**
* <p>
* The ID of the hosted zone that contains the resource record sets that you
* want to get.
* </p>
*/
private String hostedZoneId;
/**
* <p>
* The first name in the lexicographic ordering of domain names that you
* want the <code>ListResourceRecordSets</code> request to list.
* </p>
*/
private String startRecordName;
/**
* <p>
* The DNS type at which to begin the listing of resource record sets.
* </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> |
* <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> |
* <code>SPF</code> | <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* </p>
*/
private String startRecordType;
/**
* <p>
* <i>Weighted resource record sets only:</i> If results were truncated for
* a given DNS name and type, specify the value of
* <code>ListResourceRecordSetsResponse$NextRecordIdentifier</code> from the
* previous response to get the next resource record set that has the
* current DNS name and type.
* </p>
*/
private String startRecordIdentifier;
/**
* <p>
* The maximum number of records you want in the response body.
* </p>
*/
private String maxItems;
/**
* Default constructor for ListResourceRecordSetsRequest object. Callers
* should use the setter or fluent setter (with...) methods to initialize
* the object after creating it.
*/
public ListResourceRecordSetsRequest() {
}
/**
* Constructs a new ListResourceRecordSetsRequest object. Callers should use
* the setter or fluent setter (with...) methods to initialize any
* additional object members.
*
* @param hostedZoneId
* The ID of the hosted zone that contains the resource record sets
* that you want to get.
*/
public ListResourceRecordSetsRequest(String hostedZoneId) {
setHostedZoneId(hostedZoneId);
}
/**
* <p>
* The ID of the hosted zone that contains the resource record sets that you
* want to get.
* </p>
*
* @param hostedZoneId
* The ID of the hosted zone that contains the resource record sets
* that you want to get.
*/
public void setHostedZoneId(String hostedZoneId) {
this.hostedZoneId = hostedZoneId;
}
/**
* <p>
* The ID of the hosted zone that contains the resource record sets that you
* want to get.
* </p>
*
* @return The ID of the hosted zone that contains the resource record sets
* that you want to get.
*/
public String getHostedZoneId() {
return this.hostedZoneId;
}
/**
* <p>
* The ID of the hosted zone that contains the resource record sets that you
* want to get.
* </p>
*
* @param hostedZoneId
* The ID of the hosted zone that contains the resource record sets
* that you want to get.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListResourceRecordSetsRequest withHostedZoneId(String hostedZoneId) {
setHostedZoneId(hostedZoneId);
return this;
}
/**
* <p>
* The first name in the lexicographic ordering of domain names that you
* want the <code>ListResourceRecordSets</code> request to list.
* </p>
*
* @param startRecordName
* The first name in the lexicographic ordering of domain names that
* you want the <code>ListResourceRecordSets</code> request to list.
*/
public void setStartRecordName(String startRecordName) {
this.startRecordName = startRecordName;
}
/**
* <p>
* The first name in the lexicographic ordering of domain names that you
* want the <code>ListResourceRecordSets</code> request to list.
* </p>
*
* @return The first name in the lexicographic ordering of domain names that
* you want the <code>ListResourceRecordSets</code> request to list.
*/
public String getStartRecordName() {
return this.startRecordName;
}
/**
* <p>
* The first name in the lexicographic ordering of domain names that you
* want the <code>ListResourceRecordSets</code> request to list.
* </p>
*
* @param startRecordName
* The first name in the lexicographic ordering of domain names that
* you want the <code>ListResourceRecordSets</code> request to list.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListResourceRecordSetsRequest withStartRecordName(
String startRecordName) {
setStartRecordName(startRecordName);
return this;
}
/**
* <p>
* The DNS type at which to begin the listing of resource record sets.
* </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> |
* <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> |
* <code>SPF</code> | <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* </p>
*
* @param startRecordType
* The DNS type at which to begin the listing of resource record
* sets. </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> |
* <code>CNAME</code> | <code>MX</code> | <code>NS</code> |
* <code>PTR</code> | <code>SOA</code> | <code>SPF</code> |
* <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> |
* <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* @see RRType
*/
public void setStartRecordType(String startRecordType) {
this.startRecordType = startRecordType;
}
/**
* <p>
* The DNS type at which to begin the listing of resource record sets.
* </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> |
* <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> |
* <code>SPF</code> | <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* </p>
*
* @return The DNS type at which to begin the listing of resource record
* sets. </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> |
* <code>CNAME</code> | <code>MX</code> | <code>NS</code> |
* <code>PTR</code> | <code>SOA</code> | <code>SPF</code> |
* <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> |
* <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* @see RRType
*/
public String getStartRecordType() {
return this.startRecordType;
}
/**
* <p>
* The DNS type at which to begin the listing of resource record sets.
* </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> |
* <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> |
* <code>SPF</code> | <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* </p>
*
* @param startRecordType
* The DNS type at which to begin the listing of resource record
* sets. </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> |
* <code>CNAME</code> | <code>MX</code> | <code>NS</code> |
* <code>PTR</code> | <code>SOA</code> | <code>SPF</code> |
* <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> |
* <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* @return Returns a reference to this object so that method calls can be
* chained together.
* @see RRType
*/
public ListResourceRecordSetsRequest withStartRecordType(
String startRecordType) {
setStartRecordType(startRecordType);
return this;
}
/**
* <p>
* The DNS type at which to begin the listing of resource record sets.
* </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> |
* <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> |
* <code>SPF</code> | <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* </p>
*
* @param startRecordType
* The DNS type at which to begin the listing of resource record
* sets. </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> |
* <code>CNAME</code> | <code>MX</code> | <code>NS</code> |
* <code>PTR</code> | <code>SOA</code> | <code>SPF</code> |
* <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> |
* <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* @return Returns a reference to this object so that method calls can be
* chained together.
* @see RRType
*/
public void setStartRecordType(RRType startRecordType) {
this.startRecordType = startRecordType.toString();
}
/**
* <p>
* The DNS type at which to begin the listing of resource record sets.
* </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> | <code>CNAME</code> |
* <code>MX</code> | <code>NS</code> | <code>PTR</code> | <code>SOA</code> |
* <code>SPF</code> | <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> | <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* </p>
*
* @param startRecordType
* The DNS type at which to begin the listing of resource record
* sets. </p>
* <p>
* Valid values: <code>A</code> | <code>AAAA</code> |
* <code>CNAME</code> | <code>MX</code> | <code>NS</code> |
* <code>PTR</code> | <code>SOA</code> | <code>SPF</code> |
* <code>SRV</code> | <code>TXT</code>
* </p>
* <p>
* Values for Weighted Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Regional Resource Record Sets: <code>A</code> |
* <code>AAAA</code> | <code>CNAME</code> | <code>TXT</code>
* </p>
* <p>
* Values for Alias Resource Record Sets: <code>A</code> |
* <code>AAAA</code>
* </p>
* <p>
* Constraint: Specifying <code>type</code> without specifying
* <code>name</code> returns an <a>InvalidInput</a> error.
* @return Returns a reference to this object so that method calls can be
* chained together.
* @see RRType
*/
public ListResourceRecordSetsRequest withStartRecordType(
RRType startRecordType) {
setStartRecordType(startRecordType);
return this;
}
/**
* <p>
* <i>Weighted resource record sets only:</i> If results were truncated for
* a given DNS name and type, specify the value of
* <code>ListResourceRecordSetsResponse$NextRecordIdentifier</code> from the
* previous response to get the next resource record set that has the
* current DNS name and type.
* </p>
*
* @param startRecordIdentifier
* Weighted resource record sets only:</i> If results were truncated
* for a given DNS name and type, specify the value of
* <code>ListResourceRecordSetsResponse$NextRecordIdentifier
*/
public void setStartRecordIdentifier(String startRecordIdentifier) {
this.startRecordIdentifier = startRecordIdentifier;
}
/**
* <p>
* <i>Weighted resource record sets only:</i> If results were truncated for
* a given DNS name and type, specify the value of
* <code>ListResourceRecordSetsResponse$NextRecordIdentifier</code> from the
* previous response to get the next resource record set that has the
* current DNS name and type.
* </p>
*
* @return Weighted resource record sets only:</i> If results were truncated
* for a given DNS name and type, specify the value of
* <code>ListResourceRecordSetsResponse$NextRecordIdentifier
*/
public String getStartRecordIdentifier() {
return this.startRecordIdentifier;
}
/**
* <p>
* <i>Weighted resource record sets only:</i> If results were truncated for
* a given DNS name and type, specify the value of
* <code>ListResourceRecordSetsResponse$NextRecordIdentifier</code> from the
* previous response to get the next resource record set that has the
* current DNS name and type.
* </p>
*
* @param startRecordIdentifier
* Weighted resource record sets only:</i> If results were truncated
* for a given DNS name and type, specify the value of
* <code>ListResourceRecordSetsResponse$NextRecordIdentifier
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListResourceRecordSetsRequest withStartRecordIdentifier(
String startRecordIdentifier) {
setStartRecordIdentifier(startRecordIdentifier);
return this;
}
/**
* <p>
* The maximum number of records you want in the response body.
* </p>
*
* @param maxItems
* The maximum number of records you want in the response body.
*/
public void setMaxItems(String maxItems) {
this.maxItems = maxItems;
}
/**
* <p>
* The maximum number of records you want in the response body.
* </p>
*
* @return The maximum number of records you want in the response body.
*/
public String getMaxItems() {
return this.maxItems;
}
/**
* <p>
* The maximum number of records you want in the response body.
* </p>
*
* @param maxItems
* The maximum number of records you want in the response body.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public ListResourceRecordSetsRequest withMaxItems(String maxItems) {
setMaxItems(maxItems);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getHostedZoneId() != null)
sb.append("HostedZoneId: " + getHostedZoneId() + ",");
if (getStartRecordName() != null)
sb.append("StartRecordName: " + getStartRecordName() + ",");
if (getStartRecordType() != null)
sb.append("StartRecordType: " + getStartRecordType() + ",");
if (getStartRecordIdentifier() != null)
sb.append("StartRecordIdentifier: " + getStartRecordIdentifier()
+ ",");
if (getMaxItems() != null)
sb.append("MaxItems: " + getMaxItems());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListResourceRecordSetsRequest == false)
return false;
ListResourceRecordSetsRequest other = (ListResourceRecordSetsRequest) obj;
if (other.getHostedZoneId() == null ^ this.getHostedZoneId() == null)
return false;
if (other.getHostedZoneId() != null
&& other.getHostedZoneId().equals(this.getHostedZoneId()) == false)
return false;
if (other.getStartRecordName() == null
^ this.getStartRecordName() == null)
return false;
if (other.getStartRecordName() != null
&& other.getStartRecordName().equals(this.getStartRecordName()) == false)
return false;
if (other.getStartRecordType() == null
^ this.getStartRecordType() == null)
return false;
if (other.getStartRecordType() != null
&& other.getStartRecordType().equals(this.getStartRecordType()) == false)
return false;
if (other.getStartRecordIdentifier() == null
^ this.getStartRecordIdentifier() == null)
return false;
if (other.getStartRecordIdentifier() != null
&& other.getStartRecordIdentifier().equals(
this.getStartRecordIdentifier()) == false)
return false;
if (other.getMaxItems() == null ^ this.getMaxItems() == null)
return false;
if (other.getMaxItems() != null
&& other.getMaxItems().equals(this.getMaxItems()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime
* hashCode
+ ((getHostedZoneId() == null) ? 0 : getHostedZoneId()
.hashCode());
hashCode = prime
* hashCode
+ ((getStartRecordName() == null) ? 0 : getStartRecordName()
.hashCode());
hashCode = prime
* hashCode
+ ((getStartRecordType() == null) ? 0 : getStartRecordType()
.hashCode());
hashCode = prime
* hashCode
+ ((getStartRecordIdentifier() == null) ? 0
: getStartRecordIdentifier().hashCode());
hashCode = prime * hashCode
+ ((getMaxItems() == null) ? 0 : getMaxItems().hashCode());
return hashCode;
}
    @Override
    public ListResourceRecordSetsRequest clone() {
        // Covariant override narrowing the return type; the actual copy is
        // delegated to the superclass implementation.
        return (ListResourceRecordSetsRequest) super.clone();
    }
}
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.merge;
import com.intellij.dvcs.DvcsUtil;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Trinity;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.history.VcsRevisionNumber;
import com.intellij.openapi.vcs.merge.MergeData;
import com.intellij.openapi.vcs.merge.MergeProvider;
import com.intellij.openapi.vcs.merge.MergeProvider2;
import com.intellij.openapi.vcs.merge.MergeSession;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.ColumnInfo;
import com.intellij.vcsUtil.VcsFileUtil;
import com.intellij.vcsUtil.VcsRunnable;
import com.intellij.vcsUtil.VcsUtil;
import git4idea.GitFileRevision;
import git4idea.GitRevisionNumber;
import git4idea.GitUtil;
import git4idea.commands.GitCommand;
import git4idea.commands.GitLineHandler;
import git4idea.commands.GitLineHandlerAdapter;
import git4idea.commands.GitSimpleHandler;
import git4idea.history.GitHistoryUtils;
import git4idea.i18n.GitBundle;
import git4idea.repo.GitRepository;
import git4idea.util.GitFileUtils;
import git4idea.util.StringScanner;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static git4idea.GitUtil.CHERRY_PICK_HEAD;
import static git4idea.GitUtil.MERGE_HEAD;
/**
 * Merge-changes provider for Git, used by IDEA internal 3-way merge tool
 */
public class GitMergeProvider implements MergeProvider2 {
  // Stage numbers of the ":<stage>" revision syntax, as reported by "git ls-files --unmerged".
  private static final int ORIGINAL_REVISION_NUM = 1; // common parent
  private static final int YOURS_REVISION_NUM = 2; // file content on the local branch: "Yours"
  private static final int THEIRS_REVISION_NUM = 3; // remote file content: "Theirs"
  private static final Logger LOG = Logger.getInstance(GitMergeProvider.class);
  @NotNull private final Project myProject;
  /**
   * Roots for which the merge provider has a reverse meaning, i. e. yours and theirs are swapped.
   * It should be used when conflict is resolved after rebase or unstash.
   */
  @NotNull private final Set<VirtualFile> myReverseRoots;
  // How the reversed roots are chosen: forced on, forced off, or detected per repository state.
  private enum ReverseRequest {
    REVERSE,
    FORWARD,
    DETECT
  }
  private GitMergeProvider(@NotNull Project project, @NotNull Set<VirtualFile> reverseRoots) {
    myProject = project;
    myReverseRoots = reverseRoots;
  }
  public GitMergeProvider(@NotNull Project project, boolean reverse) {
    this(project, findReverseRoots(project, reverse ? ReverseRequest.REVERSE : ReverseRequest.FORWARD));
  }
  /**
   * Creates a provider which decides per repository root whether "yours" and
   * "theirs" must be swapped (they are swapped for roots that are rebasing).
   */
  @NotNull
  public static MergeProvider detect(@NotNull Project project) {
    return new GitMergeProvider(project, findReverseRoots(project, ReverseRequest.DETECT));
  }
  // Returns the set of repository roots whose merge sides must be swapped.
  @NotNull
  private static Set<VirtualFile> findReverseRoots(@NotNull Project project, @NotNull ReverseRequest reverseOrDetect) {
    Set<VirtualFile> reverseMap = ContainerUtil.newHashSet();
    for (GitRepository repository : GitUtil.getRepositoryManager(project).getRepositories()) {
      boolean reverse;
      if (reverseOrDetect == ReverseRequest.DETECT) {
        // DETECT: reverse only the roots that are in the middle of a rebase.
        reverse = repository.getState().equals(GitRepository.State.REBASING);
      }
      else {
        reverse = reverseOrDetect == ReverseRequest.REVERSE;
      }
      if (reverse) {
        reverseMap.add(repository.getRoot());
      }
    }
    return reverseMap;
  }
  /**
   * Loads the three sides of the conflict (original/current/last) for the file,
   * together with their revision numbers and their paths in those revisions
   * (the latter to cope with renames).
   */
  @Override
  @NotNull
  public MergeData loadRevisions(@NotNull final VirtualFile file) throws VcsException {
    final MergeData mergeData = new MergeData();
    final VirtualFile root = GitUtil.getGitRoot(file);
    final FilePath path = VcsUtil.getFilePath(file.getPath());
    VcsRunnable runnable = new VcsRunnable() {
      @Override
      @SuppressWarnings({"ConstantConditions"})
      public void run() throws VcsException {
        // ":<stage>" revisions address the index stages of the unmerged file.
        GitFileRevision original = new GitFileRevision(myProject, path, new GitRevisionNumber(":" + ORIGINAL_REVISION_NUM));
        GitFileRevision current = new GitFileRevision(myProject, path, new GitRevisionNumber(":" + yoursRevision(root)));
        GitFileRevision last = new GitFileRevision(myProject, path, new GitRevisionNumber(":" + theirsRevision(root)));
        try {
          try {
            mergeData.ORIGINAL = original.getContent();
          }
          catch (Exception ex) {
            /// unable to load original revision, use the current instead
            /// This could happen in case if rebasing.
            try {
              mergeData.ORIGINAL = file.contentsToByteArray();
            }
            catch (IOException e) {
              LOG.error(e);
              mergeData.ORIGINAL = ArrayUtil.EMPTY_BYTE_ARRAY;
            }
          }
          mergeData.CURRENT = loadRevisionCatchingErrors(current);
          mergeData.LAST = loadRevisionCatchingErrors(last);
          // TODO: can be done once for a root
          mergeData.CURRENT_REVISION_NUMBER = findCurrentRevisionNumber(root);
          mergeData.LAST_REVISION_NUMBER = findLastRevisionNumber(root);
          mergeData.ORIGINAL_REVISION_NUMBER = findOriginalRevisionNumber(root, mergeData.CURRENT_REVISION_NUMBER, mergeData.LAST_REVISION_NUMBER);
          // Blob ids allow finding the file's path in each revision even after a rename.
          Trinity<String, String, String> blobs = getAffectedBlobs(root, file);
          mergeData.CURRENT_FILE_PATH = getBlobPathInRevision(root, file, blobs.getFirst(), mergeData.CURRENT_REVISION_NUMBER);
          mergeData.ORIGINAL_FILE_PATH = getBlobPathInRevision(root, file, blobs.getSecond(), mergeData.ORIGINAL_REVISION_NUMBER);
          mergeData.LAST_FILE_PATH = getBlobPathInRevision(root, file, blobs.getThird(), mergeData.LAST_REVISION_NUMBER);
        }
        catch (IOException e) {
          throw new IllegalStateException("Failed to load file content", e);
        }
      }
    };
    VcsUtil.runVcsProcessWithProgress(runnable, GitBundle.message("merge.load.files"), false, myProject);
    return mergeData;
  }
  /**
   * Runs "git ls-files --unmerged" for the file and returns the blob ids of its
   * current ("yours"), original (common parent) and last ("theirs") versions.
   * Components may be null if the corresponding stage is absent; all three are
   * null if the git command fails.
   */
  @NotNull
  private Trinity<String, String, String> getAffectedBlobs(@NotNull VirtualFile root, @NotNull VirtualFile file) {
    try {
      GitSimpleHandler h = new GitSimpleHandler(myProject, root, GitCommand.LS_FILES);
      h.addParameters("--exclude-standard", "--unmerged", "-z");
      h.endOptions();
      h.addRelativeFiles(Collections.singleton(file));
      String output = h.run();
      StringScanner s = new StringScanner(output);
      String lastBlob = null;
      String currentBlob = null;
      String originalBlob = null;
      while (s.hasMoreData()) {
        s.spaceToken(); // permissions
        String blob = s.spaceToken();
        int source = Integer.parseInt(s.tabToken()); // stage
        s.boundedToken('\u0000'); // file name
        if (source == theirsRevision(root)) {
          lastBlob = blob;
        }
        else if (source == yoursRevision(root)) {
          currentBlob = blob;
        }
        else if (source == ORIGINAL_REVISION_NUM) {
          originalBlob = blob;
        }
        else {
          throw new IllegalStateException("Unknown revision " + source + " for the file: " + file);
        }
      }
      return Trinity.create(currentBlob, originalBlob, lastBlob);
    }
    catch (VcsException e) {
      LOG.warn(e);
      return Trinity.create(null, null, null);
    }
  }
  /**
   * Finds the path under which the given blob is stored in the given revision:
   * first at the file's current path (no rename), then anywhere in the tree.
   */
  @Nullable
  private FilePath getBlobPathInRevision(@NotNull VirtualFile root,
                                         @NotNull VirtualFile file,
                                         @Nullable String blob,
                                         @Nullable VcsRevisionNumber revision) {
    if (blob == null || revision == null) return null;
    // fast check if file was not renamed
    FilePath path = doGetBlobPathInRevision(root, blob, revision, file);
    if (path != null) return path;
    return doGetBlobPathInRevision(root, blob, revision, null);
  }
  // Runs "git ls-tree" on the revision (limited to the file if given, otherwise
  // recursively over the whole tree) and returns the unique path whose blob id
  // matches, or null when there is no match or the match is ambiguous.
  @Nullable
  private FilePath doGetBlobPathInRevision(@NotNull final VirtualFile root,
                                           @NotNull final String blob,
                                           @NotNull VcsRevisionNumber revision,
                                           @Nullable VirtualFile file) {
    final FilePath[] result = new FilePath[1];
    final boolean[] pathAmbiguous = new boolean[1];
    GitLineHandler h = new GitLineHandler(myProject, root, GitCommand.LS_TREE);
    h.addParameters(revision.asString());
    if (file != null) {
      h.endOptions();
      h.addRelativeFiles(Collections.singleton(file));
    }
    else {
      h.addParameters("-r");
      h.endOptions();
    }
    h.addLineListener(new GitLineHandlerAdapter() {
      @Override
      public void onLineAvailable(String line, Key outputType) {
        if (outputType != ProcessOutputTypes.STDOUT) return;
        // Cheap substring pre-filter before parsing the record.
        if (!line.contains(blob)) return;
        if (pathAmbiguous[0]) return;
        try {
          StringScanner s = new StringScanner(line);
          s.spaceToken(); // permissions
          String type = s.spaceToken(); // type
          String recordBlob = s.tabToken(); // blob
          FilePath file = VcsUtil.getFilePath(root, GitUtil.unescapePath(s.line()));
          if (!"blob".equals(type)) return;
          if (!blob.equals(recordBlob)) return;
          if (result[0] == null) {
            result[0] = file;
          }
          else {
            // there are multiple files with given content in this revision.
            // we don't know which is right, so do not return any
            pathAmbiguous[0] = true;
          }
        }
        catch (VcsException e) {
          LOG.warn(e);
        }
      }
    });
    h.runInCurrentThread(null);
    if (pathAmbiguous[0]) return null;
    return result[0];
  }
  // "Theirs" revision: MERGE_HEAD normally, HEAD when sides are reversed for this root.
  @Nullable
  private GitRevisionNumber findLastRevisionNumber(@NotNull VirtualFile root) {
    return myReverseRoots.contains(root) ? resolveHead(root) : resolveMergeHead(root);
  }
  // "Yours" revision: HEAD normally, MERGE_HEAD when sides are reversed for this root.
  @Nullable
  private GitRevisionNumber findCurrentRevisionNumber(@NotNull VirtualFile root) {
    return myReverseRoots.contains(root) ? resolveMergeHead(root) : resolveHead(root);
  }
  // Merge base of the two sides, or null if either side could not be resolved.
  @Nullable
  private GitRevisionNumber findOriginalRevisionNumber(@NotNull VirtualFile root,
                                                       @Nullable VcsRevisionNumber currentRevision,
                                                       @Nullable VcsRevisionNumber lastRevision) {
    if (currentRevision == null || lastRevision == null) return null;
    try {
      return GitHistoryUtils.getMergeBase(myProject, root, currentRevision.asString(), lastRevision.asString());
    }
    catch (VcsException e) {
      LOG.warn(e);
      return null;
    }
  }
  /**
   * Resolves the revision being merged in: tries MERGE_HEAD, then
   * CHERRY_PICK_HEAD, then the bookkeeping files left in the rebase-apply and
   * rebase-merge directories. Returns null if none of them can be resolved.
   */
  @Nullable
  private GitRevisionNumber resolveMergeHead(@NotNull VirtualFile root) {
    try {
      return GitRevisionNumber.resolve(myProject, root, MERGE_HEAD);
    }
    catch (VcsException e) {
      LOG.info("Couldn't resolve the MERGE_HEAD in " + root + ": " + e.getMessage()); // this may be not a bug, just cherry-pick
    }
    try {
      return GitRevisionNumber.resolve(myProject, root, CHERRY_PICK_HEAD);
    }
    catch (VcsException e) {
      LOG.info("Couldn't resolve the CHERRY_PICK_HEAD in " + root + ": " + e.getMessage());
    }
    GitRepository repository = GitUtil.getRepositoryManager(myProject).getRepositoryForRoot(root);
    assert repository != null;
    File rebaseApply = repository.getRepositoryFiles().getRebaseApplyDir();
    GitRevisionNumber rebaseRevision = readRevisionFromFile(root, new File(rebaseApply, "original-commit"));
    if (rebaseRevision != null) return rebaseRevision;
    File rebaseMerge = repository.getRepositoryFiles().getRebaseMergeDir();
    GitRevisionNumber mergeRevision = readRevisionFromFile(root, new File(rebaseMerge, "stopped-sha"));
    if (mergeRevision != null) return mergeRevision;
    return null;
  }
  // Reads a revision reference stored in the given file and resolves it;
  // null when the file is missing, unreadable or the revision is unresolvable.
  @Nullable
  private GitRevisionNumber readRevisionFromFile(@NotNull VirtualFile root, @NotNull File file) {
    if (!file.exists()) return null;
    String revision = DvcsUtil.tryLoadFileOrReturn(file, null, CharsetToolkit.UTF8);
    if (revision == null) return null;
    try {
      return GitRevisionNumber.resolve(myProject, root, revision);
    }
    catch (VcsException e) {
      LOG.info("Couldn't resolve revision '" + revision + "' in " + root + ": " + e.getMessage());
      return null;
    }
  }
  // Resolves HEAD; an unresolvable HEAD is a genuine error (unlike MERGE_HEAD).
  @Nullable
  private GitRevisionNumber resolveHead(@NotNull VirtualFile root) {
    try {
      return GitRevisionNumber.resolve(myProject, root, "HEAD");
    }
    catch (VcsException e) {
      LOG.error("Couldn't resolve the HEAD in " + root, e);
      return null;
    }
  }
  /**
   * Loads the revision content, mapping git error messages which indicate that
   * this side of the conflict has no content (e.g. the file is absent at this
   * stage) to an empty byte array; any other error is rethrown.
   */
  private static byte[] loadRevisionCatchingErrors(@NotNull GitFileRevision revision) throws VcsException, IOException {
    try {
      return revision.getContent();
    } catch (VcsException e) {
      String m = e.getMessage().trim();
      if (m.startsWith("fatal: ambiguous argument ")
          || (m.startsWith("fatal: Path '") && m.contains("' exists on disk, but not in '"))
          || (m.contains("is in the index, but not at stage ")
          || (m.contains("bad revision")))) {
        return ArrayUtil.EMPTY_BYTE_ARRAY;
      }
      else {
        throw e;
      }
    }
  }
  /**
   * @return number for "yours" revision (taking {@code reverse} flag in account)
   * @param root
   */
  private int yoursRevision(@NotNull VirtualFile root) {
    return myReverseRoots.contains(root) ? THEIRS_REVISION_NUM : YOURS_REVISION_NUM;
  }
  /**
   * @return number for "theirs" revision (taking {@code reverse} flag in account)
   * @param root
   */
  private int theirsRevision(@NotNull VirtualFile root) {
    return myReverseRoots.contains(root) ? YOURS_REVISION_NUM : THEIRS_REVISION_NUM;
  }
  @Override
  public void conflictResolvedForFile(@NotNull VirtualFile file) {
    // Staging the file marks its conflict as resolved in git.
    try {
      GitFileUtils.addFiles(myProject, GitUtil.getGitRoot(file), file);
    }
    catch (VcsException e) {
      LOG.error("Confirming conflict resolution failed", e);
    }
  }
  @Override
  public boolean isBinary(@NotNull VirtualFile file) {
    return file.getFileType().isBinary();
  }
  @Override
  @NotNull
  public MergeSession createMergeSession(@NotNull List<VirtualFile> files) {
    return new MyMergeSession(files);
  }
  /**
   * The conflict descriptor
   */
  private static class Conflict {
    VirtualFile myFile; // the conflicting file
    VirtualFile myRoot; // the repository root the file belongs to
    Status myStatusTheirs; // what "theirs" did to the file
    Status myStatusYours; // what "yours" did to the file
    enum Status {
      MODIFIED, // modified on the branch
      DELETED // deleted on the branch
    }
  }
  /**
   * The merge session, it queries conflict information.
   */
  private class MyMergeSession implements MergeSession {
    Map<VirtualFile, Conflict> myConflicts = new HashMap<>();
    MyMergeSession(List<VirtualFile> filesToMerge) {
      // get conflict type by the file
      try {
        for (Map.Entry<VirtualFile, List<VirtualFile>> e : GitUtil.sortFilesByGitRoot(filesToMerge).entrySet()) {
          Map<String, Conflict> cs = new HashMap<>();
          VirtualFile root = e.getKey();
          List<VirtualFile> files = e.getValue();
          GitSimpleHandler h = new GitSimpleHandler(myProject, root, GitCommand.LS_FILES);
          h.setStdoutSuppressed(true);
          h.setSilent(true);
          h.addParameters("--exclude-standard", "--unmerged", "-t", "-z");
          h.endOptions();
          String output = h.run();
          StringScanner s = new StringScanner(output);
          while (s.hasMoreData()) {
            // Only "M" (unmerged) records are of interest; skip other records entirely.
            if (!"M".equals(s.spaceToken())) {
              s.boundedToken('\u0000');
              continue;
            }
            s.spaceToken(); // permissions
            s.spaceToken(); // commit hash
            int source = Integer.parseInt(s.tabToken());
            String file = s.boundedToken('\u0000');
            Conflict c = cs.get(file);
            if (c == null) {
              c = new Conflict();
              c.myRoot = root;
              cs.put(file, c);
            }
            // A present stage means the corresponding side modified the file.
            if (source == theirsRevision(root)) {
              c.myStatusTheirs = Conflict.Status.MODIFIED;
            }
            else if (source == yoursRevision(root)) {
              c.myStatusYours = Conflict.Status.MODIFIED;
            }
            else if (source != ORIGINAL_REVISION_NUM) {
              throw new IllegalStateException("Unknown revision " + source + " for the file: " + file);
            }
          }
          for (VirtualFile f : files) {
            String path = VcsFileUtil.relativePath(root, f);
            Conflict c = cs.get(path);
            if (c == null) {
              LOG.error(String.format("The conflict not found for file: %s(%s)%nFull ls-files output: %n%s%nAll files: %n%s",
                                      f.getPath(), path, output, files));
              continue;
            }
            c.myFile = f;
            // A missing stage means the corresponding side deleted the file.
            if (c.myStatusTheirs == null) {
              c.myStatusTheirs = Conflict.Status.DELETED;
            }
            if (c.myStatusYours == null) {
              c.myStatusYours = Conflict.Status.DELETED;
            }
            myConflicts.put(f, c);
          }
        }
      }
      catch (VcsException ex) {
        throw new IllegalStateException("The git operation should not fail in this context", ex);
      }
    }
    @NotNull
    @Override
    public ColumnInfo[] getMergeInfoColumns() {
      return new ColumnInfo[]{new StatusColumn(false), new StatusColumn(true)};
    }
    @Override
    public boolean canMerge(@NotNull VirtualFile file) {
      Conflict c = myConflicts.get(file);
      return c != null && !file.isDirectory();
    }
    @Override
    public void conflictResolvedForFile(@NotNull VirtualFile file, @NotNull Resolution resolution) {
      Conflict c = myConflicts.get(file);
      if (c == null) {
        LOG.error("Conflict was not loaded for the file: " + file.getPath());
        return;
      }
      try {
        // Pick the status of the side whose content was accepted.
        Conflict.Status status;
        switch (resolution) {
          case AcceptedTheirs:
            status = c.myStatusTheirs;
            break;
          case AcceptedYours:
            status = c.myStatusYours;
            break;
          case Merged:
            status = Conflict.Status.MODIFIED;
            break;
          default:
            throw new IllegalArgumentException("Unsupported resolution for unmergable files(" + file.getPath() + "): " + resolution);
        }
        // MODIFIED content is staged; DELETED content is removed from git.
        switch (status) {
          case MODIFIED:
            GitFileUtils.addFiles(myProject, c.myRoot, file);
            break;
          case DELETED:
            GitFileUtils.deleteFiles(myProject, c.myRoot, file);
            break;
          default:
            throw new IllegalArgumentException("Unsupported status(" + file.getPath() + "): " + status);
        }
      }
      catch (VcsException e) {
        LOG.error("Unexpected exception during the git operation (" + file.getPath() + ")", e);
      }
    }
    /**
     * The column shows either "yours" or "theirs" status
     */
    class StatusColumn extends ColumnInfo<VirtualFile, String> {
      /**
       * if false, "yours" status is displayed, otherwise "theirs"
       */
      private final boolean myIsTheirs;
      public StatusColumn(boolean isTheirs) {
        super(isTheirs ? GitBundle.message("merge.tool.column.theirs.status") : GitBundle.message("merge.tool.column.yours.status"));
        myIsTheirs = isTheirs;
      }
      @Override
      public String valueOf(VirtualFile file) {
        Conflict c = myConflicts.get(file);
        if (c == null) {
          LOG.error("No conflict for the file " + file);
          return "";
        }
        Conflict.Status s = myIsTheirs ? c.myStatusTheirs : c.myStatusYours;
        switch (s) {
          case MODIFIED:
            return GitBundle.message("merge.tool.column.status.modified");
          case DELETED:
            return GitBundle.message("merge.tool.column.status.deleted");
          default:
            throw new IllegalStateException("Unknown status " + s + " for file " + file.getPath());
        }
      }
      @Override
      public String getMaxStringValue() {
        // Widest expected cell value; used for column sizing.
        return GitBundle.message("merge.tool.column.status.modified");
      }
      @Override
      public int getAdditionalWidth() {
        return 10;
      }
    }
  }
}
|
|
package org.bouncycastle.pqc.crypto.rainbow;
import java.security.SecureRandom;
import org.bouncycastle.crypto.AsymmetricCipherKeyPair;
import org.bouncycastle.crypto.AsymmetricCipherKeyPairGenerator;
import org.bouncycastle.crypto.KeyGenerationParameters;
import org.bouncycastle.pqc.crypto.rainbow.util.ComputeInField;
import org.bouncycastle.pqc.crypto.rainbow.util.GF2Field;
/**
 * This class implements AsymmetricCipherKeyPairGenerator. It is used
 * as a generator for the private and public key of the Rainbow Signature
 * Scheme.
 * <p>
 * Detailed information about the key generation is to be found in the paper of
 * Jintai Ding, Dieter Schmidt: Rainbow, a New Multivariable Polynomial
 * Signature Scheme. ACNS 2005: 164-175 (http://dx.doi.org/10.1007/11496137_12)
 */
public class RainbowKeyPairGenerator
    implements AsymmetricCipherKeyPairGenerator
{
    private boolean initialized = false; // true once initialize()/init() has been called
    private SecureRandom sr; // randomness source for all generated coefficients
    private RainbowKeyGenerationParameters rainbowParams;
    /* linear affine map L1: */
    private short[][] A1; // matrix of the lin. affine map L1(n-v1 x n-v1 matrix)
    private short[][] A1inv; // inverted A1
    private short[] b1; // translation element of the lin.affine map L1
    /* linear affine map L2: */
    private short[][] A2; // matrix of the lin. affine map (n x n matrix)
    private short[][] A2inv; // inverted A2
    private short[] b2; // translation element of the lin.affine map L2
    /* components of F: */
    private int numOfLayers; // u (number of sets S)
    private Layer layers[]; // layers of polynomials of F
    private int[] vi; // set of vinegar vars per layer.
    /* components of Public Key */
    private short[][] pub_quadratic; // quadratic(mixed) coefficients
    private short[][] pub_singular; // singular coefficients
    private short[] pub_scalar; // scalars
    // TODO
    /**
     * The standard constructor tries to generate the Rainbow algorithm identifier
     * with the corresponding OID.
     * <p>
     */
    public RainbowKeyPairGenerator()
    {
    }
    /**
     * This function generates a Rainbow key pair.
     *
     * @return the generated key pair
     */
    public AsymmetricCipherKeyPair genKeyPair()
    {
        RainbowPrivateKeyParameters privKey;
        RainbowPublicKeyParameters pubKey;
        if (!initialized)
        {
            // fall back to the default parameter set when initialize() was never called
            initializeDefault();
        }
        /* choose all coefficients at random */
        keygen();
        /* now marshall them to PrivateKey */
        privKey = new RainbowPrivateKeyParameters(A1inv, b1, A2inv, b2, vi, layers);
        /* marshall to PublicKey */
        pubKey = new RainbowPublicKeyParameters(vi[vi.length - 1] - vi[0], pub_quadratic, pub_singular, pub_scalar);
        return new AsymmetricCipherKeyPair(pubKey, privKey);
    }
    // TODO
    public void initialize(
        KeyGenerationParameters param)
    {
        this.rainbowParams = (RainbowKeyGenerationParameters)param;
        // set source of randomness
        // NOTE(review): a fresh SecureRandom is created here; the random
        // carried inside 'param' is not used -- confirm this is intended.
        this.sr = new SecureRandom();
        // unmarshalling:
        this.vi = this.rainbowParams.getParameters().getVi();
        this.numOfLayers = this.rainbowParams.getParameters().getNumOfLayers();
        this.initialized = true;
    }
    // Initializes the generator with the default Rainbow parameter set.
    private void initializeDefault()
    {
        RainbowKeyGenerationParameters rbKGParams = new RainbowKeyGenerationParameters(new SecureRandom(), new RainbowParameters());
        initialize(rbKGParams);
    }
    /**
     * This function calls the functions for the random generation of the coefficients
     * and the matrices needed for the private key and the method for computing the public key.
     */
    private void keygen()
    {
        generateL1();
        generateL2();
        generateF();
        computePublicKey();
    }
    /**
     * This function generates the invertible affine linear map L1 = A1*x + b1
     * <p>
     * The translation part b1, is stored in a separate array. The inverse of
     * the matrix-part of L1 A1inv is also computed here.
     * <p>
     * This linear map hides the output of the map F. It is on k^(n-v1).
     */
    private void generateL1()
    {
        // dimension = n-v1 = vi[last] - vi[first]
        int dim = vi[vi.length - 1] - vi[0];
        this.A1 = new short[dim][dim];
        this.A1inv = null;
        ComputeInField c = new ComputeInField();
        /* generation of A1 at random */
        // Retry until c.inverse() yields a non-null result, i.e. until the
        // randomly drawn matrix could be inverted.
        while (A1inv == null)
        {
            for (int i = 0; i < dim; i++)
            {
                for (int j = 0; j < dim; j++)
                {
                    A1[i][j] = (short)(sr.nextInt() & GF2Field.MASK);
                }
            }
            A1inv = c.inverse(A1);
        }
        /* generation of the translation vector at random */
        b1 = new short[dim];
        for (int i = 0; i < dim; i++)
        {
            b1[i] = (short)(sr.nextInt() & GF2Field.MASK);
        }
    }
    /**
     * This function generates the invertible affine linear map L2 = A2*x + b2
     * <p>
     * The translation part b2, is stored in a separate array. The inverse of
     * the matrix-part of L2 A2inv is also computed here.
     * <p>
     * This linear map hides the output of the map F. It is on k^(n).
     */
    private void generateL2()
    {
        // dimension = n = vi[last]
        int dim = vi[vi.length - 1];
        this.A2 = new short[dim][dim];
        this.A2inv = null;
        ComputeInField c = new ComputeInField();
        /* generation of A2 at random */
        // Same retry-until-invertible strategy as in generateL1().
        while (this.A2inv == null)
        {
            for (int i = 0; i < dim; i++)
            {
                for (int j = 0; j < dim; j++)
                { // one col extra for b
                    A2[i][j] = (short)(sr.nextInt() & GF2Field.MASK);
                }
            }
            this.A2inv = c.inverse(A2);
        }
        /* generation of the translation vector at random */
        b2 = new short[dim];
        for (int i = 0; i < dim; i++)
        {
            b2[i] = (short)(sr.nextInt() & GF2Field.MASK);
        }
    }
    /**
     * This function generates the private map F, which consists of u-1 layers.
     * Each layer consists of oi polynomials where oi = vi[i+1]-vi[i].
     * <p>
     * The methods for the generation of the coefficients of these polynomials
     * are called here.
     */
    private void generateF()
    {
        this.layers = new Layer[this.numOfLayers];
        for (int i = 0; i < this.numOfLayers; i++)
        {
            layers[i] = new Layer(this.vi[i], this.vi[i + 1], sr);
        }
    }
    /**
     * This function computes the public key from the private key.
     * <p>
     * The composition of F with L2 is computed, followed by applying L1 to the
     * composition's result. The singular and scalar values constitute to the
     * public key as is, the quadratic terms are compacted in
     * <tt>compactPublicKey()</tt>
     */
    private void computePublicKey()
    {
        ComputeInField c = new ComputeInField();
        int rows = this.vi[this.vi.length - 1] - this.vi[0];
        int vars = this.vi[this.vi.length - 1];
        // Fpub
        short[][][] coeff_quadratic_3dim = new short[rows][vars][vars];
        this.pub_singular = new short[rows][vars];
        this.pub_scalar = new short[rows];
        // Coefficients of layers of Private Key F
        short[][][] coeff_alpha;
        short[][][] coeff_beta;
        short[][] coeff_gamma;
        short[] coeff_eta;
        // Needed for counters;
        int oils = 0;
        int vins = 0;
        int crnt_row = 0; // current row (polynomial)
        short vect_tmp[] = new short[vars]; // vector tmp;
        short sclr_tmp = 0;
        // Composition of F and L2: Insert L2 = A2*x+b2 in F
        for (int l = 0; l < this.layers.length; l++)
        {
            // get coefficients of current layer
            coeff_alpha = this.layers[l].getCoeffAlpha();
            coeff_beta = this.layers[l].getCoeffBeta();
            coeff_gamma = this.layers[l].getCoeffGamma();
            coeff_eta = this.layers[l].getCoeffEta();
            oils = coeff_alpha[0].length;// this.layers[l].getOi();
            vins = coeff_beta[0].length;// this.layers[l].getVi();
            // compute polynomials of layer
            for (int p = 0; p < oils; p++)
            {
                // multiply alphas (oil * vinegar cross terms)
                for (int x1 = 0; x1 < oils; x1++)
                {
                    for (int x2 = 0; x2 < vins; x2++)
                    {
                        // multiply polynomial1 with polynomial2
                        vect_tmp = c.multVect(coeff_alpha[p][x1][x2],
                            this.A2[x1 + vins]);
                        coeff_quadratic_3dim[crnt_row + p] = c.addSquareMatrix(
                            coeff_quadratic_3dim[crnt_row + p], c
                                .multVects(vect_tmp, this.A2[x2]));
                        // mul poly1 with scalar2
                        vect_tmp = c.multVect(this.b2[x2], vect_tmp);
                        this.pub_singular[crnt_row + p] = c.addVect(vect_tmp,
                            this.pub_singular[crnt_row + p]);
                        // mul scalar1 with poly2
                        vect_tmp = c.multVect(coeff_alpha[p][x1][x2],
                            this.A2[x2]);
                        vect_tmp = c.multVect(b2[x1 + vins], vect_tmp);
                        this.pub_singular[crnt_row + p] = c.addVect(vect_tmp,
                            this.pub_singular[crnt_row + p]);
                        // mul scalar1 with scalar2
                        sclr_tmp = GF2Field.multElem(coeff_alpha[p][x1][x2],
                            this.b2[x1 + vins]);
                        this.pub_scalar[crnt_row + p] = GF2Field.addElem(
                            this.pub_scalar[crnt_row + p], GF2Field
                                .multElem(sclr_tmp, this.b2[x2]));
                    }
                }
                // multiply betas (vinegar * vinegar terms)
                for (int x1 = 0; x1 < vins; x1++)
                {
                    for (int x2 = 0; x2 < vins; x2++)
                    {
                        // multiply polynomial1 with polynomial2
                        vect_tmp = c.multVect(coeff_beta[p][x1][x2],
                            this.A2[x1]);
                        coeff_quadratic_3dim[crnt_row + p] = c.addSquareMatrix(
                            coeff_quadratic_3dim[crnt_row + p], c
                                .multVects(vect_tmp, this.A2[x2]));
                        // mul poly1 with scalar2
                        vect_tmp = c.multVect(this.b2[x2], vect_tmp);
                        this.pub_singular[crnt_row + p] = c.addVect(vect_tmp,
                            this.pub_singular[crnt_row + p]);
                        // mul scalar1 with poly2
                        vect_tmp = c.multVect(coeff_beta[p][x1][x2],
                            this.A2[x2]);
                        vect_tmp = c.multVect(this.b2[x1], vect_tmp);
                        this.pub_singular[crnt_row + p] = c.addVect(vect_tmp,
                            this.pub_singular[crnt_row + p]);
                        // mul scalar1 with scalar2
                        sclr_tmp = GF2Field.multElem(coeff_beta[p][x1][x2],
                            this.b2[x1]);
                        this.pub_scalar[crnt_row + p] = GF2Field.addElem(
                            this.pub_scalar[crnt_row + p], GF2Field
                                .multElem(sclr_tmp, this.b2[x2]));
                    }
                }
                // multiply gammas (linear terms)
                for (int n = 0; n < vins + oils; n++)
                {
                    // mul poly with scalar
                    vect_tmp = c.multVect(coeff_gamma[p][n], this.A2[n]);
                    this.pub_singular[crnt_row + p] = c.addVect(vect_tmp,
                        this.pub_singular[crnt_row + p]);
                    // mul scalar with scalar
                    this.pub_scalar[crnt_row + p] = GF2Field.addElem(
                        this.pub_scalar[crnt_row + p], GF2Field.multElem(
                            coeff_gamma[p][n], this.b2[n]));
                }
                // add eta (constant term)
                this.pub_scalar[crnt_row + p] = GF2Field.addElem(
                    this.pub_scalar[crnt_row + p], coeff_eta[p]);
            }
            crnt_row = crnt_row + oils;
        }
        // Apply L1 = A1*x+b1 to composition of F and L2
        {
            // temporary coefficient arrays
            short[][][] tmp_c_quad = new short[rows][vars][vars];
            short[][] tmp_c_sing = new short[rows][vars];
            short[] tmp_c_scal = new short[rows];
            for (int r = 0; r < rows; r++)
            {
                for (int q = 0; q < A1.length; q++)
                {
                    tmp_c_quad[r] = c.addSquareMatrix(tmp_c_quad[r], c
                        .multMatrix(A1[r][q], coeff_quadratic_3dim[q]));
                    tmp_c_sing[r] = c.addVect(tmp_c_sing[r], c.multVect(
                        A1[r][q], this.pub_singular[q]));
                    tmp_c_scal[r] = GF2Field.addElem(tmp_c_scal[r], GF2Field
                        .multElem(A1[r][q], this.pub_scalar[q]));
                }
                tmp_c_scal[r] = GF2Field.addElem(tmp_c_scal[r], b1[r]);
            }
            // set public key
            coeff_quadratic_3dim = tmp_c_quad;
            this.pub_singular = tmp_c_sing;
            this.pub_scalar = tmp_c_scal;
        }
        compactPublicKey(coeff_quadratic_3dim);
    }
    /**
     * The quadratic (or mixed) terms of the public key are compacted from a n x
     * n matrix per polynomial to an upper diagonal matrix stored in one integer
     * array of n (n + 1) / 2 elements per polynomial. The ordering of elements
     * is lexicographic and the result is updating <tt>this.pub_quadratic</tt>,
     * which stores the quadratic elements of the public key.
     *
     * @param coeff_quadratic_to_compact 3-dimensional array containing a n x n Matrix for each of the
     *                                   n - v1 polynomials
     */
    private void compactPublicKey(short[][][] coeff_quadratic_to_compact)
    {
        int polynomials = coeff_quadratic_to_compact.length;
        int n = coeff_quadratic_to_compact[0].length;
        int entries = n * (n + 1) / 2;// the small gauss
        this.pub_quadratic = new short[polynomials][entries];
        int offset = 0;
        for (int p = 0; p < polynomials; p++)
        {
            offset = 0;
            for (int x = 0; x < n; x++)
            {
                for (int y = x; y < n; y++)
                {
                    if (y == x)
                    {
                        // diagonal entry: taken as-is
                        this.pub_quadratic[p][offset] = coeff_quadratic_to_compact[p][x][y];
                    }
                    else
                    {
                        // off-diagonal: fold the symmetric pair (x,y) and (y,x)
                        // into a single upper-triangular entry
                        this.pub_quadratic[p][offset] = GF2Field.addElem(
                            coeff_quadratic_to_compact[p][x][y],
                            coeff_quadratic_to_compact[p][y][x]);
                    }
                    offset++;
                }
            }
        }
    }
    // AsymmetricCipherKeyPairGenerator interface method; delegates to initialize().
    public void init(KeyGenerationParameters param)
    {
        this.initialize(param);
    }
    // AsymmetricCipherKeyPairGenerator interface method; delegates to genKeyPair().
    public AsymmetricCipherKeyPair generateKeyPair()
    {
        return genKeyPair();
    }
}
|
|
package org.insightech.er.editor.controller.editpart.element;
import java.beans.PropertyChangeEvent;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.draw2d.FreeformLayer;
import org.eclipse.draw2d.FreeformLayout;
import org.eclipse.draw2d.IFigure;
import org.eclipse.gef.CompoundSnapToHelper;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.SnapToGeometry;
import org.eclipse.gef.SnapToGrid;
import org.eclipse.gef.SnapToHelper;
import org.eclipse.gef.editpolicies.SnapFeedbackPolicy;
import org.eclipse.swt.graphics.Color;
import org.insightech.er.Resources;
import org.insightech.er.editor.controller.editpart.element.node.NodeElementEditPart;
import org.insightech.er.editor.controller.editpolicy.ERDiagramLayoutEditPolicy;
import org.insightech.er.editor.model.ERDiagram;
import org.insightech.er.editor.model.diagram_contents.element.connection.ConnectionElement;
import org.insightech.er.editor.model.diagram_contents.element.node.NodeElement;
import org.insightech.er.editor.model.settings.Settings;
public class ERDiagramEditPart extends AbstractModelEditPart {
    /**
     * {@inheritDoc}
     *
     * Any failure during deactivation is deliberately swallowed (only printed)
     * so that tearing down the edit part never propagates an exception to the
     * caller.
     */
    @Override
    public void deactivate() {
        try {
            super.deactivate();
        } catch (Throwable t) {
            t.printStackTrace();
        }
    }
/**
* {@inheritDoc}
*/
@Override
protected IFigure createFigure() {
FreeformLayer layer = new FreeformLayer();
layer.setLayoutManager(new FreeformLayout());
return layer;
}
/**
* {@inheritDoc}
*/
@Override
protected void createEditPolicies() {
this.installEditPolicy(EditPolicy.LAYOUT_ROLE,
new ERDiagramLayoutEditPolicy());
this.installEditPolicy("Snap Feedback", new SnapFeedbackPolicy());
}
/**
* {@inheritDoc}
*/
@Override
protected List getModelChildren() {
List<Object> modelChildren = new ArrayList<Object>();
ERDiagram diagram = (ERDiagram) this.getModel();
// category must be first.
modelChildren.addAll(diagram.getDiagramContents().getSettings()
.getCategorySetting().getSelectedCategories());
modelChildren.addAll(diagram.getDiagramContents().getContents()
.getNodeElementList());
if (diagram.getChangeTrackingList().isCalculated()) {
modelChildren.addAll(diagram.getChangeTrackingList()
.getRemovedNodeElementSet());
}
modelChildren.add(diagram.getDiagramContents().getSettings()
.getModelProperties());
return modelChildren;
}
@Override
public void doPropertyChange(PropertyChangeEvent event) {
if (event.getPropertyName().equals("refreshChildren")) {
this.refreshChildren();
} else if (event.getPropertyName().equals("refreshConnection")) {
for (NodeElement nodeElement : this.getDiagram()
.getDiagramContents().getContents().getNodeElementList()) {
for (ConnectionElement connection : nodeElement.getIncomings()) {
connection.refreshVisuals();
}
}
} else if (event.getPropertyName().equals("refreshSettings")) {
this.refreshChildren();
this.refreshSettings();
} else if (event.getPropertyName().equals("refreshWithConnection")) {
this.refresh();
for (NodeElement nodeElement : this.getDiagram()
.getDiagramContents().getContents().getNodeElementList()) {
for (ConnectionElement connection : nodeElement.getIncomings()) {
connection.refreshVisuals();
}
}
this.getViewer().deselectAll();
/*
* List<NodeElement> nodeElementList = (List<NodeElement>) event
* .getNewValue();
*
* if (nodeElementList != null) { SelectionManager selectionManager
* = this.getViewer() .getSelectionManager();
*
* Map<NodeElement, EditPart> modelToEditPart =
* getModelToEditPart();
*
* for (NodeElement nodeElement : nodeElementList) {
* selectionManager.appendSelection(modelToEditPart
* .get(nodeElement)); } }
*/
}
/*
* } else if (event.getPropertyName()
* .equals(ERDiagram.PROPERTY_CHANGE_ALL)) {
*
* this.refresh(); this.refreshRelations();
*
* List<NodeElement> nodeElementList = (List<NodeElement>) event
* .getNewValue();
*
* if (nodeElementList != null) { this.getViewer().deselectAll();
* SelectionManager selectionManager = this.getViewer()
* .getSelectionManager();
*
* Map<NodeElement, EditPart> modelToEditPart = getModelToEditPart();
*
* for (NodeElement nodeElement : nodeElementList) {
* selectionManager.appendSelection(modelToEditPart .get(nodeElement));
* } }
*/
super.doPropertyChange(event);
}
@Override
final public void refresh() {
refreshChildren();
refreshVisuals();
refreshSourceConnections();
refreshTargetConnections();
}
/**
* {@inheritDoc}
*/
@Override
public void refreshVisuals() {
ERDiagram element = (ERDiagram) this.getModel();
int[] color = element.getColor();
if (color != null) {
Color bgColor = Resources.getColor(color);
this.getViewer().getControl().setBackground(bgColor);
}
for (Object child : this.getChildren()) {
if (child instanceof NodeElementEditPart) {
NodeElementEditPart part = (NodeElementEditPart) child;
part.refreshVisuals();
}
}
}
private void refreshSettings() {
ERDiagram diagram = (ERDiagram) this.getModel();
Settings settings = diagram.getDiagramContents().getSettings();
for (Object child : this.getChildren()) {
if (child instanceof NodeElementEditPart) {
NodeElementEditPart part = (NodeElementEditPart) child;
part.refreshSettings(settings);
}
}
}
// private Map<NodeElement, EditPart> getModelToEditPart() {
// Map<NodeElement, EditPart> modelToEditPart = new HashMap<NodeElement,
// EditPart>();
// List children = getChildren();
//
// for (int i = 0; i < children.size(); i++) {
// EditPart editPart = (EditPart) children.get(i);
// modelToEditPart.put((NodeElement) editPart.getModel(), editPart);
// }
//
// return modelToEditPart;
// }
@Override
public Object getAdapter(Class key) {
if (key == SnapToHelper.class) {
List<SnapToHelper> helpers = new ArrayList<SnapToHelper>();
helpers.add(new SnapToGeometry(this));
if (Boolean.TRUE.equals(getViewer().getProperty(
SnapToGeometry.PROPERTY_SNAP_ENABLED))) {
helpers.add(new SnapToGrid(this));
}
// if (Boolean.TRUE.equals(getViewer().getProperty(
// SnapToGrid.PROPERTY_GRID_ENABLED))) {
// helpers.add(new SnapToGrid(this));
// }
if (helpers.size() == 0) {
return null;
} else {
return new CompoundSnapToHelper(
helpers.toArray(new SnapToHelper[0]));
}
}
return super.getAdapter(key);
}
}
|
|
/*
* Copyright (c) 2017 Microchip Technology Inc. and its subsidiaries (Microchip). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package com.microchip.mplab.nbide.embedded.arduino.wizard;
import com.microchip.crownking.opt.Version;
import com.microchip.mplab.mdbcore.MessageMediator.ActionList;
import com.microchip.mplab.mdbcore.MessageMediator.DialogBoxType;
import com.microchip.mplab.mdbcore.MessageMediator.Message;
import com.microchip.mplab.mdbcore.MessageMediator.MessageMediator;
import com.microchip.mplab.nbide.embedded.api.ui.TypeAheadComboBox;
import com.microchip.mplab.nbide.embedded.arduino.importer.ArduinoConfig;
import static com.microchip.mplab.nbide.embedded.arduino.importer.ArduinoConfig.ROOT_PLATFORM_ARCH;
import static com.microchip.mplab.nbide.embedded.arduino.importer.ArduinoConfig.ROOT_PLATFORM_VENDOR;
import com.microchip.mplab.nbide.embedded.arduino.importer.Platform;
import java.awt.Component;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import javax.swing.JFileChooser;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectManager;
import org.netbeans.spi.project.ui.support.ProjectChooser;
import org.openide.WizardDescriptor;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.HelpCtx;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.openide.util.NbPreferences;
import com.microchip.mplab.nbide.embedded.makeproject.ui.wizards.SelectProjectInfoPanel;
import javax.swing.JTextField;
import static com.microchip.mplab.nbide.embedded.makeproject.api.wizards.WizardProperty.*;
import static com.microchip.mplab.nbide.embedded.arduino.wizard.ImportWizardProperty.*;
import static com.microchip.mplab.nbide.embedded.arduino.importer.Requirements.MINIMUM_ARDUINO_VERSION;
import com.microchip.mplab.nbide.embedded.arduino.importer.Board;
import com.microchip.mplab.nbide.embedded.arduino.importer.BoardConfiguration;
import com.microchip.mplab.nbide.embedded.arduino.importer.PlatformFactory;
import com.microchip.mplab.nbide.embedded.arduino.utils.ArduinoProjectFileFilter;
import java.awt.event.FocusEvent;
import java.awt.event.ItemEvent;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JComboBox;
import javax.swing.filechooser.FileFilter;
import org.openide.util.Exceptions;
// TODO: Introduce more Optional return types
/**
 * First panel of the Arduino project import wizard. Collects the source
 * sketch directory, target project name/location, the Arduino installation
 * directory, the target platform/board, and whether core files should be
 * copied into the new MPLAB X project.
 * <p>
 * Not thread-safe; all methods are expected to run on the Swing EDT, as is
 * usual for NetBeans wizard panels.
 */
public class ProjectSetupStep implements WizardDescriptor.Panel<WizardDescriptor> {

    private static final String MAKEFILE_NAME = "Makefile"; // NOI18N

    private final Set<ChangeListener> listeners = new HashSet<>();
    // Maps user-visible board names to board IDs for the current platform.
    private Map<String, String> boardIdLookup = new HashMap<>();
    private final ArduinoConfig arduinoConfig;
    private final PlatformFactory platformFactory;
    private final MPLABDeviceAssistant deviceAssistant;
    private List<Platform> allPlatforms;
    private Platform currentPlatform;
    private Board board;
    private WizardDescriptor wizardDescriptor;
    private ProjectSetupPanel view;

    public ProjectSetupStep( ArduinoConfig arduinoConfig, PlatformFactory platformFactory, MPLABDeviceAssistant deviceAssistant ) {
        this.arduinoConfig = arduinoConfig;
        this.platformFactory = platformFactory;
        this.deviceAssistant = deviceAssistant;
    }

    /**
     * Lazily creates the panel and populates the platform combo with all
     * known platforms sorted by display name.
     */
    @Override
    public Component getComponent() {
        if (view == null) {
            view = new ProjectSetupPanel(this);
            try {
                allPlatforms = new ArrayList<>(platformFactory.getAllPlatforms(arduinoConfig.getSettingsPath()));
                Collections.sort(allPlatforms, (Platform p1, Platform p2) -> p1.getDisplayName().orElse("").compareTo(p2.getDisplayName().orElse("")));
                view.platformCombo.setModel( new PlatformComboModel(allPlatforms) );
            } catch (IOException ex) {
                Exceptions.printStackTrace(ex);
            }
        }
        return view;
    }

    @Override
    public HelpCtx getHelp() {
        return new HelpCtx("56f8deKxLuo_628366");
    }

    /**
     * Validates every field on the panel in order, setting the wizard's
     * error message for the first failing check and returning {@code false}.
     * Returns {@code true} (and clears the error message, unless a warning
     * is pending) when everything is valid.
     */
    @Override
    public boolean isValid() {
        Boolean overwriteExistingProject = (Boolean) wizardDescriptor.getProperty(OVERWRITE_EXISTING_PROJECT.key());
        if (overwriteExistingProject == null) {
            overwriteExistingProject = false;
        }

        if (!isSourceProjectValid()) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_IllegalSourceProject"));
            return false;
        }
        if (!isArduinoDirectoryValid()) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_IllegalArduinoDirectory"));
            return false;
        }
        if (!isArduinoVersionValid()) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_IllegalArduinoVersion"));
            return false;
        }
        if (!isPlatformDirectoryValid()) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_IllegalPlatformDirectory"));
            return false;
        }
        if (!isBoardValid()) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_UnknownArduinoBoard"));
            return false;
        }
        // Safe to call only after isBoardValid() has confirmed board != null.
        if (!isToolchainValid()) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_NoMatchingToolchainFound"));
            return false;
        }
        if (!isValidProjectName()) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_IllegalProjectName"));
            return false;
        }

        File f = new File( readLocationStringFromField(view.targetProjectLocationField) ).getAbsoluteFile();
        if (getCanonicalFile(f) == null) {
            String message = NbBundle.getMessage(ProjectSetupPanel.class, "MSG_IllegalProjectLocation");
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, message);
            return false;
        }

        File destFolder = getCanonicalFile( new File( readLocationStringFromField(view.projectDirectoryField) ).getAbsoluteFile());
        if (destFolder == null) {
            String message = NbBundle.getMessage(ProjectSetupPanel.class, "MSG_ProjectFolderIllegal");
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, message);
            return false;
        }

        File projLoc = getCanonicalFile(new File( readLocationStringFromField( view.targetProjectLocationField ) ).getAbsoluteFile());
        if (projLoc == null) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_ProjectFolderReadOnly"));
            return false;
        }

        if (destFolder.exists()) {
            if (destFolder.isFile()) {
                wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_NotAFolder", MAKEFILE_NAME));
                return false;
            }
            if (new File(destFolder.getPath() + File.separator + MAKEFILE_NAME).exists() && !overwriteExistingProject) {
                // A Makefile already exists in the destination folder and the
                // user has not opted to overwrite the existing project.
                wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_MakefileExists", MAKEFILE_NAME));
                return false;
            }
            File nbProj = new File(destFolder.getPath() + File.separator + "nbproject");
            // listFiles() returns null when nbproject is not a directory or an
            // I/O error occurs; treat that the same as an empty folder.
            File[] nbProjFiles = nbProj.listFiles();
            if (nbProjFiles != null && nbProjFiles.length != 0 && !overwriteExistingProject) {
                // nbproject folder exists and is not empty
                wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_ProjectFolderExists"));
                return false;
            }
        }

        if (validatePathLength(destFolder) == false) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_ErrorProjectNamePathTooLong"));
            return false;
        }

        // Set the error message to null if there is no warning message to display
        if (wizardDescriptor.getProperty(WizardDescriptor.PROP_WARNING_MESSAGE) == null) {
            wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, null);
        }

        return true;
    }

    @Override
    public final void addChangeListener(ChangeListener l) {
        synchronized (listeners) {
            listeners.add(l);
        }
    }

    @Override
    public final void removeChangeListener(ChangeListener l) {
        synchronized (listeners) {
            listeners.remove(l);
        }
    }

    /**
     * Pre-populates all fields from wizard properties, falling back to
     * persisted preferences and finally to sensible defaults.
     */
    @Override
    public void readSettings(WizardDescriptor settings) {
        wizardDescriptor = settings;

        // Source Project Location
        File lastSourceProjectLocation = (File) wizardDescriptor.getProperty(SOURCE_PROJECT_DIR.key());
        if (lastSourceProjectLocation == null) {
            String loc = NbPreferences.forModule(SelectProjectInfoPanel.class).get(LAST_SOURCE_PROJECT_LOCATION.key(), null);
            if (loc == null) {
                loc = System.getProperty("user.home");
            }
            lastSourceProjectLocation = new File(loc);
        }
        view.sourceProjectLocationField.setText(lastSourceProjectLocation.getAbsolutePath());

        // Target Project Location
        File lastTargetProjectLocation = (File) wizardDescriptor.getProperty(PROJECT_LOCATION.key());
        if (lastTargetProjectLocation == null) {
            String loc = NbPreferences.forModule(SelectProjectInfoPanel.class).get(LAST_PROJECT_LOCATION.key(), null);
            if (loc == null) {
                loc = System.getProperty("netbeans.projects.dir");
            }
            if (loc == null) {
                loc = System.getProperty("user.home");
            }
            lastTargetProjectLocation = new File(loc);
        }
        view.targetProjectLocationField.setText(lastTargetProjectLocation.getAbsolutePath());

        // Arduino Install Location
        File arduinoDir = (File) wizardDescriptor.getProperty(ARDUINO_DIR.key());
        if (arduinoDir == null) {
            String lastArduinoLocation = NbPreferences.forModule(SelectProjectInfoPanel.class).get(LAST_ARDUINO_LOCATION.key(), null);
            if ( lastArduinoLocation != null && Files.exists( Paths.get(lastArduinoLocation) ) ) {
                arduinoDir = new File(lastArduinoLocation);
            }
        }
        if (arduinoDir != null) {
            String currentArduinoLocation = readLocationStringFromField( view.arduinoLocationField );
            if ( !currentArduinoLocation.equals( arduinoDir.getAbsolutePath() ) ) {
                view.arduinoLocationField.setText(arduinoDir.getAbsolutePath());
            }
        }

        // Platform
        currentPlatform = (Platform) wizardDescriptor.getProperty(ARDUINO_PLATFORM.key());
        if (currentPlatform == null) {
            String vendorArch = NbPreferences.forModule(SelectProjectInfoPanel.class).get(LAST_ARDUINO_PLATFORM.key(), null);
            if ( vendorArch != null ) {
                // Persisted as "<vendor>:<architecture>"; guard against a
                // malformed preference value before indexing.
                String[] split = vendorArch.split(":");
                if (split.length >= 2) {
                    String vendor = split[0];
                    String architecture = split[1];
                    allPlatforms.stream().filter(
                        p -> vendor.equalsIgnoreCase(p.getVendor()) && architecture.equalsIgnoreCase(p.getArchitecture())
                    ).findFirst().ifPresent(
                        p -> currentPlatform = p
                    );
                }
            }
            if (currentPlatform == null) {
                // Either no preference was stored or it did not match any known
                // platform; fall back to the default (root) platform.
                try {
                    currentPlatform = new PlatformFactory().createPlatform(arduinoConfig.getSettingsPath(), ROOT_PLATFORM_VENDOR, ROOT_PLATFORM_ARCH);
                } catch (IOException ex) {
                    Exceptions.printStackTrace(ex);
                }
            }
        }
        if (currentPlatform != null) {
            view.platformCombo.setSelectedItem( currentPlatform );
            onPlatformChanged();
        }

        // Platform Location
        File platformCoreDir = (File) wizardDescriptor.getProperty(ARDUINO_PLATFORM_DIR.key());
        if (platformCoreDir == null) {
            String lastPlatformCoreLocation = NbPreferences.forModule(SelectProjectInfoPanel.class).get(LAST_ARDUINO_PLATFORM_LOCATION.key(), null);
            if ( lastPlatformCoreLocation != null && Files.exists( Paths.get(lastPlatformCoreLocation) ) ) {
                platformCoreDir = new File(lastPlatformCoreLocation);
            }
        }
        if (platformCoreDir != null) {
            view.platformLocationField.setText( platformCoreDir.getAbsolutePath() );
        } else if (currentPlatform != null) {
            // currentPlatform may still be null if the default platform could
            // not be created; avoid an NPE and leave the field for the user.
            view.platformLocationField.setText( currentPlatform.getRootPath().toString() );
        }

        // Target Device:
        String boardName = (String) wizardDescriptor.getProperty(BOARD_NAME.key());
        loadBoardsToCombo();
        if (boardName == null) {
            boardName = NbPreferences.forModule(SelectProjectInfoPanel.class).get(BOARD_NAME.key(), null);
        }
        if (boardName != null) {
            if ( boardIdLookup.containsKey(boardName) ) {
                view.boardCombo.setSelectedItem(boardName);
                updateBoard();
            } else {
                boardName = null;
            }
        }

        // Copy all dependencies (defaults to true when the property is unset):
        Object copyDependencies = wizardDescriptor.getProperty(COPY_CORE_FILES.key());
        view.copyDependenciesCheckBox.setSelected( copyDependencies != null ? (boolean) copyDependencies : true);

        // Target Project Directory:
        setTargetProjectDirectoryField();
    }

    /**
     * Writes all collected values into the wizard descriptor and persists
     * the user's choices (locations, platform, board) as preferences for the
     * next wizard invocation.
     */
    @Override
    public void storeSettings(WizardDescriptor settings) {
        String projectName = readLocationStringFromField( view.projectNameField );
        String sourceProjectDir = readLocationStringFromField( view.sourceProjectLocationField );
        String arduinoDir = readLocationStringFromField( view.arduinoLocationField );
        String platformDir = readLocationStringFromField(view.platformLocationField );
        String boardName = readSelectedValueFromComboBox(view.boardCombo);
        String targetLocation = readLocationStringFromField( view.targetProjectLocationField );
        String targetDir = readLocationStringFromField( view.projectDirectoryField );
        boolean copyCoreFiles = view.copyDependenciesCheckBox.isSelected();

        if ( !board.hasOptions() ) {
            // Boards without options have a fixed configuration, so the device
            // and toolchain can be stored right away.
            deviceAssistant.storeSettings(settings);
            settings.putProperty(BOARD_CONFIGURATION.key(), new BoardConfiguration(board));
        }

        settings.putProperty(SOURCE_PROJECT_DIR.key(), new File(sourceProjectDir));
        settings.putProperty(ARDUINO_DIR.key(), new File(arduinoDir));
        settings.putProperty(ARDUINO_PLATFORM.key(), currentPlatform );
        settings.putProperty(ARDUINO_PLATFORM_DIR.key(), new File(platformDir));
        settings.putProperty(BOARD_NAME.key(), boardName);
        settings.putProperty(BOARD.key(), board);
        settings.putProperty(COPY_CORE_FILES.key(), copyCoreFiles);
        settings.putProperty(DEVICE_HEADER_PRESENT.key(), false);
        settings.putProperty(PLUGIN_BOARD_PRESENT.key(), false);
        settings.putProperty(PROJECT_DIR.key(), new File(targetDir));
        settings.putProperty(PROJECT_NAME.key(), projectName);
        settings.putProperty(MAKE_FILENAME.key(), MAKEFILE_NAME);

        File projectsDir = new File(targetLocation);
        if (projectsDir.isDirectory()) {
            ProjectChooser.setProjectsFolder(projectsDir);
        }
        settings.putProperty(PROJECT_LOCATION.key(), projectsDir);
        settings.putProperty(MAIN_CLASS.key(), null);
        settings.putProperty(CREATE_MAIN_FILE.key(), Boolean.FALSE);
        settings.putProperty(MAIN_FILENAME.key(), null);
        settings.putProperty(MAIN_FILE_TEMPLATE.key(), null);

        NbPreferences.forModule(SelectProjectInfoPanel.class).put(LAST_PROJECT_LOCATION.key(), projectsDir.getAbsolutePath());
        NbPreferences.forModule(SelectProjectInfoPanel.class).put(LAST_SOURCE_PROJECT_LOCATION.key(), new File(sourceProjectDir).getParent());
        NbPreferences.forModule(SelectProjectInfoPanel.class).put(LAST_ARDUINO_LOCATION.key(), arduinoDir);
        NbPreferences.forModule(SelectProjectInfoPanel.class).put(LAST_ARDUINO_PLATFORM.key(), currentPlatform.getVendor() + ":" + currentPlatform.getArchitecture() );
        NbPreferences.forModule(SelectProjectInfoPanel.class).put(LAST_ARDUINO_PLATFORM_LOCATION.key(), platformDir);
        NbPreferences.forModule(SelectProjectInfoPanel.class).put(BOARD_NAME.key(), boardName);
    }

    //**************************************************
    //*************** EVENT LISTENERS ******************
    //**************************************************

    /** Asks for confirmation before allowing an existing project to be overwritten. */
    void overwriteCheckBoxActionPerformed(ActionEvent evt) {
        if (view.overwriteCheckBox.isSelected()) {
            MessageMediator mandm = Lookup.getDefault().lookup(MessageMediator.class);
            Message newMessage = new Message( NbBundle.getMessage(ProjectSetupPanel.class, "MSG_OverwriteConfirmationRequest"), "MPLAB X IDE", null, DialogBoxType.QUESTION_BLOCKING_YES_NO );
            int overwrite = mandm.handleMessage(newMessage, ActionList.DialogPopupOnly);
            if (overwrite != 0) {
                // Anything other than "yes" (0) reverts the checkbox.
                view.overwriteCheckBox.setSelected(false);
            }
        }
        wizardDescriptor.putProperty(OVERWRITE_EXISTING_PROJECT.key(), view.overwriteCheckBox.isSelected());
        fireChangeEvent();
    }

    void sourceProjectLocationButtonActionPerformed(ActionEvent evt) {
        showDirectoryChooser( view.sourceProjectLocationField, "DLG_SourceProjectLocation", new ArduinoProjectFileFilter() );
        String projectDir = readLocationStringFromField( view.sourceProjectLocationField );
        if (projectDir != null && !projectDir.isEmpty()) {
            // Default the target project name to the sketch directory name.
            view.projectNameField.setText(Paths.get(projectDir).getFileName().toString());
            setTargetProjectDirectoryField();
        }
        fireChangeEvent();
    }

    void targetProjectLocationButtonActionPerformed(ActionEvent evt) {
        showDirectoryChooser( view.targetProjectLocationField, "DLG_TargetProjectLocation" );
        setTargetProjectDirectoryField();
    }

    void arduinoLocationButtonActionPerformed(ActionEvent evt) {
        File arduinoDir = showDirectoryChooser( view.arduinoLocationField, "DLG_ArduinoDirectory" );
        if ( arduinoDir != null ) {
            fireChangeEvent();
        }
    }

    void platformLocationButtonActionPerformed(ActionEvent evt) {
        showDirectoryChooser(view.platformLocationField, "DLG_ArduinoCoreDirectory" );
        onPlatformLocationChanged();
    }

    /** Strips characters that are illegal in a project name as the user types. */
    void projectNameFieldKeyReleased(KeyEvent evt) {
        String projectName = view.projectNameField.getText().trim();
        if (projectName.endsWith("\\")
                || projectName.endsWith("/")
                || projectName.endsWith(File.separator + File.separator)
                || projectName.endsWith(":")
                || projectName.endsWith("*")
                || projectName.endsWith("?")
                || projectName.endsWith("\"")
                || projectName.endsWith("<")
                || projectName.endsWith(">")
                || projectName.endsWith("|")) {
            String newProjectName = projectName.substring(0, projectName.length() - 1);
            view.projectNameField.setText(newProjectName);
        }
        setTargetProjectDirectoryField();
    }

    /** Strips characters that are illegal in a path as the user types. */
    void targetProjectLocationFieldKeyReleased(KeyEvent evt) {
        String projectLoc = view.projectDirectoryField.getText().trim();
        // The separator that is NOT native on this OS.
        String badSlash = File.separator.equals(("/")) ? "\\" : "/";
        if (projectLoc.endsWith(badSlash)
                || projectLoc.endsWith(File.separator + File.separator)
                || projectLoc.endsWith("*")
                || projectLoc.endsWith("?")
                || projectLoc.endsWith("<")
                || projectLoc.endsWith(">")
                || projectLoc.endsWith("|")) {
            String newProjectLoc = projectLoc.substring(0, projectLoc.length() - 1);
            view.projectDirectoryField.setText(newProjectLoc);
        }
        setTargetProjectDirectoryField();
    }

    void platformLocationFieldFocusLost(FocusEvent evt) {
        onPlatformLocationChanged();
    }

    void arduinoLocationFieldFocusLost(FocusEvent evt) {
        fireChangeEvent();
    }

    void targetProjectLocationFieldFocusLost(FocusEvent evt) {
        fireChangeEvent();
    }

    void sourceProjectLocationFieldFocusLost(FocusEvent evt) {
        fireChangeEvent();
    }

    void boardComboItemStateChanged(ItemEvent evt) {
        updateBoard();
        fireChangeEvent();
    }

    void platformComboItemStateChanged(ItemEvent evt) {
        if ( evt.getStateChange() == ItemEvent.SELECTED && evt.getItem() instanceof Platform ) {
            currentPlatform = (Platform) evt.getItem();
            onPlatformChanged();
        }
    }

    //**************************************************
    //*************** PRIVATE METHODS ******************
    //**************************************************

    /** Reads a path field, dropping characters that are illegal in file names. */
    private String readLocationStringFromField( JTextField field ) {
        return field.getText().replaceAll("[*?\\\"<>|]", "").trim();
    }

    /** Returns the trimmed selected combo value, or {@code null} if nothing is selected. */
    private String readSelectedValueFromComboBox( JComboBox<String> comboBox ) {
        String value = (String) comboBox.getSelectedItem();
        if ( value != null ) {
            return value.trim();
        } else {
            return null;
        }
    }

    private File showDirectoryChooser(JTextField pathField, String dialogTitleKey) {
        return showDirectoryChooser(pathField, dialogTitleKey, null);
    }

    /**
     * Opens a directory chooser seeded from the given field; on approval the
     * chosen directory (or the parent of a chosen file) is written back into
     * the field and returned. Returns {@code null} on cancel.
     */
    private File showDirectoryChooser(JTextField pathField, String dialogTitleKey, FileFilter fileFilter) {
        String startDir = readLocationStringFromField(pathField);
        JFileChooser chooser = new JFileChooser();
        chooser.setCurrentDirectory(null);
        chooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
        chooser.setFileFilter( fileFilter );
        if (startDir.length() > 0) {
            File f = new File(startDir);
            if (f.exists()) {
                if (f.isFile()) {
                    chooser.setCurrentDirectory(f.getParentFile());
                } else if (f.isDirectory()) {
                    chooser.setCurrentDirectory(f);
                }
            }
        } else {
            File f = new File(System.getProperty("netbeans.projects.dir")); // NOI18N
            if (f.exists()) {
                chooser.setCurrentDirectory(f);
            }
        }
        chooser.setDialogTitle( NbBundle.getMessage(ProjectSetupPanel.class, dialogTitleKey) );
        if (JFileChooser.APPROVE_OPTION == chooser.showOpenDialog(view)) { //NOI18N
            File selectedFile = chooser.getSelectedFile();
            File selectedDir = selectedFile.isDirectory() ? selectedFile : selectedFile.getParentFile();
            pathField.setText(selectedDir.getAbsolutePath());
            return selectedDir;
        }
        return null;
    }

    /**
     * Checks the projected build-output path length against the Windows
     * 260-character limit. Returns {@code false} when the path is too long;
     * also raises/clears a warning when the path is getting close.
     */
    private boolean validatePathLength(File projFolder) {
        // Windows is the only known operating system with a relatively short (260) limitation on path length that can cause issues during the make process
        if (System.getProperty("os.name").contains("Windows")) {
            int workingLength = projFolder.getAbsolutePath().length();
            workingLength += projFolder.getName().length();
            workingLength += "/dist/".length(); // NOI18N
            workingLength += "default".length() + 1; // NOI18N
            workingLength += 2 * ("production".length() + 1); // NOI18N
            workingLength += 4; // extension name and .
            if (workingLength < 259) {
                if (workingLength >= 130) {
                    wizardDescriptor.putProperty(WizardDescriptor.PROP_ERROR_MESSAGE, null);
                    wizardDescriptor.putProperty(WizardDescriptor.PROP_WARNING_MESSAGE, NbBundle.getMessage(ProjectSetupPanel.class, "MSG_WarningProjectNamePathTooLong"));
                } else {
                    wizardDescriptor.putProperty(WizardDescriptor.PROP_WARNING_MESSAGE, null);
                }
                return true;
            } else {
                return false;
            }
        } else {
            return true;
        }
    }

    /** A valid source project is an existing directory containing at least one .ino sketch. */
    private boolean isSourceProjectValid() {
        File sourceProjectDir = new File( readLocationStringFromField(view.sourceProjectLocationField ) );
        if (!sourceProjectDir.exists()) {
            return false;
        }
        if (!sourceProjectDir.isDirectory()) {
            return false;
        }
        for (String f : sourceProjectDir.list()) {
            if (f.endsWith(".ino")) {
                return true;
            }
        }
        return false;
    }

    private boolean isArduinoDirectoryValid() {
        Path p = Paths.get( readLocationStringFromField( view.arduinoLocationField ) );
        return arduinoConfig.isValidArduinoInstallPath(p);
    }

    private boolean isArduinoVersionValid() {
        return arduinoConfig.isCurrentVersionValid( new Version(MINIMUM_ARDUINO_VERSION) );
    }

    private boolean isPlatformDirectoryValid() {
        Path p = Paths.get( readLocationStringFromField(view.platformLocationField ) );
        return platformFactory.isValidPlatformRootPath(p);
    }

    private boolean isBoardValid() {
        return board != null;
    }

    /** Must only be called after {@link #isBoardValid()} has returned {@code true}. */
    private boolean isToolchainValid() {
        return board.hasOptions() ? true : deviceAssistant.isToolchainValid();
    }

    /**
     * Rejects names containing characters that are invalid on Windows, then
     * verifies the name by actually creating (and deleting) a temp file.
     */
    private boolean isValidProjectName() {
        String projectName = view.projectNameField.getText().trim();
        // unix allows a lot of strange names, but let's prohibit this for project
        // using symbols invalid on Windows
        if (projectName.length() == 0 || projectName.startsWith(" ")
                || projectName.contains("\\")
                || projectName.contains("/")
                || projectName.contains(":")
                || projectName.contains("*")
                || projectName.contains("?")
                || projectName.contains("\"")
                || projectName.contains("<")
                || projectName.contains(">")
                || projectName.contains("|")) {
            return false;
        }

        // check ability to create file with specified name on target OS
        boolean ok = false;
        try {
            File file = File.createTempFile(projectName + "dummy", "");
            ok = true;
            file.delete();
        } catch (Exception ex) {
            Exceptions.printStackTrace(ex);
        }
        return ok;
    }

    /** Derives the ".X" project directory from the target location and project name. */
    private void setTargetProjectDirectoryField() {
        String targetProjectLocation = readLocationStringFromField( view.targetProjectLocationField );
        String projName = view.projectNameField.getText().trim();
        view.projectDirectoryField.setText( Paths.get( targetProjectLocation, projName + ".X" ).toAbsolutePath().toString() );
        checkForExistingProject();
        fireChangeEvent();
    }

    private void onPlatformLocationChanged() {
        resolvePlatformFromPath();
        loadBoardsToCombo();
        fireChangeEvent();
    }

    private void onPlatformChanged() {
        loadBoardsToCombo();
        view.platformLocationField.setText( currentPlatform.getRootPath().toString() );
        updateBoard();
        fireChangeEvent();
    }

    /** Enables the overwrite checkbox only when the target directory already holds a project. */
    private void checkForExistingProject() {
        //if project already exists, enable the check box
        if ("".equalsIgnoreCase(view.projectNameField.getText().trim())) {
            view.overwriteCheckBox.setEnabled(false);
        } else {
            File projFolder = FileUtil.normalizeFile( new File( readLocationStringFromField(view.projectDirectoryField) ) );
            FileObject dirFO = FileUtil.toFileObject(projFolder);
            Project proj = null;
            if (dirFO != null) {
                try {
                    proj = ProjectManager.getDefault().findProject(dirFO);
                } catch (IOException | IllegalArgumentException ex) {
                    view.overwriteCheckBox.setEnabled(false);
                }
            }
            if (proj == null) {
                view.overwriteCheckBox.setEnabled(false);
            } else {
                view.overwriteCheckBox.setEnabled(true);
            }
        }
    }

    /**
     * Repopulates the board combo from the current platform, preserving the
     * current selection when the board still exists on the new platform.
     */
    private void loadBoardsToCombo() {
        if (currentPlatform == null) {
            // No platform resolved yet (e.g. default platform creation failed);
            // leave the combo as-is instead of throwing an NPE.
            return;
        }
        String currentlySelectedBoardName = (view.boardCombo.getSelectedItem() != null) ? view.boardCombo.getSelectedItem().toString() : null;
        boardIdLookup = currentPlatform.getBoardNamesToIDsLookup();
        List<String> boardNames = new ArrayList<>(boardIdLookup.keySet());
        // Sort the board names list in alphabetical order:
        Collections.sort(boardNames);
        // Set up the combo box:
        DefaultComboBoxModel<String> cbm = new DefaultComboBoxModel<>(boardNames.toArray(new String[boardNames.size()]));
        view.boardCombo.setModel(cbm);
        // TODO: Verify whether calling TypeAheadComboBox.enable many times does not have adverse effects
        TypeAheadComboBox.enable(view.boardCombo);
        if ( currentlySelectedBoardName != null && boardNames.contains(currentlySelectedBoardName) ) {
            view.boardCombo.setSelectedItem(currentlySelectedBoardName);
        }
    }

    /** Resolves the selected board name to a Board and updates device/toolchain accordingly. */
    private void updateBoard() {
        String boardName = readSelectedValueFromComboBox(view.boardCombo);
        String boardId = boardIdLookup.get(boardName);
        if ( boardId != null ) {
            board = currentPlatform.getBoard(boardId).orElseThrow( () -> new RuntimeException("Failed to find a board with id: \""+boardId+"\""));
            if ( !board.hasOptions() ) {
                deviceAssistant.updateDeviceAndToolchain( new BoardConfiguration(board) );
            }
        } else {
            board = null;
            deviceAssistant.updateDeviceAndToolchain(null);
        }
    }

    /** Notifies listeners outside the lock; the snapshot copy avoids ConcurrentModificationException. */
    private void fireChangeEvent() {
        Iterator<ChangeListener> it;
        synchronized (listeners) {
            it = new HashSet<>(listeners).iterator();
        }
        ChangeEvent ev = new ChangeEvent(this);
        while (it.hasNext()) {
            it.next().stateChanged(ev);
        }
    }

    /** Returns the canonical form of the file, or {@code null} when it cannot be resolved. */
    private static File getCanonicalFile(File file) {
        try {
            return file.getCanonicalFile();
        } catch (IOException e) {
            // TODO: What should we do with this exception?
            return null;
        }
    }

    /** Re-resolves the current platform from the manually entered platform path. */
    private void resolvePlatformFromPath() {
        Path p = Paths.get( readLocationStringFromField(view.platformLocationField) );
        if ( platformFactory.isValidPlatformRootPath(p) ) {
            try {
                currentPlatform = platformFactory.createPlatformFromRootDirectory(p);
            } catch (IOException ex) {
                Exceptions.printStackTrace(ex);
            }
        }
        loadBoardsToCombo();
    }

    /** Combo model exposing the platform list in the order it was given. */
    private static class PlatformComboModel extends DefaultComboBoxModel<Platform> {
        PlatformComboModel( List<Platform> platforms ) {
            super( platforms.toArray( new Platform[platforms.size()] ) );
        }
    }
}
|
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.sql;
import static org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils.BOOLEAN;
import static org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils.INTEGER;
import static org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils.VARCHAR;
import static org.apache.beam.sdk.extensions.sql.utils.DateTimeUtils.parseTimestampWithUTCTimeZone;
import static org.apache.beam.sdk.schemas.Schema.toSchema;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import java.util.stream.Stream;
import org.apache.beam.sdk.extensions.sql.impl.ParseException;
import org.apache.beam.sdk.extensions.sql.meta.Table;
import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableProvider;
import org.apache.beam.sdk.extensions.sql.meta.provider.text.TextTableProvider;
import org.apache.beam.sdk.extensions.sql.meta.store.InMemoryMetaStore;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.schemas.Schema.Field;
import org.apache.beam.sdk.values.Row;
import org.junit.Test;
/** UnitTest for {@link BeamSqlCli}. */
public class BeamSqlCliTest {
  /**
   * CREATE EXTERNAL TABLE with scalar columns: the registered table's schema must match the
   * declared columns (DDL columns come back nullable, with their COMMENT as the description).
   */
  @Test
  public void testExecute_createTextTable() throws Exception {
    InMemoryMetaStore metaStore = new InMemoryMetaStore();
    metaStore.registerProvider(new TextTableProvider());
    BeamSqlCli cli = new BeamSqlCli().metaStore(metaStore);
    cli.execute(
        "CREATE EXTERNAL TABLE person (\n"
        + "id int COMMENT 'id', \n"
        + "name varchar COMMENT 'name', \n"
        + "age int COMMENT 'age') \n"
        + "TYPE 'text' \n"
        + "COMMENT '' LOCATION '/home/admin/orders'");
    Table table = metaStore.getTables().get("person");
    assertNotNull(table);
    assertEquals(
        Stream.of(
            Field.of("id", INTEGER).withDescription("id").withNullable(true),
            Field.of("name", VARCHAR).withDescription("name").withNullable(true),
            Field.of("age", INTEGER).withDescription("age").withNullable(true))
            .collect(toSchema()),
        table.getSchema());
  }
  /** ARRAY columns, including a nested ARRAY<ARRAY<...>>, parse into array field types. */
  @Test
  public void testExecute_createTableWithPrefixArrayField() throws Exception {
    InMemoryMetaStore metaStore = new InMemoryMetaStore();
    metaStore.registerProvider(new TextTableProvider());
    BeamSqlCli cli = new BeamSqlCli().metaStore(metaStore);
    cli.execute(
        "CREATE EXTERNAL TABLE person (\n"
        + "id int COMMENT 'id', \n"
        + "name varchar COMMENT 'name', \n"
        + "age int COMMENT 'age', \n"
        + "tags ARRAY<VARCHAR>, \n"
        + "matrix ARRAY<ARRAY<INTEGER>> \n"
        + ") \n"
        + "TYPE 'text' \n"
        + "COMMENT '' LOCATION '/home/admin/orders'");
    Table table = metaStore.getTables().get("person");
    assertNotNull(table);
    assertEquals(
        Stream.of(
            Field.of("id", INTEGER).withDescription("id").withNullable(true),
            Field.of("name", VARCHAR).withDescription("name").withNullable(true),
            Field.of("age", INTEGER).withDescription("age").withNullable(true),
            Field.of("tags", Schema.FieldType.array(VARCHAR)).withNullable(true),
            Field.of("matrix", Schema.FieldType.array(Schema.FieldType.array(INTEGER)))
                .withNullable(true))
            .collect(toSchema()),
        table.getSchema());
  }
  /** MAP columns, including a nested MAP value type, parse into map field types. */
  @Test
  public void testExecute_createTableWithPrefixMapField() throws Exception {
    InMemoryMetaStore metaStore = new InMemoryMetaStore();
    metaStore.registerProvider(new TextTableProvider());
    BeamSqlCli cli = new BeamSqlCli().metaStore(metaStore);
    cli.execute(
        "CREATE EXTERNAL TABLE person (\n"
        + "id int COMMENT 'id', \n"
        + "name varchar COMMENT 'name', \n"
        + "age int COMMENT 'age', \n"
        + "tags MAP<VARCHAR, VARCHAR>, \n"
        + "nestedMap MAP<INTEGER, MAP<VARCHAR, INTEGER>> \n"
        + ") \n"
        + "TYPE 'text' \n"
        + "COMMENT '' LOCATION '/home/admin/orders'");
    Table table = metaStore.getTables().get("person");
    assertNotNull(table);
    assertEquals(
        Stream.of(
            Field.of("id", INTEGER).withDescription("id").withNullable(true),
            Field.of("name", VARCHAR).withDescription("name").withNullable(true),
            Field.of("age", INTEGER).withDescription("age").withNullable(true),
            Field.of("tags", Schema.FieldType.map(VARCHAR, VARCHAR)).withNullable(true),
            Field.of(
                "nestedMap",
                Schema.FieldType.map(INTEGER, Schema.FieldType.map(VARCHAR, INTEGER)))
                .withNullable(true))
            .collect(toSchema()),
        table.getSchema());
  }
  /**
   * ROW columns in both supported syntaxes — parenthesised ROW ( ... ) and angular ROW< ... > —
   * must produce identical nested row schemas; nested fields are nullable by default.
   */
  @Test
  public void testExecute_createTableWithRowField() throws Exception {
    InMemoryMetaStore metaStore = new InMemoryMetaStore();
    metaStore.registerProvider(new TextTableProvider());
    BeamSqlCli cli = new BeamSqlCli().metaStore(metaStore);
    cli.execute(
        "CREATE EXTERNAL TABLE person (\n"
        + "id int COMMENT 'id', \n"
        + "name varchar COMMENT 'name', \n"
        + "age int COMMENT 'age', \n"
        + "address ROW ( \n"
        + " street VARCHAR, \n"
        + " country VARCHAR \n"
        + " ), \n"
        + "addressAngular ROW< \n"
        + " street VARCHAR, \n"
        + " country VARCHAR \n"
        + " >, \n"
        + "isRobot BOOLEAN"
        + ") \n"
        + "TYPE 'text' \n"
        + "COMMENT '' LOCATION '/home/admin/orders'");
    Table table = metaStore.getTables().get("person");
    assertNotNull(table);
    assertEquals(
        Stream.of(
            Field.of("id", INTEGER).withDescription("id").withNullable(true),
            Field.of("name", VARCHAR).withDescription("name").withNullable(true),
            Field.of("age", INTEGER).withDescription("age").withNullable(true),
            Field.of(
                "address",
                Schema.FieldType.row(
                    Schema.builder()
                        .addNullableField("street", VARCHAR)
                        .addNullableField("country", VARCHAR)
                        .build()))
                .withNullable(true),
            Field.of(
                "addressAngular",
                Schema.FieldType.row(
                    Schema.builder()
                        .addNullableField("street", VARCHAR)
                        .addNullableField("country", VARCHAR)
                        .build()))
                .withNullable(true),
            Field.of("isRobot", BOOLEAN).withNullable(true))
            .collect(toSchema()),
        table.getSchema());
  }
  /** DROP TABLE removes the table from the metastore. */
  @Test
  public void testExecute_dropTable() throws Exception {
    InMemoryMetaStore metaStore = new InMemoryMetaStore();
    metaStore.registerProvider(new TextTableProvider());
    BeamSqlCli cli = new BeamSqlCli().metaStore(metaStore);
    cli.execute(
        "CREATE EXTERNAL TABLE person (\n"
        + "id int COMMENT 'id', \n"
        + "name varchar COMMENT 'name', \n"
        + "age int COMMENT 'age') \n"
        + "TYPE 'text' \n"
        + "COMMENT '' LOCATION '/home/admin/orders'");
    Table table = metaStore.getTables().get("person");
    assertNotNull(table);
    cli.execute("drop table person");
    table = metaStore.getTables().get("person");
    assertNull(table);
  }
  /**
   * After DROP TABLE the planner must no longer resolve the table: explaining a query against it
   * is expected to fail with a ParseException.
   */
  @Test(expected = ParseException.class)
  public void testExecute_dropTable_assertTableRemovedFromPlanner() throws Exception {
    InMemoryMetaStore metaStore = new InMemoryMetaStore();
    metaStore.registerProvider(new TextTableProvider());
    BeamSqlCli cli = new BeamSqlCli().metaStore(metaStore);
    cli.execute(
        "CREATE EXTERNAL TABLE person (\n"
        + "id int COMMENT 'id', \n"
        + "name varchar COMMENT 'name', \n"
        + "age int COMMENT 'age') \n"
        + "TYPE 'text' \n"
        + "COMMENT '' LOCATION '/home/admin/orders'");
    cli.execute("drop table person");
    cli.explainQuery("select * from person");
  }
  /** explainQuery renders the physical plan; pins the exact Calc-over-IOSource plan text. */
  @Test
  public void testExplainQuery() throws Exception {
    InMemoryMetaStore metaStore = new InMemoryMetaStore();
    metaStore.registerProvider(new TextTableProvider());
    BeamSqlCli cli = new BeamSqlCli().metaStore(metaStore);
    cli.execute(
        "CREATE EXTERNAL TABLE person (\n"
        + "id int COMMENT 'id', \n"
        + "name varchar COMMENT 'name', \n"
        + "age int COMMENT 'age') \n"
        + "TYPE 'text' \n"
        + "COMMENT '' LOCATION '/home/admin/orders'");
    String plan = cli.explainQuery("select * from person");
    assertThat(
        plan,
        equalTo(
            "BeamCalcRel(expr#0..2=[{inputs}], proj#0..2=[{exprs}])\n"
            + " BeamIOSourceRel(table=[[beam, person]])\n"));
  }
  /**
   * End-to-end check of DATE/TIME/TIMESTAMP literals: INSERT through the CLI, then verify the row
   * stored by the test table provider round-trips each temporal value.
   */
  @Test
  public void test_time_types() throws Exception {
    InMemoryMetaStore metaStore = new InMemoryMetaStore();
    TestTableProvider testTableProvider = new TestTableProvider();
    metaStore.registerProvider(testTableProvider);
    BeamSqlCli cli = new BeamSqlCli().metaStore(metaStore);
    cli.execute(
        "CREATE EXTERNAL TABLE test_table (\n"
        + "f_date DATE, \n"
        + "f_time TIME, \n"
        + "f_ts TIMESTAMP"
        + ") \n"
        + "TYPE 'test'");
    cli.execute(
        "INSERT INTO test_table VALUES ("
        + "DATE '2018-11-01', "
        + "TIME '15:23:59', "
        + "TIMESTAMP '2018-07-01 21:26:07.123' )");
    Table table = metaStore.getTables().get("test_table");
    assertNotNull(table);
    TestTableProvider.TableWithRows tableWithRows = testTableProvider.tables().get(table.getName());
    assertEquals(1, tableWithRows.getRows().size());
    Row row = tableWithRows.getRows().get(0);
    assertEquals(3, row.getFieldCount());
    // test DATE field
    assertEquals("2018-11-01", row.getDateTime("f_date").toString("yyyy-MM-dd"));
    // test TIME field
    assertEquals("15:23:59.000", row.getDateTime("f_time").toString("HH:mm:ss.SSS"));
    // test TIMESTAMP field
    assertEquals(parseTimestampWithUTCTimeZone("2018-07-01 21:26:07.123"), row.getDateTime("f_ts"));
  }
}
|
|
/*
* Copyright (C) 2009 The Libphonenumber Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.i18n.phonenumbers;
import com.google.i18n.phonenumbers.Phonemetadata.NumberFormat;
import com.google.i18n.phonenumbers.Phonemetadata.PhoneMetadata;
import com.google.i18n.phonenumbers.Phonemetadata.PhoneMetadataCollection;
import com.google.i18n.phonenumbers.Phonemetadata.PhoneNumberDesc;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
/**
* Library to build phone number metadata from the XML format.
*
* @author Shaopeng Jia
*/
public class BuildMetadataFromXml {
  private static final Logger LOGGER = Logger.getLogger(BuildMetadataFromXml.class.getName());
  // String constants used to fetch the XML nodes and attributes.
  private static final String CARRIER_CODE_FORMATTING_RULE = "carrierCodeFormattingRule";
  private static final String CARRIER_SPECIFIC = "carrierSpecific";
  private static final String COUNTRY_CODE = "countryCode";
  private static final String EMERGENCY = "emergency";
  private static final String EXAMPLE_NUMBER = "exampleNumber";
  private static final String FIXED_LINE = "fixedLine";
  private static final String FORMAT = "format";
  private static final String GENERAL_DESC = "generalDesc";
  private static final String INTERNATIONAL_PREFIX = "internationalPrefix";
  private static final String INTL_FORMAT = "intlFormat";
  private static final String LEADING_DIGITS = "leadingDigits";
  private static final String LEADING_ZERO_POSSIBLE = "leadingZeroPossible";
  private static final String MAIN_COUNTRY_FOR_CODE = "mainCountryForCode";
  private static final String MOBILE = "mobile";
  private static final String MOBILE_NUMBER_PORTABLE_REGION = "mobileNumberPortableRegion";
  private static final String NATIONAL_NUMBER_PATTERN = "nationalNumberPattern";
  private static final String NATIONAL_PREFIX = "nationalPrefix";
  private static final String NATIONAL_PREFIX_FORMATTING_RULE = "nationalPrefixFormattingRule";
  private static final String NATIONAL_PREFIX_OPTIONAL_WHEN_FORMATTING =
      "nationalPrefixOptionalWhenFormatting";
  private static final String NATIONAL_PREFIX_FOR_PARSING = "nationalPrefixForParsing";
  private static final String NATIONAL_PREFIX_TRANSFORM_RULE = "nationalPrefixTransformRule";
  private static final String NO_INTERNATIONAL_DIALLING = "noInternationalDialling";
  private static final String NUMBER_FORMAT = "numberFormat";
  private static final String PAGER = "pager";
  private static final String PATTERN = "pattern";
  private static final String PERSONAL_NUMBER = "personalNumber";
  private static final String POSSIBLE_NUMBER_PATTERN = "possibleNumberPattern";
  private static final String PREFERRED_EXTN_PREFIX = "preferredExtnPrefix";
  private static final String PREFERRED_INTERNATIONAL_PREFIX = "preferredInternationalPrefix";
  private static final String PREMIUM_RATE = "premiumRate";
  private static final String SHARED_COST = "sharedCost";
  private static final String SHORT_CODE = "shortCode";
  private static final String STANDARD_RATE = "standardRate";
  private static final String TOLL_FREE = "tollFree";
  private static final String UAN = "uan";
  private static final String VOICEMAIL = "voicemail";
  private static final String VOIP = "voip";
  // Build the PhoneMetadataCollection from the input XML file.
  // NOTE(review): the parser is only ever fed our own metadata files; if it is ever pointed at
  // untrusted XML, DTDs/external entities should be disabled on the factory first (XXE).
  public static PhoneMetadataCollection buildPhoneMetadataCollection(String inputXmlFile,
      boolean liteBuild) throws Exception {
    DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder = builderFactory.newDocumentBuilder();
    File xmlFile = new File(inputXmlFile);
    Document document = builder.parse(xmlFile);
    document.getDocumentElement().normalize();
    Element rootElement = document.getDocumentElement();
    NodeList territory = rootElement.getElementsByTagName("territory");
    PhoneMetadataCollection.Builder metadataCollection = PhoneMetadataCollection.newBuilder();
    int numOfTerritories = territory.getLength();
    // TODO: Look for other uses of these constants and possibly pull them out into
    // a separate constants file.
    boolean isShortNumberMetadata = inputXmlFile.contains("ShortNumberMetadata");
    boolean isAlternateFormatsMetadata = inputXmlFile.contains("PhoneNumberAlternateFormats");
    for (int i = 0; i < numOfTerritories; i++) {
      Element territoryElement = (Element) territory.item(i);
      String regionCode = "";
      // For the main metadata file this should always be set, but for other supplementary data
      // files the country calling code may be all that is needed.
      if (territoryElement.hasAttribute("id")) {
        regionCode = territoryElement.getAttribute("id");
      }
      PhoneMetadata metadata = loadCountryMetadata(regionCode, territoryElement, liteBuild,
          isShortNumberMetadata, isAlternateFormatsMetadata);
      metadataCollection.addMetadata(metadata);
    }
    return metadataCollection.build();
  }
  // Build a mapping from a country calling code to the region codes which denote the country/region
  // represented by that country code. In the case of multiple countries sharing a calling code,
  // such as the NANPA countries, the one indicated with "isMainCountryForCode" in the metadata
  // should be first.
  public static Map<Integer, List<String>> buildCountryCodeToRegionCodeMap(
      PhoneMetadataCollection metadataCollection) {
    Map<Integer, List<String>> countryCodeToRegionCodeMap =
        new TreeMap<Integer, List<String>>();
    for (PhoneMetadata metadata : metadataCollection.getMetadataList()) {
      String regionCode = metadata.getId();
      int countryCode = metadata.getCountryCode();
      if (countryCodeToRegionCodeMap.containsKey(countryCode)) {
        if (metadata.getMainCountryForCode()) {
          // The main country for a shared calling code must come first in the list.
          countryCodeToRegionCodeMap.get(countryCode).add(0, regionCode);
        } else {
          countryCodeToRegionCodeMap.get(countryCode).add(regionCode);
        }
      } else {
        // For most countries, there will be only one region code for the country calling code.
        List<String> listWithRegionCode = new ArrayList<String>(1);
        if (!regionCode.isEmpty()) { // For alternate formats, there are no region codes at all.
          listWithRegionCode.add(regionCode);
        }
        countryCodeToRegionCodeMap.put(countryCode, listWithRegionCode);
      }
    }
    return countryCodeToRegionCodeMap;
  }
  /** Validates a regular expression without stripping whitespace. See the two-arg overload. */
  private static String validateRE(String regex) {
    return validateRE(regex, false);
  }
  // @VisibleForTesting
  static String validateRE(String regex, boolean removeWhitespace) {
    // Removes all the whitespace and newline from the regexp. Not using pattern compile options to
    // make it work across programming languages.
    String compressedRegex = removeWhitespace ? regex.replaceAll("\\s", "") : regex;
    Pattern.compile(compressedRegex);
    // We don't ever expect to see | followed by a ) in our metadata - this would be an indication
    // of a bug. If one wants to make something optional, we prefer ? to using an empty group.
    int errorIndex = compressedRegex.indexOf("|)");
    if (errorIndex >= 0) {
      LOGGER.log(Level.SEVERE,
          "Error with original regex: " + regex + "\n| should not be followed directly " +
          "by ) in phone number regular expressions.");
      throw new PatternSyntaxException("| followed by )", compressedRegex, errorIndex);
    }
    // return the regex if it is of correct syntax, i.e. compile did not fail with a
    // PatternSyntaxException.
    return compressedRegex;
  }
  /**
   * Returns the national prefix of the provided country element.
   */
  // @VisibleForTesting
  static String getNationalPrefix(Element element) {
    return element.hasAttribute(NATIONAL_PREFIX) ? element.getAttribute(NATIONAL_PREFIX) : "";
  }
  /**
   * Reads the territory-level attributes (country code, prefixes, flags) from the given element
   * into a fresh {@code PhoneMetadata.Builder}.
   */
  // @VisibleForTesting
  static PhoneMetadata.Builder loadTerritoryTagMetadata(String regionCode, Element element,
                                                        String nationalPrefix) {
    PhoneMetadata.Builder metadata = PhoneMetadata.newBuilder();
    metadata.setId(regionCode);
    if (element.hasAttribute(COUNTRY_CODE)) {
      metadata.setCountryCode(Integer.parseInt(element.getAttribute(COUNTRY_CODE)));
    }
    if (element.hasAttribute(LEADING_DIGITS)) {
      metadata.setLeadingDigits(validateRE(element.getAttribute(LEADING_DIGITS)));
    }
    metadata.setInternationalPrefix(validateRE(element.getAttribute(INTERNATIONAL_PREFIX)));
    if (element.hasAttribute(PREFERRED_INTERNATIONAL_PREFIX)) {
      String preferredInternationalPrefix = element.getAttribute(PREFERRED_INTERNATIONAL_PREFIX);
      metadata.setPreferredInternationalPrefix(preferredInternationalPrefix);
    }
    if (element.hasAttribute(NATIONAL_PREFIX_FOR_PARSING)) {
      metadata.setNationalPrefixForParsing(
          validateRE(element.getAttribute(NATIONAL_PREFIX_FOR_PARSING), true));
      if (element.hasAttribute(NATIONAL_PREFIX_TRANSFORM_RULE)) {
        metadata.setNationalPrefixTransformRule(
            validateRE(element.getAttribute(NATIONAL_PREFIX_TRANSFORM_RULE)));
      }
    }
    if (!nationalPrefix.isEmpty()) {
      metadata.setNationalPrefix(nationalPrefix);
      // Default the parsing prefix to the formatting prefix when not set explicitly above.
      if (!metadata.hasNationalPrefixForParsing()) {
        metadata.setNationalPrefixForParsing(nationalPrefix);
      }
    }
    if (element.hasAttribute(PREFERRED_EXTN_PREFIX)) {
      metadata.setPreferredExtnPrefix(element.getAttribute(PREFERRED_EXTN_PREFIX));
    }
    if (element.hasAttribute(MAIN_COUNTRY_FOR_CODE)) {
      metadata.setMainCountryForCode(true);
    }
    if (element.hasAttribute(LEADING_ZERO_POSSIBLE)) {
      metadata.setLeadingZeroPossible(true);
    }
    if (element.hasAttribute(MOBILE_NUMBER_PORTABLE_REGION)) {
      metadata.setMobileNumberPortableRegion(true);
    }
    return metadata;
  }
  /**
   * Extracts the pattern for international format. If there is no intlFormat, default to using the
   * national format. If the intlFormat is set to "NA" the intlFormat should be ignored.
   *
   * @throws RuntimeException if multiple intlFormats have been encountered.
   * @return whether an international number format is defined.
   */
  // @VisibleForTesting
  static boolean loadInternationalFormat(PhoneMetadata.Builder metadata,
                                         Element numberFormatElement,
                                         NumberFormat nationalFormat) {
    NumberFormat.Builder intlFormat = NumberFormat.newBuilder();
    NodeList intlFormatPattern = numberFormatElement.getElementsByTagName(INTL_FORMAT);
    boolean hasExplicitIntlFormatDefined = false;
    if (intlFormatPattern.getLength() > 1) {
      LOGGER.log(Level.SEVERE,
          "A maximum of one intlFormat pattern for a numberFormat element should be " +
          "defined.");
      String countryId = metadata.getId().length() > 0 ?
          metadata.getId() : Integer.toString(metadata.getCountryCode());
      throw new RuntimeException("Invalid number of intlFormat patterns for country: " + countryId);
    } else if (intlFormatPattern.getLength() == 0) {
      // Default to use the same as the national pattern if none is defined.
      intlFormat.mergeFrom(nationalFormat);
    } else {
      intlFormat.setPattern(numberFormatElement.getAttribute(PATTERN));
      setLeadingDigitsPatterns(numberFormatElement, intlFormat);
      String intlFormatPatternValue = intlFormatPattern.item(0).getFirstChild().getNodeValue();
      if (!intlFormatPatternValue.equals("NA")) {
        intlFormat.setFormat(intlFormatPatternValue);
      }
      hasExplicitIntlFormatDefined = true;
    }
    if (intlFormat.hasFormat()) {
      metadata.addIntlNumberFormat(intlFormat);
    }
    return hasExplicitIntlFormatDefined;
  }
  /**
   * Extracts the pattern for the national format.
   *
   * @throws RuntimeException if multiple or no formats have been encountered.
   */
  // @VisibleForTesting
  static void loadNationalFormat(PhoneMetadata.Builder metadata, Element numberFormatElement,
                                 NumberFormat.Builder format) {
    setLeadingDigitsPatterns(numberFormatElement, format);
    format.setPattern(validateRE(numberFormatElement.getAttribute(PATTERN)));
    NodeList formatPattern = numberFormatElement.getElementsByTagName(FORMAT);
    int numFormatPatterns = formatPattern.getLength();
    if (numFormatPatterns != 1) {
      LOGGER.log(Level.SEVERE, "One format pattern for a numberFormat element should be defined.");
      String countryId = metadata.getId().length() > 0 ?
          metadata.getId() : Integer.toString(metadata.getCountryCode());
      throw new RuntimeException("Invalid number of format patterns (" + numFormatPatterns +
          ") for country: " + countryId);
    }
    format.setFormat(formatPattern.item(0).getFirstChild().getNodeValue());
  }
  /**
   * Extracts the available formats from the provided DOM element. If it does not contain any
   * nationalPrefixFormattingRule, the one passed-in is retained. The nationalPrefix,
   * nationalPrefixFormattingRule and nationalPrefixOptionalWhenFormatting values are provided from
   * the parent (territory) element.
   */
  // @VisibleForTesting
  static void loadAvailableFormats(PhoneMetadata.Builder metadata,
                                   Element element, String nationalPrefix,
                                   String nationalPrefixFormattingRule,
                                   boolean nationalPrefixOptionalWhenFormatting) {
    String carrierCodeFormattingRule = "";
    if (element.hasAttribute(CARRIER_CODE_FORMATTING_RULE)) {
      carrierCodeFormattingRule = validateRE(
          getDomesticCarrierCodeFormattingRuleFromElement(element, nationalPrefix));
    }
    NodeList numberFormatElements = element.getElementsByTagName(NUMBER_FORMAT);
    boolean hasExplicitIntlFormatDefined = false;
    int numOfFormatElements = numberFormatElements.getLength();
    if (numOfFormatElements > 0) {
      for (int i = 0; i < numOfFormatElements; i++) {
        Element numberFormatElement = (Element) numberFormatElements.item(i);
        NumberFormat.Builder format = NumberFormat.newBuilder();
        if (numberFormatElement.hasAttribute(NATIONAL_PREFIX_FORMATTING_RULE)) {
          format.setNationalPrefixFormattingRule(
              getNationalPrefixFormattingRuleFromElement(numberFormatElement, nationalPrefix));
        } else {
          // Inherit the territory-level rule when the element defines none of its own.
          format.setNationalPrefixFormattingRule(nationalPrefixFormattingRule);
        }
        if (format.hasNationalPrefixFormattingRule()) {
          if (numberFormatElement.hasAttribute(NATIONAL_PREFIX_OPTIONAL_WHEN_FORMATTING)) {
            format.setNationalPrefixOptionalWhenFormatting(
                Boolean.valueOf(numberFormatElement.getAttribute(
                    NATIONAL_PREFIX_OPTIONAL_WHEN_FORMATTING)));
          } else {
            format.setNationalPrefixOptionalWhenFormatting(nationalPrefixOptionalWhenFormatting);
          }
        }
        if (numberFormatElement.hasAttribute(CARRIER_CODE_FORMATTING_RULE)) {
          format.setDomesticCarrierCodeFormattingRule(validateRE(
              getDomesticCarrierCodeFormattingRuleFromElement(numberFormatElement,
                                                              nationalPrefix)));
        } else {
          format.setDomesticCarrierCodeFormattingRule(carrierCodeFormattingRule);
        }
        loadNationalFormat(metadata, numberFormatElement, format);
        metadata.addNumberFormat(format);
        if (loadInternationalFormat(metadata, numberFormatElement, format.build())) {
          hasExplicitIntlFormatDefined = true;
        }
      }
      // Only a small number of regions need to specify the intlFormats in the xml. For the majority
      // of countries the intlNumberFormat metadata is an exact copy of the national NumberFormat
      // metadata. To minimize the size of the metadata file, we only keep intlNumberFormats that
      // actually differ in some way to the national formats.
      if (!hasExplicitIntlFormatDefined) {
        metadata.clearIntlNumberFormat();
      }
    }
  }
  /** Copies every leadingDigits child of the element onto the format builder, validated. */
  // @VisibleForTesting
  static void setLeadingDigitsPatterns(Element numberFormatElement, NumberFormat.Builder format) {
    NodeList leadingDigitsPatternNodes = numberFormatElement.getElementsByTagName(LEADING_DIGITS);
    int numOfLeadingDigitsPatterns = leadingDigitsPatternNodes.getLength();
    if (numOfLeadingDigitsPatterns > 0) {
      for (int i = 0; i < numOfLeadingDigitsPatterns; i++) {
        format.addLeadingDigitsPattern(
            validateRE((leadingDigitsPatternNodes.item(i)).getFirstChild().getNodeValue(), true));
      }
    }
  }
  // @VisibleForTesting
  static String getNationalPrefixFormattingRuleFromElement(Element element,
                                                           String nationalPrefix) {
    String nationalPrefixFormattingRule = element.getAttribute(NATIONAL_PREFIX_FORMATTING_RULE);
    // Replace $NP with national prefix and $FG with the first group ($1).
    nationalPrefixFormattingRule =
        nationalPrefixFormattingRule.replaceFirst("\\$NP", nationalPrefix)
            .replaceFirst("\\$FG", "\\$1");
    return nationalPrefixFormattingRule;
  }
  // @VisibleForTesting
  static String getDomesticCarrierCodeFormattingRuleFromElement(Element element,
                                                                String nationalPrefix) {
    String carrierCodeFormattingRule = element.getAttribute(CARRIER_CODE_FORMATTING_RULE);
    // Replace $FG with the first group ($1) and $NP with the national prefix.
    carrierCodeFormattingRule = carrierCodeFormattingRule.replaceFirst("\\$FG", "\\$1")
        .replaceFirst("\\$NP", nationalPrefix);
    return carrierCodeFormattingRule;
  }
  // @VisibleForTesting
  static boolean isValidNumberType(String numberType) {
    return numberType.equals(FIXED_LINE) || numberType.equals(MOBILE) ||
        numberType.equals(GENERAL_DESC);
  }
  /**
   * Processes a phone number description element from the XML file and returns it as a
   * PhoneNumberDesc. If the description element is a fixed line or mobile number, the general
   * description will be used to fill in the whole element if necessary, or any components that are
   * missing. For all other types, the general description will only be used to fill in missing
   * components if the type has a partial definition. For example, if no "tollFree" element exists,
   * we assume there are no toll free numbers for that locale, and return a phone number description
   * with "NA" for both the national and possible number patterns.
   *
   * @param generalDesc a generic phone number description that will be used to fill in missing
   *     parts of the description
   * @param countryElement the XML element representing all the country information
   * @param numberType the name of the number type, corresponding to the appropriate tag in the XML
   *     file with information about that type
   * @return complete description of that phone number type
   */
  // @VisibleForTesting
  static PhoneNumberDesc.Builder processPhoneNumberDescElement(PhoneNumberDesc.Builder generalDesc,
                                                               Element countryElement,
                                                               String numberType,
                                                               boolean liteBuild) {
    NodeList phoneNumberDescList = countryElement.getElementsByTagName(numberType);
    PhoneNumberDesc.Builder numberDesc = PhoneNumberDesc.newBuilder();
    if (phoneNumberDescList.getLength() == 0 && !isValidNumberType(numberType)) {
      numberDesc.setNationalNumberPattern("NA");
      numberDesc.setPossibleNumberPattern("NA");
      return numberDesc;
    }
    numberDesc.mergeFrom(generalDesc.build());
    if (phoneNumberDescList.getLength() > 0) {
      Element element = (Element) phoneNumberDescList.item(0);
      NodeList possiblePattern = element.getElementsByTagName(POSSIBLE_NUMBER_PATTERN);
      if (possiblePattern.getLength() > 0) {
        numberDesc.setPossibleNumberPattern(
            validateRE(possiblePattern.item(0).getFirstChild().getNodeValue(), true));
      }
      NodeList validPattern = element.getElementsByTagName(NATIONAL_NUMBER_PATTERN);
      if (validPattern.getLength() > 0) {
        numberDesc.setNationalNumberPattern(
            validateRE(validPattern.item(0).getFirstChild().getNodeValue(), true));
      }
      if (!liteBuild) {
        // Example numbers are stripped from lite builds to save space.
        NodeList exampleNumber = element.getElementsByTagName(EXAMPLE_NUMBER);
        if (exampleNumber.getLength() > 0) {
          numberDesc.setExampleNumber(exampleNumber.item(0).getFirstChild().getNodeValue());
        }
      }
    }
    return numberDesc;
  }
  /**
   * Fills in the per-type number descriptions on the metadata builder. Regular-length metadata and
   * short-number metadata use disjoint sets of types; tollFree and premiumRate apply to both.
   */
  // @VisibleForTesting
  static void setRelevantDescPatterns(PhoneMetadata.Builder metadata, Element element,
      boolean liteBuild, boolean isShortNumberMetadata) {
    PhoneNumberDesc.Builder generalDesc = PhoneNumberDesc.newBuilder();
    generalDesc = processPhoneNumberDescElement(generalDesc, element, GENERAL_DESC, liteBuild);
    metadata.setGeneralDesc(generalDesc);
    if (!isShortNumberMetadata) {
      // Set fields used only by regular length phone numbers.
      metadata.setFixedLine(
          processPhoneNumberDescElement(generalDesc, element, FIXED_LINE, liteBuild));
      metadata.setMobile(
          processPhoneNumberDescElement(generalDesc, element, MOBILE, liteBuild));
      metadata.setSharedCost(
          processPhoneNumberDescElement(generalDesc, element, SHARED_COST, liteBuild));
      metadata.setVoip(
          processPhoneNumberDescElement(generalDesc, element, VOIP, liteBuild));
      metadata.setPersonalNumber(
          processPhoneNumberDescElement(generalDesc, element, PERSONAL_NUMBER, liteBuild));
      metadata.setPager(
          processPhoneNumberDescElement(generalDesc, element, PAGER, liteBuild));
      metadata.setUan(
          processPhoneNumberDescElement(generalDesc, element, UAN, liteBuild));
      metadata.setVoicemail(
          processPhoneNumberDescElement(generalDesc, element, VOICEMAIL, liteBuild));
      metadata.setNoInternationalDialling(
          processPhoneNumberDescElement(generalDesc, element, NO_INTERNATIONAL_DIALLING,
                                        liteBuild));
      metadata.setSameMobileAndFixedLinePattern(
          metadata.getMobile().getNationalNumberPattern().equals(
              metadata.getFixedLine().getNationalNumberPattern()));
    } else {
      // Set fields used only by short numbers.
      metadata.setStandardRate(
          processPhoneNumberDescElement(generalDesc, element, STANDARD_RATE, liteBuild));
      metadata.setShortCode(
          processPhoneNumberDescElement(generalDesc, element, SHORT_CODE, liteBuild));
      metadata.setCarrierSpecific(
          processPhoneNumberDescElement(generalDesc, element, CARRIER_SPECIFIC, liteBuild));
      metadata.setEmergency(
          processPhoneNumberDescElement(generalDesc, element, EMERGENCY, liteBuild));
    }
    // Set fields used by both regular length and short numbers.
    metadata.setTollFree(
        processPhoneNumberDescElement(generalDesc, element, TOLL_FREE, liteBuild));
    metadata.setPremiumRate(
        processPhoneNumberDescElement(generalDesc, element, PREMIUM_RATE, liteBuild));
  }
  /**
   * Assembles the complete PhoneMetadata for one territory element: territory attributes,
   * available formats and (except for alternate-formats metadata) the per-type descriptions.
   */
  // @VisibleForTesting
  static PhoneMetadata loadCountryMetadata(String regionCode, Element element, boolean liteBuild,
      boolean isShortNumberMetadata, boolean isAlternateFormatsMetadata) {
    String nationalPrefix = getNationalPrefix(element);
    PhoneMetadata.Builder metadata =
        loadTerritoryTagMetadata(regionCode, element, nationalPrefix);
    String nationalPrefixFormattingRule =
        getNationalPrefixFormattingRuleFromElement(element, nationalPrefix);
    // Fixed: both arguments are already Strings; the previous .toString() calls were redundant
    // identity calls.
    loadAvailableFormats(metadata, element, nationalPrefix,
                         nationalPrefixFormattingRule,
                         element.hasAttribute(NATIONAL_PREFIX_OPTIONAL_WHEN_FORMATTING));
    if (!isAlternateFormatsMetadata) {
      // The alternate formats metadata does not need most of the patterns to be set.
      setRelevantDescPatterns(metadata, element, liteBuild, isShortNumberMetadata);
    }
    return metadata.build();
  }
}
|
|
/*
* Copyright 2002-2016 The Jamocha Team
*
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.jamocha.org/
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the License.
*/
package org.jamocha.util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import lombok.experimental.UtilityClass;
import java.util.*;
import java.util.function.*;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static java.util.stream.Collectors.*;
/**
* @author Fabian Ohler <[email protected]>
*/
@UtilityClass
public class Lambdas {
/* composition with generic return type */
/** Composes two functions: the result applies {@code a} first, then {@code b}. */
public static <A, B, C> Function<? super A, ? extends C> compose(final Function<? super A, ? extends B> a,
        final Function<? super B, ? extends C> b) {
    // b.compose(a) is the mirror image of a.andThen(b): run a, feed its output into b.
    return b.compose(a);
}
public static <A, B, C, D> Function<? super A, ? extends D> compose(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c) {
return compose(compose(a, b), c);
}
public static <A, B, C, D, E> Function<? super A, ? extends E> compose(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c,
final Function<? super D, ? extends E> d) {
return compose(compose(a, b, c), d);
}
public static <A, B, C, D, E, F> Function<? super A, ? extends F> compose(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c,
final Function<? super D, ? extends E> d, final Function<? super E, ? extends F> e) {
return compose(compose(a, b, c, d), e);
}
/* composition with return type int */
public static <A, B> ToIntFunction<? super A> composeToInt(final Function<? super A, ? extends B> a,
final ToIntFunction<? super B> b) {
return t -> b.applyAsInt(a.apply(t));
}
public static <A, B, C> ToIntFunction<? super A> composeToInt(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final ToIntFunction<? super C> c) {
return composeToInt(compose(a, b), c);
}
public static <A, B, C, D> ToIntFunction<? super A> composeToInt(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c,
final ToIntFunction<? super D> d) {
return composeToInt(compose(a, b, c), d);
}
public static <A, B, C, D, E> ToIntFunction<? super A> composeToInt(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c,
final Function<? super D, ? extends E> d, final ToIntFunction<? super E> e) {
return composeToInt(compose(a, b, c, d), e);
}
/* composition with return type double */
public static <A, B> ToDoubleFunction<? super A> composeToDouble(final Function<? super A, ? extends B> a,
final ToDoubleFunction<? super B> b) {
return t -> b.applyAsDouble(a.apply(t));
}
public static <A, B, C> ToDoubleFunction<? super A> composeToDouble(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final ToDoubleFunction<? super C> c) {
return composeToDouble(compose(a, b), c);
}
public static <A, B, C, D> ToDoubleFunction<? super A> composeToDouble(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c,
final ToDoubleFunction<? super D> d) {
return composeToDouble(compose(a, b, c), d);
}
public static <A, B, C, D, E> ToDoubleFunction<? super A> composeToDouble(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c,
final Function<? super D, ? extends E> d, final ToDoubleFunction<? super E> e) {
return composeToDouble(compose(a, b, c, d), e);
}
/* composition with return type long */
public static <A, B> ToLongFunction<? super A> composeToLong(final Function<? super A, ? extends B> a,
final ToLongFunction<? super B> b) {
return t -> b.applyAsLong(a.apply(t));
}
public static <A, B, C> ToLongFunction<? super A> composeToLong(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final ToLongFunction<? super C> c) {
return composeToLong(compose(a, b), c);
}
public static <A, B, C, D> ToLongFunction<? super A> composeToLong(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c,
final ToLongFunction<? super D> d) {
return composeToLong(compose(a, b, c), d);
}
public static <A, B, C, D, E> ToLongFunction<? super A> composeToLong(final Function<? super A, ? extends B> a,
final Function<? super B, ? extends C> b, final Function<? super C, ? extends D> c,
final Function<? super D, ? extends E> d, final ToLongFunction<? super E> e) {
return composeToLong(compose(a, b, c, d), e);
}
/* predicate negation */
public static <A> Predicate<? super A> negate(final Predicate<? super A> p) {
return p.negate();
}
/* computeIfAbsent helpers */
public static <A, B> Function<A, HashSet<B>> newHashSet() {
return x -> new HashSet<B>();
}
public static <A, B> Function<A, LinkedHashSet<B>> newLinkedHashSet() {
return x -> new LinkedHashSet<B>();
}
public static <A, B> Function<A, Set<B>> newIdentityHashSet() {
return x -> Collections.newSetFromMap(new IdentityHashMap<>());
}
public static <A, B, C> Function<A, HashMap<B, C>> newHashMap() {
return x -> new HashMap<B, C>();
}
public static <A, B, C> Function<A, LinkedHashMap<B, C>> newLinkedHashMap() {
return x -> new LinkedHashMap<B, C>();
}
public static <A, B, C> Function<A, IdentityHashMap<B, C>> newIdentityHashMap() {
return x -> new IdentityHashMap<B, C>();
}
public static <A, B> Function<A, TreeSet<B>> newTreeSet() {
return x -> new TreeSet<B>();
}
public static <A, B, C> Function<A, TreeMap<B, C>> newTreeMap() {
return x -> new TreeMap<B, C>();
}
public static <A, B> Function<? super A, ? extends ArrayList<B>> newArrayList() {
return x -> new ArrayList<B>();
}
/*
* computeIfAbsentHelpers using the value
*/
public static <A> Function<A, Set<A>> toSingleton() {
return x -> Collections.singleton(x);
}
public static <A> Function<A, List<A>> toSingletonList() {
return x -> Collections.singletonList(x);
}
public static <A, B> BiFunction<A, B, Map<A, B>> toSingletonMap() {
return (a, b) -> Collections.singletonMap(a, b);
}
/*
* collector helpers
*/
public static <A> Collector<A, ?, Set<A>> toIdentityHashSet() {
return toCollection(Sets::newIdentityHashSet);
}
public static <A> Collector<A, ?, HashSet<A>> toHashSet() {
return toCollection(Sets::newHashSet);
}
public static <A extends Comparable<A>> Collector<A, ?, TreeSet<A>> toTreeSet() {
return toCollection(Sets::newTreeSet);
}
public static <A> Collector<A, ?, ArrayList<A>> toArrayList() {
return toCollection(Lists::newArrayList);
}
public static <A> Collector<A, ?, ImmutableList<A>> toImmutableList() {
return Collectors.collectingAndThen(Collectors.toList(), ImmutableList::copyOf);
}
public static <A> Collector<A, ?, LinkedList<A>> toLinkedList() {
return toCollection(Lists::newLinkedList);
}
/*
* iterable to stream
*/
public static <T> Stream<T> stream(final Iterable<T> iterable) {
return StreamSupport.stream(iterable.spliterator(), false);
}
/*
* stream as iterable
*/
public static <T> Iterable<T> iterable(final Stream<T> stream) {
return stream::iterator;
}
public static <T> Optional<T> or(final Optional<T> first, final Optional<T> second) {
return first.isPresent() ? first : second;
}
public static <T> Set<T> newIdentityHashSet(final Iterable<T> elements) {
final Set<T> newIdentityHashSet = Sets.newIdentityHashSet();
Iterables.addAll(newIdentityHashSet, elements);
return newIdentityHashSet;
}
public static <T, K> Collector<T, ?, ArrayList<ArrayList<T>>> groupingIntoListOfLists(
final Function<? super T, ? extends K> classifier) {
final Collector<T, ?, Map<K, ArrayList<T>>> groupingBy = groupingBy(classifier, toArrayList());
return Collectors.collectingAndThen(groupingBy, map -> new ArrayList<>(map.values()));
}
public static <T, K> Collector<T, ?, ArrayList<Set<T>>> groupingIntoListOfSets(
final Function<? super T, ? extends K> classifier) {
final Collector<T, ?, Map<K, Set<T>>> groupingBy = groupingBy(classifier, toSet());
return Collectors.collectingAndThen(groupingBy, map -> new ArrayList<>(map.values()));
}
public static <B, A> B foldl(final BiFunction<B, A, B> reducer, final B initialValue, final Iterable<A> iterable) {
B result = initialValue;
for (final A a : iterable) {
result = reducer.apply(result, a);
}
return result;
}
public static <B, A> B foldl(final BiFunction<B, A, B> reducer, final B initialValue, final Stream<A> stream) {
return foldl(reducer, initialValue, iterable(stream));
}
}
|
|
/**
* "First, solve the problem. Then, write the code. -John Johnson"
* "Or use Vangav M"
* www.vangav.com
* */
/**
* MIT License
*
* Copyright (c) 2016 Vangav
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
* */
/**
* Community
* Facebook Group: Vangav Open Source - Backend
* fb.com/groups/575834775932682/
* Facebook Page: Vangav
* fb.com/vangav.f
*
* Third party communities for Vangav Backend
* - play framework
* - cassandra
* - datastax
*
* Tag your question online (e.g.: stack overflow, etc ...) with
* #vangav_backend
* to easier find questions/answers online
* */
package com.vangav.backend.security.authentication.transaction_tokens;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import com.vangav.backend.content.generation.RandomGeneratorInl;
import com.vangav.backend.data_structures_and_algorithms.tuple.Pair;
import com.vangav.backend.exceptions.CodeException;
import com.vangav.backend.exceptions.VangavException.ExceptionClass;
import com.vangav.backend.exceptions.VangavException.ExceptionType;
import com.vangav.backend.exceptions.handlers.ArgumentsInl;
import com.vangav.backend.security.authentication.transaction_tokens.json.TransactionTokensJson;
/**
* @author mustapha
* fb.com/mustapha.abdallah
*/
/**
* TransactionTokensGeneratorInl has inline static methods for generating
* transaction token pairs
* Pairs of <server token, client token> are generated where both the server
* and authentic client sides keep a copy of those pairs. A client token can
* be viewed as a one-time-use password. Upon authentication (past the first
* layer authentication using a user-password) the server sends an unused
* server token and the client replies with the corresponding client token.
* Upon exhausting all tokens, the server generates a new set of tokens and
* securely gives the client a copy of those newly issued tokens.
* Transaction Tokens are used as a second layer of authentication usually
* for accessing highly sensitive tools/information like:
* - money transfer (some online banking systems use a form of transaction
* tokens called TAN Transaction Authentication Number for online
* transfer operations)
*/
public class TransactionTokensGeneratorInl {

  // disable default instantiation
  private TransactionTokensGeneratorInl () {}

  /**
   * TokenType
   * Formats available for generated tokens. Special characters are:
   * ! "#$%&'()*+,-./:;<=>?@[\]^_`{|}~
   * */
  public enum TokenType {

    UUID,
    DIGITS_8,
    DIGITS_16,
    ALPHA_NUMERIC_8,
    ALPHA_NUMERIC_16,
    ALPHA_NUMERIC_32,
    ALPHA_NUMERIC_64,
    ALPHA_NUMERIC_SPECIAL_CHARACTERS_8,
    ALPHA_NUMERIC_SPECIAL_CHARACTERS_16,
    ALPHA_NUMERIC_SPECIAL_CHARACTERS_32,
    ALPHA_NUMERIC_SPECIAL_CHARACTERS_64
  }

  /**
   * generateTransactionTokensJson
   * @param tokenType format of the generated tokens
   * @param tokensCount number of token pairs to generate (must be >= 1)
   * @return TransactionTokensJson JSON Object containing (param tokensCount)
   *           randomly generated Transaction Token Pairs with
   *           (param tokenType type)
   * @throws Exception
   */
  public static TransactionTokensJson generateTransactionTokensJson (
    final TokenType tokenType,
    final int tokensCount) throws Exception {

    return
      new TransactionTokensJson(
        generateTransactionTokensMap(tokenType, tokensCount) );
  }

  /**
   * generateTransactionTokensMap
   * @param tokenType format of the generated tokens
   * @param tokensCount number of token pairs to generate (must be >= 1)
   * @return a <String, String> Map containing (param tokensCount)
   *           randomly generated Transaction Token Pairs with
   *           (param tokenType type); keys are server tokens, values are the
   *           corresponding client tokens
   * @throws Exception
   */
  public static Map<String, String> generateTransactionTokensMap (
    final TokenType tokenType,
    final int tokensCount) throws Exception {

    // validate arguments before generating anything
    ArgumentsInl.checkNotNull(
      "Token Type",
      tokenType,
      ExceptionType.CODE_EXCEPTION);
    ArgumentsInl.checkIntGreaterThanOrEqual(
      "Tokens Count",
      tokensCount,
      1,
      ExceptionType.CODE_EXCEPTION);

    Map<String, String> result = new HashMap<String, String>();

    Pair<String, String> currTokenPair;

    for (int i = 0; i < tokensCount; i ++) {

      currTokenPair = generateTransactionTokensPair(tokenType);

      result.put(currTokenPair.getFirst(), currTokenPair.getSecond() );
    }

    return result;
  }

  /**
   * generateTransactionTokensPair
   * @param tokenType format of the generated tokens
   * @return a pair of Transaction Tokens (server token and client token) with
   *           param tokenType type; both tokens are generated independently
   * @throws Exception
   */
  private static Pair<String, String> generateTransactionTokensPair (
    final TokenType tokenType) throws Exception {

    // dispatch on the token format; the per-format helpers below remove the
    // copy/paste duplication the original per-case blocks had
    switch (tokenType) {
      case UUID:
        return new Pair<String, String> (
          UUID.randomUUID().toString(),
          UUID.randomUUID().toString() );
      case DIGITS_8:
        return newNumericPair(8);
      case DIGITS_16:
        return newNumericPair(16);
      case ALPHA_NUMERIC_8:
        return newAlphaNumericPair(8);
      case ALPHA_NUMERIC_16:
        return newAlphaNumericPair(16);
      case ALPHA_NUMERIC_32:
        return newAlphaNumericPair(32);
      case ALPHA_NUMERIC_64:
        return newAlphaNumericPair(64);
      case ALPHA_NUMERIC_SPECIAL_CHARACTERS_8:
        return newAlphaNumericSpecialPair(8);
      case ALPHA_NUMERIC_SPECIAL_CHARACTERS_16:
        return newAlphaNumericSpecialPair(16);
      case ALPHA_NUMERIC_SPECIAL_CHARACTERS_32:
        return newAlphaNumericSpecialPair(32);
      case ALPHA_NUMERIC_SPECIAL_CHARACTERS_64:
        return newAlphaNumericSpecialPair(64);
      default:
        throw new CodeException(
          181,
          10,
          "Unhandled TokenType ["
            + tokenType.toString()
            + "]",
          ExceptionClass.TYPE);
    }
  }

  /**
   * newNumericPair
   * @param length length of each generated token
   * @return a pair of two independently generated random numeric strings
   * @throws Exception
   */
  private static Pair<String, String> newNumericPair (
    final int length) throws Exception {

    return new Pair<String, String> (
      RandomGeneratorInl.generateRandomNumericString(length),
      RandomGeneratorInl.generateRandomNumericString(length) );
  }

  /**
   * newAlphaNumericPair
   * @param length length of each generated token
   * @return a pair of two independently generated random alpha-numeric strings
   * @throws Exception
   */
  private static Pair<String, String> newAlphaNumericPair (
    final int length) throws Exception {

    return new Pair<String, String> (
      RandomGeneratorInl.generateRandomAlphaNumericString(length),
      RandomGeneratorInl.generateRandomAlphaNumericString(length) );
  }

  /**
   * newAlphaNumericSpecialPair
   * @param length length of each generated token
   * @return a pair of two independently generated random alpha-numeric strings
   *           that may also contain special characters
   * @throws Exception
   */
  private static Pair<String, String> newAlphaNumericSpecialPair (
    final int length) throws Exception {

    return new Pair<String, String> (
      RandomGeneratorInl
        .generateRandomAlphaNumericSpecialCharactersString(length),
      RandomGeneratorInl
        .generateRandomAlphaNumericSpecialCharactersString(length) );
  }
}
|
|
package org.cagrid.demo.photosharing.guicomponents;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import javax.swing.JButton;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.table.AbstractTableModel;
import org.cagrid.demo.photosharing.domain.User;
import org.cagrid.demo.photosharing.gallery.client.GalleryClient;
import org.cagrid.demo.photosharing.stubs.types.PhotoSharingException;
/**
 * Builds a Swing panel that shows and edits per-user gallery permissions
 * (view/add) fetched from a {@link GalleryClient}.
 */
public class PermissionsPanel {

  private GalleryClient client;

  public PermissionsPanel(GalleryClient client) {
    this.client = client;
  }

  /**
   * Builds the panel: a table of per-user view/add flags plus a refresh button
   * that reloads the permissions from the service.
   */
  public JPanel getPermissionsPanel() {
    JPanel panel = new JPanel();
    MyTableModel model = new MyTableModel();
    model.addTableModelListener(new PermissionsModelListener(this.client));
    JTable table = new JTable(model);
    JScrollPane scrollPane = new JScrollPane(table);
    panel.add(scrollPane);
    JButton refreshButton = new JButton("refresh");
    refreshButton.addActionListener(new RefreshButtonActionListener(model, this.client));
    panel.add(refreshButton);
    return panel;
  }

  /** Reloads the permission table from the service when "refresh" is pressed. */
  static class RefreshButtonActionListener implements ActionListener {

    private MyTableModel model;
    private GalleryClient client;

    public RefreshButtonActionListener(MyTableModel model, GalleryClient client) {
      this.model = model;
      this.client = client;
    }

    public void actionPerformed(ActionEvent e) {
      // update the model: retrieve permissions from the service and merge the
      // "view" and "add" user lists into one UserPermission per identity
      try {
        User[] viewers = this.client.listAllUsersWithViewPrivileges();
        User[] adders = this.client.listUsersWithAddPrivileges();
        Set<UserPermission> permissionSet = new HashSet<UserPermission>();
        if (viewers != null) {
          for (User viewer : viewers) {
            mergePermission(permissionSet, viewer.getUserIdentity(), true);
          }
        }
        if (adders != null) {
          for (User adder : adders) {
            mergePermission(permissionSet, adder.getUserIdentity(), false);
          }
        }
        this.model.setUserPermissions(new ArrayList<UserPermission>(permissionSet));
      } catch (PhotoSharingException e1) {
        // TODO surface the error to the user instead of only logging it
        e1.printStackTrace();
      } catch (RemoteException e1) {
        // TODO surface the error to the user instead of only logging it
        e1.printStackTrace();
      }
    }

    /**
     * Adds a permission flag for the given identity to the set: creates a new
     * UserPermission if the identity is not present yet, otherwise sets the
     * flag on the existing entry (entries are keyed by identity via equals).
     *
     * @param permissionSet set being built
     * @param identity      user identity the flag applies to
     * @param isView        true sets the view flag, false sets the add flag
     */
    private static void mergePermission(
        Set<UserPermission> permissionSet, String identity, boolean isView) {
      UserPermission cur = new UserPermission();
      cur.setIdentity(identity);
      if (!permissionSet.contains(cur)) {
        if (isView) {
          cur.setView(Boolean.TRUE);
        } else {
          cur.setAdd(Boolean.TRUE);
        }
        permissionSet.add(cur);
      } else {
        // set the flag on the existing UserPermission object with this identity
        for (UserPermission p : permissionSet) {
          if (p.equals(cur)) {
            if (isView) {
              p.setView(Boolean.TRUE);
            } else {
              p.setAdd(Boolean.TRUE);
            }
          }
        }
      }
    }
  }

  /**
   * One row of the permission table: a user identity plus its view/add flags.
   * Equality and hashing are based on the identity only, so a Set holds at
   * most one entry per identity.
   */
  static class UserPermission {

    private String identity;
    private Boolean view;
    private Boolean add;

    public UserPermission() {
      this.identity = null;
      this.view = Boolean.FALSE;
      this.add = Boolean.FALSE;
    }

    public String getIdentity() {
      return identity;
    }

    public void setIdentity(String identity) {
      this.identity = identity;
    }

    public Boolean isView() {
      return view;
    }

    public void setView(Boolean view) {
      this.view = view;
    }

    public Boolean isAdd() {
      return add;
    }

    public void setAdd(Boolean add) {
      this.add = add;
    }

    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof UserPermission)) {
        return false;
      }
      UserPermission permission = (UserPermission) obj;
      // null-safe: identity is null until setIdentity is called
      return this.identity == null
          ? permission.identity == null
          : this.identity.equals(permission.identity);
    }

    @Override
    public int hashCode() {
      // must stay consistent with equals; null identity hashes to 0
      return this.identity == null ? 0 : this.identity.hashCode();
    }
  }

  /** Table model backing the permissions table: identity, view flag, add flag. */
  static class MyTableModel extends AbstractTableModel {

    private String[] columnNames = new String[] { "Identity", "View Images", "Add Images" };
    private List<UserPermission> userPermissions;

    public MyTableModel() {
      this.userPermissions = new ArrayList<UserPermission>();
    }

    public int getColumnCount() {
      return columnNames.length;
    }

    public int getRowCount() {
      return userPermissions.size();
    }

    public String getColumnName(int col) {
      return columnNames[col];
    }

    public Object getValueAt(int row, int col) {
      if (col == 0) {
        return userPermissions.get(row).getIdentity();
      } else if (col == 1) {
        return userPermissions.get(row).isView();
      } else { // col == 2; only 3 columns total
        return userPermissions.get(row).isAdd();
      }
    }

    public Class<?> getColumnClass(int c) {
      // fixed per-column classes; avoids querying row 0, which would throw
      // IndexOutOfBoundsException on an empty model
      return (c == 0) ? String.class : Boolean.class;
    }

    /** Only the two Boolean flag columns are editable; the identity is not. */
    public boolean isCellEditable(int row, int col) {
      // the data/cell address is constant, no matter where the cell appears onscreen
      return col >= 1;
    }

    public void setValueAt(Object value, int row, int col) {
      if (col == 0) {
        this.userPermissions.get(row).setIdentity((String) value);
      } else if (col == 1) {
        this.userPermissions.get(row).setView((Boolean) value);
      } else { // col == 2
        this.userPermissions.get(row).setAdd((Boolean) value);
      }
      fireTableCellUpdated(row, col);
    }

    // CALL THIS ONLY FROM SWING THREAD
    public void addUserPermission(UserPermission userPermission) {
      this.userPermissions.add(userPermission);
    }

    // CALL THIS ONLY FROM SWING THREAD
    public void removeUserPermission(UserPermission userPermission) {
      this.userPermissions.remove(userPermission);
    }

    public List<UserPermission> getUserPermissions() {
      return this.userPermissions;
    }

    // CALL THIS ONLY FROM SWING THREAD
    public void setUserPermissions(List<UserPermission> newPermissions) {
      this.userPermissions = newPermissions;
      this.fireTableStructureChanged();
    }
  }
}
|
|
package de.felixbruns.jotify.media;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import de.felixbruns.jotify.media.Link.InvalidSpotifyURIException;
import de.felixbruns.jotify.util.Hex;
/**
* Holds information about a playlist.
*
* @author Felix Bruns <[email protected]>
*
* @category Media
*/
public class Playlist implements Iterable<Track> {
  // 32-character hex id of the playlist (may be null for an empty playlist)
  private String id;
  private String name;
  private String author;
  private List<Track> tracks;
  // -1 means "unknown / not yet loaded" for both revision and checksum
  private long revision;
  private long checksum;
  private boolean collaborative;
  private String description;
  private String picture;

  /** Creates an empty, non-collaborative playlist with no id. */
  public Playlist(){
    this.id = null;
    this.name = null;
    this.author = null;
    this.tracks = new ArrayList<Track>();
    this.revision = -1;
    this.checksum = -1;
    this.collaborative = false;
    this.description = null;
    this.picture = null;
  }

  public Playlist(String id){
    this(id, null, null, false);
  }

  /**
   * Creates a playlist.
   *
   * @param id a 32-character hex string or a Spotify URI.
   * @throws IllegalArgumentException if the id is neither.
   */
  public Playlist(String id, String name, String author, boolean collaborative){
    /* Check if id is a 32-character hex string. */
    if(id.length() == 32 && Hex.isHex(id)){
      this.id = id;
    }
    /* Otherwise try to parse it as a Spotify URI. */
    else{
      try{
        this.id = Link.create(id).getId();
      }
      catch(InvalidSpotifyURIException e){
        throw new IllegalArgumentException(
          "Given id is neither a 32-character hex string nor a valid Spotify URI: " + id, e
        );
      }
    }
    /* Set other playlist properties. */
    this.name = name;
    this.author = author;
    this.tracks = new ArrayList<Track>();
    this.revision = -1;
    this.checksum = -1;
    this.collaborative = collaborative;
    this.description = null;
    this.picture = null;
  }

  public String getId(){
    return this.id;
  }

  public void setId(String id){
    this.id = id;
  }

  public String getName(){
    return this.name;
  }

  public void setName(String name){
    this.name = name;
  }

  public String getAuthor(){
    return this.author;
  }

  public void setAuthor(String author){
    this.author = author;
  }

  public List<Track> getTracks(){
    return this.tracks;
  }

  public void setTracks(List<Track> tracks){
    this.tracks = tracks;
  }

  public boolean hasTracks(){
    return !this.tracks.isEmpty();
  }

  public long getRevision(){
    return this.revision;
  }

  public void setRevision(long revision){
    this.revision = revision;
  }

  public boolean isCollaborative(){
    return this.collaborative;
  }

  public void setCollaborative(boolean collaborative){
    this.collaborative = collaborative;
  }

  public String getDescription(){
    return this.description;
  }

  public void setDescription(String description){
    this.description = description;
  }

  public String getPicture(){
    return this.picture;
  }

  public void setPicture(String picture){
    this.picture = picture;
  }

  /**
   * Get and update the checksum of this playlist.
   *
   * @return The checksum, recomputed from the current track list.
   */
  public long getChecksum(){
    Checksum checksum = new Checksum();
    for(Track track : this.tracks){
      checksum.update(track);
    }
    this.checksum = checksum.getValue();
    return this.checksum;
  }

  /**
   * Set the current checksum of this playlist.
   *
   * @param checksum The current checksum.
   */
  public void setChecksum(long checksum){
    this.checksum = checksum;
  }

  /**
   * Create a link from this playlist.
   *
   * @return A {@link Link} object which can then
   *         be used to retrieve the Spotify URI.
   */
  public Link getLink(){
    return Link.create(this);
  }

  @Override
  public Iterator<Track> iterator(){
    return this.tracks.iterator();
  }

  /**
   * Creates a playlist holding the tracks of a search {@link Result}.
   *
   * @param name   playlist name.
   * @param author playlist author.
   * @param result result whose tracks are copied into the playlist.
   */
  public static Playlist fromResult(String name, String author, Result result){
    Playlist playlist = new Playlist();
    playlist.name = name;
    playlist.author = author;
    playlist.tracks.addAll(result.getTracks());
    return playlist;
  }

  @Override
  public boolean equals(Object o){
    if(o instanceof Playlist){
      Playlist p = (Playlist)o;
      /* Null-safe and consistent with hashCode(): playlists created via the
       * default constructor have a null id until setId is called. */
      return (this.id != null) ? this.id.equals(p.id) : (p.id == null);
    }
    return false;
  }

  @Override
  public int hashCode(){
    return (this.id != null) ? this.id.hashCode() : 0;
  }

  @Override
  public String toString(){
    return String.format("[Playlist: %s, %s, %d, %s]", this.author, this.name, this.revision, this.id);
  }
}
|
|
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.history.integration;
import com.intellij.CommonBundle;
import com.intellij.history.Clock;
import com.intellij.history.core.ContentFactory;
import com.intellij.history.core.LocalVcs;
import com.intellij.history.core.Paths;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.FileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.ReadonlyStatusHandler;
import com.intellij.openapi.vfs.VirtualFile;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Facade over IntelliJ IDEA platform services (project roots, virtual file
 * system, document manager, message dialogs, command processor) used by the
 * local-history integration. Fields are protected so tests/subclasses can
 * substitute behavior.
 */
public class IdeaGateway {
protected Project myProject;
protected FileFilter myFileFilter;
public IdeaGateway(Project p) {
myProject = p;
myFileFilter = createFileFilter();
}
// Builds the filter from the project's file index and the IDE's file type
// manager; protected so subclasses can override it.
protected FileFilter createFileFilter() {
FileIndex fi = getRootManager().getFileIndex();
FileTypeManager tm = FileTypeManager.getInstance();
return new FileFilter(fi, tm);
}
public Project getProject() {
return myProject;
}
// todo get rid of file filter
public FileFilter getFileFilter() {
return myFileFilter;
}
// Path of the project's base directory.
public String getBaseDir() {
return myProject.getBaseDir().getPath();
}
public List<VirtualFile> getContentRoots() {
return Arrays.asList(getRootManager().getContentRoots());
}
private ProjectRootManager getRootManager() {
return ProjectRootManager.getInstance(myProject);
}
// Shows a yes/no warning dialog; true when the user picked the first option.
// NOTE(review): comparing to 0 assumes "Yes" is option index 0 — prefer the
// Messages.YES constant if available; confirm against the platform API.
public boolean askForProceeding(String s) {
return Messages.showYesNoDialog(myProject, s, CommonBundle.getWarningTitle(), Messages.getWarningIcon()) == 0;
}
public void showError(String s) {
Messages.showErrorDialog(myProject, s, CommonBundle.getErrorTitle());
}
public void showMessage(String s, String title) {
Messages.showInfoMessage(myProject, s, title);
}
// Runs r as a named, undoable command inside a write action.
public void performCommandInsideWriteAction(final String name, final Runnable r) {
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
performCommand(name, r);
}
});
}
private void performCommand(String name, Runnable r) {
CommandProcessor.getInstance().executeCommand(myProject, r, name, null);
}
// True when none of the given files remained read-only after the handler ran.
public boolean ensureFilesAreWritable(List<VirtualFile> ff) {
ReadonlyStatusHandler h = ReadonlyStatusHandler.getInstance(myProject);
return !h.ensureFilesWritable(ff.toArray(new VirtualFile[0])).hasReadonlyFiles();
}
public VirtualFile findVirtualFile(String path) {
return getFileSystem().findFileByPath(path);
}
public VirtualFile findOrCreateFileSafely(String path, boolean isDirectory) throws IOException {
return findOrCreateFileSafely(this, path, isDirectory);
}
// Finds the file at path, creating it (and, recursively, its parents) if
// missing. "Safely": an existing node whose kind (file vs. directory) does
// not match is deleted and recreated with the requested kind.
public VirtualFile findOrCreateFileSafely(Object requestor, String path, boolean isDirectory) throws IOException {
VirtualFile f = findVirtualFile(path);
if (f != null && f.isDirectory() != isDirectory) {
f.delete(this);
f = null;
}
if (f == null) {
VirtualFile parent = findOrCreateFileSafely(Paths.getParentOf(path), true);
String name = Paths.getNameOf(path);
f = isDirectory
? parent.createChildDirectory(requestor, name)
: parent.createChildData(requestor, name);
}
return f;
}
// All non-directory files under path, depth-first.
public List<VirtualFile> getAllFilesFrom(String path) {
return collectFiles(findVirtualFile(path), new ArrayList<VirtualFile>());
}
private List<VirtualFile> collectFiles(VirtualFile f, List<VirtualFile> result) {
if (f.isDirectory()) {
for (VirtualFile child : f.getChildren()) {
collectFiles(child, result);
}
}
else {
result.add(f);
}
return result;
}
// Registers the contents of every unsaved document in the vcs, all within a
// single change set.
public void registerUnsavedDocuments(LocalVcs vcs) {
vcs.beginChangeSet();
for (Document d : getUnsavedDocuments()) {
VirtualFile f = getFile(d);
if (shouldNotRegister(f)) continue;
registerDocumentContents(vcs, f, d);
}
vcs.endChangeSet(null);
}
// Same as above, but for one file (or recursively for a directory). Note:
// unlike the overload above, this does not open a change set itself.
public void registerUnsavedDocuments(LocalVcs vcs, VirtualFile f) {
if (shouldNotRegister(f)) return;
if (f.isDirectory()) {
for (VirtualFile each : f.getChildren()) {
registerUnsavedDocuments(vcs, each);
}
}
else {
registerDocumentContents(vcs, f, getDocument(f));
}
}
private void registerDocumentContents(LocalVcs vcs, VirtualFile f, Document d) {
vcs.changeFileContent(f.getPath(), contentFactoryFor(d), Clock.getCurrentTimestamp());
}
// Skips null, invalid, or filtered-out (not allowed / outside content roots) files.
private boolean shouldNotRegister(VirtualFile f) {
if (f == null) return true;
if (!f.isValid()) return true;
if (!getFileFilter().isAllowedAndUnderContentRoot(f)) return true;
return false;
}
// Lazily converts a document's text to bytes.
// NOTE(review): getLength() re-runs the full text-to-bytes conversion on
// every call; cache if this shows up in profiles.
private ContentFactory contentFactoryFor(final Document d) {
return new ContentFactory() {
@Override
public byte[] getBytes() {
return bytesFromDocument(d);
}
@Override
public long getLength() {
return getBytes().length;
}
};
}
// Encodes the document text with its file's charset, falling back to the
// platform default charset if that charset name is unsupported.
protected byte[] bytesFromDocument(Document d) {
try {
return d.getText().getBytes(getFile(d).getCharset().name());
}
catch (UnsupportedEncodingException e) {
return d.getText().getBytes();
}
}
// Decodes bytes using the charset of the file at path; falls back to charset
// auto-detection when the file is unknown, and to the platform default
// charset when the charset name is unsupported.
public String stringFromBytes(byte[] bytes, String path) {
try {
VirtualFile file = findVirtualFile(path);
if (file == null) {
return CharsetToolkit.bytesToString(bytes);
}
return new String(bytes, file.getCharset().name());
}
catch (UnsupportedEncodingException e1) {
return new String(bytes);
}
}
public void saveAllUnsavedDocuments() {
getDocManager().saveAllDocuments();
}
protected Document[] getUnsavedDocuments() {
return getDocManager().getUnsavedDocuments();
}
protected VirtualFile getFile(Document d) {
return getDocManager().getFile(d);
}
protected Document getDocument(VirtualFile f) {
return getDocManager().getDocument(f);
}
public Document getDocument(String path) {
return getDocument(findVirtualFile(path));
}
public FileType getFileType(String fileName) {
return FileTypeManager.getInstance().getFileTypeByFileName(fileName);
}
private LocalFileSystem getFileSystem() {
return LocalFileSystem.getInstance();
}
private FileDocumentManager getDocManager() {
return FileDocumentManager.getInstance();
}
}
|
|
/*
* Copyright 2015-2019 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin2;
import org.junit.Test;
import zipkin2.codec.SpanBytesDecoder;
import zipkin2.codec.SpanBytesEncoder;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static zipkin2.TestObjects.FRONTEND;
/**
 * Verifies that {@code SpanBytesDecoderDetector} correctly sniffs the codec of raw span bytes
 * (JSON v1, JSON v2, Thrift, proto3) and keeps single-span "message" payloads distinct from
 * list payloads wherever the wire format allows the distinction.
 */
public class SpanBytesDecoderDetectorTest {
  // Fully-populated server-side span: exercises kind, shared flag, tag and timing fields.
  Span span1 =
      Span.newBuilder()
          .traceId("a")
          .id("b")
          .name("get")
          .timestamp(10)
          .duration(30)
          .kind(Span.Kind.SERVER)
          .shared(true)
          .putTag("http.method", "GET")
          .localEndpoint(FRONTEND)
          .build();
  // Child of span1: ensures list payloads carry more than one element.
  Span span2 =
      Span.newBuilder()
          .traceId("a")
          .parentId("b")
          .id("c")
          .name("get")
          .timestamp(15)
          .duration(10)
          .localEndpoint(FRONTEND)
          .build();
  // A single JSON v1 span is detected through the single-message entry point.
  @Test
  public void decoderForMessage_json_v1() {
    byte[] message = SpanBytesEncoder.JSON_V1.encode(span1);
    assertThat(SpanBytesDecoderDetector.decoderForMessage(message))
        .isEqualTo(SpanBytesDecoder.JSON_V1);
  }
  // A JSON v1 list must be rejected by the single-message entry point.
  @Test(expected = IllegalArgumentException.class)
  public void decoderForMessage_json_v1_list() {
    byte[] message = SpanBytesEncoder.JSON_V1.encodeList(asList(span1, span2));
    SpanBytesDecoderDetector.decoderForMessage(message);
  }
  // A JSON v1 list is detected through the list entry point.
  @Test
  public void decoderForListMessage_json_v1() {
    byte[] message = SpanBytesEncoder.JSON_V1.encodeList(asList(span1, span2));
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.JSON_V1);
  }
  // Conversely, a single JSON v1 span must be rejected by the list entry point.
  @Test(expected = IllegalArgumentException.class)
  public void decoderForListMessage_json_v1_singleItem() {
    byte[] message = SpanBytesEncoder.JSON_V1.encode(span1);
    SpanBytesDecoderDetector.decoderForListMessage(message);
  }
  /** Single-element reads were for legacy non-list encoding. Don't add new code that does this */
  @Test(expected = UnsupportedOperationException.class)
  public void decoderForMessage_json_v2() {
    byte[] message = SpanBytesEncoder.JSON_V2.encode(span1);
    assertThat(SpanBytesDecoderDetector.decoderForMessage(message))
        .isEqualTo(SpanBytesDecoder.JSON_V2);
  }
  // JSON v2 lists are likewise rejected by the single-message entry point.
  @Test(expected = IllegalArgumentException.class)
  public void decoderForMessage_json_v2_list() {
    byte[] message = SpanBytesEncoder.JSON_V2.encodeList(asList(span1, span2));
    SpanBytesDecoderDetector.decoderForMessage(message);
  }
  // A JSON v2 list is detected through the list entry point.
  @Test
  public void decoderForListMessage_json_v2() {
    byte[] message = SpanBytesEncoder.JSON_V2.encodeList(asList(span1, span2));
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.JSON_V2);
  }
  // Detection also works for sparse v2 spans where only a localEndpoint is set.
  @Test
  public void decoderForListMessage_json_v2_partial_localEndpoint() {
    Span span =
        Span.newBuilder()
            .traceId("a")
            .id("b")
            .localEndpoint(Endpoint.newBuilder().serviceName("foo").build())
            .build();
    byte[] message = SpanBytesEncoder.JSON_V2.encodeList(asList(span));
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.JSON_V2);
  }
  // ...and for sparse v2 spans where only kind and a remoteEndpoint are set.
  @Test
  public void decoderForListMessage_json_v2_partial_remoteEndpoint() {
    Span span =
        Span.newBuilder()
            .traceId("a")
            .id("b")
            .kind(Span.Kind.CLIENT)
            .remoteEndpoint(Endpoint.newBuilder().serviceName("foo").build())
            .build();
    byte[] message = SpanBytesEncoder.JSON_V2.encodeList(asList(span));
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.JSON_V2);
  }
  // ...and for sparse v2 spans where only a tag is set.
  @Test
  public void decoderForListMessage_json_v2_partial_tag() {
    Span span = Span.newBuilder().traceId("a").id("b").putTag("foo", "bar").build();
    byte[] message = SpanBytesEncoder.JSON_V2.encodeList(asList(span));
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.JSON_V2);
  }
  // A single JSON v2 span must be rejected by the list entry point.
  @Test(expected = IllegalArgumentException.class)
  public void decoderForListMessage_json_v2_singleItem() {
    byte[] message = SpanBytesEncoder.JSON_V2.encode(span1);
    SpanBytesDecoderDetector.decoderForListMessage(message);
  }
  // A single Thrift span is detected through the single-message entry point.
  @Test
  public void decoderForMessage_thrift() {
    byte[] message = SpanBytesEncoder.THRIFT.encode(span1);
    assertThat(SpanBytesDecoderDetector.decoderForMessage(message))
        .isEqualTo(SpanBytesDecoder.THRIFT);
  }
  // A Thrift list must be rejected by the single-message entry point.
  @Test(expected = IllegalArgumentException.class)
  public void decoderForMessage_thrift_list() {
    byte[] message = SpanBytesEncoder.THRIFT.encodeList(asList(span1, span2));
    SpanBytesDecoderDetector.decoderForMessage(message);
  }
  // A Thrift list is detected through the list entry point.
  @Test
  public void decoderForListMessage_thrift() {
    byte[] message = SpanBytesEncoder.THRIFT.encodeList(asList(span1, span2));
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.THRIFT);
  }
  /**
   * We encoded incorrectly for years, so we have to read this data eventhough it is wrong.
   *
   * <p>See openzipkin/zipkin-reporter-java#133
   */
  @Test
  public void decoderForListMessage_thrift_incorrectFirstByte() {
    byte[] message = SpanBytesEncoder.THRIFT.encodeList(asList(span1, span2));
    message[0] = 11; // We made a typo.. it should have been 12
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.THRIFT);
  }
  // A single Thrift span must be rejected by the list entry point.
  @Test(expected = IllegalArgumentException.class)
  public void decoderForListMessage_thrift_singleItem() {
    byte[] message = SpanBytesEncoder.THRIFT.encode(span1);
    SpanBytesDecoderDetector.decoderForListMessage(message);
  }
  /** Single-element reads were for legacy non-list encoding. Don't add new code that does this */
  @Test(expected = UnsupportedOperationException.class)
  public void decoderForMessage_proto3() {
    byte[] message = SpanBytesEncoder.PROTO3.encode(span1);
    assertThat(SpanBytesDecoderDetector.decoderForMessage(message))
        .isEqualTo(SpanBytesDecoder.PROTO3);
  }
  // proto3 has no single-message read path at all, even for lists.
  @Test(expected = UnsupportedOperationException.class)
  public void decoderForMessage_proto3_list() {
    byte[] message = SpanBytesEncoder.PROTO3.encodeList(asList(span1, span2));
    SpanBytesDecoderDetector.decoderForMessage(message);
  }
  // A proto3 list is detected through the list entry point.
  @Test
  public void decoderForListMessage_proto3() {
    byte[] message = SpanBytesEncoder.PROTO3.encodeList(asList(span1, span2));
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.PROTO3);
  }
  /** There is no difference between a list of size one and a single element in proto3 */
  @Test
  public void decoderForListMessage_proto3_singleItem() {
    byte[] message = SpanBytesEncoder.PROTO3.encode(span1);
    assertThat(SpanBytesDecoderDetector.decoderForListMessage(message))
        .isEqualTo(SpanBytesDecoder.PROTO3);
  }
  // Garbage bytes are rejected with IllegalArgumentException by both entry points.
  @Test(expected = IllegalArgumentException.class)
  public void decoderForMessage_unknown() {
    SpanBytesDecoderDetector.decoderForMessage(new byte[] {'h'});
  }
  @Test(expected = IllegalArgumentException.class)
  public void decoderForListMessage_unknown() {
    SpanBytesDecoderDetector.decoderForListMessage(new byte[] {'h'});
  }
}
|
|
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.jnigen;
/**
 * Defines the configuration for building a native shared library for a specific platform. Used with {@link AntScriptGenerator} to
 * create Ant build files that invoke the compiler toolchain to create the shared libraries.
 */
public class BuildTarget {
	public String ndkHome = "";
	public String ndkSuffix = "";

	/** The target operating system of a build target. */
	public enum TargetOs {
		Windows, Linux, MacOsX, Android, IOS
	}

	/** the target operating system **/
	public BuildTarget.TargetOs os;
	/** whether this is a 64-bit build, not used for Android **/
	public boolean is64Bit;
	/** the C files and directories to be included in the build, accepts Ant path format, must not be null **/
	public String[] cIncludes;
	/** the C files and directories to be excluded from the build, accepts Ant path format, must not be null **/
	public String[] cExcludes;
	/** the C++ files and directories to be included in the build, accepts Ant path format, must not be null **/
	public String[] cppIncludes;
	/** the C++ files and directories to be excluded from the build, accepts Ant path format, must not be null **/
	public String[] cppExcludes;
	/** the directories containing headers for the build, must not be null **/
	public String[] headerDirs;
	/** prefix for the compiler (g++, gcc), useful for cross compilation, must not be null **/
	public String compilerPrefix;
	/** suffix for the compiler (g++, gcc), useful for cross compilation, must not be null **/
	public String compilerSuffix;
	/** the flags passed to the C compiler, must not be null **/
	public String cFlags;
	/** the flags passed to the C++ compiler, must not be null **/
	public String cppFlags;
	/** the flags passed to the linker, must not be null **/
	public String linkerFlags;
	/** the name of the generated build file for this target, defaults to "build-${target}(64)?.xml", must not be null **/
	public String buildFileName;
	/** whether to exclude this build target from the master build file, useful for debugging **/
	public boolean excludeFromMasterBuildFile = false;
	/** Ant XML executed in a target before compilation **/
	public String preCompileTask;
	/** Ant XML executed in a target after compilation **/
	public String postCompileTask;
	/** the libraries to be linked to the output, specify via e.g. -ldinput -ldxguid etc. **/
	public String libraries;
	/** The name used for folders for this specific target. Defaults to "${target}(64)" **/
	public String osFileName;
	/** The name used for the library file. This is a full file name, including file extension. Default is platform specific.
	 * E.g. "lib{sharedLibName}64.so" **/
	public String libName;

	/** Creates a new build target. See members of this class for a description of the parameters. Any null collection/string
	 * argument is normalized to an empty value so all fields are guaranteed non-null after construction.
	 *
	 * @throws IllegalArgumentException if targetType is null */
	public BuildTarget(BuildTarget.TargetOs targetType, boolean is64Bit, String[] cIncludes, String[] cExcludes,
		String[] cppIncludes, String[] cppExcludes, String[] headerDirs, String compilerPrefix, String cFlags, String cppFlags,
		String linkerFlags) {
		if (targetType == null) throw new IllegalArgumentException("targetType must not be null");
		// Normalize nulls so downstream Ant-script generation never has to null-check.
		if (cIncludes == null) cIncludes = new String[0];
		if (cExcludes == null) cExcludes = new String[0];
		if (cppIncludes == null) cppIncludes = new String[0];
		if (cppExcludes == null) cppExcludes = new String[0];
		if (headerDirs == null) headerDirs = new String[0];
		if (compilerPrefix == null) compilerPrefix = "";
		if (cFlags == null) cFlags = "";
		if (cppFlags == null) cppFlags = "";
		if (linkerFlags == null) linkerFlags = "";
		this.os = targetType;
		this.is64Bit = is64Bit;
		this.cIncludes = cIncludes;
		this.cExcludes = cExcludes;
		this.cppIncludes = cppIncludes;
		this.cppExcludes = cppExcludes;
		this.headerDirs = headerDirs;
		this.compilerPrefix = compilerPrefix;
		this.cFlags = cFlags;
		this.cppFlags = cppFlags;
		this.linkerFlags = linkerFlags;
		this.libraries = "";
		this.compilerSuffix = "";
	}

	/** Creates a new default BuildTarget for the given OS, using common default values.
	 *
	 * @param type the target operating system
	 * @param is64Bit whether to configure a 64-bit build; ignored for Android and iOS, which always use their own defaults
	 * @throws IllegalArgumentException if type is null (previously a bare RuntimeException; IllegalArgumentException is a
	 *           RuntimeException subclass, so existing catch blocks still work) */
	public static BuildTarget newDefaultTarget (BuildTarget.TargetOs type, boolean is64Bit) {
		if (type == null) throw new IllegalArgumentException("Unknown target type");
		// An exhaustive switch over the enum replaces the original if-chain.
		switch (type) {
		case Windows:
			if (!is64Bit) {
				// Windows 32-Bit, cross-compiled with MinGW-w64.
				return new BuildTarget(TargetOs.Windows, false, new String[]{"**/*.c"}, new String[0], new String[]{"**/*.cpp"},
					new String[0], new String[0], "i686-w64-mingw32-", "-c -Wall -O2 -mfpmath=sse -msse2 -fmessage-length=0 -m32",
					"-c -Wall -O2 -mfpmath=sse -msse2 -fmessage-length=0 -m32",
					"-Wl,--kill-at -shared -m32 -static -static-libgcc -static-libstdc++");
			}
			// Windows 64-Bit, cross-compiled with MinGW-w64.
			return new BuildTarget(TargetOs.Windows, true, new String[]{"**/*.c"}, new String[0], new String[]{"**/*.cpp"},
				new String[0], new String[0], "x86_64-w64-mingw32-", "-c -Wall -O2 -mfpmath=sse -msse2 -fmessage-length=0 -m64",
				"-c -Wall -O2 -mfpmath=sse -msse2 -fmessage-length=0 -m64",
				"-Wl,--kill-at -shared -static -static-libgcc -static-libstdc++ -m64");
		case Linux:
			if (!is64Bit) {
				// Linux 32-Bit
				return new BuildTarget(TargetOs.Linux, false, new String[]{"**/*.c"}, new String[0], new String[]{"**/*.cpp"},
					new String[0], new String[0], "", "-c -Wall -O2 -mfpmath=sse -msse -fmessage-length=0 -m32 -fPIC",
					"-c -Wall -O2 -mfpmath=sse -msse -fmessage-length=0 -m32 -fPIC", "-shared -m32");
			}
			// Linux 64-Bit
			return new BuildTarget(TargetOs.Linux, true, new String[]{"**/*.c"}, new String[0], new String[]{"**/*.cpp"},
				new String[0], new String[0], "", "-c -Wall -O2 -mfpmath=sse -msse -fmessage-length=0 -m64 -fPIC",
				"-c -Wall -O2 -mfpmath=sse -msse -fmessage-length=0 -m64 -fPIC", "-shared -m64 -Wl,-wrap,memcpy");
		case MacOsX:
			if (!is64Bit) {
				// Mac OS X x86 (32-bit)
				return new BuildTarget(TargetOs.MacOsX, false, new String[]{"**/*.c"}, new String[0],
					new String[]{"**/*.cpp"}, new String[0], new String[0], "",
					"-c -Wall -O2 -arch i386 -DFIXED_POINT -fmessage-length=0 -fPIC -mmacosx-version-min=10.14",
					"-c -Wall -O2 -arch i386 -DFIXED_POINT -fmessage-length=0 -fPIC -mmacosx-version-min=10.14",
					"-shared -arch i386 -mmacosx-version-min=10.14");
			}
			// Mac OS X x86_64
			return new BuildTarget(TargetOs.MacOsX, true, new String[]{"**/*.c"}, new String[0],
				new String[]{"**/*.cpp"}, new String[0], new String[0], "",
				"-c -Wall -O2 -arch x86_64 -DFIXED_POINT -fmessage-length=0 -fPIC -mmacosx-version-min=10.14",
				"-c -Wall -O2 -arch x86_64 -DFIXED_POINT -fmessage-length=0 -fPIC -mmacosx-version-min=10.14",
				"-shared -arch x86_64 -mmacosx-version-min=10.14");
		case Android:
			// Android: NDK toolchain supplies the architecture, so is64Bit is ignored (kept false for compatibility).
			return new BuildTarget(TargetOs.Android, false, new String[]{"**/*.c"}, new String[0],
				new String[]{"**/*.cpp"}, new String[0], new String[0], "", "-O2 -Wall -D__ANDROID__", "-O2 -Wall -D__ANDROID__",
				"-lm");
		case IOS:
			// iOS, 386 simulator and armv7a, compiled to fat static lib; "rcs" are ar flags, not linker flags.
			return new BuildTarget(TargetOs.IOS, false, new String[]{"**/*.c"}, new String[0],
				new String[]{"**/*.cpp"}, new String[0], new String[0], "",
				"-c -Wall -O2",
				"-c -Wall -O2",
				"rcs");
		default:
			// Unreachable while TargetOs keeps its current constants; kept for forward compatibility.
			throw new IllegalArgumentException("Unknown target type");
		}
	}
}
|
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/alert_service.proto
package com.google.monitoring.v3;
/**
*
*
* <pre>
* The protocol for the `CreateAlertPolicy` request.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.CreateAlertPolicyRequest}
*/
public final class CreateAlertPolicyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.CreateAlertPolicyRequest)
CreateAlertPolicyRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateAlertPolicyRequest.newBuilder() to construct.
private CreateAlertPolicyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateAlertPolicyRequest() {
name_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private CreateAlertPolicyRequest(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18:
{
com.google.monitoring.v3.AlertPolicy.Builder subBuilder = null;
if (alertPolicy_ != null) {
subBuilder = alertPolicy_.toBuilder();
}
alertPolicy_ =
input.readMessage(
com.google.monitoring.v3.AlertPolicy.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(alertPolicy_);
alertPolicy_ = subBuilder.buildPartial();
}
break;
}
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
name_ = s;
break;
}
default:
{
if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.AlertServiceProto
.internal_static_google_monitoring_v3_CreateAlertPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.AlertServiceProto
.internal_static_google_monitoring_v3_CreateAlertPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.CreateAlertPolicyRequest.class,
com.google.monitoring.v3.CreateAlertPolicyRequest.Builder.class);
}
public static final int NAME_FIELD_NUMBER = 3;
private volatile java.lang.Object name_;
/**
*
*
* <pre>
* The project in which to create the alerting policy. The format is
* `projects/[PROJECT_ID]`.
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. The alerting
* policy that is returned will have a name that contains a normalized
* representation of this name as a prefix but adds a suffix of the form
* `/alertPolicies/[POLICY_ID]`, identifying the policy in the container.
* </pre>
*
* <code>string name = 3;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* The project in which to create the alerting policy. The format is
* `projects/[PROJECT_ID]`.
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. The alerting
* policy that is returned will have a name that contains a normalized
* representation of this name as a prefix but adds a suffix of the form
* `/alertPolicies/[POLICY_ID]`, identifying the policy in the container.
* </pre>
*
* <code>string name = 3;</code>
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int ALERT_POLICY_FIELD_NUMBER = 2;
private com.google.monitoring.v3.AlertPolicy alertPolicy_;
/**
*
*
* <pre>
* The requested alerting policy. You should omit the `name` field in this
* policy. The name will be returned in the new policy, including
* a new [ALERT_POLICY_ID] value.
* </pre>
*
* <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
*/
public boolean hasAlertPolicy() {
return alertPolicy_ != null;
}
/**
*
*
* <pre>
* The requested alerting policy. You should omit the `name` field in this
* policy. The name will be returned in the new policy, including
* a new [ALERT_POLICY_ID] value.
* </pre>
*
* <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
*/
public com.google.monitoring.v3.AlertPolicy getAlertPolicy() {
return alertPolicy_ == null
? com.google.monitoring.v3.AlertPolicy.getDefaultInstance()
: alertPolicy_;
}
/**
*
*
* <pre>
* The requested alerting policy. You should omit the `name` field in this
* policy. The name will be returned in the new policy, including
* a new [ALERT_POLICY_ID] value.
* </pre>
*
* <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
*/
public com.google.monitoring.v3.AlertPolicyOrBuilder getAlertPolicyOrBuilder() {
return getAlertPolicy();
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (alertPolicy_ != null) {
output.writeMessage(2, getAlertPolicy());
}
if (!getNameBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (alertPolicy_ != null) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getAlertPolicy());
}
if (!getNameBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.monitoring.v3.CreateAlertPolicyRequest)) {
return super.equals(obj);
}
com.google.monitoring.v3.CreateAlertPolicyRequest other =
(com.google.monitoring.v3.CreateAlertPolicyRequest) obj;
boolean result = true;
result = result && getName().equals(other.getName());
result = result && (hasAlertPolicy() == other.hasAlertPolicy());
if (hasAlertPolicy()) {
result = result && getAlertPolicy().equals(other.getAlertPolicy());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
if (hasAlertPolicy()) {
hash = (37 * hash) + ALERT_POLICY_FIELD_NUMBER;
hash = (53 * hash) + getAlertPolicy().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.monitoring.v3.CreateAlertPolicyRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The protocol for the `CreateAlertPolicy` request.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.CreateAlertPolicyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.monitoring.v3.CreateAlertPolicyRequest)
com.google.monitoring.v3.CreateAlertPolicyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.AlertServiceProto
.internal_static_google_monitoring_v3_CreateAlertPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.AlertServiceProto
.internal_static_google_monitoring_v3_CreateAlertPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.CreateAlertPolicyRequest.class,
com.google.monitoring.v3.CreateAlertPolicyRequest.Builder.class);
}
// Construct using com.google.monitoring.v3.CreateAlertPolicyRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
name_ = "";
if (alertPolicyBuilder_ == null) {
alertPolicy_ = null;
} else {
alertPolicy_ = null;
alertPolicyBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.monitoring.v3.AlertServiceProto
.internal_static_google_monitoring_v3_CreateAlertPolicyRequest_descriptor;
}
@java.lang.Override
public com.google.monitoring.v3.CreateAlertPolicyRequest getDefaultInstanceForType() {
return com.google.monitoring.v3.CreateAlertPolicyRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.monitoring.v3.CreateAlertPolicyRequest build() {
com.google.monitoring.v3.CreateAlertPolicyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.monitoring.v3.CreateAlertPolicyRequest buildPartial() {
com.google.monitoring.v3.CreateAlertPolicyRequest result =
new com.google.monitoring.v3.CreateAlertPolicyRequest(this);
result.name_ = name_;
if (alertPolicyBuilder_ == null) {
result.alertPolicy_ = alertPolicy_;
} else {
result.alertPolicy_ = alertPolicyBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return (Builder) super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.v3.CreateAlertPolicyRequest) {
return mergeFrom((com.google.monitoring.v3.CreateAlertPolicyRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.monitoring.v3.CreateAlertPolicyRequest other) {
if (other == com.google.monitoring.v3.CreateAlertPolicyRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
onChanged();
}
if (other.hasAlertPolicy()) {
mergeAlertPolicy(other.getAlertPolicy());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.monitoring.v3.CreateAlertPolicyRequest parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.monitoring.v3.CreateAlertPolicyRequest) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* The project in which to create the alerting policy. The format is
* `projects/[PROJECT_ID]`.
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. The alerting
* policy that is returned will have a name that contains a normalized
* representation of this name as a prefix but adds a suffix of the form
* `/alertPolicies/[POLICY_ID]`, identifying the policy in the container.
* </pre>
*
* <code>string name = 3;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * The project in which to create the alerting policy. The format is
 * `projects/[PROJECT_ID]`.
 * Note that this field names the parent container in which the alerting
 * policy will be written, not the name of the created policy. The alerting
 * policy that is returned will have a name that contains a normalized
 * representation of this name as a prefix but adds a suffix of the form
 * `/alertPolicies/[POLICY_ID]`, identifying the policy in the container.
 * </pre>
 *
 * <p>Mirror of {@code getName()}: if the field is currently cached as a
 * String, it is encoded to a UTF-8 ByteString and the encoded form cached.
 *
 * <code>string name = 3;</code>
 */
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof com.google.protobuf.ByteString) {
    return (com.google.protobuf.ByteString) ref;
  }
  // String form: encode once and cache the ByteString.
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  name_ = encoded;
  return encoded;
}
/**
 *
 *
 * <pre>
 * The project in which to create the alerting policy. The format is
 * `projects/[PROJECT_ID]`.
 * Note that this field names the parent container in which the alerting
 * policy will be written, not the name of the created policy. The alerting
 * policy that is returned will have a name that contains a normalized
 * representation of this name as a prefix but adds a suffix of the form
 * `/alertPolicies/[POLICY_ID]`, identifying the policy in the container.
 * </pre>
 *
 * <code>string name = 3;</code>
 *
 * @param value the new name; must not be null
 * @return this builder, for chaining
 */
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
// Store the String form directly; getNameBytes() converts lazily if needed.
name_ = value;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The project in which to create the alerting policy. The format is
 * `projects/[PROJECT_ID]`.
 * Note that this field names the parent container in which the alerting
 * policy will be written, not the name of the created policy. The alerting
 * policy that is returned will have a name that contains a normalized
 * representation of this name as a prefix but adds a suffix of the form
 * `/alertPolicies/[POLICY_ID]`, identifying the policy in the container.
 * </pre>
 *
 * <code>string name = 3;</code>
 *
 * @return this builder, for chaining
 */
public Builder clearName() {
// Reset to the proto3 default (empty string) taken from the default instance.
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The project in which to create the alerting policy. The format is
 * `projects/[PROJECT_ID]`.
 * Note that this field names the parent container in which the alerting
 * policy will be written, not the name of the created policy. The alerting
 * policy that is returned will have a name that contains a normalized
 * representation of this name as a prefix but adds a suffix of the form
 * `/alertPolicies/[POLICY_ID]`, identifying the policy in the container.
 * </pre>
 *
 * <code>string name = 3;</code>
 *
 * @param value the new name as raw bytes; must not be null and must be valid UTF-8
 * @return this builder, for chaining
 */
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// proto3 string fields must hold valid UTF-8; rejects malformed input.
checkByteStringIsUtf8(value);
name_ = value;
onChanged();
return this;
}
// The field is represented either directly (alertPolicy_) or through a lazily
// created nested field builder (alertPolicyBuilder_); at most one is active.
private com.google.monitoring.v3.AlertPolicy alertPolicy_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.monitoring.v3.AlertPolicy,
        com.google.monitoring.v3.AlertPolicy.Builder,
        com.google.monitoring.v3.AlertPolicyOrBuilder>
    alertPolicyBuilder_;
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @return whether the field has been set, in either representation
 */
public boolean hasAlertPolicy() {
  // Set unless both the direct message and the nested builder are absent.
  return !(alertPolicyBuilder_ == null && alertPolicy_ == null);
}
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @return the current value, or the default instance when unset
 */
public com.google.monitoring.v3.AlertPolicy getAlertPolicy() {
  // When a nested builder is active, it is the source of truth.
  if (alertPolicyBuilder_ != null) {
    return alertPolicyBuilder_.getMessage();
  }
  return alertPolicy_ == null
      ? com.google.monitoring.v3.AlertPolicy.getDefaultInstance()
      : alertPolicy_;
}
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @param value the new policy; must not be null
 * @return this builder, for chaining
 */
public Builder setAlertPolicy(com.google.monitoring.v3.AlertPolicy value) {
  // With an active nested builder, delegate (it performs its own null check).
  if (alertPolicyBuilder_ != null) {
    alertPolicyBuilder_.setMessage(value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  alertPolicy_ = value;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @param builderForValue a builder whose built message becomes the new value
 * @return this builder, for chaining
 */
public Builder setAlertPolicy(com.google.monitoring.v3.AlertPolicy.Builder builderForValue) {
  com.google.monitoring.v3.AlertPolicy built = builderForValue.build();
  if (alertPolicyBuilder_ != null) {
    alertPolicyBuilder_.setMessage(built);
  } else {
    alertPolicy_ = built;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @param value the policy to merge into the current value
 * @return this builder, for chaining
 */
public Builder mergeAlertPolicy(com.google.monitoring.v3.AlertPolicy value) {
if (alertPolicyBuilder_ == null) {
if (alertPolicy_ != null) {
// Field already set: merge into the existing message (protobuf singular
// message merge: fields set in `value` overwrite, submessages recurse).
alertPolicy_ =
com.google.monitoring.v3.AlertPolicy.newBuilder(alertPolicy_)
.mergeFrom(value)
.buildPartial();
} else {
// Field unset: adopt the incoming message directly.
alertPolicy_ = value;
}
onChanged();
} else {
// Nested builder active: delegate the merge to it.
alertPolicyBuilder_.mergeFrom(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @return this builder, for chaining
 */
public Builder clearAlertPolicy() {
if (alertPolicyBuilder_ == null) {
alertPolicy_ = null;
onChanged();
} else {
// Drop both the cached message and the nested builder; a fresh builder is
// created lazily on the next getAlertPolicyFieldBuilder() call.
alertPolicy_ = null;
alertPolicyBuilder_ = null;
}
return this;
}
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @return a mutable builder for the field, creating it (and the field) if absent
 */
public com.google.monitoring.v3.AlertPolicy.Builder getAlertPolicyBuilder() {
// Mark dirty up front: the returned builder can mutate the field later
// without any further change notification to this parent builder.
onChanged();
return getAlertPolicyFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @return a read-only view of the field without forcing builder creation
 */
public com.google.monitoring.v3.AlertPolicyOrBuilder getAlertPolicyOrBuilder() {
  // Prefer the live builder view when one exists; otherwise fall back to the
  // stored message or, if unset, the immutable default instance.
  if (alertPolicyBuilder_ == null) {
    return alertPolicy_ == null
        ? com.google.monitoring.v3.AlertPolicy.getDefaultInstance()
        : alertPolicy_;
  }
  return alertPolicyBuilder_.getMessageOrBuilder();
}
/**
 *
 *
 * <pre>
 * The requested alerting policy. You should omit the `name` field in this
 * policy. The name will be returned in the new policy, including
 * a new [ALERT_POLICY_ID] value.
 * </pre>
 *
 * <code>.google.monitoring.v3.AlertPolicy alert_policy = 2;</code>
 *
 * @return the lazily created nested field builder for {@code alert_policy}
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.monitoring.v3.AlertPolicy,
com.google.monitoring.v3.AlertPolicy.Builder,
com.google.monitoring.v3.AlertPolicyOrBuilder>
getAlertPolicyFieldBuilder() {
if (alertPolicyBuilder_ == null) {
// Seed the builder with the current value. Note getAlertPolicy() must run
// before alertPolicy_ is nulled; from here on the builder is the single
// source of truth for the field.
alertPolicyBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.monitoring.v3.AlertPolicy,
com.google.monitoring.v3.AlertPolicy.Builder,
com.google.monitoring.v3.AlertPolicyOrBuilder>(
getAlertPolicy(), getParentForChildren(), isClean());
alertPolicy_ = null;
}
return alertPolicyBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
// Route through the proto3-specific unknown-field handling in the superclass.
return super.setUnknownFieldsProto3(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
// Unknown fields are preserved and merged by the generated-message superclass.
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.v3.CreateAlertPolicyRequest)
}
// @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateAlertPolicyRequest)
private static final com.google.monitoring.v3.CreateAlertPolicyRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.monitoring.v3.CreateAlertPolicyRequest();
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Stateless shared parser; each call constructs a fresh message via the
// private parsing constructor.
private static final com.google.protobuf.Parser<CreateAlertPolicyRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateAlertPolicyRequest>() {
@java.lang.Override
public CreateAlertPolicyRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CreateAlertPolicyRequest(input, extensionRegistry);
}
};
/** Returns the shared parser for CreateAlertPolicyRequest messages. */
public static com.google.protobuf.Parser<CreateAlertPolicyRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateAlertPolicyRequest> getParserForType() {
// Instance-level accessor required by the Message interface; same singleton.
return PARSER;
}
@java.lang.Override
public com.google.monitoring.v3.CreateAlertPolicyRequest getDefaultInstanceForType() {
// Instance-level accessor required by the Message interface; same singleton.
return DEFAULT_INSTANCE;
}
}