proj_name (string, 131 distinct values) | relative_path (string, 30-228 chars) | class_name (string, 1-68 chars) | func_name (string, 1-48 chars) | masked_class (string, 78-9.82k chars) | func_body (string, 46-9.61k chars) | len_input (int64, 29-2.01k) | len_output (int64, 14-1.94k) | total (int64, 55-2.05k) | relevant_context (string, 0-38.4k chars) |
---|---|---|---|---|---|---|---|---|---|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/common/filter/AuthFilter.java
|
AuthFilter
|
doFilter
|
class AuthFilter implements Filter {
private static final Logger log = LoggerFactory.getLogger(AuthFilter.class);
public void init(FilterConfig config) throws ServletException {
}
public void destroy() {
}
/**
* doFilter determines if user is an administrator or redirect to login page
*
* @param req task request
* @param resp task response
* @param chain filter chain
* @throws ServletException
*/
public void doFilter(ServletRequest req, ServletResponse resp, FilterChain chain) throws ServletException {<FILL_FUNCTION_BODY>}
}
|
HttpServletRequest servletRequest = (HttpServletRequest) req;
HttpServletResponse servletResponse = (HttpServletResponse) resp;
boolean isAdmin = false;
try {
//read auth token
String authToken = AuthUtil.getAuthToken(servletRequest.getSession());
//check if exists
if (authToken != null && !authToken.trim().equals("")) {
//check if valid admin auth token
String userType = AuthDB.isAuthorized(AuthUtil.getUserId(servletRequest.getSession()), authToken);
if (userType != null) {
String uri = servletRequest.getRequestURI();
if (Auth.MANAGER.equals(userType)) {
isAdmin = true;
} else if (!uri.contains("/manage/") && Auth.ADMINISTRATOR.equals(userType)) {
isAdmin = true;
}
AuthUtil.setUserType(servletRequest.getSession(), userType);
//check to see if user has timed out
String timeStr = AuthUtil.getTimeout(servletRequest.getSession());
if (timeStr != null && !timeStr.trim().equals("")) {
SimpleDateFormat sdf = new SimpleDateFormat("MMddyyyyHHmmss");
Date sessionTimeout = sdf.parse(timeStr);
Date currentTime = new Date();
//if current time > timeout then redirect to login page
if (sessionTimeout == null || currentTime.after(sessionTimeout)) {
isAdmin = false;
} else {
AuthUtil.setTimeout(servletRequest.getSession());
}
} else {
isAdmin = false;
}
}
}
//if not admin redirect to login page
if (!isAdmin) {
AuthUtil.deleteAllSession(servletRequest.getSession());
servletResponse.sendRedirect(servletRequest.getContextPath() + "/");
} else {
chain.doFilter(req, resp);
}
} catch (SQLException | ParseException | IOException | GeneralSecurityException ex) {
AuthUtil.deleteAllSession(servletRequest.getSession());
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
| 159 | 566 | 725 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/common/util/AppConfig.java
|
AppConfig
|
decryptProperty
|
class AppConfig {
private static final Logger log = LoggerFactory.getLogger(AppConfig.class);
private static PropertiesConfiguration prop;
public static final String CONFIG_DIR = StringUtils.isNotEmpty(System.getProperty("CONFIG_DIR")) ? System.getProperty("CONFIG_DIR").trim() : AppConfig.class.getClassLoader().getResource(".").getPath();
static {
try {
//move configuration to specified dir
if (StringUtils.isNotEmpty(System.getProperty("CONFIG_DIR"))) {
File configFile = new File(CONFIG_DIR + "BastillionConfig.properties");
if (!configFile.exists()) {
File oldConfig = new File(AppConfig.class.getClassLoader().getResource(".").getPath() + "BastillionConfig.properties");
FileUtils.moveFile(oldConfig, configFile);
}
configFile = new File(CONFIG_DIR + "jaas.conf");
if (!configFile.exists()) {
File oldConfig = new File(AppConfig.class.getClassLoader().getResource(".").getPath() + "jaas.conf");
FileUtils.moveFile(oldConfig, configFile);
}
}
prop = new PropertiesConfiguration(CONFIG_DIR + "BastillionConfig.properties");
} catch (IOException | ConfigurationException ex) {
log.error(ex.toString(), ex);
}
}
private AppConfig() {
}
/**
* gets the property from config
*
* @param name property name
* @return configuration property
*/
public static String getProperty(String name) {
String property = null;
if (StringUtils.isNotEmpty(name)) {
if (StringUtils.isNotEmpty(System.getenv(name))) {
property = System.getenv(name);
} else if (StringUtils.isNotEmpty(System.getenv(name.toUpperCase()))) {
property = System.getenv(name.toUpperCase());
} else {
property = prop.getString(name);
}
}
return property;
}
/**
* gets the property from config
*
* @param name property name
* @param defaultValue default value if property is empty
* @return configuration property
*/
public static String getProperty(String name, String defaultValue) {
String value = getProperty(name);
if (StringUtils.isEmpty(value)) {
value = defaultValue;
}
return value;
}
/**
* gets the property from config and replaces placeholders
*
* @param name property name
* @param replacementMap name value pairs of place holders to replace
* @return configuration property
*/
public static String getProperty(String name, Map<String, String> replacementMap) {
String value = getProperty(name);
if (StringUtils.isNotEmpty(value)) {
//iterate through map to replace text
Set<String> keySet = replacementMap.keySet();
for (String key : keySet) {
//replace values in string
String rVal = replacementMap.get(key);
value = value.replace("${" + key + "}", rVal);
}
}
return value;
}
/**
* removes property from the config
*
* @param name property name
*/
public static void removeProperty(String name) throws ConfigurationException {
//remove property
prop.clearProperty(name);
prop.save();
}
/**
* updates the property in the config
*
* @param name property name
* @param value property value
*/
public static void updateProperty(String name, String value) throws ConfigurationException {
//remove property
if (StringUtils.isNotEmpty(value)) {
prop.setProperty(name, value);
prop.save();
}
}
/**
* checks if property is encrypted
*
* @param name property name
* @return true if property is encrypted
*/
public static boolean isPropertyEncrypted(String name) {
String property = prop.getString(name);
if (StringUtils.isNotEmpty(property)) {
return property.matches("^" + EncryptionUtil.CRYPT_ALGORITHM + "\\{.*\\}$");
} else {
return false;
}
}
/**
* decrypts and returns the property from config
*
* @param name property name
* @return configuration property
*/
public static String decryptProperty(String name) throws GeneralSecurityException {<FILL_FUNCTION_BODY>}
/**
* encrypts and updates the property in the config
*
* @param name property name
* @param value property value
*/
public static void encryptProperty(String name, String value) throws ConfigurationException, GeneralSecurityException {
//remove property
if (StringUtils.isNotEmpty(value)) {
prop.setProperty(name, EncryptionUtil.CRYPT_ALGORITHM + "{" + EncryptionUtil.encrypt(value) + "}");
prop.save();
}
}
}
|
String retVal = prop.getString(name);
if (StringUtils.isNotEmpty(retVal)) {
retVal = retVal.replaceAll("^" + EncryptionUtil.CRYPT_ALGORITHM + "\\{", "").replaceAll("\\}$", "");
retVal = EncryptionUtil.decrypt(retVal);
}
return retVal;
| 1,308 | 97 | 1,405 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/common/util/AuthUtil.java
|
AuthUtil
|
getAuthToken
|
class AuthUtil {
public static final String SESSION_ID = "sessionId";
public static final String USER_ID = "userId";
public static final String USERNAME = "username";
public static final String AUTH_TOKEN = "authToken";
public static final String TIMEOUT = "timeout";
private AuthUtil() {
}
/**
* query session for OTP shared secret
*
* @param session http session
* @return shared secret
*/
public static String getOTPSecret(HttpSession session) throws GeneralSecurityException {
String secret = (String) session.getAttribute("otp_secret");
secret = EncryptionUtil.decrypt(secret);
return secret;
}
/**
* set authentication type
*
* @param session http session
* @param authType authentication type
*/
public static void setAuthType(HttpSession session, String authType) {
if (authType != null) {
session.setAttribute("authType", authType);
}
}
/**
* query authentication type
*
* @param session http session
* @return authentication type
*/
public static String getAuthType(HttpSession session) {
String authType = (String) session.getAttribute("authType");
return authType;
}
/**
* set user type
*
* @param session http session
* @param userType user type
*/
public static void setUserType(HttpSession session, String userType) {
if (userType != null) {
session.setAttribute("userType", userType);
}
}
/**
* query user type
*
* @param session http session
* @return user type
*/
public static String getUserType(HttpSession session) {
String userType = (String) session.getAttribute("userType");
return userType;
}
/**
* set session id
*
* @param session http session
* @param sessionId session id
*/
public static void setSessionId(HttpSession session, Long sessionId) throws GeneralSecurityException {
if (sessionId != null) {
session.setAttribute(SESSION_ID, EncryptionUtil.encrypt(sessionId.toString()));
}
}
/**
* query session id
*
* @param session http session
* @return session id
*/
public static Long getSessionId(HttpSession session) throws GeneralSecurityException {
Long sessionId = null;
String sessionIdStr = EncryptionUtil.decrypt((String) session.getAttribute(SESSION_ID));
if (sessionIdStr != null && !sessionIdStr.trim().equals("")) {
sessionId = Long.parseLong(sessionIdStr);
}
return sessionId;
}
/**
* query session for user id
*
* @param session http session
* @return user id
*/
public static Long getUserId(HttpSession session) throws GeneralSecurityException {
Long userId = null;
String userIdStr = EncryptionUtil.decrypt((String) session.getAttribute(USER_ID));
if (userIdStr != null && !userIdStr.trim().equals("")) {
userId = Long.parseLong(userIdStr);
}
return userId;
}
/**
* query session for the username
*
* @param session http session
* @return username
*/
public static String getUsername(HttpSession session) {
return (String) session.getAttribute(USERNAME);
}
/**
* query session for authentication token
*
* @param session http session
* @return authentication token
*/
public static String getAuthToken(HttpSession session) throws GeneralSecurityException {<FILL_FUNCTION_BODY>}
/**
* query session for timeout
*
* @param session http session
* @return timeout string
*/
public static String getTimeout(HttpSession session) {
String timeout = (String) session.getAttribute(TIMEOUT);
return timeout;
}
/**
* set session OTP shared secret
*
* @param session http session
* @param secret shared secret
*/
public static void setOTPSecret(HttpSession session, String secret) throws GeneralSecurityException {
if (secret != null && !secret.trim().equals("")) {
session.setAttribute("otp_secret", EncryptionUtil.encrypt(secret));
}
}
/**
* set session user id
*
* @param session http session
* @param userId user id
*/
public static void setUserId(HttpSession session, Long userId) throws GeneralSecurityException {
if (userId != null) {
session.setAttribute(USER_ID, EncryptionUtil.encrypt(userId.toString()));
}
}
/**
* set session username
*
* @param session http session
* @param username username
*/
public static void setUsername(HttpSession session, String username) {
if (username != null) {
session.setAttribute(USERNAME, username);
}
}
/**
* set session authentication token
*
* @param session http session
* @param authToken authentication token
*/
public static void setAuthToken(HttpSession session, String authToken) throws GeneralSecurityException {
if (authToken != null && !authToken.trim().equals("")) {
session.setAttribute(AUTH_TOKEN, EncryptionUtil.encrypt(authToken));
}
}
/**
* set session timeout
*
* @param session http session
*/
public static void setTimeout(HttpSession session) {
//set session timeout
SimpleDateFormat sdf = new SimpleDateFormat("MMddyyyyHHmmss");
Calendar timeout = Calendar.getInstance();
timeout.add(Calendar.MINUTE, Integer.parseInt(AppConfig.getProperty("sessionTimeout", "15")));
session.setAttribute(TIMEOUT, sdf.format(timeout.getTime()));
}
/**
* delete all session information
*
* @param session
*/
public static void deleteAllSession(HttpSession session) {
session.setAttribute(TIMEOUT, null);
session.setAttribute(AUTH_TOKEN, null);
session.setAttribute(USER_ID, null);
session.setAttribute(SESSION_ID, null);
session.invalidate();
}
/**
* return client ip from servlet request
*
* @param servletRequest http servlet request
* @return client ip
*/
public static String getClientIPAddress(HttpServletRequest servletRequest) {
String clientIP = null;
if (StringUtils.isNotEmpty(AppConfig.getProperty("clientIPHeader"))) {
clientIP = servletRequest.getHeader(AppConfig.getProperty("clientIPHeader"));
}
if (StringUtils.isEmpty(clientIP)) {
clientIP = servletRequest.getRemoteAddr();
}
return clientIP;
}
}
|
String authToken = (String) session.getAttribute(AUTH_TOKEN);
authToken = EncryptionUtil.decrypt(authToken);
return authToken;
| 1,815 | 45 | 1,860 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/LoginKtrl.java
|
LoginKtrl
|
loginSubmit
|
class LoginKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(LoginKtrl.class);
//check if otp is enabled
@Model(name = "otpEnabled")
static final Boolean otpEnabled = ("required".equals(AppConfig.getProperty("oneTimePassword")) || "optional".equals(AppConfig.getProperty("oneTimePassword")));
private static final Logger loginAuditLogger = LoggerFactory.getLogger("io.bastillion.manage.control.LoginAudit");
private final String AUTH_ERROR = "Authentication Failed : Login credentials are invalid";
private final String AUTH_ERROR_NO_PROFILE = "Authentication Failed : There are no profiles assigned to this account";
private final String AUTH_ERROR_EXPIRED_ACCOUNT = "Authentication Failed : Account has expired";
@Model(name = "auth")
Auth auth;
public LoginKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/login", method = MethodType.GET)
public String login() {
return "/login.html";
}
@Kontrol(path = "/loginSubmit", method = MethodType.POST)
public String loginSubmit() throws ServletException {<FILL_FUNCTION_BODY>}
@Kontrol(path = "/logout", method = MethodType.GET)
public String logout() {
AuthUtil.deleteAllSession(getRequest().getSession());
return "redirect:/";
}
/**
* Validates fields for auth submit
*/
@Validate(input = "/login.html")
public void validateLoginSubmit() {
if (auth.getUsername() == null ||
auth.getUsername().trim().equals("")) {
addFieldError("auth.username", "Required");
}
if (auth.getPassword() == null ||
auth.getPassword().trim().equals("")) {
addFieldError("auth.password", "Required");
}
}
}
|
String retVal = "redirect:/admin/menu.html";
String authToken = null;
try {
authToken = AuthDB.login(auth);
//get client IP
String clientIP = AuthUtil.getClientIPAddress(getRequest());
if (authToken != null) {
User user = AuthDB.getUserByAuthToken(authToken);
if (user != null) {
String sharedSecret = null;
if (otpEnabled) {
sharedSecret = AuthDB.getSharedSecret(user.getId());
if (StringUtils.isNotEmpty(sharedSecret) && (auth.getOtpToken() == null || !OTPUtil.verifyToken(sharedSecret, auth.getOtpToken()))) {
loginAuditLogger.info(auth.getUsername() + " (" + clientIP + ") - " + AUTH_ERROR);
addError(AUTH_ERROR);
return "/login.html";
}
}
//check to see if admin has any assigned profiles
if (!User.MANAGER.equals(user.getUserType()) && (user.getProfileList() == null || user.getProfileList().size() <= 0)) {
loginAuditLogger.info(auth.getUsername() + " (" + clientIP + ") - " + AUTH_ERROR_NO_PROFILE);
addError(AUTH_ERROR_NO_PROFILE);
return "/login.html";
}
//check to see if account has expired
if (user.isExpired()) {
loginAuditLogger.info(auth.getUsername() + " (" + clientIP + ") - " + AUTH_ERROR_EXPIRED_ACCOUNT);
addError(AUTH_ERROR_EXPIRED_ACCOUNT);
return "/login.html";
}
AuthUtil.setAuthToken(getRequest().getSession(), authToken);
AuthUtil.setUserId(getRequest().getSession(), user.getId());
AuthUtil.setAuthType(getRequest().getSession(), user.getAuthType());
AuthUtil.setTimeout(getRequest().getSession());
AuthUtil.setUsername(getRequest().getSession(), user.getUsername());
AuthDB.updateLastLogin(user);
//for first time login redirect to set OTP
if (otpEnabled && StringUtils.isEmpty(sharedSecret)) {
retVal = "redirect:/admin/viewOTP.ktrl";
} else if ("changeme".equals(auth.getPassword()) && Auth.AUTH_BASIC.equals(user.getAuthType())) {
retVal = "redirect:/admin/userSettings.ktrl";
}
loginAuditLogger.info(auth.getUsername() + " (" + clientIP + ") - Authentication Success");
}
} else {
loginAuditLogger.info(auth.getUsername() + " (" + clientIP + ") - " + AUTH_ERROR);
addError(AUTH_ERROR);
retVal = "/login.html";
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return retVal;
| 572 | 868 | 1,440 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/OTPKtrl.java
|
OTPKtrl
|
qrImage
|
class OTPKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(OTPKtrl.class);
public static final boolean requireOTP = "required".equals(AppConfig.getProperty("oneTimePassword"));
//QR image size
private static final int QR_IMAGE_WIDTH = 325;
private static final int QR_IMAGE_HEIGHT = 325;
@Model(name = "qrImage")
String qrImage;
@Model(name = "sharedSecret")
String sharedSecret;
public OTPKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/admin/viewOTP", method = MethodType.GET)
public String viewOTP() throws ServletException {
sharedSecret = OTPUtil.generateSecret();
try {
AuthUtil.setOTPSecret(getRequest().getSession(), sharedSecret);
} catch (GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
qrImage = new Date().getTime() + ".png";
return "/admin/two-factor_otp.html";
}
@Kontrol(path = "/admin/otpSubmit", method = MethodType.POST)
public String otpSubmit() throws ServletException {
try {
AuthDB.updateSharedSecret(sharedSecret, AuthUtil.getAuthToken(getRequest().getSession()));
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
if (requireOTP) {
AuthUtil.deleteAllSession(getRequest().getSession());
}
return "redirect:/logout.ktrl";
}
@Kontrol(path = "/admin/qrImage", method = MethodType.GET)
public String qrImage() throws ServletException {<FILL_FUNCTION_BODY>}
}
|
String username;
String secret;
try {
username = UserDB.getUser(AuthUtil.getUserId(getRequest().getSession())).getUsername();
secret = AuthUtil.getOTPSecret(getRequest().getSession());
AuthUtil.setOTPSecret(getRequest().getSession(), null);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
try {
String qrCodeText = "otpauth://totp/Bastillion%20%28" + URLEncoder.encode(getRequest().getHeader("host").replaceAll("\\:.*$", ""), "utf-8") + "%29:" + username + "?secret=" + secret;
QRCodeWriter qrWriter = new QRCodeWriter();
Hashtable<EncodeHintType, String> hints = new Hashtable<>();
hints.put(EncodeHintType.CHARACTER_SET, "UTF-8");
BitMatrix matrix = qrWriter.encode(qrCodeText, BarcodeFormat.QR_CODE, QR_IMAGE_WIDTH, QR_IMAGE_HEIGHT, hints);
getResponse().setContentType("image/png");
BufferedImage image = new BufferedImage(QR_IMAGE_WIDTH, QR_IMAGE_HEIGHT, BufferedImage.TYPE_INT_RGB);
Graphics2D graphics = (Graphics2D) image.getGraphics();
graphics.setColor(Color.WHITE);
graphics.fillRect(0, 0, QR_IMAGE_WIDTH, QR_IMAGE_HEIGHT);
graphics.setColor(Color.BLACK);
for (int x = 0; x < QR_IMAGE_WIDTH; x++) {
for (int y = 0; y < QR_IMAGE_HEIGHT; y++) {
if (matrix.get(x, y)) {
graphics.fillRect(x, y, 1, 1);
}
}
}
ImageIO.write(image, "png", getResponse().getOutputStream());
getResponse().getOutputStream().flush();
getResponse().getOutputStream().close();
} catch (IOException | WriterException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return null;
| 592 | 675 | 1,267 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/ProfileKtrl.java
|
ProfileKtrl
|
saveProfile
|
class ProfileKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(ProfileKtrl.class);
@Model(name = "sortedSet")
SortedSet sortedSet = new SortedSet();
@Model(name = "profile")
Profile profile = new Profile();
public ProfileKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/manage/viewProfiles", method = MethodType.GET)
public String viewSystems() throws ServletException {
try {
sortedSet = ProfileDB.getProfileSet(sortedSet);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "/manage/view_profiles.html";
}
@Kontrol(path = "/manage/saveProfile", method = MethodType.POST)
public String saveProfile() throws ServletException {<FILL_FUNCTION_BODY>}
@Kontrol(path = "/manage/deleteProfile", method = MethodType.GET)
public String deleteProfile() throws ServletException {
if (profile.getId() != null) {
try {
ProfileDB.deleteProfile(profile.getId());
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
return "redirect:/manage/viewProfiles.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
}
/**
* validate save profile
*/
@Validate(input = "/manage/view_profiles.html")
public void validateSaveProfile() throws ServletException {
if (profile == null
|| profile.getNm() == null
|| profile.getNm().trim().equals("")) {
addFieldError("profile.nm", "Required");
}
if (!this.getFieldErrors().isEmpty()) {
try {
sortedSet = ProfileDB.getProfileSet(sortedSet);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
}
}
|
try {
if (profile.getId() != null) {
ProfileDB.updateProfile(profile);
} else {
ProfileDB.insertProfile(profile);
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "redirect:/manage/viewProfiles.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
| 693 | 161 | 854 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/ProfileSystemsKtrl.java
|
ProfileSystemsKtrl
|
viewProfileSystems
|
class ProfileSystemsKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(ProfileSystemsKtrl.class);
@Model(name = "profile")
Profile profile;
@Model(name = "sortedSet")
SortedSet sortedSet = new SortedSet();
@Model(name = "systemSelectId")
List<Long> systemSelectId = new ArrayList<>();
public ProfileSystemsKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/manage/viewProfileSystems", method = MethodType.GET)
public String viewProfileSystems() throws ServletException {<FILL_FUNCTION_BODY>}
@Kontrol(path = "/manage/assignSystemsToProfile", method = MethodType.POST)
public String assignSystemsToProfile() throws ServletException {
if (systemSelectId != null) {
try {
ProfileSystemsDB.setSystemsForProfile(profile.getId(), systemSelectId);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
RefreshAuthKeyUtil.refreshProfileSystems(profile.getId());
return "redirect:/manage/viewProfiles.ktrl";
}
}
|
if (profile != null && profile.getId() != null) {
try {
profile = ProfileDB.getProfile(profile.getId());
sortedSet = SystemDB.getSystemSet(sortedSet, profile.getId());
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
return "/manage/view_profile_systems.html";
| 361 | 122 | 483 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/ProfileUsersKtrl.java
|
ProfileUsersKtrl
|
assignSystemsToProfile
|
class ProfileUsersKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(ProfileUsersKtrl.class);
@Model(name = "profile")
Profile profile;
@Model(name = "sortedSet")
SortedSet sortedSet = new SortedSet();
@Model(name = "userSelectId")
List<Long> userSelectId = new ArrayList<>();
public ProfileUsersKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/manage/viewProfileUsers", method = MethodType.GET)
public String viewProfileUsers() throws ServletException {
if (profile != null && profile.getId() != null) {
try {
profile = ProfileDB.getProfile(profile.getId());
sortedSet = UserDB.getAdminUserSet(sortedSet, profile.getId());
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
return "/manage/view_profile_users.html";
}
@Kontrol(path = "/manage/assignUsersToProfile", method = MethodType.POST)
public String assignSystemsToProfile() throws ServletException {<FILL_FUNCTION_BODY>}
}
|
if (userSelectId != null) {
try {
UserProfileDB.setUsersForProfile(profile.getId(), userSelectId);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
RefreshAuthKeyUtil.refreshProfileSystems(profile.getId());
return "redirect:/manage/viewProfiles.ktrl";
| 351 | 120 | 471 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/ScriptKtrl.java
|
ScriptKtrl
|
validateSaveScript
|
class ScriptKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(ScriptKtrl.class);
@Model(name = "sortedSet")
SortedSet sortedSet = new SortedSet();
@Model(name = "script")
Script script = new Script();
public ScriptKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/admin/viewScripts", method = MethodType.GET)
public String viewScripts() throws ServletException {
try {
Long userId = AuthUtil.getUserId(getRequest().getSession());
sortedSet = ScriptDB.getScriptSet(sortedSet, userId);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "/admin/view_scripts.html";
}
@Kontrol(path = "/admin/saveScript", method = MethodType.POST)
public String saveScript() throws ServletException {
try {
Long userId = AuthUtil.getUserId(getRequest().getSession());
if (script.getId() != null) {
ScriptDB.updateScript(script, userId);
} else {
ScriptDB.insertScript(script, userId);
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "redirect:/admin/viewScripts.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
}
@Kontrol(path = "/admin/deleteScript", method = MethodType.GET)
public String deleteScript() throws ServletException {
if (script.getId() != null) {
try {
Long userId = AuthUtil.getUserId(getRequest().getSession());
ScriptDB.deleteScript(script.getId(), userId);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
return "redirect:/admin/viewScripts.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
}
/**
* Validates all fields for adding a user
*/
@Validate(input = "/admin/view_scripts.html")
public void validateSaveScript() throws ServletException {<FILL_FUNCTION_BODY>}
}
|
if (script == null
|| script.getDisplayNm() == null
|| script.getDisplayNm().trim().equals("")) {
addFieldError("script.displayNm", "Required");
}
if (script == null
|| script.getScript() == null
|| script.getScript().trim().equals("")
|| (new Script()).getScript().trim().equals(script.getScript().trim())) {
addFieldError("script.script", "Required");
}
if (!this.getFieldErrors().isEmpty()) {
try {
Long userId = AuthUtil.getUserId(getRequest().getSession());
sortedSet = ScriptDB.getScriptSet(sortedSet, userId);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
| 707 | 227 | 934 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/SessionAuditKtrl.java
|
SessionAuditKtrl
|
getJSONTermOutputForSession
|
class SessionAuditKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(SessionAuditKtrl.class);
@Model(name = "sortedSet")
SortedSet sortedSet = new SortedSet();
@Model(name = "sessionId")
Long sessionId;
@Model(name = "instanceId")
Integer instanceId;
@Model(name = "sessionAudit")
SessionAudit sessionAudit;
@Model(name = "systemList")
List<HostSystem> systemList;
@Model(name = "userList")
List<User> userList;
public SessionAuditKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/manage/viewSessions", method = MethodType.GET)
public String viewSessions() throws ServletException {
if (sortedSet.getOrderByField() == null || sortedSet.getOrderByField().trim().equals("")) {
sortedSet.setOrderByField(SessionAuditDB.SORT_BY_SESSION_TM);
sortedSet.setOrderByDirection("desc");
}
try {
systemList = SystemDB.getSystemSet(new SortedSet(SystemDB.SORT_BY_NAME)).getItemList();
userList = UserDB.getUserSet(new SortedSet(SessionAuditDB.SORT_BY_USERNAME)).getItemList();
sortedSet = SessionAuditDB.getSessions(sortedSet);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "/manage/view_sessions.html";
}
@Kontrol(path = "/manage/getTermsForSession", method = MethodType.GET)
public String getTermsForSession() throws ServletException {
try {
sessionAudit = SessionAuditDB.getSessionsTerminals(sessionId);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "/manage/view_terms.html";
}
@Kontrol(path = "/manage/getJSONTermOutputForSession", method = MethodType.GET)
public String getJSONTermOutputForSession() throws ServletException {<FILL_FUNCTION_BODY>}
}
|
try {
String json = new Gson().toJson(SessionAuditDB.getTerminalLogsForSession(sessionId, instanceId));
getResponse().getOutputStream().write(json.getBytes());
} catch (SQLException | GeneralSecurityException | IOException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return null;
| 644 | 106 | 750 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/SystemKtrl.java
|
SystemKtrl
|
validateSaveSystem
|
class SystemKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(SystemKtrl.class);
public static final String REQUIRED = "Required";
@Model(name = "sortedSet")
SortedSet sortedSet = new SortedSet();
@Model(name = "hostSystem")
HostSystem hostSystem = new HostSystem();
@Model(name = "script")
Script script = null;
@Model(name = "password")
String password;
@Model(name = "passphrase")
String passphrase;
@Model(name = "profileList")
List<Profile> profileList = new ArrayList<>();
public SystemKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/admin/viewSystems", method = MethodType.GET)
public String viewAdminSystems() throws ServletException {
try {
Long userId = AuthUtil.getUserId(getRequest().getSession());
if (Auth.MANAGER.equals(AuthUtil.getUserType(getRequest().getSession()))) {
sortedSet = SystemDB.getSystemSet(sortedSet);
profileList = ProfileDB.getAllProfiles();
} else {
sortedSet = SystemDB.getUserSystemSet(sortedSet, userId);
profileList = UserProfileDB.getProfilesByUser(userId);
}
if (script != null && script.getId() != null) {
script = ScriptDB.getScript(script.getId(), userId);
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "/admin/view_systems.html";
}
@Kontrol(path = "/manage/viewSystems", method = MethodType.GET)
public String viewManageSystems() throws ServletException {
try {
sortedSet = SystemDB.getSystemSet(sortedSet);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "/manage/view_systems.html";
}
@Kontrol(path = "/manage/saveSystem", method = MethodType.POST)
public String saveSystem() throws ServletException {
String retVal = "redirect:/manage/viewSystems.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
hostSystem = SSHUtil.authAndAddPubKey(hostSystem, passphrase, password);
try {
if (hostSystem.getId() != null) {
SystemDB.updateSystem(hostSystem);
} else {
hostSystem.setId(SystemDB.insertSystem(hostSystem));
}
sortedSet = SystemDB.getSystemSet(sortedSet);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
if (!HostSystem.SUCCESS_STATUS.equals(hostSystem.getStatusCd())) {
retVal = "/manage/view_systems.html";
}
return retVal;
}
@Kontrol(path = "/manage/deleteSystem", method = MethodType.GET)
public String deleteSystem() throws ServletException {
if (hostSystem.getId() != null) {
try {
SystemDB.deleteSystem(hostSystem.getId());
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
return "redirect:/manage/viewSystems.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
}
/**
* Validates all fields for adding a host system
*/
@Validate(input = "/manage/view_systems.html")
public void validateSaveSystem() throws ServletException {<FILL_FUNCTION_BODY>}
}
|
if (hostSystem == null
|| hostSystem.getDisplayNm() == null
|| hostSystem.getDisplayNm().trim().equals("")) {
addFieldError("hostSystem.displayNm", REQUIRED);
}
if (hostSystem == null
|| hostSystem.getUser() == null
|| hostSystem.getUser().trim().equals("")) {
addFieldError("hostSystem.user", REQUIRED);
}
if (hostSystem == null
|| hostSystem.getHost() == null
|| hostSystem.getHost().trim().equals("")) {
addFieldError("hostSystem.host", REQUIRED);
}
if (hostSystem == null
|| hostSystem.getPort() == null) {
addFieldError("hostSystem.port", REQUIRED);
} else if (!(hostSystem.getPort() > 0)) {
addFieldError("hostSystem.port", "Invalid");
}
if (hostSystem == null
|| hostSystem.getAuthorizedKeys() == null
|| hostSystem.getAuthorizedKeys().trim().equals("") || hostSystem.getAuthorizedKeys().trim().equals("~")) {
addFieldError("hostSystem.authorizedKeys", REQUIRED);
}
if (!this.getFieldErrors().isEmpty()) {
try {
sortedSet = SystemDB.getSystemSet(sortedSet);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
| 1,208 | 432 | 1,640 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/UploadAndPushKtrl.java
|
UploadAndPushKtrl
|
push
|
class UploadAndPushKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(UploadAndPushKtrl.class);
public static final String UPLOAD_PATH = DBUtils.class.getClassLoader().getResource(".").getPath() + "../upload";
@Model(name = "upload")
File upload;
@Model(name = "uploadFileName")
String uploadFileName;
@Model(name = "idList")
List<Long> idList = new ArrayList<>();
@Model(name = "pushDir")
String pushDir = "~";
@Model(name = "hostSystemList")
List<HostSystem> hostSystemList;
@Model(name = "pendingSystemStatus")
HostSystem pendingSystemStatus;
@Model(name = "currentSystemStatus")
HostSystem currentSystemStatus;
public UploadAndPushKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/admin/setUpload", method = MethodType.GET)
public String setUpload() throws Exception {
Long userId = AuthUtil.getUserId(getRequest().getSession());
SystemStatusDB.setInitialSystemStatus(idList, userId, AuthUtil.getUserType(getRequest().getSession()));
return "/admin/upload.html";
}
@Kontrol(path = "/admin/uploadSubmit", method = MethodType.POST)
public String uploadSubmit() {
String retVal = "/admin/upload_result.html";
try {
Long userId = AuthUtil.getUserId(getRequest().getSession());
List<FileItem> multiparts = new ServletFileUpload(new DiskFileItemFactory()).parseRequest(getRequest());
for (FileItem item : multiparts) {
if (!item.isFormField()) {
uploadFileName = new File(item.getName()).getName();
File path = new File(UPLOAD_PATH);
if (!path.exists()) {
path.mkdirs();
}
upload = new File(UPLOAD_PATH + File.separator + uploadFileName);
item.write(upload);
} else {
pushDir = item.getString();
}
}
pendingSystemStatus = SystemStatusDB.getNextPendingSystem(userId);
hostSystemList = SystemStatusDB.getAllSystemStatus(userId);
} catch (Exception ex) {
log.error(ex.toString(), ex);
retVal = "/admin/upload.html";
}
//reset csrf token back since it's already set on page load
getRequest().getSession().setAttribute(SecurityFilter._CSRF,
getRequest().getParameter(SecurityFilter._CSRF));
return retVal;
}
@Kontrol(path = "/admin/push", method = MethodType.POST)
public String push() throws ServletException {<FILL_FUNCTION_BODY>}
}
|
try {
Long userId = AuthUtil.getUserId(getRequest().getSession());
Long sessionId = AuthUtil.getSessionId(getRequest().getSession());
//get next pending system
pendingSystemStatus = SystemStatusDB.getNextPendingSystem(userId);
if (pendingSystemStatus != null) {
//get session for system
SchSession session = null;
for (Integer instanceId : SecureShellKtrl.getUserSchSessionMap().get(sessionId).getSchSessionMap().keySet()) {
//if host system id matches pending system then upload
if (pendingSystemStatus.getId().equals(SecureShellKtrl.getUserSchSessionMap().get(sessionId).getSchSessionMap().get(instanceId).getHostSystem().getId())) {
session = SecureShellKtrl.getUserSchSessionMap().get(sessionId).getSchSessionMap().get(instanceId);
}
}
if (session != null) {
//push upload to system
currentSystemStatus = SSHUtil.pushUpload(pendingSystemStatus, session.getSession(), UPLOAD_PATH + "/" + uploadFileName, pushDir + "/" + uploadFileName);
//update system status
SystemStatusDB.updateSystemStatus(currentSystemStatus, userId);
pendingSystemStatus = SystemStatusDB.getNextPendingSystem(userId);
}
}
//if push has finished to all servers then delete uploaded file
if (pendingSystemStatus == null) {
File delFile = new File(UPLOAD_PATH, uploadFileName);
FileUtils.deleteQuietly(delFile);
//delete all expired files in upload path
File delDir = new File(UPLOAD_PATH);
if (delDir.isDirectory()) {
//set expire time to delete all files older than 48 hrs
Calendar expireTime = Calendar.getInstance();
expireTime.add(Calendar.HOUR, -48);
Iterator<File> filesToDelete = FileUtils.iterateFiles(delDir, new AgeFileFilter(expireTime.getTime()), TrueFileFilter.TRUE);
while (filesToDelete.hasNext()) {
delFile = filesToDelete.next();
delFile.delete();
}
}
}
hostSystemList = SystemStatusDB.getAllSystemStatus(userId);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
//reset csrf token back since it's already set on page load
getRequest().getSession().setAttribute(SecurityFilter._CSRF,
getRequest().getParameter(SecurityFilter._CSRF));
return "/admin/upload_result.html";
| 750 | 689 | 1,439 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/UserSettingsKtrl.java
|
UserSettingsKtrl
|
passwordSubmit
|
class UserSettingsKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(UserSettingsKtrl.class);
public static final String REQUIRED = "Required";
@Model(name = "themeMap")
static Map<String, String> themeMap1 = new LinkedHashMap<>(Map.ofEntries(
entry("Tango", "#2e3436,#cc0000,#4e9a06,#c4a000,#3465a4,#75507b,#06989a,#d3d7cf,#555753,#ef2929,#8ae234,#fce94f,#729fcf,#ad7fa8,#34e2e2,#eeeeec"),
entry("XTerm", "#000000,#cd0000,#00cd00,#cdcd00,#0000ee,#cd00cd,#00cdcd,#e5e5e5,#7f7f7f,#ff0000,#00ff00,#ffff00,#5c5cff,#ff00ff,#00ffff,#ffffff")
));
@Model(name = "planeMap")
static Map<String, String> planeMap1 = new LinkedHashMap<>(Map.ofEntries(
entry("Black on light yellow", "#FFFFDD,#000000"),
entry("Black on white", "#FFFFFF,#000000"),
entry("Gray on black", "#000000,#AAAAAA"),
entry("Green on black", "#000000,#00FF00"),
entry("White on black", "#000000,#FFFFFF")
));
@Model(name = "publicKey")
static String publicKey;
@Model(name = "auth")
Auth auth;
@Model(name = "userSettings")
UserSettings userSettings;
static {
try {
publicKey = PrivateKeyDB.getApplicationKey().getPublicKey();
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
}
}
public UserSettingsKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/admin/userSettings", method = MethodType.GET)
public String userSettings() throws ServletException {
try {
userSettings = UserThemeDB.getTheme(AuthUtil.getUserId(getRequest().getSession()));
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "/admin/user_settings.html";
}
@Kontrol(path = "/admin/passwordSubmit", method = MethodType.POST)
public String passwordSubmit() throws ServletException {<FILL_FUNCTION_BODY>}
@Kontrol(path = "/admin/themeSubmit", method = MethodType.POST)
public String themeSubmit() throws ServletException {
userSettings.setTheme(userSettings.getTheme());
userSettings.setPlane(userSettings.getPlane());
try {
UserThemeDB.saveTheme(AuthUtil.getUserId(getRequest().getSession()), userSettings);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "redirect:/admin/menu.html";
}
/**
* Validates fields for password submit
*/
@Validate(input = "/admin/user_settings.html")
public void validatePasswordSubmit() {
if (auth.getPassword() == null ||
auth.getPassword().trim().equals("")) {
addFieldError("auth.password", REQUIRED);
}
if (auth.getPasswordConfirm() == null ||
auth.getPasswordConfirm().trim().equals("")) {
addFieldError("auth.passwordConfirm", REQUIRED);
}
if (auth.getPrevPassword() == null ||
auth.getPrevPassword().trim().equals("")) {
addFieldError("auth.prevPassword", REQUIRED);
}
}
}
|
String retVal = "/admin/user_settings.html";
if (!auth.getPassword().equals(auth.getPasswordConfirm())) {
addError("Passwords do not match");
} else if (!PasswordUtil.isValid(auth.getPassword())) {
addError(PasswordUtil.PASSWORD_REQ_ERROR_MSG);
} else {
try {
auth.setAuthToken(AuthUtil.getAuthToken(getRequest().getSession()));
if (AuthDB.updatePassword(auth)) {
retVal = "redirect:/admin/menu.html";
} else {
addError("Current password is invalid");
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
}
return retVal;
| 1,212 | 245 | 1,457 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/control/UsersKtrl.java
|
UsersKtrl
|
validateSaveUser
|
class UsersKtrl extends BaseKontroller {
private static final Logger log = LoggerFactory.getLogger(UsersKtrl.class);
public static final String REQUIRED = "Required";
@Model(name = "sortedSet")
SortedSet sortedSet = new SortedSet();
@Model(name = "user")
User user = new User();
@Model(name = "resetSharedSecret")
Boolean resetSharedSecret = false;
@Model(name = "userId")
Long userId;
public UsersKtrl(HttpServletRequest request, HttpServletResponse response) {
super(request, response);
}
@Kontrol(path = "/manage/viewUsers", method = MethodType.GET)
public String viewUsers() throws ServletException {
try {
userId = AuthUtil.getUserId(getRequest().getSession());
sortedSet = UserDB.getUserSet(sortedSet);
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "/manage/view_users.html";
}
@Kontrol(path = "/manage/saveUser", method = MethodType.POST)
public String saveUser() throws ServletException {
String retVal = "redirect:/manage/viewUsers.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
try {
if (user.getId() != null) {
if (user.getPassword() == null || user.getPassword().trim().equals("")) {
UserDB.updateUserNoCredentials(user);
} else {
UserDB.updateUserCredentials(user);
}
//check if reset is set
if (resetSharedSecret) {
UserDB.resetSharedSecret(user.getId());
}
} else {
UserDB.insertUser(user);
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return retVal;
}
@Kontrol(path = "/manage/deleteUser", method = MethodType.GET)
public String deleteUser() throws ServletException {
try {
if (user.getId() != null && !user.getId().equals(AuthUtil.getUserId(getRequest().getSession()))) {
UserDB.deleteUser(user.getId());
PublicKeyDB.deleteUserPublicKeys(user.getId());
RefreshAuthKeyUtil.refreshAllSystems();
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "redirect:/manage/viewUsers.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
}
@Kontrol(path = "/manage/unlockUser", method = MethodType.GET)
public String unlockUser() throws ServletException {
try {
if (user.getId() != null && !user.getId().equals(AuthUtil.getUserId(getRequest().getSession()))) {
UserDB.unlockAccount(user.getId());
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
return "redirect:/manage/viewUsers.ktrl?sortedSet.orderByDirection=" + sortedSet.getOrderByDirection() + "&sortedSet.orderByField=" + sortedSet.getOrderByField();
}
/**
* Validates all fields for adding a user
*/
@Validate(input = "/manage/view_users.html")
public void validateSaveUser() throws ServletException {<FILL_FUNCTION_BODY>}
}
|
if (user == null
|| user.getUsername() == null
|| user.getUsername().trim().equals("")) {
addFieldError("user.username", REQUIRED);
}
if (user == null
|| user.getLastNm() == null
|| user.getLastNm().trim().equals("")) {
addFieldError("user.lastNm", REQUIRED);
}
if (user == null
|| user.getFirstNm() == null
|| user.getFirstNm().trim().equals("")) {
addFieldError("user.firstNm", REQUIRED);
}
if (user != null && user.getPassword() != null && !user.getPassword().trim().equals("")) {
if (!user.getPassword().equals(user.getPasswordConfirm())) {
addError("Passwords do not match");
} else if (!PasswordUtil.isValid(user.getPassword())) {
addError(PasswordUtil.PASSWORD_REQ_ERROR_MSG);
}
}
if (user != null && user.getId() == null && !Auth.AUTH_EXTERNAL.equals(user.getAuthType()) && (user.getPassword() == null || user.getPassword().trim().equals(""))) {
addError("Password is required");
}
try {
if (user != null && !UserDB.isUnique(user.getId(), user.getUsername())) {
addError("Username has been taken");
}
if (!this.getFieldErrors().isEmpty() || !this.getErrors().isEmpty()) {
userId = AuthUtil.getUserId(getRequest().getSession());
sortedSet = UserDB.getUserSet(sortedSet);
}
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
throw new ServletException(ex.toString(), ex);
}
| 1,133 | 528 | 1,661 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/db/PrivateKeyDB.java
|
PrivateKeyDB
|
getApplicationKey
|
class PrivateKeyDB {
private PrivateKeyDB() {
}
/**
* returns public private key for application
*
* @return app key values
*/
public static ApplicationKey getApplicationKey() throws SQLException, GeneralSecurityException {<FILL_FUNCTION_BODY>}
}
|
ApplicationKey appKey = null;
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("select * from application_key");
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
appKey = new ApplicationKey();
appKey.setId(rs.getLong("id"));
appKey.setPassphrase(EncryptionUtil.decrypt(rs.getString("passphrase")));
appKey.setPrivateKey(EncryptionUtil.decrypt(rs.getString("private_key")));
appKey.setPublicKey(rs.getString("public_key"));
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
return appKey;
| 77 | 207 | 284 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/db/ProfileDB.java
|
ProfileDB
|
getProfileSet
|
class ProfileDB {
public static final String FILTER_BY_SYSTEM = "system";
public static final String FILTER_BY_USER = "username";
public static final String SORT_BY_PROFILE_NM = "nm";
private ProfileDB() {
}
/**
* method to do order by based on the sorted set object for profiles
*
* @return list of profiles
*/
public static SortedSet getProfileSet(SortedSet sortedSet) throws SQLException, GeneralSecurityException {<FILL_FUNCTION_BODY>}
/**
* returns all profile information
*
* @return list of profiles
*/
public static List<Profile> getAllProfiles() throws SQLException, GeneralSecurityException {
ArrayList<Profile> profileList = new ArrayList<>();
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("select * from profiles order by nm asc");
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
Profile profile = new Profile();
profile.setId(rs.getLong("id"));
profile.setNm(rs.getString("nm"));
profile.setDesc(rs.getString("desc"));
profileList.add(profile);
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
return profileList;
}
/**
* returns profile based on id
*
* @param profileId profile id
* @return profile
*/
public static Profile getProfile(Long profileId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
Profile profile = getProfile(con, profileId);
DBUtils.closeConn(con);
return profile;
}
/**
* returns profile based on id
*
* @param con db connection object
* @param profileId profile id
* @return profile
*/
public static Profile getProfile(Connection con, Long profileId) throws SQLException {
Profile profile = null;
PreparedStatement stmt = con.prepareStatement("select * from profiles where id=?");
stmt.setLong(1, profileId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
profile = new Profile();
profile.setId(rs.getLong("id"));
profile.setNm(rs.getString("nm"));
profile.setDesc(rs.getString("desc"));
profile.setHostSystemList(ProfileSystemsDB.getSystemsByProfile(con, profileId));
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
return profile;
}
/**
* inserts new profile
*
* @param profile profile object
*/
public static void insertProfile(Profile profile) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("insert into profiles (nm, desc) values (?,?)");
stmt.setString(1, profile.getNm());
stmt.setString(2, profile.getDesc());
stmt.execute();
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
}
/**
* updates profile
*
* @param profile profile object
*/
public static void updateProfile(Profile profile) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("update profiles set nm=?, desc=? where id=?");
stmt.setString(1, profile.getNm());
stmt.setString(2, profile.getDesc());
stmt.setLong(3, profile.getId());
stmt.execute();
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
}
/**
* deletes profile
*
* @param profileId profile id
*/
public static void deleteProfile(Long profileId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("delete from profiles where id=?");
stmt.setLong(1, profileId);
stmt.execute();
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
}
}
|
ArrayList<Profile> profileList = new ArrayList<>();
String orderBy = "";
if (sortedSet.getOrderByField() != null && !sortedSet.getOrderByField().trim().equals("")) {
orderBy = " order by " + sortedSet.getOrderByField() + " " + sortedSet.getOrderByDirection();
}
String sql = "select distinct p.* from profiles p ";
if (StringUtils.isNotEmpty(sortedSet.getFilterMap().get(FILTER_BY_SYSTEM))) {
sql = sql + ", system_map m, system s where m.profile_id = p.id and m.system_id = s.id" +
" and (lower(s.display_nm) like ? or lower(s.host) like ?)";
} else if (StringUtils.isNotEmpty(sortedSet.getFilterMap().get(FILTER_BY_USER))) {
sql = sql + ", user_map m, users u where m.profile_id = p.id and m.user_id = u.id" +
" and (lower(u.first_nm) like ? or lower(u.last_nm) like ?" +
" or lower(u.email) like ? or lower(u.username) like ?)";
}
sql = sql + orderBy;
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement(sql);
if (StringUtils.isNotEmpty(sortedSet.getFilterMap().get(FILTER_BY_SYSTEM))) {
stmt.setString(1, "%" + sortedSet.getFilterMap().get(FILTER_BY_SYSTEM).toLowerCase() + "%");
stmt.setString(2, "%" + sortedSet.getFilterMap().get(FILTER_BY_SYSTEM).toLowerCase() + "%");
} else if (StringUtils.isNotEmpty(sortedSet.getFilterMap().get(FILTER_BY_USER))) {
stmt.setString(1, "%" + sortedSet.getFilterMap().get(FILTER_BY_USER).toLowerCase() + "%");
stmt.setString(2, "%" + sortedSet.getFilterMap().get(FILTER_BY_USER).toLowerCase() + "%");
stmt.setString(3, "%" + sortedSet.getFilterMap().get(FILTER_BY_USER).toLowerCase() + "%");
stmt.setString(4, "%" + sortedSet.getFilterMap().get(FILTER_BY_USER).toLowerCase() + "%");
}
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
Profile profile = new Profile();
profile.setId(rs.getLong("id"));
profile.setNm(rs.getString("nm"));
profile.setDesc(rs.getString("desc"));
profileList.add(profile);
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
sortedSet.setItemList(profileList);
return sortedSet;
| 1,136 | 785 | 1,921 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/db/ProfileSystemsDB.java
|
ProfileSystemsDB
|
getSystemIdsByProfile
|
class ProfileSystemsDB {
private ProfileSystemsDB() {
}
/**
* sets host systems for profile
*
* @param profileId profile id
* @param systemIdList list of host system ids
*/
public static void setSystemsForProfile(Long profileId, List<Long> systemIdList) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("delete from system_map where profile_id=?");
stmt.setLong(1, profileId);
stmt.execute();
DBUtils.closeStmt(stmt);
for (Long systemId : systemIdList) {
stmt = con.prepareStatement("insert into system_map (profile_id, system_id) values (?,?)");
stmt.setLong(1, profileId);
stmt.setLong(2, systemId);
stmt.execute();
DBUtils.closeStmt(stmt);
}
DBUtils.closeConn(con);
}
/**
* returns a list of systems for a given profile
*
* @param con DB connection
* @param profileId profile id
* @return list of host systems
*/
public static List<HostSystem> getSystemsByProfile(Connection con, Long profileId) throws SQLException {
List<HostSystem> hostSystemList = new ArrayList<>();
PreparedStatement stmt = con.prepareStatement("select * from system s, system_map m where s.id=m.system_id and m.profile_id=? order by display_nm asc");
stmt.setLong(1, profileId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
HostSystem hostSystem = new HostSystem();
hostSystem.setId(rs.getLong("id"));
hostSystem.setDisplayNm(rs.getString("display_nm"));
hostSystem.setUser(rs.getString("username"));
hostSystem.setHost(rs.getString("host"));
hostSystem.setPort(rs.getInt("port"));
hostSystem.setAuthorizedKeys(rs.getString("authorized_keys"));
hostSystemList.add(hostSystem);
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
return hostSystemList;
}
/**
* returns a list of systems for a given profile
*
* @param profileId profile id
* @return list of host systems
*/
public static List<HostSystem> getSystemsByProfile(Long profileId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
List<HostSystem> hostSystemList = getSystemsByProfile(con, profileId);
DBUtils.closeConn(con);
return hostSystemList;
}
/**
* returns a list of system ids for a given profile
*
* @param con DB con
* @param profileId profile id
     * @return list of host system ids
*/
public static List<Long> getSystemIdsByProfile(Connection con, Long profileId) throws SQLException {<FILL_FUNCTION_BODY>}
/**
* returns a list of system ids for a given profile
*
* @param profileId profile id
     * @return list of host system ids
*/
public static List<Long> getSystemIdsByProfile(Long profileId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
List<Long> systemIdList = getSystemIdsByProfile(con, profileId);
DBUtils.closeConn(con);
return systemIdList;
}
/**
* returns a list of system ids for a given profile
*
* @param con DB con
* @param profileId profile id
* @param userId user id
     * @return list of host system ids
*/
public static List<Long> getSystemIdsByProfile(Connection con, Long profileId, Long userId) throws SQLException {
List<Long> systemIdList = new ArrayList<>();
PreparedStatement stmt = con.prepareStatement("select sm.system_id from system_map sm, user_map um where um.profile_id=sm.profile_id and sm.profile_id=? and um.user_id=?");
stmt.setLong(1, profileId);
stmt.setLong(2, userId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
systemIdList.add(rs.getLong("system_id"));
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
return systemIdList;
}
/**
* returns a list of system ids for a given profile
*
* @param profileId profile id
* @param userId user id
     * @return list of host system ids
*/
public static List<Long> getSystemIdsByProfile(Long profileId, Long userId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
List<Long> systemIdList = getSystemIdsByProfile(con, profileId, userId);
DBUtils.closeConn(con);
return systemIdList;
}
}
|
List<Long> systemIdList = new ArrayList<>();
PreparedStatement stmt = con.prepareStatement("select * from system s, system_map m where s.id=m.system_id and m.profile_id=? order by display_nm asc");
stmt.setLong(1, profileId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
systemIdList.add(rs.getLong("id"));
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
return systemIdList;
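A usage sketch (not part of the record above): assign two host systems to a profile and read the ids back through ProfileSystemsDB. The profile and system ids are made up, and a configured Bastillion database reachable through DBUtils is assumed.

import io.bastillion.manage.db.ProfileSystemsDB;

import java.util.Arrays;
import java.util.List;

public class ProfileSystemsExample {
    public static void main(String[] args) throws Exception {
        Long profileId = 1L;                                            // hypothetical existing profile
        // replace the profile's system mapping with systems 10 and 11
        ProfileSystemsDB.setSystemsForProfile(profileId, Arrays.asList(10L, 11L));
        // read the ids back via the method completed in this record
        List<Long> systemIds = ProfileSystemsDB.getSystemIdsByProfile(profileId);
        System.out.println("systems mapped to profile " + profileId + ": " + systemIds);
    }
}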
| 1,344 | 151 | 1,495 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/db/ScriptDB.java
|
ScriptDB
|
getScript
|
class ScriptDB {
public static final String DISPLAY_NM = "display_nm";
public static final String SORT_BY_DISPLAY_NM = DISPLAY_NM;
private ScriptDB() {
}
/**
* returns scripts based on sort order defined
*
* @param sortedSet object that defines sort order
* @param userId user id
* @return sorted script list
*/
public static SortedSet getScriptSet(SortedSet sortedSet, Long userId) throws SQLException, GeneralSecurityException {
ArrayList<Script> scriptList = new ArrayList<>();
String orderBy = "";
if (sortedSet.getOrderByField() != null && !sortedSet.getOrderByField().trim().equals("")) {
orderBy = "order by " + sortedSet.getOrderByField() + " " + sortedSet.getOrderByDirection();
}
String sql = "select * from scripts where user_id=? " + orderBy;
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement(sql);
stmt.setLong(1, userId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
Script script = new Script();
script.setId(rs.getLong("id"));
script.setDisplayNm(rs.getString(DISPLAY_NM));
script.setScript(rs.getString("script"));
scriptList.add(script);
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
sortedSet.setItemList(scriptList);
return sortedSet;
}
/**
     * returns a script based on its id
*
* @param scriptId script id
* @param userId user id
* @return script object
*/
public static Script getScript(Long scriptId, Long userId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
Script script = getScript(con, scriptId, userId);
DBUtils.closeConn(con);
return script;
}
/**
     * returns a script based on its id
*
* @param con DB connection
* @param scriptId script id
* @param userId user id
* @return script object
*/
public static Script getScript(Connection con, Long scriptId, Long userId) throws SQLException {<FILL_FUNCTION_BODY>}
/**
* inserts new script
*
* @param script script object
* @param userId user id
*/
public static void insertScript(Script script, Long userId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("insert into scripts (display_nm, script, user_id) values (?,?,?)");
stmt.setString(1, script.getDisplayNm());
stmt.setString(2, script.getScript());
stmt.setLong(3, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
}
/**
* updates existing script
*
* @param script script object
* @param userId user id
*/
public static void updateScript(Script script, Long userId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("update scripts set display_nm=?, script=? where id=? and user_id=?");
stmt.setString(1, script.getDisplayNm());
stmt.setString(2, script.getScript());
stmt.setLong(3, script.getId());
stmt.setLong(4, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
}
/**
* deletes script
*
* @param scriptId script id
* @param userId user id
*/
public static void deleteScript(Long scriptId, Long userId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("delete from scripts where id=? and user_id=?");
stmt.setLong(1, scriptId);
stmt.setLong(2, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
}
}
|
Script script = null;
PreparedStatement stmt = con.prepareStatement("select * from scripts where id=? and user_id=?");
stmt.setLong(1, scriptId);
stmt.setLong(2, userId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
script = new Script();
script.setId(rs.getLong("id"));
script.setDisplayNm(rs.getString(DISPLAY_NM));
script.setScript(rs.getString("script"));
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
return script;
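A usage sketch for the ScriptDB record above: insert a script for a user and list the user's scripts ordered by display name. The user id is made up, a configured Bastillion database is assumed, and the io.bastillion.manage.model package for Script is an assumption based on the project layout.

import io.bastillion.manage.db.ScriptDB;
import io.bastillion.manage.model.Script;       // package assumed
import io.bastillion.manage.model.SortedSet;

public class ScriptExample {
    public static void main(String[] args) throws Exception {
        Long userId = 1L;                                   // hypothetical user id
        Script script = new Script();
        script.setDisplayNm("disk usage");
        script.setScript("df -h");
        ScriptDB.insertScript(script, userId);              // persist the script
        // list this user's scripts ordered by display name
        SortedSet sortedSet = ScriptDB.getScriptSet(new SortedSet(ScriptDB.SORT_BY_DISPLAY_NM), userId);
        for (Object item : sortedSet.getItemList()) {
            System.out.println(((Script) item).getDisplayNm());
        }
    }
}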
| 1,177 | 175 | 1,352 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/db/SystemStatusDB.java
|
SystemStatusDB
|
getNextPendingSystem
|
class SystemStatusDB {
public static final String STATUS_CD = "status_cd";
private SystemStatusDB() {
}
/**
* set the initial status for selected systems
*
     * @param systemSelectIds system ids to set initial status
* @param userId user id
* @param userType user type
*/
public static void setInitialSystemStatus(List<Long> systemSelectIds, Long userId, String userType) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
        //check perms to see if the selected systems are in the user's assigned profiles
if (!Auth.MANAGER.equals(userType)) {
systemSelectIds = SystemDB.checkSystemPerms(con, systemSelectIds, userId);
}
//deletes all old systems
deleteAllSystemStatus(con, userId);
for (Long hostSystemId : systemSelectIds) {
HostSystem hostSystem = new HostSystem();
hostSystem.setId(hostSystemId);
hostSystem.setStatusCd(HostSystem.INITIAL_STATUS);
//insert new status
insertSystemStatus(con, hostSystem, userId);
}
DBUtils.closeConn(con);
}
/**
* deletes all records from status table for user
*
* @param con DB connection object
* @param userId user id
*/
private static void deleteAllSystemStatus(Connection con, Long userId) throws SQLException {
PreparedStatement stmt = con.prepareStatement("delete from status where user_id=?");
stmt.setLong(1, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
}
/**
* inserts into the status table to keep track of key placement status
*
* @param con DB connection object
* @param hostSystem systems for authorized_keys replacement
* @param userId user id
*/
private static void insertSystemStatus(Connection con, HostSystem hostSystem, Long userId) throws SQLException {
PreparedStatement stmt = con.prepareStatement("insert into status (id, status_cd, user_id) values (?,?,?)");
stmt.setLong(1, hostSystem.getId());
stmt.setString(2, hostSystem.getStatusCd());
stmt.setLong(3, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
}
/**
* updates the status table to keep track of key placement status
*
* @param hostSystem systems for authorized_keys replacement
* @param userId user id
*/
public static void updateSystemStatus(HostSystem hostSystem, Long userId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
updateSystemStatus(con, hostSystem, userId);
DBUtils.closeConn(con);
}
/**
* updates the status table to keep track of key placement status
*
* @param con DB connection
* @param hostSystem systems for authorized_keys replacement
* @param userId user id
*/
public static void updateSystemStatus(Connection con, HostSystem hostSystem, Long userId) throws SQLException {
PreparedStatement stmt = con.prepareStatement("update status set status_cd=? where id=? and user_id=?");
stmt.setString(1, hostSystem.getStatusCd());
stmt.setLong(2, hostSystem.getId());
stmt.setLong(3, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
}
/**
* returns all key placement statuses
*
* @param userId user id
*/
public static SortedSet getSortedSetStatus(Long userId) throws SQLException, GeneralSecurityException {
SortedSet sortedSet = new SortedSet();
sortedSet.setItemList(getAllSystemStatus(userId));
return sortedSet;
}
/**
* returns all key placement statuses
*
* @param userId user id
*/
public static List<HostSystem> getAllSystemStatus(Long userId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
List<HostSystem> hostSystemList = getAllSystemStatus(con, userId);
DBUtils.closeConn(con);
return hostSystemList;
}
/**
* returns all key placement statuses
*
* @param con DB connection object
* @param userId user id
*/
private static List<HostSystem> getAllSystemStatus(Connection con, Long userId) throws SQLException {
List<HostSystem> hostSystemList = new ArrayList<>();
PreparedStatement stmt = con.prepareStatement("select * from status where user_id=? order by id asc");
stmt.setLong(1, userId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
HostSystem hostSystem = SystemDB.getSystem(con, rs.getLong("id"));
hostSystem.setStatusCd(rs.getString(STATUS_CD));
hostSystemList.add(hostSystem);
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
return hostSystemList;
}
/**
* returns key placement status of system
*
* @param systemId system id
* @param userId user id
*/
public static HostSystem getSystemStatus(Long systemId, Long userId) throws SQLException, GeneralSecurityException {
HostSystem hostSystem = null;
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("select * from status where id=? and user_id=?");
stmt.setLong(1, systemId);
stmt.setLong(2, userId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
hostSystem = SystemDB.getSystem(con, rs.getLong("id"));
hostSystem.setStatusCd(rs.getString(STATUS_CD));
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
return hostSystem;
}
/**
     * returns the first system for which authorized_keys placement has not yet been attempted
*
* @param userId user id
* @return hostSystem systems for authorized_keys replacement
*/
public static HostSystem getNextPendingSystem(Long userId) throws SQLException, GeneralSecurityException {<FILL_FUNCTION_BODY>}
}
|
HostSystem hostSystem = null;
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("select * from status where (status_cd like ? or status_cd like ? or status_cd like ?) and user_id=? order by id asc");
stmt.setString(1, HostSystem.INITIAL_STATUS);
stmt.setString(2, HostSystem.AUTH_FAIL_STATUS);
stmt.setString(3, HostSystem.PUBLIC_KEY_FAIL_STATUS);
stmt.setLong(4, userId);
ResultSet rs = stmt.executeQuery();
if (rs.next()) {
hostSystem = SystemDB.getSystem(con, rs.getLong("id"));
hostSystem.setStatusCd(rs.getString(STATUS_CD));
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
return hostSystem;
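A sketch of the pending-status workflow implied by this record: seed the status table, then poll getNextPendingSystem until nothing with an initial or failed status remains. Only methods and constants visible in this dump are called; the ids, the Auth/HostSystem package names, and the "SUCCESS" status string are assumptions (any value other than the three pending codes ends the loop).

import io.bastillion.manage.db.SystemStatusDB;
import io.bastillion.manage.model.Auth;         // package assumed
import io.bastillion.manage.model.HostSystem;   // package assumed

import java.util.Arrays;

public class KeyPlacementExample {
    public static void main(String[] args) throws Exception {
        Long userId = 1L;                                                    // hypothetical manager user
        // mark two systems with HostSystem.INITIAL_STATUS
        SystemStatusDB.setInitialSystemStatus(Arrays.asList(10L, 11L), userId, Auth.MANAGER);
        HostSystem hostSystem;
        while ((hostSystem = SystemStatusDB.getNextPendingSystem(userId)) != null) {
            // ... attempt the authorized_keys update for hostSystem here ...
            hostSystem.setStatusCd("SUCCESS");                               // placeholder status code
            SystemStatusDB.updateSystemStatus(hostSystem, userId);           // so it is no longer pending
        }
    }
}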
| 1,683 | 255 | 1,938 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/db/UserProfileDB.java
|
UserProfileDB
|
checkIsUsersProfile
|
class UserProfileDB {
private UserProfileDB() {
}
/**
* sets users for profile
*
* @param profileId profile id
* @param userIdList list of user ids
*/
public static void setUsersForProfile(Long profileId, List<Long> userIdList) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("delete from user_map where profile_id=?");
stmt.setLong(1, profileId);
stmt.execute();
DBUtils.closeStmt(stmt);
for (Long userId : userIdList) {
stmt = con.prepareStatement("insert into user_map (profile_id, user_id) values (?,?)");
stmt.setLong(1, profileId);
stmt.setLong(2, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
}
//delete all unassigned keys by profile
PublicKeyDB.deleteUnassignedKeysByProfile(con, profileId);
DBUtils.closeConn(con);
}
/**
* return a list of profiles for user
*
* @param userId user id
* @return profile list
*/
public static List<Profile> getProfilesByUser(Long userId) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
List<Profile> profileList = getProfilesByUser(con, userId);
DBUtils.closeConn(con);
return profileList;
}
/**
* return a list of profiles for user
*
* @param userId user id
* @return profile list
*/
public static List<Profile> getProfilesByUser(Connection con, Long userId) throws SQLException {
ArrayList<Profile> profileList = new ArrayList<>();
PreparedStatement stmt = con.prepareStatement("select * from profiles g, user_map m where g.id=m.profile_id and m.user_id=? order by nm asc");
stmt.setLong(1, userId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
Profile profile = new Profile();
profile.setId(rs.getLong("id"));
profile.setNm(rs.getString("nm"));
profile.setDesc(rs.getString("desc"));
profileList.add(profile);
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
return profileList;
}
/**
* checks to determine if user belongs to profile
*
* @param userId user id
* @param profileId profile id
* @return true if user belongs to profile
*/
public static boolean checkIsUsersProfile(Long userId, Long profileId) throws SQLException, GeneralSecurityException {<FILL_FUNCTION_BODY>}
/**
* assigns profiles to given user
*
* @param userId user id
* @param allProfilesNmList list of all profiles
* @param assignedProfilesNmList list of assigned profiles
*/
public static void assignProfilesToUser(Connection con, Long userId, List<String> allProfilesNmList, List<String> assignedProfilesNmList) throws SQLException {
for (String profileNm : allProfilesNmList) {
if (StringUtils.isNotEmpty(profileNm)) {
Long profileId = null;
PreparedStatement stmt = con.prepareStatement("select id from profiles p where lower(p.nm) like ?");
stmt.setString(1, profileNm.toLowerCase());
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
profileId = rs.getLong("id");
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
if (profileId != null) {
stmt = con.prepareStatement("delete from user_map where profile_id=?");
stmt.setLong(1, profileId);
stmt.execute();
DBUtils.closeStmt(stmt);
if (assignedProfilesNmList.contains(profileNm)) {
stmt = con.prepareStatement("insert into user_map (profile_id, user_id) values (?,?)");
stmt.setLong(1, profileId);
stmt.setLong(2, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
}
//delete all unassigned keys by profile
PublicKeyDB.deleteUnassignedKeysByProfile(con, profileId);
}
}
}
}
/**
     * assigns a single profile to a given user
*
* @param userId user id
* @param profileNm profile name
*/
public static void assignProfileToUser(Connection con, Long userId, String profileNm) throws SQLException {
if (StringUtils.isNotEmpty(profileNm)) {
Long profileId = null;
PreparedStatement stmt = con.prepareStatement("select id from profiles p where lower(p.nm) like ?");
stmt.setString(1, profileNm.toLowerCase());
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
profileId = rs.getLong("id");
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
if (profileId != null) {
stmt = con.prepareStatement("delete from user_map where profile_id=?");
stmt.setLong(1, profileId);
stmt.execute();
DBUtils.closeStmt(stmt);
stmt = con.prepareStatement("insert into user_map (profile_id, user_id) values (?,?)");
stmt.setLong(1, profileId);
stmt.setLong(2, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
//delete all unassigned keys by profile
PublicKeyDB.deleteUnassignedKeysByProfile(con, profileId);
}
}
}
}
|
boolean isUsersProfile = false;
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("select * from user_map where profile_id=? and user_id=?");
stmt.setLong(1, profileId);
stmt.setLong(2, userId);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
isUsersProfile = true;
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
return isUsersProfile;
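A minimal guard sketch around the method completed above; the ids are made up and a configured Bastillion database is assumed.

import io.bastillion.manage.db.UserProfileDB;

public class ProfileGuardExample {
    public static void main(String[] args) throws Exception {
        Long userId = 2L;                       // hypothetical user id
        Long profileId = 5L;                    // hypothetical profile id
        if (!UserProfileDB.checkIsUsersProfile(userId, profileId)) {
            throw new SecurityException("user " + userId + " is not assigned to profile " + profileId);
        }
        // ... proceed with profile-scoped work ...
    }
}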
| 1,593 | 159 | 1,752 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/db/UserThemeDB.java
|
UserThemeDB
|
getTheme
|
class UserThemeDB {
private UserThemeDB() {
}
/**
* get user theme
*
     * @param userId user id
* @return user theme object
*/
public static UserSettings getTheme(Long userId) throws SQLException, GeneralSecurityException {<FILL_FUNCTION_BODY>}
/**
* saves user theme
*
     * @param userId user id
*/
public static void saveTheme(Long userId, UserSettings theme) throws SQLException, GeneralSecurityException {
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("delete from user_theme where user_id=?");
stmt.setLong(1, userId);
stmt.execute();
DBUtils.closeStmt(stmt);
if (StringUtils.isNotEmpty(theme.getPlane()) || StringUtils.isNotEmpty(theme.getTheme())) {
stmt = con.prepareStatement("insert into user_theme(user_id, bg, fg, d1, d2, d3, d4, d5, d6, d7, d8, b1, b2, b3, b4, b5, b6, b7, b8) values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)");
stmt.setLong(1, userId);
stmt.setString(2, theme.getBg());
stmt.setString(3, theme.getFg());
//if contains all 16 theme colors insert
if (theme.getColors() != null && theme.getColors().length == 16) {
for (int i = 0; i < 16; i++) {
stmt.setString(i + 4, theme.getColors()[i]);
}
//else set to null
} else {
for (int i = 0; i < 16; i++) {
stmt.setString(i + 4, null);
}
}
stmt.execute();
DBUtils.closeStmt(stmt);
}
DBUtils.closeConn(con);
}
}
|
UserSettings theme = null;
Connection con = DBUtils.getConn();
PreparedStatement stmt = con.prepareStatement("select * from user_theme where user_id=?");
stmt.setLong(1, userId);
ResultSet rs = stmt.executeQuery();
if (rs.next()) {
theme = new UserSettings();
theme.setBg(rs.getString("bg"));
theme.setFg(rs.getString("fg"));
if (StringUtils.isNotEmpty(rs.getString("d1"))) {
String[] colors = new String[16];
colors[0] = rs.getString("d1");
colors[1] = rs.getString("d2");
colors[2] = rs.getString("d3");
colors[3] = rs.getString("d4");
colors[4] = rs.getString("d5");
colors[5] = rs.getString("d6");
colors[6] = rs.getString("d7");
colors[7] = rs.getString("d8");
colors[8] = rs.getString("b1");
colors[9] = rs.getString("b2");
colors[10] = rs.getString("b3");
colors[11] = rs.getString("b4");
colors[12] = rs.getString("b5");
colors[13] = rs.getString("b6");
colors[14] = rs.getString("b7");
colors[15] = rs.getString("b8");
theme.setColors(colors);
}
}
DBUtils.closeRs(rs);
DBUtils.closeStmt(stmt);
DBUtils.closeConn(con);
return theme;
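A round-trip sketch for the theme persistence in this record: build a UserSettings with a background/foreground pair and a 16-colour palette, save it, and read it back. The user id and colour values are placeholders, and a configured Bastillion database is assumed.

import io.bastillion.manage.db.UserThemeDB;
import io.bastillion.manage.model.UserSettings;

import java.util.Arrays;

public class ThemeExample {
    public static void main(String[] args) throws Exception {
        Long userId = 1L;                               // hypothetical user id
        UserSettings theme = new UserSettings();
        theme.setPlane("#FFFFFF,#000000");              // "bg,fg" pair parsed by setPlane
        String[] colors = new String[16];               // d1-d8 then b1-b8
        Arrays.fill(colors, "#808080");                 // placeholder palette entries
        theme.setColors(colors);
        UserThemeDB.saveTheme(userId, theme);
        UserSettings saved = UserThemeDB.getTheme(userId);
        System.out.println(saved.getPlane());           // #FFFFFF,#000000
        System.out.println(saved.getTheme());           // 16 comma-separated colours
    }
}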
| 541 | 456 | 997 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/model/SortedSet.java
|
SortedSet
|
getOrderByField
|
class SortedSet {
private String orderByField = null;
private String orderByDirection = "asc";
private List itemList;
private Map<String, String> filterMap = new HashMap<>();
public SortedSet() {
}
public SortedSet(String orderByField) {
this.orderByField = orderByField;
}
public String getOrderByField() {<FILL_FUNCTION_BODY>}
public void setOrderByField(String orderByField) {
this.orderByField = orderByField;
}
public String getOrderByDirection() {
if ("asc".equalsIgnoreCase(orderByDirection)) {
return "asc";
} else {
return "desc";
}
}
public void setOrderByDirection(String orderByDirection) {
this.orderByDirection = orderByDirection;
}
public List getItemList() {
return itemList;
}
public void setItemList(List itemList) {
this.itemList = itemList;
}
public Map<String, String> getFilterMap() {
return filterMap;
}
public void setFilterMap(Map<String, String> filterMap) {
this.filterMap = filterMap;
}
}
|
if (orderByField != null) {
return orderByField.replaceAll("[^0-9,a-z,A-Z,\\_,\\.]", "");
}
return null;
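A standalone illustration of what the character whitelist above does to a hostile order-by value; the input string is made up and only java.lang is used.

public class OrderBySanitizeExample {
    public static void main(String[] args) {
        String requested = "display_nm; drop table users --";
        // same whitelist used by SortedSet.getOrderByField()
        String sanitized = requested.replaceAll("[^0-9,a-z,A-Z,\\_,\\.]", "");
        System.out.println(sanitized);   // prints "display_nmdroptableusers" - spaces, ';' and '-' are stripped
    }
}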
| 334 | 55 | 389 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/model/UserSettings.java
|
UserSettings
|
setPlane
|
class UserSettings {
String[] colors = null;
String bg;
String fg;
String plane;
String theme;
Integer ptyWidth;
Integer ptyHeight;
public String[] getColors() {
return colors;
}
public void setColors(String[] colors) {
this.colors = colors;
}
public String getBg() {
return bg;
}
public void setBg(String bg) {
this.bg = bg;
}
public String getFg() {
return fg;
}
public void setFg(String fg) {
this.fg = fg;
}
public String getPlane() {
if (StringUtils.isNotEmpty(bg) && StringUtils.isNotEmpty(fg)) {
plane = bg + "," + fg;
}
return plane;
}
public void setPlane(String plane) {<FILL_FUNCTION_BODY>}
public String getTheme() {
if (this.colors != null && this.colors.length == 16) {
theme = StringUtils.join(this.colors, ",");
}
return theme;
}
public void setTheme(String theme) {
if (StringUtils.isNotEmpty(theme) && theme.split(",").length == 16) {
this.setColors(theme.split(","));
}
this.theme = theme;
}
public Integer getPtyWidth() {
return ptyWidth;
}
public void setPtyWidth(Integer ptyWidth) {
this.ptyWidth = ptyWidth;
}
public Integer getPtyHeight() {
return ptyHeight;
}
public void setPtyHeight(Integer ptyHeight) {
this.ptyHeight = ptyHeight;
}
}
|
if (StringUtils.isNotEmpty(plane) && plane.split(",").length == 2) {
this.setBg(plane.split(",")[0]);
this.setFg(plane.split(",")[1]);
}
this.plane = plane;
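A tiny round trip for the comma-separated plane encoding handled above; the colour values are placeholders and commons-lang StringUtils (already used by the class) is assumed on the classpath.

import io.bastillion.manage.model.UserSettings;

public class PlaneExample {
    public static void main(String[] args) {
        UserSettings settings = new UserSettings();
        settings.setPlane("#FFFFFF,#000000");       // "bg,fg" is split into the two fields
        System.out.println(settings.getBg());       // #FFFFFF
        System.out.println(settings.getFg());       // #000000
        System.out.println(settings.getPlane());    // #FFFFFF,#000000, rebuilt from bg and fg
    }
}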
| 494 | 73 | 567 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/task/SecureShellTask.java
|
SecureShellTask
|
run
|
class SecureShellTask implements Runnable {
private static final Logger log = LoggerFactory.getLogger(SecureShellTask.class);
InputStream outFromChannel;
SessionOutput sessionOutput;
public SecureShellTask(SessionOutput sessionOutput, InputStream outFromChannel) {
this.sessionOutput = sessionOutput;
this.outFromChannel = outFromChannel;
}
public void run() {<FILL_FUNCTION_BODY>}
}
|
InputStreamReader isr = new InputStreamReader(outFromChannel);
BufferedReader br = new BufferedReader(isr);
SessionOutputUtil.addOutput(sessionOutput);
char[] buff = new char[1024];
int read;
try {
while ((read = br.read(buff)) != -1) {
SessionOutputUtil.addToOutput(sessionOutput.getSessionId(), sessionOutput.getInstanceId(), buff, 0, read);
Thread.sleep(50);
}
SessionOutputUtil.removeOutput(sessionOutput.getSessionId(), sessionOutput.getInstanceId());
} catch (IOException | InterruptedException ex) {
log.error(ex.toString(), ex);
}
| 121 | 183 | 304 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/task/SentOutputTask.java
|
SentOutputTask
|
run
|
class SentOutputTask implements Runnable {
private static final Logger log = LoggerFactory.getLogger(SentOutputTask.class);
Session session;
Long sessionId;
User user;
public SentOutputTask(Long sessionId, Session session, User user) {
this.sessionId = sessionId;
this.session = session;
this.user = user;
}
public void run() {<FILL_FUNCTION_BODY>}
}
|
Gson gson = new Gson();
while (session.isOpen()) {
try {
Connection con = DBUtils.getConn();
List<SessionOutput> outputList = SessionOutputUtil.getOutput(con, sessionId, user);
if (!outputList.isEmpty()) {
String json = gson.toJson(outputList);
//send json to session
this.session.getBasicRemote().sendText(json);
}
Thread.sleep(25);
DBUtils.closeConn(con);
} catch (SQLException | GeneralSecurityException | IOException | InterruptedException ex) {
log.error(ex.toString(), ex);
}
}
| 120 | 171 | 291 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/util/DSPool.java
|
DSPool
|
registerDataSource
|
class DSPool {
private static BasicDataSource dsPool = null;
private static final String BASE_DIR = AppConfig.CONFIG_DIR;
private static final String DB_DRIVER = AppConfig.getProperty("dbDriver");
private static final int MAX_ACTIVE = Integer.parseInt(AppConfig.getProperty("maxActive"));
private static final boolean TEST_ON_BORROW = Boolean.parseBoolean(AppConfig.getProperty("testOnBorrow"));
private static final int MIN_IDLE = Integer.parseInt(AppConfig.getProperty("minIdle"));
private static final int MAX_WAIT = Integer.parseInt(AppConfig.getProperty("maxWait"));
private DSPool() {
}
/**
* fetches the data source for H2 db
*
* @return data source pool
*/
public static BasicDataSource getDataSource() throws GeneralSecurityException {
if (dsPool == null) {
dsPool = registerDataSource();
}
return dsPool;
}
/**
* register the data source for H2 DB
*
* @return pooling database object
*/
private static BasicDataSource registerDataSource() throws GeneralSecurityException {<FILL_FUNCTION_BODY>}
}
|
System.setProperty("h2.baseDir", BASE_DIR);
// create a database connection
String user = AppConfig.getProperty("dbUser");
String password = AppConfig.decryptProperty("dbPassword");
String connectionURL = AppConfig.getProperty("dbConnectionURL");
if (connectionURL != null && connectionURL.contains("CIPHER=")) {
password = "filepwd " + password;
}
String validationQuery = "select 1";
BasicDataSource dataSource = new BasicDataSource();
dataSource.setDriverClassName(DB_DRIVER);
dataSource.setMaxTotal(MAX_ACTIVE);
dataSource.setTestOnBorrow(TEST_ON_BORROW);
dataSource.setMinIdle(MIN_IDLE);
dataSource.setMaxWaitMillis(MAX_WAIT);
dataSource.setValidationQuery(validationQuery);
dataSource.setUsername(user);
dataSource.setPassword(password);
dataSource.setUrl(connectionURL);
return dataSource;
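A borrow-and-return sketch for the pool built above, assuming the BastillionConfig.properties values read by AppConfig are in place; closing the connection in try-with-resources returns it to the DBCP pool rather than closing the underlying H2 connection.

import io.bastillion.manage.util.DSPool;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class PoolExample {
    public static void main(String[] args) throws Exception {
        // getDataSource() lazily builds the pool via registerDataSource()
        try (Connection con = DSPool.getDataSource().getConnection();
             Statement stmt = con.createStatement();
             ResultSet rs = stmt.executeQuery("select 1")) {    // same statement used as the validation query
            rs.next();
            System.out.println("pool is alive: " + rs.getInt(1));
        }
    }
}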
| 320 | 269 | 589 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/util/EncryptionUtil.java
|
EncryptionUtil
|
encrypt
|
class EncryptionUtil {
private static final Logger log = LoggerFactory.getLogger(EncryptionUtil.class);
//secret key
private static byte[] key = new byte[0];
static {
try {
key = KeyStoreUtil.getSecretBytes(KeyStoreUtil.ENCRYPTION_KEY_ALIAS);
} catch (GeneralSecurityException ex) {
log.error(ex.toString(), ex);
}
}
public static final String CRYPT_ALGORITHM = "AES";
public static final String HASH_ALGORITHM = "SHA-256";
private EncryptionUtil() {
}
/**
* generate salt for hash
*
* @return salt
*/
public static String generateSalt() {
byte[] salt = new byte[32];
SecureRandom secureRandom = new SecureRandom();
secureRandom.nextBytes(salt);
return new String(Base64.encodeBase64(salt));
}
/**
* return hash value of string
*
* @param str unhashed string
* @param salt salt for hash
* @return hash value of string
*/
public static String hash(String str, String salt) throws NoSuchAlgorithmException {
String hash = null;
MessageDigest md = MessageDigest.getInstance(HASH_ALGORITHM);
if (StringUtils.isNotEmpty(salt)) {
md.update(Base64.decodeBase64(salt.getBytes()));
}
md.update(str.getBytes(StandardCharsets.UTF_8));
hash = new String(Base64.encodeBase64(md.digest()));
return hash;
}
/**
* return hash value of string
*
* @param str unhashed string
* @return hash value of string
*/
public static String hash(String str) throws NoSuchAlgorithmException {
return hash(str, null);
}
/**
* return encrypted value of string
*
* @param key secret key
* @param str unencrypted string
* @return encrypted string
*/
public static String encrypt(byte[] key, String str) throws GeneralSecurityException {<FILL_FUNCTION_BODY>}
/**
* return decrypted value of encrypted string
*
* @param key secret key
* @param str encrypted string
* @return decrypted string
*/
public static String decrypt(byte[] key, String str) throws GeneralSecurityException {
String retVal = null;
if (str != null && str.length() > 0) {
Cipher c = Cipher.getInstance(CRYPT_ALGORITHM);
c.init(Cipher.DECRYPT_MODE, new SecretKeySpec(key, CRYPT_ALGORITHM));
byte[] decodedVal = Base64.decodeBase64(str.getBytes());
retVal = new String(c.doFinal(decodedVal));
}
return retVal;
}
/**
* return encrypted value of string
*
* @param str unencrypted string
* @return encrypted string
*/
public static String encrypt(String str) throws GeneralSecurityException {
return encrypt(key, str);
}
/**
* return decrypted value of encrypted string
*
* @param str encrypted string
* @return decrypted string
*/
public static String decrypt(String str) throws GeneralSecurityException {
return decrypt(key, str);
}
}
|
String retVal = null;
if (str != null && str.length() > 0) {
Cipher c = Cipher.getInstance(CRYPT_ALGORITHM);
c.init(Cipher.ENCRYPT_MODE, new SecretKeySpec(key, CRYPT_ALGORITHM));
byte[] encVal = c.doFinal(str.getBytes());
retVal = new String(Base64.encodeBase64(encVal));
}
return retVal;
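A round-trip sketch for the AES helpers above using an explicitly generated 256-bit key, so it does not depend on the application keystore; the plaintext is arbitrary.

import io.bastillion.manage.util.EncryptionUtil;

import javax.crypto.KeyGenerator;

public class EncryptionExample {
    public static void main(String[] args) throws Exception {
        // throwaway 256-bit AES key instead of the keystore-backed one
        KeyGenerator keyGenerator = KeyGenerator.getInstance(EncryptionUtil.CRYPT_ALGORITHM);
        keyGenerator.init(256);
        byte[] key = keyGenerator.generateKey().getEncoded();
        String cipherText = EncryptionUtil.encrypt(key, "s3cret-value");
        String plainText = EncryptionUtil.decrypt(key, cipherText);
        System.out.println(cipherText + " -> " + plainText);    // Base64 ciphertext -> "s3cret-value"
    }
}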
| 912 | 131 | 1,043 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/util/KeyStoreUtil.java
|
KeyStoreUtil
|
initializeKeyStore
|
class KeyStoreUtil {
private static final Logger log = LoggerFactory.getLogger(KeyStoreUtil.class);
private static KeyStore keyStore = null;
private static final String keyStoreFile = AppConfig.CONFIG_DIR
+ "/bastillion.jceks";
private static final char[] KEYSTORE_PASS = new char[]{
'G', '~', 'r', 'x', 'Z', 'E', 'w', 'f', 'a', '[', '!', 'f', 'Z', 'd', '*', 'L', '8', 'm', 'h', 'u', '#',
'j', '9', ':', '~', ';', 'U', '>', 'O', 'i', '8', 'r', 'C', '}', 'f', 't', '%', '[', 'H', 'h', 'M', '&',
'K', ':', 'l', '5', 'c', 'H', '6', 'r', 'A', 'E', '.', 'F', 'Y', 'W', '}', '{', '*', '8', 'd', 'E', 'C',
'A', '6', 'F', 'm', 'j', 'u', 'A', 'Q', '%', '{', '/', '@', 'm', '&', '5', 'S', 'q', '4', 'Q', '+', 'Y',
'|', 'X', 'W', 'z', '8', '<', 'j', 'd', 'a', '}', '`', '0', 'N', 'B', '3', 'i', 'v', '5', 'U', ' ', '2',
'd', 'd', '(', '&', 'J', '_', '9', 'o', '(', '2', 'I', '`', ';', '>', '#', '$', 'X', 'j', '&', '&', '%',
'>', '#', '7', 'q', '>', ')', 'L', 'A', 'v', 'h', 'j', 'i', '8', '~', ')', 'a', '~', 'W', '/', 'l', 'H',
'L', 'R', '+', '\\', 'i', 'R', '_', '+', 'y', 's', '0', 'n', '\'', '=', '{', 'B', ':', 'l', '1', '%', '^',
'd', 'n', 'H', 'X', 'B', '$', 'f', '"', '#', ')', '{', 'L', '/', 'q', '\'', 'O', '%', 's', 'M', 'Q', ']',
'D', 'v', ';', 'L', 'C', 'd', '?', 'D', 'l', 'h', 'd', 'i', 'N', '4', 'R', '>', 'O', ';', '$', '(', '4',
'-', '0', '^', 'Y', ')', '5', 'V', 'M', '7', 'S', 'a', 'c', 'D', 'C', 'w', 'A', 'o', 'n', 's', 'r', '*',
'G', '[', 'l', 'h', '$', 'U', 's', '_', 'D', 'f', 'X', '~', '.', '7', 'B', 'A', 'E', '(', '#', ']', ':',
'`', ',', 'k', 'y'};
private static final int KEYLENGTH = 256;
//Alias for encryption keystore
public static final String ENCRYPTION_KEY_ALIAS = "KEYBOX-ENCRYPTION_KEY";
static {
File f = new File(keyStoreFile);
//load or create keystore
try {
if (f.isFile() && f.canRead()) {
keyStore = KeyStore.getInstance("JCEKS");
FileInputStream keyStoreInputStream = new FileInputStream(f);
keyStore.load(keyStoreInputStream, KEYSTORE_PASS);
}
//create keystore
else {
initializeKeyStore();
}
} catch (IOException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
}
}
/**
* get secret entry for alias
*
* @param alias keystore secret alias
* @return secret byte array
*/
public static byte[] getSecretBytes(String alias) throws GeneralSecurityException {
KeyStore.SecretKeyEntry entry = (KeyStore.SecretKeyEntry) keyStore.getEntry(alias, new KeyStore.PasswordProtection(KEYSTORE_PASS));
return entry.getSecretKey().getEncoded();
}
/**
* get secret entry for alias
*
* @param alias keystore secret alias
* @return secret string
*/
public static String getSecretString(String alias) throws GeneralSecurityException {
KeyStore.SecretKeyEntry entry = (KeyStore.SecretKeyEntry) keyStore.getEntry(alias, new KeyStore.PasswordProtection(KEYSTORE_PASS));
return new String(entry.getSecretKey().getEncoded());
}
/**
* set secret in keystore
*
* @param alias keystore secret alias
* @param secret keystore entry
*/
public static void setSecret(String alias, byte[] secret) throws KeyStoreException {
KeyStore.ProtectionParameter protectionParameter = new KeyStore.PasswordProtection(KEYSTORE_PASS);
SecretKeySpec secretKey = new SecretKeySpec(secret, 0, secret.length, "AES");
KeyStore.SecretKeyEntry secretKeyEntry = new KeyStore.SecretKeyEntry(secretKey);
keyStore.setEntry(alias, secretKeyEntry, protectionParameter);
}
/**
* set secret in keystore
*
* @param alias keystore secret alias
* @param secret keystore entry
*/
public static void setSecret(String alias, String secret) throws KeyStoreException {
setSecret(alias, secret.getBytes());
}
/**
* delete existing and create new keystore
*/
public static void resetKeyStore() throws IOException, GeneralSecurityException {
File file = new File(keyStoreFile);
if (file.exists()) {
FileUtils.forceDelete(file);
}
//create new keystore
initializeKeyStore();
}
/**
* create new keystore
*/
private static void initializeKeyStore() throws GeneralSecurityException, IOException {<FILL_FUNCTION_BODY>}
}
|
keyStore = KeyStore.getInstance("JCEKS");
//create keystore
keyStore.load(null, KEYSTORE_PASS);
//set encryption key
KeyGenerator keyGenerator = KeyGenerator.getInstance("AES");
keyGenerator.init(KEYLENGTH);
KeyStoreUtil.setSecret(KeyStoreUtil.ENCRYPTION_KEY_ALIAS, keyGenerator.generateKey().getEncoded());
//write keystore
FileOutputStream fos = new FileOutputStream(keyStoreFile);
keyStore.store(fos, KEYSTORE_PASS);
fos.close();
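A store-and-read sketch for the keystore helpers above; the alias and value are made up, and a readable bastillion.jceks under CONFIG_DIR is assumed. Note that setSecret only updates the in-memory KeyStore object; writing it back to disk is handled by the initialize/store path shown in this class.

import io.bastillion.manage.util.KeyStoreUtil;

public class KeyStoreExample {
    public static void main(String[] args) throws Exception {
        KeyStoreUtil.setSecret("example-alias", "example-value");       // hypothetical alias and value
        String value = KeyStoreUtil.getSecretString("example-alias");
        System.out.println(value);                                      // prints "example-value"
    }
}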
| 1,649 | 152 | 1,801 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/util/OTPUtil.java
|
OTPUtil
|
verifyToken
|
class OTPUtil {
private static final Logger log = LoggerFactory.getLogger(OTPUtil.class);
//sizes to generate OTP secret
private static final int SECRET_SIZE = 10;
private static final int NUM_SCRATCH_CODES = 5;
private static final int SCRATCH_CODE_SIZE = 4;
//token window in near future or past
private static final int TOKEN_WINDOW = 3;
//interval for validation token change
private static final int CHANGE_INTERVAL = 30;
private OTPUtil() {
}
/**
     * generates OTP secret
*
* @return String shared secret
*/
public static String generateSecret() {
byte[] buffer = new byte[(NUM_SCRATCH_CODES * SCRATCH_CODE_SIZE) + SECRET_SIZE];
new SecureRandom().nextBytes(buffer);
byte[] secret = Arrays.copyOf(buffer, SECRET_SIZE);
return new String(new Base32().encode(secret));
}
/**
* verifies code for OTP secret
*
* @param secret shared secret
* @param token verification token
* @return true if success
*/
public static boolean verifyToken(String secret, long token) {
//check token in near future or past
int window = TOKEN_WINDOW;
for (int i = window; i >= -window; i--) {
long time = (new Date().getTime() / TimeUnit.SECONDS.toMillis(CHANGE_INTERVAL)) + i;
if (verifyToken(secret, token, time)) {
return true;
}
}
return false;
}
/**
* verifies code for OTP secret per time interval
*
* @param secret shared secret
* @param token verification token
* @param time time representation to calculate OTP
* @return true if success
*/
private static boolean verifyToken(String secret, long token, long time) {<FILL_FUNCTION_BODY>}
}
|
long calculated = -1;
byte[] key = new Base32().decode(secret);
SecretKeySpec secretKey = new SecretKeySpec(key, "HmacSHA1");
try {
Mac mac = Mac.getInstance("HmacSHA1");
mac.init(secretKey);
byte[] hash = mac.doFinal(ByteBuffer.allocate(8).putLong(time).array());
int offset = hash[hash.length - 1] & 0xF;
for (int i = 0; i < 4; ++i) {
calculated <<= 8;
calculated |= (hash[offset + i] & 0xFF);
}
calculated &= 0x7FFFFFFF;
calculated %= 1000000;
} catch (Exception ex) {
log.error(ex.toString(), ex);
}
return (calculated != -1 && calculated == token);
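An end-to-end sketch for the TOTP check above: generate a shared secret, derive the current 6-digit code with the same HmacSHA1 truncation the private method uses, and confirm verifyToken accepts it. commons-codec Base32 (already used by the class) is assumed on the classpath.

import io.bastillion.manage.util.OTPUtil;
import org.apache.commons.codec.binary.Base32;

import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.concurrent.TimeUnit;

public class OtpExample {
    public static void main(String[] args) throws Exception {
        String secret = OTPUtil.generateSecret();
        // code for the current 30-second window, mirroring verifyToken's math
        long time = new Date().getTime() / TimeUnit.SECONDS.toMillis(30);
        Mac mac = Mac.getInstance("HmacSHA1");
        mac.init(new SecretKeySpec(new Base32().decode(secret), "HmacSHA1"));
        byte[] hash = mac.doFinal(ByteBuffer.allocate(8).putLong(time).array());
        int offset = hash[hash.length - 1] & 0xF;
        long code = 0;
        for (int i = 0; i < 4; ++i) {
            code = (code << 8) | (hash[offset + i] & 0xFF);
        }
        code = (code & 0x7FFFFFFF) % 1000000;
        System.out.println("token " + code + " accepted: " + OTPUtil.verifyToken(secret, code));   // true
    }
}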
| 540 | 240 | 780 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/util/RefreshAuthKeyUtil.java
|
RefreshAllSystemsTask
|
run
|
class RefreshAllSystemsTask implements Runnable {
private static final Logger log = LoggerFactory.getLogger(RefreshAllSystemsTask.class);
@Override
public void run() {<FILL_FUNCTION_BODY>}
}
|
//distribute all public keys
try {
SSHUtil.distributePubKeysToAllSystems();
} catch (SQLException | GeneralSecurityException ex) {
log.error(ex.toString(), ex);
}
| 65 | 59 | 124 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/util/SessionOutputSerializer.java
|
SessionOutputSerializer
|
serialize
|
class SessionOutputSerializer implements JsonSerializer<Object> {
@Override
public JsonElement serialize(Object src, Type typeOfSrc, JsonSerializationContext context) {<FILL_FUNCTION_BODY>}
}
|
JsonObject object = new JsonObject();
if (typeOfSrc.equals(AuditWrapper.class)) {
AuditWrapper auditWrapper = (AuditWrapper) src;
object.addProperty("user_id", auditWrapper.getUser().getId());
object.addProperty("username", auditWrapper.getUser().getUsername());
object.addProperty("user_type", auditWrapper.getUser().getUserType());
object.addProperty("first_nm", auditWrapper.getUser().getFirstNm());
object.addProperty("last_nm", auditWrapper.getUser().getLastNm());
object.addProperty("email", auditWrapper.getUser().getEmail());
object.addProperty("session_id", auditWrapper.getSessionOutput().getSessionId());
object.addProperty("instance_id", auditWrapper.getSessionOutput().getInstanceId());
object.addProperty("host_id", auditWrapper.getSessionOutput().getId());
object.addProperty("host", auditWrapper.getSessionOutput().getDisplayLabel());
object.addProperty("output", auditWrapper.getSessionOutput().getOutput().toString());
object.addProperty("timestamp", new Date().getTime());
}
return object;
| 54 | 296 | 350 |
<no_super_class>
|
bastillion-io_Bastillion
|
Bastillion/src/main/java/io/bastillion/manage/util/SessionOutputUtil.java
|
SessionOutputUtil
|
getOutput
|
class SessionOutputUtil {
private static final Map<Long, UserSessionsOutput> userSessionsOutputMap = new ConcurrentHashMap<>();
public final static boolean enableInternalAudit = "true".equals(AppConfig.getProperty("enableInternalAudit"));
private static final Gson gson = new GsonBuilder().registerTypeAdapter(AuditWrapper.class, new SessionOutputSerializer()).create();
private static final Logger systemAuditLogger = LoggerFactory.getLogger("io.bastillion.manage.util.SystemAudit");
private SessionOutputUtil() {
}
/**
     * removes all session output for a user session
*
* @param sessionId session id
*/
public static void removeUserSession(Long sessionId) {
UserSessionsOutput userSessionsOutput = userSessionsOutputMap.get(sessionId);
if (userSessionsOutput != null) {
userSessionsOutput.getSessionOutputMap().clear();
}
userSessionsOutputMap.remove(sessionId);
}
/**
* removes session output for host system
*
* @param sessionId session id
* @param instanceId id of host system instance
*/
public static void removeOutput(Long sessionId, Integer instanceId) {
UserSessionsOutput userSessionsOutput = userSessionsOutputMap.get(sessionId);
if (userSessionsOutput != null) {
userSessionsOutput.getSessionOutputMap().remove(instanceId);
}
}
/**
* adds a new output
*
* @param sessionOutput session output object
*/
public static void addOutput(SessionOutput sessionOutput) {
UserSessionsOutput userSessionsOutput = userSessionsOutputMap.get(sessionOutput.getSessionId());
if (userSessionsOutput == null) {
userSessionsOutputMap.put(sessionOutput.getSessionId(), new UserSessionsOutput());
userSessionsOutput = userSessionsOutputMap.get(sessionOutput.getSessionId());
}
userSessionsOutput.getSessionOutputMap().put(sessionOutput.getInstanceId(), sessionOutput);
}
/**
* adds a new output
*
* @param sessionId session id
* @param instanceId id of host system instance
* @param value Array that is the source of characters
* @param offset The initial offset
* @param count The length
*/
public static void addToOutput(Long sessionId, Integer instanceId, char[] value, int offset, int count) {
UserSessionsOutput userSessionsOutput = userSessionsOutputMap.get(sessionId);
if (userSessionsOutput != null) {
userSessionsOutput.getSessionOutputMap().get(instanceId).getOutput().append(value, offset, count);
}
}
/**
* returns list of output lines
*
* @param sessionId session id object
* @param user user auth object
* @return session output list
*/
public static List<SessionOutput> getOutput(Connection con, Long sessionId, User user) throws SQLException {<FILL_FUNCTION_BODY>}
}
|
List<SessionOutput> outputList = new ArrayList<>();
UserSessionsOutput userSessionsOutput = userSessionsOutputMap.get(sessionId);
if (userSessionsOutput != null) {
for (Integer key : userSessionsOutput.getSessionOutputMap().keySet()) {
//get output chars and set to output
SessionOutput sessionOutput = userSessionsOutput.getSessionOutputMap().get(key);
if (sessionOutput != null && sessionOutput.getOutput() != null
&& StringUtils.isNotEmpty(sessionOutput.getOutput())) {
outputList.add(sessionOutput);
//send to audit logger
systemAuditLogger.info(gson.toJson(new AuditWrapper(user, sessionOutput)));
if (enableInternalAudit) {
SessionAuditDB.insertTerminalLog(con, sessionOutput);
}
userSessionsOutput.getSessionOutputMap().put(key, new SessionOutput(sessionId, sessionOutput));
}
}
}
return outputList;
| 787 | 267 | 1,054 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/ChronicleHashCloseOnExitHook.java
|
ChronicleHashCloseOnExitHook
|
closeAll
|
class ChronicleHashCloseOnExitHook {
private static WeakHashMap<VanillaChronicleHash<?, ?, ?, ?>.Identity, Long> maps = new WeakHashMap<>();
private static long order = 0;
static {
PriorityHook.add(80, ChronicleHashCloseOnExitHook::closeAll);
}
private ChronicleHashCloseOnExitHook() {
}
static synchronized void add(VanillaChronicleHash<?, ?, ?, ?> hash) {
if (maps == null)
throw new IllegalStateException("Shutdown in progress");
maps.put(hash.identity, order++);
}
static synchronized void remove(VanillaChronicleHash<?, ?, ?, ?> hash) {
if (maps == null)
return; // we are already in shutdown
maps.remove(hash.identity);
}
private static void closeAll() {<FILL_FUNCTION_BODY>}
}
|
try {
WeakHashMap<VanillaChronicleHash<?, ?, ?, ?>.Identity, Long> maps;
synchronized (ChronicleHashCloseOnExitHook.class) {
maps = ChronicleHashCloseOnExitHook.maps;
ChronicleHashCloseOnExitHook.maps = null;
}
TreeMap<Long, VanillaChronicleHash<?, ?, ?, ?>> orderedMaps = new TreeMap<>();
maps.forEach((identity, order) -> orderedMaps.put(order, identity.hash()));
// close later added maps first
orderedMaps.descendingMap().values().forEach(h -> {
try {
Runnable preShutdownAction = h.getPreShutdownAction();
if (preShutdownAction != null) {
try {
preShutdownAction.run();
} catch (Throwable throwable) {
try {
Jvm.error().on(ChronicleHashCloseOnExitHook.class,
"Error running pre-shutdown action for " + h.toIdentityString() +
" :", throwable);
} catch (Throwable t2) {
throwable.addSuppressed(t2);
throwable.printStackTrace();
}
}
}
h.close();
} catch (Throwable throwable) {
try {
Jvm.error().on(ChronicleHashCloseOnExitHook.class,
"Error while closing " + h.toIdentityString() +
" during shutdown hook:", throwable);
} catch (Throwable t2) {
// This may occur if the log service has already been shut down. Try to fall
// back to printStackTrace().
throwable.addSuppressed(t2);
throwable.printStackTrace();
}
}
});
} catch (Throwable throwable) {
try {
Jvm.error().on(ChronicleHashCloseOnExitHook.class,
"Error while closing maps during shutdown hook:", throwable);
} catch (Throwable t2) {
throwable.addSuppressed(t2);
throwable.printStackTrace();
}
}
| 250 | 541 | 791 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/InMemoryChronicleHashResources.java
|
InMemoryChronicleHashResources
|
releaseMemoryResource
|
class InMemoryChronicleHashResources extends ChronicleHashResources {
@Override
void releaseMemoryResource(final MemoryResource allocation) {<FILL_FUNCTION_BODY>}
}
|
assert SKIP_ASSERTIONS || assertAddress(allocation.address);
assert SKIP_ASSERTIONS || assertPosition(allocation.size);
OS.memory().freeMemory(allocation.address, allocation.size);
| 46 | 57 | 103 |
<methods>public non-sealed void <init>() ,public final boolean releaseManually() ,public void run() ,public final void setChronicleHashIdentityString(java.lang.String) <variables>private static final int COMPLETELY_CLOSED,private static final int OPEN,private static final int PARTIALLY_CLOSED,private java.lang.String chronicleHashIdentityString,private List<java.io.Closeable> closeables,private List<WeakReference<net.openhft.chronicle.hash.impl.ContextHolder>> contexts,private List<net.openhft.chronicle.hash.impl.MemoryResource> memoryResources,private volatile int state
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/IntCompactOffHeapLinearHashTable.java
|
IntCompactOffHeapLinearHashTable
|
writeEntryVolatile
|
class IntCompactOffHeapLinearHashTable extends CompactOffHeapLinearHashTable {
private static final long SCALE = 4L;
/**
* Must not store {@code h} in a field, to avoid memory leaks.
*
* @see net.openhft.chronicle.hash.impl.stage.hash.Chaining#initMap
*/
IntCompactOffHeapLinearHashTable(VanillaChronicleHash h) {
super(h);
}
@Override
long indexToPos(long index) {
return index * SCALE;
}
@Override
public long step(long pos) {
return (pos + SCALE) & capacityMask2;
}
@Override
public long stepBack(long pos) {
return (pos - SCALE) & capacityMask2;
}
@Override
public long readEntry(final long address,
final long pos) {
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
return OS.memory().readInt(address + pos);
}
@Override
public long readEntryVolatile(final long address,
final long pos) {
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
return OS.memory().readVolatileInt(address + pos);
}
@Override
public void writeEntryVolatile(final long address,
final long pos,
final long key,
final long value) {<FILL_FUNCTION_BODY>}
@Override
public void writeEntry(long address, long pos, long newEntry) {
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
OS.memory().writeInt(address + pos, (int) newEntry);
}
@Override
public void clearEntry(long address, long pos) {
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
OS.memory().writeInt(address + pos, 0);
}
}
|
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
OS.memory().writeVolatileInt(address + pos, (int) entry(key, value));
| 557 | 57 | 614 |
<methods>public static long capacityFor(long) ,public void checkValueForPut(long) ,public abstract void clearEntry(long, long) ,public boolean empty(long) ,public long entry(long, long) ,public static int entrySize(int, int) ,public long hlPos(long) ,public long key(long) ,public static int keyBits(long, int) ,public static long mask(int) ,public long maskUnsetKey(long) ,public abstract long readEntry(long, long) ,public abstract long readEntryVolatile(long, long) ,public long remove(long, long) ,public abstract long step(long) ,public abstract long stepBack(long) ,public long value(long) ,public static int valueBits(long) ,public abstract void writeEntry(long, long, long) ,public abstract void writeEntryVolatile(long, long, long, long) <variables>public static final double MAX_LOAD_FACTOR,public static final int MAX_TIER_CHUNKS,public static final int MAX_TIER_ENTRIES,public static final double MAX_UPPER_BOUND_LOAD_FACTOR,public static final long UNSET_ENTRY,public static final long UNSET_KEY,private final non-sealed long capacityMask,final non-sealed long capacityMask2,private final non-sealed int keyBits,private final non-sealed long keyMask,private final non-sealed long valueMask
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/LongCompactOffHeapLinearHashTable.java
|
LongCompactOffHeapLinearHashTable
|
clearEntry
|
class LongCompactOffHeapLinearHashTable extends CompactOffHeapLinearHashTable {
private static final long SCALE = 8L;
/**
* Must not store {@code h} in a field, to avoid memory leaks.
*
* @see net.openhft.chronicle.hash.impl.stage.hash.Chaining#initMap
*/
LongCompactOffHeapLinearHashTable(VanillaChronicleHash h) {
super(h);
}
@Override
long indexToPos(long index) {
return index * SCALE;
}
@Override
public long step(long pos) {
return (pos + SCALE) & capacityMask2;
}
@Override
public long stepBack(long pos) {
return (pos - SCALE) & capacityMask2;
}
@Override
public long readEntry(final long address,
final long pos) {
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
return OS.memory().readLong(address + pos);
}
@Override
public long readEntryVolatile(final long address,
final long pos) {
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
return OS.memory().readVolatileLong(address + pos);
}
@Override
public void writeEntryVolatile(final long address,
final long pos,
final long key,
final long value) {
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
OS.memory().writeVolatileLong(address + pos, entry(key, value));
}
@Override
public void writeEntry(final long address,
final long pos,
final long newEntry) {
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
OS.memory().writeLong(address + pos, newEntry);
}
@Override
public void clearEntry(final long address,
final long pos) {<FILL_FUNCTION_BODY>}
}
|
assert SKIP_ASSERTIONS || assertAddress(address);
assert SKIP_ASSERTIONS || assertPosition(pos);
OS.memory().writeLong(address + pos, 0L);
| 570 | 50 | 620 |
<methods>public static long capacityFor(long) ,public void checkValueForPut(long) ,public abstract void clearEntry(long, long) ,public boolean empty(long) ,public long entry(long, long) ,public static int entrySize(int, int) ,public long hlPos(long) ,public long key(long) ,public static int keyBits(long, int) ,public static long mask(int) ,public long maskUnsetKey(long) ,public abstract long readEntry(long, long) ,public abstract long readEntryVolatile(long, long) ,public long remove(long, long) ,public abstract long step(long) ,public abstract long stepBack(long) ,public long value(long) ,public static int valueBits(long) ,public abstract void writeEntry(long, long, long) ,public abstract void writeEntryVolatile(long, long, long, long) <variables>public static final double MAX_LOAD_FACTOR,public static final int MAX_TIER_CHUNKS,public static final int MAX_TIER_ENTRIES,public static final double MAX_UPPER_BOUND_LOAD_FACTOR,public static final long UNSET_ENTRY,public static final long UNSET_KEY,private final non-sealed long capacityMask,final non-sealed long capacityMask2,private final non-sealed int keyBits,private final non-sealed long keyMask,private final non-sealed long valueMask
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/PersistedChronicleHashResources.java
|
PersistedChronicleHashResources
|
releaseExtraSystemResources
|
class PersistedChronicleHashResources extends ChronicleHashResources {
private File file;
public PersistedChronicleHashResources(File file) {
this.file = file;
OS.memory().storeFence(); // Emulate final semantics of the file field
}
@Override
void releaseMemoryResource(MemoryResource mapping) throws IOException {
OS.unmap(mapping.address, mapping.size);
}
@Override
Throwable releaseExtraSystemResources() {<FILL_FUNCTION_BODY>}
}
|
if (file == null)
return null;
Throwable thrown = null;
try {
CanonicalRandomAccessFiles.release(file);
file = null;
} catch (Throwable t) {
thrown = t;
}
return thrown;
| 136 | 71 | 207 |
<methods>public non-sealed void <init>() ,public final boolean releaseManually() ,public void run() ,public final void setChronicleHashIdentityString(java.lang.String) <variables>private static final int COMPLETELY_CLOSED,private static final int OPEN,private static final int PARTIALLY_CLOSED,private java.lang.String chronicleHashIdentityString,private List<java.io.Closeable> closeables,private List<WeakReference<net.openhft.chronicle.hash.impl.ContextHolder>> contexts,private List<net.openhft.chronicle.hash.impl.MemoryResource> memoryResources,private volatile int state
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/data/bytes/EntryKeyBytesData.java
|
EntryKeyBytesData
|
innerGetUsing
|
class EntryKeyBytesData<K> extends AbstractData<K> {
@StageRef
VanillaChronicleHashHolder<K> hh;
@StageRef
KeyBytesInterop<K> ki;
@StageRef
SegmentStages s;
@StageRef
HashEntryStages<K> entry;
@StageRef
CheckOnEachPublicOperation checkOnEachPublicOperation;
@Stage("CachedEntryKey")
private K cachedEntryKey;
@Stage("CachedEntryKey")
private boolean cachedEntryKeyRead = false;
private void initCachedEntryKey() {
cachedEntryKey = innerGetUsing(cachedEntryKey);
cachedEntryKeyRead = true;
}
@Override
public RandomDataInput bytes() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return s.segmentBS;
}
@Override
public long offset() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return entry.keyOffset;
}
@Override
public long size() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return entry.keySize;
}
@Override
public long hash(LongHashFunction f) {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return super.hash(f);
}
@Override
public K get() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return cachedEntryKey;
}
@Override
public K getUsing(K using) {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return innerGetUsing(using);
}
private K innerGetUsing(K usingKey) {<FILL_FUNCTION_BODY>}
}
|
Bytes bytes = s.segmentBytesForRead();
bytes.readPosition(entry.keyOffset);
return ki.keyReader.read(bytes, size(), usingKey);
| 450 | 47 | 497 |
<methods>public boolean equals(java.lang.Object) ,public int hashCode() ,public java.lang.String toString() <variables>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/data/bytes/InputKeyBytesData.java
|
InputKeyBytesData
|
initInputKeyBytesStore
|
class InputKeyBytesData<K> extends AbstractData<K> {
@Stage("InputKeyBytes")
private final VanillaBytes inputKeyBytes = VanillaBytes.vanillaBytes();;
@StageRef
KeyBytesInterop<K> ki;
@StageRef
CheckOnEachPublicOperation checkOnEachPublicOperation;
@Stage("InputKeyBytesStore")
private BytesStore inputKeyBytesStore = null;
@Stage("InputKeyBytesStore")
private long inputKeyBytesOffset;
@Stage("InputKeyBytesStore")
private long inputKeyBytesSize;
@Stage("InputKeyBytes")
private boolean inputKeyBytesUsed = false;
@Stage("CachedInputKey")
private K cachedInputKey;
@Stage("CachedInputKey")
private boolean cachedInputKeyRead = false;
public void initInputKeyBytesStore(BytesStore bytesStore, long offset, long size) {<FILL_FUNCTION_BODY>}
boolean inputKeyBytesInit() {
return inputKeyBytesUsed;
}
void initInputKeyBytes() {
inputKeyBytes.bytesStore(inputKeyBytesStore, inputKeyBytesOffset, inputKeyBytesSize);
inputKeyBytesUsed = true;
}
void closeInputKeyBytes() {
inputKeyBytes.bytesStore(BytesStore.empty(), 0, 0);
inputKeyBytesUsed = false;
}
private void initCachedInputKey() {
cachedInputKey = innerGetUsing(cachedInputKey);
cachedInputKeyRead = true;
}
@Override
public RandomDataInput bytes() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return inputKeyBytes.bytesStore();
}
@Override
public long offset() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return inputKeyBytesOffset;
}
@Override
public long size() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return inputKeyBytesSize;
}
@Override
public K get() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return cachedInputKey;
}
@Override
public K getUsing(K using) {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return innerGetUsing(using);
}
private K innerGetUsing(K usingKey) {
inputKeyBytes.readPosition(inputKeyBytesOffset);
return ki.keyReader.read(inputKeyBytes, inputKeyBytesSize, usingKey);
}
}
|
inputKeyBytesStore = bytesStore;
inputKeyBytesOffset = offset;
inputKeyBytesSize = size;
| 642 | 31 | 673 |
<methods>public boolean equals(java.lang.Object) ,public int hashCode() ,public java.lang.String toString() <variables>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/entry/AllocatedChunks.java
|
AllocatedChunks
|
initEntryAndKeyCopying
|
class AllocatedChunks {
@StageRef
public VanillaChronicleHashHolder<?> hh;
@StageRef
public SegmentStages s;
@StageRef
public HashEntryStages<?> entry;
@StageRef
public Alloc alloc;
public int allocatedChunks = 0;
public void initAllocatedChunks(int allocatedChunks) {
this.allocatedChunks = allocatedChunks;
}
/**
     * @return {@code true} if the tier has changed
*/
public boolean initEntryAndKeyCopying(
long entrySize, long bytesToCopy, long prevPos, int prevChunks) {<FILL_FUNCTION_BODY>}
}
|
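        // allocate chunks for the relocated entry (possibly moving to another tier) and copy
        // the old entry bytes, starting from the key size field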
initAllocatedChunks(hh.h().inChunks(entrySize));
long oldSegmentTierBaseAddr = s.tierBaseAddr;
long oldKeySizeAddr = oldSegmentTierBaseAddr + entry.keySizeOffset;
long oldKeyAddr = oldSegmentTierBaseAddr + entry.keyOffset;
int tierBeforeAllocation = s.tier;
long pos = alloc.alloc(allocatedChunks, prevPos, prevChunks);
entry.copyExistingEntry(pos, bytesToCopy, oldKeyAddr, oldKeySizeAddr);
return s.tier != tierBeforeAllocation;
| 188 | 153 | 341 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/entry/HashEntryChecksumStrategy.java
|
HashEntryChecksumStrategy
|
computeChecksum
|
class HashEntryChecksumStrategy implements ChecksumStrategy {
@StageRef
SegmentStages s;
@StageRef
HashEntryStages<?> e;
@StageRef
KeyHashCode h;
@Override
public void computeAndStoreChecksum() {
int checksum = computeChecksum();
s.segmentBS.writeInt(e.entryEnd(), checksum);
}
@Override
public int computeChecksum() {<FILL_FUNCTION_BODY>}
@Override
public boolean innerCheckSum() {
int oldChecksum = storedChecksum();
int checksum = computeChecksum();
return oldChecksum == checksum;
}
@Override
public int storedChecksum() {
return s.segmentBS.readInt(e.entryEnd());
}
@Override
public long extraEntryBytes() {
return CHECKSUM_STORED_BYTES;
}
}
|
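        // combine the key's hash code with a hash of the bytes between the key end and the entry
        // end (the value payload, if any), then fold the 64-bit result to 32 bits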
long keyHashCode = h.keyHashCode();
long keyEnd = e.keyEnd();
long len = e.entryEnd() - keyEnd;
long checksum;
if (len > 0) {
long addr = s.tierBaseAddr + keyEnd;
long payloadChecksum = LongHashFunction.xx_r39().hashMemory(addr, len);
checksum = hash8To16Bytes(e.keySize, keyHashCode, payloadChecksum);
} else {
            // a non-replicated ChronicleSet entry has no payload
checksum = keyHashCode;
}
return (int) ((checksum >>> 32) ^ checksum);
| 257 | 173 | 430 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/entry/HashEntryStages.java
|
HashEntryStages
|
checkSum
|
class HashEntryStages<K> implements HashEntry<K>, ChecksumEntry {
@StageRef
public VanillaChronicleHashHolder<?> hh;
@StageRef
public SegmentStages s;
@StageRef
public CheckOnEachPublicOperation checkOnEachPublicOperation;
@StageRef
public HashLookupPos hlp;
public long pos = -1;
@Stage("EntryOffset")
public long keySizeOffset = -1;
public long keySize = -1;
public long keyOffset = -1;
public boolean delayedUpdateChecksum = false;
@StageRef
public EntryKeyBytesData<K> entryKey;
@Stage("EntrySizeInChunks")
public int entrySizeInChunks = 0;
@StageRef
HashEntryChecksumStrategy hashEntryChecksumStrategy;
public final ChecksumStrategy checksumStrategy = hh.h().checksumEntries ?
hashEntryChecksumStrategy : NoChecksumStrategy.INSTANCE;
public void initPos(long pos) {
this.pos = pos;
}
public abstract void closePos();
public abstract boolean entryOffsetInit();
public void initEntryOffset(long keySizeOffset) {
this.keySizeOffset = keySizeOffset;
}
public void initEntryOffset() {
keySizeOffset = s.entrySpaceOffset + pos * hh.h().chunkSize;
}
public abstract void closeEntryOffset();
public void initKeySize(long keySize) {
this.keySize = keySize;
}
public abstract void closeKeySize();
public void initKeyOffset(long keyOffset) {
this.keyOffset = keyOffset;
}
public abstract void closeKeyOffset();
public void readExistingEntry(long pos) {
initPos(pos);
Bytes segmentBytes = s.segmentBytesForRead();
segmentBytes.readPosition(keySizeOffset);
initKeySize(hh.h().keySizeMarshaller.readSize(segmentBytes));
initKeyOffset(segmentBytes.readPosition());
}
public void readFoundEntry(long pos, long keySizeOffset, long keySize, long keyOffset) {
initPos(pos);
initEntryOffset(keySizeOffset);
initKeySize(keySize);
initKeyOffset(keyOffset);
}
public void closeEntry() {
closePos();
closeEntryOffset();
closeKeySize();
closeKeyOffset();
}
public void writeNewEntry(long pos, Data<?> key) {
initPos(pos);
initKeySize(key.size());
Bytes segmentBytes = s.segmentBytesForWrite();
segmentBytes.writePosition(keySizeOffset);
hh.h().keySizeMarshaller.writeSize(segmentBytes, keySize);
initKeyOffset(segmentBytes.writePosition());
key.writeTo(s.segmentBS, keyOffset);
}
public void copyExistingEntry(
long newPos, long bytesToCopy, long oldKeyAddr, long oldKeySizeAddr) {
initPos(newPos);
initKeyOffset(keySizeOffset + (oldKeyAddr - oldKeySizeAddr));
        // Calling Access.copy(), which is probably slower because of the extra abstractions,
        // because there is no BytesStore.write(off, addr, len) method. An alternative is
        // to make a final BytesStore rawMemoryStore = new PointerBytesStore().set(0, Long.MAX_V)
        // and here: s.segmentBS.write(keySizeOffset, rawMemoryStore, keySizeAddr, bytesToCopy)
Access.copy(
nativeAccess(), null, oldKeySizeAddr,
checkedBytesStoreAccess(), s.segmentBS, keySizeOffset,
bytesToCopy);
}
public long keyEnd() {
return keyOffset + keySize;
}
public long entryEnd() {
return keyEnd();
}
public void initDelayedUpdateChecksum(boolean delayedUpdateChecksum) {
// makes delayedUpdateChecksum dependent on keySizeOffset and Locks stages, to trigger
// delayedUpdateChecksum close on these stages' close
assert entryOffsetInit() && keySizeOffset >= 0;
assert s.locksInit() && s.localLockState != LocalLockState.UNLOCKED;
assert delayedUpdateChecksum; // doesn't make sense to init to "uninit" false value
this.delayedUpdateChecksum = true;
}
abstract boolean delayedUpdateChecksumInit();
public void closeDelayedUpdateChecksum() {
if (hh.h().checksumEntries)
hashEntryChecksumStrategy.computeAndStoreChecksum();
delayedUpdateChecksum = false;
}
@Override
public void updateChecksum() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
if (!hh.h().checksumEntries) {
throw new UnsupportedOperationException(hh.h().toIdentityString() +
": Checksum is not stored in this Chronicle Hash");
}
s.innerUpdateLock.lock();
initDelayedUpdateChecksum(true);
}
@Override
public boolean checkSum() {<FILL_FUNCTION_BODY>}
long entrySize() {
return checksumStrategy.extraEntryBytes() + entryEnd() - keySizeOffset;
}
@NotNull
@Override
public Data<K> key() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return entryKey;
}
void initEntrySizeInChunks() {
entrySizeInChunks = hh.h().inChunks(entrySize());
}
public void initEntrySizeInChunks(int actuallyUsedChunks) {
entrySizeInChunks = actuallyUsedChunks;
}
public void innerRemoveEntryExceptHashLookupUpdate() {
s.free(pos, entrySizeInChunks);
s.incrementModCount();
}
}
|
checkOnEachPublicOperation.checkOnEachPublicOperation();
if (!hh.h().checksumEntries) {
throw new UnsupportedOperationException(hh.h().toIdentityString() +
": Checksum is not stored in this Chronicle Hash");
}
        // This is needed because a concurrent update lock holder might have performed an entry
        // update but not yet written the checksum (the checksum write is delayed until the update
        // unlock). So checkSum() at the read lock level might produce false negatives.
s.innerUpdateLock.lock();
return delayedUpdateChecksumInit() || checksumStrategy.innerCheckSum();
| 1,527 | 158 | 1,685 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/entry/HashLookupPos.java
|
HashLookupPos
|
initHashLookupPos
|
class HashLookupPos {
public long hashLookupPos = -1;
@StageRef
HashLookupSearch hls;
@StageRef
SegmentStages s;
public abstract boolean hashLookupPosInit();
public void initHashLookupPos() {<FILL_FUNCTION_BODY>}
public void initHashLookupPos(long hashLookupPos) {
this.hashLookupPos = hashLookupPos;
}
@Stage("HashLookupPos")
public void setHashLookupPos(long hashLookupPos) {
this.hashLookupPos = hashLookupPos;
}
public abstract void closeHashLookupPos();
}
|
        // Validation + make hashLookupPos dependent on the tier. This is needed because after a
        // tier change the hashLookupSearch should be re-performed, starting from the searchStartPos.
        // Not an assert statement, because the segmentTier stage should be initialized regardless
        // of whether assertions are enabled.
if (s.tier < 0)
throw new AssertionError();
s.innerReadLock.lock();
this.hashLookupPos = hls.searchStartPos;
| 180 | 121 | 301 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/entry/HashLookupSearch.java
|
HashLookupSearch
|
nextPos
|
class HashLookupSearch {
@StageRef
public VanillaChronicleHashHolder<?> hh;
@Stage("SearchKey")
public long searchStartPos;
@StageRef
SegmentStages s;
@StageRef
HashLookupPos hlp;
@StageRef
KeySearch<?> ks;
@StageRef
MapEntryStages<?, ?> e;
@Stage("SearchKey")
long searchKey = UNSET_KEY;
public CompactOffHeapLinearHashTable hl() {
return hh.h().hashLookup;
}
public void initSearchKey(long searchKey) {
this.searchKey = searchKey;
searchStartPos = hl().hlPos(searchKey);
}
private long addr() {
return s.tierBaseAddr;
}
public long nextPos() {<FILL_FUNCTION_BODY>}
public void found() {
hlp.setHashLookupPos(hl().stepBack(hlp.hashLookupPos));
}
public void remove() {
hlp.setHashLookupPos(hl().remove(addr(), hlp.hashLookupPos));
}
public void putNewVolatile(long entryPos) {
        // Correctness check + make putNewVolatile() dependent on keySearch; this, in turn,
        // is needed for hlp.hashLookupPos re-initialization after nextTier().
        // Not an assert statement, because ks.searchStatePresent() should run regardless of
        // whether assertions are enabled.
boolean keySearchReInit = !ks.keySearchInit();
if (ks.searchStatePresent())
throw new AssertionError();
if (keySearchReInit) {
// if key search was re-init, entry was re-init too during the search
e.readExistingEntry(entryPos);
}
hl().checkValueForPut(entryPos);
hl().writeEntryVolatile(addr(), hlp.hashLookupPos, searchKey, entryPos);
}
public boolean checkSlotContainsExpectedKeyAndValue(long value) {
// volatile read not needed here because this method is for verifying within-thread
// invariants
long entry = hl().readEntry(addr(), hlp.hashLookupPos);
return hl().key(entry) == searchKey && hl().value(entry) == value;
}
}
|
long pos = hlp.hashLookupPos;
CompactOffHeapLinearHashTable hl = hl();
while (true) {
// read volatile to make a happens-before edge between entry insertion from concurrent
// thread under update lock and this thread (reading the entry)
long entry = hl.readEntryVolatile(addr(), pos);
if (hl.empty(entry)) {
hlp.setHashLookupPos(pos);
return -1L;
}
pos = hl.step(pos);
if (pos == searchStartPos)
break;
if (hl.key(entry) == searchKey) {
hlp.setHashLookupPos(pos);
return hl.value(entry);
}
}
throw new IllegalStateException(hh.h().toIdentityString() +
": HashLookup overflow should never occur");
| 618 | 222 | 840 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/entry/ReadLock.java
|
ReadLock
|
unlock
|
class ReadLock implements InterProcessLock {
@StageRef
CheckOnEachPublicOperation checkOnEachPublicOperation;
@StageRef
SegmentStages s;
@StageRef
HashEntryStages entry;
@StageRef
HashLookupPos hlp;
@Override
public boolean isHeldByCurrentThread() {
checkOnEachPublicOperation.checkOnEachLockOperation();
return s.localLockState.read;
}
@Override
public void lock() {
checkOnEachPublicOperation.checkOnEachLockOperation();
if (s.localLockState == UNLOCKED) {
if (s.readZero() && s.updateZero() && s.writeZero()) {
try {
s.segmentHeader.readLock(s.segmentHeaderAddress);
} catch (InterProcessDeadLockException e) {
throw s.debugContextsAndLocks(e);
}
}
s.incrementRead();
s.setLocalLockState(READ_LOCKED);
}
}
@Override
public void lockInterruptibly() throws InterruptedException {
checkOnEachPublicOperation.checkOnEachLockOperation();
if (Thread.interrupted())
throw new InterruptedException();
if (s.localLockState == UNLOCKED) {
if (s.readZero() && s.updateZero() && s.writeZero()) {
try {
s.segmentHeader.readLockInterruptibly(s.segmentHeaderAddress);
} catch (InterProcessDeadLockException e) {
throw s.debugContextsAndLocks(e);
}
}
s.incrementRead();
s.setLocalLockState(READ_LOCKED);
}
}
@Override
public boolean tryLock() {
checkOnEachPublicOperation.checkOnEachLockOperation();
if (s.localLockState == UNLOCKED) {
if (!s.readZero() || !s.updateZero() || !s.writeZero() ||
s.segmentHeader.tryReadLock(s.segmentHeaderAddress)) {
s.incrementRead();
s.setLocalLockState(READ_LOCKED);
return true;
} else {
return false;
}
} else {
return true;
}
}
@Override
public boolean tryLock(long time, @NotNull TimeUnit unit) throws InterruptedException {
checkOnEachPublicOperation.checkOnEachLockOperation();
if (Thread.interrupted())
throw new InterruptedException();
if (s.localLockState == UNLOCKED) {
if (!s.readZero() || !s.updateZero() || !s.writeZero() ||
s.segmentHeader.tryReadLock(s.segmentHeaderAddress, time, unit)) {
s.incrementRead();
s.setLocalLockState(READ_LOCKED);
return true;
} else {
return false;
}
} else {
return true;
}
}
@Override
public void unlock() {<FILL_FUNCTION_BODY>}
@Override
public boolean isHeld() {
return s.localLockState != null &&
s.localLockState != UNLOCKED;
}
}
|
checkOnEachPublicOperation.checkOnEachLockOperation();
if (s.localLockState != UNLOCKED) {
// TODO what should close here?
hlp.closeHashLookupPos();
entry.closeEntry();
}
s.readUnlockAndDecrementCount();
s.setLocalLockState(UNLOCKED);
| 825 | 89 | 914 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/entry/UpdateLock.java
|
UpdateLock
|
unlock
|
class UpdateLock implements InterProcessLock {
@StageRef
VanillaChronicleHashHolder<?> hh;
@StageRef
CheckOnEachPublicOperation checkOnEachPublicOperation;
@StageRef
SegmentStages s;
@StageRef
HashEntryStages<?> entry;
@Override
public boolean isHeldByCurrentThread() {
checkOnEachPublicOperation.checkOnEachLockOperation();
return s.localLockState.update;
}
@Override
public void lock() {
checkOnEachPublicOperation.checkOnEachLockOperation();
switch (s.localLockState) {
case UNLOCKED:
s.checkIterationContextNotLockedInThisThread();
if (s.updateZero() && s.writeZero()) {
if (!s.readZero())
throw forbiddenUpdateLockWhenOuterContextReadLocked();
try {
s.segmentHeader.updateLock(s.segmentHeaderAddress);
} catch (InterProcessDeadLockException e) {
throw s.debugContextsAndLocks(e);
}
}
s.incrementUpdate();
s.setLocalLockState(UPDATE_LOCKED);
return;
case READ_LOCKED:
throw forbiddenUpgrade();
case UPDATE_LOCKED:
case WRITE_LOCKED:
// do nothing
}
}
/**
     * Non-static because after compilation it becomes an inner class, which forbids static methods
*/
@NotNull
private IllegalMonitorStateException forbiddenUpgrade() {
return new IllegalMonitorStateException(
hh.h().toIdentityString() + ": Cannot upgrade from read to update lock");
}
/**
     * Non-static because after compilation it becomes an inner class, which forbids static methods
*/
@NotNull
private IllegalStateException forbiddenUpdateLockWhenOuterContextReadLocked() {
return new IllegalStateException(hh.h().toIdentityString() +
": Cannot acquire update lock, because outer context holds read lock. " +
"In this case you should acquire update lock in the outer context up front");
}
@Override
public void lockInterruptibly() throws InterruptedException {
checkOnEachPublicOperation.checkOnEachLockOperation();
if (Thread.interrupted())
throw new InterruptedException();
switch (s.localLockState) {
case UNLOCKED:
s.checkIterationContextNotLockedInThisThread();
if (s.updateZero() && s.writeZero()) {
if (!s.readZero())
throw forbiddenUpdateLockWhenOuterContextReadLocked();
try {
s.segmentHeader.updateLockInterruptibly(s.segmentHeaderAddress);
} catch (InterProcessDeadLockException e) {
throw s.debugContextsAndLocks(e);
}
}
s.incrementUpdate();
s.setLocalLockState(UPDATE_LOCKED);
return;
case READ_LOCKED:
throw forbiddenUpgrade();
case UPDATE_LOCKED:
case WRITE_LOCKED:
// do nothing
}
}
@Override
public boolean tryLock() {
checkOnEachPublicOperation.checkOnEachLockOperation();
switch (s.localLockState) {
case UNLOCKED:
s.checkIterationContextNotLockedInThisThread();
if (s.updateZero() && s.writeZero()) {
if (!s.readZero())
throw forbiddenUpdateLockWhenOuterContextReadLocked();
if (s.segmentHeader.tryUpdateLock(s.segmentHeaderAddress)) {
s.incrementUpdate();
s.setLocalLockState(UPDATE_LOCKED);
return true;
} else {
return false;
}
} else {
s.incrementUpdate();
s.setLocalLockState(UPDATE_LOCKED);
return true;
}
case READ_LOCKED:
throw forbiddenUpgrade();
case UPDATE_LOCKED:
case WRITE_LOCKED:
return true;
default:
throw new IllegalStateException(hh.h().toIdentityString() +
": unexpected localLockState=" + s.localLockState);
}
}
@Override
public boolean tryLock(long time, @NotNull TimeUnit unit) throws InterruptedException {
checkOnEachPublicOperation.checkOnEachLockOperation();
if (Thread.interrupted())
throw new InterruptedException();
switch (s.localLockState) {
case UNLOCKED:
s.checkIterationContextNotLockedInThisThread();
if (s.updateZero() && s.writeZero()) {
if (!s.readZero())
throw forbiddenUpdateLockWhenOuterContextReadLocked();
if (s.segmentHeader.tryUpdateLock(s.segmentHeaderAddress, time, unit)) {
s.incrementUpdate();
s.setLocalLockState(UPDATE_LOCKED);
return true;
} else {
return false;
}
} else {
s.incrementUpdate();
s.setLocalLockState(UPDATE_LOCKED);
return true;
}
case READ_LOCKED:
throw forbiddenUpgrade();
case UPDATE_LOCKED:
case WRITE_LOCKED:
return true;
default:
throw new IllegalStateException(hh.h().toIdentityString() +
": unexpected localLockState=" + s.localLockState);
}
}
@Override
public void unlock() {<FILL_FUNCTION_BODY>}
@Override
public boolean isHeld() {
return s.localLockState != null &&
s.localLockState != UNLOCKED;
}
}
|
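        // unlocking the update/write level downgrades the segment lock to the read level rather
        // than releasing it completely (UNLOCKED and READ_LOCKED states return immediately)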
checkOnEachPublicOperation.checkOnEachLockOperation();
switch (s.localLockState) {
case UNLOCKED:
case READ_LOCKED:
return;
case UPDATE_LOCKED:
entry.closeDelayedUpdateChecksum();
if (s.decrementUpdate() == 0 && s.writeZero()) {
s.segmentHeader.downgradeUpdateToReadLock(s.segmentHeaderAddress);
}
break;
case WRITE_LOCKED:
entry.closeDelayedUpdateChecksum();
if (s.decrementWrite() == 0) {
if (!s.updateZero()) {
s.segmentHeader.downgradeWriteToUpdateLock(s.segmentHeaderAddress);
} else {
s.segmentHeader.downgradeWriteToReadLock(s.segmentHeaderAddress);
}
}
}
s.incrementRead();
s.setLocalLockState(READ_LOCKED);
| 1,445 | 242 | 1,687 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/hash/Chaining.java
|
Chaining
|
initUsed
|
class Chaining extends ChainingInterface {
public final List<ChainingInterface> contextChain;
public final int indexInContextChain;
/**
     * First context ever created in this thread. rootContextInThisThread === contextChain.get(0).
*/
public final ChainingInterface rootContextInThisThread;
@Stage("Used")
public boolean used;
@Stage("Used")
private boolean firstContextLockedInThisThread;
public Chaining(VanillaChronicleMap map) {
contextChain = new ArrayList<>();
contextChain.add(this);
indexInContextChain = 0;
rootContextInThisThread = this;
initMap(map);
}
public Chaining(ChainingInterface rootContextInThisThread, VanillaChronicleMap map) {
contextChain = rootContextInThisThread.getContextChain();
indexInContextChain = contextChain.size();
contextChain.add(this);
this.rootContextInThisThread = rootContextInThisThread;
initMap(map);
}
private static <T extends ChainingInterface> T initUsedAndReturn(
VanillaChronicleMap map, ChainingInterface context) {
try {
context.initUsed(true, map);
//noinspection unchecked
return (T) context;
} catch (Throwable throwable) {
try {
((AutoCloseable) context).close();
} catch (Throwable t) {
throwable.addSuppressed(t);
}
throw throwable;
}
}
@Override
public List<ChainingInterface> getContextChain() {
return contextChain;
}
public <T> T contextAtIndexInChain(int index) {
//noinspection unchecked
return (T) contextChain.get(index);
}
/**
     * This method stores a reference to the context's owner ChronicleMap into a field of the
     * context at the beginning of each usage of the context. Previously, this field was final and
     * set only once during context creation. That prevented ChronicleMap objects from becoming
     * unreachable and collected by the GC while any thread from which the ChronicleMap was
     * accessed (and hence a thread-local context created) was alive.
* <p>
* The chain of strong references:
* 1) Thread ->
* 2) ThreadLocalMap ->
* 3) Entry with ThreadLocal {@link net.openhft.chronicle.map.VanillaChronicleMap#cxt} as weak
* referent and a context (e. g. {@link net.openhft.chronicle.map.impl.CompiledMapQueryContext})
* as value (a simple field, not a weak reference!) ->
* 4) final reference to the owner {@link VanillaChronicleMap} ->
* 5) ThreadLocal {@link net.openhft.chronicle.map.VanillaChronicleMap#cxt} (a strong reference
* this time! note that this ThreadLocal is an instance field of VanillaChronicleMap)
* <p>
* So in order to break this chain at step 4), contexts store references to their owner
* ChronicleMaps only when contexts are used.
*/
public abstract void initMap(VanillaChronicleMap map);
@Override
public boolean usedInit() {
return used;
}
/**
     * Init method parameterized to avoid automatic initialization. The {@code used} argument
     * should always be {@code true}.
*/
@Override
public void initUsed(boolean used, VanillaChronicleMap map) {<FILL_FUNCTION_BODY>}
@SuppressWarnings("unused")
void closeUsed() {
used = false;
if (firstContextLockedInThisThread)
rootContextInThisThread.unlockContextLocally();
}
@Override
public <T extends ChainingInterface> T getContext(
Class<? extends T> contextClass, BiFunction<ChainingInterface,
VanillaChronicleMap, T> createChaining,
VanillaChronicleMap map) {
for (int i = 0; i < contextChain.size(); i++) {
ChainingInterface context = contextChain.get(i);
if (context.getClass() == contextClass && !context.usedInit()) {
return initUsedAndReturn(map, context);
}
}
int maxNestedContexts = 1 << 10;
if (contextChain.size() > maxNestedContexts) {
throw new IllegalStateException(map.toIdentityString() +
": More than " + maxNestedContexts + " nested ChronicleHash contexts\n" +
"are not supported. Very probable that you simply forgot to close context\n" +
"somewhere (recommended to use try-with-resources statement).\n" +
"Otherwise this is a bug, please report with this\n" +
"stack trace on https://github.com/OpenHFT/Chronicle-Map/issues");
}
//noinspection unchecked
T context = createChaining.apply(this, map);
return initUsedAndReturn(map, context);
}
}
|
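        // lock the root context locally for this usage, re-attach the owner map,
        // then mark the context as used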
assert used;
firstContextLockedInThisThread = rootContextInThisThread.lockContextLocally(map);
initMap(map);
this.used = true;
| 1,304 | 46 | 1,350 |
<methods>public non-sealed void <init>() ,public abstract T getContext(Class<? extends T>, BiFunction<net.openhft.chronicle.hash.impl.stage.hash.ChainingInterface,VanillaChronicleMap#RAW,T>, VanillaChronicleMap#RAW) ,public abstract List<net.openhft.chronicle.hash.impl.stage.hash.ChainingInterface> getContextChain() ,public abstract void initUsed(boolean, VanillaChronicleMap#RAW) ,public abstract boolean usedInit() <variables>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/hash/OwnerThreadHolder.java
|
OwnerThreadHolder
|
checkAccessingFromOwnerThread
|
class OwnerThreadHolder {
final Thread owner = Thread.currentThread();
@StageRef
VanillaChronicleHashHolder<?> hh;
public void checkAccessingFromOwnerThread() {<FILL_FUNCTION_BODY>}
}
|
if (owner != Thread.currentThread()) {
throw new ConcurrentModificationException(hh.h().toIdentityString() +
": Context shouldn't be accessed from multiple threads");
}
| 67 | 52 | 119 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/hash/ThreadLocalState.java
|
ThreadLocalState
|
closeContext
|
class ThreadLocalState {
private static final Memory MEMORY = OS.memory();
private static final long CONTEXT_LOCK_OFFSET;
private static final int CONTEXT_UNLOCKED = 0;
private static final int CONTEXT_LOCKED_LOCALLY = 1;
private static final int CONTEXT_CLOSED = 2;
static {
try {
Field contextLockField =
ThreadLocalState.class.getDeclaredField("contextLock");
contextLockField.setAccessible(true);
CONTEXT_LOCK_OFFSET = MEMORY.getFieldOffset(contextLockField);
} catch (NoSuchFieldException e) {
throw new AssertionError(e);
}
}
public boolean iterationContextLockedInThisThread;
private volatile int contextLock = CONTEXT_UNLOCKED;
/**
* Returns {@code true} if this is the outer context lock in this thread, {@code false} if this
* is a nested context.
*/
public boolean lockContextLocally(ChronicleHash<?, ?, ?, ?> hash) {
        // The hash.isOpen() check guarantees no starvation of a thread that calls chMap.close() and
        // tries to close this context via the closeContext() method below, while the thread owning
        // this context frequently locks and unlocks it (e.g. in a loop). This is also the only check
        // for chMap openness during the whole context usage lifecycle.
if (hash.isOpen() && MEMORY.compareAndSwapInt(this, CONTEXT_LOCK_OFFSET,
CONTEXT_UNLOCKED, CONTEXT_LOCKED_LOCALLY)) {
return true;
} else {
if (contextLock == CONTEXT_LOCKED_LOCALLY)
return false;
            // Don't extract this hash.isOpen() and the one above into a single check, because they
            // could return different results: the first (above) could return true, the second (below) false.
if (contextLock == CONTEXT_CLOSED || !hash.isOpen())
throw new ChronicleHashClosedException(hash);
throw new AssertionError("Unknown context lock state: " + contextLock);
}
}
public void unlockContextLocally() {
// Ensure all reads from mapped memory are done before thread calling chronicleMap.close()
// frees resources potentially unmapping some memory from where those reads are performed.
MEMORY.loadFence();
// Avoid volatile write to avoid expensive store-load barrier
MEMORY.writeOrderedInt(this, CONTEXT_LOCK_OFFSET, CONTEXT_UNLOCKED);
}
public void closeContext(String chronicleHashIdentityString) {<FILL_FUNCTION_BODY>}
private boolean tryCloseContext() {
return MEMORY.compareAndSwapInt(this, CONTEXT_LOCK_OFFSET,
CONTEXT_UNLOCKED, CONTEXT_CLOSED);
}
public abstract Thread owner();
/**
     * Returns whether this ThreadLocalState prevents a Map from being closed (which may be an asynchronous snapshot operation).
* <p>
* This method returns a snapshot value from a potentially volatile source that may change at any time.
*
* @return if prevents closing
*/
public boolean preventClose() {
return isLocked();
}
/**
     * Returns whether this ThreadLocalState is locked (asynchronous snapshot operation).
* <p>
* This method returns a snapshot value from a volatile source that may change at any time.
*
* @return if locked
*/
protected boolean isLocked() {
return contextLock == CONTEXT_LOCKED_LOCALLY;
}
}
|
if (tryCloseContext())
return;
        // Unless there are bugs in this codebase, contextLock == CONTEXT_CLOSED could happen here
        // only if closeContext() has succeeded and the subsequent contextHolder.clear() has failed
        // in ChronicleHashResources.closeContext(), though this is hardly imaginable:
        // contextHolder.clear() couldn't fail with OutOfMemoryError (because there are no
        // allocations in this method) or with StackOverflowError (because in that case
        // closeContext() would have failed with StackOverflowError before). But anyway it's
        // probably a good idea to make this check rather than omit it.
if (contextLock == CONTEXT_CLOSED)
return;
        // If the first attempt at closing a context (i.e. moving from the unused to the closed
        // state) failed, it means that the context is still in use. If this context belongs to the
        // current thread, this is a bug, because we cannot "wait" until the context is unused in
        // the same thread:
if (owner() == Thread.currentThread()) {
throw new IllegalStateException(chronicleHashIdentityString +
": Attempt to close a Chronicle Hash in the context " +
"of not yet finished query or iteration");
}
        // If the context belongs to a different thread, wait until that thread finishes its work
        // with the context:
        // Double the current timeout for segment locks "without timeout", which effectively
        // specifies the maximum lock (hence context) holding time
long timeoutMillis = TimeUnit.SECONDS.toMillis(LOCK_TIMEOUT_SECONDS) * 2;
long lastTime = System.currentTimeMillis();
do {
if (tryCloseContext())
return;
            // Unless there are bugs in this codebase, this should never happen. But anyway it's
            // probably a good idea to make this check rather than omit it.
if (contextLock == CONTEXT_CLOSED)
return;
Thread.yield();
long now = System.currentTimeMillis();
if (now != lastTime) {
lastTime = now;
timeoutMillis--;
}
} while (timeoutMillis >= 0);
throw new RuntimeException(chronicleHashIdentityString +
": Failed to close a context, belonging to the thread\n" +
owner() + ", in the state: " + owner().getState() + "\n" +
"Possible reasons:\n" +
"- The context owner thread exited before closing this context. Ensure that you\n" +
"always close opened Chronicle Map's contexts, the best way to do this is to use\n" +
"try-with-resources blocks." +
"- The context owner thread runs some context operation (e. g. a query) for\n" +
"unexpectedly long time (at least " + LOCK_TIMEOUT_SECONDS + " seconds).\n" +
"You should either redesign your logic to spend less time in Chronicle Map\n" +
"contexts (recommended) or synchronize map.close() with queries externally,\n" +
"so that close() is called only after all query operations finished.\n" +
"- Iteration over a large Chronicle Map takes more than " + LOCK_TIMEOUT_SECONDS +
" seconds.\n" +
"In this case you should synchronize map.close() with iterations over the map\n" +
"externally, so that close() is called only after all iterations are finished.\n" +
"- This is a dead lock involving the context owner thread and this thread (from\n" +
"which map.close() method is called. Make sure you always close Chronicle Map\n" +
"contexts, preferably using try-with-resources blocks.");
| 912 | 923 | 1,835 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/iter/HashSegmentIteration.java
|
HashSegmentIteration
|
forEachTierEntryWhile
|
class HashSegmentIteration<K, E extends HashEntry<K>>
implements HashEntry<K>, HashSegmentContext<K, E> {
@StageRef
public IterationSegmentStages s;
@StageRef
public CheckOnEachPublicOperation checkOnEachPublicOperation;
public boolean entryRemovedOnThisIteration = false;
public long hashLookupEntry = 0;
@StageRef
protected HashLookupPos hlp;
@StageRef
HashEntryStages<K> e;
@StageRef
VanillaChronicleHashHolder<?> hh;
public boolean shouldTestEntry() {
return true;
}
public Object entryForIteration() {
return this;
}
public long tierEntriesForIteration() {
throwExceptionIfClosed();
return s.tierEntries();
}
abstract boolean entryRemovedOnThisIterationInit();
protected void initEntryRemovedOnThisIteration(boolean entryRemovedOnThisIteration) {
this.entryRemovedOnThisIteration = entryRemovedOnThisIteration;
}
public abstract boolean hashLookupEntryInit();
public void initHashLookupEntry(long entry) {
throwExceptionIfClosed();
hashLookupEntry = entry;
}
abstract void closeHashLookupEntry();
@Override
public boolean forEachSegmentEntryWhile(Predicate<? super E> predicate) {
throwExceptionIfClosed();
checkOnEachPublicOperation.checkOnEachPublicOperation();
s.innerUpdateLock.lock();
return innerForEachSegmentEntryWhile(predicate);
}
public <T> boolean innerForEachSegmentEntryWhile(Predicate<? super T> predicate) {
try {
s.goToLastTier();
while (true) {
int currentTier = s.tier;
long currentTierBaseAddr = s.tierBaseAddr;
long currentTierIndex = s.tierIndex;
boolean interrupted = forEachTierEntryWhile(
predicate, currentTier, currentTierBaseAddr, currentTierIndex);
if (interrupted)
return false;
if (currentTier == 0)
return true;
s.prevTier();
}
} finally {
closeHashLookupEntry();
s.innerReadLock.unlock();
initEntryRemovedOnThisIteration(false);
}
}
public <T> boolean forEachTierEntryWhile(
Predicate<? super T> predicate,
int currentTier, long currentTierBaseAddr, long tierIndex) {<FILL_FUNCTION_BODY>}
public void hookAfterEachIteration() {
throwExceptionIfClosed();
}
@Override
public void forEachSegmentEntry(Consumer<? super E> action) {
throwExceptionIfClosed();
forEachSegmentEntryWhile(e -> {
action.accept(e);
return true;
});
}
public void checkEntryNotRemovedOnThisIteration() {
throwExceptionIfClosed();
if (entryRemovedOnThisIterationInit()) {
throw new IllegalStateException(
hh.h().toIdentityString() + ": Entry was already removed on this iteration");
}
}
@Override
public void doRemove() {
throwExceptionIfClosed();
checkOnEachPublicOperation.checkOnEachPublicOperation();
s.innerWriteLock.lock();
try {
iterationRemove();
} finally {
s.innerWriteLock.unlock();
}
initEntryRemovedOnThisIteration(true);
}
public void iterationRemove() {
throwExceptionIfClosed();
        // this condition means that some other entry has taken the place of the removed one
if (hh.h().hashLookup.remove(s.tierBaseAddr, hlp.hashLookupPos) != hlp.hashLookupPos) {
            // if so, we should step back to compensate for the step forward on the next iteration,
            // so that the shifted entry is consumed
hlp.setHashLookupPos(hh.h().hashLookup.stepBack(hlp.hashLookupPos));
}
e.innerRemoveEntryExceptHashLookupUpdate();
}
}
|
long leftEntries = tierEntriesForIteration();
boolean interrupted = false;
long startPos = 0L;
CompactOffHeapLinearHashTable hashLookup = hh.h().hashLookup;
// volatile read not needed because iteration is performed at least under update lock
while (!hashLookup.empty(hashLookup.readEntry(currentTierBaseAddr, startPos))) {
startPos = hashLookup.step(startPos);
}
hlp.initHashLookupPos(startPos);
long currentHashLookupPos;
int steps = 0;
do {
// Step from hlp.hashLookupPos, not currentHashLookupPos (with additional initialization
// of this local variable to startPos outside the loop), because if e.remove() is
// called in the `predicate`, hlp.hashLookupPos is stepped back in doRemove(), and
            // currentHashLookupPos becomes invalid
currentHashLookupPos = hashLookup.step(hlp.hashLookupPos);
steps++;
hlp.setHashLookupPos(currentHashLookupPos);
// volatile read not needed because iteration is performed at least under update lock
long entry = hashLookup.readEntry(currentTierBaseAddr, currentHashLookupPos);
initHashLookupEntry(entry);
if (!hashLookup.empty(entry)) {
e.readExistingEntry(hashLookup.value(entry));
if (shouldTestEntry()) {
initEntryRemovedOnThisIteration(false);
try {
if (!predicate.test((T) entryForIteration())) {
interrupted = true;
break;
} else {
if (--leftEntries == 0)
break;
}
} finally {
hookAfterEachIteration();
// if doReplaceValue() -> relocation() -> alloc() -> nextTier()
// was called, restore the tier we were iterating over
if (s.tier != currentTier) {
s.initSegmentTier_WithBaseAddr(
currentTier, currentTierBaseAddr, tierIndex);
                            // To cover the shift-deleted slot at the next step forward: the
                            // hash lookup entry is relocated to the next chained tier, and the
                            // slot in the _current_ tier's hash lookup is shift-deleted, see
                            // relocation()
currentHashLookupPos = hashLookup.stepBack(currentHashLookupPos);
steps--;
hlp.initHashLookupPos(currentHashLookupPos);
}
s.innerWriteLock.unlock();
// force entry checksum update (delayedUpdateChecksum depends on keyOffset)
e.closeKeyOffset();
}
}
}
            // the `steps == 0` condition and the updates of this variable in the loop fix the bug
            // where shift deletion occurs on the first entry of the tier and currentHashLookupPos
            // becomes equal to startPos without the whole loop having been made, with only a
            // single entry visited
} while (currentHashLookupPos != startPos || steps == 0);
if (!interrupted && leftEntries > 0) {
throw new IllegalStateException(hh.h().toIdentityString() +
": We went through a tier without interruption, " +
"but according to tier counters there should be " + leftEntries +
" more entries. Size diverged?");
}
return interrupted;
| 1,080 | 851 | 1,931 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/iter/IterationAlloc.java
|
IterationAlloc
|
alloc
|
class IterationAlloc implements Alloc {
@StageRef
public SegmentStages s;
/**
     * Move only to next tiers, to avoid visiting relocated entries twice during iteration
*/
@Override
public long alloc(int chunks, long prevPos, int prevChunks) {<FILL_FUNCTION_BODY>}
}
|
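        // try the current tier first; on failure keep moving forward to next tiers only
        // (never back), so relocated entries are not visited twice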
long ret = s.allocReturnCode(chunks);
if (prevPos >= 0)
s.free(prevPos, prevChunks);
if (ret >= 0)
return ret;
while (true) {
s.nextTier();
ret = s.allocReturnCode(chunks);
if (ret >= 0)
return ret;
}
| 91 | 96 | 187 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/iter/IterationKeyHashCode.java
|
IterationKeyHashCode
|
initKeyHash
|
class IterationKeyHashCode implements KeyHashCode {
@StageRef
VanillaChronicleHashHolder<?> hh;
@StageRef
SegmentStages s;
@StageRef
HashEntryStages<?> e;
long keyHash = 0;
void initKeyHash() {<FILL_FUNCTION_BODY>}
@Override
public long keyHashCode() {
return keyHash;
}
}
|
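        // hash the key bytes in place, directly from the segment memory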
long addr = s.tierBaseAddr + e.keyOffset;
long len = e.keySize;
keyHash = LongHashFunction.xx_r39().hashMemory(addr, len);
| 120 | 52 | 172 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/iter/IterationSegmentStages.java
|
IterationSegmentStages
|
checkNestedContextsQueryDifferentKeys
|
class IterationSegmentStages extends SegmentStages {
@StageRef
VanillaChronicleHashHolder<?> hh;
@StageRef
HashSegmentIteration it;
@StageRef
HashLookupSearch hls;
/**
* During iteration, nextTier() is called in doReplaceValue() -> relocation() -> alloc().
     * When the entry is relocated to the next tier, an entry should be inserted into the hash
     * lookup. To insert an entry into the hashLookup, we should 1) locate an empty slot, see
     * {@link KeySearch#initKeySearch()}, and 2) know the part of the hash code to insert, which
     * we already know during iteration
*/
@Override
public void nextTier() {
super.nextTier();
if (it.hashLookupEntryInit())
hls.initSearchKey(hh.h().hashLookup.key(it.hashLookupEntry));
}
public void initSegmentTier_WithBaseAddr(int tier, long tierBaseAddr, long tierIndex) {
this.tier = tier;
this.tierIndex = tierIndex;
this.tierBaseAddr = tierBaseAddr;
}
@Override
public void checkNestedContextsQueryDifferentKeys(
LocksInterface innermostContextOnThisSegment) {<FILL_FUNCTION_BODY>}
}
|
// this check is relevant only for query contexts
| 356 | 14 | 370 |
<methods>public non-sealed void <init>() ,public long allocReturnCode(int) ,public int changeAndGetLatestSameThreadSegmentModCount(int) ,public int changeAndGetTotalReadLockCount(int) ,public int changeAndGetTotalUpdateLockCount(int) ,public int changeAndGetTotalWriteLockCount(int) ,public void checkIterationContextNotLockedInThisThread() ,public void checkNestedContextsQueryDifferentKeys(net.openhft.chronicle.hash.impl.stage.entry.LocksInterface) ,public java.lang.RuntimeException debugContextsAndLocks(net.openhft.chronicle.hash.locks.InterProcessDeadLockException) ,public java.lang.String debugLocksState() ,public int decrementRead() ,public int decrementUpdate() ,public int decrementWrite() ,public void free(long, int) ,public void freeExtra(long, int, int) ,public void goToFirstTier() ,public void goToLastTier() ,public boolean hasNextTier() ,public void incrementModCount() ,public void incrementRead() ,public void incrementUpdate() ,public void incrementWrite() ,public void initSegmentIndex(int) ,public void initSegmentTier() ,public void initSegmentTier(int, long) ,public void initSegmentTier(int, long, long) ,public abstract boolean locksInit() ,public long lowestPossiblyFreeChunk() ,public void lowestPossiblyFreeChunk(long) ,public void nextTier() ,public long nextTierIndex() ,public void nextTierIndex(long) ,public void prevTier() ,public long prevTierIndex() ,public void prevTierIndex(long) ,public net.openhft.chronicle.hash.locks.InterProcessLock readLock() ,public void readUnlockAndDecrementCount() ,public boolean readZero() ,public boolean realloc(long, int, int) ,public Bytes#RAW segmentBytesForRead() ,public Bytes#RAW segmentBytesForWrite() ,public abstract boolean segmentIndexInit() ,public abstract boolean segmentTierInit() ,public void setLocalLockState(net.openhft.chronicle.hash.impl.LocalLockState) ,public void setNestedContextsLockedOnSameSegment(boolean) ,public void setNextNode(net.openhft.chronicle.hash.impl.stage.entry.LocksInterface) ,public long size() ,public long tierCountersAreaAddr() ,public long tierDeleted() ,public void tierDeleted(long) ,public long tierEntries() ,public void tierEntries(long) ,public net.openhft.chronicle.hash.locks.InterProcessLock updateLock() ,public boolean updateZero() ,public void verifyTierCountersAreaData() ,public net.openhft.chronicle.hash.locks.InterProcessLock writeLock() ,public boolean writeZero() <variables>net.openhft.chronicle.hash.impl.stage.hash.Chaining chaining,public net.openhft.chronicle.hash.impl.stage.hash.CheckOnEachPublicOperation checkOnEachPublicOperation,public int contextModCount,public long entrySpaceOffset,public final net.openhft.chronicle.algo.bitset.ReusableBitSet freeList,public VanillaChronicleHashHolder<?> hh,public net.openhft.chronicle.hash.impl.stage.entry.ReadLock innerReadLock,public net.openhft.chronicle.hash.impl.stage.entry.UpdateLock innerUpdateLock,public net.openhft.chronicle.hash.impl.stage.entry.WriteLock innerWriteLock,public int latestSameThreadSegmentModCount,net.openhft.chronicle.hash.impl.LocalLockState localLockState,public boolean nestedContextsLockedOnSameSegment,net.openhft.chronicle.hash.impl.stage.entry.LocksInterface nextNode,public net.openhft.chronicle.hash.impl.stage.entry.LocksInterface rootContextLockedOnThisSegment,public final net.openhft.chronicle.bytes.PointerBytesStore segmentBS,public final Bytes#RAW segmentBytes,public net.openhft.chronicle.hash.impl.SegmentHeader segmentHeader,public long segmentHeaderAddress,public int segmentIndex,public int tier,public long 
tierBaseAddr,public long tierIndex,int totalReadLockCount,int totalUpdateLockCount,int totalWriteLockCount
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/iter/SegmentsRecovery.java
|
SegmentsRecovery
|
zeroOutFirstSegmentTierCountersArea
|
class SegmentsRecovery implements IterationContext {
@StageRef
VanillaChronicleHashHolder<?> hh;
@StageRef
SegmentStages s;
@StageRef
TierRecovery tierRecovery;
@Override
public void recoverSegments(
ChronicleHashCorruption.Listener corruptionListener,
ChronicleHashCorruptionImpl corruption) {
throwExceptionIfClosed();
VanillaChronicleHash<?, ?, ?, ?> h = hh.h();
for (int segmentIndex = 0; segmentIndex < h.actualSegments; segmentIndex++) {
s.initSegmentIndex(segmentIndex);
resetSegmentLock(corruptionListener, corruption);
zeroOutFirstSegmentTierCountersArea(corruptionListener, corruption);
tierRecovery.recoverTier(segmentIndex, corruptionListener, corruption);
}
VanillaGlobalMutableState globalMutableState = h.globalMutableState();
long storedExtraTiersInUse = globalMutableState.getExtraTiersInUse();
long allocatedExtraTiers = globalMutableState.getAllocatedExtraTierBulks() * h.tiersInBulk;
long expectedExtraTiersInUse =
Math.max(0, Math.min(storedExtraTiersInUse, allocatedExtraTiers));
long actualExtraTiersInUse = 0;
long firstFreeExtraTierIndex = -1;
for (long extraTierIndex = 0; extraTierIndex < expectedExtraTiersInUse; extraTierIndex++) {
long tierIndex = h.extraTierIndexToTierIndex(extraTierIndex);
// `tier` is unused in recoverTier(), 0 should be a safe value
s.initSegmentTier(0, tierIndex);
int segmentIndex = tierRecovery.recoverTier(
-1, corruptionListener, corruption);
if (segmentIndex >= 0) {
long tierCountersAreaAddr = s.tierCountersAreaAddr();
int storedSegmentIndex = TierCountersArea.segmentIndex(tierCountersAreaAddr);
if (storedSegmentIndex != segmentIndex) {
report(corruptionListener, corruption, segmentIndex, () ->
format("wrong segment index stored in tier counters area " +
"of tier with index {}: {}, should be, based on entries: {}",
tierIndex, storedSegmentIndex, segmentIndex)
);
TierCountersArea.segmentIndex(tierCountersAreaAddr, segmentIndex);
}
TierCountersArea.nextTierIndex(tierCountersAreaAddr, 0);
s.initSegmentIndex(segmentIndex);
s.goToLastTier();
s.nextTierIndex(tierIndex);
TierCountersArea.prevTierIndex(tierCountersAreaAddr, s.tierIndex);
TierCountersArea.tier(tierCountersAreaAddr, s.tier + 1);
actualExtraTiersInUse = extraTierIndex + 1;
} else {
firstFreeExtraTierIndex = extraTierIndex;
break;
}
}
if (storedExtraTiersInUse != actualExtraTiersInUse) {
long finalActualExtraTiersInUse = actualExtraTiersInUse;
report(corruptionListener, corruption, -1, () ->
format("wrong number of actual tiers in use in global mutable state, stored: {}, " +
"should be: " + storedExtraTiersInUse, finalActualExtraTiersInUse)
);
globalMutableState.setExtraTiersInUse(actualExtraTiersInUse);
}
long firstFreeTierIndex;
if (firstFreeExtraTierIndex == -1) {
if (allocatedExtraTiers > expectedExtraTiersInUse) {
firstFreeTierIndex = h.extraTierIndexToTierIndex(expectedExtraTiersInUse);
} else {
firstFreeTierIndex = 0;
}
} else {
firstFreeTierIndex = h.extraTierIndexToTierIndex(firstFreeExtraTierIndex);
}
if (firstFreeTierIndex > 0) {
long lastTierIndex = h.extraTierIndexToTierIndex(allocatedExtraTiers - 1);
h.linkAndZeroOutFreeTiers(firstFreeTierIndex, lastTierIndex);
}
long storedFirstFreeTierIndex = globalMutableState.getFirstFreeTierIndex();
if (storedFirstFreeTierIndex != firstFreeTierIndex) {
report(corruptionListener, corruption, -1, () ->
format("wrong first free tier index in global mutable state, stored: {}, " +
"should be: " + storedFirstFreeTierIndex, firstFreeTierIndex)
);
globalMutableState.setFirstFreeTierIndex(firstFreeTierIndex);
}
removeDuplicatesInSegments(corruptionListener, corruption);
}
private void removeDuplicatesInSegments(
ChronicleHashCorruption.Listener corruptionListener,
ChronicleHashCorruptionImpl corruption) {
VanillaChronicleHash<?, ?, ?, ?> h = hh.h();
for (int segmentIndex = 0; segmentIndex < h.actualSegments; segmentIndex++) {
s.initSegmentIndex(segmentIndex);
s.initSegmentTier();
s.goToLastTier();
while (true) {
tierRecovery.removeDuplicatesInSegment(corruptionListener, corruption);
if (s.tier > 0) {
s.prevTier();
} else {
break;
}
}
}
}
private void resetSegmentLock(
ChronicleHashCorruption.Listener corruptionListener,
ChronicleHashCorruptionImpl corruption) {
long lockState = s.segmentHeader.getLockState(s.segmentHeaderAddress);
if (lockState != s.segmentHeader.resetLockState()) {
report(corruptionListener, corruption, s.segmentIndex, () ->
format("lock of segment {} is not clear: {}",
s.segmentIndex, s.segmentHeader.lockStateToString(lockState))
);
s.segmentHeader.resetLock(s.segmentHeaderAddress);
}
}
private void zeroOutFirstSegmentTierCountersArea(
ChronicleHashCorruption.Listener corruptionListener,
ChronicleHashCorruptionImpl corruption) {<FILL_FUNCTION_BODY>}
}
|
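        // the first tier of a segment should not reference other tiers: zero out the next tier
        // index, and make sure the prev tier index, stored segment index and tier number are all 0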
s.nextTierIndex(0);
if (s.prevTierIndex() != 0) {
report(corruptionListener, corruption, s.segmentIndex, () ->
format("stored prev tier index in first tier of segment {}: {}, should be 0",
s.segmentIndex, s.prevTierIndex())
);
s.prevTierIndex(0);
}
long tierCountersAreaAddr = s.tierCountersAreaAddr();
if (TierCountersArea.segmentIndex(tierCountersAreaAddr) != 0) {
report(corruptionListener, corruption, s.segmentIndex, () ->
format("stored segment index in first tier of segment {}: {}, should be 0",
s.segmentIndex, TierCountersArea.segmentIndex(tierCountersAreaAddr))
);
TierCountersArea.segmentIndex(tierCountersAreaAddr, 0);
}
if (TierCountersArea.tier(tierCountersAreaAddr) != 0) {
report(corruptionListener, corruption, s.segmentIndex, () ->
format("stored tier in first tier of segment {}: {}, should be 0",
s.segmentIndex, TierCountersArea.tier(tierCountersAreaAddr))
);
TierCountersArea.tier(tierCountersAreaAddr, 0);
}
| 1,660 | 364 | 2,024 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/query/HashQuery.java
|
HashQuery
|
tieredEntryPresent
|
class HashQuery<K> implements SetEntry<K> {
@StageRef
public VanillaChronicleHashHolder<K> hh;
final DataAccess<K> innerInputKeyDataAccess = hh.h().keyDataAccess.copy();
@StageRef
public SegmentStages s;
@StageRef
public HashEntryStages<K> entry;
@StageRef
public HashLookupSearch hashLookupSearch;
@StageRef
public CheckOnEachPublicOperation checkOnEachPublicOperation;
@StageRef
public HashLookupPos hlp;
@StageRef
public KeySearch<K> ks;
/**
* This stage exists for hooking {@link #innerInputKeyDataAccess} usage, to trigger {@link
* DataAccess#uninit()} on context exit
*/
@Stage("InputKeyDataAccess")
private boolean inputKeyDataAccessInitialized = false;
@Stage("PresenceOfEntry")
private EntryPresence entryPresence = null;
void initInputKeyDataAccess() {
inputKeyDataAccessInitialized = true;
}
void closeInputKeyDataAccess() {
innerInputKeyDataAccess.uninit();
inputKeyDataAccessInitialized = false;
}
public DataAccess<K> inputKeyDataAccess() {
initInputKeyDataAccess();
return innerInputKeyDataAccess;
}
public void dropSearchIfNestedContextsAndPresentHashLookupSlotCheckFailed() {
if (s.locksInit()) {
if (s.nestedContextsLockedOnSameSegment &&
s.rootContextLockedOnThisSegment.latestSameThreadSegmentModCount() !=
s.contextModCount) {
if (ks.keySearchInit() && ks.searchStatePresent() &&
!hashLookupSearch.checkSlotContainsExpectedKeyAndValue(entry.pos)) {
hlp.closeHashLookupPos();
}
}
}
}
public Data<K> queriedKey() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
return ks.inputKey;
}
private void initPresenceOfEntry() {
if (ks.searchStatePresent() || tieredEntryPresent()) {
entryPresence = EntryPresence.PRESENT;
} else {
entryPresence = EntryPresence.ABSENT;
}
}
public void initPresenceOfEntry(EntryPresence entryPresence) {
this.entryPresence = entryPresence;
}
private boolean tieredEntryPresent() {<FILL_FUNCTION_BODY>}
public boolean entryPresent() {
return entryPresence == EntryPresence.PRESENT;
}
@Override
public void doRemove() {
checkOnEachPublicOperation.checkOnEachPublicOperation();
s.innerWriteLock.lock();
if (ks.searchStatePresent()) {
entry.innerRemoveEntryExceptHashLookupUpdate();
hashLookupSearch.remove();
ks.setSearchState(ABSENT);
initPresenceOfEntry(EntryPresence.ABSENT);
} else {
throw new IllegalStateException(
hh.h().toIdentityString() + ": Entry is absent when doRemove() is called");
}
}
public enum EntryPresence {PRESENT, ABSENT}
}
|
int firstTier = s.tier;
long firstTierBaseAddr = s.tierBaseAddr;
while (true) {
if (s.hasNextTier()) {
s.nextTier();
} else {
if (s.tier != 0)
s.initSegmentTier(); // loop to the root tier
}
if (s.tierBaseAddr == firstTierBaseAddr)
break;
if (ks.searchStatePresent())
return true;
}
// not found
if (firstTier != 0) {
            // the key is absent; we are probably going to allocate a new entry;
// start trying from the root tier
s.initSegmentTier();
}
return false;
| 849 | 194 | 1,043 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/query/KeySearch.java
|
KeySearch
|
initKeySearch
|
class KeySearch<K> {
@StageRef
public SegmentStages s;
@StageRef
public HashLookupSearch hashLookupSearch;
@StageRef
public HashEntryStages<K> entry;
public Data<K> inputKey = null;
@Stage("KeySearch")
protected SearchState searchState = null;
@StageRef
VanillaChronicleMapHolder<?, ?, ?> mh;
public abstract boolean inputKeyInit();
public void initInputKey(Data<K> inputKey) {
this.inputKey = inputKey;
}
public abstract boolean keySearchInit();
@Stage("KeySearch")
public void setSearchState(SearchState newSearchState) {
this.searchState = newSearchState;
}
public void initKeySearch() {<FILL_FUNCTION_BODY>}
boolean keyEquals(long keySize, long keyOffset) {
return inputKey.size() == keySize && inputKey.equivalent(s.segmentBS, keyOffset);
}
public boolean searchStatePresent() {
return searchState == PRESENT;
}
public boolean searchStateAbsent() {
return searchState == ABSENT;
}
public enum SearchState {
PRESENT,
ABSENT
}
}
|
for (long pos; (pos = hashLookupSearch.nextPos()) >= 0L; ) {
            // otherwise we are inside an iteration relocation.
            // During iteration, a key search occurs when doReplaceValue() exhausts the space in
            // the current segment, and insertion into the tiered segment requires locating
            // an empty slot in the hashLookup.
if (inputKeyInit()) {
long keySizeOffset = s.entrySpaceOffset + pos * mh.m().chunkSize;
Bytes segmentBytes = s.segmentBytesForRead();
segmentBytes.readPosition(keySizeOffset);
long keySize = mh.h().keySizeMarshaller.readSize(segmentBytes);
long keyOffset = segmentBytes.readPosition();
if (!keyEquals(keySize, keyOffset))
continue;
hashLookupSearch.found();
entry.readFoundEntry(pos, keySizeOffset, keySize, keyOffset);
searchState = PRESENT;
return;
}
}
searchState = SearchState.ABSENT;
| 343 | 259 | 602 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/query/QueryAlloc.java
|
QueryAlloc
|
alloc
|
class QueryAlloc implements Alloc {
@StageRef
public SegmentStages s;
@Override
public long alloc(int chunks, long prevPos, int prevChunks) {<FILL_FUNCTION_BODY>}
}
|
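        // try the current tier first, then retry in every tier starting from the first one,
        // skipping the tier already attempted, moving to next tiers until allocation succeeds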
long ret = s.allocReturnCode(chunks);
if (prevPos >= 0)
s.free(prevPos, prevChunks);
if (ret >= 0)
return ret;
int alreadyAttemptedTier = s.tier;
s.goToFirstTier();
while (true) {
if (s.tier != alreadyAttemptedTier) {
ret = s.allocReturnCode(chunks);
if (ret >= 0)
return ret;
}
s.nextTier();
}
| 63 | 142 | 205 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/stage/query/SearchAllocatedChunks.java
|
SearchAllocatedChunks
|
initEntryAndKey
|
class SearchAllocatedChunks extends AllocatedChunks {
@StageRef
KeySearch<?> ks;
/**
* @return {@code true} if tier has changed
*/
public boolean initEntryAndKey(long entrySize) {<FILL_FUNCTION_BODY>}
}
|
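        // allocate chunks for the new entry (possibly moving to another tier) and write the key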
initAllocatedChunks(hh.h().inChunks(entrySize));
int tierBeforeAllocation = s.tier;
long pos = alloc.alloc(allocatedChunks, -1, 0);
entry.writeNewEntry(pos, ks.inputKey);
return s.tier != tierBeforeAllocation;
| 80 | 87 | 167 |
<methods>public non-sealed void <init>() ,public void initAllocatedChunks(int) ,public boolean initEntryAndKeyCopying(long, long, long, int) <variables>public net.openhft.chronicle.hash.impl.stage.entry.Alloc alloc,public int allocatedChunks,public HashEntryStages<?> entry,public VanillaChronicleHashHolder<?> hh,public net.openhft.chronicle.hash.impl.stage.entry.SegmentStages s
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/BuildVersion.java
|
BuildVersion
|
getVersionFromPom
|
class BuildVersion {
private static String version = null;
private BuildVersion() {
}
public static void main(String[] args) {
System.out.println(version());
}
/**
     * @return the version of ChronicleMap being used, or {@code null} if it is not known
*/
public synchronized static String version() {
if (version != null) {
return version;
}
try {
// the best way to get the version is to read the map.version file
InputStream resource = BuildVersion.class.getClassLoader().getResourceAsStream("map" +
".version");
BufferedReader in = new BufferedReader(new InputStreamReader(resource, StandardCharsets.UTF_8));
            version = in.readLine().trim();
            // only return here if the Maven placeholder was substituted at build time;
            // otherwise fall through to the manifest and pom fallbacks below
            if (!"${project.version}".equals(version))
                return version;
            version = null;
} catch (Exception e) {
// do nothing
}
// another way to get the version is to read it from the manifest
final String versionFromManifest = getVersionFromManifest();
if (versionFromManifest != null) {
version = versionFromManifest;
return version;
}
// as a fall back for development, we will read the version from the pom file
version = getVersionFromPom();
return version;
}
/**
     * This should be used by everyone that has installed Chronicle Map as a JAR
     *
     * @return the version from the manifest, or null if it cannot be read
*/
private static String getVersionFromManifest() {
return ChronicleMapBuilder.class.getPackage().getImplementationVersion();
}
/**
     * Reads the pom file to get the version; only to be used for development or within the IDE.
     *
     * @return the version from the pom.xml
*/
private static String getVersionFromPom() {<FILL_FUNCTION_BODY>}
}
|
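        // walk up from the location of this compiled class to the project root directory,
        // where pom.xml is expected to live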
final String absolutePath = new File(BuildVersion.class.getResource(BuildVersion.class
.getSimpleName() + ".class").getPath())
.getParentFile().getParentFile().getParentFile().getParentFile().getParentFile()
.getParentFile().getParentFile().getAbsolutePath();
final File file = new File(absolutePath + "/pom.xml");
try (InputStreamReader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)) {
final MavenXpp3Reader xpp3Reader = new MavenXpp3Reader();
Model model = xpp3Reader.read(reader);
return model.getVersion();
} catch (NoClassDefFoundError e) {
// if you want to resolve the version during development, add pax-url-aether.jar
// to your pom
return null;
} catch (Exception e) {
return null;
}
| 500 | 240 | 740 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/CharSequences.java
|
CharSequences
|
equivalent
|
class CharSequences {
private CharSequences() {
}
public static boolean equivalent(@NotNull CharSequence a, @NotNull CharSequence b) {<FILL_FUNCTION_BODY>}
public static int hash(@NotNull CharSequence cs) {
if (cs instanceof String)
return cs.hashCode();
int h = 0;
for (int i = 0, len = cs.length(); i < len; i++) {
h = 31 * h + cs.charAt(i);
}
return h;
}
}
|
if (a.equals(b))
return true;
if (a instanceof String)
return ((String) a).contentEquals(b);
if (b instanceof String)
return ((String) b).contentEquals(a);
int len = a.length();
if (len != b.length())
return false;
for (int i = 0; i < len; i++) {
if (a.charAt(i) != b.charAt(i))
return false;
}
return true;
| 145 | 135 | 280 |
<no_super_class>
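Example (hedged): a brief usage sketch of the two helpers above, assuming only the JDK; the variable names are illustrative.
// equivalent() takes the String.contentEquals fast path here, and hash() matches
// String.hashCode() for equal contents, so both checks come out true.
CharSequence a = "chronicle";
CharSequence b = new StringBuilder("chronicle");
boolean sameContent = CharSequences.equivalent(a, b);              // true
boolean sameHash = CharSequences.hash(a) == CharSequences.hash(b); // true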
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/CleanerUtils.java
|
CleanerUtils
|
doClean
|
class CleanerUtils {
private static final Method CREATE_METHOD;
private static final Method CLEAN_METHOD;
static {
try {
Class<?> cleanerClass = Class.forName(Jvm.isJava9Plus() ? "jdk.internal.ref.Cleaner" : "sun.misc.Cleaner");
CREATE_METHOD = cleanerClass.getDeclaredMethod("create", Object.class, Runnable.class);
Jvm.setAccessible(CREATE_METHOD);
CLEAN_METHOD = cleanerClass.getDeclaredMethod("clean");
Jvm.setAccessible(CLEAN_METHOD);
} catch (ClassNotFoundException | NoSuchMethodException e) {
Jvm.error().on(CleanerUtils.class, "Unable to initialise CleanerUtils", e);
throw new RuntimeException(e);
}
}
public static Cleaner createCleaner(Object ob, Runnable thunk) {
try {
Object cleanerInstance = CREATE_METHOD.invoke(null, ob, thunk);
return () -> doClean(cleanerInstance);
} catch (IllegalAccessException | InvocationTargetException e) {
Jvm.error().on(CleanerUtils.class, "Unable to create cleaner", e);
throw new RuntimeException(e);
}
}
private static void doClean(Object cleanerInstance) {<FILL_FUNCTION_BODY>}
}
|
try {
CLEAN_METHOD.invoke(cleanerInstance);
} catch (IllegalAccessException | InvocationTargetException e) {
Jvm.warn().on(CleanerUtils.class, "Failed to clean buffer", e);
}
| 355 | 64 | 419 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/FileIOUtils.java
|
FileIOUtils
|
readFully
|
class FileIOUtils {
private FileIOUtils() {
}
public static void readFully(FileChannel fileChannel, long filePosition, ByteBuffer buffer)
throws IOException {<FILL_FUNCTION_BODY>}
public static void writeFully(FileChannel fileChannel, long filePosition, ByteBuffer buffer)
throws IOException {
int startBufferPosition = buffer.position();
while (buffer.remaining() > 0) {
fileChannel.write(buffer, filePosition + buffer.position() - startBufferPosition);
}
}
}
|
int startBufferPosition = buffer.position();
while (buffer.remaining() > 0 &&
buffer.position() < fileChannel.size()) {
int bytesRead = fileChannel.read(buffer,
filePosition + buffer.position() - startBufferPosition);
if (bytesRead == -1)
break;
}
| 139 | 83 | 222 |
<no_super_class>
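Example (hedged): a small sketch of driving readFully/writeFully with a FileChannel; the file name is a placeholder and the method is assumed to live in some surrounding class.
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

static void roundTrip() throws java.io.IOException {
    // Write 16 bytes at offset 0, then read them back; both helpers loop until the buffer is drained/filled.
    try (FileChannel ch = FileChannel.open(Paths.get("data.bin"),
            StandardOpenOption.CREATE, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
        ByteBuffer out = ByteBuffer.allocate(16);
        out.putLong(1L).putLong(2L);
        out.flip();
        FileIOUtils.writeFully(ch, 0, out);

        ByteBuffer in = ByteBuffer.allocate(16);
        FileIOUtils.readFully(ch, 0, in);
        in.flip(); // "in" now holds the 16 bytes written above
    }
}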
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/Objects.java
|
Objects
|
builderEquals
|
class Objects {
private Objects() {
}
public static int hash(Object... values) {
return Arrays.hashCode(values);
}
public static boolean equal(@Nullable Object a, @Nullable Object b) {
return a != null ? a.equals(b) : b == null;
}
public static boolean builderEquals(@NotNull Object builder, @Nullable Object o) {<FILL_FUNCTION_BODY>}
public static void requireNonNull(Object obj) {
if (obj == null)
throw new NullPointerException();
}
}
|
return builder == o ||
o != null && builder.getClass() == o.getClass() &&
builder.toString().equals(o.toString());
| 148 | 41 | 189 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/Throwables.java
|
Throwables
|
returnOrSuppress
|
class Throwables {
private Throwables() {
}
public static RuntimeException propagate(Throwable t) {
// Avoid calling Objects.requireNonNull(), StackOverflowError-sensitive
if (t == null)
throw new NullPointerException();
if (t instanceof Error)
throw (Error) t;
if (t instanceof RuntimeException)
throw (RuntimeException) t;
throw new RuntimeException(t);
}
public static <T extends Throwable> T propagateNotWrapping(
Throwable t, Class<T> notWrappingThrowableType) throws T {
Objects.requireNonNull(t);
Objects.requireNonNull(notWrappingThrowableType);
if (t instanceof Error)
throw (Error) t;
if (t instanceof RuntimeException)
throw (RuntimeException) t;
if (notWrappingThrowableType.isInstance(t))
throw notWrappingThrowableType.cast(t);
throw new RuntimeException(t);
}
public static Throwable returnOrSuppress(Throwable thrown, Throwable t) {<FILL_FUNCTION_BODY>}
}
|
if (thrown == null) {
return t;
} else {
if (t != null)
thrown.addSuppressed(t);
return thrown;
}
| 295 | 50 | 345 |
<no_super_class>
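Example (hedged): a sketch of how returnOrSuppress is typically used when tearing down several resources, keeping the first failure as the primary one; the closeAll helper itself is hypothetical.
// Close every resource; remember the first Throwable and attach later ones as suppressed.
static void closeAll(java.util.List<? extends AutoCloseable> resources) {
    Throwable thrown = null;
    for (AutoCloseable c : resources) {
        try {
            c.close();
        } catch (Throwable t) {
            thrown = Throwables.returnOrSuppress(thrown, t);
        }
    }
    if (thrown != null)
        throw Throwables.propagate(thrown); // rethrows, wrapping checked exceptions
}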
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/jna/PosixFallocate.java
|
PosixFallocate
|
fallocate
|
class PosixFallocate {
private PosixFallocate() {
}
public static void fallocate(FileDescriptor descriptor, long offset, long length) throws IOException {<FILL_FUNCTION_BODY>}
private static int getNativeFileDescriptor(FileDescriptor descriptor) throws IOException {
try {
final Field field = descriptor.getClass().getDeclaredField("fd");
Jvm.setAccessible(field);
return (int) field.get(descriptor);
} catch (final Exception e) {
throw new IOException("unsupported FileDescriptor implementation", e);
}
}
}
|
int fd = getNativeFileDescriptor(descriptor);
if (fd != -1) {
int ret = PosixAPI.posix().fallocate(getNativeFileDescriptor(descriptor), 0, offset, length);
if (ret != 0) {
throw new IOException("posix_fallocate() returned " + ret);
}
}
| 153 | 93 | 246 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/math/ContinuedFraction.java
|
ContinuedFraction
|
evaluate
|
class ContinuedFraction {
/**
* Access the n-th a coefficient of the continued fraction. Since a can be
* a function of the evaluation point, x is passed in as well.
*
* @param n the coefficient index to retrieve.
* @param x the evaluation point.
* @return the n-th a coefficient.
*/
protected abstract double getA(int n, double x);
/**
* Access the n-th b coefficient of the continued fraction. Since b can be
* a function of the evaluation point, x is passed in as well.
*
* @param n the coefficient index to retrieve.
* @param x the evaluation point.
* @return the n-th b coefficient.
*/
protected abstract double getB(int n, double x);
/**
* Evaluates the continued fraction at the value x.
* <p>
* The implementation of this method is based on the modified Lentz algorithm as described
* on page 18 ff. in:
* <ul>
* <li>
* I. J. Thompson, A. R. Barnett. "Coulomb and Bessel Functions of Complex Arguments and Order."
* <a target="_blank" href="http://www.fresco.org.uk/papers/Thompson-JCP64p490.pdf">
* http://www.fresco.org.uk/papers/Thompson-JCP64p490.pdf</a>
* </li>
* </ul>
* <b>Note:</b> the implementation uses the terms a<sub>i</sub> and b<sub>i</sub> as defined in
* <a href="http://mathworld.wolfram.com/ContinuedFraction.html">Continued Fraction @ MathWorld</a>.
*
* @param x the evaluation point.
* @param epsilon maximum error allowed.
* @param maxIterations maximum number of convergents
* @return the value of the continued fraction evaluated at x.
* @throws IllegalStateException if the algorithm fails to converge.
* @throws IllegalStateException if maximal number of iterations is reached
*/
public double evaluate(double x, double epsilon, int maxIterations) {<FILL_FUNCTION_BODY>}
}
|
final double small = 1e-50;
double hPrev = getA(0, x);
// use the value of small as epsilon criteria for zero checks
if (Precision.isEquals(hPrev, 0.0, small)) {
hPrev = small;
}
int n = 1;
double dPrev = 0.0;
double cPrev = hPrev;
double hN = hPrev;
while (n < maxIterations) {
final double a = getA(n, x);
final double b = getB(n, x);
double dN = a + b * dPrev;
if (Precision.isEquals(dN, 0.0, small)) {
dN = small;
}
double cN = a + b / cPrev;
if (Precision.isEquals(cN, 0.0, small)) {
cN = small;
}
dN = 1 / dN;
final double deltaN = cN * dN;
hN = hPrev * deltaN;
if (Double.isInfinite(hN)) {
throw new IllegalStateException(
"Continued fraction convergents diverged to +/- infinity for value " + x);
}
if (Double.isNaN(hN)) {
throw new IllegalStateException(
"Continued fraction diverged to NaN for value " + x);
}
if (Math.abs(deltaN - 1.0) < epsilon) {
break;
}
dPrev = dN;
cPrev = cN;
hPrev = hN;
n++;
}
if (n >= maxIterations) {
throw new IllegalStateException(
"Continued fraction convergents failed to converge (in less than " +
maxIterations + " iterations) for value " + x);
}
return hN;
| 599 | 491 | 1,090 |
<no_super_class>
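Example (hedged): a concrete instance of the evaluate() contract above; with all a and b coefficients equal to 1, the fraction 1 + 1/(1 + 1/(1 + ...)) converges to the golden ratio.
ContinuedFraction goldenRatio = new ContinuedFraction() {
    @Override
    protected double getA(int n, double x) { return 1.0; } // every a coefficient is 1
    @Override
    protected double getB(int n, double x) { return 1.0; } // every b coefficient is 1
};
double phi = goldenRatio.evaluate(0.0, 1e-12, 10_000); // ~1.6180339887; x is unused here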
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/math/PoissonDistribution.java
|
PoissonDistribution
|
solveInverseCumulativeProbability
|
class PoissonDistribution {
private static final double EPSILON = 1e-12;
/**
* Poisson distribution is used to estimate segment fillings. Segments are not bound with
* Integer.MAX_VALUE, but it's not clear if algorithms from Commons Math could work with such
* big values as Long.MAX_VALUE.
*/
private static final long UPPER_BOUND = 1L << 36;
private static final int MAX_ITERATIONS = 10000000;
public static double cumulativeProbability(double mean, long x) {
if (x < 0) {
return 0;
}
if (x >= UPPER_BOUND) {
return 1;
}
return Gamma.regularizedGammaQ((double) x + 1, mean, EPSILON, MAX_ITERATIONS);
}
public static long inverseCumulativeProbability(double mean, double p) {
checkProbability(p);
long lower = 0;
if (p == 0.0) {
return lower;
}
lower -= 1; // this ensures cumulativeProbability(lower) < p, which
// is important for the solving step
long upper = UPPER_BOUND;
if (p == 1.0) {
return upper;
}
// use the one-sided Chebyshev inequality to narrow the bracket
// cf. AbstractRealDistribution.inverseCumulativeProbability(double)
final double mu = mean;
// in Poisson distribution, variance == mean
double variance = mean;
final double sigma = Math.sqrt(variance);
final boolean chebyshevApplies = !(Double.isInfinite(mu) || Double.isNaN(mu) ||
Double.isInfinite(sigma) || Double.isNaN(sigma) || sigma == 0.0);
if (chebyshevApplies) {
double k = Math.sqrt((1.0 - p) / p);
double tmp = mu - k * sigma;
if (tmp > lower) {
lower = ((int) Math.ceil(tmp)) - 1L;
}
k = 1.0 / k;
tmp = mu + k * sigma;
if (tmp < upper) {
upper = ((int) Math.ceil(tmp)) - 1L;
}
}
return solveInverseCumulativeProbability(mean, p, lower, upper);
}
private static void checkProbability(double p) {
if (p < 0.0 || p > 1.0) {
throw new IllegalArgumentException("probability should be in [0.0, 1.0] bounds, " + p +
" given");
}
}
/**
* This is a utility function used by {@link
* #inverseCumulativeProbability(double, double)}. It assumes {@code 0 < p < 1} and
* that the inverse cumulative probability lies in the bracket {@code
* (lower, upper]}. The implementation does simple bisection to find the
* smallest {@code p}-quantile {@code inf{x in Z | P(X<=x) >= p}}.
*
* @param p the cumulative probability
* @param lower a value satisfying {@code cumulativeProbability(lower) < p}
* @param upper a value satisfying {@code p <= cumulativeProbability(upper)}
* @return the smallest {@code p}-quantile of this distribution
*/
private static long solveInverseCumulativeProbability(double mean, final double p,
long lower, long upper) {<FILL_FUNCTION_BODY>}
public static double meanByCumulativeProbabilityAndValue(double p, long x, double precision) {
checkProbability(p);
assert x > 0 && x < UPPER_BOUND;
double lower = 0;
double upper = UPPER_BOUND;
while (lower + precision < upper) {
double m = (lower + upper) / 2;
double pm = checkedCumulativeProbability(m, x);
if (pm < p) {
upper = m;
} else {
lower = m;
}
}
return lower;
}
/**
* Computes the cumulative probability function and checks for {@code NaN}
* values returned. Throws {@code MathInternalError} if the value is
* {@code NaN}. Rethrows any exception encountered evaluating the cumulative
* probability function. Throws {@code MathInternalError} if the cumulative
* probability function returns {@code NaN}.
*
* @param argument input value
* @return the cumulative probability
* @throws AssertionError if the cumulative probability is {@code NaN}
*/
private static double checkedCumulativeProbability(double mean, long argument) {
double result = cumulativeProbability(mean, argument);
if (Double.isNaN(result)) {
throw new AssertionError("Discrete cumulative probability function returned NaN " +
"for argument " + argument);
}
return result;
}
}
|
while (lower + 1 < upper) {
long xm = (lower + upper) / 2;
if (xm < lower || xm > upper) {
/*
* Overflow.
* There will never be an overflow in both calculation methods
* for xm at the same time
*/
xm = lower + (upper - lower) / 2;
}
double pm = checkedCumulativeProbability(mean, xm);
if (pm >= p) {
upper = xm;
} else {
lower = xm;
}
}
return upper;
| 1,285 | 156 | 1,441 |
<no_super_class>
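Example (hedged): a brief sketch of the bisection-based inverse above; mean = 100 is an arbitrary placeholder, and the asserts merely restate the quantile definition.
double mean = 100.0;
long q = PoissonDistribution.inverseCumulativeProbability(mean, 0.999);
// q is the smallest x with P(X <= x) >= 0.999 for a Poisson(mean) variable:
assert PoissonDistribution.cumulativeProbability(mean, q) >= 0.999;
assert PoissonDistribution.cumulativeProbability(mean, q - 1) < 0.999;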
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/impl/util/math/Precision.java
|
Precision
|
isEquals
|
class Precision {
/**
* Offset to order signed double numbers lexicographically.
*/
private static final long SGN_MASK = 0x8000000000000000L;
/**
* Positive zero bits.
*/
private static final long POSITIVE_ZERO_DOUBLE_BITS = Double.doubleToRawLongBits(+0.0);
/**
* Negative zero bits.
*/
private static final long NEGATIVE_ZERO_DOUBLE_BITS = Double.doubleToRawLongBits(-0.0);
/**
* Returns {@code true} if there is no double value strictly between the
* arguments or the difference between them is within the range of allowed
* error (inclusive).
*
* @param x First value.
* @param y Second value.
* @param eps Amount of allowed absolute error.
* @return {@code true} if the values are two adjacent floating point
* numbers or they are within range of each other.
*/
public static boolean isEquals(double x, double y, double eps) {
return isEquals(x, y, 1) || Math.abs(y - x) <= eps;
}
/**
* Returns {@code true} if both arguments are equal or within the range of allowed
* error (inclusive).
* <p>
* Two float numbers are considered equal if there are {@code (maxUlps - 1)}
* (or fewer) floating point numbers between them, i.e. two adjacent
* floating point numbers are considered equal.
* <p>
* Adapted from <a
* href="http://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/">
* Bruce Dawson</a>
*
* @param x first value
* @param y second value
* @param maxUlps {@code (maxUlps - 1)} is the number of floating point
* values between {@code x} and {@code y}.
* @return {@code true} if there are fewer than {@code maxUlps} floating
* point values between {@code x} and {@code y}.
*/
public static boolean isEquals(final double x, final double y, final int maxUlps) {<FILL_FUNCTION_BODY>}
}
|
final long xInt = Double.doubleToRawLongBits(x);
final long yInt = Double.doubleToRawLongBits(y);
final boolean isEqual;
if (((xInt ^ yInt) & SGN_MASK) == 0L) {
// number have same sign, there is no risk of overflow
isEqual = Math.abs(xInt - yInt) <= maxUlps;
} else {
// number have opposite signs, take care of overflow
final long deltaPlus;
final long deltaMinus;
if (xInt < yInt) {
deltaPlus = yInt - POSITIVE_ZERO_DOUBLE_BITS;
deltaMinus = xInt - NEGATIVE_ZERO_DOUBLE_BITS;
} else {
deltaPlus = xInt - POSITIVE_ZERO_DOUBLE_BITS;
deltaMinus = yInt - NEGATIVE_ZERO_DOUBLE_BITS;
}
if (deltaPlus > maxUlps) {
isEqual = false;
} else {
isEqual = deltaMinus <= (maxUlps - deltaPlus);
}
}
return isEqual && !Double.isNaN(x) && !Double.isNaN(y);
| 624 | 332 | 956 |
<no_super_class>
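Example (hedged): a short illustration of the ULP-based comparison above; 0.1 + 0.2 is the classic case of a double that sits exactly one ULP away from 0.3.
double sum = 0.1 + 0.2;                                 // 0.30000000000000004
boolean exact = (sum == 0.3);                           // false
boolean withinEps = Precision.isEquals(sum, 0.3, 1e-9); // true: absolute-error check
boolean adjacent = Precision.isEquals(sum, 0.3, 1);     // true: the two values are adjacent doubles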
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/replication/DefaultEventualConsistencyStrategy.java
|
DefaultEventualConsistencyStrategy
|
decideOnRemoteModification
|
class DefaultEventualConsistencyStrategy {
private DefaultEventualConsistencyStrategy() {
}
/**
* Returns the acceptance decision, should be made about the modification operation in the
* given {@code context}, aiming to modify the given {@code entry}. This method doesn't do any
* changes to {@code entry} nor {@code context} state. {@link MapRemoteOperations} and
* {@link SetRemoteOperations} method implementations should guide the result of calling this
* method to do something to <i>actually</i> apply the remote operation.
*
* @param entry the entry to be modified
* @param context the remote operation context
* @return if the remote operation should be accepted or discarded
*/
public static AcceptanceDecision decideOnRemoteModification(
ReplicableEntry entry, RemoteOperationContext<?> context) {<FILL_FUNCTION_BODY>}
/**
* Decision, if {@link MapRemoteOperations remote modification operation} should be accepted
* or discarded. Used in {@link DefaultEventualConsistencyStrategy}.
*/
public enum AcceptanceDecision {
/**
* Acceptance decision -- the remote modification operation is applied to the local
* {@link ChronicleHash} state.
*/
ACCEPT,
/**
* Discard decision -- the remote modification operation is rejected.
*/
DISCARD
}
}
|
long remoteTimestamp = context.remoteTimestamp();
long originTimestamp = entry.originTimestamp();
// Last write wins
if (remoteTimestamp > originTimestamp)
return ACCEPT;
if (remoteTimestamp < originTimestamp)
return DISCARD;
// remoteTimestamp == originTimestamp below
byte remoteIdentifier = context.remoteIdentifier();
byte originIdentifier = entry.originIdentifier();
// Lower identifier wins
if (remoteIdentifier < originIdentifier)
return ACCEPT;
if (remoteIdentifier > originIdentifier)
return DISCARD;
// remoteTimestamp == originTimestamp && remoteIdentifier == originIdentifier below
// This could happen only if a node with the origin identifier was lost, a new Chronicle Hash
// instance was started up with a system time which for some reason is so late that it
// provides the same time as the "old" node with this identifier did before it was
// lost. (This is almost a theoretical situation.) In this case, give advantage to fresh
// entry updates to the "new" node. Entries with the same id and timestamp, bootstrapped
// "back" from other nodes in the system, are discarded on this new node (this is the purpose
// of the condition originIdentifier == currentNodeIdentifier). But those new updates
// should win on other nodes.
//
// Another case, in which we could have remoteTimestamp == originTimestamp &&
// remoteIdentifier == originIdentifier, is replication of the very same entry, if an
// entry is bootstrapped "back" from a remote node to its origin node. In this case the
// following condition also works correctly (the update is discarded, due to its redundancy).
return originIdentifier == context.currentNodeIdentifier() ? DISCARD : ACCEPT;
| 351 | 418 | 769 |
<no_super_class>
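Example (hedged): the decision rule above restated as a pure function over plain timestamps and identifiers; this only illustrates the ordering (newer timestamp, then lower identifier, then the origin-node check) and is not the real entry/context API. A static import of AcceptanceDecision.ACCEPT/DISCARD is assumed.
static AcceptanceDecision decide(long remoteTime, byte remoteId,
                                 long originTime, byte originId, byte currentNodeId) {
    if (remoteTime != originTime)
        return remoteTime > originTime ? ACCEPT : DISCARD; // last write wins
    if (remoteId != originId)
        return remoteId < originId ? ACCEPT : DISCARD;     // lower identifier wins on a tie
    return originId == currentNodeId ? DISCARD : ACCEPT;   // full tie: discard on the origin node
}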
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/replication/TimeProvider.java
|
TimeProvider
|
systemTimeIntervalBetween
|
class TimeProvider {
private static final AtomicLong lastTimeHolder = new AtomicLong();
private static LongSupplier millisecondSupplier = System::currentTimeMillis;
private TimeProvider() {
}
/**
* Returns a non-decreasing number, assumed to be used as a "timestamp".
* <p>
* Approximate system time interval between two calls of this method is retrievable via
* {@link #systemTimeIntervalBetween(long, long, TimeUnit)}, applied to the returned values
* from those {@code currentTime()} calls.
* <p>
* Safe and scalable for concurrent use from multiple threads.
*
* @return the current timestamp
*/
public static long currentTime() {
long now = MILLISECONDS.toNanos(millisecondSupplier.getAsLong());
while (true) {
long lastTime = lastTimeHolder.get();
if (now <= lastTime)
return lastTime;
if (lastTimeHolder.compareAndSet(lastTime, now))
return now;
Jvm.nanoPause();
}
}
/**
* Returns system time interval (i. e. wall time interval) between two time values, taken using
* {@link #currentTime()} method, with the highest possible precision, in the given time units.
*
* @param earlierTime {@link #currentTime()} result, taken at some moment in the past (earlier)
* @param laterTime {@link #currentTime()} result, taken at some moment in the past, but later
* than {@code earlierTime} was taken ("later" means there is a happens-before relationship
* between the two subject {@code currentTime()} calls)
* @param systemTimeIntervalUnit the time units to return system time interval in
* @return wall time interval between the specified moments in the given time unit
*/
public static long systemTimeIntervalBetween(
long earlierTime, long laterTime, TimeUnit systemTimeIntervalUnit) {<FILL_FUNCTION_BODY>}
static void overrideMillisecondSupplier(final LongSupplier millisecondSupplier) {
TimeProvider.millisecondSupplier = millisecondSupplier;
}
}
|
long intervalNanos = laterTime - earlierTime;
return systemTimeIntervalUnit.convert(intervalNanos, NANOSECONDS);
| 549 | 39 | 588 |
<no_super_class>
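Example (hedged): a quick usage sketch of the two methods above; TimeUnit comes from java.util.concurrent.
long t0 = TimeProvider.currentTime();
// ... some replicated work ...
long t1 = TimeProvider.currentTime();
long elapsedMs = TimeProvider.systemTimeIntervalBetween(t0, t1, TimeUnit.MILLISECONDS);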
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/ListMarshaller.java
|
ListMarshaller
|
read
|
class ListMarshaller<T>
implements BytesReader<List<T>>, BytesWriter<List<T>>, StatefulCopyable<ListMarshaller<T>> {
// Config fields
private BytesReader<T> elementReader;
private BytesWriter<? super T> elementWriter;
/**
* Constructs a {@code ListMarshaller} with the given list elements' serializers.
* <p>
* Use static factory {@link #of(BytesReader, BytesWriter)} instead of this constructor
* directly.
*
* @param elementReader list elements' reader
* @param elementWriter list elements' writer
*/
public ListMarshaller(BytesReader<T> elementReader, BytesWriter<? super T> elementWriter) {
this.elementReader = elementReader;
this.elementWriter = elementWriter;
}
/**
* Returns a {@code ListMarshaller} which uses the given list elements' serializers.
*
* @param elementReader list elements' reader
* @param elementWriter list elements' writer
* @param <T> type of list elements
* @return a {@code ListMarshaller} which uses the given list elements' serializers
*/
public static <T> ListMarshaller<T> of(
BytesReader<T> elementReader, BytesWriter<? super T> elementWriter) {
return new ListMarshaller<>(elementReader, elementWriter);
}
/**
* Returns a {@code ListMarshaller} which uses the given marshaller as both reader and writer of
* list elements. Example: <pre><code>
* ChronicleMap
* .of(String.class,{@code (Class<List<Integer>>)} ((Class) List.class))
* .valueMarshaller(ListMarshaller.of(IntegerMarshaller.INSTANCE))
* ...</code></pre>
*
* @param elementMarshaller list elements' marshaller
* @param <T> type of list elements
* @param <M> type of list elements' marshaller
* @return a {@code ListMarshaller} which uses the given list elements' marshaller
*/
public static <T, M extends BytesReader<T> & BytesWriter<? super T>> ListMarshaller<T> of(
M elementMarshaller) {
return of(elementMarshaller, elementMarshaller);
}
@NotNull
@Override
public List<T> read(Bytes in, @Nullable List<T> using) {<FILL_FUNCTION_BODY>}
@Override
public void write(Bytes out, @NotNull List<T> toWrite) {
out.writeInt(toWrite.size());
// indexed loop to avoid garbage creation
//noinspection ForLoopReplaceableByForEach
for (int i = 0; i < toWrite.size(); i++) {
elementWriter.write(out, toWrite.get(i));
}
}
@Override
public ListMarshaller<T> copy() {
if (elementReader instanceof StatefulCopyable ||
elementWriter instanceof StatefulCopyable) {
return new ListMarshaller<>(copyIfNeeded(elementReader), copyIfNeeded(elementWriter));
} else {
return this;
}
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
elementReader = wireIn.read(() -> "elementReader").typedMarshallable();
elementWriter = wireIn.read(() -> "elementWriter").typedMarshallable();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
wireOut.write(() -> "elementReader").typedMarshallable(elementReader);
wireOut.write(() -> "elementWriter").typedMarshallable(elementWriter);
}
}
|
int size = in.readInt();
if (using == null) {
using = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
using.add(null);
}
} else if (using.size() < size) {
while (using.size() < size) {
using.add(null);
}
} else if (using.size() > size) {
using.subList(size, using.size()).clear();
}
for (int i = 0; i < size; i++) {
using.set(i, elementReader.read(in, using.get(i)));
}
return using;
| 970 | 175 | 1,145 |
<no_super_class>
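Example (hedged): beyond the ChronicleMap builder snippet in the Javadoc above, the marshaller can be exercised directly against a Bytes buffer; IntegerMarshaller.INSTANCE is assumed to exist as referenced in that Javadoc.
// Round-trip a List<Integer> through an on-heap elastic Bytes buffer.
ListMarshaller<Integer> marshaller = ListMarshaller.of(IntegerMarshaller.INSTANCE);
Bytes<?> bytes = Bytes.allocateElasticOnHeap(64);
marshaller.write(bytes, java.util.Arrays.asList(1, 2, 3));
List<Integer> copy = marshaller.read(bytes, null); // [1, 2, 3]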
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/MapMarshaller.java
|
MapMarshaller
|
read
|
class MapMarshaller<K, V> implements BytesReader<Map<K, V>>, BytesWriter<Map<K, V>>,
StatefulCopyable<MapMarshaller<K, V>> {
// Config fields
private BytesReader<K> keyReader;
private BytesWriter<? super K> keyWriter;
private BytesReader<V> valueReader;
private BytesWriter<? super V> valueWriter;
// Cache fields
private transient Deque<K> orderedKeys;
private transient Deque<V> orderedValues;
/**
* Constructs a {@code MapMarshaller} with the given map keys' and values' serializers.
*
* @param keyReader map keys' reader
* @param keyWriter map keys' writer
* @param valueReader map values' reader
* @param valueWriter map values' writer
*/
public MapMarshaller(
BytesReader<K> keyReader, BytesWriter<? super K> keyWriter,
BytesReader<V> valueReader, BytesWriter<? super V> valueWriter) {
this.keyReader = keyReader;
this.keyWriter = keyWriter;
this.valueReader = valueReader;
this.valueWriter = valueWriter;
initTransients();
}
private void initTransients() {
orderedKeys = new ArrayDeque<>();
orderedValues = new ArrayDeque<>();
}
@NotNull
@Override
public Map<K, V> read(Bytes in, @Nullable Map<K, V> using) {<FILL_FUNCTION_BODY>}
@Override
public void write(Bytes out, @NotNull Map<K, V> toWrite) {
out.writeInt(toWrite.size());
toWrite.forEach((k, v) -> {
keyWriter.write(out, k);
valueWriter.write(out, v);
});
}
@Override
public MapMarshaller<K, V> copy() {
return new MapMarshaller<>(copyIfNeeded(keyReader), copyIfNeeded(keyWriter),
copyIfNeeded(valueReader), copyIfNeeded(valueWriter));
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
keyReader = wireIn.read(() -> "keyReader").typedMarshallable();
keyWriter = wireIn.read(() -> "keyWriter").typedMarshallable();
valueReader = wireIn.read(() -> "valueReader").typedMarshallable();
valueWriter = wireIn.read(() -> "valueWriter").typedMarshallable();
initTransients();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
wireOut.write(() -> "keyReader").typedMarshallable(keyReader);
wireOut.write(() -> "keyWriter").typedMarshallable(keyWriter);
wireOut.write(() -> "valueReader").typedMarshallable(valueReader);
wireOut.write(() -> "valueWriter").typedMarshallable(valueWriter);
}
}
|
int size = in.readInt();
if (using == null) {
using = new HashMap<>(((int) (size / 0.75)));
for (int i = 0; i < size; i++) {
using.put(keyReader.read(in, null), valueReader.read(in, null));
}
} else {
using.forEach((k, v) -> {
orderedKeys.add(k);
orderedValues.add(v);
});
using.clear();
for (int i = 0; i < size; i++) {
using.put(keyReader.read(in, orderedKeys.pollFirst()),
valueReader.read(in, orderedValues.pollFirst()));
}
orderedKeys.clear(); // for GC, avoid zombie object links
orderedValues.clear();
}
return using;
| 780 | 220 | 1,000 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/SetMarshaller.java
|
SetMarshaller
|
read
|
class SetMarshaller<T>
implements BytesReader<Set<T>>, BytesWriter<Set<T>>, StatefulCopyable<SetMarshaller<T>> {
// Config fields
private BytesReader<T> elementReader;
private BytesWriter<? super T> elementWriter;
/**
* Cache field
*/
private transient Deque<T> orderedElements;
/**
* Constructs a {@code SetMarshaller} with the given set elements' serializers.
* <p>
* Use static factory {@link #of(BytesReader, BytesWriter)} instead of this constructor
* directly.
*
* @param elementReader set elements' reader
* @param elementWriter set elements' writer
*/
public SetMarshaller(BytesReader<T> elementReader, BytesWriter<? super T> elementWriter) {
this.elementReader = elementReader;
this.elementWriter = elementWriter;
initTransients();
}
/**
* Returns a {@code SetMarshaller} which uses the given set elements' serializers.
*
* @param elementReader set elements' reader
* @param elementWriter set elements' writer
* @param <T> type of set elements
* @return a {@code SetMarshaller} which uses the given set elements' serializers
*/
public static <T> SetMarshaller<T> of(
BytesReader<T> elementReader, BytesWriter<? super T> elementWriter) {
return new SetMarshaller<>(elementReader, elementWriter);
}
/**
* Returns a {@code SetMarshaller} which uses the given marshaller as both reader and writer of
* set elements. Example: <pre><code>
* ChronicleMap
* .of(String.class,{@code (Class<Set<Integer>>)} ((Class) Set.class))
* .valueMarshaller(SetMarshaller.of(IntegerMarshaller.INSTANCE))
* ...</code></pre>
*
* @param elementMarshaller set elements' marshaller
* @param <T> type of set elements
* @param <M> type of set elements' marshaller
* @return a {@code SetMarshaller} which uses the given set elements' marshaller
*/
public static <T, M extends BytesReader<T> & BytesWriter<? super T>> SetMarshaller<T> of(
M elementMarshaller) {
return of(elementMarshaller, elementMarshaller);
}
private void initTransients() {
orderedElements = new ArrayDeque<>();
}
@NotNull
@Override
public Set<T> read(Bytes in, @Nullable Set<T> using) {<FILL_FUNCTION_BODY>}
@Override
public void write(Bytes out, @NotNull Set<T> toWrite) {
out.writeInt(toWrite.size());
toWrite.forEach(e -> elementWriter.write(out, e));
}
@Override
public SetMarshaller<T> copy() {
return new SetMarshaller<>(copyIfNeeded(elementReader), copyIfNeeded(elementWriter));
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
elementReader = wireIn.read(() -> "elementReader").typedMarshallable();
elementWriter = wireIn.read(() -> "elementWriter").typedMarshallable();
initTransients();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
wireOut.write(() -> "elementReader").typedMarshallable(elementReader);
wireOut.write(() -> "elementWriter").typedMarshallable(elementWriter);
}
}
|
int size = in.readInt();
if (using == null) {
using = new HashSet<>((int) (size / 0.75));
for (int i = 0; i < size; i++) {
using.add(elementReader.read(in, null));
}
} else {
orderedElements.addAll(using);
using.clear();
for (int i = 0; i < size; i++) {
using.add(elementReader.read(in, orderedElements.pollFirst()));
}
orderedElements.clear(); // for GC, avoid zombie object links
}
return using;
| 952 | 163 | 1,115 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ByteArrayDataAccess.java
|
ByteArrayDataAccess
|
getUsing
|
class ByteArrayDataAccess extends AbstractData<byte[]> implements DataAccess<byte[]> {
/**
* Cache field
*/
private transient BytesStore<?, ?> bs;
/**
* State field
*/
private transient byte[] array;
public ByteArrayDataAccess() {
initTransients();
}
private void initTransients() {
bs = null;
}
@Override
public RandomDataInput bytes() {
return bs;
}
@Override
public long offset() {
return bs.start();
}
@Override
public long size() {
return bs.capacity();
}
@Override
public byte[] get() {
return array;
}
@Override
public byte[] getUsing(@Nullable byte[] using) {<FILL_FUNCTION_BODY>}
@Override
public Data<byte[]> getData(@NotNull byte[] instance) {
array = instance;
bs = BytesStore.wrap(array);
return this;
}
@Override
public void uninit() {
array = null;
bs = null;
}
@Override
public DataAccess<byte[]> copy() {
return new ByteArrayDataAccess();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
// no fields to write
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
// no fields to read
initTransients();
}
@Override
public String toString() {
return new String(array, StandardCharsets.UTF_8);
}
}
|
if (using == null || using.length != array.length)
using = new byte[array.length];
System.arraycopy(array, 0, using, 0, array.length);
return using;
| 439 | 57 | 496 |
<methods>public boolean equals(java.lang.Object) ,public int hashCode() ,public java.lang.String toString() <variables>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ByteArraySizedReader.java
|
ByteArraySizedReader
|
read
|
class ByteArraySizedReader
implements SizedReader<byte[]>, EnumMarshallable<ByteArraySizedReader> {
public static final ByteArraySizedReader INSTANCE = new ByteArraySizedReader();
private ByteArraySizedReader() {
}
@NotNull
@Override
public byte[] read(@NotNull Bytes in, long size, @Nullable byte[] using) {<FILL_FUNCTION_BODY>}
@NotNull
@Override
public ByteArraySizedReader readResolve() {
return INSTANCE;
}
}
|
if (size < 0L || size > (long) Integer.MAX_VALUE) {
throw new IORuntimeException("byte[] size should be non-negative int, " +
size + " given. Memory corruption?");
}
int arrayLength = (int) size;
if (using == null || arrayLength != using.length)
using = new byte[arrayLength];
in.read(using);
return using;
| 144 | 109 | 253 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ByteBufferDataAccess.java
|
ByteBufferDataAccess
|
getData
|
class ByteBufferDataAccess extends AbstractData<ByteBuffer>
implements DataAccess<ByteBuffer> {
// Cache fields
private transient VanillaBytes<Void> bytes;
// State fields
private transient ByteBuffer bb;
private transient BytesStore bytesStore;
public ByteBufferDataAccess() {
initTransients();
}
private void initTransients() {
bytes = VanillaBytes.vanillaBytes();
}
@Override
public RandomDataInput bytes() {
return bytesStore;
}
@Override
public long offset() {
return bb.position();
}
@Override
public long size() {
return bb.remaining();
}
@Override
public ByteBuffer get() {
return bb;
}
@Override
public ByteBuffer getUsing(@Nullable ByteBuffer using) {
if (using == null || using.capacity() < bb.remaining()) {
using = ByteBuffer.allocate(bb.remaining());
} else {
using.position(0);
using.limit(bb.remaining());
}
bytes.bytesStore(bytesStore, bb.position(), bb.remaining());
bytes.read(using);
using.flip();
return using;
}
@Override
public Data<ByteBuffer> getData(@NotNull ByteBuffer instance) {<FILL_FUNCTION_BODY>}
@Override
public void uninit() {
bb = null;
bytesStore.release(ReferenceOwner.INIT);
bytesStore = null;
}
@Override
public DataAccess<ByteBuffer> copy() {
return new ByteBufferDataAccess();
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
// no fields to read
initTransients();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
// no fields to write
}
}
|
bb = instance;
ByteOrder originalOrder = instance.order();
bytesStore = BytesStore.follow(instance);
instance.order(originalOrder);
return this;
| 507 | 48 | 555 |
<methods>public boolean equals(java.lang.Object) ,public int hashCode() ,public java.lang.String toString() <variables>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ByteBufferSizedReader.java
|
ByteBufferSizedReader
|
read
|
class ByteBufferSizedReader
implements SizedReader<ByteBuffer>, EnumMarshallable<ByteBufferSizedReader> {
public static final ByteBufferSizedReader INSTANCE = new ByteBufferSizedReader();
private ByteBufferSizedReader() {
}
@NotNull
@Override
public ByteBuffer read(@NotNull Bytes in, long size, @Nullable ByteBuffer using) {<FILL_FUNCTION_BODY>}
@NotNull
@Override
public ByteBufferSizedReader readResolve() {
return INSTANCE;
}
}
|
if (size < 0L || size > (long) Integer.MAX_VALUE)
throw new IllegalArgumentException("ByteBuffer size should be non-negative int, " +
size + " given. Memory corruption?");
int bufferCap = (int) size;
if (using == null || using.capacity() < bufferCap) {
using = ByteBuffer.allocate(bufferCap);
} else {
using.position(0);
using.limit(bufferCap);
}
in.read(using);
using.flip();
return using;
| 143 | 140 | 283 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ByteableDataAccess.java
|
ByteableDataAccess
|
getUsing
|
class ByteableDataAccess<T extends Byteable> extends InstanceCreatingMarshaller<T>
implements DataAccess<T>, Data<T> {
/**
* State field
*/
private transient T instance;
public ByteableDataAccess(Type tClass) {
super(tClass);
}
@Override
public RandomDataInput bytes() {
return instance.bytesStore();
}
@Override
public long offset() {
return instance.offset();
}
@Override
public long size() {
return instance.maxSize();
}
@Override
public T get() {
return instance;
}
@Override
public T getUsing(@Nullable T using) {<FILL_FUNCTION_BODY>}
@Override
public int hashCode() {
return dataHashCode();
}
@Override
public boolean equals(Object obj) {
return dataEquals(obj);
}
@Override
public String toString() {
return get().toString();
}
@Override
public Data<T> getData(@NotNull T instance) {
this.instance = instance;
return this;
}
@Override
public void uninit() {
instance = null;
}
@Override
public DataAccess<T> copy() {
return new ByteableDataAccess<>(tType());
}
}
|
if (using == null)
using = createInstance();
using.bytesStore(instance.bytesStore(), offset(), size());
return using;
| 363 | 39 | 402 |
<methods>public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>private java.lang.reflect.Type tClass
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ByteableSizedReader.java
|
ByteableSizedReader
|
read
|
class ByteableSizedReader<T extends Byteable> extends InstanceCreatingMarshaller<T>
implements SizedReader<T> {
public ByteableSizedReader(Class<T> tClass) {
super(tClass);
}
@NotNull
@Override
public final T read(@NotNull Bytes in, long size, @Nullable T using) {<FILL_FUNCTION_BODY>}
}
|
if (using == null)
using = createInstance();
using.bytesStore(in.bytesStore(), in.readPosition(), size);
return using;
| 106 | 42 | 148 |
<methods>public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>private java.lang.reflect.Type tClass
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/BytesAsSizedReader.java
|
BytesAsSizedReader
|
copy
|
class BytesAsSizedReader<T>
implements SizedReader<T>, StatefulCopyable<BytesAsSizedReader<T>> {
/**
* Config field
*/
private BytesReader<T> reader;
public BytesAsSizedReader(BytesReader<T> reader) {
this.reader = reader;
}
@NotNull
@Override
public T read(Bytes in, long size, @Nullable T using) {
return reader.read(in, using);
}
@Override
public BytesAsSizedReader<T> copy() {<FILL_FUNCTION_BODY>}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
reader = wireIn.read(() -> "reader").typedMarshallable();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
wireOut.write(() -> "reader").typedMarshallable(reader);
}
}
|
if (reader instanceof StatefulCopyable) {
return new BytesAsSizedReader<>(StatefulCopyable.copyIfNeeded(reader));
} else {
return this;
}
| 251 | 52 | 303 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/BytesMarshallableDataAccess.java
|
BytesMarshallableDataAccess
|
initBytes
|
class BytesMarshallableDataAccess<T extends BytesMarshallable>
extends InstanceCreatingMarshaller<T> implements DataAccess<T>, Data<T> {
// Cache fields
private transient boolean bytesInit;
private transient Bytes bytes;
private transient VanillaBytes targetBytes;
/**
* State field
*/
private transient T instance;
public BytesMarshallableDataAccess(Class<T> tClass) {
this(tClass, DEFAULT_BYTES_CAPACITY);
}
private BytesMarshallableDataAccess(Type tClass, long bytesCapacity) {
super(tClass);
initTransients(bytesCapacity);
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
super.readMarshallable(wireIn);
initTransients(DEFAULT_BYTES_CAPACITY);
}
private void initTransients(long bytesCapacity) {
bytes = DefaultElasticBytes.allocateDefaultElasticBytes(bytesCapacity);
targetBytes = VanillaBytes.vanillaBytes();
}
@Override
public RandomDataInput bytes() {
initBytes();
return bytes.bytesStore();
}
private void initBytes() {<FILL_FUNCTION_BODY>}
@Override
public long offset() {
return 0;
}
@Override
public long size() {
initBytes();
return bytes.readRemaining();
}
@Override
public void writeTo(RandomDataOutput target, long targetOffset) {
if (bytesInit) {
target.write(targetOffset, bytes(), offset(), size());
} else {
targetBytes.bytesStore((BytesStore) target, targetOffset,
target.capacity() - targetOffset);
targetBytes.writePosition(targetOffset);
instance.writeMarshallable(targetBytes);
targetBytes.bytesStore(BytesStore.empty(), 0, 0);
}
}
@Override
public T get() {
return instance;
}
@Override
public T getUsing(@Nullable T using) {
if (using == null)
using = createInstance();
initBytes();
using.readMarshallable(bytes);
bytes.readPosition(0);
return using;
}
@Override
public int hashCode() {
return dataHashCode();
}
@Override
public boolean equals(Object obj) {
return dataEquals(obj);
}
@Override
public String toString() {
return get().toString();
}
@Override
public Data<T> getData(@NotNull T instance) {
this.instance = instance;
bytesInit = false;
return this;
}
@Override
public void uninit() {
instance = null;
}
@Override
public DataAccess<T> copy() {
return new BytesMarshallableDataAccess<>(tType(), bytes.realCapacity());
}
}
|
if (!bytesInit) {
bytes.clear();
instance.writeMarshallable(bytes);
bytesInit = true;
}
| 771 | 38 | 809 |
<methods>public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>private java.lang.reflect.Type tClass
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/BytesMarshallableReader.java
|
BytesMarshallableReader
|
read
|
class BytesMarshallableReader<T extends BytesMarshallable>
extends InstanceCreatingMarshaller<T> implements SizedReader<T>, BytesReader<T> {
public BytesMarshallableReader(Class<T> tClass) {
super(tClass);
}
@NotNull
@Override
public T read(@NotNull Bytes in, long size, @Nullable T using) {
return read(in, using);
}
@NotNull
@Override
public T read(Bytes in, @Nullable T using) {<FILL_FUNCTION_BODY>}
}
|
if (using == null)
using = createInstance();
using.readMarshallable(in);
return using;
| 153 | 34 | 187 |
<methods>public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>private java.lang.reflect.Type tClass
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/BytesMarshallableReaderWriter.java
|
BytesMarshallableReaderWriter
|
read
|
class BytesMarshallableReaderWriter<V extends BytesMarshallable>
extends CachingCreatingMarshaller<V> {
private static final ThreadLocal<VanillaBytes> VANILLA_BYTES_TL = ThreadLocal.withInitial(VanillaBytes::vanillaBytes);
public BytesMarshallableReaderWriter(Class<V> vClass) {
super(vClass);
}
@NotNull
@Override
public V read(Bytes in, long size, @Nullable V using) {<FILL_FUNCTION_BODY>}
@Override
protected void writeToWire(Wire wire, @NotNull V toWrite) {
toWrite.writeMarshallable(wire.bytes());
}
}
|
if (using == null)
using = createInstance();
VanillaBytes vanillaBytes = VANILLA_BYTES_TL.get();
vanillaBytes.bytesStore(in.bytesStore(), in.readPosition(), size);
using.readMarshallable(vanillaBytes);
return using;
| 184 | 78 | 262 |
<methods>public void <init>(Class<V>) ,public long size(V) ,public void write(Bytes#RAW, long, V) <variables>static final ThreadLocal<java.lang.Object> LAST_TL,static final ThreadLocal<net.openhft.chronicle.wire.Wire> WIRE_TL
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/BytesSizedMarshaller.java
|
BytesSizedMarshaller
|
read
|
class BytesSizedMarshaller implements SizedReader<Bytes<?>>, SizedWriter<Bytes<?>> {
@Override
public Bytes<?> read(Bytes in, long size, Bytes<?> using) {<FILL_FUNCTION_BODY>}
@Override
public long size(Bytes<?> toWrite) {
return toWrite.readRemaining();
}
@Override
public void write(Bytes out, long size, Bytes<?> toWrite) {
out.write(toWrite, toWrite.readPosition(), size);
}
}
|
final int size0 = Maths.toInt32(size);
if (using == null)
using = Bytes.allocateElasticOnHeap(size0);
in.read(using, size0);
return using;
| 148 | 63 | 211 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/CachingCreatingMarshaller.java
|
CachingCreatingMarshaller
|
write
|
class CachingCreatingMarshaller<V>
extends InstanceCreatingMarshaller<V>
implements SizedReader<V>, SizedWriter<V> {
static final ThreadLocal<Wire> WIRE_TL = ThreadLocal.withInitial(
() -> WireType.BINARY_LIGHT.apply(Bytes.allocateElasticOnHeap(128)));
static final ThreadLocal<Object> LAST_TL = new ThreadLocal<>();
public CachingCreatingMarshaller(Class<V> vClass) {
super(vClass);
}
@Override
public long size(@NotNull V toWrite) {
Wire wire = WIRE_TL.get();
wire.bytes().clear();
writeToWire(wire, toWrite);
LAST_TL.set(toWrite);
return wire.bytes().readRemaining();
}
protected abstract void writeToWire(Wire wire, @NotNull V toWrite);
@Override
public void write(Bytes out, long size, @NotNull V toWrite) {<FILL_FUNCTION_BODY>}
}
|
if (LAST_TL.get() == toWrite) {
Wire wire = WIRE_TL.get();
if (wire.bytes().readRemaining() == size) {
out.write(wire.bytes());
wire.bytes().clear();
LAST_TL.remove();
return;
}
}
BinaryWire wire = Wires.binaryWireForWrite(out, out.writePosition(), size);
writeToWire(wire, toWrite);
| 282 | 124 | 406 |
<methods>public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>private java.lang.reflect.Type tClass
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/CharSequenceBytesReader.java
|
CharSequenceBytesReader
|
read
|
class CharSequenceBytesReader implements BytesReader<CharSequence>,
StatefulCopyable<CharSequenceBytesReader>, EnumMarshallable<CharSequenceBytesReader> {
public static final CharSequenceBytesReader INSTANCE = new CharSequenceBytesReader();
private CharSequenceBytesReader() {
}
@NotNull
@Override
public CharSequence read(Bytes in, @Nullable CharSequence using) {<FILL_FUNCTION_BODY>}
@Override
public CharSequenceBytesReader copy() {
return INSTANCE;
}
@NotNull
@Override
public CharSequenceBytesReader readResolve() {
return INSTANCE;
}
}
|
StringBuilder usingSB;
if (using instanceof StringBuilder) {
usingSB = (StringBuilder) using;
} else {
usingSB = new StringBuilder();
}
if (in.readUtf8(usingSB)) {
return usingSB;
} else {
throw new NullPointerException("BytesReader couldn't read null");
}
| 168 | 93 | 261 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/CharSequenceSizedReader.java
|
CharSequenceSizedReader
|
read
|
class CharSequenceSizedReader implements SizedReader<CharSequence>,
StatefulCopyable<CharSequenceSizedReader>, ReadResolvable<CharSequenceSizedReader> {
public static final CharSequenceSizedReader INSTANCE = new CharSequenceSizedReader();
private CharSequenceSizedReader() {
}
@NotNull
@Override
public CharSequence read(
@NotNull Bytes in, long size, @Nullable CharSequence using) {<FILL_FUNCTION_BODY>}
@Override
public CharSequenceSizedReader copy() {
return INSTANCE;
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
// no fields to read
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
// no fields to write
}
@NotNull
@Override
public CharSequenceSizedReader readResolve() {
return INSTANCE;
}
}
|
if (0 > size || size > Integer.MAX_VALUE)
throw new IllegalStateException("positive int size expected, " + size + " given");
int csLen = (int) size;
StringBuilder usingSB;
if (using instanceof StringBuilder) {
usingSB = ((StringBuilder) using);
usingSB.setLength(0);
usingSB.ensureCapacity(csLen);
} else {
usingSB = new StringBuilder(csLen);
}
BytesUtil.parseUtf8(in, usingSB, csLen);
return usingSB;
| 247 | 147 | 394 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/CharSequenceUtf8DataAccess.java
|
CharSequenceUtf8DataAccess
|
getUsing
|
class CharSequenceUtf8DataAccess
extends AbstractCharSequenceUtf8DataAccess<CharSequence> {
public CharSequenceUtf8DataAccess() {
this(DefaultElasticBytes.DEFAULT_BYTES_CAPACITY);
}
private CharSequenceUtf8DataAccess(long bytesCapacity) {
super(bytesCapacity);
}
@Override
public CharSequence getUsing(@Nullable CharSequence using) {<FILL_FUNCTION_BODY>}
@Override
public DataAccess<CharSequence> copy() {
return new CharSequenceUtf8DataAccess(bytes().realCapacity());
}
}
|
StringBuilder sb;
if (using instanceof StringBuilder) {
sb = (StringBuilder) using;
sb.setLength(0);
} else {
sb = new StringBuilder(cs.length());
}
sb.append(cs);
return sb;
| 160 | 71 | 231 |
<methods>public net.openhft.chronicle.bytes.RandomDataInput bytes() ,public java.lang.CharSequence get() ,public Data<java.lang.CharSequence> getData(java.lang.CharSequence) ,public long offset() ,public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public long size() ,public void uninit() ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>private transient Bytes#RAW bytes,transient java.lang.CharSequence cs
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/CommonMarshallableReaderWriter.java
|
CommonMarshallableReaderWriter
|
read
|
class CommonMarshallableReaderWriter<V extends CommonMarshallable>
extends CachingCreatingMarshaller<V> {
public CommonMarshallableReaderWriter(Class<V> vClass) {
super(vClass);
}
@NotNull
@Override
public V read(Bytes in, long size, @Nullable V using) {<FILL_FUNCTION_BODY>}
@Override
protected void writeToWire(Wire wire, @NotNull V toWrite) {
if (toWrite.usesSelfDescribingMessage()) {
((WriteMarshallable) toWrite).writeMarshallable(wire);
} else {
((WriteBytesMarshallable) toWrite).writeMarshallable(wire.bytes());
}
}
}
|
if (using == null)
using = createInstance();
if (using.usesSelfDescribingMessage()) {
((ReadMarshallable) using).readMarshallable(Wires.binaryWireForRead(in, in.readPosition(), size));
} else {
((ReadBytesMarshallable) using).readMarshallable(in);
}
return using;
| 190 | 96 | 286 |
<methods>public void <init>(Class<V>) ,public long size(V) ,public void write(Bytes#RAW, long, V) <variables>static final ThreadLocal<java.lang.Object> LAST_TL,static final ThreadLocal<net.openhft.chronicle.wire.Wire> WIRE_TL
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ConstantSizeMarshaller.java
|
ConstantSizeMarshaller
|
writeSize
|
class ConstantSizeMarshaller implements SizeMarshaller {
/**
* Config field
*/
private long constantSize;
public ConstantSizeMarshaller(long constantSize) {
this.constantSize = constantSize;
}
@Override
public int storingLength(long size) {
return 0;
}
@Override
public long minStorableSize() {
return constantSize;
}
@Override
public long maxStorableSize() {
return constantSize;
}
@Override
public int minStoringLengthOfSizesInRange(long minSize, long maxSize) {
return 0;
}
@Override
public int maxStoringLengthOfSizesInRange(long minSize, long maxSize) {
return 0;
}
@Override
public void writeSize(Bytes out, long sizeToWrite) {<FILL_FUNCTION_BODY>}
@Override
public long readSize(Bytes in) {
return constantSize;
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
constantSize = wireIn.read(() -> "constantSize").int64();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
wireOut.write(() -> "constantSize").int64(constantSize);
}
}
|
if (sizeToWrite != constantSize) {
throw new IllegalArgumentException(
"sizeToWrite: " + sizeToWrite + ", constant size should be: " + constantSize);
}
// do nothing
| 357 | 55 | 412 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/DefaultElasticBytes.java
|
DefaultElasticBytes
|
allocateDefaultElasticBytes
|
class DefaultElasticBytes {
static final int DEFAULT_BYTES_CAPACITY = 32;
private DefaultElasticBytes() {
}
static Bytes<?> allocateDefaultElasticBytes(long bytesCapacity) {<FILL_FUNCTION_BODY>}
}
|
if (bytesCapacity <= 0x7FFFFFF0) {
return Bytes.elasticHeapByteBuffer((int) bytesCapacity);
} else {
return Bytes.allocateElasticDirect(bytesCapacity);
}
| 75 | 61 | 136 |
<no_super_class>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/DoubleDataAccess.java
|
DoubleDataAccess
|
bytes
|
class DoubleDataAccess extends AbstractData<Double>
implements DataAccess<Double>, Data<Double> {
// Cache fields
private transient boolean bsInit;
private transient BytesStore bs;
/**
* State field
*/
private transient Double instance;
public DoubleDataAccess() {
initTransients();
}
private void initTransients() {
bs = BytesStore.wrap(new byte[8]);
}
@Override
public RandomDataInput bytes() {<FILL_FUNCTION_BODY>}
@Override
public long offset() {
return 0;
}
@Override
public long size() {
return 8;
}
@Override
public Double get() {
return instance;
}
@Override
public Double getUsing(@Nullable Double using) {
return instance;
}
@Override
public long hash(LongHashFunction f) {
return f.hashLong(Double.doubleToRawLongBits(instance));
}
@Override
public boolean equivalent(RandomDataInput source, long sourceOffset) {
return source.readLong(sourceOffset) == Double.doubleToRawLongBits(instance);
}
@Override
public void writeTo(RandomDataOutput target, long targetOffset) {
target.writeDouble(targetOffset, instance);
}
@Override
public Data<Double> getData(@NotNull Double instance) {
this.instance = instance;
bsInit = false;
return this;
}
@Override
public void uninit() {
instance = null;
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
// no config fields to read
initTransients();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
// no config fields to write
}
@Override
public DataAccess<Double> copy() {
return new DoubleDataAccess();
}
}
|
if (!bsInit) {
bs.writeDouble(0, instance);
bsInit = true;
}
return bs;
| 519 | 40 | 559 |
<methods>public boolean equals(java.lang.Object) ,public int hashCode() ,public java.lang.String toString() <variables>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ExternalBytesMarshallableDataAccess.java
|
ExternalBytesMarshallableDataAccess
|
readMarshallable
|
class ExternalBytesMarshallableDataAccess<T> extends InstanceCreatingMarshaller<T>
implements DataAccess<T>, Data<T> {
// Config fields
private SizedReader<T> reader;
private BytesWriter<? super T> writer;
/**
* Cache field
*/
private transient Bytes bytes;
/**
* State field
*/
private transient T instance;
public ExternalBytesMarshallableDataAccess(
Class<T> tClass, SizedReader<T> reader, BytesWriter<? super T> writer) {
this(tClass, reader, writer, DEFAULT_BYTES_CAPACITY);
}
private ExternalBytesMarshallableDataAccess(
Type tClass, SizedReader<T> reader, BytesWriter<? super T> writer,
long bytesCapacity) {
super(tClass);
this.writer = writer;
this.reader = reader;
initTransients(bytesCapacity);
}
private void initTransients(long bytesCapacity) {
bytes = DefaultElasticBytes.allocateDefaultElasticBytes(bytesCapacity);
}
@Override
public RandomDataInput bytes() {
return bytes.bytesStore();
}
@Override
public long offset() {
return 0;
}
@Override
public long size() {
return bytes.readRemaining();
}
@Override
public T get() {
return instance;
}
@Override
public T getUsing(@Nullable T using) {
if (using == null)
using = createInstance();
T result = reader.read(bytes, size(), using);
bytes.readPosition(0);
return result;
}
@Override
public int hashCode() {
return dataHashCode();
}
@Override
public boolean equals(Object obj) {
return dataEquals(obj);
}
@Override
public String toString() {
return get().toString();
}
@Override
public Data<T> getData(@NotNull T instance) {
this.instance = instance;
bytes.clear();
writer.write(bytes, instance);
return this;
}
@Override
public void uninit() {
instance = null;
}
@Override
public DataAccess<T> copy() {
return new ExternalBytesMarshallableDataAccess<>(
tType(), copyIfNeeded(reader), copyIfNeeded(writer), bytes.realCapacity());
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {<FILL_FUNCTION_BODY>}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
super.writeMarshallable(wireOut);
wireOut.write(() -> "reader").typedMarshallable(reader);
wireOut.write(() -> "writer").typedMarshallable(writer);
}
}
|
super.readMarshallable(wireIn);
reader = wireIn.read(() -> "reader").typedMarshallable();
writer = wireIn.read(() -> "writer").typedMarshallable();
initTransients(DEFAULT_BYTES_CAPACITY);
| 760 | 69 | 829 |
<methods>public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>private java.lang.reflect.Type tClass
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ExternalizableDataAccess.java
|
ExternalizableDataAccess
|
createInstance
|
class ExternalizableDataAccess<T extends Externalizable> extends SerializableDataAccess<T> {
/**
* Config field
*/
private Class<T> tClass;
public ExternalizableDataAccess(Class<T> tClass) {
this(tClass, DEFAULT_BYTES_CAPACITY);
}
private ExternalizableDataAccess(Class<T> tClass, long bytesCapacity) {
super(bytesCapacity);
this.tClass = tClass;
}
protected Class<T> tClass() {
return tClass;
}
protected T createInstance() {<FILL_FUNCTION_BODY>}
@Override
public T getUsing(@Nullable T using) {
if (using == null)
using = createInstance();
try {
using.readExternal(new ObjectInputStream(in));
bytes.readPosition(0);
return using;
} catch (IOException | ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public Data<T> getData(@NotNull T instance) {
this.instance = instance;
bytes.clear();
try {
ObjectOutputStream out = new ObjectOutputStream(this.out);
instance.writeExternal(out);
out.flush();
return this;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public DataAccess<T> copy() {
return new ExternalizableDataAccess<>(tClass, bytes.realCapacity());
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
tClass = wireIn.read(() -> "tClass").typeLiteral();
initTransients(DEFAULT_BYTES_CAPACITY);
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
wireOut.write(() -> "tClass").typeLiteral(tClass);
}
}
|
try {
return tClass.newInstance();
} catch (InstantiationException | IllegalAccessException e) {
throw new RuntimeException(
"Externalizable " + tClass + " must have a public no-arg constructor", e);
}
| 501 | 64 | 565 |
<methods>public void <init>() ,public net.openhft.chronicle.bytes.RandomDataInput bytes() ,public DataAccess<T> copy() ,public T get() ,public Data<T> getData(T) ,public T getUsing(T) ,public long offset() ,public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public long size() ,public void uninit() ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>transient Bytes#RAW bytes,transient java.io.InputStream in,transient T instance,transient java.io.OutputStream out
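createInstance() above instantiates the value type reflectively, so it requires a public no-arg constructor. A hypothetical Externalizable value type that satisfies that contract:

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

// Hypothetical value type; the public no-arg constructor is what
// ExternalizableDataAccess.createInstance() relies on.
public class PointValue implements Externalizable {
    private int x;
    private int y;

    public PointValue() { } // required: invoked reflectively

    public PointValue(int x, int y) {
        this.x = x;
        this.y = y;
    }

    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeInt(x);
        out.writeInt(y);
    }

    @Override
    public void readExternal(ObjectInput in) throws IOException {
        x = in.readInt();
        y = in.readInt();
    }
}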
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/ExternalizableReader.java
|
ExternalizableReader
|
read
|
class ExternalizableReader<T extends Externalizable> extends InstanceCreatingMarshaller<T>
implements SizedReader<T>, BytesReader<T> {
public ExternalizableReader(Class<T> tClass) {
super(tClass);
}
@NotNull
@Override
public T read(@NotNull Bytes in, long size, @Nullable T using) {
return read(in, using);
}
@NotNull
@Override
public T read(Bytes in, @Nullable T using) {<FILL_FUNCTION_BODY>}
}
|
if (using == null)
using = createInstance();
try {
using.readExternal(new ObjectInputStream(in.inputStream()));
return using;
} catch (IOException | ClassNotFoundException e) {
throw new RuntimeException(e);
}
| 147 | 68 | 215 |
<methods>public void readMarshallable(net.openhft.chronicle.wire.WireIn) ,public void writeMarshallable(net.openhft.chronicle.wire.WireOut) <variables>private java.lang.reflect.Type tClass
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/InstanceCreatingMarshaller.java
|
InstanceCreatingMarshaller
|
createInstance
|
class InstanceCreatingMarshaller<T> implements Marshallable {
private Type tClass;
/**
* Constructor for use in subclasses.
*
* @param tClass the class of objects deserialized
*/
protected InstanceCreatingMarshaller(Class<T> tClass) {
this.tClass = tClass;
}
protected InstanceCreatingMarshaller(Type tClass) {
this.tClass = tClass;
}
/**
* Returns the class of objects deserialized.
*/
protected Class<T> tClass() {
return (Class<T>) tClass;
}
protected Type tType() {
return tClass;
}
/**
* Creates a new {@code T} instance by calling {@link Class#newInstance()}. If you need
     * different logic, i.e. calling a constructor with a parameter, override this method in a
* subclass of the specific {@link DataAccess} or {@link SizedReader} and configure in {@link
* ChronicleMapBuilder}.
*
* @return a new instance to return from {@link Data#getUsing(Object)} or {@link
* SizedReader#read(net.openhft.chronicle.bytes.Bytes, long, Object)} method
*/
protected T createInstance() {<FILL_FUNCTION_BODY>}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
//noinspection unchecked
tClass = wireIn.read(() -> "tClass").lenientTypeLiteral();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
wireOut.write(() -> "tClass").typeLiteral(tClass);
}
}
|
try {
return ObjectUtils.newInstance(tClass());
} catch (Exception e) {
throw new IllegalStateException("Some of default marshallers, chosen for the type\n" +
tClass + " by default, delegate to \n" +
this.getClass().getName() + " which assumes the type has a public no-arg\n" +
"constructor. If this is not true, you should either extend the marshaller,\n" +
"overriding createInstance() and copy() (if defined), and the extending\n" +
"class shouldn't be inner, because such classes couldn't be Serializable\n" +
"that is a requirement for marshaller classes, or write and configure your\n" +
"own marshaller for " + tClass + " type from scratch, and configure for the\n" +
"Chronicle Map via keyMarshaller[s]() or valueMarshaller[s]() methods", e);
}
| 444 | 240 | 684 |
<no_super_class>
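A minimal sketch of the override described in the Javadoc above, reusing the hypothetical PointValue type from the earlier sketch: a top-level (hence Serializable-friendly) subclass of ExternalizableReader that supplies its own instantiation logic instead of the reflective no-arg path. Such a reader could then be configured through the keyMarshaller[s]()/valueMarshaller[s]() methods the error text refers to.

import net.openhft.chronicle.hash.serialization.impl.ExternalizableReader;

public class PointValueReader extends ExternalizableReader<PointValue> {
    public PointValueReader() {
        super(PointValue.class);
    }

    @Override
    protected PointValue createInstance() {
        // constructor with parameters instead of Class.newInstance()
        return new PointValue(0, 0);
    }
}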
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/IntegerDataAccess_3_13.java
|
IntegerDataAccess_3_13
|
bytes
|
class IntegerDataAccess_3_13 extends AbstractData<Integer>
implements DataAccess<Integer>, Data<Integer> {
// Cache fields
private transient boolean bsInit;
private transient BytesStore bs;
/**
* State field
*/
private transient Integer instance;
public IntegerDataAccess_3_13() {
initTransients();
}
private void initTransients() {
bs = BytesStore.wrap(new byte[4]);
}
@Override
public RandomDataInput bytes() {<FILL_FUNCTION_BODY>}
@Override
public long offset() {
return 0;
}
@Override
public long size() {
return 4;
}
@Override
public Integer get() {
return instance;
}
@Override
public Integer getUsing(@Nullable Integer using) {
return instance;
}
@Override
public long hash(LongHashFunction f) {
return f.hashInt(instance);
}
@Override
public boolean equivalent(RandomDataInput source, long sourceOffset) {
return source.readInt(sourceOffset) == instance;
}
@Override
public void writeTo(RandomDataOutput target, long targetOffset) {
target.writeInt(targetOffset, instance);
}
@Override
public Data<Integer> getData(@NotNull Integer instance) {
this.instance = instance;
bsInit = false;
return this;
}
@Override
public void uninit() {
instance = null;
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
// no config fields to read
initTransients();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
// no config fields to write
}
@Override
public DataAccess<Integer> copy() {
return new IntegerDataAccess_3_13();
}
}
|
if (!bsInit) {
bs.writeInt(0, instance);
bsInit = true;
}
return bs;
| 516 | 40 | 556 |
<methods>public boolean equals(java.lang.Object) ,public int hashCode() ,public java.lang.String toString() <variables>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/LongDataAccess.java
|
LongDataAccess
|
bytes
|
class LongDataAccess extends AbstractData<Long>
implements DataAccess<Long>, Data<Long> {
// Cache fields
private transient boolean bsInit;
private transient BytesStore bs;
/**
* State field
*/
private transient Long instance;
public LongDataAccess() {
initTransients();
}
private void initTransients() {
bs = BytesStore.wrap(new byte[8]);
}
@Override
public RandomDataInput bytes() {<FILL_FUNCTION_BODY>}
@Override
public long offset() {
return 0;
}
@Override
public long size() {
return 8;
}
@Override
public Long get() {
return instance;
}
@Override
public Long getUsing(@Nullable Long using) {
return instance;
}
@Override
public long hash(LongHashFunction f) {
return f.hashLong(instance);
}
@Override
public boolean equivalent(RandomDataInput source, long sourceOffset) {
return source.readLong(sourceOffset) == instance;
}
@Override
public void writeTo(RandomDataOutput target, long targetOffset) {
target.writeLong(targetOffset, instance);
}
@Override
public Data<Long> getData(@NotNull Long instance) {
this.instance = instance;
bsInit = false;
return this;
}
@Override
public void uninit() {
instance = null;
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
// no config fields to read
initTransients();
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
// no config fields to write
}
@Override
public DataAccess<Long> copy() {
return new LongDataAccess();
}
}
|
if (!bsInit) {
bs.writeLong(0, instance);
bsInit = true;
}
return bs;
| 501 | 40 | 541 |
<methods>public boolean equals(java.lang.Object) ,public int hashCode() ,public java.lang.String toString() <variables>
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/MarshallableReaderWriter.java
|
MarshallableReaderWriter
|
read
|
class MarshallableReaderWriter<V extends Marshallable>
extends CachingCreatingMarshaller<V> {
public MarshallableReaderWriter(Class<V> vClass) {
super(vClass);
}
@NotNull
@Override
public V read(Bytes in, long size, @Nullable V using) {<FILL_FUNCTION_BODY>}
@Override
protected void writeToWire(Wire wire, @NotNull V toWrite) {
toWrite.writeMarshallable(wire);
}
}
|
if (using == null)
using = createInstance();
using.readMarshallable(Wires.binaryWireForRead(in, in.readPosition(), size));
return using;
| 133 | 51 | 184 |
<methods>public void <init>(Class<V>) ,public long size(V) ,public void write(Bytes#RAW, long, V) <variables>static final ThreadLocal<java.lang.Object> LAST_TL,static final ThreadLocal<net.openhft.chronicle.wire.Wire> WIRE_TL
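A round-trip sketch for the reader above, assuming the usual Chronicle Wire API; the Ticker bean and its fields are hypothetical, and its public no-arg constructor is what createInstance() falls back on when 'using' is null.

import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.hash.serialization.impl.MarshallableReaderWriter;
import net.openhft.chronicle.wire.Marshallable;
import net.openhft.chronicle.wire.WireIn;
import net.openhft.chronicle.wire.WireOut;
import org.jetbrains.annotations.NotNull;

public class Ticker implements Marshallable {
    String symbol = "";
    long volume;

    public Ticker() {
    }

    @Override
    public void writeMarshallable(@NotNull WireOut wire) {
        wire.write(() -> "symbol").text(symbol);
        wire.write(() -> "volume").int64(volume);
    }

    @Override
    public void readMarshallable(@NotNull WireIn wire) {
        symbol = wire.read(() -> "symbol").text();
        volume = wire.read(() -> "volume").int64();
    }

    public static void main(String[] args) {
        MarshallableReaderWriter<Ticker> rw = new MarshallableReaderWriter<>(Ticker.class);

        Ticker out = new Ticker();
        out.symbol = "ACME";
        out.volume = 42;

        Bytes<?> bytes = Bytes.elasticHeapByteBuffer();
        long size = rw.size(out);               // size from the caching superclass
        rw.write(bytes, size, out);             // binary-wire encode
        Ticker in = rw.read(bytes, size, null); // null 'using' -> createInstance()
        System.out.println(in.symbol + " " + in.volume);
    }
}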
|
OpenHFT_Chronicle-Map
|
Chronicle-Map/src/main/java/net/openhft/chronicle/hash/serialization/impl/SerializableDataAccess.java
|
SerializableDataAccess
|
getUsing
|
class SerializableDataAccess<T extends Serializable> extends AbstractData<T>
implements DataAccess<T> {
// Cache fields
transient Bytes bytes;
transient OutputStream out;
transient InputStream in;
/**
* State field
*/
transient T instance;
public SerializableDataAccess() {
this(DEFAULT_BYTES_CAPACITY);
}
SerializableDataAccess(long bytesCapacity) {
initTransients(bytesCapacity);
}
void initTransients(long bytesCapacity) {
bytes = DefaultElasticBytes.allocateDefaultElasticBytes(bytesCapacity);
out = bytes.outputStream();
in = bytes.inputStream();
}
@Override
public RandomDataInput bytes() {
return bytes.bytesStore();
}
@Override
public long offset() {
return 0;
}
@Override
public long size() {
return bytes.readRemaining();
}
@Override
public T get() {
return instance;
}
// TODO reuse using object
@Override
public T getUsing(@Nullable T using) {<FILL_FUNCTION_BODY>}
@Override
public Data<T> getData(@NotNull T instance) {
this.instance = instance;
bytes.clear();
try {
ObjectOutputStream out = new ObjectOutputStream(this.out);
out.writeObject(instance);
out.flush();
return this;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public void uninit() {
instance = null;
}
@Override
public DataAccess<T> copy() {
return new SerializableDataAccess<>(bytes.realCapacity());
}
@Override
public void readMarshallable(@NotNull WireIn wireIn) {
// no fields to read
initTransients(DEFAULT_BYTES_CAPACITY);
}
@Override
public void writeMarshallable(@NotNull WireOut wireOut) {
// no fields to write
}
}
|
try {
T result = (T) new ObjectInputStream(in).readObject();
bytes.readPosition(0);
return result;
} catch (IOException | ClassNotFoundException e) {
throw new RuntimeException(e);
}
| 543 | 63 | 606 |
<methods>public boolean equals(java.lang.Object) ,public int hashCode() ,public java.lang.String toString() <variables>
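A round-trip sketch for the class above, assuming the usual Chronicle package locations; the Note value type is hypothetical. getData() streams the instance through an ObjectOutputStream into the elastic buffer, and getUsing() replays it through an ObjectInputStream (per the TODO, the 'using' argument is currently ignored).

import java.io.Serializable;
import net.openhft.chronicle.hash.Data;
import net.openhft.chronicle.hash.serialization.impl.SerializableDataAccess;

public class SerializableRoundTrip {
    // Hypothetical Serializable value type.
    static class Note implements Serializable {
        final String text;
        Note(String text) { this.text = text; }
    }

    public static void main(String[] args) {
        SerializableDataAccess<Note> access = new SerializableDataAccess<>();

        Data<Note> data = access.getData(new Note("hello"));
        System.out.println("serialized size = " + data.size());

        Note copy = data.getUsing(null); // deserializes a fresh copy
        System.out.println(copy.text);

        access.uninit();
    }
}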
|