<|start_filename|>public/assets/js/get_comments.js<|end_filename|>
$(document).ready(() => {
// On click on .comment class...
$(".feed").on('click', '.comment', (event) => {
// Fetch the pid of the post
let pid = event.target.id.split("_")[1];
// Toggle the class
$(`#postid_${pid}.feed-comments`).toggleClass('hide-comment');
// If the selected feed comment has the 'hide-comment' class...
if ($(`#postid_${pid}.feed-comments`).hasClass('hide-comment')) {
// ..then display the comment section and load comments
$(`#postid_${pid}.feed-comments`).show();
// ..and change the class to fas
$(`#postid_${pid}.comment`).removeClass('far').addClass('fas');
// Load the comments into the appropriate post.
// Pass pid as POST argument via .load() method
$(`#postid_${pid}.feed-comments`).load('../../includes/get_comments.php', {pid:pid});
} else {
// Else.. just hide
$(`#postid_${pid}.feed-comments`).hide();
// ..and change the class to far
$(`#postid_${pid}.comment`).removeClass('fas').addClass('far');
}
});
$(".feed").on('keydown', '.post-comment', (event) => {
if (event.keyCode == 13) {
let pid = event.target.id.split("_")[1];
let comment_content = $(`#postid_${pid}.post-comment`).val();
$.ajax({
url: '../../includes/post_comment.php',
method: 'POST',
data: {pid:pid, comment_content:comment_content},
success: (data) => {
// On success, re-load the contents of the appropriate comment section
$(`#postid_${pid}.feed-comments`).load('../../includes/get_comments.php', {pid:pid});
}
});
}
});
});
<|start_filename|>public/assets/js/logout.js<|end_filename|>
$(() => {
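// Display a 3-second countdown, then redirect back to the public landing page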
let timer = 3;
var interval = setInterval(() => {
$(".countdown").text(timer);
timer--;
if (timer < 0) {
clearInterval(interval);
window.location.replace('/public');
}
}, 1000);
});
<|start_filename|>public/assets/js/upload_post.js<|end_filename|>
$(document).ready(() => {
$("#photo-input").on('click', (event) => {
// console.log(event.target.files);
});
$("#video-input").on('click', () => {
});
});
<|start_filename|>public/assets/js/auth.js<|end_filename|>
$(() => {
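// Client-side validation rules for the login and signup forms (jQuery Validation plugin)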
$("#login-form").validate({
rules: {
username: {
required: true,
minlength: 5
},
password: {
required: true,
minlength: 6
}
}
});
$("#signup-form").validate({
rules: {
username_signup: {
required: true,
minlength: 5
},
email_signup: {
required: true,
email: true
},
password_signup: {
required: true,
minlength: 6
},
password_signup_c: {
required: true,
minlength: 6,
equalTo: "#password_signup"
}
}
});
$("a[href*='#signup']").on('click', () => {
$("#login-form").toggle();
$("#signup-form").toggle();
});
$("a[href*='#login']").on('click', () => {
$("#login-form").toggle();
$("#signup-form").toggle();
});
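// Show the appropriate form on page load based on the URL hash (#signup or #login)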
if (window.location.hash == '#signup') {
$("#login-form").hide();
$("#signup-form").show();
} else if (window.location.hash == '#login') {
$("#login-form").show();
$("#signup-form").hide();
}
});
<|start_filename|>public/assets/css/custom.css<|end_filename|>
.h-100v {
min-height: 100vh; }
.auth-box {
padding: 2rem 2rem 1rem 2rem;
background: #fff;
text-align: center;
box-shadow: 0 0 5px 0;
color: #ccc; }
.auth-box .logo {
margin: 2rem auto; }
.auth-box h3 {
color: #34495e;
margin-bottom: 2rem; }
.auth-box h4 {
color: #34495e; }
.auth-box input {
border: 0;
border-bottom: 1px solid #ccc;
border-radius: 0;
background: #f8f8f8; }
.auth-box input:focus {
box-shadow: 0 0; }
.auth-box button {
width: 100%;
margin-top: 1rem; }
.auth-box a {
text-decoration: none; }
.auth-box a:hover {
font-weight: bold; }
.auth-box .helper {
margin-top: 1rem;
font-size: 15px;
color: #aaa; }
.auth-box label {
color: #e74c3c; }
.auth-box #signup-form {
display: none; }
#navbar {
background: #fff;
color: #ccc;
box-shadow: 0 0 5px 0; }
#navbar .fa-fw {
font-size: 1.8rem;
color: #ccc; }
#navbar .fa-fw:hover {
color: #aaa; }
.feed {
margin: 0 auto; }
.feed-row {
margin: 2rem auto;
background: #fff;
box-shadow: 0 0 5px 0;
border: 1px solid #ccc;
color: #ccc; }
.feed-row .feed-post {
padding: 0; }
.feed-row .feed-post h3 {
position: relative;
margin-left: 3.25rem;
top: 0.6rem;
font-size: 1rem;
text-transform: lowercase;
color: #34495e; }
.feed-row .feed-post h3 a {
color: inherit;
text-decoration: none; }
.feed-row .feed-post h3 a:hover {
font-weight: bold; }
.feed-row .feed-post .feed-user {
padding: 0.5rem 1rem;
margin-bottom: 0.75rem; }
.feed-row .feed-post .feed-user img.thumbnail {
position: absolute;
border-radius: 50%;
border: 1px solid #ccc; }
.feed-row .feed-post .feed-media img.feed-img {
max-width: 100%; }
.feed-row .feed-post .feed-media video {
max-width: 100%; }
.feed-row .feed-post .feed-reaction {
padding: 0.5rem 0;
text-align: center;
border-bottom: 1px solid #ccc; }
.feed-row .feed-post .feed-reaction span {
position: absolute;
top: 5px;
margin-left: 5px; }
.feed-row .feed-post .feed-reaction .far.fa-heart:hover {
cursor: pointer;
color: #e74c3c; }
.feed-row .feed-post .feed-reaction .far.fa-comment:hover {
cursor: pointer;
color: #3498db; }
.feed-row .feed-post .feed-reaction .fas {
cursor: pointer; }
.feed-row .feed-post .feed-reaction .fas.fa-heart {
color: #e74c3c; }
.feed-row .feed-post .feed-reaction .fas.fa-comment {
color: #3498db; }
.feed-row .feed-post .feed-comments .comment {
padding: 0.35rem 0.5rem; }
.feed-row .feed-post .feed-comments .comment a {
font-weight: bold; }
.feed-row .feed-post .feed-comments .comment .comment-user {
color: #34495e; }
.feed-row .feed-post .feed-comments .comment .comment-content {
color: #bbb; }
.feed-row .feed-post .feed-comments input {
width: 100%;
border: 0;
border-top: 1px solid #ccc;
padding: 0.25rem 0.5rem;
font-weight: 300; }
.upload {
margin: 0 auto;
text-align: center; }
.upload .upload-content {
margin: 2rem auto auto; }
.upload .upload-content .fas {
cursor: pointer;
color: #34495e; }
.upload .upload-content .fas:hover {
color: #444; }
.upload .upload-content input {
display: none; }
body {
background: #fafafa; }
/*# sourceMappingURL=custom.css.map */
<|start_filename|>public/assets/js/like_action.js<|end_filename|>
$(document).ready(() => {
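// Load the feed contents into the page on load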
$(".feed").load('../../includes/get_feed.php');
// Post Like Ajax request
$(".feed").on('click', '.like', (event) => {
let id = event.target.id;
let split_id = id.split("_");
let pid = split_id[1];
$.ajax({
url: '../../includes/like_action.php',
method: 'POST',
data: {pid:pid},
success: (data) => {
event.target.classList.remove('far');
event.target.classList.add('fas');
event.target.classList.remove('like');
event.target.classList.add('unlike');
}
});
});
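// Post Unlike Ajax request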
$(".feed").on('click', '.unlike', (event) => {
let unlike_id = event.target.id;
let split_unlike_id = unlike_id.split("_");
let unlike_pid = split_unlike_id[1];
$.ajax({
url: '../../includes/unlike_action.php',
method: 'POST',
data: {unlike_pid:unlike_pid},
success: (data) => {
event.target.classList.remove('fas');
event.target.classList.add('far');
event.target.classList.remove('unlike');
event.target.classList.add('like');
}
});
});
});
<|start_filename|>example/lib/sample_event.dart<|end_filename|>
import 'package:cell_calendar/cell_calendar.dart';
import 'package:flutter/material.dart';
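/// Sample events spread around today's date for the example calendar.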
List<CalendarEvent> sampleEvents() {
final today = DateTime.now();
final sampleEvents = [
CalendarEvent(
eventName: "New iPhone",
eventDate: today.add(Duration(days: -42)),
eventBackgroundColor: Colors.black),
CalendarEvent(
eventName: "Writing test",
eventDate: today.add(Duration(days: -30)),
eventBackgroundColor: Colors.deepOrange),
CalendarEvent(
eventName: "Play soccer",
eventDate: today.add(Duration(days: -7)),
eventBackgroundColor: Colors.greenAccent),
CalendarEvent(
eventName: "Learn about history",
eventDate: today.add(Duration(days: -7))),
CalendarEvent(
eventName: "Buy new keyboard",
eventDate: today.add(Duration(days: -7))),
CalendarEvent(
eventName: "Walk around the park",
eventDate: today.add(Duration(days: -7)),
eventBackgroundColor: Colors.deepOrange),
CalendarEvent(
eventName: "Buy a present for Rebecca",
eventDate: today.add(Duration(days: -7)),
eventBackgroundColor: Colors.pink),
CalendarEvent(
eventName: "Firebase", eventDate: today.add(Duration(days: -7))),
CalendarEvent(eventName: "Task Deadline", eventDate: today),
CalendarEvent(
eventName: "Jon's Birthday",
eventDate: today.add(Duration(days: 3)),
eventBackgroundColor: Colors.green),
CalendarEvent(
eventName: "Date with Rebecca",
eventDate: today.add(Duration(days: 7)),
eventBackgroundColor: Colors.pink),
CalendarEvent(
eventName: "Start to study Spanish",
eventDate: today.add(Duration(days: 13))),
CalendarEvent(
eventName: "Have lunch with Mike",
eventDate: today.add(Duration(days: 13)),
eventBackgroundColor: Colors.green),
CalendarEvent(
eventName: "Buy new Play Station software",
eventDate: today.add(Duration(days: 13)),
eventBackgroundColor: Colors.indigoAccent),
CalendarEvent(
eventName: "Update my flutter package",
eventDate: today.add(Duration(days: 13))),
CalendarEvent(
eventName: "Watch movies in my house",
eventDate: today.add(Duration(days: 21))),
CalendarEvent(
eventName: "Medical Checkup",
eventDate: today.add(Duration(days: 30)),
eventBackgroundColor: Colors.red),
CalendarEvent(
eventName: "Gym",
eventDate: today.add(Duration(days: 42)),
eventBackgroundColor: Colors.indigoAccent),
];
return sampleEvents;
}
| AndrewJEON/cell_calendar |
<|start_filename|>game.js<|end_filename|>
import Main from './js/main'
new Main()
<|start_filename|>smoke-test/diagram-assertions-graphviz.test.js<|end_filename|>
const fs = require('fs')
const path = require('path')
const basePath = process.env.SMOKE_TEST_BASE_PATH
const diagramFileName = 'diagram.png';
console.info("Asserting expected diagram PNG at " + path.join(basePath, diagramFileName))
describe('Created diagram PNG file', ()=>{
it ('should exist', ()=>{
const dirFiles = fs.readdirSync(basePath)
expect(dirFiles).toContain(diagramFileName)
})
it ('should have a reasonable size', ()=>{
const fileProps = fs.statSync(path.join(basePath, diagramFileName))
// On my local setup and Graphviz implementation the diagram is 229KB. In other environments the size seems to differ.
expect(fileProps.size / 1024).toBeGreaterThan(160) // at least 160KB
expect(fileProps.size / 1024).toBeLessThan(400) // at most 400KB
})
})
<|start_filename|>testSetup.js<|end_filename|>
import fs from 'fs';
import rimraf from 'rimraf';
module.exports = async () => {
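// Wipe and recreate the shared test output directory so every run starts from a clean state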
const basePath = `${process.cwd()}/test-generated`
rimraf.sync(basePath)
fs.mkdirSync(basePath)
};
<|start_filename|>jest.config.js<|end_filename|>
module.exports = {
testMatch: ['**/src/**/tests/**/*.test.ts'],
testPathIgnorePatterns: ["/node_modules/"],
reporters: ["default"],
globalSetup: "./testSetup.js"
}
<|start_filename|>smoke-test/diagram-assertions-cytoscape.test.js<|end_filename|>
const path = require('path')
const fs = require('fs')
const basePath = process.env.SMOKE_TEST_BASE_PATH
const diagramFolderName = 'diagram';
console.info("Asserting expected static website with Cytoscape at: " + path.join(basePath, diagramFolderName))
describe('Generated static website', ()=>{
it ('should exist as expected', ()=>{
const index = new File(`${basePath}/${diagramFolderName}/index.html`)
expect(index.stats.size).toBeGreaterThanOrEqual(300)
expect(index.body).toContain("CDK-Dia")
const icons = new File(`${basePath}/${diagramFolderName}/icons`)
expect(icons.stats.isDirectory()).toBeTruthy()
const js = new File(`${basePath}/${diagramFolderName}/js`)
expect(js.stats.isDirectory()).toBeTruthy()
const elementsJson = new File(`${basePath}/${diagramFolderName}/cy-elements.json`)
expect(JSON.parse(elementsJson.body).length).toBeGreaterThanOrEqual(2)
const stylesJson = new File(`${basePath}/${diagramFolderName}/cy-styles.json`)
expect(JSON.parse(stylesJson.body).length).toBeGreaterThanOrEqual(15)
})
})
class File {
stats
body
constructor(path) {
this.stats = fs.statSync(path)
if (!this.stats.isDirectory()) {
this.body = fs.readFileSync(path).toString()
}
}
}
<|start_filename|>skychanger-bukkit/src/main/java/com/dscalzi/skychanger/bukkit/internal/SkyChangeImpl.java<|end_filename|>
/*
* This file is part of SkyChanger, licensed under the MIT License (MIT).
*
* Copyright (c) 2017-2021 <NAME> <https://github.com/dscalzi/SkyChanger>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dscalzi.skychanger.bukkit.internal;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.dscalzi.skychanger.bukkit.api.SkyChanger;
import com.dscalzi.skychanger.core.api.SkyAPI;
import com.dscalzi.skychanger.core.api.SkyPacket;
import com.dscalzi.skychanger.core.internal.wrap.IPlayer;
import com.dscalzi.skychanger.core.internal.manager.MessageManager;
import org.bukkit.Difficulty;
import org.bukkit.GameMode;
import org.bukkit.World;
import org.bukkit.entity.Player;
public class SkyChangeImpl implements SkyAPI {
public static final List<String> FREEZE_UNSUPPORTED = Stream.of("1.8", "1.13").collect(Collectors.toList());
/* API Methods */
@Override
public boolean changeSky(IPlayer p, float number) {
return changeSky(p, SkyPacket.RAIN_LEVEL_CHANGE, number);
}
@Override
public boolean changeSky(IPlayer p, SkyPacket packet, float number) {
try {
int major = ReflectionUtil.getMajor(), minor = ReflectionUtil.getMinor();
if(major > 1 || (major == 1 && minor >= 17)) {
MessageManager.getInstance().featureUnsupported(p, "1.17+");
}
Object payload = null;
if(major == 1) {
if(minor >= 17) {
payload = createPacket_117_plus(packet.getValue(), number);
} else if(minor >= 16) {
payload = createPacket_116(packet.getValue(), number);
} else {
payload = createPacket_18_to_115(packet.getValue(), number);
}
}
if(payload != null) {
if(minor >= 17) {
deliverPacket(payload, (Player)p.getOriginal());
} else {
deliverPacketLegacy(payload, (Player)p.getOriginal());
}
return true;
} else {
MessageManager.getInstance().logPacketError();
return false;
}
} catch(Throwable t) {
MessageManager.getInstance().logPacketError();
t.printStackTrace();
return false;
}
}
@Override
public boolean freeze(IPlayer p) {
return sendFreezePacket((Player)p.getOriginal());
}
@Override
public boolean unfreeze(IPlayer p) {
return p.teleport(p.getLocation());
}
/* NMS Utility */
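// Reflection keeps a single build compatible with multiple server versions: pre-1.17 classes live in the
// versioned net.minecraft.server packages (getNMSClassLegacy), while 1.17+ uses Mojang-style package paths (getMCClass).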
protected Object getConnectionLegacy(Player player) throws InvocationTargetException, IllegalAccessException, NoSuchFieldException {
Class<?> ocbPlayer = ReflectionUtil.getOCBClass("entity.CraftPlayer");
Method getHandle = ReflectionUtil.getMethod(ocbPlayer, "getHandle");
Object nmsPlayer = Objects.requireNonNull(getHandle).invoke(player);
Field conField = nmsPlayer.getClass().getField("playerConnection");
return conField.get(nmsPlayer);
}
protected Object getConnection(Player player) throws InvocationTargetException, IllegalAccessException, NoSuchFieldException {
Class<?> ocbPlayer = ReflectionUtil.getOCBClass("entity.CraftPlayer");
Method getHandle = ReflectionUtil.getMethod(ocbPlayer, "getHandle");
Object nmsPlayer = Objects.requireNonNull(getHandle).invoke(player);
Field conField = nmsPlayer.getClass().getField("b");
return conField.get(nmsPlayer);
}
protected void deliverPacketLegacy(Object packet, Player player) throws NoSuchMethodException,
IllegalAccessException, NoSuchFieldException, InvocationTargetException {
Method sendPacket = ReflectionUtil.getNMSClassLegacy("PlayerConnection")
.getMethod( "sendPacket", ReflectionUtil.getNMSClassLegacy("Packet"));
sendPacket.invoke(this.getConnectionLegacy(player), packet);
}
protected void deliverPacket(Object packet, Player player) throws NoSuchMethodException,
IllegalAccessException, NoSuchFieldException, InvocationTargetException {
Method sendPacket = ReflectionUtil.getMCClass("server.network.PlayerConnection")
.getMethod( "sendPacket", ReflectionUtil.getMCClass("network.protocol.Packet"));
sendPacket.invoke(this.getConnection(player), packet);
}
/* Sky Change Packet Creation */
protected Object createPacket_18_to_115(int packetNum, float number) throws NoSuchMethodException,
IllegalAccessException, InvocationTargetException, InstantiationException {
Class<?> ClientboundGameEventPacket = ReflectionUtil.getNMSClassLegacy("PacketPlayOutGameStateChange");
Constructor<?> packetConstructor = ClientboundGameEventPacket.getConstructor(int.class, float.class);
return packetConstructor.newInstance(packetNum, number);
}
public Object createPacket_116(int packetNum, float number) throws NoSuchMethodException,
IllegalAccessException, InvocationTargetException, InstantiationException {
Class<?> ClientboundGameEventPacket = ReflectionUtil.getNMSClassLegacy("PacketPlayOutGameStateChange");
Class<?> packetTypeClass = ReflectionUtil.getDeclaredClass(ClientboundGameEventPacket, "a");
Constructor<?> packetConstructor = ClientboundGameEventPacket.getConstructor(packetTypeClass, float.class);
Constructor<?> packetTypeConstructor = Objects.requireNonNull(packetTypeClass).getConstructor(int.class);
Object packetType = packetTypeConstructor.newInstance(packetNum);
return packetConstructor.newInstance(packetType, number);
}
public Object createPacket_117_plus(int packetNum, float number) throws NoSuchMethodException,
IllegalAccessException, InvocationTargetException, InstantiationException {
Class<?> ClientboundGameEventPacket = ReflectionUtil.getMCClass("network.protocol.game.PacketPlayOutGameStateChange");
Class<?> packetTypeClass = ReflectionUtil.getDeclaredClass(ClientboundGameEventPacket, "a");
Constructor<?> packetConstructor = ClientboundGameEventPacket.getConstructor(packetTypeClass, float.class);
Constructor<?> packetTypeConstructor = Objects.requireNonNull(packetTypeClass).getConstructor(int.class);
Object packetType = packetTypeConstructor.newInstance(packetNum);
return packetConstructor.newInstance(packetType, number);
}
/* Freeze NMS Utility */
// 1.16+
private Object getTypeKey(Class<?> WorldClass, Object world) throws InvocationTargetException, IllegalAccessException {
Method getTypeKey = Objects.requireNonNull(ReflectionUtil.getMethod(WorldClass, "getTypeKey"));
return getTypeKey.invoke(world);
}
private Object getDimensionManager1162Plus(Class<?> WorldClass, Object world) throws InvocationTargetException, IllegalAccessException {
Method getDimensionManager = Objects.requireNonNull(ReflectionUtil.getMethod(WorldClass, "getDimensionManager"));
return getDimensionManager.invoke(world);
}
// 1.16+
private Object getDimensionKey(Class<?> WorldClass, Object world) throws InvocationTargetException, IllegalAccessException {
Method getDimensionKey = Objects.requireNonNull(ReflectionUtil.getMethod(WorldClass, "getDimensionKey"));
return getDimensionKey.invoke(world);
}
private Object getWorldServer(Player player) throws InvocationTargetException, IllegalAccessException {
Class<?> craftWorldClass = ReflectionUtil.getOCBClass("CraftWorld");
Method getHandle = Objects.requireNonNull(ReflectionUtil.getMethod(craftWorldClass, "getHandle"));
return getHandle.invoke(player.getWorld());
}
private Object getDimensionManager(Object worldServer) throws InvocationTargetException, IllegalAccessException, NoSuchFieldException {
Class<?> worldProviderClass = ReflectionUtil.getNMSClassLegacy("WorldProvider");
Class<?> worldClass = ReflectionUtil.getNMSClassLegacy("World");
Field worldProviderField = worldClass.getDeclaredField("worldProvider");
Object worldProvider = worldProviderField.get(worldServer);
Method getDimensionManager = Objects.requireNonNull(ReflectionUtil.getMethod(worldProviderClass, "getDimensionManager"));
return getDimensionManager.invoke(worldProvider);
}
// 1.13, 1.14, 1.15
private Object getWorldType(Object worldServer) throws InvocationTargetException, IllegalAccessException {
Class<?> WorldServerClass = ReflectionUtil.getNMSClassLegacy("WorldServer");
Method getWorldData = Objects.requireNonNull(ReflectionUtil.getMethod(WorldServerClass, "getWorldData"));
Object worldData = getWorldData.invoke(worldServer);
Class<?> worldDataClass = ReflectionUtil.getNMSClassLegacy("WorldData");
Method getType = Objects.requireNonNull(ReflectionUtil.getMethod(worldDataClass, "getType"));
return getType.invoke(worldData);
}
private int getWorldEnvironmentId(Player player) throws InvocationTargetException, IllegalAccessException {
Method getId = Objects.requireNonNull(ReflectionUtil.getMethod(World.Environment.class, "getId"));
return (int) getId.invoke(player.getWorld().getEnvironment());
}
private int getGameModeValue(Player player) throws InvocationTargetException, IllegalAccessException {
Method deprecatedGetValue = Objects.requireNonNull(ReflectionUtil.getMethod(GameMode.class, "getValue"));
return (int) deprecatedGetValue.invoke(player.getGameMode());
}
private Object getEnumGamemode(Class<?> EnumGamemodeClass, Player player) throws InvocationTargetException, IllegalAccessException {
Method gmGetById = Objects.requireNonNull(ReflectionUtil.getMethod(EnumGamemodeClass, "getById", int.class));
return gmGetById.invoke(null, getGameModeValue(player));
}
private Object getEnumDifficulty(Class<?> EnumDifficultyClass, Player player) throws InvocationTargetException, IllegalAccessException {
Method diffGetById = Objects.requireNonNull(ReflectionUtil.getMethod(EnumDifficultyClass, "getById", int.class));
Method deprecatedGetValue = Objects.requireNonNull(ReflectionUtil.getMethod(Difficulty.class, "getValue"));
return diffGetById.invoke(null, deprecatedGetValue.invoke(player.getWorld().getDifficulty()));
}
/* Freeze Packet Creation and Dispatch */
protected boolean sendFreezePacket(Player player) {
int major = ReflectionUtil.getMajor(), minor = ReflectionUtil.getMinor(), r = ReflectionUtil.getR();
if(FREEZE_UNSUPPORTED.contains(major + "." + minor)) {
MessageManager.getInstance().featureUnsupported(SkyChanger.wrapPlayer(player), FREEZE_UNSUPPORTED.toString());
} else if(major > 1 || (major == 1 && minor >= 17)) {
List<String> unsupportedList = new ArrayList<>(FREEZE_UNSUPPORTED);
unsupportedList.add("1.17+");
MessageManager.getInstance().featureUnsupported(SkyChanger.wrapPlayer(player), unsupportedList.toString());
}
try {
Object packet;
Class<?> ClientboundRespawnPacket = minor >= 17
? ReflectionUtil.getMCClass("network.protocol.game.PacketPlayOutRespawn")
: ReflectionUtil.getNMSClassLegacy("PacketPlayOutRespawn");
if (major == 1) {
if (minor >= 17) {
// 1.17
Class<?> EnumGamemodeClass = ReflectionUtil.getMCClass("world.level.EnumGamemode");
Object worldServer = getWorldServer(player);
Object gameMode = getEnumGamemode(EnumGamemodeClass, player);
Class<?> WorldClass = ReflectionUtil.getMCClass("world.level.World");
Class<?> ResourceKeyClass = ReflectionUtil.getMCClass("resources.ResourceKey");
Class<?> DimensionManagerClass = ReflectionUtil.getMCClass("world.level.dimension.DimensionManager");
Constructor<?> packetConstructor = ClientboundRespawnPacket.getConstructor(
DimensionManagerClass, // DimensionManager
ResourceKeyClass, // DimensionKey
long.class, // Seed
EnumGamemodeClass, // gameType
EnumGamemodeClass, // previousGameType
boolean.class, // isDebug
boolean.class, // isFlat
boolean.class); // keepAllPlayerData
packet = packetConstructor.newInstance(
getDimensionManager1162Plus(WorldClass, worldServer),
getDimensionKey(WorldClass, worldServer),
player.getWorld().getSeed(),
gameMode,
gameMode,
false,
false,
true);
} else if (minor >= 16) {
// 1.16
// Works sometimes so let's just say it works.
Class<?> EnumGamemodeClass = ReflectionUtil.getNMSClassLegacy("EnumGamemode");
Object worldServer = getWorldServer(player);
Object gameMode = getEnumGamemode(EnumGamemodeClass, player);
Class<?> WorldClass = ReflectionUtil.getNMSClassLegacy("World");
Class<?> ResourceKeyClass = ReflectionUtil.getNMSClassLegacy("ResourceKey");
if(r >= 2) {
// 1.16.2+
Class<?> DimensionManagerClass = ReflectionUtil.getNMSClassLegacy("DimensionManager");
Constructor<?> packetConstructor = ClientboundRespawnPacket.getConstructor(
DimensionManagerClass, // DimensionManager
ResourceKeyClass, // DimensionKey
long.class, // Seed
EnumGamemodeClass, // gameType
EnumGamemodeClass, // previousGameType
boolean.class, // isDebug
boolean.class, // isFlat
boolean.class); // keepAllPlayerData
packet = packetConstructor.newInstance(
getDimensionManager1162Plus(WorldClass, worldServer),
getDimensionKey(WorldClass, worldServer),
player.getWorld().getSeed(),
gameMode,
gameMode,
false,
false,
true);
} else {
// 1.16.1
Constructor<?> packetConstructor = ClientboundRespawnPacket.getConstructor(
ResourceKeyClass, // DimensionType
ResourceKeyClass, // DimensionKey
long.class, // Seed
EnumGamemodeClass, // gameType
EnumGamemodeClass, // previousGameType
boolean.class, // isDebug
boolean.class, // isFlat
boolean.class); // keepAllPlayerData
packet = packetConstructor.newInstance(
getTypeKey(WorldClass, worldServer),
getDimensionKey(WorldClass, worldServer),
player.getWorld().getSeed(),
gameMode,
gameMode,
false,
false,
true);
}
} else if (minor >= 13) {
// 1.13, 1.14, 1.15
Class<?> EnumGamemodeClass = ReflectionUtil.getNMSClassLegacy("EnumGamemode");
Object worldServer = getWorldServer(player);
Class<?> DimensionManagerClass = ReflectionUtil.getNMSClassLegacy("DimensionManager");
Class<?> WorldTypeClass = ReflectionUtil.getNMSClassLegacy("WorldType");
if (minor == 15) {
// 1.15 Constructor
Constructor<?> packetConstructor = ClientboundRespawnPacket.getConstructor(
DimensionManagerClass,
long.class,
WorldTypeClass,
EnumGamemodeClass);
packet = packetConstructor.newInstance(
getDimensionManager(worldServer),
player.getWorld().getSeed(),
getWorldType(worldServer),
getEnumGamemode(EnumGamemodeClass, player));
} else if (minor == 14) {
// 1.14 Constructor
Constructor<?> packetConstructor = ClientboundRespawnPacket.getConstructor(
DimensionManagerClass,
WorldTypeClass,
EnumGamemodeClass);
packet = packetConstructor.newInstance(
getDimensionManager(worldServer),
getWorldType(worldServer),
getEnumGamemode(EnumGamemodeClass, player));
} else {
// 1.13 Constructor
// Does not produce desired effect on 1.13
Class<?> EnumDifficultyClass = ReflectionUtil.getNMSClassLegacy("EnumDifficulty");
Constructor<?> packetConstructor = ClientboundRespawnPacket.getConstructor(
DimensionManagerClass,
EnumDifficultyClass,
WorldTypeClass,
EnumGamemodeClass);
packet = packetConstructor.newInstance(
getDimensionManager(worldServer),
getEnumDifficulty(EnumDifficultyClass, player),
getWorldType(worldServer),
getEnumGamemode(EnumGamemodeClass, player));
}
} else {
// 1.12 and Below
// 1.8, 1.9, 1.10, 1.11, 1.12
Class<?> EnumDifficultyClass = ReflectionUtil.getNMSClassLegacy("EnumDifficulty");
Class<?> WorldTypeClass = ReflectionUtil.getNMSClassLegacy("WorldType");
final Object WorldType_NORMAL = WorldTypeClass.getField("NORMAL").get(null);
if(minor >= 10) {
// 1.10 - 1.12 Constructor
Class<?> EnumGamemodeClass = ReflectionUtil.getNMSClassLegacy("EnumGamemode");
Constructor<?> packetConstructor = ClientboundRespawnPacket.getConstructor(int.class, EnumDifficultyClass, WorldTypeClass, EnumGamemodeClass);
packet = packetConstructor.newInstance(
getWorldEnvironmentId(player),
getEnumDifficulty(EnumDifficultyClass, player),
WorldType_NORMAL,
getEnumGamemode(EnumGamemodeClass, player));
} else {
// 1.8 - 1.9 Constructor
Class<?> WorldSettingsClass = ReflectionUtil.getNMSClassLegacy("WorldSettings");
Class<?> EnumGamemodeClass_Declared = ReflectionUtil.getDeclaredClass(WorldSettingsClass, "EnumGamemode");
Method getById = Objects.requireNonNull(ReflectionUtil.getMethod(EnumGamemodeClass_Declared, "getById", int.class));
Constructor<?> packetConstructor = ClientboundRespawnPacket.getConstructor(int.class, EnumDifficultyClass, WorldTypeClass, EnumGamemodeClass_Declared);
packet = packetConstructor.newInstance(
getWorldEnvironmentId(player),
getEnumDifficulty(EnumDifficultyClass, player),
WorldType_NORMAL,
getById.invoke(null, getGameModeValue(player)));
}
}
} else {
// Minecraft 2? Wow
MessageManager.getInstance().logPacketError();
return false;
}
if(minor >= 17) {
deliverPacket(packet, player);
} else {
deliverPacketLegacy(packet, player);
}
player.updateInventory();
return true;
} catch(Throwable t) {
MessageManager.getInstance().logPacketError();
t.printStackTrace();
return false;
}
}
}
<|start_filename|>skychanger-sponge/src/main/java/com/dscalzi/skychanger/sponge/internal/SkyChangeImpl.java<|end_filename|>
/*
* This file is part of SkyChanger, licensed under the MIT License (MIT).
*
* Copyright (c) 2017-2021 <NAME> <https://github.com/dscalzi/SkyChanger>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dscalzi.skychanger.sponge.internal;
import com.dscalzi.skychanger.core.api.SkyAPI;
import com.dscalzi.skychanger.core.api.SkyPacket;
import com.dscalzi.skychanger.core.internal.wrap.IPlayer;
import net.minecraft.core.NonNullList;
import net.minecraft.network.protocol.game.ClientboundContainerSetContentPacket;
import net.minecraft.network.protocol.game.ClientboundContainerSetSlotPacket;
import net.minecraft.network.protocol.game.ClientboundGameEventPacket;
import net.minecraft.network.protocol.game.ClientboundRespawnPacket;
import net.minecraft.server.level.ServerPlayer;
import net.minecraft.world.item.ItemStack;
import org.spongepowered.api.entity.living.player.Player;
public class SkyChangeImpl implements SkyAPI {
@Override
public boolean changeSky(IPlayer p, float number) {
return changeSky(p, SkyPacket.RAIN_LEVEL_CHANGE, number);
}
@Override
public boolean changeSky(IPlayer p, SkyPacket packet, float number) {
return sendPacket((Player)p.getOriginal(), packet.getValue(), number);
}
@Override
public boolean freeze(IPlayer p) {
return sendFreezePacket((Player)p.getOriginal());
}
@Override
public boolean unfreeze(IPlayer p) {
return p.teleport(p.getLocation());
}
private boolean sendPacket(Player player, int stateIn, float number) {
ClientboundGameEventPacket packet = new ClientboundGameEventPacket(new ClientboundGameEventPacket.Type(stateIn), number);
((ServerPlayer)player).connection.send(packet);
return true;
}
private boolean sendFreezePacket(Player player) {
ServerPlayer sp = ((ServerPlayer)player);
ClientboundRespawnPacket packet = new ClientboundRespawnPacket(
sp.level.dimensionType(),
sp.level.dimension(),
player.world().seed(),
sp.gameMode.getGameModeForPlayer(),
sp.gameMode.getPreviousGameModeForPlayer(),
false,
false,
true
);
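// The respawn packet resets client-side state, so rebuild and resend the inventory contents
// (plus the carried item) to keep the client display in sync.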
NonNullList<ItemStack> nonNullList = NonNullList.create();
for(int i = 0; i < sp.inventoryMenu.slots.size(); ++i) {
nonNullList.add(sp.inventoryMenu.slots.get(i).getItem());
}
sp.connection.send(packet);
sp.connection.send(new ClientboundContainerSetContentPacket(sp.inventoryMenu.containerId, nonNullList));
sp.connection.send(new ClientboundContainerSetSlotPacket(-1, -1, sp.inventory.getCarried()));
return true;
}
}
<|start_filename|>skychanger-sponge/src/main/java/com/dscalzi/skychanger/sponge/internal/WildcardPermissionUtil.java<|end_filename|>
/*
* This file is part of SkyChanger, licensed under the MIT License (MIT).
*
* Copyright (c) 2017-2021 <NAME> <https://github.com/dscalzi/SkyChanger>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dscalzi.skychanger.sponge.internal;
import com.dscalzi.skychanger.core.internal.util.IWildcardPermissionUtil;
import com.dscalzi.skychanger.core.internal.wrap.IPermissible;
import com.dscalzi.skychanger.core.internal.wrap.IWorld;
import org.spongepowered.api.SystemSubject;
import org.spongepowered.api.service.permission.Subject;
import java.util.Map;
import java.util.function.Predicate;
public class WildcardPermissionUtil extends IWildcardPermissionUtil {
@Override
public boolean hasGeneralChangeskyWorldPerm(IPermissible p) {
return hasGeneralPerm(p, CWORLDPERM);
}
@Override
public boolean hasGeneralFreezeWorldPerm(IPermissible p) {
return hasGeneralPerm(p, FWORLDPERM);
}
@Override
public boolean hasGeneralChangeskyRadiusPerm(IPermissible p) {
return hasGeneralPerm(p, CRADIUSPERM);
}
@Override
public boolean hasGeneralFreezeRadiusPerm(IPermissible p) {
return hasGeneralPerm(p, FRADIUSPERM);
}
private static boolean hasGeneralPerm(IPermissible ip, String perm) {
Subject p = (Subject)ip.getOriginal();
if(p instanceof SystemSubject) {
return true;
}
for (Map<String, Boolean> d : p.subjectData().allPermissions().values()) {
for(Map.Entry<String, Boolean> s : d.entrySet()) {
if(s.getKey().toLowerCase().startsWith(perm)) {
if(s.getValue()) {
return true;
}
}
}
}
return p.hasPermission(perm);
}
@Override
public boolean hasChangeskyWorldPerm(IPermissible p, IWorld w) {
return hasWorldPerm(p, w, CWORLDPERM);
}
@Override
public boolean hasFreezeWorldPerm(IPermissible p, IWorld w) {
return hasWorldPerm(p, w, FWORLDPERM);
}
private static boolean hasWorldPerm(IPermissible p, IWorld w, String perm) {
return hasPerm(p, (s) -> s.getKey().substring(perm.length() + 1).equals(w.getName()), perm)
|| p.hasPermission(perm);
}
@Override
public boolean hasChangeskyRadiusPerm(IPermissible p, double radius) {
return hasRadiusPerm(p, radius, CRADIUSPERM);
}
@Override
public boolean hasFreezeRadiusPerm(IPermissible p, double radius) {
return hasRadiusPerm(p, radius, FRADIUSPERM);
}
public static boolean hasRadiusPerm(IPermissible p, double radius, String perm) {
return hasPerm(p, (s) -> {
try {
double radiusLimit = Double.parseDouble(s.getKey().substring(perm.length() + 1));
return radius <= radiusLimit;
} catch (NumberFormatException e) {
// Malformed permission.
return false;
}
}, perm) || p.hasPermission(perm);
}
private static boolean hasPerm(IPermissible ip, Predicate<Map.Entry<String, Boolean>> hasSpecificPermissionTest, String perm) {
Subject p = (Subject)ip.getOriginal();
if(p instanceof SystemSubject) {
return true;
}
boolean canByRight = false;
for (Map<String, Boolean> d : p.subjectData().allPermissions().values()) {
for(Map.Entry<String, Boolean> s : d.entrySet()) {
final String effective = s.getKey().toLowerCase();
if (effective.equals(perm)) {
canByRight = s.getValue();
} else if (effective.contains(perm) && hasSpecificPermissionTest.test(s)) {
return s.getValue();
}
}
}
return canByRight;
}
public static String changeskyBasePerm() {
return CWORLDPERM;
}
public static String freezeBasePerm() {
return FWORLDPERM;
}
}
<|start_filename|>skychanger-bukkit/src/main/java/com/dscalzi/skychanger/bukkit/api/SkyChanger.java<|end_filename|>
/*
* This file is part of SkyChanger, licensed under the MIT License (MIT).
*
* Copyright (c) 2017-2021 <NAME> <https://github.com/dscalzi/SkyChanger>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dscalzi.skychanger.bukkit.api;
import com.dscalzi.skychanger.bukkit.SkyChangerPlugin;
import com.dscalzi.skychanger.bukkit.internal.SkyChangeImpl;
import com.dscalzi.skychanger.bukkit.internal.wrap.BukkitPlayer;
import com.dscalzi.skychanger.core.api.SkyAPI;
import com.dscalzi.skychanger.core.internal.wrap.IPlayer;
import org.bukkit.entity.Player;
/**
* Utility class to obtain references to components of SkyChanger.
*/
public class SkyChanger {
private static final SkyAPI api = new SkyChangeImpl();
/**
* Get the SkyChanger plugin. If SkyChanger is not loaded yet, then this will
* return null.
* <p>
* If you are depending on SkyChanger in your plugin, you should place
* <code>softdepend: [SkyChanger]</code> or <code>depend: [SkyChanger]</code> in
* your plugin.yml so that this won't return null for you.
*
* @return the SkyChanger plugin if it is loaded, otherwise null.
*/
@SuppressWarnings("unused")
public static SkyChangerPlugin getPlugin() {
return SkyChangerPlugin.inst();
}
/**
* Get an instance of the SkyChanger API.
*
* @return An instance of the SkyChanger API.
*/
public static SkyAPI getAPI() {
return api;
}
/**
* Wrap a player instance to be sent to the API.
*
* @param p The player to be wrapped.
*
* @return A wrapped IPlayer instance of the provided player.
*/
@SuppressWarnings("unused")
public static IPlayer wrapPlayer(Player p) {
return BukkitPlayer.of(p);
}
}
<|start_filename|>skychanger-core/src/main/java/com/dscalzi/skychanger/core/internal/command/CommandAdapter.java<|end_filename|>
/*
* This file is part of SkyChanger, licensed under the MIT License (MIT).
*
* Copyright (c) 2017-2021 <NAME> <https://github.com/dscalzi/SkyChanger>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dscalzi.skychanger.core.internal.command;
import com.dscalzi.skychanger.core.api.SkyAPI;
import com.dscalzi.skychanger.core.api.SkyPacket;
import com.dscalzi.skychanger.core.internal.wrap.ICommandSender;
import com.dscalzi.skychanger.core.internal.wrap.IOfflinePlayer;
import com.dscalzi.skychanger.core.internal.wrap.IPlayer;
import com.dscalzi.skychanger.core.internal.manager.MessageManager;
import com.dscalzi.skychanger.core.internal.util.IWildcardPermissionUtil;
import com.dscalzi.skychanger.core.internal.wrap.ICommandBlock;
import com.dscalzi.skychanger.core.internal.wrap.ILocation;
import com.dscalzi.skychanger.core.internal.wrap.IPlugin;
import com.dscalzi.skychanger.core.internal.wrap.IWorld;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
public class CommandAdapter {
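// Matches a signed decimal number, optionally in scientific notation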
private static final Pattern packetNum = Pattern.compile("[-+]?[0-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?");
private MessageManager mm;
private final IPlugin plugin;
public CommandAdapter(IPlugin plugin) {
this.plugin = plugin;
}
public boolean resolve(ICommandSender sender, String[] args) {
this.mm = MessageManager.getInstance();
if (args.length > 0) {
if (packetNum.matcher(args[0]).matches()) {
this.cmdChangeSky(sender, args);
return true;
}
if (args[0].equalsIgnoreCase("help")) {
mm.helpMessage(sender);
return true;
}
if (args[0].equalsIgnoreCase("freeze")) {
this.cmdFreeze(sender, false, args);
return true;
}
if (args[0].equalsIgnoreCase("unfreeze")) {
this.cmdFreeze(sender, true, args);
return true;
}
if (args[0].equalsIgnoreCase("version")) {
this.cmdVersion(sender);
return true;
}
if (args[0].equalsIgnoreCase("reload")) {
this.cmdReload(sender);
return true;
}
}
mm.helpMessage(sender);
return false;
}
private void cmdChangeSky(ICommandSender sender, String[] args) {
final IWildcardPermissionUtil wpu = plugin.getWildcardPermissionUtil();
final String basePerm = "skychanger.changesky";
boolean s = sender.hasPermission(basePerm + ".self");
boolean o = sender.hasPermission(basePerm + ".others");
boolean a = sender.hasPermission(basePerm + ".all");
boolean w = wpu.hasGeneralChangeskyWorldPerm(sender);
boolean r = wpu.hasGeneralChangeskyRadiusPerm(sender);
if (!s && !o && !a && !w && !r) {
mm.noPermission(sender);
return;
}
float rainLevel;
Float thunderLevel = null;
try {
rainLevel = Float.parseFloat(args[0]);
} catch (NumberFormatException e) {
mm.floatingPointOverflow(sender, args[0]);
return;
}
if(args.length > 1) {
try {
thunderLevel = Float.valueOf(args[1]);
} catch (NumberFormatException ignored) {
// Not specified
}
}
// shifted right when thunderLevel is present
int flagPos = thunderLevel == null ? 1 : 2;
int valPos = flagPos+1;
if (!sender.hasPermission("skychanger.bypasslimit")) {
float upper = plugin.getConfigManager().getUpperLimit();
float lower = plugin.getConfigManager().getLowerLimit();
if (rainLevel > upper || (thunderLevel != null && thunderLevel > upper)) {
mm.outOfBoundsUpper(sender, upper);
return;
}
if (lower > rainLevel || (thunderLevel != null && lower > thunderLevel)) {
mm.outOfBoundsLower(sender, lower);
return;
}
}
final SkyAPI api = plugin.getAPI();
if (args.length > flagPos) {
// Check if requested for all
if (args[flagPos].equalsIgnoreCase("-a")) {
if (!a) {
mm.noPermission(sender);
return;
}
for (IPlayer p : plugin.getOnlinePlayers()) {
api.changeSky(p, SkyPacket.RAIN_LEVEL_CHANGE, rainLevel);
if(thunderLevel != null) {
api.changeSky(p, SkyPacket.THUNDER_LEVEL_CHANGE, thunderLevel);
}
}
mm.packetSent(sender, "-a (" + mm.getString("message.everyone") + ")");
return;
}
// Check if requested for world
if (args[flagPos].equalsIgnoreCase("-w")) {
IWorld t;
if (args.length > valPos) {
t = plugin.getWorld(args[valPos]);
if (t == null) {
mm.worldDoesntExist(sender, args[valPos]);
return;
}
} else {
if (!(sender.isPlayer())) {
mm.mustSpecifyWorld(sender);
return;
}
t = ((IPlayer) sender).getWorld();
}
if (!wpu.hasChangeskyWorldPerm(sender, t)) {
mm.noPermission(sender);
return;
}
for (IPlayer p : t.getPlayers()) {
api.changeSky(p, SkyPacket.RAIN_LEVEL_CHANGE, rainLevel);
if(thunderLevel != null) {
api.changeSky(p, SkyPacket.THUNDER_LEVEL_CHANGE, thunderLevel);
}
}
mm.packetSent(sender, mm.getString("message.allPlayersIn") + " " + t.getName());
return;
}
// Check if requested for radius
if (args[flagPos].equalsIgnoreCase("-r")) {
if (sender.isConsole()) {
MessageManager.getInstance().denyNonPlayer(sender);
return;
}
if(args.length > valPos) {
double radius;
double radiusSq;
try {
radius = Double.parseDouble(args[valPos]);
if (!wpu.hasChangeskyRadiusPerm(sender, radius)) {
mm.noPermission(sender);
return;
}
radiusSq = Math.pow(radius, 2);
} catch (NumberFormatException e) {
MessageManager.getInstance().radiusFormatError(sender);
return;
}
ILocation origin;
if (sender.isPlayer()) {
origin = ((IPlayer)sender).getLocation();
} else if (sender.isCommandBlock()) {
origin = ((ICommandBlock)sender).getLocation();
} else {
MessageManager.getInstance().denyNonPlayer(sender);
return;
}
for(IPlayer p : origin.getWorld().getPlayers()) {
if(Math.abs(origin.distanceSquared(p.getLocation())) <= radiusSq) {
api.changeSky(p, SkyPacket.RAIN_LEVEL_CHANGE, rainLevel);
if(thunderLevel != null) {
api.changeSky(p, SkyPacket.THUNDER_LEVEL_CHANGE, thunderLevel);
}
}
}
mm.packetSent(sender, mm.getString("message.allPlayersInRadius") + " " + args[valPos]);
return;
} else {
mm.mustSpecifyRadius(sender);
return;
}
}
// Check if param is a player
if (!o) {
mm.noPermission(sender);
return;
}
IOfflinePlayer target;
try {
target = plugin.getOfflinePlayer(MessageManager.formatFromInput(args[flagPos]));
} catch (IllegalArgumentException e) {
target = plugin.getOfflinePlayer(args[flagPos]);
}
if (target == null || !target.isOnline()) {
mm.playerNotFound(sender, target == null || target.getName() == null ? args[flagPos] : target.getName());
return;
}
// If a player specified their own name, we run the command as if the player
// param was not
// given. The others permission therefore includes the self.
if (!(sender.isPlayer()) || !target.getUniqueId().equals(((IPlayer) sender).getUniqueId())) {
boolean res = api.changeSky(target.getPlayer(), SkyPacket.RAIN_LEVEL_CHANGE, rainLevel);
if(thunderLevel != null) {
res = res && api.changeSky(target.getPlayer(), SkyPacket.THUNDER_LEVEL_CHANGE, thunderLevel);
}
if (res)
mm.packetSent(sender, target.getName());
else
mm.packetError(sender, target.getName());
return;
}
}
if (!(sender.isPlayer())) {
MessageManager.getInstance().denyNonPlayer(sender);
return;
}
boolean res = api.changeSky((IPlayer) sender, SkyPacket.RAIN_LEVEL_CHANGE, rainLevel);
if(thunderLevel != null) {
res = res && api.changeSky((IPlayer) sender, SkyPacket.THUNDER_LEVEL_CHANGE, thunderLevel);
}
if (res)
mm.packetSent(sender);
else
mm.packetError(sender);
}
private void cmdFreeze(ICommandSender sender, boolean unfreeze, String[] args) {
final IWildcardPermissionUtil wpu = plugin.getWildcardPermissionUtil();
final String basePerm = "skychanger.freeze";
boolean s = sender.hasPermission(basePerm + ".self");
boolean o = sender.hasPermission(basePerm + ".others");
boolean a = sender.hasPermission(basePerm + ".all");
boolean w = wpu.hasGeneralFreezeWorldPerm(sender);
boolean r = wpu.hasGeneralFreezeRadiusPerm(sender);
if (!s && !o && !a && !w && !r) {
mm.noPermission(sender);
return;
}
final SkyAPI api = plugin.getAPI();
if (args.length > 1) {
// Check if requested for all
if (args[1].equalsIgnoreCase("-a")) {
if (!a) {
mm.noPermission(sender);
return;
}
for (IPlayer p : plugin.getOnlinePlayers()) {
if (unfreeze)
api.unfreeze(p);
else
api.freeze(p);
}
if (unfreeze)
mm.packetUnfreeze(sender, "-a (" + mm.getString("message.everyone") + ")");
else
mm.packetSent(sender, "@a (" + mm.getString("message.everyone") + ")");
return;
}
// Check if requested for world
if (args[1].equalsIgnoreCase("-w")) {
IWorld t;
if (args.length > 2) {
t = plugin.getWorld(args[2]);
if (t == null) {
mm.worldDoesntExist(sender, args[2]);
return;
}
} else {
if (!(sender.isPlayer())) {
mm.mustSpecifyWorld(sender);
return;
}
t = ((IPlayer) sender).getWorld();
}
if (!wpu.hasFreezeWorldPerm(sender, t)) {
mm.noPermission(sender);
return;
}
for (IPlayer p : t.getPlayers()) {
if (unfreeze)
api.unfreeze(p);
else
api.freeze(p);
}
if (unfreeze)
mm.packetUnfreeze(sender, mm.getString("message.allPlayersIn") + " " + t.getName());
else
mm.packetSent(sender, mm.getString("message.allPlayersIn") + " " + t.getName());
return;
}
// Check if requested for radius
if (args[1].equalsIgnoreCase("-r")) {
if (sender.isConsole()) {
MessageManager.getInstance().denyNonPlayer(sender);
return;
}
if(args.length > 2) {
double radius;
double radiusSq;
try {
radius = Double.parseDouble(args[2]);
if (!wpu.hasFreezeRadiusPerm(sender, radius)) {
mm.noPermission(sender);
return;
}
radiusSq = Math.pow(radius, 2);
} catch (NumberFormatException e) {
MessageManager.getInstance().radiusFormatError(sender);
return;
}
ILocation origin;
if (sender.isPlayer()) {
origin = ((IPlayer)sender).getLocation();
} else if (sender.isCommandBlock()) {
origin = ((ICommandBlock)sender).getLocation();
} else {
MessageManager.getInstance().denyNonPlayer(sender);
return;
}
for(IPlayer p : origin.getWorld().getPlayers()) {
if(Math.abs(origin.distanceSquared(p.getLocation())) <= radiusSq) {
if (unfreeze)
api.unfreeze(p);
else
api.freeze(p);
}
}
if (unfreeze)
mm.packetUnfreeze(sender, mm.getString("message.allPlayersInRadius") + " " + args[2]);
else
mm.packetSent(sender, mm.getString("message.allPlayersInRadius") + " " + args[2]);
return;
} else {
mm.mustSpecifyRadius(sender);
return;
}
}
// Check if param is a player
if (!o) {
mm.noPermission(sender);
return;
}
IOfflinePlayer target;
try {
target = plugin.getOfflinePlayer(MessageManager.formatFromInput(args[1]));
} catch (IllegalArgumentException e) {
target = plugin.getOfflinePlayer(args[1]);
}
if (target == null || !target.isOnline()) {
mm.playerNotFound(sender, target == null || target.getName() == null ? args[1] : target.getName());
return;
}
// If a player specified their own name, we run the command as if the player
// param was not
// given. The others permission therefore includes the self.
if (!(sender.isPlayer()) || !target.getUniqueId().equals(((IPlayer) sender).getUniqueId())) {
if ((!unfreeze && api.freeze(target.getPlayer()))
|| (unfreeze && target.getPlayer().teleport(target.getPlayer().getLocation())))
if (unfreeze)
mm.packetUnfreeze(sender, target.getName());
else
mm.packetSent(sender, target.getName());
else
mm.packetError(sender, target.getName());
return;
}
}
if (!(sender.isPlayer())) {
MessageManager.getInstance().denyNonPlayer(sender);
return;
}
IPlayer p = (IPlayer) sender;
if ((!unfreeze && api.freeze(p)) || (unfreeze && api.unfreeze(p)))
if (unfreeze)
mm.packetUnfreeze(sender);
else
mm.packetSent(sender);
else
mm.packetError(sender);
}
private void cmdReload(ICommandSender sender) {
if (!sender.hasPermission("skychanger.reload")) {
mm.noPermission(sender);
return;
}
if (plugin.getConfigManager().reload()) {
MessageManager.reload();
mm.reloadSuccessful(sender);
} else
mm.reloadFailed(sender);
}
private void cmdVersion(ICommandSender sender) {
mm.versionMessage(sender);
}
public List<String> tabComplete(ICommandSender sender, String[] args) {
final IWildcardPermissionUtil wpu = plugin.getWildcardPermissionUtil();
List<String> ret = new ArrayList<>();
boolean b = sender.hasPermission("skychanger.freeze.self") || sender.hasPermission("skychanger.freeze.others")
|| sender.hasPermission("skychanger.freeze.all") || wpu.hasGeneralFreezeWorldPerm(sender)
|| wpu.hasGeneralFreezeRadiusPerm(sender);
if (args.length == 1) {
if ("help".startsWith(args[0].toLowerCase()))
ret.add("help");
if (b && "freeze".startsWith(args[0].toLowerCase()))
ret.add("freeze");
if (b && "unfreeze".startsWith(args[0].toLowerCase()))
ret.add("unfreeze");
if ("version".startsWith(args[0].toLowerCase()))
ret.add("version");
if (sender.hasPermission("skychanger.reload") && "reload".startsWith(args[0].toLowerCase()))
ret.add("reload");
}
// isTweak = command is /skychanger # #
// args are shifted right.
final boolean isTweak = args.length >= 3 && packetNum.matcher(args[1]).matches();
final boolean isChangeSkyOrFreeze = !isTweak && args.length >= 2 && (packetNum.matcher(args[0]).matches() || args[0].equalsIgnoreCase("freeze")
|| args[0].equalsIgnoreCase("unfreeze"));
final int flagPos = isTweak ? 2 : 1;
if ((args.length == 3 && isTweak) || (args.length == 2 && isChangeSkyOrFreeze)) {
// Players
if (sender.hasPermission("skychanger.changesky.others") || sender.hasPermission("skychanger.freeze.others"))
plugin.getOnlinePlayers().forEach(player -> {
if (player.getName().toLowerCase().startsWith(args[flagPos].toLowerCase())) {
ret.add(player.getName());
}
});
// All flag
if ((sender.hasPermission("skychanger.changesky.all") || sender.hasPermission("skychanger.freeze.all"))
&& "-a".startsWith(args[flagPos].toLowerCase())) {
ret.add("-a");
}
// World flag
if (args[0].equalsIgnoreCase("freeze") || args[0].equalsIgnoreCase("unfreeze")) {
if ("-w".startsWith(args[flagPos].toLowerCase()) && wpu.hasGeneralFreezeWorldPerm(sender)) {
ret.add("-w");
}
} else {
if ("-w".startsWith(args[flagPos].toLowerCase()) && wpu.hasGeneralChangeskyWorldPerm(sender)) {
ret.add("-w");
}
}
// Radius flag
if (args[0].equalsIgnoreCase("freeze") || args[0].equalsIgnoreCase("unfreeze")) {
if ("-r".startsWith(args[flagPos].toLowerCase()) && wpu.hasGeneralFreezeRadiusPerm(sender)) {
ret.add("-r");
}
} else {
if ("-r".startsWith(args[flagPos].toLowerCase()) && wpu.hasGeneralChangeskyRadiusPerm(sender)) {
ret.add("-r");
}
}
}
// World names
if ((isChangeSkyOrFreeze && args.length == 3) || (isTweak && args.length == 4)) {
if(args[flagPos].equalsIgnoreCase("-w")) {
if (isTweak || packetNum.matcher(args[0]).matches()) {
for (IWorld w : plugin.getWorlds()) {
if (w.getName().toLowerCase().startsWith(args[flagPos + 1].toLowerCase())
&& wpu.hasChangeskyWorldPerm(sender, w)) {
ret.add(w.getName());
}
}
} else if (args[0].equalsIgnoreCase("freeze") || args[0].equalsIgnoreCase("unfreeze")) {
for (IWorld w : plugin.getWorlds()) {
if (w.getName().toLowerCase().startsWith(args[flagPos + 1].toLowerCase())
&& wpu.hasFreezeWorldPerm(sender, w)) {
ret.add(w.getName());
}
}
}
}
}
return ret;
}
}
<|start_filename|>skychanger-bukkit/src/main/java/com/dscalzi/skychanger/bukkit/internal/wrap/BukkitPlayer.java<|end_filename|>
/*
* This file is part of SkyChanger, licensed under the MIT License (MIT).
*
* Copyright (c) 2017-2021 <NAME> <https://github.com/dscalzi/SkyChanger>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dscalzi.skychanger.bukkit.internal.wrap;
import com.dscalzi.skychanger.core.internal.wrap.ILocation;
import com.dscalzi.skychanger.core.internal.wrap.IPlayer;
import com.dscalzi.skychanger.core.internal.wrap.IWorld;
import org.bukkit.Location;
import org.bukkit.entity.Player;
import java.util.UUID;
public class BukkitPlayer extends BukkitCommandSender implements IPlayer {
private final Player p;
private BukkitPlayer(Player player) {
super(player);
this.p = player;
}
public static BukkitPlayer of(Player player) {
return player == null ? null : new BukkitPlayer(player);
}
@Override
public Object getOriginal() {
return p;
}
@Override
public IWorld getWorld() {
return BukkitWorld.of(p.getWorld());
}
@Override
public ILocation getLocation() {
return BukkitLocation.of(p.getLocation());
}
@Override
public boolean teleport(ILocation loc) {
return p.teleport((Location)loc.getOriginal());
}
@Override
public UUID getUniqueId() {
return p.getUniqueId();
}
@Override
public boolean isOnline() {
return p.isOnline();
}
@Override
public IPlayer getPlayer() {
return this;
}
@Override
public String getName() {
return p.getName();
}
}
<|start_filename|>skychanger-sponge/src/main/java/com/dscalzi/skychanger/sponge/internal/MainExecutor.java<|end_filename|>
/*
* This file is part of SkyChanger, licensed under the MIT License (MIT).
*
* Copyright (c) 2017-2021 <NAME> <https://github.com/dscalzi/SkyChanger>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dscalzi.skychanger.sponge.internal;
import com.dscalzi.skychanger.core.internal.command.CommandAdapter;
import com.dscalzi.skychanger.sponge.SkyChangerPlugin;
import com.dscalzi.skychanger.sponge.internal.wrap.SpongeCommandSender;
import net.kyori.adventure.text.Component;
import org.spongepowered.api.command.*;
import org.spongepowered.api.command.parameter.ArgumentReader;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
public class MainExecutor implements Command.Raw {
private final CommandAdapter adapter;
public MainExecutor(SkyChangerPlugin plugin) {
this.adapter = new CommandAdapter(plugin);
}
@Override
public CommandResult process(CommandCause cause, ArgumentReader.Mutable arguments) {
String argStr = arguments.remaining();
final String[] args = argStr.isEmpty() ? new String[0] : argStr.replaceAll("\\s{2,}", " ").split(" ");
adapter.resolve(SpongeCommandSender.of(cause), args);
return CommandResult.success();
}
@Override
public List<CommandCompletion> complete(CommandCause cause, ArgumentReader.Mutable arguments) {
String argStr = arguments.remaining();
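// A trailing space means the user has started a new, still-empty argument, so an extra empty string is appended below for completion.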
String[] argsDirty = argStr.replaceAll("\\s{2,}", " ").split(" ");
String[] args = argStr.endsWith(" ") ? new String[argsDirty.length + 1] : argsDirty;
if(args != argsDirty) {
System.arraycopy(argsDirty, 0, args, 0, argsDirty.length);
args[args.length-1] = "";
}
return adapter.tabComplete(SpongeCommandSender.of(cause), args).stream().map(CommandCompletion::of).collect(Collectors.toList());
}
@Override
public boolean canExecute(CommandCause cause) {
return true;
}
@Override
public Optional<Component> shortDescription(CommandCause cause) {
return Optional.of(Component.text("Change the color of the sky."));
}
@Override
public Optional<Component> extendedDescription(CommandCause cause) {
return Optional.of(Component.text("Change the color of the sky."));
}
@Override
public Optional<Component> help(@SuppressWarnings("NullableProblems") CommandCause cause) {
Component t = Component.text("Run /SkyChanger to view usage.");
return Optional.of(t);
}
@Override
public Component usage(CommandCause cause) {
return Component.text("/SkyChanger <args>");
}
}
<|start_filename|>skychanger-sponge/src/main/java/com/dscalzi/skychanger/sponge/internal/managers/ConfigManager.java<|end_filename|>
/*
* This file is part of SkyChanger, licensed under the MIT License (MIT).
*
* Copyright (c) 2017-2021 <NAME> <https://github.com/dscalzi/SkyChanger>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.dscalzi.skychanger.sponge.internal.managers;
import com.dscalzi.skychanger.core.internal.manager.IConfigManager;
import com.dscalzi.skychanger.sponge.SkyChangerPlugin;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.asset.Asset;
import org.spongepowered.configurate.CommentedConfigurationNode;
import java.io.File;
import java.io.IOException;
public class ConfigManager implements IConfigManager {
private static boolean initialized;
private static ConfigManager instance;
// TODO Will be implemented in a later version
private final double configVersion = 1.0;
private SkyChangerPlugin plugin;
private CommentedConfigurationNode config;
private ConfigManager(SkyChangerPlugin plugin) {
this.plugin = plugin;
loadConfig();
}
public void loadConfig() {
boolean res = verifyFile();
if(res) {
try {
this.config = this.plugin.getConfigLoader().load();
} catch (IOException e) {
plugin.severe("Failed to load config.");
e.printStackTrace();
}
} else {
this.config = null;
}
}
public boolean verifyFile() {
Asset asset = Sponge.assetManager().asset(plugin.getPlugin(), "skychanger.conf").orElse(null);
File file = plugin.getConfigDir().resolve("skychanger.conf").toFile();
if (!file.exists()) {
if(asset != null) {
try {
asset.copyToFile(file.toPath());
return true;
} catch (IOException e) {
plugin.severe("Failed to save default config.");
e.printStackTrace();
return false;
}
} else {
plugin.severe("Failed to locate default config.");
return false;
}
}
return true;
}
public static void initialize(SkyChangerPlugin plugin) {
if (!initialized) {
instance = new ConfigManager(plugin);
initialized = true;
}
}
public static boolean reloadStatic() {
if (!initialized)
return false;
return getInstance().reload();
}
@Override
public boolean reload() {
try {
loadConfig();
return true;
} catch (Exception e) {
e.printStackTrace();
return false;
}
}
public static ConfigManager getInstance() {
return ConfigManager.instance;
}
/* Configuration Accessors */
@Override
public float getUpperLimit() {
if(config == null) {
return 50.0F;
} else {
return Float.parseFloat(config.node("general_settings", "upper_limit").getString("50.0"));
}
}
@Override
public float getLowerLimit() {
if(config == null) {
return -50.0F;
} else {
return Float.parseFloat(config.node("general_settings", "lower_limit").getString("-50.0"));
}
}
@Override
public String getLanguage() {
if(config == null) {
return "en_US";
} else {
return config.node("general_settings", "language").getString("en_US");
}
}
@Override
public double getSystemConfigVersion() {
return this.configVersion;
}
@Override
public double getConfigVersion() {
if(config == null) {
return getSystemConfigVersion();
} else {
return config.node("ConfigVersion").getDouble(getSystemConfigVersion());
}
}
}
| dscalzi/SkyChanger |
<|start_filename|>composer.json<|end_filename|>
{
"name": "components/mathjs",
"description": "Math.js is an extensive math library for JavaScript and Node.js. It features a flexible expression parser with support for symbolic computation, comes with a large set of built-in functions and constants, and offers an integrated solution to work with different data types like numbers, big numbers, complex numbers, fractions, units, and matrices.",
"type": "component",
"homepage": "http://mathjs.org",
"license": "Apache-2.0",
"support": {
"issues": "https://github.com/josdejong/mathjs/issues",
"source": "https://github.com/josdejong/mathjs/",
"docs": "http://mathjs.org/docs"
},
"authors": [
{
"name": "<NAME>",
"email": "<EMAIL>"
}
],
"extra": {
"component": {
"scripts": [
"./dist/math.js"
],
"files": [
"./dist/math.min.js",
"./dist/math.map"
]
}
}
} | bnlcas/mathjs |
<|start_filename|>include/graphics.h<|end_filename|>
#pragma once
// Extracts the upper 16 bits of a 32-bit number
#define HIHALF(n) (((n) & 0xFFFF0000) >> 16)
// Extracts the lower 16 bits of a 32-bit number
#define LOHALF(n) ((n) & 0xFFFF)
struct MonCoords
{
// This would use a bitfield, but some function
// uses it as a u8 and casting won't match.
u8 size; // u8 width:4, height:4;
u8 y_offset;
u16 unused;
};
struct CompressedSpriteSheet
{
const u8* data; // LZ77 compressed pixel data
u16 size; // Uncompressed size of pixel data
u16 tag;
};
struct CompressedSpritePalette
{
const u8* data; // LZ77 compressed palette data
u16 tag;
u16 unused;
};
void __attribute__((long_call)) LZ77UnCompWram(const void *src, void *dest);
void __attribute__((long_call)) DrawSpindaSpots(u16, u32, u8*, u8);
<|start_filename|>include/base_stats.h<|end_filename|>
#pragma once
struct BaseStats
{
/* 0x00 */ u8 baseHP;
/* 0x01 */ u8 baseAttack;
/* 0x02 */ u8 baseDefense;
/* 0x03 */ u8 baseSpeed;
/* 0x04 */ u8 baseSpAttack;
/* 0x05 */ u8 baseSpDefense;
/* 0x06 */ u8 type1;
/* 0x07 */ u8 type2;
/* 0x08 */ u8 catchRate;
/* 0x09 */ u8 expYield;
/* 0x0A */ u16 evYield_HP:2;
/* 0x0A */ u16 evYield_Attack:2;
/* 0x0A */ u16 evYield_Defense:2;
/* 0x0A */ u16 evYield_Speed:2;
/* 0x0B */ u16 evYield_SpAttack:2;
/* 0x0B */ u16 evYield_SpDefense:2;
/* 0x0C */ u16 item1;
/* 0x0E */ u16 item2;
/* 0x10 */ u8 genderRatio;
/* 0x11 */ u8 eggCycles;
/* 0x12 */ u8 friendship;
/* 0x13 */ u8 growthRate;
/* 0x14 */ u8 eggGroup1;
/* 0x15 */ u8 eggGroup2;
/* 0x16 */ u8 ability1;
/* 0x17 */ u8 ability2;
/* 0x18 */ u8 safariZoneFleeRate;
/* 0x19 */ u8 bodyColor : 7;
u8 noFlip : 1;
/* 0x1A */ u8 hiddenAbility;
};
enum
{
EGG_GROUP_NONE,
EGG_GROUP_MONSTER,
EGG_GROUP_WATER_1,
EGG_GROUP_BUG,
EGG_GROUP_FLYING,
EGG_GROUP_FIELD,
EGG_GROUP_FAIRY,
EGG_GROUP_GRASS,
EGG_GROUP_HUMAN_LIKE,
EGG_GROUP_WATER_3,
EGG_GROUP_MINERAL,
EGG_GROUP_AMORPHOUS,
EGG_GROUP_WATER_2,
EGG_GROUP_DITTO,
EGG_GROUP_DRAGON,
EGG_GROUP_UNDISCOVERED
};
enum
{
GROWTH_MEDIUM_FAST,
GROWTH_ERRATIC,
GROWTH_FLUCTUATING,
GROWTH_MEDIUM_SLOW,
GROWTH_FAST,
GROWTH_SLOW
};
enum
{
BODY_COLOR_RED,
BODY_COLOR_BLUE,
BODY_COLOR_YELLOW,
BODY_COLOR_GREEN,
BODY_COLOR_BLACK,
BODY_COLOR_BROWN,
BODY_COLOR_PURPLE,
BODY_COLOR_GRAY,
BODY_COLOR_WHITE,
BODY_COLOR_PINK
};
#define TYPE_NORMAL 0x00
#define TYPE_FIGHTING 0x01
#define TYPE_FLYING 0x02
#define TYPE_POISON 0x03
#define TYPE_GROUND 0x04
#define TYPE_ROCK 0x05
#define TYPE_BUG 0x06
#define TYPE_GHOST 0x07
#define TYPE_STEEL 0x08
#define TYPE_MYSTERY 0x09
#define TYPE_FIRE 0x0a
#define TYPE_WATER 0x0b
#define TYPE_GRASS 0x0c
#define TYPE_ELECTRIC 0x0d
#define TYPE_PSYCHIC 0x0e
#define TYPE_ICE 0x0f
#define TYPE_DRAGON 0x10
#define TYPE_DARK 0x11
#define TYPE_ROOSTLESS 0x13
#define TYPE_BLANK 0x14
#define TYPE_FAIRY 0x17
#define MON_MALE 0x00
#define MON_FEMALE 0xFE
#define MON_GENDERLESS 0xFF
<|start_filename|>src/defines.h<|end_filename|>
#pragma once
#include "../include/types.h"
#include "../include/species.h"
#define EVOS_PER_MON 16
#define FALSE 0
#define TRUE 1
#define ARRAY_COUNT(array) (sizeof(array) / sizeof((array)[0]))
#define SPECIES_TABLES_TERMIN 0xFEFE
//CHANGE THESE IF YOU WANT
#define EXPAND_LEARNSETS //If you're using this feature, make sure you insert the Complete Fire Red Upgrade
// afterwards or the game will crash when selecting certain Pokemon. Comment out this
// line if you're using the CFRU to expand movesets.
//#define INCLUDE_FOOTPRINTS //If you uncomment this line, make sure to uncomment gMonFootprintTable in "repoints", and remove the footprint remover in "bytereplacement"
#define NUM_TMSHMS 128
#define NUM_MOVE_TUTOR_MOVES 128
<|start_filename|>src/updated_code.c<|end_filename|>
#include "defines.h"
#include "../include/evolution.h"
#include "../include/graphics.h"
#include "../include/main.h"
#include "../include/pokedex.h"
//Backsprite battle start
extern const u8 gSpeciesNames[][POKEMON_NAME_LENGTH + 1];
extern const u16 gSpeciesIdToCryId[];
extern const u16 gSpeciesToNationalPokedexNum[];
extern const u16 gPokedexOrder_Regional[];
extern const u16 gRegionalDexCount;
extern const u16 gPokedexOrder_Alphabetical[];
extern const u16 gPokedexOrderAlphabeticalCount;
extern const u16 gPokedexOrder_Weight[];
extern const u16 gPokedexOrderWeightCount;
extern const u16 gPokedexOrder_Height[];
extern const u16 gPokedexOrderHeightCount;
extern const u16 gPokedexOrder_Type[];
extern const u16 gPokedexOrderTypeCount;
extern const struct AlternateDexEntries gAlternateDexEntries[];
extern const struct CompressedSpriteSheet gMonBackPicTable[];
extern const struct CompressedSpriteSheet gMonFrontPicTable[];
extern const struct CompressedSpritePalette gMonPaletteTable[];
extern const struct CompressedSpritePalette gMonShinyPaletteTable[];
const u16 gNumSpecies = NUM_SPECIES;
const u16 gNumDexEntries = FINAL_DEX_ENTRY;
u8 __attribute__((long_call)) GetGenderFromSpeciesAndPersonality(u16 species, u32 personality);
u8 __attribute__((long_call)) GetUnownLetterFromPersonality(u32 personality);
bool8 __attribute__((long_call)) GetSetPokedexFlag(u16 nationalNum, u8 caseID);
s8 __attribute__((long_call)) DexFlagCheck(u16 nationalDexNo, u8 caseId, bool8 indexIsSpecies);
u16 __attribute__((long_call)) SpeciesToNationalPokedexNum(u16 species);
void __attribute__((long_call)) break_func();
//This file's functions
u16 TryGetFemaleGenderedSpecies(u16 species, u32 personality);
static u16 LoadNationalPokedexView(void);
u16 SpeciesToCryId(u16 species)
{
return species + 1;
}
struct PivotalDexSpecies
{
u16 species;
u16 dexNum;
};
static const struct PivotalDexSpecies sPivotalDexSpecies[] =
{
//These species have Pokemon grouped in order after them
{SPECIES_ROWLET, NATIONAL_DEX_ROWLET},
{SPECIES_CHESPIN, NATIONAL_DEX_CHESPIN},
{SPECIES_TURTWIG, NATIONAL_DEX_TURTWIG},
{SPECIES_TREECKO, NATIONAL_DEX_TREECKO},
{SPECIES_BULBASAUR, NATIONAL_DEX_BULBASAUR},
};
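//Maps a national dex number back to its species id, trying the pivotal species table as a shortcut before falling back to a linear scan of gSpeciesToNationalPokedexNum.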
u16 NationalPokedexNumToSpecies(u16 nationalNum)
{
u16 species, i;
if (nationalNum == 0)
return 0;
species = 0;
//Optimization
if (nationalNum <= SPECIES_SHIFTRY || nationalNum >= SPECIES_TURTWIG) //Hoenn Mons are too out of order for this to work
{
for (i = 0; i < ARRAY_COUNT(sPivotalDexSpecies); ++i)
{
if (nationalNum > sPivotalDexSpecies[i].dexNum)
{
u16 difference = nationalNum - sPivotalDexSpecies[i].dexNum;
if (gSpeciesToNationalPokedexNum[sPivotalDexSpecies[i].species + difference - 1] == nationalNum)
return sPivotalDexSpecies[i].species + difference;
break;
}
}
}
while (species < (NUM_SPECIES - 1) && gSpeciesToNationalPokedexNum[species] != nationalNum)
species++;
if (species == NUM_SPECIES - 1)
return 0;
return species + 1;
}
const u8* TryLoadAlternateDexEntry(u16 species)
{
for (int i = 0; gAlternateDexEntries[i].species != SPECIES_TABLES_TERMIN; ++i)
{
if (gAlternateDexEntries[i].species == species)
return gAlternateDexEntries[i].description;
}
return 0;
}
void LoadSpecialPokePic(const struct CompressedSpriteSheet* src, void* dest, u16 species, u32 personality, bool8 isFrontPic)
{
u16 oldSpecies = species;
const struct CompressedSpriteSheet* table = isFrontPic ? gMonFrontPicTable : gMonBackPicTable;
species = TryGetFemaleGenderedSpecies(species, personality);
if (species != oldSpecies) //Updated sprite
src = &table[species];
if (species == SPECIES_UNOWN)
{
u16 i = GetUnownLetterFromPersonality(personality);
// The other Unowns are separate from Unown A.
if (i == 0)
i = SPECIES_UNOWN;
else
i += SPECIES_UNOWN_B - 1;
if (!isFrontPic)
LZ77UnCompWram((void*) gMonBackPicTable[i].data, dest);
else
LZ77UnCompWram((void*) gMonFrontPicTable[i].data, dest);
}
else if (species > NUM_SPECIES) // is species unknown? draw the ? icon
LZ77UnCompWram((void*) gMonFrontPicTable[0].data, dest);
else
LZ77UnCompWram((void*) src->data, dest);
DrawSpindaSpots(species, personality, dest, isFrontPic);
}
const u32* GetFrontSpritePalFromSpeciesAndPersonality(u16 species, u32 otId, u32 personality)
{
u32 shinyValue;
species = TryGetFemaleGenderedSpecies(species, personality);
if (species > NUM_SPECIES)
return (u32*) gMonPaletteTable[0].data;
shinyValue = HIHALF(otId) ^ LOHALF(otId) ^ HIHALF(personality) ^ LOHALF(personality);
if (shinyValue < 8)
return (u32*) gMonShinyPaletteTable[species].data;
else
return (u32*) gMonPaletteTable[species].data;
}
const struct CompressedSpritePalette* GetMonSpritePalStructFromOtIdPersonality(u16 species, u32 otId , u32 personality)
{
u32 shinyValue;
species = TryGetFemaleGenderedSpecies(species, personality);
shinyValue = HIHALF(otId) ^ LOHALF(otId) ^ HIHALF(personality) ^ LOHALF(personality);
if (shinyValue < 8)
return &gMonShinyPaletteTable[species];
else
return &gMonPaletteTable[species];
}
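//Remaps species with gender-specific forms to their female variants when the personality value resolves to female; Xerneas is also swapped to its natural form outside of battle.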
u16 TryGetFemaleGenderedSpecies(u16 species, u32 personality)
{
if (GetGenderFromSpeciesAndPersonality(species, personality) == MON_FEMALE)
{
switch (species) {
case SPECIES_HIPPOPOTAS:
species = SPECIES_HIPPOPOTAS_F;
break;
case SPECIES_HIPPOWDON:
species = SPECIES_HIPPOWDON_F;
break;
case SPECIES_UNFEZANT:
species = SPECIES_UNFEZANT_F;
break;
case SPECIES_FRILLISH:
species = SPECIES_FRILLISH_F;
break;
case SPECIES_JELLICENT:
species = SPECIES_JELLICENT_F;
break;
case SPECIES_PYROAR:
species = SPECIES_PYROAR_FEMALE;
break;
}
}
else if (species == SPECIES_XERNEAS && !gMain.inBattle)
species = SPECIES_XERNEAS_NATURAL;
return species;
}
u16 GetIconSpecies(u16 species, u32 personality)
{
u16 result;
if (species == SPECIES_UNOWN)
{
u16 letter = GetUnownLetterFromPersonality(personality);
if (letter == 0)
letter = SPECIES_UNOWN;
else
letter += (SPECIES_UNOWN_B - 1);
result = letter;
}
else
{
if (species > NUM_SPECIES)
result = 0;
else
result = TryGetFemaleGenderedSpecies(species, personality);
}
return result;
}
bool8 IsInBattle(void)
{
return gMain.inBattle;
}
u16 CountSpeciesInDex(u8 caseId, bool8 whichDex)
{
u16 count = 0;
u16 i;
switch (whichDex) {
case 0: //Regional
for (i = 0; i < gRegionalDexCount; ++i)
{
if (DexFlagCheck(SpeciesToNationalPokedexNum(gPokedexOrder_Regional[i]), caseId, FALSE))
count++;
}
break;
case 1: //National
for (i = 1; i <= FINAL_DEX_ENTRY; ++i)
{
if (DexFlagCheck(i, caseId, FALSE))
count++;
}
break;
}
return count;
}
u16 GetRegionalPokedexCount(u8 caseId)
{
u16 i, count;
for (i = 0, count = 0; i < gRegionalDexCount; ++i)
{
if (GetSetPokedexFlag(SpeciesToNationalPokedexNum(gPokedexOrder_Regional[i]), caseId))
count++;
}
return count;
}
bool16 HasAllRegionalMons(void)
{
u16 i;
for (i = 0; i < gRegionalDexCount; ++i)
{
if (!GetSetPokedexFlag(SpeciesToNationalPokedexNum(gPokedexOrder_Regional[i]), FLAG_GET_CAUGHT))
return FALSE;
}
return TRUE;
}
bool16 sp1B9_SeenAllRegionalMons(void)
{
u16 i;
for (i = 0; i < gRegionalDexCount; ++i)
{
if (!GetSetPokedexFlag(SpeciesToNationalPokedexNum(gPokedexOrder_Regional[i]), FLAG_GET_SEEN))
return FALSE;
}
return TRUE;
}
bool16 HasAllMons(void)
{
u16 i;
for (i = 1; i <= FINAL_DEX_ENTRY; ++i)
{
if (!GetSetPokedexFlag(i, FLAG_GET_CAUGHT))
return FALSE;
}
return TRUE;
}
u16 SpeciesToRegionalDexNum(u16 species)
{
u16 i;
for (i = 0; i < gRegionalDexCount; ++i)
{
if (gPokedexOrder_Regional[i] == species)
return i + 1;
}
return 0;
}
extern const u16 gPokedexOrder_Regional[];
extern const u16 gRegionalDexCount;
extern const u16 gPokedexOrder_Alphabetical[];
extern const u16 gPokedexOrderAlphabeticalCount;
extern const u16 gPokedexOrder_Weight[];
extern const u16 gPokedexOrderWeightCount;
extern const u16 gPokedexOrder_Height[];
extern const u16 gPokedexOrderHeightCount;
extern const u16 gPokedexOrder_Type[];
extern const u16 gPokedexOrderTypeCount;
u16 LoadPokedexViews(u8 type)
{
u16 i, counter, count, lastMeaningfulIndex;
const u16* dexList;
bool8 showUnseenSpecies = FALSE;
bool8 showUncaughtSpecies = FALSE;
switch (type) {
case 0:
dexList = gPokedexOrder_Regional;
count = gRegionalDexCount;
showUnseenSpecies = TRUE;
showUncaughtSpecies = TRUE;
break;
case 1:
dexList = gPokedexOrder_Alphabetical;
count = gPokedexOrderAlphabeticalCount;
showUncaughtSpecies = TRUE;
break;
case 2:
dexList = gPokedexOrder_Type;
count = gPokedexOrderTypeCount;
break;
case 3:
dexList = gPokedexOrder_Weight;
count = gPokedexOrderWeightCount;
break;
case 4:
dexList = gPokedexOrder_Height;
count = gPokedexOrderHeightCount;
break;
case 5:
default:
return LoadNationalPokedexView();
}
for (i = 0, counter = 0, lastMeaningfulIndex = 0; i < count; ++i)
{
u16 species = dexList[i];
bool8 seen = DexFlagCheck(species, FLAG_GET_SEEN, TRUE);
bool8 caught = DexFlagCheck(species, FLAG_GET_CAUGHT, TRUE);
if (!seen)
{
if (showUnseenSpecies)
{
gPokedexScreenDataPtr->listItem[counter].name = (void*) 0x8415F66; //-----
gPokedexScreenDataPtr->listItem[counter++].id = species | (0 << 16); //Unseen
}
}
else if (caught || showUncaughtSpecies)
{
lastMeaningfulIndex = counter + 1;
gPokedexScreenDataPtr->listItem[counter].name = gSpeciesNames[species];
if (caught)
gPokedexScreenDataPtr->listItem[counter++].id = species | (3 << 16); //Caught
else
gPokedexScreenDataPtr->listItem[counter++].id = species | (1 << 16); //Seen
}
}
if (lastMeaningfulIndex == 0)
{
//Fix empty list
lastMeaningfulIndex = 1;
gPokedexScreenDataPtr->listItem[0].name = (void*) 0x8415F66; //-----
gPokedexScreenDataPtr->listItem[0].id = gPokedexOrder_Regional[0] | (0 << 16); //Unseen
}
return lastMeaningfulIndex;
}
static u16 LoadNationalPokedexView(void)
{
u16 i, lastMeaningfulIndex;
for (i = 1, lastMeaningfulIndex = 0; i < NATIONAL_DEX_COUNT; ++i)
{
bool8 seen = DexFlagCheck(i, FLAG_GET_SEEN, FALSE);
bool8 caught = DexFlagCheck(i, FLAG_GET_CAUGHT, FALSE);
u16 species = NationalPokedexNumToSpecies(i);
u16 listIndex = i - 1;
if (!seen)
{
gPokedexScreenDataPtr->listItem[listIndex].name = (void*) 0x8415F66; //-----
gPokedexScreenDataPtr->listItem[listIndex].id = species | (0 << 16); //Unseen
}
else
{
lastMeaningfulIndex = i;
gPokedexScreenDataPtr->listItem[listIndex].name = gSpeciesNames[species];
if (caught)
gPokedexScreenDataPtr->listItem[listIndex].id = species | (3 << 16); //Caught
else
gPokedexScreenDataPtr->listItem[listIndex].id = species | (1 << 16); //Seen
}
}
return lastMeaningfulIndex;
}
<|start_filename|>include/cries.h<|end_filename|>
#pragma once
struct ToneData
{
u8 type;
u8 key;
u8 length; // sound length (compatible sound)
u8 pan_sweep; // pan or sweep (compatible sound ch. 1)
u8* wav; //struct WaveData *wav;
u8 attack;
u8 decay;
u8 sustain;
u8 release;
}; | GarfieldTheLightning/Dynamic-Pokemon-Expansion |
<|start_filename|>src/libzoo/io/FastOFStream.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifdef HAVE_BOOST
#include "FastOFStream.hh"
#include <unistd.h>
#include <string.h>
#include <fcntl.h>
#include <sys/mman.h>
#include <cassert>
#define BUFSIZE (1024*1024)
//(128*1048576)
#define VERBOSE 0
FastOFStream::FastOFStream( const char *filename, const char *mode )
: originalFilename_( filename )
, currentlyFlushingBuffer( 0 )
, terminate_( false )
, flusherThread_( boost::bind( &FastOFStream::FlushThreadFunc, this ) )
, usedAsBaseClass_( false )
, bufferOffsetWithFile_( 0 )
{
if ( VERBOSE ) cout << "FastOFStream: Opening " << filename << " in " << mode << " mode" << endl;
// assert ( mode[0] == 'w' && mode[1] == 0 );
if ( mode[0] == 'w' ) // todo: remove read version
fd = open( filename, O_WRONLY | O_CREAT | O_TRUNC, S_IRWXU );
else if ( mode[0] == 'r' ) // todo: remove read version
fd = open( filename, O_RDONLY );
else
assert( false && "Invalid mode to open file" );
// assert ( fd > 0 );
if ( fd > 0 )
{
buffer = new char[BUFSIZE];
buffer2 = new char[BUFSIZE];
bufferPtr = buffer;
}
else
{
buffer = NULL;
buffer2 = NULL;
}
}
/*
void FastOFStream::operator=( const FILE* f )
{
}
*/
FastOFStream::~FastOFStream()
{
if ( usedAsBaseClass_ )
return;
if ( VERBOSE ) cout << "FastOFStream destructor" << endl;
fclose();
/*
cout << " press a key to continue... was file; " << originalFilename_ << endl;
int x;
cin >> x;
*/
}
void FastOFStream::FlushThreadFunc()
{
while ( !terminate_ )
{
// cout << "hi" << (void*)this << endl;
{
boost::mutex::scoped_lock lock( mutex_ );
condition_.wait( lock );
if ( currentlyFlushingBuffer )
{
// cout << "Flushing buffer!" << endl;
write( fd, currentlyFlushingBuffer, BUFSIZE );
assert ( buffer2 == 0 );
buffer2 = currentlyFlushingBuffer;
currentlyFlushingBuffer = 0;
}
}
}
}
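// Copies data into the active buffer; whenever the buffer fills up it is handed to the flusher thread and the spare buffer takes its place (double buffering).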
size_t FastOFStream::fwrite( const void *ptr0, size_t size, size_t count )
{
const char *ptr = reinterpret_cast<const char *>( ptr0 );
size_t n = size * count;
assert ( n <= BUFSIZE );
while ( n > 0 )
{
size_t nextN = 0;
if ( bufferPtr + n > buffer + BUFSIZE )
{
size_t thisN = buffer + BUFSIZE - bufferPtr;
nextN = n - thisN;
n = thisN;
}
memcpy( bufferPtr, ptr, n );
bufferPtr += n;
assert ( bufferPtr <= buffer + BUFSIZE );
// check if we reached the end of the buffer
if ( bufferPtr == buffer + BUFSIZE )
{
// flush full buffer to disk
// Wait if previous buffer is still being flushed
while ( currentlyFlushingBuffer != 0 )
{
cout << "oh no i have to wait!" << endl;
usleep( 10000 );
}
// swap buffers
currentlyFlushingBuffer = buffer;
buffer = buffer2;
buffer2 = 0;
bufferPtr = buffer;
bufferOffsetWithFile_ += BUFSIZE;
// cout << "flushing" << (void*)this << endl;
condition_.notify_all();
}
ptr += n;
n = nextN;
}
return count;
}
size_t FastOFStream::ftell()
{
size_t result = bufferOffsetWithFile_ + ( bufferPtr - buffer );
#ifdef DEBUG
cout << "FastOFStream::ftell: bufferOffsetWithFile_=" << bufferOffsetWithFile_ << ", bufferPtr=" << ( void * )bufferPtr << ", buffer=" << ( void * )buffer << ", result=" << ( void * )result << endl;
#endif
return result;
}
void FastOFStream::fflush()
{
fsync( fd );
}
void FastOFStream::fclose()
{
// Terminate flushing thread
terminate_ = true;
condition_.notify_all();
if ( fd > 0 )
{
while ( currentlyFlushingBuffer != 0 )
{
if ( VERBOSE ) cout << "Waiting for flushing to complete!" << endl;
usleep( 1000 );
}
// Flush the rest of the current buffer to disk
if ( VERBOSE ) cout << "Flushing end of buffer" << endl;
write( fd, buffer, bufferPtr - buffer );
close( fd );
}
if ( VERBOSE ) cout << "Joining thread" << endl;
if ( !flusherThread_.timed_join( boost::posix_time::milliseconds( 500 ) ) )
{
cerr << "Timed join expired" << endl;
flusherThread_.interrupt();
}
if ( buffer )
{
if ( VERBOSE ) cout << "Freeing buffers" << endl;
delete [] buffer;
delete [] buffer2;
}
if ( VERBOSE ) cout << "All complete!" << endl;
buffer = NULL;
buffer2 = NULL;
fd = 0;
}
#endif //ifdef HAVE_BOOST
<|start_filename|>src/metagenomics/OutputTsv.hh<|end_filename|>
void printTsvHeader( ofstream &output )
{
output << "#TaxId\tTaxLevel\tCount\tCountIncludingChildren" << endl;
}
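// Recursively prints the counts for one taxonomy node and then for all of its children, skipping nodes whose counts are zero for every word size.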
void printTsvChildren( TAXMAP::iterator &iter, ofstream &output, int level, TAXMAP &taxInfo, unsigned int wordMinSizeCount )
{
bool dataAvailable = false;
for ( unsigned int s ( 0 ); s < wordMinSizeCount; s++ )
{
int magnitude = iter->second.wordCountPerSize_[s] + iter->second.wordCountPerSizeOfChildren_[s];
if ( magnitude != 0 ) dataAvailable = true;
}
if ( !dataAvailable ) return;
int id = iter->first;
unsigned int taxLevel = iter->second.taxLevel_;
output << id << '\t' << taxLevel;
for ( unsigned int s ( 0 ); s < wordMinSizeCount; s++ )
{
uint64_t magnitude = iter->second.wordCountPerSize_[s] + iter->second.wordCountPerSizeOfChildren_[s];
output << '\t' << iter->second.wordCountPerSize_[s] << '\t' << magnitude;
}
output << endl;
for ( TAXMAP::iterator iter = taxInfo.begin() ; iter != taxInfo.end(); ++iter )
{
if ( iter->second.parentId_ == id && id != 0 )
{
printTsvChildren( iter, output, level + 2, taxInfo, wordMinSizeCount );
}
}
}
<|start_filename|>src/shared/Algorithm.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef TOOL_HH
#define TOOL_HH
enum CompressionFormatType
{
compressionASCII,
compressionRunLength,
compressionIncrementalRunLength,
compressionHuffman
};
class Algorithm
{
public:
virtual void run( void ) = 0; // run method, must be implemented by all tools
virtual ~Algorithm() {}
};
#endif /* TOOL_HH */
<|start_filename|>src/errors/HiTECStats.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "HiTECStats.hh"
#include "BCRext.hh"
#include "BCRexternalBWT.hh"
#include "Timer.hh"
#include "config.h"
#include <cstring>
#include <fstream>
#include <sstream>
#include <string>
#include <algorithm>
#ifdef HAVE_POSIX_FADVISE
#endif
using namespace std;
using namespace BeetlBwtParameters;
HiTECStats::HiTECStats(
const double errorRate,
const double genomeLength,
const int numberOfReads,
const int readLength
):
numberOfReads_( numberOfReads ),
readLength_( readLength ),
genomeLength_( genomeLength ),
errorRate_( errorRate )
{
}
double nCr( int n, int r )
{
//use logs to avoid overflow
double result = 0;
for ( int i = r + 1; i <= n; i++ )
result += log( i );
for ( int i = 1; i <= n - r; i++ )
result -= log( i );
return exp( result );
}
double HiTECStats::probDestructible( int witnessLength )
{
double result = 1;
//probability witness has errors
result *= (
1 - pow( 1 - errorRate_, witnessLength )
);
//probability that support is correct
result *= ( 1 - errorRate_ );
//probability occurs in genome
result *= 0.75 * (
1 - pow(
1 - pow( 0.25, witnessLength ),
genomeLength_
)
);
return result;
}
double HiTECStats::expectedDestructible( int witnessLength )
{
double result = 1;
result *= (
1 - pow(
1 - probDestructible( witnessLength ),
readLength_ - witnessLength
)
);
result *= pow( 1 - errorRate_, readLength_ );
result *= numberOfReads_;
return result;
}
double HiTECStats::expectedUncorrectable( int witnessLength )
{
//create matrix of 'uncorrectable read counts'...
//this is essentially an implementation of the formula:
//f_w(k,l) as found in the HiTEC paper
double **storedErrors = new double*[readLength_ + 1];
for ( int i = 0; i < readLength_ + 1; i++ )
storedErrors[i] = new double[readLength_ + 1];
for ( int length = 0; length < witnessLength; length++ )
for ( int numErrors = 0; numErrors <= length; numErrors++ )
storedErrors[numErrors][length] = nCr( length, numErrors );
for ( int length = witnessLength; length <= readLength_; length++ )
for ( int numErrors = 0; numErrors <= length; numErrors++ )
if ( numErrors >= ( int )length / witnessLength )
{
int sum = 0;
for ( int knockOff = 1; knockOff <= witnessLength; knockOff++ )
sum += storedErrors[numErrors - 1][length - knockOff];
storedErrors[numErrors][length] = sum;
}
double result = 0;
for ( int numErrors = 0; numErrors <= readLength_; numErrors++ )
result += storedErrors[numErrors][readLength_] * pow( errorRate_, numErrors ) * pow( 1 - errorRate_, readLength_ - numErrors );
for ( int i = 0; i < readLength_ + 1; i++ )
delete[] storedErrors[i];
delete[] storedErrors;
return result * numberOfReads_;
}
double HiTECStats::expectedCorrectWitnessNeighbourPairs( int support, int witnessLength )
{
double qc = ( readLength_ - witnessLength ) * pow( 1 - errorRate_, witnessLength + 1 ) / genomeLength_;
return nCr( numberOfReads_, support ) * pow( qc, support ) * pow( 1 - qc, numberOfReads_ - support ) * genomeLength_;
}
double HiTECStats::expectedIncorrectWitnessNeighbourPairs( int support, int witnessLength )
{
double qe = ( readLength_ - witnessLength ) * pow( 1 - errorRate_, witnessLength ) * errorRate_ / 3 / genomeLength_;
return nCr( numberOfReads_, support ) * pow( qe, support ) * pow( 1 - qe, numberOfReads_ - support ) * genomeLength_;
}
double HiTECStats::expectedErroneousReads()
{
return numberOfReads_ * ( 1 - pow( 1 - errorRate_, readLength_ ) );
}
int HiTECStats::Calculate_wm()
{
return Calculate_wm( readLength_ );
}
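//Picks the witness length that minimises the expected number of destructible plus uncorrectable reads, over candidate lengths up to maxValue.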
int HiTECStats::Calculate_wm( int maxValue )
{
double lowestScore = ( double )numberOfReads_;
int best_wm = 1;
for ( int candidate_wm = 1; candidate_wm <= maxValue; candidate_wm++ )
{
double candidateScore = expectedDestructible( candidate_wm ) + expectedUncorrectable( candidate_wm );
if ( candidateScore < lowestScore )
{
lowestScore = candidateScore;
best_wm = candidate_wm;
}
}
return best_wm;
}
int HiTECStats::Calculate_wM( double maxDestructibleRate )
{
//return the smallest witness length (at least wm) which gives desired specificity
for ( int candidate_Wm = 1; candidate_Wm <= readLength_; candidate_Wm++ )
if ( expectedDestructible( candidate_Wm ) < maxDestructibleRate * expectedErroneousReads() )
return candidate_Wm;
return readLength_;
}
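//Finds the smallest support at which the expected number of correct witness/neighbour pairs outnumbers the incorrect ones, then adds 2.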
int HiTECStats::CalculateSupport( int witnessLength )
{
int candidateThreshold = 1;
while (
expectedCorrectWitnessNeighbourPairs( candidateThreshold, witnessLength )
<
expectedIncorrectWitnessNeighbourPairs( candidateThreshold, witnessLength )
&&
candidateThreshold < numberOfReads_
)
candidateThreshold++;
return candidateThreshold + 2;
}
<|start_filename|>src/frontends/BeetlCompare.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BeetlCompare.hh"
#include "Common.hh"
#include "config.h"
#include "countWords/CountWords.hh"
#include "parameters/CompareParameters.hh"
#include "libzoo/cli/Common.hh"
#include "libzoo/util/Logger.hh"
#include "libzoo/util/TemporaryFilesManager.hh"
#include <cstdlib>
#include <iostream>
#include <sstream>
#include <string.h>
#include <unistd.h>
using namespace std;
using namespace BeetlCompareParameters;
CompareParameters params;
void printUsage()
{
params.printUsage();
cout << "Notes:" << endl;
cout << " Mode = tumour-normal: Set A = tumour, set B = normal" << endl;
cout << " Mode = splice : Set A = DNA; set B = RNA" << endl;
cout << " Mode = reference : Set B is a reference genome" << endl;
cout << " Mode = metagenomics : Set B are merged reference genomes. Switches metagenome classifier on." << endl;
cout << endl;
}
void launchBeetlCompare()
{
vector<string> setA_filenames;
vector<string> setB_filenames;
vector<string> setB_C_filenames;
bool setA_isBwtCompressed;
bool setB_isBwtCompressed;
string setA_availableFileLetters;
string setB_availableFileLetters;
detectInputBwtProperties( params["input setA"], setA_filenames, setA_isBwtCompressed, setA_availableFileLetters );
detectInputBwtProperties( params["input setB"], setB_filenames, setB_isBwtCompressed, setB_availableFileLetters );
if ( setA_filenames.size() < 2 )
{
cerr << "Error: too few input files detected (run with -vv for more details)" << endl;
exit( -1 );
}
assert( setA_filenames.size() == setB_filenames.size() );
if ( params["mode"] == MODE_METAGENOMICS )
{
assert( strchr( setB_availableFileLetters.c_str(), 'C' ) && "{inputB}-C0x files cannot be found" );
setB_C_filenames = setB_filenames;
for ( unsigned int i = 0; i < setB_C_filenames.size(); ++i )
{
int pos = setB_C_filenames[i].size() - 3;
assert( setB_C_filenames[i][pos] == 'B' );
setB_C_filenames[i][pos] = 'C';
}
// Check that the 'C' files are encoded as 32-bit ints rather than 16-bit shorts,
// by verifying that the first few 32-bit values stay <= 65535 (reading short-encoded data as ints would produce much larger values)
ifstream fileC0( setB_C_filenames[0].c_str() );
uint32_t intVal;
for ( unsigned int i = 0; i < 10; ++i )
{
if ( !fileC0.read( ( char * )&intVal, 4 ) )
break;
if ( intVal > 65535 )
{
cerr << "Error: inputSetB-C0x files seem to be encoded as 2-bytes values. This version of BEETL expects 4 bytes per value.\nYou can use the tool scripts/misc/shortToInt.pl provided with BEETL source code to convert your files." << endl;
exit( -1 );
}
}
}
if ( !params["inputA format"].isSet() )
{
if ( setA_isBwtCompressed )
{
clog << "Set A detected as RLE compressed" << endl;
params["inputA format"] = "BWT_RLE";
}
else
{
clog << "Set A detected as ASCII" << endl;
params["inputA format"] = "BWT_ASCII";
}
}
if ( !params["inputB format"].isSet() )
{
if ( setB_isBwtCompressed )
{
clog << "Set B detected as RLE compressed" << endl;
params["inputB format"] = "BWT_RLE";
}
else
{
clog << "Set B detected as ASCII" << endl;
params["inputB format"] = "BWT_ASCII";
}
}
// Use default parameter values where needed
params.commitDefaultValues();
// Update variables with optional user values
setA_isBwtCompressed = ( params["inputA format"] == INPUT_FORMAT_BWT_RLE );
setB_isBwtCompressed = ( params["inputB format"] == INPUT_FORMAT_BWT_RLE );
bool reportMinLength = ( params["report min length"] == REPORT_MINLENGTH_ON );
Logger::out() << "\nLaunching the following configuration of Beetl-compare:" << endl;
params.print( Logger::out(), false );
Logger::out() << endl;
Algorithm *pcountWords = new CountWords( setA_isBwtCompressed, setB_isBwtCompressed
, 'X'
, params["min occ"]
, params["max length"]
, setA_filenames, setB_filenames
, setB_C_filenames
, params["taxonomy"]
, reportMinLength
, params["min kmer length"]
, params["subset"]
, ¶ms
);
// run the "main" method
pcountWords->run();
// clean up
delete pcountWords;
TemporaryFilesManager::get().cleanup();
}
int main( const int argc, const char **argv )
{
// Generated using: http://patorjk.com/software/taag/#p=display&f=Soft&t=BEETL%20compare
cout << ",-----. ,------.,------.,--------.,--. " << endl;
cout << "| |) /_ | .---'| .---''--. .--'| | ,---. ,---. ,--,--,--. ,---. ,--,--.,--.--. ,---. " << endl;
cout << "| .-. \\| `--, | `--, | | | | | .--'| .-. || || .-. |' ,-. || .--'| .-. : " << endl;
cout << "| '--' /| `---.| `---. | | | '--. \\ `--.' '-' '| | | || '-' '\\ '-' || | \\ --. " << endl;
cout << "`------' `------'`------' `--' `-----' `---' `---' `--`--`--'| |-' `--`--'`--' `----' " << endl;
cout << " `--' " << endl;
cout << "Version " << PACKAGE_VERSION << endl;
cout << endl;
cout << "Command called:" << endl << " ";
for ( int i = 0; i < argc; ++i )
{
cout << " " << argv[i];
}
cout << "\n" << endl;
if ( !params.parseArgv( argc, argv ) || params["help"] == 1 || !params.chechRequiredParameters() )
{
printUsage();
exit( params["help"] == 0 );
}
// Checking for extra parameters required in metagenomics mode
if ( params["mode"] == MODE_METAGENOMICS && !params["taxonomy"].isSet() )
{
cerr << "Error: Missing Metagenomics-specific parameter: --taxonomy\n" << endl;
printUsage();
exit( 1 );
}
// Auto-detection of missing arguments
if ( !params["memory limit MB"].isSet() )
{
params["memory limit MB"] = detectMemoryLimitInMB();
}
TemporaryFilesManager::get().setRamLimit( params["memory limit MB"] );
// Launch
launchBeetlCompare();
return 0;
}
<|start_filename|>src/shared/Filename.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "Filename.hh"
#include "libzoo/util/TemporaryFilesManager.hh"
using namespace std;
std::string Filename::str() const
{
return str_;
}
std::string TmpFilename::str() const
{
if ( fullPathStr_.empty() )
{
const string &tempPath = TemporaryFilesManager::get().tempPath_;
if ( tempPath.empty() )
fullPathStr_ = str_;
else
fullPathStr_ = tempPath + string( "/" ) + str_;
}
return fullPathStr_;
}
<|start_filename|>src/frontends/BeetlIndex.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BeetlIndex.hh"
#include "BwtIndex.hh"
#include "BwtReader.hh"
#include "Tools.hh"
#include "config.h"
//#include "search/SearchUsingBacktracker.hh"
#include "parameters/IndexParameters.hh"
#include "libzoo/cli/Common.hh"
#include "libzoo/util/Logger.hh"
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <unistd.h>
using namespace std;
IndexParameters params;
void printUsage()
{
params.printUsage();
cout << "Notes:" << endl;
cout << " -j and -k are mutually exclusive, one of them being required.\n" << endl;
cout << endl;
}
void launchBeetlIndex()
{
// SearchUsingBacktracker search( params );
// search.run();
string indexPrefix = params.getStringValue( "input" );
bool compressed, forceOverwrite = false;
vector<string> pileNames;
string dummyStr;
int blockSize( 2048 );
if ( params["block size"].isSet() )
{
blockSize = params["block size"];
if ( blockSize <= 0 )
{
cerr << "Index block size must be greater than zero!" << endl;
exit( EXIT_FAILURE );
} // ~if
} // ~if
else
{
cerr << "No index block size specified, using default" << endl;
} // ~else
cerr << "Will index every " << blockSize << " bytes " << endl;
if ( params["force"].isSet() )
{
forceOverwrite = true;
} // ~else
detectInputBwtProperties( indexPrefix, pileNames, compressed, dummyStr );
if ( pileNames.empty() )
{
cerr << "Did not find any BWT files matching prefix " << indexPrefix
<< "." << endl
<< "If BWT files are named 'myName-B0?' specify '-i myname'."
<< endl;
exit ( EXIT_FAILURE );
}
// if(!compressed)
// {
// cerr<<"BWT files seems to be in ASCII format." << endl
// <<"Only run-length compressed BWT files can currently be indexed."
// << endl <<"Use beetl-convert to change format." << endl;
// exit( EXIT_FAILURE );
// }
#pragma omp parallel for
for ( unsigned int thisPile = 0; thisPile < pileNames.size(); ++thisPile )
{
const string pileName = pileNames[thisPile];
#pragma omp critical (IO)
cerr << "Indexing file " << pileName << endl;
// BwtReaderIndex<BwtReaderRunLength> reader( pileName.c_str(), params.getStringValue( "use shm" ) );
unique_ptr<BwtReaderBase> reader( instantiateBwtPileReader( pileName.c_str(), params.getStringValue( "use shm" ), false, true ) );
string indexFileName = pileName + ".idx";
FILE *pFile;
if ( !forceOverwrite && readWriteCheck( indexFileName.c_str(), false, false ) )
{
#pragma omp critical (IO)
cerr << "File " << indexFileName << " already exists! Rerun with --force to remove." << endl;
exit( EXIT_FAILURE );
}
pFile = fopen( indexFileName.c_str() , "w" );
if ( pFile == NULL )
{
#pragma omp critical (IO)
cerr << "Problem opening file " << indexFileName << " for writing" << endl;
exit( EXIT_FAILURE );
}
buildIndex( reader.get(), pFile, blockSize );
fclose ( pFile );
}
}
int main( const int argc, const char **argv )
{
// Generated using: http://patorjk.com/software/taag/#p=display&f=Soft&t=BEETL%20index
cout << ",-----. ,------.,------.,--------.,--. ,--. ,--. " << endl;
cout << "| |) /_ | .---'| .---''--. .--'| | `--',--,--, ,-| | ,---. ,--. ,--. " << endl;
cout << "| .-. \\| `--, | `--, | | | | ,--.| \\' .-. || .-. : \\ `' / " << endl;
cout << "| '--' /| `---.| `---. | | | '--. | || || |\\ `-' |\\ --. / /. \\ " << endl;
cout << "`------' `------'`------' `--' `-----' `--'`--''--' `---' `----''--' '--' " << endl;
cout << "Version " << PACKAGE_VERSION << endl;
cout << endl;
cout << "Command called:" << endl << " ";
for ( int i = 0; i < argc; ++i )
{
cout << " " << argv[i];
}
cout << "\n" << endl;
if ( !params.parseArgv( argc, argv ) || params["help"] == 1 || !params.chechRequiredParameters() )
{
printUsage();
exit( params["help"] == 0 );
}
// Use default parameter values where needed
params.commitDefaultValues();
// Checking for required parameters - TBD set to default (prob 2048) if not specified
// if ( ! ( params["kmers input file"].isSet() ) )
// {
// cerr << "Error: Missing argument: -i is required\n" << endl;
// printUsage();
// exit( 1 );
// }
// Launch
launchBeetlIndex();
return 0;
}
<|start_filename|>src/BCR/Sorting.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "Sorting.hh"
#include <algorithm>
#include <cstdio>
#include <cstdlib>
#include <vector>
#ifdef _OPENMP
#include <omp.h>
#include <parallel/algorithm>
#endif //ifdef _OPENMP
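// Comparator for sortElement: orders by pile number first, then by position within the pile.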
bool cmpSortEl ( sortElement a, sortElement b )
{
if ( a.pileN == b.pileN )
return ( a.posN < b.posN );
else
return ( a.pileN < b.pileN );
}
void quickSort( vector< sortElement > &v )
{
#ifdef _OPENMP
__gnu_parallel::sort( v.begin(), v.end(), cmpSortEl );
#else //ifdef _OPENMP
sort( v.begin(), v.end(), cmpSortEl );
#endif //ifdef _OPENMP
}
<|start_filename|>src/search/IntervalFile.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "IntervalFile.hh"
#include <cassert>
#include <iostream>
using namespace std;
ostream &operator<<( std::ostream &os, const IntervalRecord &obj )
{
return os << "{ " << obj.kmer << " interval, position: " << obj.position << ", count: " << obj.count << " }";
}
IntervalWriter::IntervalWriter( std::ostream &file ): file_( file )
{
assert( file_.good() );
}
void IntervalWriter::write( const IntervalRecord &ir ) const
{
file_ << ir.kmer;
file_ << ' ' << ir.position;
file_ << ' ' << ir.count << endl;
}
void IntervalWriter::writeV2( const IntervalRecord &ir ) const
{
file_ << ir.kmer;
file_ << ' ' << ir.position;
file_ << ' ' << ir.count << ':';
for ( auto pos : ir.dollarSignPositions )
file_ << ' ' << pos;
file_ << endl;
}
bool IntervalReader::read( IntervalRecord &ir )
{
file_ >> ir.kmer;
file_ >> ir.position;
file_ >> ir.count;
if ( file_.good() )
return true;
else
return false;
}
vector<IntervalRecord> IntervalReader::readFullFileAsVector()
{
vector<IntervalRecord> result;
IntervalRecord rec;
while ( read( rec ) )
result.push_back( rec );
return result;
}
<|start_filename|>src/shared/LetterCount.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef DEFINED_LETTERCOUNT_HH
#define DEFINED_LETTERCOUNT_HH
#include "Alphabet.hh"
#include "Types.hh"
#include <cassert>
#include <iostream>
#include <vector>
using std::vector;
template<typename T> struct LetterCountTemplate
{
LetterCountTemplate()
{
clear();
} // ~ctor
void clear( void )
{
for ( int i( 0 ); i < alphabetSize; i++ ) count_[i] = 0;
} // ~clear
void operator+=( const char c )
{
assert( whichPile[( int )c] < alphabetSize );
count_[whichPile[( int )c]]++;
assert( count_[whichPile[( int )c]] != 0 );
}
template<typename TT>void operator+=( const LetterCountTemplate<TT> &rhs )
{
for ( int i( 0 ); i < alphabetSize; i++ )
{
T newCount = count_[i] + rhs.count_[i];
assert( newCount >= count_[i] && "Overflow error in LetterCountTemplate" );
count_[i] = newCount;
}
} // ~clear
template<typename TT>void operator-=( const LetterCountTemplate<TT> &rhs )
{
// on your own head be it if you make an unsigned quantity negative...
for ( int i( 0 ); i < alphabetSize; i++ ) count_[i] -= rhs.count_[i];
} // ~clear
void countString( const char *const s, const T length )
{
for ( T i = 0; i < length; ++i )
operator+=( s[i] );
}
friend std::ostream &operator<<( std::ostream &os, const LetterCountTemplate &obj )
{
for ( int i( 0 ); i < alphabetSize; i++ )
os << " " << alphabet[i] << ":" << obj.count_[i];
return os;
}
friend std::istream &operator>>( std::istream &is, LetterCountTemplate &obj )
{
char letter, colon;
for ( int i( 0 ); i < alphabetSize; i++ )
{
is >> letter >> colon >> obj.count_[i];
if ( letter != alphabet[i] || colon != ':' )
{
std::cerr << "Error reading LetterCount from file: letter=" << letter << ", colon=" << colon << std::endl;
exit( 1 );
}
}
return is;
}
// LetterCountData count_;
T count_[alphabetSize];
}; // ~LetterCountTemplate
typedef LetterCountTemplate<LetterNumber> LetterCount;
typedef LetterCountTemplate<LetterNumberCompact> LetterCountCompact;
struct LetterCountEachPile : public vector<LetterCount>
{
LetterCountEachPile()
{
resize( alphabetSize );
clear();
} // ~ctor
void clear( const int startIndex = 0 )
{
for ( int i( startIndex ); i < alphabetSize; i++ ) ( *this )[i].clear();
} // ~clear
void print( void )
{
for ( int i( 0 ); i < alphabetSize; i++ )
{
std::cout << alphabet[i] << " pile" << ( *this )[i] << std::endl;
} // ~for
} // ~clear
void operator+=( const LetterCountEachPile &rhs )
{
for ( int i( 0 ); i < alphabetSize; i++ ) ( *this )[i] += rhs[i];
} // ~clear
};
#endif
<|start_filename|>src/backtracker/TwoBwtBackTracker.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "TwoBwtBackTracker.hh"
#include "libzoo/util/Logger.hh"
using namespace std;
TwoBwtBackTracker::TwoBwtBackTracker( BwtReaderBase *inBwtA, BwtReaderBase *inBwtB,
LetterNumber ¤tPosA, LetterNumber ¤tPosB,
RangeStoreExternal &rA, RangeStoreExternal &rB,
LetterCount &countsSoFarA, LetterCount &countsSoFarB,
int minOcc, const int maxLength, const string &subset, const int cycle,
const bool doesPropagateBkptToSeqNumInSetA,
const bool doesPropagateBkptToSeqNumInSetB,
const bool noComparisonSkip,
const bool propagateSequence )
: BackTrackerBase( subset, cycle, noComparisonSkip, propagateSequence )
, inBwtA_( inBwtA ), inBwtB_( inBwtB )
, currentPosA_( currentPosA ), currentPosB_( currentPosB )
, rA_( rA ), rB_( rB ), countsSoFarA_( countsSoFarA ), countsSoFarB_( countsSoFarB )
, minOcc_( minOcc ), maxLength_( maxLength ), subset_( subset ), cycle_( cycle )
, numNotSkippedA_( 0 )
, numNotSkippedB_( 0 )
, numSkippedA_( 0 )
, numSkippedB_( 0 )
, doesPropagateBkptToSeqNumInSetA_( doesPropagateBkptToSeqNumInSetA )
, doesPropagateBkptToSeqNumInSetB_( doesPropagateBkptToSeqNumInSetB )
{
for ( int l( 0 ); l < alphabetSize; ++l )
propagateIntervalA_[l] = propagateIntervalB_[l] = false;
}
/*
void TwoBwtBackTracker::process (
int pileNum
, string &thisWord
, IntervalHandlerBase &intervalHandler
)
{
LetterCount countsThisRangeA, countsThisRangeB;
Range thisRangeA, thisRangeB;
//string thisWord;
bool notAtLastA( true ), notAtLastB( true );
bool hasChild;
while ( 1 )
{
processSingletons(
pileNum
, notAtLastA
, rA_
, thisRangeA
, currentPosA_
, inBwtA_
, countsSoFarA_
, countsThisRangeA
, intervalHandler
, propagateIntervalA_
, thisWord
, doesPropagateBkptToSeqNumInSetA_
, NULL //( IntervalHandler_FoundCallbackPtr )( &IntervalHandlerBase::foundInAOnly )
, 1
);
processSingletons(
pileNum
, notAtLastB
, rB_
, thisRangeB
, currentPosB_
, inBwtB_
, countsSoFarB_
, countsThisRangeB
, intervalHandler
, propagateIntervalB_
, thisWord
, doesPropagateBkptToSeqNumInSetB_
, NULL //( IntervalHandler_FoundCallbackPtr )( &IntervalHandlerBase::foundInBOnly )
, 2
);
if ( notAtLastA == false )
{
assert ( notAtLastB == false );
break;
} // ~if
else
{
assert( ( thisRangeA.pos_ & matchFlag ) != 0 );
assert( ( thisRangeB.pos_ & matchFlag ) != 0 );
char *bwtSubstringA;
prepareCallbackArgs(
thisRangeA
, currentPosA_
, inBwtA_
, countsSoFarA_
, countsThisRangeA
, intervalHandler
, bwtSubstringA
);
char *bwtSubstringB;
prepareCallbackArgs(
thisRangeB
, currentPosB_
, inBwtB_
, countsSoFarB_
, countsThisRangeB
, intervalHandler
, bwtSubstringB
);
#ifdef PROPAGATE_SEQUENCE
assert( thisRangeA.word_.size() == thisRangeB.word_.size() );
#endif
bool isBreakpointDetected = false;
intervalHandler.foundInBoth
( pileNum,
countsThisRangeA,
countsThisRangeB,
thisRangeA,
thisRangeB,
propagateIntervalA_,
propagateIntervalB_,
isBreakpointDetected,
cycle_
);
if ( isBreakpointDetected )
{
thisRangeA.isBkptExtension_ = true;
thisRangeB.isBkptExtension_ = true;
}
#ifdef PROPAGATE_SEQUENCE
hasChild = false;
#endif
for ( AlphabetSymbol l( 1 ); l < alphabetSize; l++ )
{
if ( ( propagateIntervalA_[l] == true )
|| ( propagateIntervalB_[l] == true ) )
{
updatePropagatedSuffixWord( hasChild, thisRangeA, thisWord, l );
// thisWord+=thisRangeA.word_;
LetterNumber thisFlag
( ( ( propagateIntervalA_[l] == true )
&& ( propagateIntervalB_[l] == true ) ) ? matchFlag : 0 );
Range newRangeA( thisWord,
( countsSoFarA_.count_[l]
| thisFlag ),
countsThisRangeA.count_[l],
thisRangeA.isBkptExtension_ );
Range newRangeB( thisWord,
( countsSoFarB_.count_[l]
| thisFlag ),
countsThisRangeB.count_[l],
thisRangeB.isBkptExtension_ );
bool doAddRangeA = noComparisonSkip_ || !rA_.isRangeKnown( newRangeA, l, pileNum, subset_, cycle_ );
bool doAddRangeB = noComparisonSkip_ || !rB_.isRangeKnown( newRangeB, l, pileNum, subset_, cycle_ );
if ( thisFlag )
{
doAddRangeA = doAddRangeB = doAddRangeA || doAddRangeB;
}
if ( propagateIntervalA_[l] == true )
{
if ( doAddRangeA )
{
rA_.addRange( newRangeA, l, pileNum, subset_, cycle_ );
++numNotSkippedA_;
}
else
{
++numSkippedA_;
}
}
// if (countsThisRangeB.count_[l]>0)
if ( propagateIntervalB_[l] == true )
{
if ( doAddRangeB )
{
rB_.addRange( newRangeB, l, pileNum, subset_, cycle_ );
++numNotSkippedB_;
}
else
{
++numSkippedB_;
}
}
} // ~if
} // ~for
countsSoFarA_ += countsThisRangeA;
currentPosA_ += thisRangeA.num_;
countsSoFarB_ += countsThisRangeB;
currentPosB_ += thisRangeB.num_;
numRanges_++;
}
} // ~while
}
*/
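// Active implementation: drains the unmatched ranges of set A, then of set B, and finally processes the ranges flagged as present in both sets.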
void TwoBwtBackTracker::process ( int i, string &thisWord, IntervalHandlerBase &intervalHandler_ )
{
LetterCount countsThisRangeA, countsThisRangeB;
Range thisRangeA, thisRangeB;
// string thisWord;
bool notAtLastA( true ), notAtLastB( true );
bool hasChild = false;
while ( 1 )
{
while ( notAtLastA )
{
notAtLastA = rA_.getRange( thisRangeA );
if ( ( notAtLastA == false ) || ( ( thisRangeA.pos_ & matchFlag ) != 0 ) ) break;
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "RangeA: " << i << " "
<< thisRangeA.word_ << " "
<< thisRangeA.pos_ << " " << thisRangeA.num_
<< " -- " << currentPosA_
<< " < " << thisRangeB.word_
<< endl;
skipIfNecessary( thisRangeA, currentPosA_, ( *inBwtA_ ), countsSoFarA_ );
// count children
countsThisRangeA.clear();
inBwtA_->readAndCount( countsThisRangeA, thisRangeA.num_ );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << countsThisRangeA << endl;
}
intervalHandler_.foundInAOnly
( i,
countsSoFarA_,
countsThisRangeA,
NULL,
thisRangeA,
propagateIntervalA_ ,
cycle_
);
// Sequence numbers extraction
if ( doesPropagateBkptToSeqNumInSetA_ )
{
if ( countsThisRangeA.count_[0] > 0 )
{
#pragma omp critical (IO)
cout << "READNUM"
<< "(" << thisRangeA.isBkptExtension_ << ")"
<< " " << i
<< " " << ( thisRangeA.pos_ + 1 )
<< " " << thisRangeA.num_
<< " " << countsThisRangeA.count_[0]
<< endl;
}
for ( int l( 1 ); l < alphabetSize; l++ )
propagateIntervalA_[l] = ( countsThisRangeA.count_[l] > 0 );
}
// add ranges for any children
hasChild = false;
for ( int l( 1 ); l < alphabetSize; l++ )
{
// if (countsThisRangeA.count_[l]>=minOcc)
if ( propagateIntervalA_[l] == true )
{
if ( propagateSequence_ )
{
if ( hasChild == false )
{
assert( thisWord.size() == thisRangeA.word_.size() + 1 );
thisWord.replace( 1, thisRangeA.word_.size(), thisRangeA.word_ );
} // ~if
thisWord[0] = alphabet[l];
}
hasChild = true;
Range newRange( thisWord,
countsSoFarA_.count_[l],
countsThisRangeA.count_[l],
thisRangeA.isBkptExtension_ );
if ( noComparisonSkip_ ||
!rA_.isRangeKnown( newRange, l, i, subset_, cycle_ ) )
rA_.addRange( newRange, l, i, subset_, cycle_ );
} // ~if
} // ~for l
if ( hasChild == false )
{
#ifdef OLD
// if no children, print word itself
cout << "GOLD ";
cout << thisRangeA.word_;
cout << " " << thisRangeA.num_ << endl;
#endif
numSingletonRanges_++;
} // ~if
countsSoFarA_ += countsThisRangeA;
currentPosA_ += thisRangeA.num_;
numRanges_++;
} // ~while notAtLastA
while ( notAtLastB )
{
notAtLastB = rB_.getRange( thisRangeB );
if ( ( notAtLastB == false ) || ( ( thisRangeB.pos_ & matchFlag ) != 0 ) ) break;
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "RangeB: " << i << " "
<< thisRangeB.word_ << " "
<< thisRangeB.pos_ << " " << thisRangeB.num_
<< " -- " << currentPosB_
<< " < " << thisRangeB.word_
<< endl;
skipIfNecessary( thisRangeB, currentPosB_, ( *inBwtB_ ), countsSoFarB_ );
// count children
countsThisRangeB.clear();
LetterNumber charsRead = inBwtB_->readAndCount( countsThisRangeB, thisRangeB.num_ );
assert( charsRead == thisRangeB.num_ );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << countsThisRangeB << endl;
}
//#ifdef TBD
intervalHandler_.foundInBOnly
( i,
countsSoFarB_,
countsThisRangeB,
NULL,
thisRangeB,
propagateIntervalB_,
cycle_
);
//#endif
// add ranges for any children
if ( propagateSequence_ )
hasChild = false;
for ( int l( 1 ); l < alphabetSize; l++ )
{
// if (countsThisRangeB.count_[l]>=minOcc)
if ( propagateIntervalB_[l] == true )
{
if ( propagateSequence_ )
{
if ( hasChild == false )
{
assert( thisWord.size() == thisRangeB.word_.size() + 1 );
thisWord.replace( 1, thisRangeB.word_.size(), thisRangeB.word_ );
hasChild = true;
} // ~if
thisWord[0] = alphabet[l];
}
Range newRange( thisWord,
countsSoFarB_.count_[l],
countsThisRangeB.count_[l],
thisRangeB.isBkptExtension_ );
if ( noComparisonSkip_ ||
!rB_.isRangeKnown( newRange, l, i, subset_, cycle_ ) )
rB_.addRange( newRange, l, i, subset_, cycle_ );
} // ~if
} // ~for l
/*
if ( hasChild == false )
{
// if no children, print word itself
// cout << "GOLD " << thisRangeB.word_ << " " << thisRangeB.num_ << endl;
// numSingletonRanges_++;
} // ~if
*/
countsSoFarB_ += countsThisRangeB;
currentPosB_ += thisRangeB.num_;
// numRanges_++;
} // ~while
if ( notAtLastA == false )
{
assert ( notAtLastB == false );
break;
} // ~if
else
{
assert( ( thisRangeA.pos_ & matchFlag ) != 0 );
assert( ( thisRangeB.pos_ & matchFlag ) != 0 );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "RangeA: " << i << " "
<< thisRangeA.word_ << " "
<< thisRangeA.pos_ << " " << thisRangeA.num_
<< " -- " << currentPosA_ << " = ";
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "RangeB: " << i << " "
<< thisRangeB.word_ << " "
<< thisRangeB.pos_ << " " << thisRangeB.num_
<< " -- " << currentPosB_ << endl;
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << ( thisRangeA.pos_ & matchFlag ) << " " << ( thisRangeB.pos_ & matchFlag ) << endl;
skipIfNecessary( thisRangeA, currentPosA_, ( *inBwtA_ ), countsSoFarA_ );
// count children
countsThisRangeA.clear();
LetterNumber count = inBwtA_->readAndCount( countsThisRangeA, thisRangeA.num_ );
assert( count == thisRangeA.num_ );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << countsThisRangeA << endl;
}
skipIfNecessary( thisRangeB, currentPosB_, ( *inBwtB_ ), countsSoFarB_ );
// count children
countsThisRangeB.clear();
count = inBwtB_->readAndCount( countsThisRangeB, thisRangeB.num_ );
assert( count == thisRangeB.num_ );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << countsThisRangeB << endl;
}
if ( propagateSequence_ )
assert( thisRangeA.word_.size() == thisRangeB.word_.size() );
bool isBreakpointDetected = false;
intervalHandler_.foundInBoth
( i,
countsThisRangeA, countsThisRangeB,
thisRangeA, thisRangeB,
propagateIntervalA_, propagateIntervalB_,
isBreakpointDetected, cycle_ );
if ( isBreakpointDetected )
{
thisRangeA.isBkptExtension_ = true;
thisRangeB.isBkptExtension_ = true;
}
if ( propagateSequence_ )
hasChild = false;
for ( int l( 1 ); l < alphabetSize; l++ )
{
if ( ( propagateIntervalA_[l] == true )
|| ( propagateIntervalB_[l] == true ) )
{
if ( propagateSequence_ )
{
if ( hasChild == false )
{
assert( thisWord.size() == thisRangeA.word_.size() + 1 );
thisWord.replace( 1, thisRangeA.word_.size(), thisRangeA.word_ );
hasChild = true;
}
thisWord[0] = alphabet[l];
}
// thisWord+=thisRangeA.word_;
LetterNumber thisFlag
( ( ( propagateIntervalA_[l] == true )
&& ( propagateIntervalB_[l] == true ) ) ? matchFlag : 0 );
Range newRangeA( thisWord,
( countsSoFarA_.count_[l]
| thisFlag ),
countsThisRangeA.count_[l],
thisRangeA.isBkptExtension_ );
Range newRangeB( thisWord,
( countsSoFarB_.count_[l]
| thisFlag ),
countsThisRangeB.count_[l],
thisRangeB.isBkptExtension_ );
bool doAddRangeA = noComparisonSkip_ || !rA_.isRangeKnown( newRangeA, l, i, subset_, cycle_ );
bool doAddRangeB = noComparisonSkip_ || !rB_.isRangeKnown( newRangeB, l, i, subset_, cycle_ );
if ( thisFlag )
{
doAddRangeA = doAddRangeB = doAddRangeA || doAddRangeB;
}
if ( propagateIntervalA_[l] == true )
{
if ( doAddRangeA )
{
rA_.addRange( newRangeA, l, i, subset_, cycle_ );
++numNotSkippedA_;
}
else
{
++numSkippedA_;
}
}
// if (countsThisRangeB.count_[l]>0)
if ( propagateIntervalB_[l] == true )
{
if ( doAddRangeB )
{
rB_.addRange( newRangeB, l, i, subset_, cycle_ );
++numNotSkippedB_;
}
else
{
++numSkippedB_;
}
}
} // ~if
} // ~for
countsSoFarA_ += countsThisRangeA;
currentPosA_ += thisRangeA.num_;
countsSoFarB_ += countsThisRangeB;
currentPosB_ += thisRangeB.num_;
numRanges_++;
}
} // ~while
}
<|start_filename|>src/shared/Types.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef DEFINED_TYPES_HH
#define DEFINED_TYPES_HH
#include <stdint.h>
#include <string>
using std::string;
// Standard data types
typedef unsigned char uchar;
typedef unsigned int uint;
typedef unsigned long ulong;
// Type to represent: Number of sequences
// below limits to 4 billion reads max - change to uint64_t for more
typedef uint32_t SequenceNumber;
// Type to represent: Sequence length (in the biological case, typically 100)
typedef uint32_t SequenceLength;
// Type to represent: character position or number of characters in BWT
// Should work for BWT of up to 2^64 characters in size
typedef uint64_t LetterNumber;
const SequenceNumber maxSequenceNumber( static_cast<SequenceNumber>( -1 ) );
const SequenceLength maxSequenceLength( static_cast<SequenceLength>( -1 ) );
const LetterNumber maxLetterNumber( static_cast<LetterNumber>( -1 ) );
// Type to represent number of letters in an indexed chunk of the BWT
// Must be the case that max run size per token * max tokens per chunk
// fits into this number without overflow. Might be OK to replace with
// a 16-bit type but we'll leave this at 32 bit initially.
typedef uint32_t LetterNumberCompact;
// For Countwords
const LetterNumber matchFlag( ( ( LetterNumber )1 ) << ( ( 8 * sizeof( LetterNumber ) ) - 1 ) );
const LetterNumber matchMask( ~matchFlag );
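// Illustrative use: the top bit of a position marks an interval found in both BWTs.
//   LetterNumber pos = somePos | matchFlag;        // flag the position
//   bool foundInBoth = ( pos & matchFlag ) != 0;   // test the flag
//   LetterNumber realPos = pos & matchMask;        // recover the unflagged position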
// For Metagenomics
//#define METABEETL_FOR_HIV
const string taxLevelNames[] = {"superkingdom", "phylum", "class", "order", "family", "genus", "species", "subspecies", "unnamed rank 9", "unnamed rank 10", "unnamed rank 11"
#ifdef METABEETL_FOR_HIV
, "rank12"
, "rank13"
, "rank14"
, "rank15"
, "rank16"
, "rank17"
, "rank18"
, "rank19"
, "rank20"
, "rank21"
, "rank22"
, "rank23"
, "rank24"
, "rank25"
, "rank26"
, "rank27"
, "rank28"
, "rank29"
, "rank30"
, "rank31"
, "rank32"
, "rank33"
, "rank34"
, "rank35"
, "rank36"
, "rank37"
, "rank38"
, "rank39"
, "rank40"
, "rank41"
, "rank42"
, "rank43"
, "rank44"
, "rank45"
, "rank46"
, "rank47"
, "rank48"
, "rank49"
, "rank50"
, "rank51"
, "rank52"
, "rank53"
, "rank54"
, "rank55"
, "rank56"
, "rank57"
, "rank58"
, "rank59"
, "rank60"
, "rank61"
, "rank62"
, "rank63"
, "rank64"
, "rank65"
, "rank66"
, "rank67"
, "rank68"
, "rank69"
, "rank70"
, "rank71"
, "rank72"
, "rank73"
, "rank74"
, "rank75"
, "rank76"
, "rank77"
, "rank78"
, "rank79"
, "rank80"
, "rank81"
, "rank82"
, "rank83"
, "rank84"
, "rank85"
, "rank86"
, "rank87"
, "rank88"
, "rank89"
, "rank90"
, "rank91"
, "rank92"
, "rank93"
, "rank94"
, "rank95"
, "rank96"
, "rank97"
, "rank98"
, "rank99"
, "rank100"
, "rank101"
, "rank102"
, "rank103"
, "rank104"
, "rank105"
, "rank106"
, "rank107"
, "rank108"
, "rank109"
, "rank110"
, "rank111"
, "rank112"
, "rank113"
, "rank114"
, "rank115"
, "rank116"
, "rank117"
, "rank118"
, "rank119"
, "rank120"
, "rank121"
, "rank122"
#endif //ifdef METABEETL_FOR_HIV
};
#ifdef METABEETL_FOR_HIV
const uint taxLevelSize = 122;
#else
const uint taxLevelSize = 11; // becomes 122 when METABEETL_FOR_HIV is defined (see above)
#endif
typedef uint32_t MetagFileNumRefType;
// For generalized suffix array (GSA): Definition of each element
struct ElementType
{
SequenceLength sa; // Position within the sequence, ranging from 0 to the read length
SequenceNumber numSeq; // Index of the sequence it belongs to
};
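// Illustrative example: the suffix starting at position 2 of read number 5 is
// represented by the GSA element { sa = 2, numSeq = 5 }.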
// For Huffman encoder
union BitBuffer
{
unsigned int ui;
unsigned long long ull;
};
// USE_ATTRIBUTE_PACKED: if set, set __attribute__((packed)) for the
// struct sortElement in Sorting.hh; reduces the memory consumption from
// 24 to 17 bytes per input base
#define USE_ATTRIBUTE_PACKED 1
#endif
<|start_filename|>src/libzoo/util/ColorText.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "ColorText.hh"
#include <cstring>
#include <unistd.h>
using namespace std;
string ColorText::startRed = "";
string ColorText::endRed = "";
void ColorText::init( int activateColor )
{
static bool firstTime = true;
if ( !firstTime )
return;
firstTime = false;
if ( activateColor != 0 )
{
char const *t = getenv ( "TERM" );
if ( activateColor == 1
|| ( isatty ( STDOUT_FILENO ) && ( t == 0 || strcmp ( t, "dumb" ) != 0 ) )
)
{
startRed = "\033[7;31m"; // Note: 7=background to 1=text colour
endRed = "\033[0m";
}
}
}
<|start_filename|>src/BCR/PredictiveEncoding.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "PredictiveEncoding.hh"
#include <cassert>
#include <cmath>
#include <iostream>
#include <fstream>
using namespace std;
PredictionStatistics::PredictionStatistics()
: removedCount_( 0 )
, notRemovedCount_( 0 )
{
}
void PredictionStatistics::add( const bool isQualityRemoved, const char qual )
{
assert( qual >= 33 && "Error: Invalid Phred Q-score" );
if ( isQualityRemoved )
{
assert( qual != 33 && "Error: Trying to remove a zero Q-score" );
++removedCount_;
++( removedQualities_[qual - 33] );
}
else
{
++notRemovedCount_;
}
}
void PredictionStatistics::outputToFile( const string &filename )
{
ofstream os( filename.c_str() );
os << "Removed Qscores\t" << removedCount_ << endl;
os << "Kept Qscores\t" << notRemovedCount_ << endl;
double removedCount = 0;
double removedSum = 0;
double associatedErrorRateSum = 0;
for ( unsigned int i = 1; i < removedQualities_.size(); ++i )
{
os << "removed Qscore\t" << i << "\t" << removedQualities_[i] << endl;
removedCount += removedQualities_[i];
removedSum += removedQualities_[i] * i;
associatedErrorRateSum += removedQualities_[i] * pow( 10, -( double )i / 10.0 );
}
assert( removedCount == removedCount_ );
double bestReplacementQuality = -10 * log10( associatedErrorRateSum / removedCount );
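// Phred convention: Q = -10*log10(p), so this is the Q-score whose error probability
// equals the mean error probability of the removed scores.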
os << "Mean removed Qscore\t" << ( removedSum / removedCount ) << "\t" << char( round( 33 + removedSum / removedCount ) ) << endl;
os << "Mean associated error rate\t" << ( associatedErrorRateSum / removedCount ) << endl;
os << "Best replacement QScore\t" << bestReplacementQuality << "\t" << char( round( 33 + bestReplacementQuality ) ) << endl;
}
<|start_filename|>src/backtracker/BackTrackerBase.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BackTrackerBase.hh"
#include "EndPosFile.hh"
#include "IntervalHandlerBase.hh"
#include "libzoo/util/Logger.hh"
#include <algorithm>
using namespace std;
BackTrackerBase::BackTrackerBase( const string &subset, const int cycle, const bool noComparisonSkip, const bool propagateSequence )
: subset_( subset )
, cycle_( cycle )
, noComparisonSkip_( noComparisonSkip )
, numRanges_( 0 )
, numSingletonRanges_( 0 )
, propagateSequence_( propagateSequence )
{}
void BackTrackerBase::skipIfNecessary( const Range &thisRange,
LetterNumber &currentPos,
BwtReaderBase &inBwt,
LetterCount &countsSoFar )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Want 2 skip " << thisRange.pos_ - currentPos << ": " << currentPos << " to " << thisRange.pos_ << endl;
if ( ( thisRange.pos_ & matchMask ) > currentPos )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Skipping " << thisRange.pos_ - currentPos << ": " << currentPos << " to " << thisRange.pos_ << endl;
inBwt.readAndCount( countsSoFar, ( thisRange.pos_ & matchMask ) - currentPos );
currentPos = ( thisRange.pos_ & matchMask );
} // ~if
if ( thisRange.pos_ < currentPos )
{
cerr << "thisRange is too low. Should be > " << currentPos << "." << endl;
}
} // ~BackTrackerBase::skipIfNecessary
void BackTrackerBase::processSingletons(
const int pileNum
, bool ¬AtLast
, RangeStoreExternal &rA_
, Range &thisRange
, LetterNumber &currentPos
, BwtReaderBase *inBwt
, LetterCount &countsSoFar
, LetterCount &countsThisRange
, IntervalHandlerBase &intervalHandler
, AlphabetFlag &propagateInterval
, string &thisWord
, const bool doesPropagateToEnd
, IntervalHandler_FoundCallbackPtr foundCallbackPtr
, EndPosFile &endPosFile
, int sampleId
)
{
vector<Range> rAPile0;
while ( notAtLast )
{
notAtLast = rA_.getRange( thisRange );
Logger_if( LOG_FOR_DEBUGGING )
{
Logger::out() << "notAtLast=" << notAtLast << endl;
Logger::out() << "thisRange.pos_=" << thisRange.pos_ << endl;
}
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
if ( notAtLast )
{
Logger::out() << "RangeA: " << pileNum << " "
<< thisRange.word_ << " "
<< thisRange.pos_ << " " << thisRange.num_
<< " -- " << currentPos
<< " < " << thisRange.word_
<< " match=" << ( thisRange.pos_ & matchFlag )
<< endl;
}
else
{
Logger::out() << "RangeA: last reached" << endl;
}
}
if ( ( notAtLast == false ) || ( ( thisRange.pos_ & matchFlag ) != 0 ) ) break;
char *bwtSubstring = NULL;
prepareCallbackArgs(
thisRange
, currentPos
, inBwt
, countsSoFar
, countsThisRange
, intervalHandler
, bwtSubstring
);
if ( sampleId == 1 )
intervalHandler.foundInAOnly
(
pileNum,
countsSoFar,
countsThisRange,
bwtSubstring,
thisRange,
propagateInterval,
cycle_
);
else
intervalHandler.foundInBOnly
(
pileNum,
countsSoFar,
countsThisRange,
bwtSubstring,
thisRange,
propagateInterval,
cycle_
);
/*
( intervalHandler.*foundCallbackPtr )
(
pileNum,
countsSoFar,
countsThisRange,
bwtSubstring,
thisRange,
propagateInterval,
cycle_
);
*/
// Sequence numbers extraction
if ( doesPropagateToEnd )
{
if ( countsThisRange.count_[0] > 0 && thisRange.isBkptExtension_ )
{
#pragma omp critical (IO)
cout << "READNUM"
<< "(" << sampleId << ")"
<< " " << pileNum
<< " " << ( thisRange.pos_ + 1 )
<< " " << thisRange.num_
<< " " << countsThisRange.count_[0]
<< endl;
}
for ( int l( 0 ); l < alphabetSize; l++ )
propagateInterval[l] = ( countsThisRange.count_[l] > 0 );
}
// add ranges for any children
bool hasChild = false;
for ( int l( 0 ); l < alphabetSize; l++ )
{
// if (countsThisRange.count_[l]>=minOcc)
if ( propagateInterval[l] == true )
{
updatePropagatedSuffixWord( hasChild, thisRange, thisWord, l );
#ifdef OLD_BACKTRACKER_COMPAT
Range newRange( thisWord,
countsSoFar.count_[l],
countsThisRange.count_[l],
thisRange.isBkptExtension_ );
#else
Range &newRange = intervalHandler.getSubIntervalRange (
thisWord,
countsSoFar.count_[l],
countsThisRange.count_[l],
thisRange.isBkptExtension_,
thisRange,
l
);
#endif
if ( noComparisonSkip_ ||
!rA_.isRangeKnown( newRange, l, pileNum, subset_, cycle_ ) )
{
if ( l != 0 )
{
rA_.addRange( newRange, l, pileNum, subset_, cycle_ );
}
else
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << "Adding range item to $ pile: ";
newRange.prettyPrint( Logger::out() );
Logger::out() << ", pileNum=" << pileNum << ", subset=" << subset_ << ", cycle=" << cycle_ << endl;
}
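// Ranges reaching the $ pile are split into single positions, remapped from
// dollar index to sequence number via the end-pos file, and buffered out of
// order; RangeStoreExternal sorts and flushes them when clear() is called.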
LetterNumber rangeStartPos = newRange.pos_;
LetterNumber rangeLength = newRange.num_;
newRange.num_ = 1;
for ( LetterNumber i = 0; i < rangeLength; ++i )
{
newRange.pos_ = rangeStartPos + i;
SequenceNumber newPos = endPosFile.convertDollarNumToSequenceNum( newRange.pos_ );
newRange.pos_ = newPos;
// rAPile0.push_back( newRange );
rA_.addOutOfOrderRange( newRange, l, pileNum, subset_, cycle_ );
}
}
}
#ifdef OLD_BACKTRACKER_COMPAT
#else
// delete newRange;
#endif
} // ~if
/*
std::sort( rAPile0.begin(), rAPile0.end(), compareRangeByPos );
for( Range &r: rAPile0 )
rA_.addRange( r, l, pileNum, subset_, cycle_ );
*/
rAPile0.clear();
} // ~for l
if ( hasChild == false )
{
#ifdef OLD
// if no children, print word itself
cout << "GOLD ";
cout << thisRange.word_;
cout << " " << thisRange.num_ << endl;
#endif
numSingletonRanges_++;
} // ~if
countsSoFar += countsThisRange;
currentPos += thisRange.num_;
numRanges_++;
} // ~while notAtLast
} /// END OF BLOCK1
void BackTrackerBase::prepareCallbackArgs(
Range &thisRange
, LetterNumber &currentPos
, BwtReaderBase *inBwt
, LetterCount &countsSoFar
, LetterCount &countsThisRange
, IntervalHandlerBase &intervalHandler
, char *&bwtSubstring
)
{
skipIfNecessary( thisRange, currentPos, ( *inBwt ), countsSoFar );
if ( intervalHandler.needSubstring )
{
if ( thisRange.num_ > bwtSubstringStore_.size() )
bwtSubstringStore_.resize( thisRange.num_ );
bwtSubstring = &bwtSubstringStore_[0];
LetterNumber charsRead = ( *inBwt )( bwtSubstring, thisRange.num_ );
assert( charsRead == thisRange.num_ );
countsThisRange.clear();
countsThisRange.countString( bwtSubstring, thisRange.num_ );
}
else
{
// count children
countsThisRange.clear();
LetterNumber charsRead = inBwt->readAndCount( countsThisRange, thisRange.num_ );
assert( charsRead == thisRange.num_ );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << countsThisRange << endl;
}
}
} // END_OF_BLOCK2
void BackTrackerBase::updatePropagatedSuffixWord( bool &hasChild, const Range &thisRange, string &thisWord, const AlphabetSymbol l )
{
if ( propagateSequence_ )
{
if ( hasChild == false )
{
assert( thisWord.size() == thisRange.word_.size() + 1 );
thisWord.replace( 1, thisRange.word_.size(), thisRange.word_ );
} // ~if
thisWord[0] = alphabet[l];
}
hasChild = true;
} // END_OF_BLOCK3
<|start_filename|>src/shared/Config.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef DEFINED_CONFIG_HH
#define DEFINED_CONFIG_HH
/* Control flags - modify and recompile to change program behaviour */
//#define DEBUG 1
#define USE_STANDARD_LEXICOGRAPHIC_ORDER 1
// USE_4_BITS_PER_BASE: if set, convert the input sequences from ASCII into
// a 4-bits-per-base format and use that from then on
#define USE_4_BITS_PER_BASE 1
// USE_PREFIX_ONLY: if set, don't copy the whole of the sequences but
// only the prefix that's yet to be processed
// NB - USE_PREFIX_ONLY must only be used with USE_4_BITS_PER_BASE
#define USE_PREFIX_ONLY 1
// TRACK_SEQUENCE_NUMBER: if set, store an unsigned integer with each
// sequence that reports its originating position
//#define TRACK_SEQUENCE_NUMBER 1
// REPORT_COMPRESSION_RATIO: if set, output the compression achieved in the
// partial BWTs (only has an effect if compression is enabled during runtime)
#define REPORT_COMPRESSION_RATIO 1
// REMOVE_TEMPORARY_FILES: if set, remove all files except BWT once creation
// is complete
#define REMOVE_TEMPORARY_FILES 1
/* Control parameters - modify and recompile to change program behaviour */
const int maxSeqSize( 32 * 1024 ); // 32k to fit Moleculo reads
const int bwtBufferSize( 16384 ); // 1<<20=1048576
// Read this many sequences into RAM at once
static const unsigned int ReadBufferSize( 1024 );
// LJ options
//#define SEND_DATA_TO_FILES_FOR_DEBUGGING
//#define READ_DATA_FROM_FILES_FOR_DEBUGGING
// ################## Huffman encoding parameters ###########################
#define ACTIVATE_HUFFMAN
#ifdef ACTIVATE_HUFFMAN
//#define USE_EXTRA_CHARACTER_Z
const int huffmanBufferSize( 128 );
const int huffmanWriterBufferSize( 2097152 );
// number of double codes used to introduce runs
const unsigned int numDoubleCodes( 6 );
// EOT character 'Z'
const unsigned int numSingleCodes( numDoubleCodes + 1 );
// maximal size of one token is 8 bit
const unsigned int maxTokenSize( 8 );
// we will have 256 different tokens
const unsigned int numTokens( 1 << maxTokenSize );
// 255 in binary -> 1111 1111
// used to extract the last 8 bits during processing
const unsigned int tokenMask( numTokens - 1 );
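// e.g. extracting the next 8-bit token from a bit buffer: token = bits & tokenMask;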
// for once a typedef here, otherwise types.hh would have to include config.hh
// this is a lookup table for fast 8-bit mask -> original alphabet letter
// i.e. the token table assigns several 8-bit binary tokens to specific letters in
// the input alphabet. A lookup using this table supersedes a slow bit-by-bit
// comparison to decide which letter comes next.
typedef unsigned int TokenTable[numTokens];
// single letter codes
// hexadecimal codes for each letter used by Huffman encoder
// for reference / debugging below the corresponding binary values
// ACGTN$Z= 011 101 110 100 1110001 11100000
static const unsigned long long singleCharCode[numSingleCodes]
= {0x17, 0x6, 0x5, 0x3, 0x47, 0x1, 0x7}; // Tony's choice
// = {0x14, 0x6, 0x4, 0x1, 0x2B, 0x3, 0xA8}; // Tobias' try
// $ A C G N T Z
// 10111 011 101 110 1110001 100 11100000
// length in bit for each token
static const unsigned int singleCharLength[numSingleCodes]
//= {5,3,3,3,6,4,8}; // Tobias
= {5, 3, 3, 3, 7, 3, 8}; // Tony
// without 'Z', runlength encoding codes
// hexadecimal codes for each letter used by Huffman encoder
// for reference / debugging below the corresponding binary values
// ACGTN$ = 111001 1111 001 010 11100001 000
static const unsigned long long doubleCharCode[numDoubleCodes]
// $ A C G N T // no Z, double codes
= { 0x27, 0x0, 0x4, 0x2, 0x87, 0xF}; // Tony's choice
// = { 0xA9, 0x7, 0x2, 0xB, 0x55, 0x0}; // Tobias' choice
// length in bit for each token
static const unsigned int doubleCharLength[numDoubleCodes]
// $ A C G N T
// = {8,3,3,4,7,3}; // Tobias
= {6, 3, 3, 3, 8, 4}; // Tony
// position of the notinalphabet char in the baseNames array
static const unsigned int finalCharCode( 6 );
#endif //ifdef ACTIVATE_HUFFMAN
// ################## END Huffman parameters ###########################
#endif
<|start_filename|>src/shared/Tools.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**
** Collection of basic tools and defines
**/
#include "Tools.hh"
#include "Alphabet.hh"
#include "libzoo/util/Logger.hh"
#include <cstdlib>
#include <dirent.h>
#include <sstream>
#include <unistd.h>
#include <sys/syscall.h>
#include <sys/types.h>
using namespace std;
double Tools::startTime;
void Tools::StartTimer()
{
static const double in_seconds = 1.0 / static_cast<double>( CLOCKS_PER_SEC );
startTime = clock() * in_seconds;
}
double Tools::GetTime()
{
static const double in_seconds = 1.0 / static_cast<double>( CLOCKS_PER_SEC );
double curTime = clock() * in_seconds;
return curTime - startTime;
}
uchar *Tools::GetRandomString( unsigned min, unsigned max, unsigned &alphabetSize )
{
unsigned len = rand() % ( max - min ) + min;
alphabetSize = rand() % 26 + 1;
uchar *temp = new uchar[len + 2];
for ( unsigned i = 0; i < len; i++ )
temp[i] = 97 + rand() % alphabetSize;
temp[len] = 0u ;
temp[len + 1] = '\0';
return temp;
}
uint8_t Tools::FloorLog2( uint64_t i )
{
uint8_t b = 0;
if ( i == 0 )
return 0;
while ( i )
{
b++;
i >>= 1;
}
return b - 1;
}
//Creating table to find logn in small time
uint8_t *Tools::MakeTable()
{
uint8_t *table = new uint8_t[256];
for ( unsigned i = 0; i < 256; i++ )
{
if ( i == 0 )
table[i] = 0;
if ( i >= 1 && i < ( 1 << 1 ) )
table[i] = 1;
if ( i >= ( 1 << 1 ) && i < ( 1 << 2 ) )
table[i] = 2;
if ( i >= ( 1 << 2 ) && i < ( 1 << 3 ) )
table[i] = 3;
if ( i >= ( 1 << 3 ) && i < ( 1 << 4 ) )
table[i] = 4;
if ( i >= ( 1 << 4 ) && i < ( 1 << 5 ) )
table[i] = 5;
if ( i >= ( 1 << 5 ) && i < ( 1 << 6 ) )
table[i] = 6;
if ( i >= ( 1 << 6 ) && i < ( 1 << 7 ) )
table[i] = 6;
if ( i >= ( 1 << 7 ) && i < ( 1 << 8 ) )
table[i] = 7;
if ( i >= ( 1 << 8 ) && i < ( 1 << 9 ) )
table[i] = 8;
}
return table;
}
uint8_t Tools::FastFloorLog2( uint32_t i )
{
static uint8_t *table = MakeTable();
uint8_t u;
if ( i >> 24 ) u = 22 + table[ i >> 24];
else if ( i >> 16 ) u = 14 + table[ i >> 16];
else if ( i >> 8 ) u = 6 + table[ i >> 8];
else u = table[i] - 1;
return u;
}
uint8_t Tools::CeilLog2( uint64_t i )
{
uint8_t j = FloorLog2( i );
if ( ( uint64_t )( 1lu << j ) != i )
return j + 1;
return j;
}
uchar *Tools::GetFileContents( char *filename, size_t maxSize )
{
std::ifstream::pos_type posSize;
std::ifstream file ( ( char * )filename, std::ios::in | std::ios::binary | std::ios::ate );
if ( file.is_open() )
{
posSize = file.tellg();
size_t size = posSize;
if ( maxSize != 0 && size > maxSize )
size = maxSize;
char *memblock = new char [size + 1];
file.seekg ( 0, std::ios::beg );
file.read ( memblock, size );
memblock[size] = '\0';
file.close();
return ( uchar * )memblock;
}
else
return 0;
}
/*
void getFileName( const string &stem, const char code, const int pile,
string &fileName )
{
fileName = stem;
fileName += '-';
fileName += code;
fileName += '0';
assert( pile <= 9 );
fileName += ( char )( 48 + pile );
// cerr << "Made file name " << fileName << endl;
}
*/
bool isValidFastaFile( const char *fileName )
{
FILE *file;
int probe_char;
file = fopen( fileName, "r" );
if ( file != NULL )
{
cerr << "Checking fasta file " << fileName << endl;
}
else
{
cerr << "Could not read fasta file " << fileName << endl;
return 0;
}
// grep first char, should be a >, if not print error
probe_char = getc( file );
if ( probe_char != EOF && probe_char != '>' )
{
cerr << "Validation of fasta file " << fileName
<< " failed. Maybe wrong format?" << endl;
fclose( file );
return 0;
}
ungetc( probe_char, file );
fclose( file );
return 1;
}
bool isValidReadFile( const char *fileName )
{
FILE *file;
int probe_char;
file = fopen( fileName, "r" );
if ( file != NULL )
{
cerr << "Checking read file " << fileName << endl;
}
else
{
cerr << "Could not read file " << fileName << endl;
return 0;
}
// grep first char, should be a ACGTN, if not print error
probe_char = getc( file );
// only basic check, no EOF and no fastq or fasta header
if ( probe_char != EOF && ( probe_char == '@' || probe_char == '>' ) )
{
cerr << "Validation of read file " << fileName
<< " failed. Maybe wrong format?" << endl;
fclose( file );
return 0;
}
ungetc( probe_char, file );
fclose( file );
return 1;
}
bool readWriteCheck( const char *fileName, const bool checkWrite, const bool failIfError )
{
FILE *file;
file = fopen( fileName, checkWrite ? "w" : "r" );
if ( file == NULL && failIfError )
{
string mode = checkWrite ? "writing." : "reading.";
cerr << "Could not open file " << fileName << " for "
<< mode << " Aborting." << endl;
exit( EXIT_FAILURE );
}
if ( file )
fclose( file );
return ( file != NULL );
}
int isDirectoryEmpty( const string &dirname ) // return: -1=directory doesn't exist, 0=not empty, 1=empty
{
int n = 0;
struct dirent *d;
DIR *dir = opendir( dirname.c_str() );
if ( dir == NULL ) //Not a directory or doesn't exist
return -1;
while ( ( d = readdir( dir ) ) != NULL )
{
if ( ++n > 2 )
break;
}
closedir( dir );
if ( n <= 2 ) //Directory Empty
return 1;
else
return 0;
}
void checkIfEqual( const int arg1, const int arg2 )
{
if ( arg1 != arg2 )
{
cerr << "Validation failed: " << arg1 << " != "
<< arg2 << ". Should be (==). Aborting." << endl;
exit( EXIT_FAILURE );
}
}
void checkIfNotEqual( const int arg1, const int arg2 )
{
if ( arg1 == arg2 )
{
cerr << "Validation failed: " << arg1 << " == "
<< arg2 << ". Should be (!=). Aborting." << endl;
exit( EXIT_FAILURE );
}
}
bool hasPrefix( const string &fullString, const string &prefix )
{
return fullString.compare( 0, prefix.size(), prefix ) == 0;
}
bool hasSuffix( const string &fullString, const string &suffix )
{
return fullString.size() >= suffix.size()
&& fullString.compare( fullString.size() - suffix.size(), suffix.size(), suffix ) == 0;
}
vector<string> splitString ( string s, const string &token )
{
vector<string> vs;
while ( s.find( token ) != string::npos )
{
vs.push_back( s.substr( 0, s.find( token ) ) );
s = s.substr( s.find( token ) + ( token.length() ) );
}
vs.push_back( s );
return vs;
}
bool isBwtFileCompressed( const string &filename )
{
ifstream bwtFile( filename.c_str() );
for ( int i = 0; i < 10; ++i )
{
char c = 'A';
bwtFile.get( c );
switch ( toupper( c ) )
{
case 'A':
case 'C':
case 'G':
case 'T':
case 'N':
case '$':
break;
default:
return true;
}
}
return false;
}
void detectInputBwtProperties( const string &prefix, vector<string> &filenames, bool &isBwtCompressed, string &availableFileLetters )
{
// Detect {prefix}-B0* files
for ( unsigned i = 0; i < alphabetSize; ++i )
{
stringstream filename;
filename << prefix << "-B0" << i;
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "Probing " << filename.str() << "..." << endl;
if ( access( filename.str().c_str(), R_OK ) == -1 )
break;
filenames.push_back( filename.str() );
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "Discovered " << filename.str() << endl;
}
// Detect {prefix}-*01 files
for ( char c = 'A'; c <= 'Z' ; ++c )
{
stringstream filename;
filename << prefix << "-" << c << "01";
Logger_if( LOG_FOR_DEBUGGING ) Logger::out() << "Probing " << filename.str() << "..." << endl;
if ( access( filename.str().c_str(), R_OK ) == 0 )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Discovered " << filename.str() << endl;
availableFileLetters += c;
}
}
// Detect if files are run-length-encoded based on first few bytes
isBwtCompressed = false;
if ( !filenames.empty() )
{
for ( unsigned int j = 1; j < filenames.size() && !isBwtCompressed; ++j )
{
if ( isBwtFileCompressed( filenames[j] ) )
isBwtCompressed = true;
}
// When BWT0x are RLE-encoded: Check that BWT0 file has the same encoding as others, to detect the old situation where BWT0 was always ASCII-encoded
if ( isBwtCompressed )
{
bool isBwt0Compressed = isBwtFileCompressed( filenames[0] );
if ( !isBwt0Compressed )
{
cerr << "ERROR: " << filenames[0] << " is ASCII-encoded, whereas the other BWT files are using a different encoding." << endl;
cerr << " In this version of BEETL, all the files must have the same encoding." << endl;
cerr << " You can convert this file with beetl-convert --input-format=bwt_ascii --output-format=bwt_rle." << endl;
exit( -1 );
}
}
}
if ( isBwtCompressed )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "BWT files detected as RLE compressed" << endl;
}
else
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "BWT files detected as ASCII" << endl;
}
}
int safeRename( const string &from, const string &to )
{
// renames/moves files even across partitions
if ( rename( from.c_str(), to.c_str() ) )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
perror( ( "Info: BCRexternalBWT: Error \"renaming\" file " + from + " to " + to ).c_str() );
cerr << "Using mv command instead." << endl;
}
string cmd = "mv -f \"" + from + "\" \"" + to + "\"";
system( cmd.c_str() );
}
return 0;
}
void pauseBetweenCycles()
{
static int skip = 0;
if ( skip )
{
clog << "Iteration complete. Still continuing for " << skip << " iteration." << endl;
--skip;
}
else
{
fflush( 0 );
clog << "Iteration complete" << endl;
clog << " Press Return to continue, or enter a number of cycles to continue for..." << endl;
string input;
getline( cin, input );
stringstream ss( input );
ss >> skip;
if ( skip )
--skip;
}
}
void readProcSelfStat( int &out_pid, int &out_num_threads, int &out_processor )
{
using std::ios_base;
using std::ifstream;
using std::string;
int tid = syscall( SYS_gettid ); //gettid();
// 'file' stat seems to give the most reliable results
ostringstream oss;
oss << "/proc/" << tid << "/stat";
ifstream stat_stream( oss.str().c_str(), ios_base::in );
// dummy vars for leading entries in stat that we don't care about
//
string /*pid,*/ comm, state, ppid, pgrp, session, tty_nr, tpgid;
string flags, minflt, cminflt, majflt, cmajflt, utime, stime, cutime;
string cstime, priority, nice, /*num_threads,*/ itrealvalue, starttime, vsize, rss;
string rsslim, startcode, endcode, startstack, kstkesp, kstkeip, signal, blocked;
string sigignore, sigcatch, wchan, nswap, cnswap, exit_signal, /*processor,*/ rt_priority;
string policy, delayacct_blkio_ticks;
stat_stream >> out_pid >> comm >> state >> ppid >> pgrp >> session >> tty_nr >> tpgid
>> flags >> minflt >> cminflt >> majflt >> cmajflt >> utime >> stime >> cutime
>> cstime >> priority >> nice >> out_num_threads >> itrealvalue >> starttime >> vsize >> rss
>> rsslim >> startcode >> endcode >> startstack >> kstkesp >> kstkeip >> signal >> blocked
>> sigignore >> sigcatch >> wchan >> nswap >> cnswap >> exit_signal >> out_processor >> rt_priority
>> policy >> delayacct_blkio_ticks;
stat_stream.close();
out_pid = tid;
// long page_size_kb = sysconf(_SC_PAGE_SIZE) / 1024; // in case x86-64 is configured to use 2MB pages
// vm_usage = vsize / 1024.0;
// resident_set = rss * page_size_kb;
}
shared_ptr<istream> openInputFileOrDashAsCin( const string &filename )
{
shared_ptr<istream> input;
if ( filename == "-" )
input.reset( &cin, emptyDeleter() );
else
input.reset( new ifstream( filename.c_str() ) );
return input;
}
<|start_filename|>src/parameters/IndexParameters.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BEETL_INDEX_PARAMETERS_HH
#define BEETL_INDEX_PARAMETERS_HH
#include "libzoo/cli/ToolParameters.hh"
#include <string>
namespace BeetlIndexParameters
{
// Option container
enum SearchOptions
{
SEARCH_OPTION_COUNT // end marker
};
} // namespace BeetlIndexParameters
class IndexParameters : public ToolParameters
{
public:
IndexParameters()
{
using namespace BeetlIndexParameters;
addEntry( -1, "input", "--input", "-i", "Input filename prefix (i.e. BWT files are \"prefix-B0[0-6]\")", "", TYPE_STRING | REQUIRED );
addEntry( -1, "block size", "--block-size", "-b", "Interval between index points (smaller=faster but more RAM)", "", TYPE_INT );
addEntry( -1, "force", "--force", "-f", "Overwrite any existing index files", "", TYPE_SWITCH );
// addEntry( -1, "output", "--output", "-o", "Output filename", "searchedKmers_positions", TYPE_STRING | REQUIRED );
// addEntry( -1, "pause between cycles", "--pause-between-cycles", "", "Wait for a key press after each cycle", "", TYPE_SWITCH );
addDefaultVerbosityAndHelpEntries();
}
};
#endif //ifndef BEETL_INDEX_PARAMETERS_HH
<|start_filename|>src/backtracker/RangeStore.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "RangeStore.hh"
#include "libzoo/util/Logger.hh"
#include <algorithm>
#include <cstring>
#include <inttypes.h>
#include <sstream>
#include <unistd.h>
using namespace std;
//#define USE_RAM_FILES_FOR_READ_MODE
//#define USE_RAM_FILES_FOR_WRITE_MODE
//#define DONT_DELETE_PREVIOUS_CYCLE_FILES
#ifdef USE_RAM_FILES_FOR_READ_MODE
# define LOCAL_DEF__TEMPORARY_FILE__READ_MODE TemporaryRamFile
#else
# define LOCAL_DEF__TEMPORARY_FILE__READ_MODE TemporaryFile
#endif
#ifdef USE_RAM_FILES_FOR_WRITE_MODE
# define LOCAL_DEF__TEMPORARY_FILE__WRITE_MODE TemporaryRamFile
#else
# define LOCAL_DEF__TEMPORARY_FILE__WRITE_MODE TemporaryFile
#endif
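// Filter used when only a subset of suffixes is of interest: returns false when the
// (pileNum, portionNum) letters contradict the subset string at the current cycle,
// and true otherwise (an empty subset string accepts everything).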
bool RangeStore::isSubsetValid( const string &subset, const int cycle, const int pileNum, const int portionNum )
{
switch ( subset.size() )
{
case 0:
return true;
case 1:
if ( cycle == 1 && subset[subset.size() - cycle] != alphabet[portionNum] )
return false;
return true;
default:
if ( cycle < ( int )subset.size() && cycle >= 1 )
{
if ( subset[subset.size() - cycle - 1] != alphabet[pileNum] || subset[subset.size() - cycle] != alphabet[portionNum] )
return false;
}
}
return true;
}
//
// RangeStoreExternal
//
RangeStoreExternal::RangeStoreExternal( const bool propagateSequence, const string fileStem )
: fileStem_( fileStem )
, stateIn_( propagateSequence )
, stateOut_( alphabetSize, vector< RangeState >( alphabetSize, RangeState( propagateSequence ) ) )
, stateInForComparison_( alphabetSize, vector< RangeState >( alphabetSize, RangeState( propagateSequence ) ) )
{
setCycleNum( 0 );
string fileName;
for ( int i( 0 ); i < alphabetSize; ++i )
{
for ( int j( 0 ); j < alphabetSize; ++j )
{
stateOut_[i][j].clear();
getFileName( fileStemIn_, i, j, fileName );
if ( LOCAL_DEF__TEMPORARY_FILE__WRITE_MODE::remove( fileName.c_str() ) == 0 )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Removed " << fileName << endl;
}
getFileName( fileStemOut_, i, j, fileName );
if ( LOCAL_DEF__TEMPORARY_FILE__WRITE_MODE::remove( fileName.c_str() ) == 0 )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Removed " << fileName << endl;
}
} // ~for j
} // ~for i
} // ~ctor
RangeStoreExternal::~RangeStoreExternal()
{
}
/*
void RangeStoreExternal::swap( void )
{
Logger::out( LOG_SHOW_IF_VERY_VERBOSE ) << "swap" << endl;
Logger::out( LOG_SHOW_IF_VERY_VERBOSE ) << fileStemIn_ << " " << fileStemOut_ << endl;
string temp = fileStemIn_;
fileStemIn_ = fileStemOut_;
fileStemOut_ = temp;
Logger::out( LOG_SHOW_IF_VERY_VERBOSE ) << fileStemIn_ << " " << fileStemOut_ << endl;
} // ~swap
*/
void RangeStoreExternal::setCycleNum( const int cycleNum )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "RangeStoreExternal: setting cycle num " << cycleNum << endl;
// Logger::out( LOG_SHOW_IF_VERY_VERBOSE ) << fileStemIn_ << " " << fileStemOut_ << endl;
// const string fileStem_ = "compareIntervals";
{
ostringstream oss;
oss << fileStem_ << "_cycle" << ( cycleNum - 1 );
fileStemIn_ = oss.str();
}
{
ostringstream oss;
oss << fileStem_ << "_cycle" << cycleNum;
fileStemOut_ = oss.str();
}
// Logger::out( LOG_SHOW_IF_VERY_VERBOSE ) << fileStemIn_ << " " << fileStemOut_ << endl;
} // ~swap
void RangeStoreExternal::setPortion( int pileNum, int portionNum )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "set portion " << alphabet[pileNum] << alphabet[portionNum] << endl;
stateIn_.clear();
// if (stateIn_.pFile_!=NULL) fclose(stateIn_.pFile_);
string fileName;
getFileName( fileStemIn_, pileNum, portionNum, fileName );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Opening input portion file " << fileName << endl;
stateIn_.pFile_ = LOCAL_DEF__TEMPORARY_FILE__READ_MODE::fopen( fileName.c_str(), "rb" );
if ( stateIn_.pFile_ == NULL )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Warning: no file " << fileName
<< " found, presuming no ranges of interest in this region"
<< endl;
}
}
void RangeStoreExternal::deleteInputPortion( int i, int j )
{
stateInForComparison_[i][j].clear();
string fileName;
getFileName( fileStemIn_, i, j, fileName );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Deleting input portion file " << fileName << endl;
#ifndef DONT_DELETE_PREVIOUS_CYCLE_FILES
if ( LOCAL_DEF__TEMPORARY_FILE__READ_MODE::remove( fileName.c_str() ) != 0 )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Could not remove " << fileName << endl;
}
#endif
stateIn_.clear();
}
bool RangeStoreExternal::getRange( RangeState &stateFileIn, Range &thisRange )
{
bool success;
thisRange.clear();
/* seems more optimised by removing this
if ( !stateFileIn.good() )
{
success = false;
}
else
*/
{
stateFileIn >> thisRange;
success = stateFileIn.good();
}
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << "get range: " << fileStemIn_ << " => ";
if ( success )
{
thisRange.prettyPrint( Logger::out() );
Logger::out() << endl;
if ( thisRange.pos_ == 0 ) usleep( 1000000 );
}
else
{
Logger::out() << "EOF" << endl;
}
}
return success;
} // ~getRange
bool RangeStoreExternal::getRange( Range &thisRange )
{
return getRange( stateIn_, thisRange );
}
bool RangeStoreExternal::isRangeKnown( const Range &r, const int pileNum, const int portionNum, const string &subset, const int cycle )
{
if ( !isSubsetValid( subset, cycle, pileNum, portionNum ) )
return true;
// Checks whether the interval already existed at the previous cycle
if ( stateInForComparison_[pileNum][portionNum].pFile_ == NULL )
{
string fileName;
getFileName( fileStemIn_, pileNum, portionNum, fileName );
stateInForComparison_[pileNum][portionNum].pFile_ = LOCAL_DEF__TEMPORARY_FILE__READ_MODE::fopen( fileName.c_str(), "rb" );
#ifdef ENCODE_POSITIONS_AS_OFFSETS
stateInForComparison_[pileNum][portionNum].lastProcessedPos_ = 0;
#endif
lastRangeReadForComparison_[pileNum][portionNum].pos_ = 0;
lastRangeReadForComparison_[pileNum][portionNum].num_ = 0;
}
if ( stateInForComparison_[pileNum][portionNum].pFile_ )
{
while ( lastRangeReadForComparison_[pileNum][portionNum].pos_ < r.pos_ )
{
if ( !getRange( stateInForComparison_[pileNum][portionNum], lastRangeReadForComparison_[pileNum][portionNum] ) )
lastRangeReadForComparison_[pileNum][portionNum].pos_ = maxLetterNumber;
}
if ( r.pos_ == lastRangeReadForComparison_[pileNum][portionNum].pos_ &&
r.num_ == lastRangeReadForComparison_[pileNum][portionNum].num_ )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << "Range detected as already processed: " << r.pos_ << " " << r.num_ << endl;
}
return true;
}
}
return false;
}
void RangeStoreExternal::addRange( const Range &r, const int pileNum, const int portionNum, const string &subset, const int cycle )
{
if ( !isSubsetValid( subset, cycle, pileNum, portionNum ) )
return;
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << "set range: " << fileStemOut_ << " " << alphabet[pileNum] << " " << alphabet[portionNum]
<< " " << r.word_
<< " ";
r.prettyPrint( Logger::out() );
Logger::out() << endl;
}
if ( stateOut_[pileNum][portionNum].pFile_ == NULL )
{
string fileName;
getFileName( fileStemOut_, pileNum, portionNum, fileName );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Made output file name " << fileName << endl;
#ifdef USE_RAM_FILES_FOR_WRITE_MODE
stateOut_[pileNum][portionNum].pFile_ = LOCAL_DEF__TEMPORARY_FILE__WRITE_MODE::fopen( fileName.c_str(), "wb", static_cast<uint64_t>( TemporaryFilesManager::get().ramLimitMB_ * 1024 * ( 1024 / 64 ) * 0.5 ) ); // Reserves half of the available RAM for temporary files
#else
stateOut_[pileNum][portionNum].pFile_ = LOCAL_DEF__TEMPORARY_FILE__WRITE_MODE::fopen( fileName.c_str(), "wb" );
#endif
//#ifdef PROPAGATE_SEQUENCE
stateOut_[pileNum][portionNum].lastProcessedPos_ = 0;
//#endif
}
stateOut_[pileNum][portionNum] << r;
}
void RangeStoreExternal::addOutOfOrderRange( const Range &r, const int pileNum, const AlphabetSymbol portionNum, const string &subset, const int cycle )
{
assert( pileNum == 0 && "only used for reordering $ pile after using end-pos file permutation" );
assert( subset.empty() && "todo: implement with subset" );
outOfOrderRangesForPile0_.push_back( make_pair( r, portionNum ) );
}
void RangeStoreExternal::clear( bool doDeleteFiles )
{
// Flush out-of-order ranges if necessary
if ( !outOfOrderRangesForPile0_.empty() )
{
std::sort( outOfOrderRangesForPile0_.begin(), outOfOrderRangesForPile0_.end(), compareRangeByPosInPair );
for ( pair< Range, AlphabetSymbol > &rp : outOfOrderRangesForPile0_ )
{
Range &r = rp.first;
// AlphabetSymbol portionNum = rp.second;
addRange( r, 0, 0/*portionNum*/, "", 0 ); //subset_, cycle_ ); // subset not implemented, cycle only used with subset
}
outOfOrderRangesForPile0_.clear();
}
// Clean up
for ( int i( 0 ); i < alphabetSize; ++i )
{
for ( int j( 0 ); j < alphabetSize; ++j )
{
stateOut_[i][j].clear();
stateInForComparison_[i][j].clear();
if ( doDeleteFiles )
{
#ifndef DONT_DELETE_PREVIOUS_CYCLE_FILES
string fileName;
getFileName( fileStemIn_, i, j, fileName );
if ( LOCAL_DEF__TEMPORARY_FILE__READ_MODE::remove( fileName.c_str() ) != 0 )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Could not remove " << fileName << endl;
}
#endif
}
}
}
}
void RangeStoreExternal::getFileName( const string &stem, const int pile, const int portion,
string &fileName )
{
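// e.g. stem "foo", pile 2, portion 3 -> "foo-02-03"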
fileName = stem;
fileName += '-';
fileName += '0';
fileName += ( char )( 48 + pile );
fileName += '-';
fileName += '0';
fileName += ( char )( 48 + portion );
if ( pile > 9 || portion > 9 )
{
cerr << "Alphabet seems to be larger than 9 chars. Aborting." << endl;
exit( -1 );
}
}
<|start_filename|>src/frontends/AlignCorrectorStrings.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "AlignCorrectorStrings.hh"
#include "BCRext.hh"
#include "BCRexternalBWT.hh"
#include "Common.hh"
#include "DatasetMetadata.hh"
#include "parameters/BwtParameters.hh"
#include "config.h"
#include "libzoo/cli/Common.hh"
#include "libzoo/util/Logger.hh"
#include "errors/ErrorInfo.hh"
#include "errors/WitnessReader.hh"
#include "errors/AlignmentParameters.hh"
#include "errors/BwtCorrector.hh"
#include "errors/CorrectionAligner.hh"
#include <algorithm>
#include <cassert>
#include <cstring>
#include <fstream>
#include <iostream>
#include <memory>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>
using namespace std;
int main( const int argc, const char **argv )
{
AlignmentParameters params;
if ( !params.parseArgv( argc, argv ) || params["help"] == 1 || !params.chechRequiredParameters() )
{
params.printUsage();
exit( params["help"] == 0 );
}
//read the corrections to be applied out of the corrections file...
vector<ErrorInfo> corrections = ErrorInfo::ReadCorrectionsFromCsv( params.getStringValue( "input corrections file" ) );
std::sort( corrections.begin(), corrections.end(), ErrorInfo::SortByRead );
cout << "Attempting to make " << corrections.size() << " corrections..." << endl;
unique_ptr<CorrectionAligner> aligner;
int alignmentType = params.getValue( "alignment type" );
if ( alignmentType == ALIGNMENT_TYPE_SW )
{
if ( !params["mismatch penalty"].isSet() || !params["deletion penalty"].isSet() || !params["insertion penalty"].isSet() )
{
cout << "Smith waterman alignment requires you to set the mismatch/deletion/insertion penalties and the witness length used to generate the corrections..." << endl;
exit( 1 );
}
cout << "Using Smith-Water man local alignment to position the corrections..." << endl;
aligner.reset( new SmithWatermanCorrectionAligner(
2,
params.getValue( "mismatch penalty" ),
params.getValue( "deletion penalty" ),
params.getValue( "insertion penalty" )
)
);
}
else if ( alignmentType == ALIGNMENT_TYPE_NO_INDELS )
{
if ( !params["correction quality"].isSet() || !params["min witness length"] )
{
cout << "Alignment with no indels requires you to set the witness length and min witness length parameter, and correction string quality parameter..." << endl;
exit( 1 );
}
cout << "Superimposing correction strings onto reads (without indels) and trimming to original length" << endl;
aligner.reset( new NoIndelAligner(
params.getStringValue( "correction quality" )[0],
params.getValue( "min witness length" ),
( params.getValue( "trim corrected reads" ) == 1 )
)
);
}
else if ( alignmentType == ALIGNMENT_TYPE_STITCH )
{
aligner.reset( new StitchAligner() );
}
else
{
cerr << "Error: unexpected alignment type" << endl;
assert( false );
}
string readsFileName = params.getStringValue( "input reads file" );
FILE *reads = fopen( readsFileName.c_str(), "r" );
string outputReadsFile = params.getStringValue( "corrected reads output file" );
cout << "Writing corrector-aligned reads to " << outputReadsFile << "..." << endl;
SeqReaderFile *readsFile = NULL;
switch ( params.getValue( "input reads format" ) )
{
case READS_FORMAT_FASTA:
readsFile = new SeqReaderFasta( reads );
break;
case READS_FORMAT_FASTQ:
readsFile = new SeqReaderFastq( reads );
break;
default:
cout << "Unsupported input reads file type" << endl;
exit( 1 );
}
ReadsFormat outFormat;
switch ( params.getValue( "output reads format" ) )
{
case READS_FORMAT_FASTQ:
outFormat = READS_FORMAT_FASTQ;
break;
case READS_FORMAT_FASTA:
outFormat = READS_FORMAT_FASTA;
break;
default:
cout << "Unsupported reads format!" << endl;
exit( 1 );
}
aligner->ApplyCorrections( readsFile, corrections, outputReadsFile, false, outFormat );
fclose( reads );
delete readsFile;
cout << "Done" << endl;
return 0;
}
<|start_filename|>src/frontends/DatasetMetadata.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef DATASET_METADATA_HH
#define DATASET_METADATA_HH
#include "Types.hh"
#include <string>
using std::string;
class DatasetMetadata
{
public:
SequenceLength nCycles;
SequenceNumber nReads;
LetterNumber nBases;
float rleCompressibility;
void init( const string &input, const string &inputFormat );
};
extern DatasetMetadata datasetMetadata;
#endif //ifndef DATASET_METADATA_HH
<|start_filename|>src/BCRext/BwtReader.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_BWTREADER_HH
#define INCLUDED_BWTREADER_HH
#include "Alphabet.hh"
#include "Config.hh"
#include "LetterCount.hh"
#include "Types.hh"
#include <cassert>
#include <cstdio>
#include <string>
#include <vector>
//#define DONT_USE_MMAP
class BwtWriterBase;
class BwtReaderBase
{
public:
BwtReaderBase( const string &filename );
BwtReaderBase( const BwtReaderBase &obj );
virtual ~BwtReaderBase();
virtual BwtReaderBase *clone() const = 0;
virtual LetterNumber readAndCount( LetterCount &c, const LetterNumber numChars ) = 0;
LetterNumber readAndCount( LetterCount &c );
virtual LetterNumber readAndSend( BwtWriterBase &writer, const LetterNumber numChars ) = 0;
virtual LetterNumber readAndSend( BwtWriterBase &writer );
virtual LetterNumber operator()( char *p, LetterNumber numChars ) = 0;
virtual void rewindFile( void ) = 0;
virtual LetterNumber tellg( void ) const = 0;
virtual int seek( const LetterNumber posInFile, const LetterNumber baseNumber ) = 0;
const string filename_;
protected:
FILE *pFile_;
vector<uchar> buf_;
}; // ~class BwtReaderBase
class BwtReaderASCII : public BwtReaderBase
{
public:
BwtReaderASCII( const string &filename ) :
BwtReaderBase( filename ),
currentPos_( 0 ),
lastChar_( notInAlphabet ),
runLength_( 0 )
{
}
BwtReaderASCII( const BwtReaderASCII &obj ) :
BwtReaderBase( obj ),
currentPos_( obj.currentPos_ ),
lastChar_( obj.lastChar_ ),
runLength_( obj.runLength_ )
{
}
virtual ~BwtReaderASCII() {}
virtual BwtReaderASCII *clone() const
{
return new BwtReaderASCII( *this );
};
virtual LetterNumber readAndCount( LetterCount &c, const LetterNumber numChars );
virtual LetterNumber readAndSend( BwtWriterBase &writer, const LetterNumber numChars );
virtual LetterNumber operator()( char *p, LetterNumber numChars );
virtual void rewindFile( void );
virtual LetterNumber tellg( void ) const;
virtual int seek( const LetterNumber posInFile, const LetterNumber baseNumber );
protected:
LetterNumber currentPos_;
uchar lastChar_;
uint runLength_;
}; // ~class BwtReaderASCII
class BwtReaderRunLengthBase : public BwtReaderBase
{
public:
BwtReaderRunLengthBase( const string &filename );
BwtReaderRunLengthBase( const BwtReaderRunLengthBase &obj );
virtual ~BwtReaderRunLengthBase() {}
virtual LetterNumber readAndCount( LetterCount &c, const LetterNumber numChars );
virtual LetterNumber readAndSend( BwtWriterBase &writer, const LetterNumber numChars );
virtual LetterNumber operator()( char *p, LetterNumber numChars );
virtual void rewindFile( void );
virtual LetterNumber tellg( void ) const;
virtual int seek( const LetterNumber posInFile, const LetterNumber baseNumber );
virtual bool getRun( void ) = 0;
protected:
vector<uint> lengths_;
vector<uchar> codes_;
uchar *pBuf_;
uchar *pBufMax_;
bool finished_;
public: // exposed for buildIndex. TODO: make private again
uchar lastChar_;
uint runLength_;
LetterNumber currentPos_;
LetterNumber currentPosInFile_;
};
class BwtReaderRunLength : public BwtReaderRunLengthBase
{
public:
BwtReaderRunLength( const string &filename );
BwtReaderRunLength( const BwtReaderRunLength &obj );
virtual ~BwtReaderRunLength() {}
virtual BwtReaderRunLength *clone() const
{
return new BwtReaderRunLength( *this );
};
virtual bool getRun( void );
};
const vector<char> rleV3Header = { 'B', 'W', 'T', 13, 10, 26, 3, 0 };
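// Magic header of run-length-encoded BWT files: "BWT", CR, LF, Ctrl-Z, then what is
// presumably the format version (3) and a trailing zero byte.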
class BwtReaderRunLengthV3 : public BwtReaderRunLengthBase
{
public:
BwtReaderRunLengthV3( const string &filename );
BwtReaderRunLengthV3( const BwtReaderRunLengthV3 &obj );
virtual ~BwtReaderRunLengthV3() {}
virtual BwtReaderRunLengthV3 *clone() const
{
return new BwtReaderRunLengthV3( *this );
};
virtual bool getRun( void );
virtual void rewindFile( void );
virtual LetterNumber tellg( void ) const;
virtual int seek( const LetterNumber posInFile, const LetterNumber baseNumber );
protected:
vector<uchar> symbolForRunLength1ForPile_;
vector<LetterNumber> maxEncodedRunLengthForPile_;
uchar firstContinuationSymbol_;
LetterNumber maxEncodedRunLengthMultiplierForContinuationSymbol_;
long firstDataByteInFile_;
void prefetchNextByte();
int prefetchedByte_;
};
class BwtReaderIncrementalRunLength : public BwtReaderBase
{
public:
BwtReaderIncrementalRunLength( const string &filename );
virtual ~BwtReaderIncrementalRunLength() {}
virtual BwtReaderIncrementalRunLength *clone() const
{
return new BwtReaderIncrementalRunLength( *this );
};
virtual LetterNumber readAndCount( LetterCount &c, const LetterNumber numChars );
virtual LetterNumber readAndSend( BwtWriterBase &writer, const LetterNumber numChars );
virtual LetterNumber operator()( char *p, LetterNumber numChars );
virtual void rewindFile( void );
virtual LetterNumber tellg( void ) const;
virtual int seek( const LetterNumber posInFile, const LetterNumber baseNumber ) { assert(false && "todo"); return -1; }
bool getRun( void );
void defragment();
protected:
uint lengths_[256];
uchar codes_[256];
uint runLength_;
uchar *pBuf_;
uchar *pBufMax_;
uchar lastChar_;
uchar lastMetadata_;
bool finished_;
LetterNumber currentPos_;
size_t posInRamFile_;
int fileNum_;
vector<size_t> posInRamFiles_;
vector<int> stackedFileNums_;
}; // class ~BwtReaderIncrementalRunLength
#ifdef ACTIVATE_HUFFMAN
// new input module to support Huffman encoded input
// migrated & adapted from compression.cpp in Tony's Misc CVS tree
// Tobias, 28/11/11
class BwtReaderHuffman : public BwtReaderBase
{
public:
BwtReaderHuffman( const string &filename );
virtual BwtReaderHuffman *clone() const
{
assert( false && "todo" );
return new BwtReaderHuffman( *this );
};
virtual LetterNumber readAndCount( LetterCount &c, const LetterNumber numChars );
virtual LetterNumber readAndSend( BwtWriterBase &writer, const LetterNumber numChars );
virtual LetterNumber operator()( char *p, LetterNumber numChars );
virtual void rewindFile( void );
virtual LetterNumber tellg( void ) const;
virtual int seek( const LetterNumber posInFile, const LetterNumber baseNumber ) { assert(false && "todo"); return -1; }
uint getNum( int &i );
bool getRun( void );
protected:
BitBuffer soFar_;
BitBuffer toAdd_;
uint runLength_; // current run length
uchar lastChar_; // last char read until now
int bitsUsed_; // number of bits currently used
int numInts_; // # of ints in the file
bool finished_; // whether decoding has reached the end of the input
bool nearlyFinished_; // whether decoding is about to reach the end of the input
TokenTable tokenTable_; // holds shortcuts for run-length decoding
int intCounter_; // how many ints have already been processed from the BWT file
int numSymbols_; // current number of characters already decoded and waiting for output
int maxSymbols_; // max number of characters already decoded and waiting for output
int queueCounter_; // position in the queue of decoded symbols
uchar symBuf[huffmanBufferSize]; // extracted characters from the compressed BWT
uint runBuf[huffmanBufferSize]; // run length of the character at the same position in symBuf
LetterNumber currentPos_; // position in the file
bool firstRun_; // whether this is the first run
}; // class ~BwtReaderHuffman
#endif //ifdef ACTIVATE_HUFFMAN
class BwtReaderRunLengthRam : public BwtReaderBase
{
public:
BwtReaderRunLengthRam( const string &filename );
BwtReaderRunLengthRam( const BwtReaderRunLengthRam & );
virtual ~BwtReaderRunLengthRam();
virtual BwtReaderRunLengthRam *clone() const
{
return new BwtReaderRunLengthRam( *this );
};
virtual LetterNumber readAndCount( LetterCount &c, const LetterNumber numChars );
virtual LetterNumber readAndSend( BwtWriterBase &writer, const LetterNumber numChars );
virtual LetterNumber operator()( char *p, LetterNumber numChars );
virtual void rewindFile( void );
virtual LetterNumber tellg( void ) const;
virtual int seek( const LetterNumber posInFile, const LetterNumber baseNumber ) { assert(false && "todo"); return -1; }
bool getRun( void );
protected:
uint lengths_[256];
uchar codes_[256];
uint runLength_;
uchar lastChar_;
LetterNumber currentPos_;
char *fullFileBuf_;
LetterNumber posInFullFileBuf_;
LetterNumber sizeOfFullFileBuf_;
#ifndef DONT_USE_MMAP
size_t mmapLength_;
#endif
private:
bool isClonedObject_;
}; // class ~BwtReaderRunLengthRam
BwtReaderBase* instantiateBwtPileReader( const string &pileFilename, const string &useShm = "", const bool keepBwtInRam = false, const bool forceNotUseIndexClass = false );
vector <BwtReaderBase *> instantiateBwtPileReaders( const string &bwtPrefix, const string &useShm = "" );
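// Usage sketch (assumption: typical call pattern; the pile filename is hypothetical):
// count all letters of one BWT pile, then release the reader.
//
//   BwtReaderBase *pReader = instantiateBwtPileReader( "outBWT-B01" );
//   LetterCount counts;
//   pReader->readAndCount( counts ); // single-argument overload reads to the end of the file
//   delete pReader;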
#endif
<|start_filename|>src/BCR/TransposeFasta.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "TransposeFasta.hh"
#include "Filename.hh"
#include "SeqReader.hh"
#include "Tools.hh"
#include "libzoo/util/Logger.hh"
#include "libzoo/util/TemporaryFilesManager.hh"
#include <cassert>
#include <cstdlib>
using namespace std;
TransposeFasta::TransposeFasta()
: pReader_( NULL )
, cycleNum_( 0 )
, processQualities_( false )
{
for ( int i( 0 ); i < 256; i++ ) freq[i] = 0;
}
void TransposeFasta::init( SeqReaderFile *pReader, const bool processQualities )
{
pReader_ = pReader;
cycleNum_ = pReader->length();
outputFiles_.resize( pReader->length() );
buf_.resize( pReader->length(), vector<uchar>( BUFFERSIZE ) );
processQualities_ = processQualities;
cerr << "Constructing TransposeFasta, found read length of "
<< cycleNum_ << endl;
if ( processQualities_ && pReader_->thisQual()[0] == '\0' )
{
// If the first entry of the file (which can be fastq or any other format: raw/fasta/etc.)
// doesn't contain any quality info, deactivate qualities processing
processQualities_ = false;
}
}
TransposeFasta::~TransposeFasta()
{
}
bool TransposeFasta::convert( /*const string &input,*/ const string &output, bool generatedFilesAreTemporary )
{
vector<vector<uchar> > bufQual;
vector<FILE *> outputFilesQual;
if ( processQualities_ )
{
bufQual.resize( pReader_->length(), vector<uchar>( BUFFERSIZE ) );
outputFilesQual.resize( pReader_->length() );
}
//TO DO
lengthRead = cycleNum_;
//The distribution of characters is useful
//for alpha[256] -->Corresponding between the alphabet, the piles and tableOcc
//and to know sizeAlpha
//We supposed that the symbols in the input file are the following
freq[int( terminatorChar )] = 1;
freq[int( 'A' )] = 1;
freq[int( 'C' )] = 1;
freq[int( 'G' )] = 1;
freq[int( 'N' )] = 1;
freq[int( 'T' )] = 1;
//GIOVANNA: ADDED THE SYMBOL Z IN THE ALPHABET, SO sizeAlpha = alphabetSize
#ifdef USE_EXTRA_CHARACTER_Z
freq[int( 'Z' )] = 1;
#endif
// create output files
for ( SequenceLength i = 0; i < cycleNum_; i++ )
{
Filename fn( output, i, "" );
outputFiles_[i] = fopen( fn, "w" );
if ( outputFiles_[i] == NULL )
{
cerr << "Error: couldn't open output file " << fn << endl;
if ( i > 0 )
{
cerr << " You may have reached the maximum number of opened files (see `ulimit -n`) or the maximum number of files allowed in one directory, as we create one file per cycle (and a second one if qualities are present)" << endl;
exit ( -1 );
}
}
if ( generatedFilesAreTemporary )
TemporaryFilesManager::get().addFilename( fn );
if ( processQualities_ )
{
Filename fnQual( output + "qual.", i, "" );
outputFilesQual[i] = fopen( fnQual, "w" );
if ( outputFilesQual[i] == NULL )
{
cerr << "Error: couldn't open output file " << fnQual << endl;
if ( i > 0 )
{
cerr << " You may have reached the maximum number of opened files (see `ulimit -n`) or the maximum number of files allowed in one directory, as we create one file per cycle (and a second one if qualities are present)" << endl;
exit ( -1 );
}
}
if ( generatedFilesAreTemporary )
TemporaryFilesManager::get().addFilename( fnQual );
}
}
// looping through the input file, add the characters to the buffer, print buffer when it's full
// unsigned int num_read = 0;
unsigned int num_write = 0;
unsigned int charsBuffered = 0;
// TODO: check that cycleNum_ is the right buffer size here (buf[cycleNum_+1]?)
lengthTexts = 0;
nSeq = 0;
// num_read = fread(buf,sizeof(uchar),cycleNum_,ifile);
// fgets ( buf,1024, ifile ); %%%%%
// while( !feof(ifile) ) %%%%%
while ( pReader_->allRead() == false )
{
//cerr << "current line : " << buf << endl;
if ( charsBuffered == BUFFERSIZE )
{
// write buffers to the files, clear buffers
#pragma omp parallel for num_threads(4)
for ( SequenceLength i = 0; i < cycleNum_; i++ )
{
//cerr << "writing to " << i << " : " << buf_[i] << endl;
size_t num_write_bases = fwrite ( buf_[i].data(), sizeof( char ), charsBuffered, outputFiles_[i] );
checkIfEqual( num_write_bases, charsBuffered ); // we should always read/write the same number of characters
if ( processQualities_ )
{
size_t num_write_qual = fwrite ( bufQual[i].data(), sizeof( char ), charsBuffered, outputFilesQual[i] );
checkIfEqual( num_write_bases, num_write_qual );
}
}
lengthTexts += ( charsBuffered * cycleNum_ ); // count the characters just flushed to the cycle files
charsBuffered = 0;
}
for ( SequenceLength i = 0; i < cycleNum_; i++ )
{
buf_[i][charsBuffered] = pReader_->thisSeq()[i];
if ( processQualities_ )
{
bufQual[i][charsBuffered] = pReader_->thisQual()[i];
}
}
// increase the counter of chars buffered
charsBuffered++;
nSeq++;
#ifdef XXX
// process the input
if ( buf[0] != '>' )
{
// add the characters
for ( SequenceLength i = 0; i < cycleNum_; i++ )
{
buf_[i][charsBuffered] = buf[i];
}
// increase the counter of chars buffered
charsBuffered++;
nSeq++;
}
#endif //else
//num_read = fread(buf,sizeof(uchar),cycleNum_,ifile);
// fgets ( buf, 1024, ifile );
pReader_->readNext();
}
// write the rest
for ( SequenceLength i = 0; i < cycleNum_; i++ )
{
num_write = fwrite ( buf_[i].data(), sizeof( uchar ), charsBuffered, outputFiles_[i] );
lengthTexts += num_write;
if ( processQualities_ )
{
size_t num_write_qual = fwrite ( bufQual[i].data(), sizeof( uchar ), charsBuffered, outputFilesQual[i] );
checkIfEqual( num_write, num_write_qual );
}
}
checkIfEqual( num_write, charsBuffered );
// closing all the output file streams
for ( SequenceLength i = 0; i < cycleNum_; i++ )
{
fclose( outputFiles_[i] );
if ( processQualities_ )
{
fclose( outputFilesQual[i] );
}
}
std::cout << "Number of sequences read/written: " << nSeq << "\n";
std::cout << "Number of characters read/written: " << lengthTexts << "\n";
// delete pReader;
return true;
}
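// Illustration (sketch) of the transposition performed by convert(): for three reads
// ACGT, AAAA and TTTT, cycle file i holds position i of every read in input order,
// e.g. cycle file 0 contains "AAT", file 1 "CAT", file 2 "GAT" and file 3 "TAT".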
bool TransposeFasta::inputCycFile( const string &cycPrefix )
{
//TO DO
//The distribution of characters is useful
//for alpha[256] -->Corresponding between the alphabet, the piles and tableOcc
//and to know sizeAlpha
//1) Alphabet
//We supposed that the symbols in the input file are the following
freq[int( terminatorChar )] = 1;
freq[int( 'A' )] = 1;
freq[int( 'C' )] = 1;
freq[int( 'G' )] = 1;
freq[int( 'N' )] = 1;
freq[int( 'T' )] = 1;
//GIOVANNA: ADDED THE SYMBOL Z IN THE ALPHABET, SO sizeAlpha = alphabetSize
#ifdef USE_EXTRA_CHARACTER_Z
freq[int( 'Z' )] = 1;
#endif
//2) Number of sequences
string cyc1Filename = cycPrefix + "1";
FILE *f = fopen( cyc1Filename.c_str(), "rb" );
if ( !f )
{
cerr << "ERROR: Cycle file " << cyc1Filename << " not found!" << endl;
exit( -1 );
}
fseek( f, 0, SEEK_END );
nSeq = ftell( f );
if (nSeq != ftell( f))
{
Logger::error() << "Error: Too many sequences. This version of BEETL was compiled for a maximum of " << maxSequenceNumber << " sequences, but this input has " << ftell(f) << " sequences. You can increase this limit by changing the type definition of 'SequenceNumber' in Types.hh and recompiling BEETL." << endl;
exit( -1 );
}
fclose( f );
//3) Length of the longest sequence
for ( lengthRead = 1; ; ++lengthRead )
{
Filename cycFilename( cycPrefix, lengthRead, "" );
FILE *f = fopen( cycFilename, "rb" );
if ( f )
fclose( f );
else
break;
}
//4) qualities detection
string qual1Filename = cycPrefix + "qual.1";
f = fopen( qual1Filename.c_str(), "rb" );
if ( f )
{
processQualities_ = true;
fclose( f );
}
else
processQualities_ = false;
//5) Total Length
lengthTexts = lengthRead * nSeq;
// Report
Logger_if( LOG_SHOW_IF_VERBOSE )
{
Logger::out() << "****processing qualities: " << processQualities_ << "\n";
Logger::out() << "****number of sequences: " << nSeq << "\n";
Logger::out() << "****max length of each sequence: " << lengthRead << "\n";
Logger::out() << "****lengthTot: " << lengthTexts << "\n";
}
return 1;
}
bool TransposeFasta::convertFromCycFileToFastaOrFastq( const string &fileInputPrefix, const string &fileOutput, bool generatedFilesAreTemporary, SequenceExtractor *sequenceExtractor )
{
bool outputIsFastq = hasSuffix( fileOutput, ".fastq" );
vector <FILE *> inFilesCyc;
vector <FILE *> inFilesCycQual;
//Open all cyc files
for ( int i = 0; ; ++i )
{
Filename fn( fileInputPrefix, i, "" );
FILE *f = fopen( fn, "rb" );
if ( !f ) break;
inFilesCyc.push_back( f );
if ( outputIsFastq )
{
Filename fnQual( fileInputPrefix, i, ".qual" );
inFilesCycQual.push_back( fopen( fnQual, "rb" ) );
if ( inFilesCycQual[i] == NULL )
{
std::cerr << "TransposeFasta: could not open file " << fnQual << std::endl;
exit ( EXIT_FAILURE );
}
}
}
if ( inFilesCyc.empty() )
{
std::cerr << "TransposeFasta: could not open file " << fileInputPrefix << "0" << std::endl;
exit ( EXIT_FAILURE );
}
SequenceLength lengthRead = inFilesCyc.size();
fseek( inFilesCyc[0], 0, SEEK_END );
SequenceNumber nSeq = ftell( inFilesCyc[0] );
fseek( inFilesCyc[0], 0, SEEK_SET );
ofstream outFile ( fileOutput.c_str() );
if ( outFile.is_open() == false )
{
std::cerr << "Error opening \"" << fileOutput << "\" file" << std::endl;
exit ( 1 );
}
// Read one char from each cycle file per sequence: the char taken from cycle file i becomes position i of the reconstructed sequence.
char symbol;
string sequence = "";
// buf to accelerate SequenceExtractor usage
const int SEQ_EXTRACTION_BUF_SIZE = 1024;
char seqExtractionBuf[SEQ_EXTRACTION_BUF_SIZE];
int seqCountToSkip = 0;
for ( SequenceNumber j = 0; j < nSeq; j++ )
{
bool extractThisSeq = !sequenceExtractor || sequenceExtractor->doWeExtractNextSequence();
if ( !extractThisSeq )
{
++seqCountToSkip;
continue;
}
else
{
while ( seqCountToSkip > 0 )
{
size_t skip = min( seqCountToSkip, SEQ_EXTRACTION_BUF_SIZE );
for ( SequenceLength i = 0; i < lengthRead; i++ )
{
assert( fread ( seqExtractionBuf, sizeof( char ), skip, inFilesCyc[i] ) == skip );
if ( outputIsFastq && inFilesCycQual.size() >= lengthRead )
assert( fread ( seqExtractionBuf, sizeof( char ), skip, inFilesCycQual[i] ) == skip );
}
seqCountToSkip -= skip;
}
}
if ( outputIsFastq )
outFile << "@Read" << j << std::endl;
else
outFile << "> Read " << j << std::endl;
for ( SequenceLength i = 0; i < lengthRead; i++ )
{
assert( fread ( &symbol, sizeof( char ), 1, inFilesCyc[i] ) == 1 );
sequence.append ( 1, symbol );
}
outFile << sequence << std::endl;
Logger_if( LOG_FOR_DEBUGGING ) Logger::out() << sequence << std::endl;
sequence.clear();
if ( outputIsFastq )
{
outFile << "+" << std::endl;
if ( outputIsFastq && inFilesCycQual.size() >= lengthRead )
{
for ( SequenceLength i = 0; i < lengthRead; i++ )
{
assert( fread ( &symbol, sizeof( char ), 1, inFilesCycQual[i] ) == 1 );
sequence.append ( 1, symbol );
}
outFile << sequence << std::endl;
sequence.clear();
}
else
outFile << "<qualities not available>" << std::endl;
}
}
outFile.close();
//Close all cyc files
for ( SequenceLength i = 0; i < lengthRead; i++ )
{
fclose( inFilesCyc[i] );
}
return 1;
}
<|start_filename|>src/parameters/UnbwtParameters.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BEETL_UNBWT_PARAMETERS_HH
#define BEETL_UNBWT_PARAMETERS_HH
#include "libzoo/cli/ToolParameters.hh"
#include <string>
namespace BeetlUnbwtParameters
{
// options: input format
enum InputFormat
{
INPUT_FORMAT_BWT_ASCII,
INPUT_FORMAT_COUNT
};
static const string inputFormatLabels[] =
{
"BWT_ASCII",
"" // end marker
};
// options: output format
enum OutputFormat
{
OUTPUT_FORMAT_FASTA,
OUTPUT_FORMAT_FASTQ,
OUTPUT_FORMAT_COUNT
};
static const string outputFormatLabels[] =
{
"fasta",
"fastq",
"" // end marker
};
// options: process qualities off/on
enum ProcessQualities
{
PROCESS_QUALITIES_OFF,
PROCESS_QUALITIES_ON,
PROCESS_QUALITIES_COUNT,
};
static const string processQualitiesLabels[] =
{
"off",
"on",
"" // end marker
};
// options: decode direction off/on
enum DecodeDirection
{
DECODE_DIRECTION_BACKWARD,
DECODE_DIRECTION_FORWARD,
DECODE_DIRECTION_COUNT,
};
static const string decodeDirectionLabels[] =
{
"backward",
"forward",
"" // end marker
};
// options: use vector off/on
enum UseVector
{
USE_VECTOR_OFF,
USE_VECTOR_ON,
USE_VECTOR_COUNT,
};
static const string useVectorLabels[] =
{
"off",
"on",
"" // end marker
};
// Option container
enum UnbwtOptions
{
// PARAMETER_PROCESS_QUALITIES,
PARAMETER_DECODE_DIRECTION,
PARAMETER_USE_VECTOR,
PARAMETER_COUNT // end marker
};
} // namespace BeetlUnbwtParameters
class UnbwtParameters : public ToolParameters
{
public:
UnbwtParameters()
{
using namespace BeetlUnbwtParameters;
addEntry( -1, "input filename prefix", "--input", "-i", "Input file name prefix (without -B0x)", "", TYPE_STRING | REQUIRED );
addEntry( -1, "output filename", "--output", "-o", "Output file name", "outUnBWT.fasta", TYPE_STRING | REQUIRED );
addEntry( -1, "input format", "--input-format", "", "Must be:", "detect", TYPE_CHOICE | REQUIRED, inputFormatLabels );
addEntry( -1, "output format", "--output-format", "", "", "detect", TYPE_CHOICE | REQUIRED, outputFormatLabels );
addEntry( PARAMETER_DECODE_DIRECTION, "decode direction", "--decode-direction", "-d", "", "backward", TYPE_CHOICE, decodeDirectionLabels );
addEntry( PARAMETER_USE_VECTOR, "use vector", "--use-vector", "", "", "on", TYPE_CHOICE, useVectorLabels );
addDefaultVerbosityAndHelpEntries();
}
};
#endif //ifndef BEETL_UNBWT_PARAMETERS_HH
<|start_filename|>src/countWords/IntervalHandlerTumourNormal.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "IntervalHandlerTumourNormal.hh"
#include "libzoo/util/Logger.hh"
using namespace std;
void IntervalHandlerTumourNormal::foundInBoth
( const int pileNum,
const LetterCount &countsThisRangeA,
const LetterCount &countsThisRangeB,
const Range &thisRangeA,
const Range &thisRangeB,
AlphabetFlag &propagateIntervalA,
AlphabetFlag &propagateIntervalB,
bool &isBreakpointDetected,
const int cycle
)
{
int nonsharedPaths( 0 );
int sharedPathsA( 0 );
int sharedPathsB( 0 );
LetterNumber meanSignalAOnly( 0 ), meanSignalBOnly( 0 ), current_minOccA_( 0 ), current_minOccB_( 0 );
if ( cycle < 12 )
{
current_minOccA_ = minOcc_;
current_minOccB_ = minOcc_;
}
else
{
meanSignalAOnly = ( countsThisRangeA.count_[1] + countsThisRangeA.count_[2] + countsThisRangeA.count_[3] + countsThisRangeA.count_[5] ) / 10;
meanSignalBOnly = ( countsThisRangeB.count_[1] + countsThisRangeB.count_[2] + countsThisRangeB.count_[3] + countsThisRangeB.count_[5] ) / 10;
current_minOccA_ = max( meanSignalAOnly, minOcc_ );
current_minOccB_ = max( meanSignalBOnly, minOcc_ );
}
for ( int l( 1 ); l < alphabetSize; l++ )
{
if ( l == 4 ) continue;
sharedPathsB += countsThisRangeB.count_[l] > 1;
sharedPathsA += countsThisRangeA.count_[l] > 1;
nonsharedPaths += ( ( ( countsThisRangeA.count_[l] > ( LetterNumber )( ( double )current_minOccA_ * fsizeRatio_ ) && countsThisRangeB.count_[l] == 0 ) || ( countsThisRangeB.count_[l] > current_minOccB_ && countsThisRangeA.count_[l] == 0 ) ) );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << l << " countsThisRangeA.count_ " << countsThisRangeA.count_[l] << "\t" << "countsThisRangeB.count_ " << countsThisRangeB.count_[l] << endl;
if ( countsThisRangeA.count_[0] == ( countsThisRangeA.count_[1] + countsThisRangeA.count_[2] + countsThisRangeA.count_[3] + countsThisRangeA.count_[5] ) ) nonsharedPaths = 0;
if ( countsThisRangeB.count_[0] == ( countsThisRangeB.count_[1] + countsThisRangeB.count_[2] + countsThisRangeB.count_[3] + countsThisRangeB.count_[5] ) ) nonsharedPaths = 0;
} // ~for l
if ( nonsharedPaths > 0 && sharedPathsB < 3 && sharedPathsA < 3 )
{
for ( int l( 1 ); l < alphabetSize; l++ )
{
propagateIntervalA[l] = ( countsThisRangeA.count_[l] >= current_minOccA_ && countsThisRangeB.count_[l] == 0 );
propagateIntervalB[l] = ( countsThisRangeB.count_[l] >= current_minOccB_ && countsThisRangeA.count_[l] == 0 );
}
isBreakpointDetected = true;
outFile_ << "BKPT ";
if ( thisRangeB.word_.empty() )
outFile_ << alphabet[pileNum] << string( cycle - 1, 'x' ); // No propagated sequence => Print what we know of the sequence
else
outFile_ << thisRangeB.word_;
outFile_
<< ' ' << countsThisRangeA.count_[0]
<< ':' << countsThisRangeA.count_[1]
<< ':' << countsThisRangeA.count_[2]
<< ':' << countsThisRangeA.count_[3]
<< ':' << countsThisRangeA.count_[4]
<< ':' << countsThisRangeA.count_[5]
<< ' ' << countsThisRangeB.count_[0]
<< ':' << countsThisRangeB.count_[1]
<< ':' << countsThisRangeB.count_[2]
<< ':' << countsThisRangeB.count_[3]
<< ':' << countsThisRangeB.count_[4]
<< ':' << countsThisRangeB.count_[5]
<< ' ' << ( thisRangeA.pos_ & matchMask )
<< ' ' << ( thisRangeB.pos_ & matchMask )
<< ' ' << thisRangeA.num_
<< ' ' << thisRangeB.num_
<< endl;
}
else
{
for ( int l( 1 ); l < alphabetSize; l++ )
propagateIntervalB[l] = ( countsThisRangeB.count_[l] >= current_minOccB_ );
for ( int l( 1 ); l < alphabetSize; l++ )
propagateIntervalA[l] = ( countsThisRangeA.count_[l] >= current_minOccA_ );
}
// don't bother with Ns
propagateIntervalA[whichPile[( int )dontKnowChar]] = false;
propagateIntervalB[whichPile[( int )dontKnowChar]] = false;
} // ~foundInBoth
void IntervalHandlerTumourNormal::foundInAOnly
( const int pileNum,
const LetterCount &countsSoFarA,
const LetterCount &countsThisRangeA,
const char *bwtSubstring,
Range &thisRangeA,
AlphabetFlag &propagateIntervalA,
const int cycle
)
{
if ( countsThisRangeA.count_[0] > 0 )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
outFile_ << "READ ";
if ( thisRangeA.word_.empty() )
outFile_ << alphabet[pileNum]; // No propagated sequence
else
outFile_ << thisRangeA.word_;
outFile_
<< ' ' << thisRangeA.pos_
<< ' ' << countsThisRangeA.count_[0]
<< ':' << countsThisRangeA.count_[1]
<< ':' << countsThisRangeA.count_[2]
<< ':' << countsThisRangeA.count_[3]
<< ':' << countsThisRangeA.count_[4]
<< ':' << countsThisRangeA.count_[5]
<< ' ' << countsSoFarA.count_[0]
<< endl;
}
}
// TBD print out IDs of discovered reads
for ( int l( 1 ); l < alphabetSize; l++ )
{
propagateIntervalA[l] = ( countsThisRangeA.count_[l] > 0 );
}
// don't bother with Ns
propagateIntervalA[whichPile[( int )dontKnowChar]] = false;
} // ~foundInAOnly
void IntervalHandlerTumourNormal::foundInBOnly
( const int pileNum,
const LetterCount &countsSoFarB,
const LetterCount &countsThisRangeB,
const char *bwtSubstring,
Range &thisRangeB,
AlphabetFlag &propagateIntervalB,
const int cycle
)
{
if ( countsThisRangeB.count_[0] > 0 )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
outFile_ << "INBS ";
if ( thisRangeB.word_.empty() )
outFile_ << alphabet[pileNum]; // No propagated sequence
else
outFile_ << thisRangeB.word_;
outFile_
<< ' ' << thisRangeB.pos_
<< ' ' << countsThisRangeB.count_[0]
<< ':' << countsThisRangeB.count_[1]
<< ':' << countsThisRangeB.count_[2]
<< ':' << countsThisRangeB.count_[3]
<< ':' << countsThisRangeB.count_[4]
<< ':' << countsThisRangeB.count_[5]
<< ' ' << countsSoFarB.count_[0]
<< endl;
}
}
// TBD print out IDs of discovered reads
for ( int l( 1 ); l < alphabetSize; l++ )
{
propagateIntervalB[l] = ( countsThisRangeB.count_[l] > 0 );
}
// don't bother with Ns
propagateIntervalB[whichPile[( int )dontKnowChar]] = false;
} // ~foundInBOnly
<|start_filename|>src/errors/WitnessReader.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "WitnessReader.hh"
using namespace std;
WitnessReader::WitnessReader(
const string &lcpFileName,
const string &bwtFileName,
int witnessLength,
int minimumSupport,
bool rleBWT
)
: pFile_( fopen( lcpFileName.c_str(), "rb" ) )
, filledTo_( 0 )
, at_( 0 )
, lastBlockEnd_( 0 )
, filePos_( 0 )
, witnessLength_( witnessLength )
, minimumSupport_( minimumSupport )
, lastLcpBlockSupport_( 0 )
{
for ( unsigned int i = 0; i < ReadBufferSize; i++ )
lcpBuf_[i] = 0;
bwtReader_ = instantiateBwtPileReader( bwtFileName );
totalCountSoFar_.clear();
refill_();
}
WitnessReader::~WitnessReader()
{
fclose( pFile_ );
delete bwtReader_;
}
LetterCount WitnessReader::TotalCountSoFar()
{
return totalCountSoFar_;
}
int WitnessReader::currentWitnessCount() const
{
return lastLcpBlockSupport_;
}
int WitnessReader::currentWitnessBlockStart() const
{
return filePos_ - filledTo_ + at_ - lastLcpBlockSupport_;
}
bool WitnessReader::nextWitnessBlock( LetterCount &lc )
{
while ( nextCandidateLcpBlock_() )
{
//catch the bwt file up with the lcp...
bwtReader_->readAndCount( totalCountSoFar_, currentWitnessBlockStart() - lastBlockEnd_ );
//get the actual individual letter counts we're interested in...
lc.clear();
bwtReader_->readAndCount( lc, currentWitnessCount() );
totalCountSoFar_ += lc;
int totalSupport = currentWitnessCount() - lc.count_[
whichPile[( int )'$']
];
if ( totalSupport > minimumSupport_ )
{
lc.count_[
whichPile[( int )'$']
] = 0;
return true;
}
}
bwtReader_->readAndCount( totalCountSoFar_ );
return false;
}
void WitnessReader::refill_()
{
filledTo_ = fread( lcpBuf_, sizeof( int ), ReadBufferSize, pFile_ );
filePos_ += filledTo_;
at_ = 0;
}
bool WitnessReader::nextCandidateLcpBlock_()
{
lastBlockEnd_ = currentWitnessCount() + currentWitnessBlockStart();
lastLcpBlockSupport_ = 1;
while ( filledTo_ > 0 )
{
while ( at_ < filledTo_ )
{
if ( lcpBuf_[at_] >= witnessLength_ )
lastLcpBlockSupport_++;
else if ( lastLcpBlockSupport_ > minimumSupport_ )
return true;
else
lastLcpBlockSupport_ = 1;
at_++;
}
refill_();
}
return false;
}
void WitnessReader::test()
{
char bwtChars[filledTo_];
( *bwtReader_ )( bwtChars, filledTo_ );
for ( int i = 0; i < filledTo_; i++ )
cout << bwtChars[i] << " " << lcpBuf_[i] << endl;
}
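// Usage sketch (assumption: typical call pattern; file names and parameter values are
// hypothetical): iterate over LCP blocks with sufficient support and inspect the letters
// observed for each witness block.
//
//   WitnessReader wr( "prefix-L01", "prefix-B01", 20 /*witness length*/, 5 /*min support*/, true );
//   LetterCount lc;
//   while ( wr.nextWitnessBlock( lc ) )
//   {
//       // lc holds the letter counts of the block starting at wr.currentWitnessBlockStart(),
//       // spanning wr.currentWitnessCount() positions
//   }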
<|start_filename|>src/parameters/BwtParameters.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BEETL_BWT_PARAMETERS_HH
#define BEETL_BWT_PARAMETERS_HH
#include "libzoo/cli/ToolParameters.hh"
#include <string>
using std::string;
namespace BeetlBwtParameters
{
// options: input format
enum InputFormat
{
INPUT_FORMAT_FASTA,
INPUT_FORMAT_FASTQ,
INPUT_FORMAT_CYC,
INPUT_FORMAT_SEQ,
INPUT_FORMAT_BCL,
INPUT_FORMAT_COUNT
};
static const string inputFormatLabels[] =
{
"fasta",
"fastq",
"cyc",
"seq",
"bcl",
"" // end marker
};
// options: output format
enum OutputFormat
{
OUTPUT_FORMAT_ASCII,
OUTPUT_FORMAT_RLE,
OUTPUT_FORMAT_COUNT
// de-activated
,OUTPUT_FORMAT_HUFFMAN
};
static const string outputFormatLabels[] =
{
"ASCII",
"RLE",
// "Huffman",
"" // end marker
};
// options: algorithm
enum AlgorithmOption
{
ALGORITHM_BCR,
ALGORITHM_EXT,
ALGORITHM_COUNT
};
static const string algorithmLabels[] =
{
"BCR",
"ext",
"" // end marker
};
// options: intermediate format
enum IntermediateFormat
{
INTERMEDIATE_FORMAT_RLE,
INTERMEDIATE_FORMAT_ASCII,
INTERMEDIATE_FORMAT_COUNT
// de-activated
,INTERMEDIATE_FORMAT_MULTIRLE
,INTERMEDIATE_FORMAT_HUFFMAN
};
static const string intermediateFormatLabels[] =
{
"RLE",
"ASCII",
// "multiRLE",
// "Huffman",
"" // end marker
};
// options: intermediate storage medium
enum IntermediateStorageMedium
{
INTERMEDIATE_STORAGE_MEDIUM_DISK,
INTERMEDIATE_STORAGE_MEDIUM_RAM,
INTERMEDIATE_STORAGE_MEDIUM_COUNT
};
static const string intermediateStorageMediumLabels[] =
{
"disk",
"RAM",
"" // end marker
};
// options: parallel processing
enum ParallelProcessing // todo: this should only be on/off; the number of cores should be part of the hardware resources
{
PARALLEL_PROCESSING_OFF,
PARALLEL_PROCESSING_2CORES,
PARALLEL_PROCESSING_4CORES,
PARALLEL_PROCESSING_COUNT,
};
static const string parallelProcessingLabels[] =
{
"1 core",
"2 cores",
"4 cores",
"" // end marker
};
// options: process qualities
enum ProcessQualities
{
PROCESS_QUALITIES_IGNORE,
PROCESS_QUALITIES_PERMUTE,
// PROCESS_QUALITIES_PRESERVE,
// PROCESS_QUALITIES_SMOOTH,
PROCESS_QUALITIES_COUNT,
};
static const string processQualitiesLabels[] =
{
"ignore",
"permute",
// "preserve",
// "smooth",
"" // end marker
};
// options: generate cycle BWT off/pbe/ascii
enum GenerateCycleBwt
{
GENERATE_CYCLE_BWT_OFF,
GENERATE_CYCLE_BWT_PBE,
GENERATE_CYCLE_BWT_ASCII,
GENERATE_CYCLE_BWT_COUNT,
};
static const string generateCycleBwtLabels[] =
{
"off",
"PBE",
"ASCII",
"" // end marker
};
// options: paired-reads input
enum PairedReadsInput
{
PAIRED_READS_INPUT_NONE,
PAIRED_READS_INPUT_ALL1ALL2,
PAIRED_READS_INPUT_COUNT
};
static const string pairedReadsInputLabels[] =
{
"none",
"all1all2",
"" // end marker
};
// options: generate cycle qualities off/pbe
enum GenerateCycleQual
{
GENERATE_CYCLE_QUAL_OFF,
GENERATE_CYCLE_QUAL_PBE,
GENERATE_CYCLE_QUAL_COUNT,
};
static const string generateCycleQualLabels[] =
{
"off",
"PBE",
"" // end marker
};
// Option container
enum BwtParameterIds
{
PARAMETER_UNDEFINED = -1,
PARAMETER_INPUT_FILENAME = 0,
PARAMETER_OUTPUT_FILENAME,
PARAMETER_MEMORY_LIMIT,
PARAMETER_INPUT_FORMAT,
PARAMETER_OUTPUT_FORMAT,
PARAMETER_ALGORITHM,
PARAMETER_INTERMEDIATE_FORMAT,
PARAMETER_INTERMEDIATE_STORAGE_MEDIUM,
PARAMETER_PARALLEL_PREFETCH,
// PARAMETER_PARALLEL_PROCESSING,
PARAMETER_PROCESS_QUALITIES,
PARAMETER_GENERATE_LCP,
PARAMETER_ADD_REV_COMP,
PARAMETER_REVERSE,
PARAMETER_SUB_SEQUENCE_LENGTH,
PARAMETER_PAIRED_READS_INPUT,
PARAMETER_SINGLE_CYCLE,
PARAMETER_CONCATENATE_OUTPUT,
PARAMETER_SAP_ORDERING,
PARAMETER_GENERATE_ENDPOSFILE,
PARAMETER_GENERATE_CYCLE_BWT,
PARAMETER_GENERATE_CYCLE_QUAL,
PARAMETER_PAUSE_BETWEEN_CYCLES,
PARAMETER_COUNT // end marker
};
} // namespace BeetlBwtParameters
class BwtParameters : public ToolParameters
{
public:
BwtParameters()
{
using namespace BeetlBwtParameters;
addEntry( PARAMETER_INPUT_FILENAME, "input filename", "--input", "-i", "Input file name or prefix", "", TYPE_STRING | REQUIRED );
addEntry( PARAMETER_OUTPUT_FILENAME, "output filename", "--output", "-o", "Output file name or prefix", "outBWT", TYPE_STRING | REQUIRED );
addEntry( PARAMETER_INPUT_FORMAT, "input format", "--input-format", "", "", "detect", TYPE_CHOICE | REQUIRED, inputFormatLabels );
addEntry( PARAMETER_OUTPUT_FORMAT, "output format", "--output-format", "", "", "rle", TYPE_CHOICE | REQUIRED, outputFormatLabels );
addEntry( PARAMETER_INTERMEDIATE_FORMAT, "intermediate format", "--intermediate-format", "", "", "", TYPE_CHOICE | REQUIRED | AUTOMATED, intermediateFormatLabels );
// addEntry( PARAMETER_INTERMEDIATE_STORAGE_MEDIUM, "intermediate storage medium", "--intermediate-medium", "", "[disk|ram] (multirle->ram, others->disk)", "", TYPE_CHOICE | REQUIRED | AUTOMATED, intermediateStorageMediumLabels );
addEntry( PARAMETER_ALGORITHM, "algorithm", "--algorithm", "-a", "", "", TYPE_CHOICE | REQUIRED | AUTOMATED, algorithmLabels );
addEntry( PARAMETER_MEMORY_LIMIT, "memory limit MB", "--memory-limit", "-M", "RAM constraint in MB", "smallest of ulimit -v and /proc/meminfo", TYPE_INT | REQUIRED );
addEntry( PARAMETER_PROCESS_QUALITIES, "process qualities", "--qualities", "-q", "Ignore/Permute qualities", "ignore", TYPE_CHOICE, processQualitiesLabels );
addEntry( PARAMETER_CONCATENATE_OUTPUT, "concatenate output", "--concatenate-output", "", "Concatenate BWT files at the end", "", TYPE_SWITCH );
addEntry( PARAMETER_ADD_REV_COMP, "add reverse complement", "--add-rev-comp", "", "Add reverse complemented sequences", "", TYPE_SWITCH );
addEntry( PARAMETER_REVERSE, "reverse", "--reverse", "", "Process cycles in reverse order", "", TYPE_SWITCH );
addEntry( PARAMETER_SUB_SEQUENCE_LENGTH, "sub-sequence length", "--sub-sequence-length", "", "Split sequences into two sub-sequences. Useful for paired reads", "", TYPE_INT );
addEntry( PARAMETER_PAIRED_READS_INPUT, "paired-reads input", "--paired-reads-input", "", "If your input file contains paired reads", "none", TYPE_CHOICE, pairedReadsInputLabels );
addEntry( PARAMETER_SAP_ORDERING, "SAP ordering", "--sap-ordering", "", "Use SAP ordering (see SAP note below)", "", TYPE_SWITCH );
addEntry( PARAMETER_GENERATE_ENDPOSFILE, "generate endPosFile", "--generate-end-pos-file", "", "Generate mapping between BWT '$' signs and sequence numbers", "", TYPE_SWITCH );
addEntry( PARAMETER_GENERATE_LCP, "generate LCP", "--generate-lcp", "", "Generate Longest Common Prefix lengths (see LCP note below)", "", TYPE_SWITCH );
addEntry( PARAMETER_GENERATE_CYCLE_BWT, "generate cycle BWT", "--cycle-bwt", "", "PBE=Generate cycle-by-cycle BWT with prediction-based encoding", "off", TYPE_CHOICE, generateCycleBwtLabels );
addEntry( PARAMETER_GENERATE_CYCLE_QUAL, "generate cycle qualities", "--cycle-qual", "", "PBE=Generate cycle-by-cycle qualities zeroed at correctly-predicted bases", "off", TYPE_CHOICE, generateCycleQualLabels );
#ifdef _OPENMP
addEntry( PARAMETER_PARALLEL_PREFETCH, "parallel prefetch", "--no-parallel-prefetch", "", "Disable parallel prefetch of cycle files", "", TYPE_SWITCH | AUTOMATED );
// addEntry( PARAMETER_PARALLEL_PROCESSING, "parallel processing", "--no-parallel-processing", "", "Disable parallel processing by letter", "", TYPE_SWITCH | AUTOMATED, parallelProcessingLabels );
#endif //ifdef _OPENMP
// addEntry( PARAMETER_, "", " --hw-constraints File describing hardware constraints for speed estimates", "", TYPE_STRING );
addEntry( PARAMETER_PAUSE_BETWEEN_CYCLES, "pause between cycles", "--pause-between-cycles", "", "Wait for a key press after each cycle", "", TYPE_SWITCH );
addDefaultVerbosityAndHelpEntries();
}
};
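// Example invocation matching the entries above (sketch; the binary name and the chosen
// values are assumptions, defaults apply to omitted options):
//   beetl-bwt --input reads.fastq --output outBWT --output-format RLE --qualities ignore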
#endif //ifndef BEETL_BWT_PARAMETERS_HH
<|start_filename|>src/shared/EndPosFile.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_ENDPOSFILE
#define INCLUDED_ENDPOSFILE
#include "Types.hh"
#include <fstream>
//SequenceNumber EndPosFile_convertDollarNumToSequenceNum( const SequenceNumber dollarNum );
class EndPosFile
{
public:
EndPosFile( const string &bwtFilenamePrefix );
SequenceNumber convertDollarNumToSequenceNum( const SequenceNumber dollarNum );
private:
std::ifstream file_;
SequenceNumber sequenceGroupCount_;
uint8_t sequenceCountInGroup_;
uint8_t hasRevComp_;
SequenceNumber dollarSignCount_;
};
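// Usage sketch (assumption: typical call pattern; the BWT prefix is hypothetical):
//   EndPosFile endPosFile( "outBWT" );
//   SequenceNumber seqNum = endPosFile.convertDollarNumToSequenceNum( dollarNum );
// maps the n-th '$' sign of the BWT back to the number of the sequence it terminates.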
#endif // INCLUDED_ENDPOSFILE
<|start_filename|>src/errors/CorrectionAligner.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "CorrectionAligner.hh"
using namespace std;
template <class T>
T **makeMatrix( int rows, int cols )
{
T **result = new T*[rows];
for ( int row = 0; row < rows; row++ )
result[row] = new T[cols];
return result;
}
template <class T>
void zapMatrix( T **matrix, int rows )
{
for ( int row = 0; row < rows; row++ )
delete[] matrix[row];
delete[] matrix;
}
struct CorrectionInterval
{
CorrectionInterval( int inStart, int inLengthOnRead, int inCorrectionLength ): correctionLength( inCorrectionLength ), lengthOnRead( inLengthOnRead ), start( inStart ) {}
int correctionLength, lengthOnRead, start;
};
string CorrectionAligner::MakeFastaRecord( int number, string name, string sequence, string quality )
{
stringstream ss;
ss << ">" << number << endl << sequence << endl;
return ss.str();
}
string CorrectionAligner::MakeFastqRecord( int number, string name, string sequence, string quality )
{
stringstream ss;
ss << name << sequence << endl << "+" << endl << quality << endl;
return ss.str();
}
bool CorrectionAligner::SortByLastCycle( ErrorInfo *a, ErrorInfo *b )
{
return a->lastCycle > b->lastCycle;
}
void CorrectionAligner::ApplyCorrections(
SeqReaderFile *readsFile,
vector<ErrorInfo> &corrections,
const string &outFile,
bool correctionsOnly,
ReadsFormat fileType
)
{
ofstream correctedReadsFile ( outFile.c_str(), fstream::out );
ApplyCorrections( readsFile, corrections, correctedReadsFile, correctionsOnly, fileType );
correctedReadsFile.close();
}
void CorrectionAligner::ApplyCorrections(
SeqReaderFile *readsFile,
vector<ErrorInfo> &corrections,
ostream &correctedReadsOut,
bool correctionsOnly,
ReadsFormat fileType
)
{
uint readLength = readsFile->length();
readsFile->rewindFile();
uint currentCorrection = 0;
int currentRead = 0;
while ( readsFile->readNext(), !readsFile->allRead() )
{
string name = string( readsFile->thisName() );
string readStr = string( readsFile->thisSeq() ).substr( 0, readLength );
string qStr = string( readsFile->thisQual() );
if ( qStr.size() > readLength )
qStr = qStr.substr( 0, readLength );
if ( currentCorrection < corrections.size() && corrections[currentCorrection].seqNum == currentRead )
{
vector<ErrorInfo *> correctionsForCurrentRead;
while ( currentCorrection < corrections.size() && corrections[currentCorrection].seqNum == currentRead )
{
correctionsForCurrentRead.push_back( &corrections[currentCorrection] );
++currentCorrection;
}
string correctedRead, correctedQstr;
CorrectRead( correctionsForCurrentRead, readStr, qStr, correctedRead, correctedQstr );
if ( fileType == READS_FORMAT_FASTQ )
correctedReadsOut << MakeFastqRecord( currentRead, name, correctedRead, correctedQstr );
else if ( fileType == READS_FORMAT_FASTA )
correctedReadsOut << MakeFastaRecord( currentRead, name, correctedRead, correctedQstr );
}
else if ( !correctionsOnly )
{
if ( fileType == READS_FORMAT_FASTQ )
correctedReadsOut << MakeFastqRecord( currentRead, name, readStr, qStr );
else if ( fileType == READS_FORMAT_FASTA )
correctedReadsOut << MakeFastaRecord( currentRead, name, readStr, qStr );
}
currentRead++;
}
}
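// Usage sketch (assumption: typical call pattern; `readsFile` and `aligner` are assumed to
// be constructed elsewhere, file names are hypothetical):
//
//   vector<ErrorInfo> corrections = ErrorInfo::ReadCorrectionsFromCsv( "corrections.csv" );
//   sort( corrections.begin(), corrections.end(), ErrorInfo::SortByRead );
//   aligner.ApplyCorrections( readsFile, corrections, "corrected.fastq", false, READS_FORMAT_FASTQ );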
string CorrectionAligner::Correct( const string &errorContainingRead, vector<ErrorInfo *> &corrections )
{
return errorContainingRead;
}
void CorrectionAligner::CorrectRead(
vector<ErrorInfo *> &corrections,
const string &errorContainingRead,
const string &inQstr,
string &outRead,
string &outQstr
)
{
outRead = Correct( errorContainingRead, corrections );
}
enum AlignType
{
POSITION_MATCH = 0,
SEQ1_GAP,
SEQ2_GAP
};
void SmithWatermanCorrectionAligner::Align( const string &seq1, const string &seq2, int &lengthOnSeq1, int &lengthOnSeq2 )
{
int **matrix = makeMatrix<int>( seq1.size() + 1, seq2.size() + 1 );
AlignType **pointers = makeMatrix<AlignType>( seq1.size() + 1, seq2.size() + 1 );
for ( uint seq1pos = 0; seq1pos <= seq1.size(); seq1pos++ )
matrix[seq1pos][0] = 0;
for ( uint seq2pos = 0; seq2pos <= seq2.size(); seq2pos++ )
matrix[0][seq2pos] = 0;
for ( uint seq1pos = 1; seq1pos <= seq1.size(); seq1pos++ )
for ( uint seq2pos = 1; seq2pos <= seq2.size(); seq2pos++ )
{
AlignType alignType = POSITION_MATCH;
int score = 0;
int matchScore;
if ( seq1[seq1pos - 1] == seq2[seq2pos - 1] )
matchScore = matchScore_;
else
matchScore = mismatchScore_;
if ( score < matrix[seq1pos - 1][seq2pos] + deletionScore_ )
{
score = matrix[seq1pos - 1][seq2pos] + deletionScore_;
alignType = SEQ2_GAP;
}
if ( score < matrix[seq1pos][seq2pos - 1] + insertionScore_ )
{
score = matrix[seq1pos][seq2pos - 1] + insertionScore_;
alignType = SEQ1_GAP;
}
if ( score < matrix[seq1pos - 1][seq2pos - 1] + matchScore )
{
score = matrix[seq1pos - 1][seq2pos - 1] + matchScore;
alignType = POSITION_MATCH;
}
matrix[seq1pos][seq2pos] = score;
pointers[seq1pos][seq2pos] = alignType;
}
int pos1 = seq1.size() - 1;
int pos2 = seq2.size() - 1;
while ( ( pos1 > 0 ) && ( pos2 > 0 ) )
{
switch ( pointers[pos1][pos2] )
{
case POSITION_MATCH:
pos1--;
pos2--;
break;
case SEQ1_GAP:
pos2--;
break;
case SEQ2_GAP:
pos1--;
break;
default:
exit( 1 );
}
}
lengthOnSeq1 = seq1.size() - pos1;
lengthOnSeq2 = seq2.size() - pos2;
zapMatrix<AlignType>( pointers, seq1.size() + 1 );
zapMatrix<int>( matrix, seq1.size() + 1 );
}
void SmithWatermanCorrectionAligner::Align( const string &seq1, const string &seq2, int &lengthOnSeq1, int &lengthOnSeq2, bool correctForwards )
{
if ( correctForwards )
{
string seq1_ = strreverse( seq1 );
string seq2_ = strreverse( seq2 );
Align( seq1_, seq2_, lengthOnSeq1, lengthOnSeq2 );
}
else
Align( seq1, seq2, lengthOnSeq1, lengthOnSeq2 );
}
string SmithWatermanCorrectionAligner::Replace( const string &original, const string &correction, int lineUpPosition, bool correctForwards )
{
int lengthOnOriginal;
return Replace( original, correction, lineUpPosition, correctForwards, lengthOnOriginal );
}
string SmithWatermanCorrectionAligner::Replace( const string &original, const string &correction, int lineUpPosition, bool correctForwards, int &lengthOnOriginal )
{
int lengthOnCorrection;
string originalPartToAlign = correctForwards ? original.substr( lineUpPosition ) : original.substr( 0, lineUpPosition + 1 );
Align( correction, originalPartToAlign, lengthOnCorrection, lengthOnOriginal, correctForwards );
if ( correctForwards )
{
return
original.substr( 0, lineUpPosition )
+
correction
+
original.substr( min<int>( lengthOnOriginal + lineUpPosition, original.size() ) )
;
}
else
{
return
original.substr( 0, max<int>( lineUpPosition - lengthOnOriginal + 1, 0 ) )
+
correction
+
original.substr( min<int>( original.size(), lineUpPosition + 1 ) )
;
}
}
string SmithWatermanCorrectionAligner::Correct( const string &errorContainingRead, vector<ErrorInfo *> &corrections )
{
bool firstCorrection = true;
string result( errorContainingRead );
for ( uint currentCorrection = 0; currentCorrection < corrections.size(); currentCorrection++ )
{
ErrorInfo *current = corrections[currentCorrection];
string witness = ( current->reverseStrand ) ?
errorContainingRead.substr( current->positionInRead - current->lastCycle, current->lastCycle ) :
errorContainingRead.substr( current->positionInRead + 1, current->lastCycle );
int lineUpAt;
bool canCorrect = true;
if ( firstCorrection )
{
// if it's the first correction we're applying to the read then we know the exact position
// the putative error occurred at, so we can line up exactly and avoid calling str.find
lineUpAt = current->positionInRead;
}
else
{
int witnessLocation = result.find( witness );
if ( witnessLocation == -1 )
canCorrect = false;
lineUpAt = ( current->reverseStrand ) ? witnessLocation + current->lastCycle : witnessLocation - 1;
}
if ( canCorrect )
{
result = Replace(
result,
current->corrector,
lineUpAt,
current->reverseStrand
);
}
firstCorrection = false;
}
return result;
}
string StitchAligner::Correct( const string &errorContainingRead, vector<ErrorInfo *> &corrections )
{
return errorContainingRead.substr( 0, corrections[0]->correctorStart ) + corrections[0]->corrector;
}
void NoIndelAligner::CorrectRead(
vector<ErrorInfo *> &corrections,
const string &errorContainingRead,
const string &inQstr,
string &outRead,
string &outQstr
)
{
bool firstCorrection = true;
outRead = errorContainingRead;
outQstr = inQstr;
sort( corrections.begin(), corrections.end(), SortByLastCycle );
for ( uint currentCorrection = 0; currentCorrection < corrections.size(); currentCorrection++ )
{
ErrorInfo *current = corrections[currentCorrection];
bool canCorrect = true;
if ( current->lastCycle < minLastCycle_ )
canCorrect = false;
string witness = ( current->reverseStrand ) ?
errorContainingRead.substr( current->positionInRead - current->lastCycle, current->lastCycle ) :
errorContainingRead.substr( current->positionInRead + 1, current->lastCycle );
int lineUpAt;
if ( firstCorrection )
lineUpAt = current->positionInRead;
else
{
int witnessLocation = outRead.find( witness );
if ( witnessLocation == -1 )
canCorrect = false;
lineUpAt = ( current->reverseStrand ) ? witnessLocation + current->lastCycle : witnessLocation - 1;
}
if ( canCorrect )
{
string corrector = current->corrector;
//int corrStart = current->correctorStart;
int corrStart = ( current->reverseStrand ) ? lineUpAt : lineUpAt - corrector.size() + 1;
outRead =
outRead.substr( 0, max<int>( corrStart, 0 ) ) +
corrector +
outRead.substr( min<int>( corrStart + corrector.size(), outRead.size() ) )
;
outQstr =
outQstr.substr( 0, max<int>( corrStart, 0 ) ) +
string( corrector.size(), correctionQuality_ ) +
outQstr.substr( min<int>( corrStart + corrector.size(), outQstr.size() ) )
;
if ( trim_ )
{
outRead = outRead.substr( max<int>( 0, -corrStart ), errorContainingRead.size() );
outQstr = outQstr.substr( max<int>( 0, -corrStart ), errorContainingRead.size() );
}
}
firstCorrection = false;
}
}
<|start_filename|>src/errors/ErrorInfo.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "ErrorInfo.hh"
using namespace std;
bool ErrorInfo::SortByRead( ErrorInfo const & a, ErrorInfo const & b )
{
// Multi-level comparison to ensure uniqueness of results when using different implementations of std::sort
if (a.seqNum < b.seqNum)
return true;
else if (a.seqNum == b.seqNum)
{
if (a.positionInRead < b.positionInRead)
return true;
else if (a.positionInRead == b.positionInRead)
{
if (a.correctorStart < b.correctorStart)
return true;
else if (a.correctorStart == b.correctorStart)
return ( a.corrector.compare( b.corrector ) < 0 );
}
}
return false;
}
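// Note (sketch): the cascade above is equivalent to a lexicographic tuple comparison, e.g.
//   std::tie( a.seqNum, a.positionInRead, a.correctorStart, a.corrector )
//     < std::tie( b.seqNum, b.positionInRead, b.correctorStart, b.corrector )
// It is kept explicit here for clarity.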
void ErrorInfo::print() const
{
cout << "Error info: " << endl;
cout << " " << "First cycle:" << firstCycle << endl; //first cycle the error is noticed
cout << " " << "Last cycle:" << lastCycle << endl; // last cycle where error is noticed
cout << " " << "Read End:" << readEnd << endl; //cycle at which $ for read is reached
cout << " " << "Corrector: " << corrector << endl; // what read should be corrected to
cout << " " << "Sequence number: " << seqNum << endl; // position of read in original list
cout << endl;
}
void ErrorInfo::SetReadNumbersToOriginal( char *endPosFileName, vector<ErrorInfo> &errorsInSortedReads )
{
//loop through all the errors and for each one look up which read it comes from
LetterNumber numchar;
FILE *InFileEndPos = fopen( endPosFileName, "rb" );
if ( InFileEndPos == NULL )
{
std::cerr << "could not open file " << endPosFileName << "!" << endl;
exit ( EXIT_FAILURE );
}
SequenceNumber numText = 0;
numchar = fread ( &numText, sizeof( SequenceNumber ), 1 , InFileEndPos );
checkIfEqual( numchar, 1 );
uint8_t subSequenceCount = 0;
numchar = fread ( &subSequenceCount, sizeof( uint8_t ), 1 , InFileEndPos );
checkIfEqual( numchar, 1 );
uint8_t hasRevComp = 0;
numchar = fread ( &hasRevComp, sizeof( uint8_t ), 1 , InFileEndPos );
checkIfEqual( numchar, 1 );
sortElement triple;
uint currentSortedReadIndex = 0;
SequenceNumber i = 0;
while ( currentSortedReadIndex < errorsInSortedReads.size() )
{
numchar = fread ( &triple.seqN, sizeof( SequenceNumber ), 1 , InFileEndPos );
checkIfEqual( numchar, 1 );
// numchar = fread ( &triple.posN, sizeof( LetterNumber ), 1 , InFileEndPos );
// checkIfEqual( numchar, 1 );
// numchar = fread ( &triple.pileN, sizeof( AlphabetSymbol ), 1 , InFileEndPos );
// checkIfEqual( numchar, 1 );
uint8_t subSequenceNum;
numchar = fread ( &subSequenceNum, sizeof( uint8_t ), 1 , InFileEndPos );
checkIfEqual( numchar, 1 );
while (
currentSortedReadIndex < errorsInSortedReads.size()
&&
i == ( SequenceNumber )( errorsInSortedReads[currentSortedReadIndex].seqNum )
)
errorsInSortedReads[currentSortedReadIndex++].seqNum = triple.seqN;
i++;
}
fclose( InFileEndPos );
}
static const char complementaryAlphabet[] = "$TGCNAZ";
string strreverse( const string &inStr )
{
string result = "";
for ( int i = inStr.size() - 1; i >= 0; i-- )
result += inStr[i];
return result;
}
void ErrorInfo::ConvertRCCorrectionsToOriginal( vector<ErrorInfo> &corrections, int numberOfReads, int readLength )
{
for ( uint errNo = 0; errNo < corrections.size(); errNo++ )
if ( corrections[errNo].seqNum >= numberOfReads )
{
corrections[errNo].seqNum -= numberOfReads;
corrections[errNo].positionInRead = readLength - 1 - corrections[errNo].positionInRead;
for ( uint i = 0; i < corrections[errNo].corrector.size(); i++ )
corrections[errNo].corrector[i] = complementaryAlphabet[whichPile[( int )corrections[errNo].corrector[i]]];
corrections[errNo].correctorStart = corrections[errNo].positionInRead;
corrections[errNo].reverseStrand = true;
}
else
{
corrections[errNo].correctorStart = corrections[errNo].positionInRead - ( corrections[errNo].corrector.size() - 1 );
corrections[errNo].corrector = strreverse( corrections[errNo].corrector );
corrections[errNo].reverseStrand = false;
}
}
void ErrorInfo::CorrectionsToCsv( const string &fileName, vector<ErrorInfo> &corrections )
{
ofstream correctionsFile;
correctionsFile.open( fileName.c_str() );
correctionsFile << "read position reverse_strand correction corrector_start shortest_witness longest_witness" << endl; //the columns of the output csv file.
for ( uint errNo = 0; errNo < corrections.size(); errNo++ )
correctionsFile
<< corrections[errNo].seqNum << " "
<< corrections[errNo].positionInRead << " "
<< corrections[errNo].reverseStrand << " "
<< corrections[errNo].corrector << " "
<< corrections[errNo].correctorStart << " "
<< corrections[errNo].firstCycle << " "
<< corrections[errNo].lastCycle << endl;
correctionsFile.close();
}
vector<ErrorInfo> ErrorInfo::ReadCorrectionsFromCsv( const string &fileName )
{
vector<ErrorInfo> result;
ifstream in( fileName.c_str() );
string correctionRecord;
bool firstLine = true;
while ( getline( in, correctionRecord ) )
{
if ( firstLine )
{
firstLine = false;
continue;
}
ErrorInfo correction;
stringstream ss( correctionRecord );
ss >> correction.seqNum;
ss >> correction.positionInRead;
ss >> correction.reverseStrand;
ss >> correction.corrector;
ss >> correction.correctorStart;
ss >> correction.firstCycle;
ss >> correction.lastCycle;
correction.readEnd = correction.positionInRead + correction.firstCycle + 1;
result.push_back( correction );
}
in.close();
return result;
}
<|start_filename|>src/shared/Beetl.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "Beetl.hh" // own declarations
#include "Algorithm.hh" // framework class declaration
#include "BCRext.hh" // interface to BCRext
#include "BWTCollection.hh" // interface to BCR
#include "CountWords.hh" // interface to countwords
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <string>
using namespace std;
//#define BEETL_ID "$Id$"
const string BEETL_ID( "1.0" );
int main( int numArgs, char **args )
{
if ( numArgs < 2 )
{
print_usage( args[0] );
exit( EXIT_SUCCESS );
}
if ( strcmp( args[1], COMMAND_BCR ) == 0 )
{
bcrMode = 0; // explicitly set default mode to zero (ie build BWT from sequence)
CompressionFormatType bcrCompression( compressionASCII );
// start at 2, 0 is the executable, 1 the command name parsed above
for ( int i = 2; i < numArgs; i++ )
if ( args[i][0] == '-' ) // only flags here "-X etc."
{
switch ( args[i][1] )
{
case 'i':
// next param should be the filename, checking now
isArgumentOrExit( i + 1, numArgs );
// don't check file here! In -m 1 mode this is a prefix name, not an actual file
// so the file doesn't need to exist for input to be valid - TC 4.9.12
// fileIsReadableOrExit(args[i + 1]);
bcrFileIn = args[i + 1]; // this should be the name
cout << "-> input file is " << bcrFileIn << endl;
break;
case 'o':
isArgumentOrExit( i + 1, numArgs );
bcrFileOut = args[i + 1];
cout << "-> output prefix is " << bcrFileOut << endl;
break;
case 'm':
isArgumentOrExit( i + 1, numArgs );
bcrMode = atoi( args[i + 1] );
if ( bcrMode > 2 || bcrMode < 0 )
{
cerr << bcrMode << " is not a valid bcr mode" << endl;
exit( EXIT_FAILURE );
}
cout << "-> working mode set to \""
<< bcrModes[bcrMode]
<< "\""
<< endl;
break;
case 'a':
bcrCompression = compressionASCII;
cout << "-> writing ASCII encoded output"
<< endl;
break;
case 'h':
bcrCompression = compressionHuffman;
cout << "Huffman encoding not yet supported, sorry."
<< endl;
exit( EXIT_FAILURE );
//cout << "-> writing huffman encoded output"
// << endl;
break;
case 'r':
bcrCompression = compressionRunLength;
cout << "-> writing runlength encoded output"
<< endl;
break;
case 't':
bcrCompression = compressionIncrementalRunLength;
cout << "-> writing incremental runlength encoded output"
<< endl;
break;
default:
cout << "!! unknown flag \""
<< args[i][1] << "\"" << endl;
print_usage( args[0], COMMAND_BCR );
exit( EXIT_FAILURE );
}
}
// check if all arguments are given
if ( bcrFileIn.length() > 0 && bcrMode >= 0 )
{
if ( bcrFileOut.length() == 0 )
{
bcrFileOut = bcrFileOutPrefixDefault;
}
if ( bcrMode == 0 )
{
fileIsReadableOrExit( bcrFileIn.c_str() );
}
// Redundant and produces confusing warning - SeqReader now checks for and handles fasta, fastq and raw
// sequence files, the latter two trigger a warning here. TC 4.9.12
// if (bcrMode == 0)
// {
// isValidFastaFile(bcrFileIn.c_str());
// }
// created new tool object
Algorithm *pBCR
= new BCR( bcrMode, bcrFileIn, bcrFileOut, bcrCompression );
// run previous main method
pBCR->run();
// clean up
delete pBCR;
// die
exit( EXIT_SUCCESS );
}
else
{
// something wrong happened
print_usage( args[0], COMMAND_BCR );
exit( EXIT_FAILURE );
}
}
else if ( strcmp( args[1], COMMAND_BCR_EXT ) == 0 )
{
// set defaults for BCRext mode
bcrExtAsciiOutput = false; // use normal ASCII alphabet as output
bcrExtHuffmanOutput = false; // use huffman encoding as compression
bcrExtRunlengthOutput = true; // use RunLength encoding [default]
bcrExtImplicitSort = false; // do implicit sort of input sequences
for ( int i = 2; i < numArgs; i++ )
{
if ( args[i][0] == '-' ) // only flags here "-X etc."
{
string thisArg( ( string )args[i] );
if ( thisArg == "-i" )
{
// next param should be the filename, checking now
isArgumentOrExit( i + 1, numArgs );
fileIsReadableOrExit( args[i + 1] );
bcrExtFileIn = args[i + 1]; // this should be the name
cout << "-> input file is " << bcrExtFileIn << endl;
}
else if ( thisArg == "-p" )
{
isArgumentOrExit( i + 1, numArgs );
bcrExtFileOutPrefix = args[i + 1];
cout << "-> output prefix set to "
<< bcrExtFileOutPrefix << endl;
}
else if ( thisArg == "-s" )
{
cout << "-> using SeqFile input"
<< endl;
bcrExtUseSeq = true;
}
else if ( thisArg == "-a" )
{
bcrExtAsciiOutput = true;
bcrExtRunlengthOutput = false;
bcrExtHuffmanOutput = false;
cout << "-> writing ASCII encoded output"
<< endl;
}
else if ( thisArg == "-h" )
{
bcrExtAsciiOutput = false;
bcrExtRunlengthOutput = false;
bcrExtHuffmanOutput = true;
cout << "-> writing huffman encoded output"
<< endl;
}
else if ( thisArg == "-r" )
{
bcrExtAsciiOutput = false;
bcrExtRunlengthOutput = true;
bcrExtHuffmanOutput = false;
cout << "-> writing runlength encoded output"
<< endl;
}
else if ( thisArg == "-sap" )
{
bcrExtImplicitSort = true;
cout << "-> perform implicit sort of input sequences"
<< endl;
}
else
{
cout << "!! unknown flag \""
<< thisArg << "\"" << endl;
print_usage( args[0], COMMAND_BCR_EXT );
exit( EXIT_FAILURE );
}
} // ~if begins with -
} // ~for
// cout << bcrExtImplicitSort << bcrExtHuffmanOutput << bcrExtRunlengthOutput << endl;
if ( bcrExtFileIn.empty() )
{
print_usage( args[0], COMMAND_BCR_EXT );
exit( EXIT_FAILURE );
}
/*
if (!bcrExtUseSeq && ! isValidFastaFile(bcrExtFileIn.c_str())) {
print_usage(args[0],COMMAND_BCR_EXT);
// exit(EXIT_FAILURE);
}
if (bcrExtUseSeq && !isValidReadFile(bcrExtFileIn.c_str())) {
print_usage(args[0],COMMAND_BCR_EXT);
exit(EXIT_FAILURE);
}
*/
// check if all arguments are given
if ( bcrExtRunlengthOutput || bcrExtAsciiOutput || bcrExtHuffmanOutput )
// check if its a fasta file when fasta flag set
//&& ( isValidFastaFile(bcrExtFileIn.c_str()) ||
// is this a valid .seq file? only when no fasta flag
//(bcrExtUseSeq && isValidReadFile(bcrExtFileIn.c_str()))) )
{
if ( bcrExtFileOutPrefix.length() == 0 )
{
bcrExtFileOutPrefix = bcrExtFileOutPrefixDefault;
}
// create new tool object
Algorithm *pBCRext = new BCRext( bcrExtHuffmanOutput,
bcrExtRunlengthOutput,
bcrExtAsciiOutput,
bcrExtImplicitSort,
bcrExtUseSeq,
bcrExtFileIn,
bcrExtFileOutPrefix );
// run previous main method
pBCRext->run();
// clean up
delete pBCRext;
// die
exit( EXIT_SUCCESS );
}
else
{
// something went wrong
print_usage( args[0], COMMAND_BCR_EXT );
exit( EXIT_FAILURE );
}
}
else if ( strcmp( args[1], COMMAND_COUNTWORDS ) == 0 )
{
vector<string> filesA, filesB, filesC;
string ncbiTax;
int minWord( 0 );
string prefix;// prefix for output files
for ( int i = 2; i < numArgs; i++ )
if ( args[i][0] == '-' ) // only flags here "-X etc."
{
//cout << "blah" << endl;
switch ( args[i][1] )
{
case 'r':
cout << "-> reference genome mode for set B " << endl;
whichHandler = 'r';
break;
case 's' :
cout << "-> splice mode set " << endl;
whichHandler = 's';
break;
case 'm' :
cout << "-> metagenomic mode set " << endl;
whichHandler = 'm';
break;
case 'a':
while ( args[++i][0] != '-' )
{
//cout << args[i] << " fred " << endl;
fileIsReadableOrExit( args[i] );
filesA.push_back( args[i] );
cout << "-> input file A is "
<< filesA.back()
<< endl;
}
i--;
break;
#ifdef OLD
// next param should be the filename, checking
isArgumentOrExit( i + 1, numArgs );
fileIsReadableOrExit( args[i + 1] );
countWordsInputA = args[i + 1]; // should be the name
// cout << "-> input file A is "
// << countWordsInputA
// << endl;
break;
#endif//OLD
case 'b':
while ( ( ++i ) != numArgs )
{
if ( strcmp( args[i], "-c" ) == 0 )
{
i--;
break;
}
fileIsReadableOrExit( args[i] );
filesB.push_back( args[i] );
// cout << "-> input file B is "
// << filesB.back()
// << endl;
}
// i--;
break;
case 'c' :
if ( whichHandler != 'm' )
{
cerr << "Merged set C not needed Here!" << endl;
break;
}
while ( ( ++i ) != numArgs )
{
if ( args[i][0] == '-' )
{
i--;
break;
}
fileIsReadableOrExit( args[i] );
filesC.push_back( args[i] );
cout << "-> input file C is "
<< filesC.back()
<< endl;
}
break;
#ifdef OLD
// next param should be the filename, checking now
isArgumentOrExit( i + 1, numArgs );
fileIsReadableOrExit( args[i + 1] );
countWordsInputB = args[i + 1]; // should be the name
cout << "-> input file B is "
<< countWordsInputB
<< endl;
break;
#endif
case 't':
if ( whichHandler != 'm' )
{
cerr << "The taxonomic file is not needed without the -m option." << endl;
break;
}
cout << "-> Got taxonomic information " << args[i + 1] << endl ;
ncbiTax = args[i + 1];
i++;
break;
case 'd':
if ( whichHandler != 'm' )
{
cerr << "Option -d is not available without merged input. " << endl;
break;
}
cout << "Will test database " << endl;
testDatabase = true;
break;
case 'w':
minWord = atoi( args[i + 1] );
cout << "-> minimum word length w in metagenomic mode set to \""
<< minWord
<< "\""
<< endl;
break;
case 'p':
prefix = args[i + 1];
break;
case 'k':
isArgumentOrExit( i + 1, numArgs );
maxLengthK = atoi( args[i + 1] );
if ( maxLengthK < 0 )
{
cerr << "!! "
<< maxLengthK
<< " is no valid length "
<< endl;
exit( EXIT_FAILURE );
}
cout << "-> max kmer length set to \""
<< maxLengthK
<< "\""
<< endl;
break;
case 'n':
isArgumentOrExit( i + 1, numArgs );
minimalOccurencesN = atoi( args[i + 1] );
if ( minimalOccurencesN < 0 )
{
cerr << "!! "
<< minimalOccurencesN
<< " is no valid value "
<< endl;
exit( EXIT_FAILURE );
}
cout << "-> min occurences n set to \""
<< minimalOccurencesN
<< "\""
<< endl;
break;
case 'A':
cout << "-> assuming set A is compressed" << endl;
compressedInputA = true;
break;
case 'B':
cout << "-> assuming set B is compressed" << endl;
compressedInputB = true;
break;
case 'C':
cout << "-> assuming set A&B are compressed" << endl;
compressedInputA = true;
compressedInputB = true;
break;
default:
cout << "!! unknown flag \"" << args[i][1]
<< "\"" << endl;
print_usage( args[0], COMMAND_COUNTWORDS );
exit( EXIT_FAILURE );
}
}
// check for required arguments
if ( ( maxLengthK > 0 ) && ( minimalOccurencesN > 0 ) &&
( !filesA.empty() ) && ( filesA.size() == filesB.size() ) )
{
// create new tool object
Algorithm *pcountWords = new CountWords( compressedInputA,
compressedInputB, whichHandler, minimalOccurencesN,
maxLengthK, filesA, filesB, filesC, ncbiTax, testDatabase, minWord, "" );
// run the "main" method
pcountWords->run();
// clean up
delete pcountWords;
// closing time
exit( EXIT_SUCCESS );
}
else
{
// oops
print_usage( args[0], COMMAND_COUNTWORDS );
exit( EXIT_FAILURE );
}
}
else
{
cerr << "!! \"" << args[1] << "\": unknown command" << endl;
print_usage( args[0] );
exit( EXIT_FAILURE );
}
return 0;
}
void fileIsReadableOrExit( string filename )
{
FILE *pFile;
// test file for read access
pFile = fopen( filename.c_str(), "r" );
if ( pFile != NULL )
{
fclose( pFile );
return;
}
else
{
cerr << "!! \"" << filename << "\" is NOT readable!" << endl;
exit( EXIT_FAILURE );
}
}
void isArgumentOrExit( int num, int numArgs )
{
if ( ( num ) > ( numArgs - 1 ) )
{
cerr << "!! CLI parsing error. Wrong number of arguments?" << endl;
exit( EXIT_FAILURE );
}
}
void print_usage( char *args, const char *command )
{
cerr << endl << "- This is the BEETL software library -" << endl
<< endl
// Tony 13.6.12 - BEETL_ID is not informative now we have moved to git
// << "Framework version" << endl
// << BEETL_ID << endl
<< endl;
if ( command == 0 )
cerr << "Included in this framework are the following algorithms" << endl
<< endl
<< endl;
if ( command == 0 || string( command ) == COMMAND_BCR_EXT )
cerr << "-> BCRext - command \"" << COMMAND_BCR_EXT << "\"" << endl
<< "========================================================" << endl
<< "improved version of the original algorithm" << endl
<< "uses significantly less RAM (a.k.a. none) but depends heavily on I/O" << endl
<< endl
<< "Usage: " << args << " "
<< COMMAND_BCR_EXT << " -i <read file> -p <output file prefix> [-h -r -a] [-s] [-sap]" << endl
<< endl
<< "-i <file>:\tinput file in fasta format" << endl
<< "-s:\t\tuse .seq input files instead of fasta (each line one sequence)" << endl
<< "-p <string>:\toutput file names will start with \"prefix\"" << endl
<< "-a:\t\toutput ASCII encoded files" << endl
<< "-r:\t\toutput runlength encoded files [recommended]" << endl
<< "-h:\t\toutput Huffman encoded files" << endl
<< "-sap:\t\tperform implicit permutation of collection to obtain more compressible BWT"
<< endl
<< endl;
if ( command == 0 || string( command ) == COMMAND_BCR )
cerr << "-> BCR - command \"" << COMMAND_BCR << "\"" << endl
<< "========================================================" << endl
<< "original algorithm to construct the BWT of a set of reads" << endl
<< "needs approximately 14GB of RAM for 1 billion reads" << endl
<< endl
<< "Usage: " << args << " "
<< COMMAND_BCR << " -i <fasta or seq read file> -o <output file> [-r -t -a] -m <[0,1,2]>" << endl
<< endl
<< "-i <file>:\tinput set of reads [if mode = 1 set the prefix of the BWT files, normally BCR-B0]" << endl
<< "-o <file>:\toutput file" << endl
<< "-m <n>:\t\tmode = 0 --> BCR " << endl
<< "\t\tmode = 1 --> unBCR " << endl
<< "\t\tmode = 2 --> Backward search + Locate SeqID (Uses extra file called \"searchedKmers\" as input)" << endl
<< "-a:\t\toutput ASCII encoded files in BCR mode [mode = 0]" << endl
<< "-r:\t\toutput run length encoded files in BCR mode [mode = 0]" << endl
<< "-t:\t\toutput incremental runlength encoded files [experimental]" << endl
<< endl
<< endl;
if ( command == 0 || string( command ) == COMMAND_COUNTWORDS )
cerr << "-> countWords - command \"" << COMMAND_COUNTWORDS << "\"" << endl
<< "========================================================" << endl
<< "find all words of length at least k that occur" << endl
<< "at least n times in string set A and never in string set B" << endl
<< endl
<< "Usage: " << args << " "
<< COMMAND_COUNTWORDS << " [-A -B -C] [-s -r -m] -k <n> -n <n> -p <file_prefix> -a <set A> -b <set B> "
<< "[-c <C part of set B> -t file -d -o databaseTestOut ]" << endl
<< endl
<< "-A:\t\tassume BWT files for set A are in compressed format" << endl
<< "-B:\t\tassume BWT files for set B are in compressed format" << endl
<< "-C:\t\tassume BWT files for sets A and B are in compressed format" << endl
<< "-s:\t\tassume set B are reads of a reference" << endl
<< "-r:\t\tassume set B is a reference genome" << endl
<< "-m:\t\tassume set B are merged reference genomes" << endl
<< "-k <n>:\t\tmaximal length" << endl
<< "-n <n>:\t\tminimum number of occurences (coverage)" << endl
<< "-p <prefix>:\tprefix out output files" << endl
<< "-a <file>:\tinput set A" << endl
<< "-b <file>:\tinput set B" << endl
<< "-c <file>:\tC output of merging, only needed in -m Mode" << endl
<< "-t file:\ttaxonomy information for the merged input, only needed in -m Mode" << endl
<< "-w n\t\tminimal length k-mer" << endl
<< "-d:\t\tflag to test the minimal needed word length for the different taxa in the databse. Only available with -m" << endl
<< endl
<< endl;
if ( !command )
cerr << "If you had fun using these algorithms you may cite:" << endl
<< "---------------------------------------------------" << endl
<< "<NAME>, <NAME> and <NAME>" << endl
<< "Lightweight BWT Construction for Very Large String Collections. " << endl
<< "Proceedings of CPM 2011, pp.219-231, doi: 10.1007/978-3-642-21458-5_20" << endl
<< "[Description of BWT construction algorithms in 'bcr' and 'ext' modes]" << endl << endl
<< "<NAME>, <NAME> and <NAME>" << endl
<< "Lightweight algorithms for constructing and inverting the BWT of string collections "
<< endl << "Theoretical Computer Science, doi: 10.1016/j.tcs.2012.02.002" << endl
<< "[As above plus description of BWT inversion in 'bcr' mode]" << endl << endl
<< "<NAME>, <NAME>, <NAME> and <NAME>" << endl
<< "Large-scale compression of genomic sequence databases with the Burrows-Wheeler transform"
<< endl
<< "Bioinformatics, doi: 10.1093/bioinformatics/bts173" << endl
<< "[Description of '-sap' compression boosting strategy in 'ext' mode]" << endl << endl
<< "BEETL web page:" << endl
<< "---------------" << endl
<< "http://beetl.github.com/BEETL/" << endl << endl
<< "BEETL user group:" << endl
<< "-----------------" << endl
<< "http://tech.groups.yahoo.com/group/BEETL/" << endl << endl;
}
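// ---------------------------------------------------------------------------
// Illustrative invocations (a sketch only, derived from the usage text above):
// the literal sub-command tokens are whatever COMMAND_BCR_EXT, COMMAND_BCR and
// COMMAND_COUNTWORDS expand to, and all file names are placeholders.
//
//   <beetl-executable> <COMMAND_BCR_EXT> -i reads.fasta -p myBWT -r
//   <beetl-executable> <COMMAND_BCR> -i reads.fasta -o outBWT -m 0
//   <beetl-executable> <COMMAND_COUNTWORDS> -r -k 30 -n 2 -p cmp -a readsA-B0* -b readsB-B0*
// ---------------------------------------------------------------------------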
<|start_filename|>src/search/ExtenderIntervalHandler.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "ExtenderIntervalHandler.hh"
#include "IntervalFile.hh"
#include "libzoo/util/ColorText.hh"
#include "libzoo/util/Logger.hh"
#include <algorithm>
using namespace std;
extern vector<string> kmerList2;
ExtenderIntervalHandler::ExtenderIntervalHandler( EndPosFile &endPosFile )
: endPosFile_( endPosFile )
{
}
void ExtenderIntervalHandler::foundInAOnly
( const int pileNum,
const LetterCount &countsSoFarA,
const LetterCount &countsThisRangeA,
const char *bwtSubstring,
Range &thisRangeBaseA,
AlphabetFlag &propagateIntervalA,
const int cycle
)
{
if ( countsThisRangeA.count_[0] )
{
IntervalRecord *rec = reinterpret_cast< IntervalRecord * >( thisRangeBaseA.userData_ );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "$ signs detected for " << *rec << ": " << countsThisRangeA.count_[0] << " items from " << countsSoFarA.count_[0] << endl;
if ( !thisRangeBaseA.word_.empty() )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << " Sub-sequence from beginning of read to searched kmer: " << thisRangeBaseA.word_ << endl;
}
for ( LetterNumber i = 0; i < countsThisRangeA.count_[0]; ++i )
rec->dollarSignPositions.push_back( countsSoFarA.count_[0] + i );
}
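// Only propagate the backward search towards letters that actually occur in
// this interval of BWT A; piles with a zero count have nothing to extend.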
for ( int l( 0 ); l < alphabetSize; l++ )
propagateIntervalA[l] = ( countsThisRangeA.count_[l] > 0 );
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
IntervalRecord *rec = reinterpret_cast< IntervalRecord * >( thisRangeBaseA.userData_ );
Logger::out() << "Propagating " << *rec << " to " << countsThisRangeA << endl;
}
if ( !thisRangeBaseA.word_.empty() ) // == if --propagate-sequence
{
// Full sequence output at last cycle
const IntervalRecord *rec = reinterpret_cast< IntervalRecord * >( thisRangeBaseA.userData_ );
const LetterNumber nextBwtPosStart = countsSoFarA.count_[whichPile[( int )rec->kmer[0]]];
const LetterNumber count = countsThisRangeA.count_[whichPile[( int )rec->kmer[0]]];
if ( ( count && ( nextBwtPosStart + count > rec->position ) && ( nextBwtPosStart < rec->position + rec->count ) )
|| ( rec->kmer[0] == '$' && countsThisRangeA.count_[0] > 0 ) )
// const int cycleCount = 101;
// if (cycle >= cycleCount + 1)
{
const int cycleCount = cycle - 1;
const string &rotatedSeq = thisRangeBaseA.word_;
// cout << "rec " << *rec << ":\nSeq=" << rotatedSeq << endl;
const size_t searchedKmerSize = rec->kmer.size() + 1; // TODO: this k-mer size should be fixed in beetl-compare output
const size_t dollarPos = rotatedSeq.find( '$' );
size_t kmerPosInRead;
string part1, part2, part3;
//string part1_cycle0Letter;
if ( rec->kmer[0] == '$' )
{
kmerPosInRead = 0;
part1 = rotatedSeq.substr( 0, dollarPos );
}
else if ( dollarPos != string::npos )
{
kmerPosInRead = cycleCount - dollarPos;
part1 = rotatedSeq.substr( dollarPos + 1, kmerPosInRead - 2 );
// part1_cycle0Letter = rotatedSeq.substr( dollarPos + kmerPosInRead - 1, 1 );
part2 = rotatedSeq.substr( 0, searchedKmerSize - 2 );
if ( dollarPos + 2 >= searchedKmerSize )
part3 = rotatedSeq.substr( searchedKmerSize - 2, dollarPos - searchedKmerSize + 2 );
else
part3 = "??? " + rotatedSeq;
}
else
{
kmerPosInRead = cycleCount + 1;
part1 = rotatedSeq;
}
// We still have 1 character to propagate to get the full sequence, and the possible values of this character are in countsThisRangeA
for ( int i = 0; i < alphabetSize; i++ )
{
for ( uint j = 0; j < countsThisRangeA.count_[i]; ++j ) // Just repeat the same line multiple times in the unlikely event of this count being > 1
{
string lastPropagatedChar;
if ( i ) lastPropagatedChar = alphabet[i]; // don't print '$' chars
// header
static bool firstTime = true;
if ( firstTime )
{
cout << "Output:\t//kmer\tposition\tcount\tposInRead\tdollarPos\tseqNum\tseq" << endl;
firstTime = false;
}
SequenceNumber dollarPos = 0, seqNum = 0;
if ( !rec->dollarSignPositions.empty() )
{
dollarPos = rec->dollarSignPositions[0];
seqNum = endPosFile_.convertDollarNumToSequenceNum( dollarPos );
}
cout << "Output:\t" << rec->kmer
<< '\t' << rec->position
<< '\t' << rec->count
<< '\t' << ( ( off_t )kmerPosInRead - 1 )
<< '\t' << dollarPos
<< '\t' << seqNum
<< '\t' << part1 << ColorText::startRed << /*part1_cycle0Letter << */ lastPropagatedChar << part2 << ColorText::endRed << part3 << endl;
}
}
// End propagation
for ( int i = 0; i < alphabetSize; i++ )
propagateIntervalA[i] = false;
}
}
else
{
// if not --propagate-sequence, then we stop the propagation at $ signs
propagateIntervalA[0] = false;
}
}
<|start_filename|>src/errors/CorrectionAligner.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include <algorithm>
#include <iostream>
#include <stdio.h>
#include <fstream>
#include <vector>
#include <math.h>
#include <limits>
#include "AlignmentParameters.hh"
#include "ErrorInfo.hh"
#ifndef INCLUDED_CORRECTIONALIGNER_HH
#define INCLUDED_CORRECTIONALIGNER_HH
using namespace std;
using namespace BeetlAlignParameters;
template <class T>
void zapMatrix( T **matrix, int rows );
template <class T>
T **makeMatrix( int rows, int cols );
class CorrectionAligner
{
public:
virtual ~CorrectionAligner() {}
static string MakeFastaRecord( int currentRead, string name, string sequence, string quality );
static string MakeFastqRecord( int currentRead, string name, string sequence, string quality );
static bool SortByLastCycle( ErrorInfo *a, ErrorInfo *b );
void ApplyCorrections(
SeqReaderFile *readsFile,
vector<ErrorInfo> &corrections,
ostream &correctedReadsOut,
bool correctionsOnly,
ReadsFormat fileType
);
void ApplyCorrections(
SeqReaderFile *readsFile,
vector<ErrorInfo> &corrections,
const string &outFile,
bool correctionsOnly,
ReadsFormat fileType
);
//override this method to make a new aligner capable of correcting reads without quality scores
virtual string Correct( const string &errorContainingRead, vector<ErrorInfo *> &corrections );
//override this method to make a new aligner capable of correcting reads with quality scores
virtual void CorrectRead(
vector<ErrorInfo *> &corrections,
const string &errorContainingRead,
const string &inQstr,
string &outRead,
string &outQstr
);
};
class SmithWatermanCorrectionAligner : public CorrectionAligner
{
public:
SmithWatermanCorrectionAligner( int m, int mm, int d, int i ): matchScore_( m ), mismatchScore_( mm ), deletionScore_( d ), insertionScore_( i ) {}
void Align( const string &seq1, const string &seq2, int &lengthOnSeq1, int &lengthOnSeq2 );
void Align( const string &seq1, const string &seq2, int &lengthOnSeq1, int &lengthOnSeq2, bool correctForwards );
string Replace( const string &original, const string &correction, int lineUpPosition, bool correctForwards );
string Replace( const string &original, const string &correction, int lineUpPosition, bool correctForwards, int &lengthOnOriginal );
string Correct( const string &errorContainingRead, vector<ErrorInfo *> &corrections );
private:
int matchScore_;
int mismatchScore_;
int deletionScore_;
int insertionScore_;
};
class StitchAligner : public CorrectionAligner
{
public:
string Correct( const string &errorContainingRead, vector<ErrorInfo *> &corrections );
};
class NoIndelAligner : public CorrectionAligner
{
public:
NoIndelAligner( char correctionQuality, int minLastCycle, bool trim ):
correctionQuality_( correctionQuality ),
minLastCycle_( minLastCycle ),
trim_( trim )
{}
void CorrectRead(
vector<ErrorInfo *> &corrections,
const string &errorContainingRead,
const string &inQstr,
string &outRead,
string &outQstr
);
private:
char correctionQuality_;
int minLastCycle_;
bool trim_;
};
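#ifdef CORRECTION_ALIGNER_EXAMPLE // illustrative sketch only; this hypothetical macro is never defined
// Minimal example of a custom aligner built on the interface above: it
// overrides Correct() and simply returns the read unchanged, so no
// corrections are applied.
class PassThroughAligner : public CorrectionAligner
{
public:
    string Correct( const string &errorContainingRead, vector<ErrorInfo *> & /*corrections*/ )
    {
        return errorContainingRead;
    }
};
#endif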
#endif
<|start_filename|>src/BCR/BWTCollection.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef _SXSI_BWTCollection_h_
#define _SXSI_BWTCollection_h_
#include "Algorithm.hh"
#include "LetterCount.hh"
#include "Sorting.hh"
#include "Tools.hh" // Defines ulong and uchar.
#include "parameters/BwtParameters.hh"
#include "parameters/UnbwtParameters.hh"
#include <iostream>
#include <string>
using std::string;
// by Tobias, small class interface to call from beetl executable
class BCR : public Algorithm
{
int mode_;
string inFile_;
string outFile_;
CompressionFormatType outputCompression_;
public:
BCR( const int mode, const string &in, const string &out,
const CompressionFormatType outputCompression );
~BCR()
{
}
void run( void );
};
// end
namespace SXSI
{
/**
* General interface for a bwt collection
*
* Class is virtual, make objects by calling
* the static method InitBWTCollection().
*/
class BWTCollection
{
public:
vector <sortElement> vectTriple; //It is used for encoding, decoding and searching.
//ulong seqN; //contains a number of a sequence
//ulong posN; //contains the position of the last inserted symbol of the sequence seqN[i]
//uchar pileN; //contains the number of the pile of the last inserted symbol of the sequence seqN[i]
SequenceNumber nText; //total number of texts in filename1
//SequenceNumber middle; // number of sequence in filename1
//LetterNumber middleLength; //text[middleLength] = the first letter of the second database (filename2)
SequenceLength lengthRead; //number of char in each text + $
LetterNumber lengthTot; //length of all the texts without $
LetterNumber lengthTot_plus_eof; //length of the BWT
LetterNumber **tableOcc; //contains the number of occurrences of each symbol
LetterCountEachPile tableOcc_; // replaces tableOcc
vector<AlphabetSymbol> alpha; //Correspondence between the alphabet, the piles and tableOcc
AlphabetSymbol sizeAlpha; //number of the different symbols in the input texts
AlphabetSymbol *alphaInverse; //Correspondence between alpha[i] and the symbol as char
vector< vector< vector<LetterNumber> > > vectorOcc;
vector <LetterNumber> numBlocksInPartialBWT;
//LetterNumber*** vectorOcc;
vector<sortElement> FirstVector, LastVector;
CompressionFormatType outputCompression_;
/**
* Init an instance of a text collection object
*
* Returns a pointer to an object implementing this interface.
*/
static BWTCollection *InitBWTCollection
( const string &file1, const string &fileOut, const int mode,
const CompressionFormatType outputCompression );
/**
* Virtual destructor
*/
virtual ~BWTCollection() {}
/**
*
* The i'th text insertion gets an identifier value i-1.
* In other words, document identifiers start from 0.
*/
virtual int buildBCR( const string &, const string &, const BwtParameters *bwtParams ) = 0;
virtual int unbuildBCR( char const *, char const *, char const *, char const * ) = 0;
virtual int backwardSearchBCR( char const * , char const * , char const * , char const * ) = 0;
virtual int decodeBCRnaiveForward( char const *, char const *, char const * ) = 0; //Inverse BWT by Forward direction of nText sequences, one sequence at a time, in lexicographic order.
virtual int decodeBCRmultipleReverse( char const *, char const *, char const *, bool processQualities = false ) = 0; //Inverse BWT by Backward direction of nText sequences at the same time by lengthRead iterations.
virtual int RecoverNsymbolsReverse( char const *, char const *, uchar *, uchar *newQual = 0 ) = 0;
virtual int RecoverNsymbolsReverseByVector( char const *file1, char const *fileOutBwt, uchar *newSymb, uchar *newQual = 0 ) = 0;
virtual int Recover1symbolReverse( char const * , char const * , uchar *, sortElement * ) = 0;
virtual SequenceNumber recover1SequenceForward( char const * , char const * , sortElement , uchar *, SequenceLength * ) = 0 ;
virtual vector <int> recoverNSequenceForward( char const * , char const *, SequenceNumber ) = 0;
virtual int recoverNSequenceForwardSequentially( char const * , char const *, SequenceNumber ) = 0;
virtual void storeBWT( uchar const *, uchar const *qual = NULL ) = 0;
virtual void storeEntireBWT( const string & ) = 0;
virtual void storeSA( SequenceLength ) = 0;
virtual void storeEntirePairSA( const char * ) = 0;
virtual void storeEntireSAfromPairSA( const char * ) = 0;
virtual void storeBWTandLCP( uchar const * ) = 0;
virtual void storeEntireLCP( const string & ) = 0;
virtual LetterNumber rankManySymbols( FILE &, LetterNumber *, LetterNumber, uchar * ) = 0;
virtual LetterNumber rankManySymbolsByVector( FILE & , LetterNumber *, LetterNumber, uchar *, uchar *foundQual = 0, FILE *InFileBWTQual = 0 ) = 0;
virtual LetterNumber findRankInBWT ( char const *, char const *, AlphabetSymbol, LetterNumber, uchar ) = 0;
virtual LetterNumber findRankInBWTbyVector ( char const *, char const *, AlphabetSymbol, LetterNumber, uchar ) = 0;
virtual int rankInverseManyByVector ( char const * , char const * , SequenceNumber , uchar * ) = 0;
virtual int backwardSearchManyBCR( char const * , char const *, char const *, vector<string>, SequenceLength ) = 0;
virtual int SearchAndLocateKmer ( char const * , char const * , char const * , vector<string> , SequenceLength, vector <int> & ) = 0;
private:
virtual void InsertNsymbols( uchar const *, SequenceLength, uchar const *qual = NULL ) = 0;
virtual void InsertFirstsymbols( uchar const *, uchar const *qual = NULL, const int subSequenceNum = 0 ) = 0;
virtual int initializeUnbuildBCR( char const *, char const *, LetterNumber [] ) = 0;
virtual int computeNewPositionForBackSearch ( char const *, char const *, uchar ) = 0;
virtual int computeNewPositionForBackSearchByVector ( char const *, char const *, uchar ) = 0;
virtual int computeVectorUnbuildBCR( char const *, char const *, LetterNumber [] ) = 0;
virtual int computeManyNewPositionForBackSearchByVector( char const * , char const * , uchar *, SequenceNumber ) = 0;
virtual int update_Pos_Pile( sortElement * ) = 0;
virtual int update_Pos_Pile_Blocks( LetterNumber *, LetterNumber *, AlphabetSymbol, uchar ) = 0;
virtual int findBlockToRead( LetterNumber *, AlphabetSymbol , LetterNumber *, LetterNumber * ) = 0;
protected:
// Protected constructor; call the static function InitBWTCollection().
BWTCollection() : tableOcc( NULL ), alpha( 256 ), sizeAlpha( 0 ), alphaInverse( NULL ), outputCompression_( compressionASCII ) { }
// No copy constructor or assignment
BWTCollection( BWTCollection const & );
BWTCollection &operator = ( BWTCollection const & );
};
}
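#ifdef BWTCOLLECTION_USAGE_EXAMPLE // illustrative sketch only; this hypothetical macro is never defined
// Minimal sketch of driving BWT construction through the BCR wrapper above,
// mirroring what the beetl executable does for mode 0 (BCR construction).
// The file names are placeholders.
static void exampleRunBcr()
{
    Algorithm *pBCR = new BCR( 0, "reads.fasta", "outBWT", compressionASCII );
    pBCR->run();
    delete pBCR;
}
#endif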
#endif
<|start_filename|>src/parameters/SearchParameters.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BEETL_SEARCH_PARAMETERS_HH
#define BEETL_SEARCH_PARAMETERS_HH
#include "libzoo/cli/ToolParameters.hh"
#include <string>
namespace BeetlSearchParameters
{
// Option container
enum SearchOptions
{
SEARCH_OPTION_COUNT // end marker
};
} // namespace BeetlSearchParameters
class SearchParameters : public ToolParameters
{
public:
SearchParameters()
{
using namespace BeetlSearchParameters;
addEntry( -1, "input", "--input", "-i", "Input filename prefix (i.e. BWT files are \"prefix-B0[0-6]\")", "", TYPE_STRING | REQUIRED );
addEntry( -1, "output", "--output", "-o", "Output filename", "searchedKmers_positions", TYPE_STRING | REQUIRED );
addEntry( -1, "kmers input file", "--kmers", "-j", "File containing a list of k-mers to be searched for (one k-mer per line) OR", "", TYPE_STRING );
addEntry( -1, "one kmer string", "--kmer", "-k", "Single k-mer string to be searched for", "", TYPE_STRING );
//addEntry( -1, "add rev comp", "--add-rev-comp", "", "Also search for reverse-complemented k-mers (reported as distinct k-mers)", "", TYPE_SWITCH );
// addEntry( -1, "pause between cycles", "--pause-between-cycles", "", "Wait for a key press after each cycle", "", TYPE_SWITCH );
addDefaultVerbosityAndHelpEntries();
}
};
#endif //ifndef BEETL_SEARCH_PARAMETERS_HH
<|start_filename|>src/BCRext/BCRext.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_BCREXT_HH
#define INCLUDED_BCREXT_HH
#include "Algorithm.hh"
#include <string>
using std::string;
// by Tobias, small class interface to call from beetl executable
class BCRext : public Algorithm
{
const bool useHuffmanEncoder_;
const bool useRunlengthEncoder_;
const bool useAsciiEncoder_;
const bool useImplicitSort_;
const bool useSeqFile_;
const string inFile_;
const string prefix_;
public:
BCRext( bool, bool, bool, bool, bool, const string &, const string & );
~BCRext()
{
}
void run( void );
};
// end
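#ifdef BCREXT_USAGE_EXAMPLE // illustrative sketch only; this hypothetical macro is never defined
// Minimal sketch of running BCRext programmatically, using the same argument
// order as the beetl executable: Huffman, runlength, ASCII, implicit sort,
// seq-format input, input file, output prefix. The file names are placeholders.
static void exampleRunBcrExt()
{
    Algorithm *pBCRext = new BCRext( false, true, false, false, false,
                                     "reads.fasta", "myBWT" );
    pBCRext->run();
    delete pBCRext;
}
#endif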
#define BCREXT_HH_ID "@(#) $Id: BCRext.hh,v 1.2 2011/11/28 11:32:09 tjakobi Exp $"
#endif
<|start_filename|>src/shared/Timer.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_TIMER
#define INCLUDED_TIMER
#include <ctime>
#include <fstream>
#include <sys/resource.h>
#include <sys/time.h>
// Class Name : Timer
// Description: Maintains info on actual and processing time
class Timer
{
public:
Timer( void );
std::ostream &print( std::ostream &os );
// timeNow: returns current date and time as an ASCII string
const char *timeNow( void ) const;
private:
rusage thisUsage_;
rusage lastUsage_;
timeval thisTime_;
timeval lastTime_;
// timeb thisTime_;
// timeb lastTime_;
}; // Timer
std::ostream &operator<<( std::ostream &os, Timer &timer );
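#ifdef TIMER_USAGE_EXAMPLE // illustrative sketch only; this hypothetical macro is never defined
// Minimal sketch: report elapsed and CPU time after a unit of work, using the
// stream operator declared above.
#include <iostream>
static void exampleTimer()
{
    Timer timer;
    // ... do some work ...
    std::cerr << "finished, usage: " << timer << std::endl;
}
#endif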
#endif
// end of Timer.hh
<|start_filename|>src/search/SearchUsingBacktracker.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef SEARCH_USING_BACKTRACKER_HH
#define SEARCH_USING_BACKTRACKER_HH
#include "Algorithm.hh"
#include <string>
using std::string;
class SearchParameters;
class SearchUsingBacktracker : public Algorithm
{
public:
SearchUsingBacktracker(
const SearchParameters &searchParams
);
virtual ~SearchUsingBacktracker() {}
virtual void run( void );
private:
const SearchParameters &searchParams_;
};
#endif // SEARCH_USING_BACKTRACKER_HH
<|start_filename|>src/BCR/BCR_BWTCollection.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BWTCollection.hh"
#include <cassert>
#include <iostream>
using namespace std;
using SXSI::BWTCollection;
#define BCR_ID "$Id: BCR_BWTCollection.cpp,v 1.6 2011/11/28 16:38:32 acox Exp $"
BCR::BCR( const int mode, const string &in, const string &out,
const CompressionFormatType outputCompression ) :
mode_( mode ),
outputCompression_( outputCompression )
{
inFile_ = in;
outFile_ = out;
}
void BCR::run( void )
{
BWTCollection *BCRexternalBWT = BWTCollection::InitBWTCollection( inFile_, outFile_, mode_, outputCompression_ );
//cout << "finished iteration, usage: " << timer << endl;
delete BCRexternalBWT;
}
<|start_filename|>src/countWords/IntervalHandlerSplice.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_INTERVALHANDLER_SPLICE_HH
#define INCLUDED_INTERVALHANDLER_SPLICE_HH
#include "Config.hh"
#include "IntervalHandlerBase.hh"
//
// IntervalHandler
//
// Idea here is that different algorithms can be implemented by defining
// new subclasses of IntervalHandler
struct IntervalHandlerSplice : public IntervalHandlerBase
{
IntervalHandlerSplice( unsigned int minOcc ) : minOcc_( minOcc ) {}
virtual ~IntervalHandlerSplice() {}
virtual void foundInBoth
( const int pileNum,
const LetterCount &countsThisRangeA,
const LetterCount &countsThisRangeB,
const Range &thisRangeA,
const Range &thisRangeB,
AlphabetFlag &propagateIntervalA,
AlphabetFlag &propagateIntervalB,
bool &isBreakpointDetected,
const int cycle
);
virtual void foundInAOnly
( const int pileNum,
const LetterCount &countsSoFarA,
const LetterCount &countsThisRangeA,
const char *bwtSubstring,
Range &thisRangeA,
AlphabetFlag &propagateIntervalA,
const int cycle
);
virtual void foundInBOnly
( const int pileNum,
const LetterCount &countsSoFarB,
const LetterCount &countsThisRangeB,
const char *bwtSubstring,
Range &thisRangeB,
AlphabetFlag &propagateIntervalB,
const int cycle
);
const LetterNumber minOcc_;
};
#endif
<|start_filename|>src/metagenomics/parse.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "../shared/Tools.hh"
#include "metaShared.hh"
#include <cassert>
#include <cstring>
#include <cstdlib>
#include <fstream>
#include <iostream>
#include <map>
#include <sstream>
#include <stdint.h>
#include <string>
#include <vector>
using namespace std;
int printUsage( string message );
/*
struct Overlap
{
unsigned int start_;
unsigned int end_;
unsigned int suffixCount_;
unsigned int readCount_;
};
*/
long getFileSize( FILE *file )
{
long lCurPos, lEndPos;
lCurPos = ftell( file );
fseek( file, 0, 2 );
lEndPos = ftell( file );
fseek( file, lCurPos, 0 );
return lEndPos;
}
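#ifdef PARSE_USAGE_EXAMPLE // illustrative sketch only; this hypothetical macro is never defined
// Minimal sketch of getFileSize(): it measures the file length while leaving
// the current read position untouched. The file name is a placeholder.
static long exampleFileSize()
{
    FILE *pFile = fopen( "A-00", "r" );
    if ( pFile == NULL )
        return -1;
    const long size = getFileSize( pFile );
    fclose( pFile );
    return size;
}
#endif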
int whichPile( char c );
int getTaxonomicLevel( string s );
double readCount( 0.0 );
//TAXMAP loadTaxInformationForDatabase( string taxFile, int cycleSize, string ncbiNames );
READMAP loadReadInformation( string seqFile );
void getRefInfoOfCount( TAXMAP &taxInfo, string parseWordOut, vector<int> wordSize );
//void getRefInfoOfBinarys(TAXMAP &taxInfo, vector<string> wordCountOut, int cycleSize);
void printTaxInformation( TAXMAP &taxInfo, FILEMAP &fileInfo );
void parseForCoverage( string countWordOutput, vector<FILE *> mergeAOutput, FILEMAP &fileInfo );
vector<BWTMAP> getBWTInformation( string parseWordOut, int minWordLength, vector<int> wordSizes, string mergedZeroFile, READMAP readMap );
void getTaxCountThroughBWTInfo( vector<BWTMAP> &bwtInfo, TAXMAP &taxInfo, vector<int> cycleSize );
void countWordTest( TAXMAP &taxInfo, vector<int> taxIDsInTest, vector<double> testExpections );
void printTaxTree( TAXMAP &taxInfo, FILEMAP &fileInfo, vector<int> wordSize );
void loadFileNumToTaxIds( string taxIdNames );
vector<BWTMAP> getBWTInformationThroughOnceParsed( string countWordOutput, vector<int> WordSizes );
vector< vector< int> > fileNumToTaxIds;
vector<bool> intervalInSameTaxa( vector<unsigned int> &sharedTaxIds, vector<unsigned short> &fileNumbers );
void getSecondaryInformation( string parsedWordCountOutput );
#include "Krona.hh"
int main( const int argc, const char **argv )
{
if ( argc < 2 )
return printUsage( "not enough arguments" );
bool referenceInfo( false );
bool fastAnalysis( false );
bool testTaxCount( false );
bool secondParsed( false );
bool countTaxLevel( false );
bool getCoverageInformation( false );
string taxInfo;
// string parseCount;
// vector<FILE *> mergeCOutput;
vector<FILE *> mergeAOutput;
string countWordOutput;
// string ncbiTaxonomyNames;
// string outputFile;
string mergedZeroFile;
vector<int> taxIds;
vector<double> expections;
string ncbiNames;
vector<int> wordSize;
string sequenceFile;
for ( int i( 1 ); i < argc; ++i )
{
if ( strcmp( argv[i], "-f" ) == 0 )
fastAnalysis = true;
else if ( strcmp( argv[i], "-c" ) == 0 )
getCoverageInformation = true;
else if ( strcmp( argv[i], "-p" ) == 0 )
secondParsed = true;
if ( strcmp( argv[i], "-t" ) == 0 )
taxInfo = argv[i + 1];
if ( strcmp( argv[i], "-b" ) == 0 )
countWordOutput = argv[i + 1];
if ( strcmp( argv[i], "-ids" ) == 0 )
{
for ( int j( i + 1 ); j < argc; ++j )
{
if ( argv[j][0] == '-' )
break;
taxIds.push_back( atoi( argv[j] ) );
}
}
if ( strcmp( argv[i], "-exp" ) == 0 )
{
for ( int j( i + 1 ); j < argc; ++j )
{
if ( argv[j][0] == '-' )
break;
expections.push_back( atof( argv[j] ) );
}
}
if ( strcmp( argv[i], "-w" ) == 0 )
{
for ( int j( i + 1 ); j < argc ; ++j )
{
if ( argv[j][0] == '-' )
break;
wordSize.push_back( atoi( argv[j] ) );
}
}
if ( strcmp( argv[i], "-m" ) == 0 )
{
for ( int j( i + 1 ); j < argc; ++j )
{
if ( argv[j][0] == '-' )
break;
FILE *mergeAOut = fopen( argv[j], "r" );
if ( mergeAOut == NULL )
{
cerr << "Can not read " << argv[j] << endl;
return 1;
}
mergeAOutput.push_back( mergeAOut );
}
}
if ( strcmp( argv[i], "-n" ) == 0 )
ncbiNames = argv[i + 1];
if ( strcmp( argv[i], "-z" ) == 0 )
mergedZeroFile = argv[i + 1];
if ( strcmp( argv[i] , "-s" ) == 0 )
sequenceFile = argv[i + 1];
}
if ( referenceInfo )
{
cerr << "Parsing the reference information" << endl;
loadFileNumToTaxIds( taxInfo );
map<int, TaxInformation> taxInformation = loadTaxInformationForDatabase( taxInfo, wordSize.size(), ncbiNames );
// getSequenceLengths(taxInformation, mergAOutput[0]);
cerr << "taxinfo " << taxInformation.size() << endl;
// getRefInfoOfBinarys(taxInformation, countWordOutput);
getRefInfoOfCount( taxInformation, countWordOutput, wordSize );
}
else if ( getCoverageInformation )
{
cerr << "getcoverageInformation " << endl;
FILEMAP fileInfo;
parseForCoverage( countWordOutput, mergeAOutput, fileInfo );
}
else if ( countTaxLevel )
{
getSecondaryInformation( countWordOutput );
}
else if ( fastAnalysis )
{
TAXMAP taxInformation = loadTaxInformationForDatabase( taxInfo, wordSize.size(), ncbiNames );
// for (TAXMAP::iterator it = taxInformation.begin(); it != taxInformation.end(); it++){
// if( (*it).second.taxLevel_ == 5)
//cout << (*it).first << "\t" <<(*it).second.files_.size() << endl;
//}
int minWordLength = 300;
for ( unsigned int i ( 0 ); i < wordSize.size(); ++i )
{
cerr << "Searching for length " << wordSize[i] << endl;
minWordLength = ( minWordLength > wordSize[i] ) ? wordSize[i] : minWordLength;
}
READMAP readMap = loadReadInformation( sequenceFile );
vector<BWTMAP> bwtInfo = getBWTInformation( countWordOutput, minWordLength, wordSize, mergedZeroFile, readMap );
cerr << "start getting taxa" << endl;
getTaxCountThroughBWTInfo( bwtInfo, taxInformation, wordSize );
cerr << "Simple Analysis done " << endl;
cerr << "Got all file Infos " << endl;
FILEMAP fileInfo;
printTaxTree( taxInformation, fileInfo, wordSize );
cerr << "printed taxTree" << endl;
}
else if ( secondParsed )
{
cerr << "Expecting already parsed BWT output" << endl;
TAXMAP taxInformation = loadTaxInformationForDatabase( taxInfo, wordSize.size(), ncbiNames );
vector<BWTMAP> bwtInfo = getBWTInformationThroughOnceParsed( countWordOutput, wordSize );
cerr << "got bwtinfo " << bwtInfo.size() << endl;
getTaxCountThroughBWTInfo( bwtInfo, taxInformation, wordSize );
FILEMAP fileInfo;
printTaxTree( taxInformation, fileInfo, wordSize );
}
else if ( testTaxCount )
{
cout << "Test Tax Count" << endl;
if ( taxIds.size() != expections.size() )
{
cerr << "TaxIds and counts have different size Abbort " << endl;
return -1;
}
TAXMAP taxInformation = loadTaxInformationForDatabase( taxInfo, wordSize.size(), ncbiNames );
cerr << "TaxSize " << taxInformation.size() << endl;
int minWordLength = 300;
for ( unsigned int i ( 0 ); i < wordSize.size(); ++i )
minWordLength = ( minWordLength > wordSize[i] ) ? wordSize[i] : minWordLength;
READMAP readMap = loadReadInformation( sequenceFile );
vector<BWTMAP> bwtInfo = getBWTInformation( countWordOutput, minWordLength, wordSize, mergedZeroFile, readMap );
getTaxCountThroughBWTInfo( bwtInfo, taxInformation, wordSize );
cerr << "Got taxa Count " << endl;
countWordTest( taxInformation, taxIds, expections );
}
}
void printTaxTree( TAXMAP &taxInfo, FILEMAP &fileInfo, vector<int> wordMinSize )
{
cerr << "print taxa Tree" << endl;
cerr << "file info " << fileInfo.size() << endl;
for ( unsigned int s ( 0 ); s < wordMinSize.size(); ++s )
{
cout << wordMinSize[s] << endl;
stringstream ss;
ss << wordMinSize[s];
ofstream output( ss.str().c_str(), ios::out );
for ( unsigned int level = 0 ; level <= taxLevelSize ; ++level )
{
// cerr << "l" << level << endl;
for ( TAXMAP::iterator iter = taxInfo.begin() ; iter != taxInfo.end(); ++iter )
{
if ( ( *iter ).second.taxLevel_ == level )
{
output << "TAXA\t" << level << "\t" << ( *iter ).first << "\t" << ( *iter ).second.wordCountPerSize_[s] << "\t" << ( *iter ).second.name_ << "\t" << ( *iter ).second.parentId_ << endl ;
/* if(level == (taxLevelSize -1) && (*iter).second.wordCountPerSize_[s] >30)
{
vector<int> files = (*iter).second.files_;
for(unsigned int j(0);j < files.size(); j++){
output << "FILE " << (*iter).second.files_[j] <<endl;
FileInformation file = fileInfo[files[j]];
vector<unsigned int> suffixPos = file.suffixPos_;
for(unsigned int k(0); k < suffixPos.size(); k++){
if(file.suffixLength_[k] >= wordMinSize[s]){
output << "SUFF " << suffixPos[k]<< " ";
if(suffixPos[k] > file.sequenceLength_)
cerr << "suffixPosition is wrong "<< suffixPos[k] << endl;
for(unsigned int c(0) ; c <file.charCounts_[k].size(); c++)
output << file.charCounts_[k][c] << " ";
output << endl;
}
}
}
}*/
}
}
}
output.close();
}
// Resetting total counts including children
cerr << "Resetting total counts including children" << endl;
for ( TAXMAP::iterator iter = taxInfo.begin() ; iter != taxInfo.end(); ++iter )
for ( unsigned int s ( 0 ); s < wordMinSize.size(); ++s )
iter->second.wordCountPerSizeOfChildren_[s] = 0;
// Calculating total counts including children
cerr << "Calculating total counts including children" << endl;
for ( int level = taxLevelSize ; level >= 0 ; --level )
{
for ( TAXMAP::iterator iter = taxInfo.begin() ; iter != taxInfo.end(); ++iter )
{
if ( ( *iter ).second.taxLevel_ == level )
{
// cerr << iter->first << " " << iter->second.parentId_ << endl;
TAXMAP::iterator parent = taxInfo.find( iter->second.parentId_ );
assert( parent != taxInfo.end() );
for ( unsigned int s ( 0 ); s < wordMinSize.size(); ++s )
parent->second.wordCountPerSizeOfChildren_[s] += iter->second.wordCountPerSize_[s] + iter->second.wordCountPerSizeOfChildren_[s];
}
}
}
// LJ version for Krona
cerr << "Krona output" << endl;
/*
for ( unsigned int s ( 0 ); s < wordMinSize.size(); ++s )
{
cout << wordMinSize[s] << endl;
}
*/
{
ofstream output( "metaBeetl_krona.html", ios::out );
printKronaHeader( output );
printKronaDatasets( output, wordMinSize );
TAXMAP::iterator topLevel = taxInfo.find( 1 ); // top level has taxonomy Id 1
assert( topLevel != taxInfo.end() );
printKronaChildren( topLevel, output, 0, taxInfo, wordMinSize.size() );
printKronaFooter( output );
output.close();
}
}
/*
Look at the parsing output (for the different word lengths) to see
how much could be classified at each taxonomic level;
this helps to build a statistical overview of how much could be found.
*/
void getSecondaryInformation( string countWordOutput )
{
ifstream parsedIn( countWordOutput.c_str(), ios::in );
vector<uint64_t> taxLevelCount;
for ( unsigned int level ( 0 ); level < taxLevelSize; ++level )
taxLevelCount.push_back( 0 );
string line;
while ( parsedIn.good() )
{
//TAXA 1 976 5118577658
getline( parsedIn, line );
cout << line << endl;
if ( line.length() > 8 )
{
vector<string> splitLine = splitString( line, " " );
taxLevelCount[atoi( splitLine[1].c_str() )] += atol( splitLine[3].c_str() );
}
}
for ( unsigned int lev( 0 ); lev < taxLevelSize; ++lev )
cout << taxLevelNames[lev] << " " << taxLevelCount[lev] << endl;
}
void getRefInfoOfCount( TAXMAP &taxInfo, string parseWordOut, vector<int> wordSizes )
{
ifstream wordIn( parseWordOut.c_str(), ios::in );
string line;
int lineCount( 0 );
int wordSize( 0 );
// int wordIndex(0);
while ( wordIn.good() )
{
lineCount++;
if ( lineCount % 1000000 == 0 )
cerr << " . " << taxInfo[2].wordCountPerSize_[35];
getline( wordIn, line );
// cout << line << endl;
// int taxId;
vector<string> countBHit = splitString( line, " " );
//database information found
if ( line[0] == 'B' )
{
int taxLevel = atoi( countBHit[1].c_str() );
if ( wordSize != ( int )countBHit[4].length() )
{
wordSize = countBHit[4].length();
for ( unsigned int i( 0 ); i < wordSizes.size(); ++i )
if ( ( wordSize = ( wordSizes[i] ) ) != 0 )
{
// wordIndex = i;
break;
}
}
vector<unsigned short > fileNumbers ;
vector<string> fileNumberString = splitString( countBHit[5], ":" );
// cout << countBHit[4] <<endl;
for ( unsigned int i( 0 ); i < fileNumberString.size(); ++i )
{
fileNumbers.push_back( atoi( fileNumberString[i].c_str() ) );
//cout << fileNumbers[i]<<endl;
}
vector<unsigned int > sharedTaxIds;
sharedTaxIds.resize( taxLevelSize );
vector<bool> intervalSameTaxa = intervalInSameTaxa( sharedTaxIds, fileNumbers );
taxInfo[sharedTaxIds[taxLevel]].wordCountPerSize_[wordSize]++;
// cerr <<"level " << taxLevel <<endl;
for ( int i( taxLevelSize - 1 ) ; i >= 0 ; --i )
{
// cerr << i <<" "<< sharedTaxIds[i] <<endl;
// if(intervalInSameTaxa[i])
if ( sharedTaxIds[i] != 0 )
break;
}
// else
//cerr <<"there is something wrong" <<endl;
}
}
//add up the counts for the individual tax levels
// double classWordSum = 0;
// map<int, int> taxIdToLength;
for ( unsigned int s( 0 ); s < wordSizes.size() ; ++s )
{
for ( int level ( taxLevelSize - 1 ) ; level >= -1; --level )
{
for ( TAXMAP::iterator it = taxInfo.begin(); it != taxInfo.end(); ++it )
{
//add the counts of the children of this taxonomic node to the node's own count
double childrenCount = 0;
for ( TAXMAP::iterator ch = taxInfo.begin(); ch != taxInfo.end(); ++ch )
{
    if ( ( int )( *ch ).second.parentId_ == it->first )
        childrenCount += ( *ch ).second.wordCountPerSize_[s];
}
it->second.wordCountPerSize_[s] += childrenCount;
}
}
}
// print information
for ( unsigned int s( 0 ); s < wordSizes.size() ; ++s )
{
stringstream ss;
ss << wordSizes[s] << "refInfo";
ofstream output( ss.str().c_str(), ios::out );
for ( int level ( taxLevelSize - 1 ) ; level >= -1; --level )
{
for ( TAXMAP::iterator it = taxInfo.begin(); it != taxInfo.end(); ++it )
{
if ( it->second.taxLevel_ == level )
{
output << "REF " << level << " " << it->first << " " << it->second.wordCountPerSize_[s] << " " << it->second.seqLengthSum_ << endl;
}
}
}
output.close();
}
}
vector<BWTMAP> getBWTInformationThroughOnceParsed( string parsedBWTOutput, vector<int> wordSizes )
{
ifstream largestBWTOutput( parsedBWTOutput.c_str(), ios::in );
vector<BWTMAP> bwtInfo;
bwtInfo.resize( wordSizes.size() );
string line;
int lineCount( 0 );
while ( largestBWTOutput.good() )
{
if ( lineCount % 100000 == 0 )
cerr << lineCount << " ";
lineCount ++;
getline( largestBWTOutput, line );
if ( line.length() > 10 )
{
vector<string> splitLine = splitString( line, " " );
// 146065189 287 58 1 4 0:0:0:2:2:0: 0:0:0:4:0:0: 2544:2208:2974:4163:
// bwt = 146065189, taxId = 287, wordLength = 58, pileNum = 1, 4 = readCount, 0:0:0:2:2:0: = countA (reads), 0:0:0:4:0:0: = countB (reference), 2544:2208:2974:4163: = fileNumbers
uint64_t bwtPosition = atol( splitLine[0].c_str() );
unsigned int wordLength = atoi( splitLine[2].c_str() );
bool firstBWT = true;
BWTInformation bwt;
//save this BWT position for every requested word size that it qualifies for
for ( unsigned int s( 0 ); s < wordSizes.size(); ++s )
{
//take the smallest possible bwt information for a bwt position
if ( ( int )wordLength >= wordSizes[s]
&& bwtInfo[s].find( bwtPosition ) == bwtInfo[s].end() )
{
//set all the BWT information only once for each newly saved BWT position
if ( firstBWT )
{
unsigned int taxId = atoi( splitLine[1].c_str() );
unsigned short pileNum = atoi( splitLine[3].c_str() );
vector<string> countAstring = splitString( splitLine[5], ":" );
vector<unsigned int> countA;
int readCount = 0;
// cout <<line <<endl;
for ( unsigned int i( 0 ); i < countAstring.size(); ++i )
{
countA.push_back( atoi( countAstring[i].c_str() ) );
readCount += countA[i];
}
//cout << readCount <<endl;
vector<string> countsBString = splitString( splitLine[6], ":" ); // column 6 = countB (see format comment above)
vector<unsigned int> countB;
for ( unsigned int i( 0 ); i < countsBString.size(); ++i )
countB.push_back( atoi( countsBString[i].c_str() ) );
vector<string> fileNumbersString = splitString( splitLine[7], ":" ); // column 7 = fileNumbers
vector<unsigned short > fileNumbers;
for ( unsigned int i( 0 ); i < fileNumbersString.size(); ++i )
fileNumbers.push_back( atoi( fileNumbersString[i].c_str() ) );
bwt.pileNum_ = pileNum;
bwt.readCount_ = readCount;
bwt.wordLength_ = wordLength;
bwt.taxId_ = taxId;
bwt.charACount_ = countA;
bwt.charBCount_ = countB;
bwt.fileNumbers_ = fileNumbers;
firstBWT = false;
}
bwtInfo[s][bwtPosition] = bwt;
}
}
}
}
cerr << "got all bwtInfo " << bwtInfo.size() << endl;
return bwtInfo;
}
READMAP loadReadInformation( string seqFile )
{
ifstream seqs( seqFile.c_str(), ios::in );
string line;
READMAP readMap;
int seqCount( 0 );
while ( seqs.good() )
{
getline( seqs, line );
readMap[seqCount] = line;
seqCount++;
}
return readMap;
}
vector<BWTMAP> getBWTInformation( string countWordOutput, int minWordLength, vector<int> wordSizes, string mergedZeroFile, READMAP readMap )
{
ifstream wordCount( countWordOutput.c_str(), ios::in );
string line;
vector<BWTMAP> bwtInfo;
bwtInfo.resize( wordSizes.size() );
int lineCount( 0 );
bool firstHit = false;
cerr << "Min wordlength " << minWordLength << endl;
int wordSizeReached = wordSizes[0];
int indexWordSize( 0 );
FILE *mergeZero = fopen( mergedZeroFile.c_str(), "r" );
unsigned genomeLength;
vector<unsigned> genomeLengths;
if ( mergeZero == NULL )
cerr << "File A-00 is not readable. The reads won't be normalized by the genome Length!" << endl;
if ( mergeZero != NULL )
{
while ( fread( &genomeLength, sizeof( unsigned ), 1, mergeZero ) == 1 )
{
genomeLengths.push_back( genomeLength );
}
fclose( mergeZero );
}
else
{
for ( unsigned int i ( 0 ); i < 100000; ++i )
genomeLengths.push_back( 1 );
}
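// Without a readable A-00 file every genome length defaults to 1, so the read
// counts computed below are effectively left un-normalized.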
while ( wordCount.good() )
{
lineCount++;
if ( ( lineCount % 10000000 ) == 0 )
{
cerr << lineCount << " bwt " << wordSizeReached << " " << bwtInfo[indexWordSize].size() << endl;
}
getline( wordCount, line );
if ( line.substr( 0, 5 ).compare( "MTAXA" ) == 0 )
{
vector<string> splitLine = splitString( line, " " );
if ( firstHit || ( int )splitLine[3].length() > minWordLength )
{
if ( !firstHit )
cerr << line << endl;
firstHit = true;
//MTAXA 6 562 AAAAAAAAAAAAAAAAAAAAAAAAAAAAACCCCCCCCCCCCCCCCC 1455 0:2:0:0:0:0 0:0:1:0:0:0 2247:
// MTAXA 6 37734 ACTAGGGGTCCA 982822322 0:3:1:1:0:2 0:2:0:0:0:0 299:301:
int readCount = 0;
uint64_t bwtPosition = atol( splitLine[4].c_str() );
unsigned short taxLevel = ( unsigned short ) atoi( splitLine[1].c_str() );
int wordLength = splitLine[3].length();
bool firstBWT = true;
BWTInformation bwt;
//save this BWT position for every requested word size that it qualifies for
for ( unsigned int s( 0 ); s < wordSizes.size(); ++s )
{
//take the smallest possible BWT positions for each word length of interest.
//this also means the highest possible count for these lengths
if ( wordLength >= wordSizes[s]
&& bwtInfo[s].find( bwtPosition ) == bwtInfo[s].end() )
{
wordSizeReached = wordSizes[s];
indexWordSize = s;
//compute the BWT information only once for each line which fits.
if ( firstBWT )
{
int taxId = atoi( splitLine[2].c_str() );
vector<string> readCounts = splitString( splitLine[5], ":" );
vector<unsigned int> charACount;
for ( unsigned int i( 0 ); i < readCounts.size(); ++i )
{
readCount += atoi( readCounts[i].c_str() );
charACount.push_back( atoi( readCounts[i].c_str() ) );
}
vector<string> fileCounts = splitString( splitLine[6], ":" );
vector<unsigned int> charBCount;
for ( unsigned int i( 0 ); i < fileCounts.size(); ++i )
charBCount.push_back( atoi( fileCounts[i].c_str() ) );
vector<string> fileNumbers = splitString( splitLine[7], ":" );
vector<unsigned short> fileNums;
uint64_t genomeLengthsSum( 0 ) ;
for ( unsigned int i( 0 ); i < fileNumbers.size() - 1; ++i )
{
unsigned short fileNum = ( unsigned short ) atoi( fileNumbers[i].c_str() );
fileNums.push_back( fileNum );
genomeLengthsSum += genomeLengths[fileNum];
}
double averageLengths = ( double ) genomeLengthsSum / ( double ) fileNums.size();
int pileNum = whichPile( splitLine[3][0] );
for ( READMAP::iterator it = readMap.begin(); it != readMap.end(); ++it )
{
//find the sequence for the suffix
if ( it->second.find( splitLine[3] ) != string::npos )
bwt.readIds.push_back( it->first );
}
bwt.pileNum_ = pileNum;
bwt.readCount_ = ( double ) readCount / averageLengths;
bwt.wordLength_ = splitLine[3].length();
bwt.taxId_ = taxId;
bwt.charACount_ = charACount;
bwt.charBCount_ = charBCount;
bwt.fileNumbers_ = fileNums;
bwt.taxLevel_ = taxLevel;
firstBWT = false;
}
bwtInfo[s][bwtPosition] = bwt;
}
}
}
}
}
cerr << "got all bwtInformation " << bwtInfo.size() << endl;
stringstream ss;
ss << countWordOutput << "_largestBWT" ;
ofstream bwtOut( ss.str().c_str(), ios::out );
cerr << "print BWT information in " << countWordOutput << "_largestBWT" << endl;
for ( unsigned int b( 0 ) ; b < bwtInfo.size(); ++b )
{
cerr << b << " " << bwtInfo[b].size() << endl;
for ( BWTMAP::iterator it = bwtInfo[b].begin(); it != bwtInfo[b].end(); ++it )
{
BWTInformation bwt = it->second;
bwtOut << it->first << " " << bwt.taxId_ << " " << bwt.wordLength_
<< " " << bwt.pileNum_ << " " << bwt.readCount_ << " " ;
for ( unsigned int i( 0 ) ; i < bwt.charACount_.size(); ++i )
bwtOut << bwt.charACount_[i] << ":" ;
bwtOut << " ";
for ( unsigned int i( 0 ); i < bwt.charBCount_.size(); ++i )
bwtOut << bwt.charBCount_[i] << ":" ;
bwtOut << " ";
for ( unsigned int i( 0 ); i < bwt.fileNumbers_.size(); ++i )
bwtOut << bwt.fileNumbers_[i] << ":";
bwtOut << endl;
}
}
bwtOut.close();
return bwtInfo;
}
/* Get the word counts for each word size in taxInformation */
void getTaxCountThroughBWTInfo( vector<BWTMAP> &bwtInfo, TAXMAP &taxInfo, vector<int> wordSize )
{
// vector<double> classWordCount;
// int words(0);
for ( unsigned int s ( 0 ) ; s < wordSize.size(); ++s )
{
cerr << "getting tax count for " << wordSize[s] << " " << bwtInfo[s].size() << endl;
int minWord = wordSize[s];
double wordCLevel( 0 );
for ( BWTMAP::iterator bwtIt = bwtInfo[s].begin(); bwtIt != bwtInfo[s].end(); ++bwtIt )
{
auto searchedItem = taxInfo.find( ( int )( *bwtIt ).second.taxId_ );
if ( searchedItem != taxInfo.end() )
{
searchedItem->second.wordCountPerSize_[s] += ( *bwtIt ).second.readCount_;
wordCLevel += ( *bwtIt ).second.readCount_;
}
}
cerr << "overall count for " << minWord << " " << wordCLevel << endl;
}
//add the counts for the single taxLevel up
/* double classWordSum = 0;
for(int s(0); s<wordSize.size() ; s++){
for(int level (taxLevelSize -1) ; level >=-1; level--){
for(TAXMAP::iterator it = taxInfo.begin(); it != taxInfo.end(); it++){
//add the count of the children from the taxonomic node to the count of the taxonomic node
int childrenCount = 0;
for(TAXMAP::iterator ch = taxInfo.begin(); ch != taxInfo.end(); ch ++){
if((*ch).first != 0){
if( (*ch).second.parentId_ == (*it).first)
childrenCount += (*ch).second.wordCountPerSize_[s];
}
}
(*it).second.wordCountPerSize_[s] += childrenCount;
}
}
}*/
/*
map<int,double> taxIdToCount;
double testSuffixCount(0);
for(int level (taxLevelSize-1) ; level >= 0; level--){
double levelSum(0);
for(TAXMAP::iterator it = taxInfo.begin(); it != taxInfo.end();it++){
double suffixSum(0);
int taxId = (*it).first;
if((*it).second.taxLevel_ == level ){
//get the tax count
for(unsigned int bwtPos(0); bwtPos < (*it).second.wordCounts_.size(); bwtPos++){
if((*it).second.wordLengths_[bwtPos] >= wordSize[0]){
suffixSum += (*it).second.wordCounts_[bwtPos];
testSuffixCount += (*it).second.wordCounts_[bwtPos];
}
}
// cerr <<suffixSum <<endl;
//get the children counts (which should already be processed
for(TAXMAP::iterator its =taxInfo.begin(); its != taxInfo.end(); its ++){
if((*its).second.parentId_ != 0 && (*its).second.parentId_ == taxId){
suffixSum += taxIdToCount[(*its).first];
// cerr << "got parent " <<taxId << " : " << (*its).first<<endl;
}
}
taxIdToCount[taxId] = suffixSum;
// if(taxId == 37734 || taxId == 1350)
if(level ==0)
cerr << taxId << "\t" << suffixSum <<endl;
double countDividedByReadNumber =suffixSum / classWordSum;
levelSum += countDividedByReadNumber;
taxInfo[taxId].normalisedCount_ = countDividedByReadNumber;
cout << "TAX\t" <<taxId << "\t" << suffixSum <<endl;
}
}
}
cerr << testSuffixCount <<" " <<endl;*/
}
void countWordTest( TAXMAP &taxInfo, vector<int> taxIDsInTest, vector<double> testExpections )
{
map<int, double> expectedCountsPerId;
for ( unsigned int i ( 0 ); i < taxIDsInTest.size(); ++i )
expectedCountsPerId[taxIDsInTest[i]] = testExpections[i];
//get all possible taxIds and their expected Counts
for ( int level( taxLevelSize - 1 ); level >= 0; --level )
{
for ( TAXMAP::iterator it = taxInfo.begin(); it != taxInfo.end(); ++it )
{
if ( it->second.taxLevel_ == level )
{
double expCount = ( expectedCountsPerId.find( it->first ) != expectedCountsPerId.end() )
? expectedCountsPerId[it->first]
: 0 ;
bool inTest ( false );
for ( TAXMAP::iterator child = taxInfo.begin(); child != taxInfo.end(); ++child )
{
if ( ( ( int )( *child ).second.parentId_ == it->first )
&& ( expectedCountsPerId.find( ( *child ).first ) != expectedCountsPerId.end() ) )
{
expCount += expectedCountsPerId[( *child ).first];
inTest = true;
}
}
if ( inTest )
{
expectedCountsPerId[it->first] = expCount;
}
}
}
}
cout << "expectedCounts " << expectedCountsPerId.size() << endl;
// map<int, double>::iterator its;
// for(its = expectedCountsPerId.begin(); its != expectedCountsPerId.end(); its++)
// cout << (*its).first <<"\t" << (*its).second <<endl;
for ( int level ( taxLevelSize - 1 ); level >= 0; --level )
{
double wronglyClassified( 0 );
double rightlyClassified( 0 );
double allClassified( 0 );
double overestimated( 0 );
double underestimated( 0 );
double plainWrong( 0 );
for ( TAXMAP::iterator it = taxInfo.begin(); it != taxInfo.end(); ++it )
{
if ( it->second.taxLevel_ == level )
{
//if the id was in the testDataset
if ( expectedCountsPerId.find( it->first ) != expectedCountsPerId.end() )
{
//if more was found than expected, take the overestimation as wrong, the rest as right
if ( expectedCountsPerId[it->first] <= it->second.normalisedCount_ )
{
rightlyClassified += expectedCountsPerId[it->first];
wronglyClassified += it->second.normalisedCount_ - expectedCountsPerId[it->first];
overestimated += it->second.normalisedCount_ - expectedCountsPerId[it->first];
allClassified += it->second.normalisedCount_;
// cout << (*it).first << " exp smaller " << rightlyClassified << " " << wronglyClassified << endl;
}
//if there was less found than expected, take all that was found as correct and the rest of what was not found as wrong
else
{
rightlyClassified += it->second.normalisedCount_;
wronglyClassified += expectedCountsPerId[it->first] - it->second.normalisedCount_;
underestimated += expectedCountsPerId[it->first] - it->second.normalisedCount_;
allClassified += it->second.normalisedCount_;
//cout << (*it).first << " exp smaller " << rightlyClassified << " " << wronglyClassified << endl;
}
}
//take all which was not expected as wrong
else
{
plainWrong += it->second.normalisedCount_;
wronglyClassified += it->second.normalisedCount_;
allClassified += it->second.normalisedCount_;
// cout << (*it).first << " exp not there " << rightlyClassified << " " << wronglyClassified << endl;
}
}
}
cout << "Level " << level << endl << "wrong " << wronglyClassified << endl << "right " << rightlyClassified << endl << "all " << allClassified << endl;
cout << "over " << overestimated << endl << "under " << underestimated << endl << " wrong " << plainWrong << endl;
}
}
int whichPile( char c )
{
// TODO: use alphabet.hh
switch ( c )
{
case 'A':
return 1;
case 'C':
return 2;
case 'G':
return 3;
case 'N':
return 4;
case 'T':
return 5;
default:
return 6 ;
}
}
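// Illustrative note (not part of the original source): whichPile maps a base character to its
// BWT pile index, e.g. whichPile('A') == 1 and whichPile('T') == 5; any character outside
// {A,C,G,N,T} falls into the catch-all pile 6.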
// To repair the referenceTEstOutput
vector<bool> intervalInSameTaxa( vector<unsigned int> &sharedTaxIds, vector<unsigned short> &fileNumbers )
{
//first get the matching fileNumbers out of the file that maps the BWT positions of the merging to file numbers
vector<bool> taxSame;
taxSame.resize( taxLevelSize );
//check whether the files share the same taxon at each level of the taxonomic tree or whether they differ
bool sameTaxa( false );
cout << "intervalInSame " << fileNumbers.size() << endl;
cout << fileNumToTaxIds.size() << endl;
for ( unsigned int i( 0 ) ; i < taxLevelSize; ++i )
{
for ( unsigned int j( 0 ); j < fileNumbers.size() - 1; ++j )
{
cout << "Number " << fileNumbers[j] << endl;
if ( fileNumToTaxIds[fileNumbers[j]][i] == fileNumToTaxIds[fileNumbers[j + 1]][i]
&& fileNumToTaxIds[fileNumbers[j]][i] != 0
&& fileNumToTaxIds[fileNumbers[j + 1]][i] != 0 )
{
cout << "true" << endl;
sameTaxa = true;
}
else
{
//if one of the taxa differs from the one before, that is enough to set all to false
sameTaxa = false;
break;
}
}
taxSame[i] = sameTaxa;
if ( taxSame[i] )
sharedTaxIds[i] = fileNumToTaxIds[fileNumbers[0]][i];
else
sharedTaxIds[i] = 0;
}
return taxSame;
}
void loadFileNumToTaxIds( string taxIdNames )
{
ifstream taxas( taxIdNames.c_str(), ios::in );
string line;
while ( taxas.good() )
{
vector<int> taxIDs;
getline( taxas, line );
if ( line.compare( "" ) != 0 )
{
unsigned int fileNum = atoi( line.substr( 0, line.find( " " ) ).c_str() );
line = line.substr( line.find( " " ) + 1, line.length() );
while ( line.find( " " ) != string::npos )
{
taxIDs.push_back( atoi( line.substr( 0, line.find( " " ) ).c_str() ) );
line = line.substr( line.find( " " ) + 1, line.length() );
}
taxIDs.push_back( atoi( line.c_str() ) );
if ( taxIDs.size() < taxLevelSize )
{
cerr << "Tax Ids have not enough taxonomic Information. Only " << taxIDs.size() << " could be found " << endl
<< "Will add unknown taxa until size is right" << endl;
for ( unsigned int i( taxIDs.size() - 1 ) ; i < taxLevelSize; ++i )
taxIDs.push_back( 0 );
}
if ( taxIDs.size() > taxLevelSize )
cerr << "Tax Ids have to much taxonomic information. "
<< "Please note, that the taxonomic information about one file should be shown as: " << endl
<< "FileNumber Superkingdom Phylum Order Family Genus Species" << endl;
fileNumToTaxIds.push_back( taxIDs );
unsigned int test = fileNum + 1;
if ( test != fileNumToTaxIds.size() )
cout << "Wrong filenumber " << fileNum << " " << fileNumToTaxIds.size() << endl;
}
}
cout << " fineNumToTaxIds " << fileNumToTaxIds.size() << endl;
}
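// Illustrative input line for the taxIdNames file (hypothetical values), following the
// "FileNumber Superkingdom Phylum Order Family Genus Species" layout expected above:
// 0 2 1239 186826 81852 1350 1351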
void parseForCoverage( string countWordOutput, vector<FILE *> mergeAOutput, FILEMAP &fileInfo )
{
ifstream wordCount( countWordOutput.c_str(), ios::in );
string line;
int count( 0 );
while ( wordCount.good() )
{
count++;
if ( ( count % 10000 ) == 0 )
{
cerr << count << " " << fileInfo.size() << endl;
}
// int fileNumber;
// FileInformation info;
getline( wordCount, line );
// cout << line<<endl;
vector<string> splitLine = splitString( line, " " );
if ( splitLine[0].compare( "MTAXA" ) == 0 )
{
//MTAXA 2 91061 GGCTGCCAACTAA 1197996044 0:0:2:3:0:5 0:0:0:0:0:7 1121:1123:816:77:75:1460:1462:
uint64_t BWTPosition = atol( splitLine[4].c_str() );
vector<string> fileNumbersStrings = ( splitLine[7].compare( "" ) == 0 ) ? splitString( splitLine[8], ":" ) : splitString( splitLine[7], ":" );
vector<int> fileNumbers;
for ( unsigned int i( 0 ); i < fileNumbersStrings.size() - 1 ; ++i )
{
fileNumbers.push_back( atoi( fileNumbersStrings[i].c_str() ) );
//cerr << "string " << fileNumbersStrings[i] << " int " << atoi(fileNumbersStrings[i].c_str()) <<endl;
}
unsigned int fileCounts = fileNumbers.size();
fseek( mergeAOutput[whichPile( splitLine[3][0] )], ( BWTPosition * sizeof( unsigned ) ) , SEEK_SET );
unsigned *suffStarts = ( unsigned * ) malloc ( ( fileCounts ) * ( sizeof( unsigned ) ) );
// cout <<"got unsigned " <<endl;
//read the start of the words out of the mergeA-output
assert( fread( suffStarts, sizeof( unsigned ), fileCounts, mergeAOutput[whichPile( splitLine[3][0] )] ) == fileCounts );
// cout << "Read suff worked " <<endl;
int readsSuffCount( 0 );
vector<string> countsA = splitString( splitLine[5], ":" );
for ( unsigned int i( 0 ); i < countsA.size(); ++i )
readsSuffCount += atoi( countsA[i].c_str() );
for ( unsigned int i( 0 ); i < fileCounts; ++i )
{
//if(fileNumbers[i] != fileNum[i] )
int fileNumber = fileNumbers[i];
//cerr<<"fileNumber " << fileNumber << " " << splitLine[7] << endl;
fileInfo[fileNumber].suffixPos_.push_back( suffStarts[i] );
fileInfo[fileNumber].suffixLengths_.push_back( splitLine[3].length() );
fileInfo[fileNumber].suffixCounts_.push_back( readsSuffCount );
fileInfo[fileNumber].suffixChar_.push_back( splitLine[3][0] );
fileInfo[fileNumber].bwtPositions_.push_back( BWTPosition );
//if(fileNumber != fileNum[i])
// cerr <<"There is something wrong with the fileNumber" <<endl;
}
// cout << count <<endl;
free( suffStarts );
// delete fileNum;
}
}
//keep only the unique suffix information (deduplicate by suffix position)
for ( FILEMAP::iterator it = fileInfo.begin(); it != fileInfo.end() ; ++it )
{
vector<unsigned> uniqueSuffixPos;
vector< int > uniqueSuffixCounts;
vector<unsigned short > uniqueSuffixLengths;
vector<char> uniqueSuffixChar;
vector<uint64_t > uniqueBWTs;
if ( it->second.suffixPos_.size() > 1 )
for ( unsigned int i( 0 ); i < it->second.suffixPos_.size() ; ++i )
{
bool foundSuff ( false );
int suffPosition( 0 );
for ( unsigned int j( 0 ); j < uniqueSuffixPos.size() ; ++j )
{
if ( uniqueSuffixPos[j] == it->second.suffixPos_[i] )
{
foundSuff = true;
suffPosition = j;
break;
}
}
if ( !foundSuff )
{
uniqueSuffixPos.push_back( it->second.suffixPos_[i] );
uniqueSuffixCounts.push_back( it->second.suffixCounts_[i] );
uniqueSuffixLengths.push_back( it->second.suffixLengths_[i] );
uniqueSuffixChar.push_back( it->second.suffixChar_[i] );
uniqueBWTs.push_back( it->second.bwtPositions_[i] );
}
else if ( uniqueSuffixLengths[suffPosition] < it->second.suffixLengths_[i] )
{
uniqueSuffixLengths[suffPosition] = it->second.suffixLengths_[i];
uniqueSuffixCounts[suffPosition] = it->second.suffixCounts_[i];
}
}
/* if((*it).second.suffixPos_.size() >1){
cerr << "upos " << uniqueSuffixPos.size() <<endl;
cerr << "ulen " << uniqueSuffixLengths.size() <<endl;
cerr << "ucha " << uniqueSuffixChar.size() <<endl;
cerr << "ucou " << uniqueSuffixCounts.size() <<endl;
}*/
it->second.suffixPos_ = uniqueSuffixPos;
it->second.suffixCounts_ = uniqueSuffixCounts;
it->second.suffixLengths_ = uniqueSuffixLengths;
it->second.suffixChar_ = uniqueSuffixChar;
it->second.bwtPositions_ = uniqueBWTs;
}
//Write out the per-file suffix information collected above
// short fileNumberRead;
// unsigned sequenceLength;
//one "F_<fileNumber>" file is written per input file that has more than one suffix
for ( FILEMAP::iterator it = fileInfo.begin(); it != fileInfo.end() ; ++it )
{
if ( it->second.suffixPos_.size() > 1 )
{
stringstream ss;
cerr << "print to " << it->first << endl;
ss << "F_" << it->first;
ofstream output( ss.str().c_str(), ios::out );
cerr << "suffixCount " << it->second.suffixPos_.size() << endl;
for ( unsigned int i( 0 ); i < it->second.suffixPos_.size(); ++i )
{
output << it->second.suffixPos_[i] << " ";
output << it->second.suffixLengths_[i] << " " ;
output << it->second.suffixCounts_[i] << " ";
output << it->second.suffixChar_[i] << " ";
output << it->second.bwtPositions_[i] << endl;
}
output.close();
}
}
}
/*
vector<SequenceInformation> loadSequenceInfo(vector<string> sequenceFiles, vector<string> mergingOutput){
FILE* mergeZero;
vector<SequenceInformation> seqInfo;
SequenceInformation oneSeq;
seqInfo.resize(sequenceFiles.size());
cout << "sequence File count "<< sequenceFiles.size() << endl;
vector<unsigned short> posVector;
//just to test the amout of input files
for(int i(0); i< mergingOutput.size(); i++) {
FILE* merg = fopen(mergingOutput[i].c_str(), "r");
if(getFileSize(merg) % sizeof(unsigned short) != 0){
cerr << "MergfileC " << mergingOutput[i] << " has the wrong size" << endl;
}
if(mergingOutput[i].find("-C00")!=string::npos){
// cout << "found it " << mergingOutput[i] << endl;
mergeZero = fopen(mergingOutput[i].c_str(),"r");
}
}
assert(mergeZero!=NULL);
unsigned short fNum ;
long size = getFileSize(mergeZero);
while(fread(&fNum, sizeof(unsigned short),1,mergeZero) == 1){
posVector.push_back(fNum);
}
cout << "positionSize " << posVector.size() << " seq length " << sequenceFiles. size() << endl;
assert(posVector.size() == sequenceFiles.size());
fclose(mergeZero);
for(int i(0); i< sequenceFiles.size(); i++){
unsigned int sequencelength = 0;
string fastaTag;
ifstream fasFile(sequenceFiles[i].c_str(), ios::in);
string line;
while(fasFile.good()){
getline(fasFile,line);
if(line[0]=='>')
oneSeq.fastaTag_ = line.substr(1,line.length()-1);
else
sequencelength += line.length();
}
oneSeq.fileNum_ = (unsigned short) i;
int found;
found = sequenceFiles[i].find_last_of("/\\");
oneSeq.fileName_ = sequenceFiles[i].substr(found+1);
oneSeq.sequenceLength_ = sequencelength;
seqInfo[i] = oneSe+
*/
int printUsage( string message )
{
cerr << endl << message << endl << endl;
cerr << "Usage: " << endl
<< "parse -f -b file -t file -n file -z mergedAZeroFile -w <int>" << endl
<< "parse -p -b file -t file -n file -w <int>" << endl
<< "parse -c -b file -m <file> " << endl
<< endl;
cerr << "-f\t Fast and simple version, just counts the occurences of the single taxIds and read counts." << endl
<< "-p \t -b should contain the bwt info which was already parsed once " << endl
<< "-c get the coverage information for each file" << endl
<< "-t taxonomic input which was used for the countWord -m call." << endl
<< "-z A-00 file which is a result of the merging of the genomes. Can be found in the same directory as the B-00 file" << endl
<< "-m A-0* files, there is for each BWT-position saved where the corresponding suffix occurs" << endl
<< "-n ncbiNames dmp file " << endl
<< "-w minimal word counts. Gives the results for everything bigger than the chosen words out." << endl;
return 1;
}
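// Example invocation (hypothetical file names), matching the usage strings above:
//   parse -f -b countWord.out -t taxInfo.txt -n names.dmp -z merged-A00 -w 35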
<|start_filename|>src/errors/ErrorCorrectionRange.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "ErrorCorrectionRange.hh"
using namespace std;
bool ErrorCorrectionRange::writeTo( TemporaryFile *pFile, RangeState &currentState ) const
{
Range::writeTo( pFile, currentState );
writeCompressedNum( pFile, static_cast<LetterNumber>( data_.errorIntervalType ) );
writeCompressedNum( pFile, data_.correctionForBwtPosns.size() );
for ( uint i = 0; i < data_.correctionForBwtPosns.size(); i++ )
writeCompressedNum( pFile, data_.correctionForBwtPosns[i] );
writeCompressedNum( pFile, data_.errorsForBwtPosns.size() );
for ( uint i = 0; i < data_.errorsForBwtPosns.size(); i++ )
writeCompressedNum( pFile, data_.errorsForBwtPosns[i] );
return true;
}
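// On-disk layout produced by writeTo and consumed by readFrom below:
//   [Range fields][errorIntervalType][#corrections][correction positions...][#errors][error positions...]
// with every number emitted through writeCompressedNum.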
bool ErrorCorrectionRange::readFrom( TemporaryFile *pFile, RangeState &currentState )
{
if ( Range::readFrom( pFile, currentState ) == false )
return false;
LetterNumber intervalTypeNo;
readCompressedNum( pFile, intervalTypeNo );
data_.errorIntervalType = static_cast<IntervalType>( intervalTypeNo );
assert( data_.correctionForBwtPosns.empty() );
LetterNumber numBwtPosns;
readCompressedNum( pFile, numBwtPosns );
for ( uint i = 0; i < numBwtPosns; i++ )
{
LetterNumber newBwtPos;
readCompressedNum( pFile, newBwtPos );
data_.correctionForBwtPosns.push_back( newBwtPos );
}
assert( data_.errorsForBwtPosns.empty() );
LetterNumber numErrBwtPosns;
readCompressedNum( pFile, numErrBwtPosns );
for ( uint i = 0; i < numErrBwtPosns; i++ )
{
LetterNumber newErrBwtPos;
readCompressedNum( pFile, newErrBwtPos );
data_.errorsForBwtPosns.push_back( newErrBwtPos );
}
return true;
}
<|start_filename|>src/frontends/BeetlCorrect.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BeetlCorrect.hh"
#include "BCRext.hh"
#include "BCRexternalBWT.hh"
#include "Common.hh"
#include "DatasetMetadata.hh"
#include "parameters/BwtParameters.hh"
#include "config.h"
#include "libzoo/cli/Common.hh"
#include "libzoo/util/Logger.hh"
#include "errors/WitnessReader.hh"
#include "errors/HiTECStats.hh"
#include "errors/BwtCorrectorParameters.hh"
#include "errors/BwtCorrector.hh"
#include <algorithm>
#include <cassert>
#include <cstring>
#include <fstream>
#include <iostream>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>
using namespace std;
using namespace BeetlBwtParameters;
int main( const int argc, const char **argv )
{
cout << ",-----. ,------.,------.,--------.,--. ,-----. ,-----. ,------. ,------. ,------. ,-----.,--------." << endl;
cout << "| |) /_ | .---'| .---''--. .--'| | ' .--./' .-. '| .--. '| .--. '| .---'' .--./'--. .--' " << endl;
cout << "| .-. \\| `--, | `--, | | | | | | | | | || '--'.'| '--'.'| `--, | | | | " << endl;
cout << "| '--' /| `---.| `---. | | | '--. ' '--'\\' '-' '| |\\ \\ | |\\ \\ | `---.' '--'\\ | | " << endl;
cout << "`------' `------'`------' `--' `-----' `-----' `-----' `--' '--'`--' '--'`------' `-----' `--' " << endl;
cout << endl;
BwtCorrectorParameters params;
if ( !params.parseArgv( argc, argv ) || params["help"] == 1 || !params.chechRequiredParameters() )
{
params.printUsage();
exit( params["help"] == 0 );
}
// Use default parameter values where needed
params.commitDefaultValues();
string indexPrefix = params.getStringValue( "input filename" );
int readLength = params.getValue( "read length" );
bool compressed;
vector<string> pileNames;
string dummyStr;
BwtReaderBase *dollarPile;
detectInputBwtProperties( indexPrefix, pileNames, compressed, dummyStr );
dollarPile = instantiateBwtPileReader( pileNames[0], params.getStringValue( "use shm" ) );
int numReads = 0;
LetterCount lc;
dollarPile->readAndCount( lc );
for ( int i = 0; i < alphabetSize; i++ )
numReads += lc.count_[i];
//must divide number of reads in half, as it (should) include reverse complements...
numReads /= 2;
delete dollarPile;
double errorRate = ( double )params.getValue( "error rate" ) / ( double )1000000;
int genomeLength = params.getValue( "genome length" );
int minWitnessLength;
if ( params["min witness length"].isSet() )
minWitnessLength = params.getValue( "min witness length" );
else
{
HiTECStats stats(
errorRate,
genomeLength,
numReads,
readLength
);
minWitnessLength = stats.Calculate_wm() - 1;
}
int minSupport = 0;
if ( params["min support"].isSet() )
minSupport = params.getValue( "min support" );
BwtCorrector *corrector = new BwtCorrector(
indexPrefix,
params.getStringValue( "corrections output filename" ),
numReads,
readLength,
errorRate,
genomeLength,
minWitnessLength,
params.getStringValue( "subset" ),
&params,
minSupport
);
corrector->showExecutionPlan();
if ( !params["don't run"].isSet() )
corrector->run();
delete corrector;
return 0;
}
<|start_filename|>src/frontends/BeetlCorrect.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BEETL_CORRECT_HH
#define BEETL_CORRECT_HH
#endif //ifndef BEETL_CORRECT_HH
<|start_filename|>src/libzoo/util/ColorText.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef COLOR_TEXT_HH
#define COLOR_TEXT_HH
#include <iostream>
#include <string>
using std::string;
class ColorText
{
public:
static string startRed;
static string endRed;
static void init( int activateColor = -1 ); // -1 = auto: on for tty supporting colors, off for files
};
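// Hedged usage sketch (assumption, not taken from the original source):
//   ColorText::init();
//   std::cout << ColorText::startRed << "error" << ColorText::endRed << std::endl;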
#endif // COLOR_TEXT_HH
<|start_filename|>src/BCRext/BCRext.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BCRext.hh"
#include "BwtReader.hh"
#include "BwtWriter.hh"
#include "LetterCount.hh"
#include "ReadBuffer.hh"
#include "SeqReader.hh"
#include "Config.hh"
#include "Filename.hh"
#include "Timer.hh"
#include "Tools.hh"
#include "Types.hh"
#include "config.h"
#include "libzoo/util/Logger.hh"
#include <cassert>
#include <cstdlib>
#include <cstring>
#include <fcntl.h>
#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <unistd.h>
#include <vector>
#ifdef HAVE_POSIX_FADVISE
# include <fcntl.h>
#endif
#define BCREXT_ID "@(#) $Id: BCRext.cpp,v 1.8 2011/12/20 14:33:24 tjakobi Exp $"
using namespace std;
//typedef ReadBufferASCII ReadBuffer;
#ifdef USE_4_BITS_PER_BASE
typedef ReadBuffer4Bits ReadBuffer;
#else
#ifdef USE_PREFIX_ONLY
typedef ReadBuffer4Bits ReadBuffer;
#else
typedef ReadBufferASCII ReadBuffer;
#endif
#endif
// added by Tobias, interface to new Beetl executable
BCRext::BCRext( bool huffman, bool runlength,
bool ascii, bool implicitSort,
bool seqFile, const string &inFile, const string &prefix ) :
// set tool flags
useHuffmanEncoder_( huffman ),
useRunlengthEncoder_( runlength ),
useAsciiEncoder_( ascii ),
useImplicitSort_( implicitSort ),
useSeqFile_ ( seqFile ),
inFile_( inFile ),
prefix_( prefix )
{
// Notes
if ( implicitSort && ( huffman || runlength ) )
{
cout << "-> Note: -sap mode needs ASCII intermediate files," << endl
<< "-> will revert to requested compression type for final output" << endl;
}
}
// called from beetl class, replaces main method
// args have been set through constructor before
void BCRext::run( void )
{
Timer timer;
//string prefix = (string)"[" + (string)args[0] + (string)"]: ";
string prefix = ( string )"[" + ( string )"BCRext" + ( string )"]: ";
cerr << prefix << "time now is " << timer.timeNow();
// Tony 13.6.12 - BCREXT_ID is not informative now we are in git world
// cerr << prefix << "software version is " << BCREXT_ID << endl;
const string fileStem( "tmp1" );
const string fileStemTemp( "tmp2" );
string tmpIn = fileStem;
string tmpOut = fileStemTemp;
string tmpSwap;
string fileName;
// output streams for sequences - 1 per pile
vector <FILE *> outSeq( alphabetSize );
// output streams for positions of suffixes in pile - 1 per pile
vector <FILE *> outPtr( alphabetSize );
// output stream for updated BWTs - 1 per pile
vector <BwtWriterBase *> outBwt( alphabetSize );
// output streams for original read numberings - 1 per pile
vector <FILE *> outNum( alphabetSize );
// input streams for BWT from previous iter - 1 per pile
// this one used to compute the counts to do backward search
vector <BwtReaderBase *> inBwt( alphabetSize );
// input streams for BWT from previous iter - 1 per pile
// this one used to read the BWT chunk by chunk to allow new
// chars to be interspersed
vector <BwtReaderBase *> inBwt2( alphabetSize );
// FILE* inPtr;
// FILE* inNum;
BwtWriterBase *outDollarBwt;
vector<char> bwtBuf;
// extra byte accounts for a fact that inserted BWT characters
// are appended to the buffer after the interspersed characters so as to
// make a single write to file
bwtBuf.resize( bwtBufferSize + 1 );
if ( whichPile[( int ) '$'] != 0 ||
whichPile[( int ) 'A'] != 1 ||
whichPile[( int ) 'C'] != 2 ||
whichPile[( int ) 'G'] != 3 ||
whichPile[( int ) alphabet[4]] != 4 ||
whichPile[( int ) alphabet[5]] != 5
#ifdef USE_EXTRA_CHARACTER_Z
|| whichPile[( int ) notInAlphabet] != 6
#endif
)
{
cerr << "Something seems to be wrong with the alphabet table!" << endl;
exit( EXIT_FAILURE );
}
Logger_if( LOG_FOR_DEBUGGING ) Logger::out() << prefix << "Using alphabet = " << alphabet << ", size = " << alphabetSize << endl;
// cerr << BUFSIZ << endl
if ( alphabetSize >= 10 )
{
cerr << "Alphabet sizes larger than 9 are not supported yet." << endl;
exit( EXIT_FAILURE );
}
SequenceNumber seqNum( 0 );
const LetterNumber sameAsPrevFlag( ( ( LetterNumber )1 ) << ( ( 8 * sizeof( LetterNumber ) ) - 1 ) );
const LetterNumber sameAsPrevMask( ~sameAsPrevFlag );
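// sameAsPrevFlag is the most significant bit of a LetterNumber (bit 63 for a 64-bit type);
// it is OR-ed into a suffix pointer to mark "same as previous", and sameAsPrevMask strips it again.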
// cout << sameAsPrevFlag << " "<< sameAsPrevMask << " "<< (sameAsPrevMask&sameAsPrevFlag) << " " << (((LetterNumber)1)<<63) << endl;
LetterNumber seqPtr;
int thisPile, lastPile;
LetterNumber posInPile;
// char inChar;
LetterNumber charsToGrab;// charsLeft, charsThisBatch;
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << prefix << "Will read sequences from file " << inFile_ << endl;
// read first sequence to determine read size
SeqReaderFile *seqReader( SeqReaderFile::getReader( fopen( inFile_.c_str(), "rb" ) ) );
const char *seqBuf = seqReader->thisSeq();
const int seqSize( strlen( seqBuf ) - 1 ); // -1 compensates for \n at end
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << prefix << "Assuming all sequences are of length " << seqSize << endl;
// inFile.seekg(0,ios::beg);
// rewind(inSeq);
if ( ( seqSize % 2 ) == 1 ) // if odd
{
// cout << "ODD" << endl;
tmpIn = fileStem;
tmpOut = fileStemTemp;
} // ~if
else
{
// cout << "EVEN" << endl;
tmpIn = fileStemTemp;
tmpOut = fileStem;
} // ~else
fileName = TmpFilename( fileStem, "-B0", 0 ).str();
readWriteCheck( fileName.c_str(), true );
//outDollarBwt = fopen( fileName.c_str(), "w" );
if ( useImplicitSort_ || useAsciiEncoder_ )
outDollarBwt = new BwtWriterASCII( fileName.c_str() );
#ifdef ACTIVATE_HUFFMAN
else if ( useHuffmanEncoder_ )
outDollarBwt = new BwtWriterHuffman( fileName.c_str() );
#endif
else if ( useRunlengthEncoder_ )
outDollarBwt = new BwtWriterRunLengthV3( fileName.c_str() );
else
assert( false );
for ( int j( 1 ); j < alphabetSize; j++ )
{
fileName = TmpFilename( tmpIn, "-S0", j ).str();
readWriteCheck( fileName.c_str(), true );
outSeq[j] = fopen( fileName.c_str(), "w" );
fileName = TmpFilename( tmpIn, "-P0", j ).str();
readWriteCheck( fileName.c_str(), true );
outPtr[j] = fopen( fileName.c_str(), "w" );
#ifdef TRACK_SEQUENCE_NUMBER
fileName = TmpFilename( tmpIn, "-N0", j ).str();
readWriteCheck( fileName.c_str(), true );
outNum[j] = fopen( fileName.c_str(), "w" );
#endif
fileName = TmpFilename( tmpIn, "-B0", j ).str();
if ( useImplicitSort_ || useAsciiEncoder_ )
outBwt[j] = new BwtWriterASCII( fileName.c_str() );
#ifdef ACTIVATE_HUFFMAN
else if ( useHuffmanEncoder_ )
outBwt[j] = new BwtWriterHuffman( fileName.c_str() );
#endif
else if ( useRunlengthEncoder_ )
outBwt[j] = new BwtWriterRunLengthV3( fileName.c_str() );
else
assert( false );
} // ~for
LetterCount dollars;
// vector<LetterCount> alreadyInPile(alphabetSize);
// vector<LetterCount> countedThisIter(alphabetSize);
LetterCountEachPile alreadyInPile;
LetterCountEachPile countedThisIter;
LetterCountEachPile newCharsThisIter;
// TBD Rationalize count names wrt first and subsequent iterations
LetterCount addedSoFar;
LetterCount outputSoFar;
LetterCount prevCharsOutputThisIter;
LetterCount newCharsAddedThisIter;
// LetterCount readSoFar[alphabetSize];
// First iteration
// - move original sequence into piles based on last character
// - work out BWT corresponding to 0-suffixes and 1-suffixes
// TBD check for invalid chars, do qual masking
ReadBuffer readBuffer( seqSize, -1, -1, -1 );
if ( readBuffer.blockSize_ <= seqSize + 1 )
{
cerr << "ReadBuffer blocksize is too small (" << readBuffer.blockSize_ << "). Aborting." << endl;
exit( EXIT_FAILURE );
}
// copy first sequence over
strcpy( readBuffer.seqBufBase_, seqBuf );
do
{
thisPile = whichPile[( int )readBuffer.seqBufBase_[seqSize - 1]];
// zero is terminator so should not be present
if ( thisPile < 0 )
{
cerr << "Pile must not be < 0. Aborting. At char |" << readBuffer.seqBufBase_[seqSize - 1] << "|" << endl;
exit( EXIT_FAILURE );
}
if ( thisPile > alphabetSize )
{
cerr << "Pile must not be > alphabet size. Aborting." << endl;
exit( EXIT_FAILURE );
}
#ifdef DEBUG
cout << readBuffer.seqBufBase_ << endl;
#endif
// count characters and output first N chars of BWT
// (those preceding terminator characters)
dollars.count_[thisPile]++;
/*
if ( fwrite( readBuffer.seqBufBase_ + seqSize - 1, sizeof ( char ), 1, outDollarBwt ) != 1 )
{
cerr << "Could not write to Dollar Pile. Aborting." << endl;
exit( EXIT_FAILURE );
}
*/
( *outDollarBwt )( readBuffer.seqBufBase_ + seqSize - 1, 1 );
// fprintf( outSeq[thisPile], "%s", readBuffer.seqBufBase_);
readBuffer.convertFromASCII();
readBuffer.sendTo( outSeq[thisPile] );
#ifdef TRACK_SEQUENCE_NUMBER
assert( fwrite( &seqNum, sizeof( SequenceNumber ),
1, outNum[thisPile] ) == 1 );
// seqNum++;
#endif
// create BWT corresponding to 1-suffixes
if ( whichPile[( int )readBuffer.seqBufBase_[seqSize - 2]] < 0 ||
whichPile[( int )readBuffer.seqBufBase_[seqSize - 2]] > alphabetSize )
{
cerr << "Trying to write non alphabet character to pile. Aborting." << endl;
exit( EXIT_FAILURE );
}
countedThisIter[thisPile].count_[whichPile[( int )readBuffer.seqBufBase_[seqSize - 2]]]++;
// assert(fwrite( readBuffer.seqBufBase_+seqSize-2, sizeof(char), 1, outBwt[thisPile] )==1);
seqPtr = *( addedSoFar.count_ + thisPile );
if ( useImplicitSort_ && ( addedSoFar.count_[thisPile] != 0 ) )
{
// cout << thisPile << " " << addedSoFar.count_[thisPile] << " 1\n";
seqPtr |= sameAsPrevFlag; // TBD replace if clause with sum
// *(readBuffer.seqBufBase_+seqSize-2)+=32;//tolower(*(readBuffer.seqBufBase_+seqSize-2));
*( readBuffer.seqBufBase_ + seqSize - 2 ) = tolower( *( readBuffer.seqBufBase_ + seqSize - 2 ) );
}
( *outBwt[thisPile] )( readBuffer.seqBufBase_ + seqSize - 2, 1 );
if ( fwrite( &seqPtr, sizeof( LetterNumber ),
1, outPtr[thisPile] ) != 1 )
{
cerr << "Could not write to pointer pile. Aborting." << endl;
exit( EXIT_FAILURE );
}
addedSoFar.count_[thisPile]++;
seqNum++;
seqReader->readNext( readBuffer.seqBufBase_ );
} // ~while
while ( !seqReader->allRead() );
delete outDollarBwt;
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << prefix << "Read " << seqNum << " sequences" << endl;
for ( int i( 1 ); i < alphabetSize; i++ )
{
fclose( outSeq[i] );
fclose( outPtr[i] );
#ifdef TRACK_SEQUENCE_NUMBER
fclose( outNum[i] );
#endif
delete outBwt[i];
// fclose(outBwt[i]);
}
// return (0);
LetterCount lastSAPInterval;
LetterNumber thisSAPInterval;
// ReadBuffer buffer(seqSize);
// Main loop
for ( int i( 2 ); i <= seqSize; i++ )
{
thisSAPInterval = 0;
lastSAPInterval.clear();
cout << "Starting iteration " << i << ", time now: " << timer.timeNow();
cout << "Starting iteration " << i << ", usage: " << timer << endl;
// don't do j=0 - this is the $ sign which is done already
for ( int j( 1 ); j < alphabetSize; j++ )
{
// prep the output files
fileName = TmpFilename( tmpOut, "-S0", j ).str();
readWriteCheck( fileName.c_str(), true );
outSeq[j] = fopen( fileName.c_str(), "w" );
fileName = TmpFilename( tmpOut, "-P0", j ).str();
readWriteCheck( fileName.c_str(), true );
outPtr[j] = fopen( fileName.c_str(), "w" );
#ifdef TRACK_SEQUENCE_NUMBER
fileName = TmpFilename( tmpOut, "-N0", j ).str();
readWriteCheck( fileName.c_str(), true );
outNum[j] = fopen( fileName.c_str(), "w" );
#endif
fileName = TmpFilename( tmpOut, "-B0", j ).str();
if ( ( useImplicitSort_ && ( i != seqSize ) ) || useAsciiEncoder_ )
outBwt[j] = new BwtWriterASCII( fileName.c_str() );
#ifdef ACTIVATE_HUFFMAN
else if ( useHuffmanEncoder_ )
outBwt[j] = new BwtWriterHuffman( fileName.c_str() );
#endif
else if ( useRunlengthEncoder_ )
outBwt[j] = new BwtWriterRunLengthV3( fileName.c_str() );
else
assert( false );
if ( useImplicitSort_ && ( i == seqSize ) )
{
BwtWriterBase *p( new BwtWriterImplicit( outBwt[j] ) );
outBwt[j] = p; // ... and the deception is complete!!!
} // ~if
#ifdef DEBUG
cout << "Prepping output file " << tmpOut << endl;
#endif
setvbuf( outSeq[j], NULL, _IOFBF, 262144 );
// setvbuf( outPtr[j], NULL, _IOFBF, 65536);
// setvbuf( outNum[j], NULL, _IOFBF, 65536);
// setvbuf( outBwt[j], NULL, _IOFBF, 65536);
// prep the input files
fileName = TmpFilename( tmpIn, "-B0", j ).str();
// select the proper input module
if ( useImplicitSort_ || useAsciiEncoder_ )
{
inBwt[j] = new BwtReaderASCII( fileName.c_str() );
inBwt2[j] = new BwtReaderASCII( fileName.c_str() );
}
#ifdef ACTIVATE_HUFFMAN
else if ( useHuffmanEncoder_ )
{
inBwt[j] = new BwtReaderHuffman( fileName.c_str() );
inBwt2[j] = new BwtReaderHuffman( fileName.c_str() );
}
#endif
else if ( useRunlengthEncoder_ )
{
inBwt[j] = new BwtReaderRunLengthV3( fileName.c_str() );
inBwt2[j] = new BwtReaderRunLengthV3( fileName.c_str() );
}
else
assert( false );
#ifdef DEBUG
cout << "Prepping input file " << tmpIn << endl;
#endif
} // ~for j
addedSoFar.clear();
outputSoFar.clear();
prevCharsOutputThisIter.clear();
newCharsAddedThisIter.clear();
#ifdef DEBUG
cout << "already in pile" << endl;
alreadyInPile.print();
cout << "counted this iter" << endl;
countedThisIter.print();
#endif
countedThisIter.clear();
newCharsThisIter.clear();
#ifdef DEBUG
cout << "Count in dollars pile: ";
dollars.print();
#endif
// don't do j=0; $ sign done already
for ( int j( 1 ); j < alphabetSize; j++ )
{
int fdSeq, fdNum, fdPtr;
#ifndef TRACK_SEQUENCE_NUMBER
fdNum = 0;
#endif
// read each input file in turn
fileName = TmpFilename( tmpIn, "-S0", j ).str();
readWriteCheck( fileName.c_str(), false );
fdSeq = open( fileName.c_str(), O_RDONLY, 0 );
fileName = TmpFilename( tmpIn, "-P0", j ).str();
readWriteCheck( fileName.c_str(), false );
fdPtr = open( fileName.c_str(), O_RDONLY, 0 );
#ifdef TRACK_SEQUENCE_NUMBER
fileName = TmpFilename( tmpIn, "-N0", j ).str();
readWriteCheck( fileName.c_str(), false );
fdNum = open( fileName.c_str(), O_RDONLY, 0 );
#ifdef HAVE_POSIX_FADVISE
assert( posix_fadvise( fdNum, 0, 0, POSIX_FADV_SEQUENTIAL | POSIX_FADV_NOREUSE | POSIX_FADV_WILLNEED ) != -1 );
#endif
#endif
#ifdef HAVE_POSIX_FADVISE
assert( posix_fadvise( fdSeq, 0, 0, POSIX_FADV_SEQUENTIAL | POSIX_FADV_NOREUSE | POSIX_FADV_WILLNEED ) != -1 );
assert( posix_fadvise( fdPtr, 0, 0, POSIX_FADV_SEQUENTIAL | POSIX_FADV_NOREUSE | POSIX_FADV_WILLNEED ) != -1 );
#endif
#ifdef USE_PREFIX_ONLY
ReadBufferPrefix buffer( seqSize, i, fdSeq, fdNum, fdPtr );
#else
ReadBuffer buffer( seqSize, fdSeq, fdNum, fdPtr );
#endif
while ( buffer.getNext( seqNum, seqPtr ) )
{
bool thisSAPValue = ( ( seqPtr & sameAsPrevFlag ) != 0 );
if ( thisSAPValue )
{
seqPtr &= sameAsPrevMask;
}
else
{
thisSAPInterval++;
}
thisPile = buffer[seqSize - i];
//thisPile=whichPile[seqBuff[seqSize-i]];
if ( thisPile < 0 )
{
cerr << "Pile must not be < 0. Aborting." << endl;
exit( EXIT_FAILURE );
}
lastPile = buffer[seqSize - i + 1];
//lastPile=whichPile[seqBuff[seqSize-i+1]];
if ( lastPile < 0 )
{
cerr << "Pile must not be < 0. " << endl;
exit( EXIT_FAILURE );
}
#ifdef DEBUG
cout << ( ( thisSAPValue ) ? '1' : '0' ) << " " << thisSAPInterval << " " << seqPtr << " " << seqNum << " " << thisPile << " " << lastPile << endl;
cout << "Read in " << seqPtr << " " << seqNum << " " << thisPile << " " << lastPile << endl;
for ( int ZZ( 0 ); ZZ < seqSize; ZZ++ )
{
cout << "ZZ " << ZZ << endl;
cout << alphabet[buffer[ZZ]] << endl;
}
cout << endl;
#endif
// *** work out position in new pile ***
// sum contents of lexicographically smaller piles
// ... probably possible to speed this up by storing cumulative sums
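// Hedged sketch of the suggested cumulative-sum speed-up (not implemented here): a hypothetical
// table cumInPile[k][thisPile] holding the prefix sums of alreadyInPile[1..k].count_[thisPile]
// would let the loop over k below be replaced by a single lookup.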
#ifdef DEBUG
cout << "already in pile" << endl;
alreadyInPile.print();
#endif
// posInPile=0;
posInPile = dollars.count_[thisPile];
// cout << posInPile << " " << thisPile << " " << lastPile << endl;
for ( int k( 1 ); k < lastPile; k++ )
{
posInPile += alreadyInPile[k].count_[thisPile];
// cout << posInPile << endl;
} // ~for k
#ifdef DEBUG
cout << "posInPile starts at " << posInPile << endl;
cout << "counting in pile " << alphabet[lastPile] << endl;
#endif
// count all chars prior to seqPtr in lastPile
// read seqPtr too, but don't add to countedThisIter
charsToGrab = seqPtr - addedSoFar.count_[lastPile]; //+1;
#ifdef DEBUG
cout << "charsToGrab " << charsToGrab << endl;
#endif
// Should now always read at least 1 byte
if ( charsToGrab < 0 )
{
cerr << "Tried to grap < 0 chars. Aborting." << endl;
exit( EXIT_FAILURE );
}
LetterNumber readCountChars = inBwt[lastPile]->readAndCount
( countedThisIter[lastPile], charsToGrab );
if ( readCountChars != charsToGrab )
{
cerr << "BWT readAndCount returned only " << readCountChars
<< " chars. Expected " << charsToGrab
<< " chars. Aborting." << endl;
exit( EXIT_FAILURE );
}
inBwt[lastPile]->readAndCount( newCharsThisIter[lastPile], 1 );
addedSoFar.count_[lastPile] = seqPtr + 1;
posInPile += countedThisIter[lastPile].count_[thisPile];
#ifdef DEBUG
cout << "counted this iter" << endl;
countedThisIter.print();
#endif
// *** add char into new pile ***
// read and output bytes up to insertion point
charsToGrab = posInPile - prevCharsOutputThisIter.count_[thisPile];
LetterNumber readSendChars = inBwt2[thisPile]->readAndSend
( *outBwt[thisPile], charsToGrab );
if ( readSendChars != charsToGrab )
{
cerr << "BWT readAndSend returned only " << readSendChars
<< " chars. Expected " << charsToGrab
<< " chars. Aborting." << endl;
exit( EXIT_FAILURE );
}
// bwtBuf[0]=(seqSize-i-1>=0)?baseNames[buffer[seqSize-i-1]]:'$';
bwtBuf[0] = ( seqSize - i - 1 >= 0 ) ? alphabet[buffer[seqSize - i - 1]] : alphabet[0];
// if (thisSAPValue==true) bwtBuf[0]+=32;//=tolower(bwtBuf[0]);
prevCharsOutputThisIter.count_[thisPile] = posInPile;
// pointer into new pile must be offset by number of new entries added
// seqPtr+=newCharsAddedThisIter.count_[thisPile];
seqPtr = posInPile + newCharsAddedThisIter.count_[thisPile];
if ( useImplicitSort_ )
{
if ( lastSAPInterval.count_[thisPile] == thisSAPInterval )
{
bwtBuf[0] = tolower( bwtBuf[0] );
// bwtBuf[0]+=32;
seqPtr |= sameAsPrevFlag;
// cout << thisSAPInterval << endl;
}
else
{
// cout << thisSAPInterval << " " << lastSAPInterval.count_[thisPile] << endl;
lastSAPInterval.count_[thisPile] = thisSAPInterval;
}
}
( *outBwt[thisPile] )( bwtBuf.data(), 1 );
if ( fwrite( &seqPtr, sizeof ( LetterNumber ),
1, outPtr[thisPile] ) != 1 )
{
cerr << "BWT readAndSend returned only " << readSendChars
<< " chars. Expected " << charsToGrab
<< " chars. Aborting." << endl;
exit( EXIT_FAILURE );
}
#ifdef DEBUG
cout << "adding pointer " << seqPtr << " to pile "
<< alphabet[thisPile] << endl;
#endif
// then the offset itself is updated
newCharsAddedThisIter.count_[thisPile]++;
// do radix sort
// fprintf( outSeq[thisPile], "%s\n", seqBuff);
// assert(fwrite( seqBuff, sizeof(char),
// 1+seqSize, outSeq[thisPile] )==1+seqSize);
buffer.sendTo( outSeq[thisPile] );
#ifdef TRACK_SEQUENCE_NUMBER
assert( fwrite( &seqNum, sizeof( SequenceNumber ),
1, outNum[thisPile] ) == 1 );
#endif
} // ~while
close( fdSeq );
close( fdPtr );
#ifdef TRACK_SEQUENCE_NUMBER
close( fdNum );
#endif
} // ~for j
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "All new characters inserted, usage: " << timer << endl;
for ( int j( 1 ); j < alphabetSize; j++ )
{
while ( inBwt[j]->readAndCount
( countedThisIter[j], ReadBufferSize ) == ReadBufferSize );
} // ~for j
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "finishing off BWT strings" << endl;
for ( int j( 1 ); j < alphabetSize; j++ )
{
while ( inBwt2[j]->readAndSend( *outBwt[j], ReadBufferSize )
== ReadBufferSize );
} // ~for
#ifdef DEBUG
cout << "final value of counted this iter" << endl;
countedThisIter.print();
cout << "final value of new chars this iter" << endl;
newCharsThisIter.print();
#endif
alreadyInPile += newCharsThisIter;
for ( int j( 1 ); j < alphabetSize; j++ )
{
fclose( outSeq[j] );
fclose( outPtr[j] );
#ifdef TRACK_SEQUENCE_NUMBER
fclose( outNum[j] );
#endif
delete ( outBwt[j] );
delete ( inBwt[j] );
delete ( inBwt2[j] );
} // ~for j
tmpSwap = tmpIn;
tmpIn = tmpOut;
tmpOut = tmpSwap;
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "finished iteration " << i << ", usage: " << timer << endl;
// assert(i<2);
} // ~for i (main iteration)
string fileTypes( "BPS" );
#ifdef REMOVE_TEMPORARY_FILES
for ( int j( 1 ); j < alphabetSize; j++ )
{
for ( unsigned int i( 0 ); i < fileTypes.size(); i++ )
{
char s[4] = "-B0";
s[1] = fileTypes[i];
fileName = TmpFilename( tmpOut, s, j ).str();
if ( remove( fileName.c_str() ) != 0 )
{
cerr << "Warning: failed to clean up temporary file " << fileName
<< endl;
} // ~if
else
{
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "Removed temporary file " << fileName << endl;
} // ~if
} // ~for i
} // ~for j
#endif
// Move files to final output directory
for ( int j( 0 ); j < alphabetSize; j++ )
{
for ( unsigned int i( 0 ); i < fileTypes.size(); i++ )
{
if ( j == 0 && i >= 1 ) continue; // "-S00" and "-P00" don't exist
char s[4] = "-B0";
s[1] = fileTypes[i];
fileName = TmpFilename( fileStem, s, j ).str();
Filename filenameOut( prefix_, s, j );
if ( safeRename( fileName, filenameOut.str() ) != 0 )
{
cerr << "Warning: failed to rename temporary file \"" << fileName << "\" to final output " << endl;
}
}
}
Logger::out() << "Final output files are named " << prefix_ << "-Bxx and similar" << endl;
}
<|start_filename|>src/BCR/BCRexternalBWT.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef _BCRexternalBWT_H_
#define _BCRexternalBWT_H_
#include "BWTCollection.hh"
#include "BwtReader.hh"
#include "libzoo/cli/ToolParameters.hh"
#include "shared/FragmentedVector.hh"
#include <fstream>
#include <iostream>
#include <map>
class SearchParameters;
class BCRexternalBWT : public SXSI::BWTCollection
{
public:
/**
* Constructor
*/
explicit BCRexternalBWT ( const string &file1, const string &fileOut, const int mode, const CompressionFormatType outputCompression, ToolParameters *toolParams = NULL );
~BCRexternalBWT();
int buildBCR( const string &, const string &, const BwtParameters *bwtParams );
int unbuildBCR( char const *, char const *, char const *, char const * );
int backwardSearchBCR( char const * , char const * , char const * , char const * );
int decodeBCRnaiveForward( char const *, char const *, char const * ); //Inverse BWT by Forward direction of nText sequences, one sequence at a time, in lexicographic order.
int decodeBCRmultipleReverse( char const *, char const *, char const *, bool processQualities = false ); //Inverse BWT by Backward direction of nText sequences at the same time by lengthRead iterations.
int Recover1symbolReverse( char const * , char const * , uchar *, sortElement * );
int RecoverNsymbolsReverse( char const *, char const *, uchar *, uchar *newQual = 0 );
int RecoverNsymbolsReverseByVector( char const *file1, char const *fileOutBwt, uchar *newSymb, uchar *newQual = 0 );
SequenceNumber recover1SequenceForward( char const * , char const * , sortElement , uchar *, SequenceLength * ) ;
vector <int> recoverNSequenceForward( char const * , char const *, SequenceNumber );
int recoverNSequenceForwardSequentially( char const * , char const *, SequenceNumber );
void storeBWT( uchar const *, uchar const *qual = NULL );
void storeBWT_parallelPile( uchar const *newSymb, uchar const *newQual, unsigned int parallelPile, SequenceNumber startIndex, SequenceNumber endIndex );
void storeEntireBWT( const string & );
void storeSA( SequenceLength );
void storeEntirePairSA( const char * );
void storeEntireSAfromPairSA( const char * );
virtual void storeBWTandLCP( uchar const * );
virtual void storeEntireLCP( const string & );
LetterNumber rankManySymbols( FILE &, LetterNumber *, LetterNumber, uchar * );
#ifdef XXX
LetterNumber rankManySymbols( FILE &, LetterCount &, LetterNumber, uchar * ); // TEMP
#endif
LetterNumber rankManySymbolsByVector( FILE & , LetterNumber *, LetterNumber, uchar *, uchar *foundQual = 0, FILE *InFileBWTQual = 0 );
LetterNumber findRankInBWT ( char const *, char const *, AlphabetSymbol, LetterNumber, uchar );
LetterNumber findRankInBWTbyVector ( char const *, char const *, AlphabetSymbol, LetterNumber, uchar );
int rankInverseManyByVector ( char const * , char const * , SequenceNumber , uchar * );
int backwardSearchManyBCR( char const * , char const *, char const *, vector<string>, SequenceLength );
int SearchAndLocateKmer ( char const * , char const * , char const * , vector<string> , SequenceLength , vector <int> & );
private:
void InsertNsymbols( uchar const *, SequenceLength, uchar const *qual = NULL );
void InsertNsymbols_parallelPile( uchar const *newSymb, SequenceLength posSymb, uchar const *newQual, unsigned int parallelPile, SequenceNumber startIndex, SequenceNumber endIndex, vector< FragmentedVector< sortElement > > &newVectTriplePerNewPile );
void InitialiseTmpFiles();
void InsertFirstsymbols( uchar const *, uchar const *qual = NULL, const int subSequenceNum = 0 );
int initializeUnbuildBCR( char const *, char const *, LetterNumber [] );
int computeNewPositionForBackSearch ( char const *, char const *, uchar );
int computeNewPositionForBackSearchByVector ( char const *, char const *, uchar );
int computeManyNewPositionForBackSearchByVector( char const * , char const * , uchar *, SequenceNumber );
int computeVectorUnbuildBCR( char const *, char const *, LetterNumber [] );
int update_Pos_Pile( sortElement * );
int update_Pos_Pile_Blocks( LetterNumber *, LetterNumber *, AlphabetSymbol, uchar );
int findBlockToRead( LetterNumber *, AlphabetSymbol , LetterNumber *, LetterNumber * );
private:
void pauseBetweenCyclesIfNeeded();
void convertFileFromIntermediateToFinalFormat( const char *filenameIn, const char *filenameOut );
void ReadFilesForCycle( const char *prefix, const SequenceLength cycle, const SequenceLength readLength, const SequenceNumber nText, uchar *newSymb, const bool processQualities, uchar *newQual );
BwtReaderBase *instantiateBwtReaderForIntermediateCycle( const char *filenameIn, bool allowDefrag = false );
BwtWriterBase *instantiateBwtWriterForIntermediateCycle( const char *filenameOut );
BwtWriterBase *instantiateBwtWriterForLastCycle( const char *filenameOut );
BwtReaderBase *instantiateBwtReaderForLastCycle( const char *filenameOut );
void writeEndPosFile( const uint8_t subSequenceNum, const bool lastFile );
BwtWriterBase *pWriterBwt0_; // persistent file, as we only ever need to append (never insert) characters to it
shared_ptr< ToolParameters > toolParams_;
shared_ptr< BwtParameters > bwtParams_;
shared_ptr< UnbwtParameters > unbwtParams_;
shared_ptr< SearchParameters > searchParams_;
};
#endif
<|start_filename|>src/backtracker/OneBwtBackTracker.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_ONEBWTBACKTRACKER_HH
#define INCLUDED_ONEBWTBACKTRACKER_HH
#include "BackTrackerBase.hh"
#include "BwtReader.hh"
#include "EndPosFile.hh"
#include "IntervalHandlerBase.hh"
#include "LetterCount.hh"
#include "RangeStore.hh"
#include "Types.hh"
#include "libzoo/util/Logger.hh"
#include <string>
using namespace std;
// OneBwtBackTracker class implements the backward search and takes its
// cue from IntervalHandler as to whether to continue down a particular
// branch of the search tree
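// Hedged usage sketch (assumption, not from the original source): a caller constructs one
// OneBwtBackTracker per cycle with the pile reader, range store and running counts, then calls
// process() for each interval, letting the supplied IntervalHandlerBase decide which
// sub-intervals get propagated to the next cycle.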
class OneBwtBackTracker: public BackTrackerBase
{
public:
OneBwtBackTracker(
BwtReaderBase *inBwt,
LetterNumber &currentPos,
RangeStoreExternal &r,
LetterCount &countsSoFar,
const string &subset,
const int cycle,
const bool doesPropagateBkptToSeqNumInSet,
const bool noComparisonSkip,
const bool propagateSequence,
EndPosFile &endPosFile
);
void skipIfNecessary( const Range &thisRange,
LetterNumber &currentPos,
BwtReaderBase &inBwt,
LetterCount &countsSoFar );
void process(
int i,
string &thisWord,
IntervalHandlerBase &intervalHandler,
Range &rangeDerivedObject
);
BwtReaderBase *inBwt_;
LetterNumber &currentPos_;
RangeStoreExternal &r_;
LetterCount &countsSoFar_;
const string &subset_;
const int cycle_;
// LetterNumber numRanges_;
// LetterNumber numSingletonRanges_;
AlphabetFlag propagateInterval_;
const bool doesPropagateBkptToSeqNumInSet_;
// const bool noComparisonSkip_;
EndPosFile &endPosFile_;
};
#endif
<|start_filename|>src/BCR/BCRexternalBWT.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BCRexternalBWT.hh"
#include "BWTCollection.hh"
#include "Filename.hh"
#include "Tools.hh"
#include "TransposeFasta.hh"
#include "parameters/BwtParameters.hh"
#include "parameters/SearchParameters.hh"
#include "parameters/UnbwtParameters.hh"
#include "libzoo/util/Logger.hh"
#include "libzoo/util/TemporaryFilesManager.hh"
#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstdio>
#include <cstring>
#include <fstream>
#include <iostream>
#include <vector>
#include <sys/stat.h>
using namespace std;
using SXSI::BWTCollection;
#define SIZEBUFFER 1024
#define DIMBLOCK 2048
////////////////////////////////////////////////////////////////////////////
// Class BCRexternalBWT
/**
* Constructor inits
*/
BCRexternalBWT::BCRexternalBWT ( const string &file1, const string &fileOutput, const int mode, const CompressionFormatType outputCompression, ToolParameters *toolParams )
: toolParams_( toolParams, emptyDeleter() )
, bwtParams_( 0 )
, unbwtParams_( 0 )
, searchParams_( 0 )
{
const char *intermediateCycFiles = "cyc.";
if ( mode == 0 )
{
using namespace BeetlBwtParameters;
bwtParams_ = dynamic_pointer_cast<BwtParameters>( toolParams_ );
if ( !bwtParams_ )
{
// Legacy mode: old code provides only outputCompression. We create a BwtParameters structure to let it work.
bwtParams_.reset( new BwtParameters );
switch ( outputCompression )
{
case compressionASCII:
( *bwtParams_ )[PARAMETER_INTERMEDIATE_FORMAT] = INTERMEDIATE_FORMAT_ASCII;
( *bwtParams_ )[PARAMETER_OUTPUT_FORMAT] = OUTPUT_FORMAT_ASCII;
break;
case compressionRunLength:
( *bwtParams_ )[PARAMETER_INTERMEDIATE_FORMAT] = INTERMEDIATE_FORMAT_RLE;
( *bwtParams_ )[PARAMETER_OUTPUT_FORMAT] = OUTPUT_FORMAT_RLE;
break;
case compressionIncrementalRunLength:
( *bwtParams_ )[PARAMETER_INTERMEDIATE_FORMAT] = INTERMEDIATE_FORMAT_MULTIRLE;
( *bwtParams_ )[PARAMETER_OUTPUT_FORMAT] = OUTPUT_FORMAT_RLE;
break;
case compressionHuffman:
( *bwtParams_ )[PARAMETER_INTERMEDIATE_FORMAT] = INTERMEDIATE_FORMAT_HUFFMAN;
( *bwtParams_ )[PARAMETER_OUTPUT_FORMAT] = OUTPUT_FORMAT_HUFFMAN;
break;
default:
assert( false && "shouldn't reach here" );
}
}
std::cerr << "Start BCR encode\n";
outputCompression_ = ( CompressionFormatType )( -1 ); // todo: remove this variable altogether
cerr << "Compression format for intermediate BWT files: " << bwtParams_->getStringValue( PARAMETER_INTERMEDIATE_FORMAT ) << endl;
cerr << "Compression format for BWT output files: " << bwtParams_->getStringValue( PARAMETER_OUTPUT_FORMAT ) << endl;
//added by GIOVANNA
if ( BUILD_SA == 1 )
std::cerr << "Compute also the SA by using the BCR (BCR_SA)\n";
else
std::cerr << "Compute only the BWT by using the BCR (BCR_SA)\n";
if ( bwtParams_->getValue( PARAMETER_GENERATE_ENDPOSFILE ) || BUILD_SA )
{
// we make sure that this file does not exist, to avoid reading an old version
Filename fileEndPos( bwtParams_->getStringValue( "output filename" ).c_str(), "-end-pos" );
remove( fileEndPos );
}
int result = buildBCR( file1, intermediateCycFiles, bwtParams_.get() );
checkIfNotEqual( result, 0 );
bool hasProcessedQualities = ( result == 2 );
if ( bwtParams_->getValue( PARAMETER_CONCATENATE_OUTPUT ) == true )
{
//Store the entire BWT from alphabetSize-files
storeEntireBWT( fileOutput );
if ( bwtParams_->getValue( PARAMETER_GENERATE_LCP ) == true )
{
storeEntireLCP( fileOutput );
}
}
//Do we want to compute the extended suffix array (position and sequence number)?
if ( BUILD_SA == 1 ) //To store the SA
{
storeEntirePairSA( fileOutput.c_str() );
storeEntireSAfromPairSA( fileOutput.c_str() );
}
if ( verboseEncode == 1 )
{
if ( ( bwtParams_->getValue( PARAMETER_GENERATE_LCP ) == true ) && ( BUILD_SA == 1 ) )
{
std::cerr << "Store the files containing the BWT, LCP and SA in a single file\n";
TmpFilename fnSA( fileOutput, ".sa" );
TmpFilename fnPairSA( fileOutput, ".pairSA" );
TmpFilename fnLCP( fileOutput, ".lcp" );
Filename fileOutRes( fileOutput, ".txt" );
FILE *InFileBWT = fopen( fileOutput.c_str(), "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "Entire BWT file: Error opening " << fileOutput << std::endl;
exit ( EXIT_FAILURE );
}
FILE *InFilePairSA = fopen( fnPairSA, "rb" );
if ( InFilePairSA == NULL )
{
std::cerr << "Entire Pairs SA file: Error opening " << fnPairSA.str() << std::endl;
exit ( EXIT_FAILURE );
}
FILE *InFileSA = fopen( fnSA, "rb" );
if ( InFileSA == NULL )
{
std::cerr << "Entire SA file: Error opening " << fnSA << std::endl;
exit ( EXIT_FAILURE );
}
FILE *InFileLCP = fopen( fnLCP, "rb" );
if ( InFileLCP == NULL )
{
std::cerr << "Entire LCP file: Error opening " << fnLCP << std::endl;
exit ( EXIT_FAILURE );
}
ofstream outFile( fileOutRes.str() );
if ( outFile.bad() )
{
std::cerr << "Error opening output \"" << fileOutRes << "\" file" << std::endl;
exit ( EXIT_FAILURE );
}
uchar *bufferBWT = new uchar[SIZEBUFFER];
uchar *bufferLCP = new uchar[SIZEBUFFER];
ElementType *buffer = new ElementType[SIZEBUFFER];
LetterNumber *bufferNChar = new LetterNumber[SIZEBUFFER];
while ( ( !feof( InFileBWT ) ) && ( !feof( InFileSA ) ) && ( !feof( InFilePairSA ) ) && ( !feof( InFileLCP ) ) )
{
LetterNumber numcharBWT = fread( bufferBWT, sizeof( uchar ), SIZEBUFFER, InFileBWT );
LetterNumber numcharPairSA = fread( buffer, sizeof( ElementType ), SIZEBUFFER, InFilePairSA );
LetterNumber numcharSA = fread( bufferNChar, sizeof( LetterNumber ), SIZEBUFFER, InFileSA );
LetterNumber numcharLCP = fread( bufferNChar, sizeof( LetterNumber ), SIZEBUFFER, InFileLCP );
//std::cerr << "Char read: " << numcharBWT << "\t" << numcharLCP << "\t" << numcharSA << "\t" << numcharPairSA << "\n";
outFile << "bwt\tlcp\tpos\tnumSeq\tSA\n";
if ( ( numcharPairSA != numcharSA ) || ( numcharLCP != numcharSA ) || ( numcharLCP != numcharBWT ) )
std::cerr << "Error: number in BWT in Pair SA in SA and in LCP\n";
else
{
for ( LetterNumber i = 0; i < numcharSA; i++ )
{
outFile << bufferBWT[i]
<< '\t' << bufferLCP[i]
<< '\t' << buffer[i].sa
<< '\t' << buffer[i].numSeq
<< '\t' << bufferNChar[i]
<< endl;
}
}
}
delete[] buffer;
delete[] bufferNChar;
fclose( InFilePairSA );
fclose( InFileSA );
fclose( InFileLCP );
}
}
TemporaryFilesManager::get().cleanupAllFiles();
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "Removing/Renaming the BWT segments\n";
for ( int g = 0 ; g < alphabetSize; g++ )
{
TmpFilename filename( g );
if ( deletePartialBWT == 1 )
{
if ( remove( filename ) != 0 )
perror( ( "BCRexternalBWT: Error deleting file " + filename.str() ).c_str() );
}
else //rename the aux bwt file
{
Filename newFilename( fileOutput, "-B0", g, "" );
safeRename( filename, newFilename );
if ( hasProcessedQualities )
{
TmpFilename qualFilename( "", g, ".qual" );
Filename newQualFilename( fileOutput, "-Q0", g, "" );
safeRename( qualFilename, newQualFilename );
}
}
}
if ( bwtParams_->getValue( PARAMETER_GENERATE_LCP ) == true )
{
std::cerr << "Removing/Renaming the LCP segment files\n";
for ( AlphabetSymbol g = 0 ; g < alphabetSize; g++ )
{
TmpFilename filenameIn( "", g, ".lcp" );
if ( deletePartialLCP == 1 )
{
if ( remove( filenameIn ) != 0 )
perror( ( "BCRexternalBWT: Error deleting file " + filenameIn.str() ).c_str() );
}
else //rename the aux lcp file
{
Filename newFilename( fileOutput, "-L0", g, "" );
safeRename( filenameIn, newFilename );
}
}
}
/* std::cerr << "Removing/Renaming the SA segments\n";
for (AlphabetSymbol g = 0 ; g < alphabetSize; g++) {
Filename filenameIn( "sa_", g );
if (deletePartialSA == 1) {
if (remove(filenameIn)!=0)
std::cerr << "BCRexternalBWT: Error deleting file" << std::endl;
}
else //rename the aux bwt file
{
Filename newfilename( fileOutput, filenameIn.str() );
if(rename(filenameIn, newfilename))
std::cerr <<"BCRexternalBWT: Error renaming file" << std::endl;
}
}
*/
Logger_if( LOG_FOR_DEBUGGING )
{
if ( /*bwtParams_->getValue( PARAMETER_INTERMEDIATE_STORAGE_MEDIUM ) == INTERMEDIATE_STORAGE_MEDIUM_RAM*/
bwtParams_->getValue( PARAMETER_INTERMEDIATE_FORMAT ) == INTERMEDIATE_FORMAT_MULTIRLE
)
{
Logger::out() << "RAM file lengths:";
extern vector< vector<unsigned char> > ramFiles; // declared in BwtWriter; todo: move those to another a new header file
size_t totalFileLengths = 0;
for ( unsigned int i = 0; i < ramFiles.size(); ++i )
{
Logger::out() << " " << i << ":" << ramFiles[i].size();
totalFileLengths += ramFiles[i].size();
}
Logger::out() << std::endl;
Logger::out() << " total RAM used for files: " << totalFileLengths << std::endl;
}
}
}
else if ( mode == 1 )
{
using namespace BeetlUnbwtParameters;
unbwtParams_ = dynamic_pointer_cast<UnbwtParameters>( toolParams_ );
if ( unbwtParams_ == NULL )
{
// Legacy mode: old code was set using #defines. We create a BwtParameters structure to reflect the default values.
unbwtParams_.reset( new UnbwtParameters );
( *unbwtParams_ )[PARAMETER_DECODE_DIRECTION] = DECODE_DIRECTION_BACKWARD;
( *unbwtParams_ )[PARAMETER_USE_VECTOR] = USE_VECTOR_ON;
}
std::cerr << "Start BCR decode\n";
const char *fileOutBwt = "";
int result = unbuildBCR( file1.c_str(), fileOutBwt, intermediateCycFiles, fileOutput.c_str() );
checkIfEqual( result, 1 );
}
else if ( mode == 2 )
{
using namespace BeetlSearchParameters;
searchParams_ = dynamic_pointer_cast<SearchParameters>( toolParams_ );
if ( searchParams_ == NULL )
{
// Legacy mode: old code was set using #defines. We create a BwtParameters structure to reflect the default values.
searchParams_.reset( new SearchParameters );
}
std::cerr << "Start Locate Function:\n";
std::cerr << "Backward Search and Recovery of the number of sequences\n";
const char *fileOutBwt = "";
vector<string> kmers;
char kmer[1000];
SequenceLength lenKmer = 0;
FILE *InFileKmer = fopen( "searchedKmers", "rb" );
if ( InFileKmer == NULL )
{
std::cerr << "Error opening \"searchedKmers\" file" << std::endl;
exit( EXIT_FAILURE );
}
while ( fgets( kmer, sizeof( kmer ), InFileKmer ) )
{
char *tmp = strchr( kmer, '\n' );
if ( tmp )
*tmp = '\0';
tmp = strchr( kmer, '\r' );
if ( tmp )
*tmp = '\0';
if ( ( strcmp( kmer, "\r" ) != 0 ) && ( strcmp( kmer, "\n" ) != 0 )
&& ( strcmp( kmer, "\0" ) != 0 ) )
{
kmers.push_back( kmer );
lenKmer = strlen( kmer );
}
}
fclose( InFileKmer );
vector <int> seqID;
int result = SearchAndLocateKmer( file1.c_str(), fileOutBwt, intermediateCycFiles, kmers, lenKmer, seqID );
checkIfEqual( result, 1 );
std::cerr << "\nBCRexternalBWT: We have located all kmers, Now we store the positions of the found kmers" << endl;
if ( seqID.empty() )
std::cerr << "BCRexternalBWT: None of the k-mers occur in the collection" << endl;
else
{
Filename newfilename( fileOutput );
FILE *FilePosKmers = fopen( newfilename, "wb" );
if ( FilePosKmers == NULL )
{
std::cerr << "BCRexternalBWT: could not open file " << newfilename << "!" << std::endl;
exit ( EXIT_FAILURE );
}
fprintf( FilePosKmers, "kmer_ID \t N_kmer \t pos_in_the_SA\n" );
SequenceNumber numTotKmers = 0;
for ( SequenceNumber g = 0 ; g < kmers.size(); g++ )
{
for ( LetterNumber j = FirstVector[g].posN ; j <= LastVector[g].posN; j++ ) //For each position between first and last
{
fprintf( FilePosKmers, "%u \t %u \t %d\n", LastVector[g].seqN, numTotKmers, seqID[numTotKmers] );
numTotKmers++;
}
}
fclose( FilePosKmers );
/* if (verboseDecode == 1) {
SequenceNumber numTotKmers=0;
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << "\nk-mer of index " << LastVector[g].seqN << ": "<< kmers[LastVector[g].seqN] << ". Number of occurrences " << LastVector[g].posN-FirstVector[g].posN+1 << std::endl;
for (LetterNumber j = FirstVector[g].posN ; j <= LastVector[g].posN; j++) { //For each position between first and last
std::cerr << "Number " << numTotKmers << " pos in the SA=\t"<< j << "\t SeqId=\t" << seqID[numTotKmers] << std::endl;
numTotKmers++;
}
}
}
*/
}
}
else
std::cerr << "Mode Error" << endl;
}
int BCRexternalBWT::SearchAndLocateKmer ( char const *file1, char const *fileOutBwt, char const *fileOut, vector<string> kmers, SequenceLength lenKmer, vector <int> &seqID )
{
LetterNumber freq[256]; //contains the distribution of the symbols.
int resultInit = initializeUnbuildBCR( file1, fileOutBwt, freq );
checkIfEqual( resultInit, 1 );
std::cerr << "Frequency" << "\n";
for ( AlphabetSymbol i = 0; i < 255; i++ )
if ( freq[i] > 0 )
{
std::cerr << i << "\t" << freq[i] << "\t" << ( int )alpha[i] << "\t" << ( int )alphaInverse[( int )alpha[i]] << "\n";
}
assert( unbwtParams_ || searchParams_ );
if ( ( unbwtParams_ && unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) == BeetlUnbwtParameters::USE_VECTOR_ON )
|| searchParams_ )
{
resultInit = computeVectorUnbuildBCR( file1, fileOutBwt, freq );
checkIfEqual( resultInit, 1 );
}
std::cerr << "backwardSearchManyBCR\n";
backwardSearchManyBCR( file1, fileOutBwt, fileOut, kmers, lenKmer );
for ( SequenceNumber g = 0 ; g < kmers.size(); g++ )
{
std::cerr << "The number of occurrences of " << kmers[LastVector[g].seqN] << " is \t" << LastVector[g].posN - FirstVector[g].posN + 1 << "\n";
}
if ( verboseDecode == 1 )
{
std::cerr << "First and Last: " << "\n";
std::cerr << "Q ";
for ( SequenceNumber g = 0 ; g < kmers.size(); g++ )
{
std::cerr << ( int )FirstVector[g].pileN << " " << ( int )LastVector[g].pileN << "\t";
}
std::cerr << std::endl;
std::cerr << "P ";
for ( SequenceNumber g = 0 ; g < kmers.size(); g++ )
{
std::cerr << FirstVector[g].posN << " " << LastVector[g].posN << "\t";
}
std::cerr << std::endl;
std::cerr << "N ";
for ( SequenceNumber g = 0 ; g < kmers.size(); g++ )
{
std::cerr << FirstVector[g].seqN << " " << LastVector[g].seqN << "\t" ;
}
std::cerr << std::endl;
}
vector<int> tmpSeqId = recoverNSequenceForward( file1, fileOutBwt, kmers.size() );
seqID.swap( tmpSeqId );
if ( seqID.empty() )
std::cerr << "\nSearchAndLocateKmer: No k-mer occurs in the collection";
//result = recoverNSequenceForwardSequentially(file1, fileOutBwt, kmers.size());
//assert (result ==1);
//Free the memory
for ( AlphabetSymbol j = 0 ; j < sizeAlpha; j++ )
{
delete [] tableOcc[j];
tableOcc[j] = NULL;
}
delete [] tableOcc;
delete[] alphaInverse;
return 1;
}
//Computes the rank-inverse function for many sequences by using the occurrence vector and updates posN with the number of symbols read.
//Computes the position of the i-th occurrence of the symbol toFindSymbols[h] in the BWT.
//posN[h] is the number of occurrences of the symbol toFindSymbols[h] that must be found in the BWT, corresponding to the i-th occurrence of the symbol in F.
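//Sketch of the procedure, as implemented below: the triples are assumed to be sorted by pileN,
//so each partial BWT file "<file1>-B0<pile>" is opened only once. For every triple k in the
//current pile, update_Pos_Pile_Blocks locates the DIMBLOCK-sized block that contains the
//posN-th occurrence of toFindSymbols[k]; that block is then scanned until the remaining
//occurrences have been seen, and the total number of characters read becomes the new
//vectTriple[k].posN.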
int BCRexternalBWT::rankInverseManyByVector ( char const *file1, char const *fileOutBwt, SequenceNumber numKmersInput, uchar *toFindSymbols )
{
uchar *buf = new uchar[SIZEBUFFER];
//Timer timer;
SequenceNumber j = 0;
while ( j < numKmersInput )
{
//std::cerr << "===j= " << j << " vectTriple[j].pileN " << (int)vectTriple[j].pileN << " vectTriple[j].seqN " << vectTriple[j].seqN <<"\n";
//We work into one BWT-partial at the time.
AlphabetSymbol currentPile = vectTriple[j].pileN;
//#ifdef DEBUG
// std::cerr << "===Current BWT-partial= " << (int)currentPile << "\n";
//#endif
Filename newfilename( file1, "-B0", currentPile, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "rankInverseManyByVector: BWT file " << ( int )j << ": Error opening " << newfilename << std::endl;
exit ( EXIT_FAILURE );
}
//contaAperturaFile++;
SequenceNumber k = j;
//For each triple in the same current pile, we have to find the position of the toRead-th occurrence of the symbol toFindSymbol
while ( ( k < numKmersInput ) && ( vectTriple[k].pileN == currentPile ) )
{
uchar toFindSymbol = toFindSymbols[k];
//std::cerr << "===k= " << k << " vectTriple[k].pileN " << (int)vectTriple[k].pileN << " vectTriple[k].seqN " << vectTriple[k].seqN << " toFindSymbol " << toFindSymbol <<"\n";
if ( toFindSymbol != terminatorChar )
{
//Update each tripla, so posN is the number that we should read in a particular block into BWT-partial
LetterNumber readChar = 0;
LetterNumber toRead = vectTriple[k].posN;
LetterNumber numBlock = 0;
//Find the block
//std::cerr << "toRead "<< toRead << " numBlock " << numBlock << " currentPile " << (int)currentPile << " toFindSymbol " << toFindSymbol<< "\n";
//cerr << "UPDATE_POS...";
//timer.timeNow();
int result = update_Pos_Pile_Blocks( &toRead, &numBlock, currentPile, toFindSymbol );
//cerr << "done." << timer << endl;
checkIfEqual( result, 1 );
readChar = numBlock * DIMBLOCK;
//We have read readChar by using vectorOcc, now we have to read toRead symbols from the block numBlock
//We move the file pointer in the position where the numBlock block starts.
fseek ( InFileBWT, numBlock * DIMBLOCK, SEEK_SET );
//Find the occurrences in the found block
LetterNumber num = 0, num_read = 0;
while ( ( !feof( InFileBWT ) ) && ( toRead > 0 ) )
{
num_read = fread( buf, sizeof( uchar ), SIZEBUFFER, InFileBWT );
num = 0;
while ( ( num < num_read ) && ( toRead > 0 ) )
{
if ( buf[num] == toFindSymbol )
toRead--;
readChar++; //it is the number of read symbols
num++;
}
if ( toRead < 0 )
std::cerr << "rankInverseManyByVector: position of the symbol not found" << "\n";
}
if ( toRead > 0 )
{
std::cerr << "*Error rankInverseManyByVector: we should read " << toRead << " characters yet in " << newfilename << " file!\n";
exit ( EXIT_FAILURE );
}
//Update the value of posN
vectTriple[k].posN = readChar;
}
k++;
}
j = k;
fclose( InFileBWT );
}
delete [] buf;
return 1;
}
//Computes the rank-inverse function and returns the number of symbols read.
//Computes the position of the i-th occurrence of the symbol toFindSymbol in the BWT.
//toRead is the number of occurrences of the symbol toFindSymbol that must be found in the BWT, corresponding to the i-th occurrence of the symbol in F.
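//Illustrative example (hypothetical data, not taken from the code): if the partial BWT is
//"ACCGAC", toFindSymbol is 'C' and toRead is 2, the scan stops after reading "ACC", so the
//function returns readChar = 3, i.e. the 2nd 'C' sits at (1-based) position 3 of this pile.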
LetterNumber BCRexternalBWT::findRankInBWT ( char const *file1, char const *fileOutBwt, AlphabetSymbol currentPile, LetterNumber toRead, uchar toFindSymbol )
{
assert( string( fileOutBwt ) == "" && "todo: remove this parameter if it is always null" );
Filename newfilename( file1, "-B0", currentPile, "" );
uchar *buf = new uchar[SIZEBUFFER];
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "findRankInBWT: could not open file " << newfilename << " !" << std::endl;
exit ( EXIT_FAILURE );
}
LetterNumber num = 0, num_read = 0, readChar = 0;
//#ifdef DEBUG
// std::cerr << "***FindRankInBWT: we must to read " << toRead << " occurrences of the symbol " << toFindSymbol << "!\n";
//#endif
while ( ( !feof( InFileBWT ) ) && ( toRead > 0 ) )
{
num_read = fread( buf, sizeof( uchar ), SIZEBUFFER, InFileBWT );
num = 0;
while ( ( num < num_read ) && ( toRead > 0 ) )
{
if ( buf[num] == toFindSymbol )
toRead--;
readChar++; //it is the number of read symbols
num++;
}
if ( toRead < 0 )
std::cerr << "findRankInBWT: position of the symbol not found" << "\n";
}
if ( toRead > 0 )
{
std::cerr << "Error findRankInBWT: we should read " << toRead << " characters yet in " << newfilename << " file!\n";
exit ( EXIT_FAILURE );
}
fclose( InFileBWT );
delete [] buf;
return readChar;
}
//Computes the rank-inverse function and returns the number of symbols read.
//Computes the position of the i-th occurrence of the symbol toFindSymbol in the BWT.
//toRead is the number of occurrences of the symbol toFindSymbol that must be found in the BWT, corresponding to the i-th occurrence of the symbol in F.
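//Same as findRankInBWT, but it first uses the per-block occurrence counts (vectorOcc, sampled
//every DIMBLOCK symbols) to jump directly to the block containing the toRead-th occurrence,
//so at most one block is scanned. For instance (hypothetical numbers), with DIMBLOCK = 1024
//and 3000 occurrences of toFindSymbol in the first two blocks, a query with toRead = 3050
//seeks to block 2 and scans it for only the remaining 50 occurrences.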
LetterNumber BCRexternalBWT::findRankInBWTbyVector ( char const *file1, char const *fileOutBwt, AlphabetSymbol currentPile, LetterNumber toRead, uchar toFindSymbol )
{
assert( string( fileOutBwt ) == "" && "todo: remove this parameter if it is always null" );
Filename newfilename( file1, "-B0", currentPile, "" );
uchar *buf = new uchar[SIZEBUFFER];
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "findRankInBWTbyVector: could not open file " << newfilename << " !" << std::endl;
exit ( EXIT_FAILURE );
}
LetterNumber readChar = 0;
LetterNumber numBlock = 0;
int result = update_Pos_Pile_Blocks( &toRead, &numBlock, currentPile, toFindSymbol );
checkIfEqual( result, 1 );
readChar = numBlock * DIMBLOCK;
fseek ( InFileBWT, numBlock * DIMBLOCK, SEEK_SET );
LetterNumber num = 0, num_read = 0;
while ( ( !feof( InFileBWT ) ) && ( toRead > 0 ) )
{
num_read = fread( buf, sizeof( uchar ), SIZEBUFFER, InFileBWT );
num = 0;
while ( ( num < num_read ) && ( toRead > 0 ) )
{
if ( buf[num] == toFindSymbol )
toRead--;
readChar++; //it is the number of read symbols
num++;
}
if ( toRead < 0 )
std::cerr << "findRankInBWTbyVector: position of the symbol not found" << "\n";
}
if ( toRead > 0 )
{
std::cerr << "*Error findRankInBWTbyVector: we should read " << toRead << " characters yet in " << newfilename << " file!\n";
exit ( EXIT_FAILURE );
}
fclose( InFileBWT );
delete [] buf;
return readChar;
}
//Computes the rank function and returns the number of symbols read.
//The rank function computes the number of occurrences of the symbol c from the starting position (startPos) in the BWT up to the position pos (endPos) in the BWT.
//Here, we compute the number of occurrences of each symbol from the starting position (startPos) in the BWT up to the position pos (endPos) in the BWT.
//startPos is the position of the file pointer InFileBWT; endPos depends on toRead.
//In the original definition of the rank, startPos corresponds to position 1 and endPos corresponds to the previous symbol.
//Here, we work with \sigma partial BWTs.
//toRead is the number of symbols that must be read before finding the symbol in B corresponding to the symbol in F.
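//Sketch of the behaviour below: the file pointer of InFileBWT is assumed to already be at
//startPos; the function reads exactly toRead characters, incrementing counters[alpha[c]] for
//every character c it sees, and stores in *foundSymbol the last character read (the BWT symbol
//at the position of interest).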
LetterNumber BCRexternalBWT::rankManySymbols( FILE &InFileBWT, LetterNumber *counters, LetterNumber toRead, uchar *foundSymbol )
{
LetterNumber numchar, cont = 0; //cont is the number of symbols already read!
uchar *buffer = new uchar[SIZEBUFFER];
//it reads toRead symbols from the fp file (Partial BWT)
while ( toRead > 0 ) //((numchar!=0) && (toRead > 0)) {
{
if ( toRead <= SIZEBUFFER ) //Read toRead characters
{
numchar = fread( buffer, sizeof( uchar ), toRead, &InFileBWT );
// we should always read/write the same number of characters
checkIfEqual( numchar, toRead );
*foundSymbol = buffer[numchar - 1]; //The symbol of the sequence k. It is the symbol in the last position in the partial BWT that we have read.
}
else //Read sizebuffer characters
{
numchar = fread( buffer, sizeof( uchar ), SIZEBUFFER, &InFileBWT );
// we should always read/write the same number of characters
checkIfEqual( numchar, SIZEBUFFER );
}
//For each symbol in the buffer, it updates the number of occurrences into counters
for ( LetterNumber r = 0; r < numchar; r++ )
counters[alpha[( int )buffer[r]]]++; //increment the number of letter symbol into counters
cont += numchar; //number of read symbols
toRead -= numchar; //number of remaining symbols to read
if ( ( numchar == 0 ) && ( toRead > 0 ) ) //it means that we have read 0 character, but there are still toRead characters to read
{
std::cerr << "rankManySymbols: read 0 character, but there are still " << toRead << " characters to read " << std::endl;
exit ( EXIT_FAILURE );
}
}
delete [] buffer;
return cont;
}
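//Variant of rankManySymbols used with the block sampling: the caller is expected to have
//already positioned InFileBWT at the start of the relevant DIMBLOCK-sized block (e.g. after
//findBlockToRead), so toRead must not exceed DIMBLOCK. If foundQual is not NULL, the quality
//stream InFileBWTQual is advanced by the same offset and the quality value paired with
//*foundSymbol is returned in *foundQual.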
LetterNumber BCRexternalBWT::rankManySymbolsByVector( FILE &InFileBWT, LetterNumber *counters, LetterNumber toRead, uchar *foundSymbol, uchar *foundQual, FILE *InFileBWTQual )
{
const LetterNumber offset = toRead;
LetterNumber numchar, count = 0; //count is the number of symbols already read!
static uchar *bufferBlock = new uchar[DIMBLOCK];
//it reads toRead symbols from the fp file (Partial BWT)
while ( toRead > 0 ) //((numchar!=0) && (toRead > 0)) {
{
if ( toRead <= DIMBLOCK ) //Read toRead characters
{
numchar = fread( bufferBlock, sizeof( uchar ), toRead, &InFileBWT );
checkIfEqual( numchar, toRead ); // we should always read/write the same number of characters
*foundSymbol = bufferBlock[numchar - 1]; //The symbol of the sequence k. It is the symbol in the last position in the partial BWT that we have read.
}
else //Read sizebuffer characters
{
std::cerr << "rankManySymbolsByVector: Error to read is" << toRead << std::endl;
exit ( EXIT_FAILURE );
//numchar = fread(buffer,sizeof(uchar),SIZEBUFFER,&InFileBWT);
//assert(numchar == SIZEBUFFER); // we should always read/write the same number of characters
//update counters from the vector table
}
//For each symbol in the buffer, it updates the number of occurrences into counters
for ( LetterNumber r = 0; r < numchar; r++ )
counters[alpha[( int )bufferBlock[r]]]++; //increment the number of letter symbol into counters
count += numchar; //number of read symbols
toRead -= numchar; //number of remaining symbols to read
if ( ( numchar == 0 ) && ( toRead > 0 ) ) //it means that we have read 0 character, but there are still toRead characters to read
{
std::cerr << "rankManySymbolsByVector: read 0 character, but there are still " << toRead << " characters to read " << std::endl;
exit ( EXIT_FAILURE );
}
}
// delete [] bufferBlock;
if ( foundQual )
{
if ( offset > 1 )
fseek( InFileBWTQual, offset - 1, SEEK_CUR );
numchar = fread( foundQual, sizeof( uchar ), 1, InFileBWTQual );
checkIfEqual( numchar, 1 );
size_t pos1 = ftell( &InFileBWT );
size_t pos2 = ftell( InFileBWTQual );
checkIfEqual( pos1, pos2 );
}
return count;
}
int BCRexternalBWT::computeNewPositionForBackSearch( char const *file1, char const *fileOutBwt, uchar symbol )
{
//Last = C[c] + rank (c, Last) --> (vectTriple[1].pileN, vectTriple[1].posN)
//First = C[c] + rank (c, First - 1) + 1 --> (vectTriple[0].pileN, vectTriple[0].posN)
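//Illustrative note: this is the classical FM-index backward-search interval update, expressed
//on pile-relative positions. The new posN is the rank of 'symbol' up to the old position,
//obtained as (occurrences of 'symbol' in all piles preceding currentPile, from tableOcc) +
//(occurrences counted inside currentPile up to posN); the new pileN is the pile of 'symbol'.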
//So we write:
vectTriple[0].posN --; //So we compute rank until position First - 1
uchar foundSymbol = '\0';
LetterNumber toRead = 0;
LetterNumber *counters = new LetterNumber[sizeAlpha]; //it counts the number of each symbol into the i-Pile-BWT
SequenceNumber j = 0;
while ( j < 2 )
{
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
AlphabetSymbol currentPile = vectTriple[j].pileN;
//if (verboseDecode == 1)
// std::cerr << "\n===Current BWT-partial= " << (int)currentPile << "\n";
Filename newfilename( file1, "-B0", currentPile, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "computeNewPositionForBackSearch: BWT file " << ( int )j << ": Error opening " << std::endl;
exit ( EXIT_FAILURE );
}
SequenceNumber k = j;
LetterNumber cont = 0; //number of the read symbols
LetterNumber numberRead = 0;
//uchar symbol;
//SequenceLength lenCheck=0;
while ( ( k < 2 ) && ( vectTriple[k].pileN == currentPile ) )
{
//The symbol for the sequences seqN in F[posN] is the symbol
//symbol = alphaInverse[vectTriple[k].pileN];
//Now, I have to find the new symbol, it is in B[pileN] in position posN and so I can update pileN and posN
//For any character (of differents sequences) in the same pile
//symbol = '\0';
//cont is the number of symbols already read!
toRead = vectTriple[k].posN - cont;
numberRead = rankManySymbols( *InFileBWT, counters, toRead, &foundSymbol );
checkIfEqual( toRead, numberRead );
cont += numberRead;
//I have to update the value in vectTriple[k].posN, it must contain the position of the symbol in F
//Symbol is
//newSymb[vectTriple[k].seqN] = symbol; //it is not useful here
//PosN is
vectTriple[k].posN = counters[alpha[( int )symbol]];
for ( AlphabetSymbol g = 0 ; g < currentPile; g++ ) //I have to count in each pile g= 0... (currentPile-1)-pile
{
vectTriple[k].posN = vectTriple[k].posN + tableOcc[g][alpha[( int )symbol]];
}
//pileN is
vectTriple[k].pileN = alpha[( int )symbol];
k++;
}
fclose( InFileBWT );
j = k;
}
delete [] counters;
//First = c[c] + rank (c, First - 1) + 1
vectTriple[0].posN ++; //We must add 1 to First
return 1;
}
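//Same interval update as computeNewPositionForBackSearch, but the rank inside the current pile
//is computed with the DIMBLOCK sampling (findBlockToRead + rankManySymbolsByVector) instead of
//scanning the partial BWT from its beginning.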
int BCRexternalBWT::computeNewPositionForBackSearchByVector( char const *file1, char const *fileOutBwt, uchar symbol )
{
//Last = C[c] + rank (c, Last) --> (vectTriple[1].pileN, vectTriple[1].posN)
//First = C[c] + rank (c, First - 1) + 1 --> (vectTriple[0].pileN, vectTriple[0].posN)
//So we write:
vectTriple[0].posN --; //So we compute rank until position First - 1
uchar foundSymbol = '\0';
LetterNumber toRead = 0;
LetterNumber *counters = new LetterNumber[sizeAlpha]; //it counts the number of each symbol into the i-Pile-BWT
SequenceNumber j = 0;
while ( j < 2 )
{
//The symbol for the sequences seqN in F[posN] is the symbol
//symbol = alphaInverse[vectTriple[k].pileN];
//Now, I have to find the new symbol, it is in B[pileN] in position posN and so I can update pileN and posN
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
AlphabetSymbol currentPile = vectTriple[j].pileN;
//if (verboseDecode == 1)
// std::cerr << "\n===Current BWT-partial= " << (int)currentPile << "(computeNewPositionForBackSearchByVector)\n";
Filename newfilename( file1, "-B0", currentPile, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "computeNewPositionForBackSearchByVector: BWT file " << ( int )j << ": Error opening " << std::endl;
exit ( EXIT_FAILURE );
}
SequenceNumber k = j;
LetterNumber cont = 0; //number of the read symbols
LetterNumber numberRead = 0;
//uchar symbol;
//SequenceLength lenCheck=0;
LetterNumber numBlock = 0;
while ( ( k < 2 ) && ( vectTriple[k].pileN == currentPile ) )
{
//cont is the number of symbols already read!
//toRead = vectTriple[k].posN - cont;
toRead = vectTriple[k].posN;
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
if ( toRead > 0 )
{
//we need to know how many occurrences of each symbol there are up to the position toRead.
//if toRead > DIMBLOCK, we can use vectorOcc in order to find the occurrences in the blocks that precede the block where the position toRead is.
//Before, we need to find the block where toRead position is.
int result = findBlockToRead( counters, currentPile, &toRead, &numBlock );
checkIfEqual( result, 1 );
}
if ( toRead <= DIMBLOCK ) //Also when toRead == DIMBLOCK, because we may need to know the foundSymbol character
{
fseek ( InFileBWT, numBlock * DIMBLOCK, SEEK_SET );
numberRead = rankManySymbolsByVector( *InFileBWT, counters, toRead, &foundSymbol );
checkIfEqual( toRead, numberRead );
cont += numberRead;
}
//I have to update the value in vectTriple[k].posN, it must contain the position of the symbol in F
//Symbol is
//newSymb[vectTriple[k].seqN] = symbol; //it is not useful here
//PosN is
vectTriple[k].posN = counters[alpha[( int )symbol]];
for ( AlphabetSymbol g = 0 ; g < currentPile; g++ ) //I have to count in each pile g= 0... (currentPile-1)-pile
{
vectTriple[k].posN = vectTriple[k].posN + tableOcc[g][alpha[( int )symbol]];
}
//pileN is
vectTriple[k].pileN = alpha[( int )symbol];
k++;
}
fclose( InFileBWT );
j = k;
}
delete [] counters;
//First = c[c] + rank (c, First - 1) + 1
vectTriple[0].posN ++; //We must add 1 to First
return 1;
}
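//Given a pile-relative position *toRead, findBlockToRead determines which DIMBLOCK-sized block
//of the partial BWT contains it, loads into counters the cumulative per-symbol occurrences of
//all preceding blocks (from vectorOcc), and rewrites *toRead as the residual number of symbols
//to scan inside that block. Worked example (hypothetical numbers): with DIMBLOCK = 1024 and
//*toRead = 2500, *numBlock becomes 2, counters are taken from the cumulative counts of blocks
//0-1, and *toRead becomes 2500 - 2048 = 452.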
int BCRexternalBWT::findBlockToRead( LetterNumber *counters, AlphabetSymbol currentPile, LetterNumber *toRead, LetterNumber *numBlock )
{
//Find the block numblock, where the position toRead is
//numBlock = 0;
*numBlock = ( LetterNumber )floor( ( long double )( ( *toRead - 1 ) / DIMBLOCK ) ) ; //The largest integral value not greater than x.
//if (*numBlock >= numBlocksInPartialBWT[currentPile])
// std::cerr << "Error findBlockToRead: numBlock " << *numBlock << " and numBlocksInPartialBWT["<<(int)currentPile<<"]" << numBlocksInPartialBWT[currentPile] << "\n";
//assert(*numBlock < numBlocksInPartialBWT[currentPile]);
if ( *numBlock >= numBlocksInPartialBWT[currentPile] )
{
cerr << "Numblock size mismatch: " << *numBlock << " < "
<< numBlocksInPartialBWT[currentPile]
<< ". Aborting." << endl;
}
if ( *numBlock > 0 )
{
for ( AlphabetSymbol r = 0; r < sizeAlpha; r++ )
counters[r] = vectorOcc[currentPile][r][( *numBlock ) - 1]; //vectorOcc is indexed by 0, so we have numBlock-1
*toRead = *toRead - ( *numBlock * DIMBLOCK ); //Number of symbols still to be read; it could be = DIMBLOCK
}
return 1;
}
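//Applies the backward-search interval update to many k-mers at once: FirstVector and LastVector
//are assumed to be sorted by pileN, so each partial BWT file is opened once per step, and for
//every k-mer whose interval is still non-empty (First.posN <= Last.posN) both boundaries are
//recomputed with the sampled rank.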
int BCRexternalBWT::computeManyNewPositionForBackSearchByVector( char const *file1, char const *fileOutBwt, uchar *symbols, SequenceNumber nKmers )
{
//Last = C[c] + rank (c, Last) --> (vectTriple[1].pileN, vectTriple[1].posN)
//First = C[c] + rank (c, First - 1) + 1 --> (vectTriple[0].pileN, vectTriple[0].posN)
//So we write:
for ( SequenceNumber i = 0; i < nKmers; i++ ) //For each kmer
if ( LastVector[i].posN >= FirstVector[i].posN ) //if not, the kmer is not in the collection
FirstVector[i].posN --; //So we compute rank until position First - 1
uchar foundSymbol = '\0'; //here, it is not useful
LetterNumber toRead = 0;
LetterNumber *counters = new LetterNumber[sizeAlpha]; //it counts the number of each symbol into the i-Pile-BWT
SequenceNumber j = 0;
while ( j < nKmers )
{
//The symbol for the sequences seqN in F[posN] is the symbol
//symbol = alphaInverse[vectTriple[k].pileN];
//Now, I have to find the new symbol, it is in B[pileN] in position posN and so I can update pileN and posN
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
AlphabetSymbol currentPile = FirstVector[j].pileN;
//if (verboseDecode == 1)
// std::cerr << "===Current BWT-partial= " << (int)currentPile << " (computeManyNewPositionForBackSearchByVector)\n";
Filename newfilename( file1, "-B0", currentPile, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "computeManyNewPositionForBackSearchByVector: BWT file " << ( int )j << ": Error opening " << newfilename << std::endl;
exit ( EXIT_FAILURE );
}
SequenceNumber k = j;
//LetterNumber cont = 0; //number of the read symbols
LetterNumber numberRead = 0;
//uchar symbol;
//SequenceLength lenCheck=0;
LetterNumber numBlock = 0;
while ( ( k < nKmers ) && ( FirstVector[k].pileN == currentPile ) )
{
if ( FirstVector[k].posN <= LastVector[k].posN )
{
//FIRST
//cont is the number of symbols already read!
//toRead = vectTriple[k].posN - cont;
toRead = FirstVector[k].posN;
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
if ( toRead > 0 )
{
//we need to know how many occurrences of each symbol there are up to the position toRead.
//if ToRead > dimBlock, we can use vectorOcc in order to find the occurrences in the blocks that precede the block where the position toRead is.
//Before, we need to find the block where toRead position is.
int result = findBlockToRead( counters, currentPile, &toRead, &numBlock );
checkIfEqual( result, 1 );
if ( toRead <= DIMBLOCK ) //Also when toRead == DIMBLOCK, because we may need to know the foundSymbol character
{
fseek ( InFileBWT, numBlock * DIMBLOCK, SEEK_SET );
numberRead = rankManySymbolsByVector( *InFileBWT, counters, toRead, &foundSymbol );
checkIfEqual( toRead, numberRead );
//cont += numberRead;
}
}
//I have to update the value in vectTriple[k].posN, it must contain the position of the symbol in F
//Symbol is
//newSymb[vectTriple[k].seqN] = symbol; //it is not useful here
//PosN is
FirstVector[k].posN = counters[alpha[( int )symbols[FirstVector[k].seqN]]];
for ( AlphabetSymbol g = 0 ; g < currentPile; g++ ) //I have to count in each pile g= 0... (currentPile-1)-pile
{
FirstVector[k].posN = FirstVector[k].posN + tableOcc[g][alpha[( int )symbols[FirstVector[k].seqN]]];
}
//pileN is
FirstVector[k].pileN = alpha[( int )symbols[FirstVector[k].seqN]];
//First = c[c] + rank (c, First - 1) + 1
FirstVector[k].posN ++; //We must add 1 to first
//LAST
toRead = LastVector[k].posN;
numBlock = 0;
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
if ( toRead > 0 )
{
//we need to know how many occurrences of each symbol there are up to the position toRead.
//if toRead > DIMBLOCK, we can use vectorOcc in order to find the occurrences in the blocks that precede the block where the position toRead is.
//Before, we need to find the block where toRead position is.
int result = findBlockToRead( counters, currentPile, &toRead, &numBlock );
checkIfEqual( result, 1 );
if ( toRead <= DIMBLOCK ) //Also when toRead == DIMBLOCK, because we may need to know the foundSymbol character
{
fseek ( InFileBWT, numBlock * DIMBLOCK, SEEK_SET );
numberRead = rankManySymbolsByVector( *InFileBWT, counters, toRead, &foundSymbol );
checkIfEqual( toRead, numberRead );
//cont += numberRead;
}
}
//I have to update the value in vectTriple[k].posN, it must contain the position of the symbol in F
//Symbol is
//newSymb[vectTriple[k].seqN] = symbol; //it is not useful here
//PosN is
LastVector[k].posN = counters[alpha[( int )symbols[FirstVector[k].seqN]]];
for ( AlphabetSymbol g = 0 ; g < currentPile; g++ ) //I have to count in each pile g= 0... (currentPile-1)-pile
{
LastVector[k].posN = LastVector[k].posN + tableOcc[g][alpha[( int )symbols[FirstVector[k].seqN]]];
}
//pileN is
LastVector[k].pileN = alpha[( int )symbols[FirstVector[k].seqN]];
}
k++;
}
fclose( InFileBWT );
j = k;
}
delete [] counters;
return 1;
}
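//Backward search of many patterns at the same time. Sketch of the flow implemented below: the
//[First,Last] interval of each k-mer is initialised from its last symbol using tableOcc (the
//whole pile of that symbol); then, for posSymb = lenKmer-1 down to 1, the triples are sorted by
//pile and computeManyNewPositionForBackSearchByVector extends every still-non-empty interval
//with the preceding symbol of its k-mer. At the end, the number of occurrences of kmers[i] is
//Last.posN - First.posN + 1 (a non-positive width means the k-mer does not occur).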
int BCRexternalBWT::backwardSearchManyBCR( char const *file1, char const *fileOutBwt, char const *fileOut, vector<string> kmers, SequenceLength lenKmer )
{
assert( unbwtParams_ || searchParams_ );
if ( ( unbwtParams_ && unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) == BeetlUnbwtParameters::USE_VECTOR_ON )
|| searchParams_ )
{
std::cerr << "For the computation of the new position useful for BackSearch, it uses a sampling of the occurrences for each segment: " << DIMBLOCK << " size." << std::endl;
}
else
{
std::cerr << "backwardSearchManyBCR is only implemented by using the sampling." << std::endl;
exit( EXIT_FAILURE );
}
//Initialization
uchar *symbols = new uchar[kmers.size()];
FirstVector.resize( kmers.size() );
LastVector.resize( kmers.size() );
for ( SequenceNumber i = 0; i < kmers.size(); i++ )
std::cerr << kmers[i] << "\n";
for ( SequenceNumber i = 0; i < kmers.size(); i++ ) //For each kmer
{
symbols[i] = kmers[i][lenKmer - 1];
//FIRST
FirstVector[i].seqN = i; //It is not useful
FirstVector[i].pileN = alpha[int( symbols[FirstVector[i].seqN] )];
FirstVector[i].posN = 1; //The first occurrence of symbol in F is in the first position in the pile Symbol
//LAST
LastVector[i].seqN = i; //It is not useful
LastVector[i].pileN = alpha[int( symbols[LastVector[i].seqN] )];
//The last occurrence of the symbol prevSymbol in F is in the last position in the pile prevSymbol
//It also corresponds to C[int(symbol) + 1]
LastVector[i].posN = 0;
for ( AlphabetSymbol mm = 0 ; mm < sizeAlpha; mm++ )
LastVector[i].posN += tableOcc[LastVector[i].pileN][mm];
}
/*
if (verboseDecode==1) {
std::cerr << "Init triples: " << "\n";
std::cerr << "Symbols in positions " << lenKmer << "\n";
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << symbols[g] << "\t";
}
std::cerr << std::endl;
std::cerr << "Q ";
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << (int)FirstVector[g].pileN << " " << (int)LastVector[g].pileN << "\t";
}
std::cerr << std::endl;
std::cerr << "P ";
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << FirstVector[g].posN << " " << LastVector[g].posN << "\t";
}
std::cerr << std::endl;
std::cerr << "N ";
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << FirstVector[g].seqN << " " << LastVector[g].seqN << "\t" ;
}
std::cerr << std::endl;
}
*/
for ( SequenceLength posSymb = lenKmer - 1; posSymb > 0; posSymb-- ) //For each symbol of the kmer
{
for ( SequenceNumber i = 0; i < kmers.size(); i++ ) //For each kmer in accord to the order in the triples
if ( LastVector[i].posN >= FirstVector[i].posN ) //if not, the kmer is not in the collection
symbols[FirstVector[i].seqN] = kmers[FirstVector[i].seqN][posSymb - 1];
quickSort( FirstVector );
quickSort( LastVector );
//For each symbol in the kmer we have to update First and Last
int resultCompute = computeManyNewPositionForBackSearchByVector ( file1, fileOutBwt, symbols, kmers.size() );
checkIfEqual( resultCompute, 1 );
/*
if (verboseDecode==1) {
std::cerr << "After The computation of the new positions: " << "\n";
std::cerr << "Symbols in positions " << posSymb << "\n";
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << symbols[FirstVector[g].seqN] << "\t\t";
}
std::cerr << std::endl;
std::cerr << "Q ";
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << (int)FirstVector[g].pileN << " " << (int)LastVector[g].pileN << "\t";
}
std::cerr << std::endl;
std::cerr << "P ";
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << FirstVector[g].posN << " " << LastVector[g].posN << "\t";
}
std::cerr << std::endl;
std::cerr << "N ";
for (SequenceNumber g = 0 ; g < kmers.size(); g++) {
std::cerr << FirstVector[g].seqN << " " << LastVector[g].seqN << "\t" ;
}
std::cerr << std::endl;
}
*/
}
delete[] symbols;
return 1;
}
//Reconstruct 1 factor backwards by threading through the LF-mapping.
int BCRexternalBWT::backwardSearchBCR( char const *file1, char const *fileOutBwt, char const *fileOut, char const *kmer )
{
// LetterNumber freq[256]; //contains the distribution of the symbols.
// int resultInit = initializeUnbuildBCR(file1, fileOutBwt, freq);
// assert (resultInit == 1);
std::cerr << "Now: backward search\n";
SequenceLength lenKmer = strlen( kmer );
std::cerr << "kmer: " << kmer << " length " << lenKmer << "\n";
LetterNumber posSymb = lenKmer - 1;
uchar symbol = kmer[posSymb];
vectTriple.resize( 2 );
if ( verboseDecode == 1 )
std::cerr << "\n>>>>>symbol is " << symbol << " in position " << posSymb + 1 << " of the pattern\n";
//Initialize triplaFirst to find the first sequence
//FIRST in position 0
vectTriple[0].seqN = 0; //It is not useful
vectTriple[0].pileN = alpha[int( symbol )];
vectTriple[0].posN = 1; //The first occurrence of symbol in F is in the first position in the pile Symbol
//Initialize triplaLast to find the last sequence
//LAST in position 1
vectTriple[1].seqN = 0; //It is not useful
vectTriple[1].pileN = alpha[int( symbol )];
//The last occurrence of the symbol prevSymbol in F is in the last position in the pile prevSymbol
//It also corresponds to C[int(symbol) + 1]
vectTriple[1].posN = 0;
for ( AlphabetSymbol j = 0 ; j < sizeAlpha; j++ )
vectTriple[1].posN += tableOcc[vectTriple[1].pileN][j];
if ( verboseDecode == 1 )
{
std::cerr << "Init triples: " << "\n";
std::cerr << "Q ";
for ( SequenceNumber g = 0 ; g < 2; g++ )
{
std::cerr << ( int )vectTriple[g].pileN << " ";
}
std::cerr << std::endl;
std::cerr << "P ";
for ( SequenceNumber g = 0 ; g < 2; g++ )
{
std::cerr << vectTriple[g].posN << " ";
}
std::cerr << std::endl;
std::cerr << "N ";
for ( SequenceNumber g = 0 ; g < 2; g++ )
{
std::cerr << vectTriple[g].seqN << " ";
}
std::cerr << std::endl;
}
//The new positions of the symbol followed by kmer[posSymb] in F are computed by the following function
assert( unbwtParams_ || searchParams_ );
if ( ( unbwtParams_ && unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) == BeetlUnbwtParameters::USE_VECTOR_ON )
|| searchParams_ )
{
std::cerr << "For the computation of the new position useful for BackSearch, it uses a sampling of the occurrences for each segment: " << DIMBLOCK << " size." << std::endl;
}
else
{
std::cerr << "For the computation of the new position useful for BackSearch you don't use the vector of the occurrences. You read the file" << std::endl;
}
while ( ( ( vectTriple[0].pileN == vectTriple[1].pileN ) && ( vectTriple[0].posN < vectTriple[1].posN ) ) && ( posSymb >= 1 ) )
{
symbol = kmer[posSymb - 1];
if ( verboseDecode == 1 )
std::cerr << "\n>>>>>symbol is " << symbol << " in position " << posSymb << " of the pattern\n";
//The new positions of the symbol followed by kmer[posSymb] in F are computed by the following function
int resultCompute = 0;
assert( unbwtParams_ || searchParams_ );
if ( ( unbwtParams_ && unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) == BeetlUnbwtParameters::USE_VECTOR_ON )
|| searchParams_ )
{
resultCompute = computeNewPositionForBackSearchByVector ( file1, fileOutBwt, symbol );
}
else
{
resultCompute = computeNewPositionForBackSearch ( file1, fileOutBwt, symbol );
}
checkIfEqual( resultCompute, 1 );
if ( verboseDecode == 1 )
{
std::cerr << "New triples: " << "\n";
std::cerr << "Q ";
for ( SequenceNumber g = 0 ; g < 2; g++ )
{
std::cerr << ( int )vectTriple[g].pileN << " ";
}
std::cerr << std::endl;
std::cerr << "P ";
for ( SequenceNumber g = 0 ; g < 2; g++ )
{
std::cerr << vectTriple[g].posN << " ";
}
std::cerr << std::endl;
std::cerr << "N ";
for ( SequenceNumber g = 0 ; g < 2; g++ )
{
std::cerr << vectTriple[g].seqN << " ";
}
std::cerr << std::endl;
}
posSymb--;
if ( verboseDecode == 1 )
std::cerr << ">>>>>Next symbol in position " << posSymb << "\n";
}
return vectTriple[1].posN - vectTriple[0].posN + 1;
}
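//Builds the occurrence sampling used by the *ByVector functions: vectorOcc[pile][symbol][block]
//holds the cumulative number of occurrences of 'symbol' in the first (block+1) DIMBLOCK-sized
//blocks of the partial BWT 'pile', and numBlocksInPartialBWT[pile] is the number of such blocks
//(the pile length divided by DIMBLOCK, rounded up). Each partial BWT is processed independently
//(one OpenMP iteration per pile).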
int BCRexternalBWT::computeVectorUnbuildBCR( char const *file1, char const *fileOutBwt, LetterNumber freq[] )
{
assert( string( fileOutBwt ) == "" && "todo: remove this parameter if it is always null" );
numBlocksInPartialBWT.resize( sizeAlpha );
for ( AlphabetSymbol x = 0 ; x < sizeAlpha; x++ )
{
numBlocksInPartialBWT[x] = ( LetterNumber )ceil( ( long double )freq[alphaInverse[x]] / DIMBLOCK );
if ( verboseDecode == 1 )
std::cerr << "numBlocksInPartialBWT[ " << ( int )x << " ]= " << numBlocksInPartialBWT[x] << "\n";
}
// Start by allocating an array for array of arrays
vectorOcc.resize( sizeAlpha ); //For each BWT-partial
// Allocate an array for each element of the first array
for ( AlphabetSymbol x = 0 ; x < sizeAlpha; x++ ) //For each BWT-partial
{
vectorOcc[x].resize( sizeAlpha ); //SumCumulative for each symbol and each block
// Allocate an array of integers for each element of this symbol
for ( AlphabetSymbol y = 0 ; y < sizeAlpha; y++ ) //For each symbol
vectorOcc[x][y].resize( numBlocksInPartialBWT[x], 0 );
}
#pragma omp parallel for
for ( AlphabetSymbol x = 0 ; x < sizeAlpha; x++ ) //For each BWT-partial
{
vector< uchar > bufBlock( DIMBLOCK );
Filename newfilename( file1, "-B0", x, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "computeVectorUnbuildBCR: could not open file " << newfilename << " !" << std::endl;
exit ( EXIT_FAILURE );
}
LetterNumber numBlock = 0;
while ( !feof( InFileBWT ) && ( numBlock < numBlocksInPartialBWT[x] ) ) //Added check on numBlocks
{
SequenceLength num_read = fread( bufBlock.data(), sizeof( uchar ), DIMBLOCK, InFileBWT );
for ( SequenceLength i = 0; i < num_read; i++ )
{
vectorOcc[x][alpha[( int )( bufBlock[i] )]][numBlock]++;
}
numBlock++;
}
fclose( InFileBWT );
//Compute the sum cumulative for each BWT-partial
for ( AlphabetSymbol z = 0 ; z < sizeAlpha; z++ ) //For each symbol z
for ( LetterNumber y = 1; y < numBlocksInPartialBWT[x] ; y++ ) //For each block y>1 of partial-BWT x
vectorOcc[x][z][y] = vectorOcc[x][z][y - 1] + vectorOcc[x][z][y]; //Sum the previous one: i.e. block y and block y-1
}
/*
#ifdef DEBUG
for (AlphabetSymbol x = 0 ; x < sizeAlpha; x++) {
std::cerr << "x = " << (int)x << " For the " << alphaInverse[x] << "-BWT-partial: the #symbols is " << freq[alphaInverse[x]] << " Number of block of the symbol " << numBlocksInPartialBWT[x] << "\n";
for(AlphabetSymbol z = 0; z < sizeAlpha; ++z) {
std::cerr << "Symbol: " << (int)z << ":\t";
for(LetterNumber y = 0; y < numBlocksInPartialBWT[x]; ++y)
std::cerr << vectorOcc[x][z][y] << "\t";
}
std::cerr << "\n";
}
std::cerr << "\n";
}
#endif
*/
return 1;
}
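//Initialisation step of the decoding: it fixes the expected alphabet (hard-coded below, see the
//TODO), builds alpha/alphaInverse and sizeAlpha, scans every partial BWT to fill
//tableOcc[pile][symbol], and derives nText (number of sequences, i.e. number of terminator
//symbols), lengthTot_plus_eof, lengthTot and lengthRead. Note that lengthRead = lengthTot / nText
//assumes all sequences have the same length.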
int BCRexternalBWT::initializeUnbuildBCR( char const *file1, char const *fileOutBwt, LetterNumber freq[] )
{
assert( string( fileOutBwt ) == "" && "todo: remove this parameter if it is always null" );
//We assume that the symbols in the input file are the following
//TODO
for ( AlphabetSymbol i = 0; i < 255; i++ )
freq[i] = 0;
freq[int( terminatorChar )] = 1;
freq[int( 'A' )] = 1;
freq[int( 'C' )] = 1;
freq[int( 'G' )] = 1;
freq[int( 'N' )] = 1;
freq[int( 'T' )] = 1;
//GIOVANNA: ADDED THE SYMBOL Z IN THE ALPHABET, SO sizeAlpha = alphabetSize
#ifdef USE_EXTRA_CHARACTER_Z
freq[int( 'Z' )] = 1;
#endif
//Compute size of alphabet
sizeAlpha = 0;
for ( AlphabetSymbol i = 0; i < 255; i++ )
if ( freq[i] > 0 )
sizeAlpha++;
//Compute alpha and alphaInverse
alphaInverse = new AlphabetSymbol[sizeAlpha];
AlphabetSymbol mmm = 0;
for ( AlphabetSymbol i = 0; i < 255; i++ )
if ( freq[i] > 0 )
{
alpha[i] = mmm;
alphaInverse[mmm] = i;
std::cerr << i << "\t" << freq[i] << "\t" << ( int )alpha[i] << "\t" << ( int )alphaInverse[mmm] << "\n";
mmm++;
}
std::cerr << "sizeof(type size of alpha): " << sizeof( AlphabetSymbol ) << "\n";
std::cerr << "sizeof(type of #sequences): " << sizeof( SequenceNumber ) << "\n";
std::cerr << "sizeof(type of #characters): " << sizeof( LetterNumber ) << "\n";
lengthTot = 0; //Counts the number of symbols
nText = 0;
lengthRead = 0;
lengthTot_plus_eof = 0;
tableOcc = new LetterNumber*[sizeAlpha];
for ( AlphabetSymbol j = 0 ; j < sizeAlpha; j++ ) //Counting for each pile: $-pile, A-pile, C-pile, G-pile, N-pile, T-pile
{
tableOcc[j] = new LetterNumber[sizeAlpha];
}
for ( AlphabetSymbol j = 0 ; j < sizeAlpha; j++ )
for ( AlphabetSymbol h = 0 ; h < sizeAlpha; h++ )
tableOcc[j][h] = 0;
LetterNumber lengthTotPlusEof = 0;
#pragma omp parallel for reduction(+:lengthTotPlusEof)
for ( AlphabetSymbol g = 0 ; g < sizeAlpha; g++ )
{
vector< uchar > buf( SIZEBUFFER );
Filename newfilename( file1, "-B0", g, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "initializeUnbuildBCR: could not open file " << newfilename << " !" << std::endl;
exit ( EXIT_FAILURE );
}
while ( !feof( InFileBWT ) )
{
SequenceLength num_read = fread( buf.data(), sizeof( uchar ), SIZEBUFFER, InFileBWT );
for ( SequenceLength i = 0; i < num_read; i++ )
{
tableOcc[g][alpha[( int )( buf[i] )]]++;
}
lengthTotPlusEof += num_read;
}
fclose( InFileBWT );
}
lengthTot_plus_eof = lengthTotPlusEof;
nText = 0;
for ( AlphabetSymbol g = 0 ; g < sizeAlpha; g++ )
nText += tableOcc[alpha[int( terminatorChar )]][g];
lengthTot = lengthTot_plus_eof - nText;
lengthRead = lengthTot / nText;
std::cerr << "\nNumber of sequences: " << nText << "\n";
std::cerr << "Length of each sequence: " << lengthRead << "\n\n";
std::cerr << "Total length (without $): " << lengthTot << "\n";
std::cerr << "Total length (with $): " << lengthTot_plus_eof << "\n";
//if (verboseDecode == 1) {
std::cerr << "TableOcc: " << "\n";
for ( AlphabetSymbol g = 0 ; g < sizeAlpha; g++ )
{
std::cerr << int( g ) << ":\t";
for ( AlphabetSymbol j = 0 ; j < sizeAlpha; j++ )
std::cerr << tableOcc[g][j] << "\t";
std::cerr << "\n";
}
//}
for ( AlphabetSymbol j = 0 ; j < 255; j++ )
freq[j] = 0;
//Compute of the frequency of each symbol
for ( AlphabetSymbol j = 0 ; j < sizeAlpha; j++ )
for ( AlphabetSymbol h = 0 ; h < sizeAlpha; h++ )
freq[( int )alphaInverse[j]] += tableOcc[j][h];
return 1;
}
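//Top-level decoder: depending on PARAMETER_DECODE_DIRECTION it either rebuilds the reads
//backwards for all sequences at once (decodeBCRmultipleReverse, writing cyc.i files that are
//then transposed into FASTA/FASTQ) or forwards one sequence at a time (decodeBCRnaiveForward).
//Qualities are handled only when the output filename ends in ".fastq".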
int BCRexternalBWT::unbuildBCR( char const *file1, char const *fileOutBwt, char const *fileOut, char const *fileOutput )
{
bool processQualities = hasSuffix( fileOutput, ".fastq" );
LetterNumber freq[256]; //contains the distribution of the symbols.
int resultInit = initializeUnbuildBCR( file1, fileOutBwt, freq );
checkIfEqual ( resultInit, 1 );
if ( unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) == BeetlUnbwtParameters::USE_VECTOR_ON )
{
resultInit = computeVectorUnbuildBCR( file1, fileOutBwt, freq );
checkIfEqual( resultInit, 1 );
}
if ( unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_DECODE_DIRECTION ) == BeetlUnbwtParameters::DECODE_DIRECTION_BACKWARD )
{
std::cerr << "Inverse BWT by Backward direction." << std::endl;
decodeBCRmultipleReverse( file1, fileOutBwt, fileOut, processQualities );
std::cerr << "The cyc files have been built. Building the sequences." << std::endl;
TransposeFasta trasp;
TmpFilename cycFilesInTmp( "cyc." );
int res = trasp.convertFromCycFileToFastaOrFastq( cycFilesInTmp, fileOutput );
checkIfEqual ( res, 1 );
if ( deleteCycFile == 1 )
{
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "Removing auxiliary input files (cyc files)\n";
// delete output files
for ( SequenceLength i = 0; i < lengthRead; i++ )
{
TmpFilename filename1( fileOut, i, "" );
if ( remove( filename1 ) != 0 )
std::cerr << filename1 << " BCRexternalBWT: Error deleting file" << std::endl;
if ( processQualities )
{
TmpFilename filename2( fileOut, i, ".qual" );
if ( remove( filename2 ) != 0 )
std::cerr << filename2 << " BCRexternalBWT: Error deleting file" << std::endl;
}
}
}
}
else
{
std::cerr << "Inverse BWT by Forward direction." << std::endl;
decodeBCRnaiveForward( file1, fileOutBwt, fileOutput );
}
//Free the memory
for ( AlphabetSymbol j = 0 ; j < sizeAlpha; j++ )
{
delete [] tableOcc[j];
tableOcc[j] = NULL;
}
delete [] tableOcc;
delete[] alphaInverse;
return true;
}
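//Converts an absolute occurrence count into a (pile, relative position) pair: tripla->posN is
//interpreted as the posN-th occurrence of the symbol alphaInverse[pileN] in the whole BWT;
//using that symbol's column of tableOcc we find the partial BWT containing this occurrence and
//its rank inside it. Worked example (hypothetical counts): if the symbol occurs 10 times in
//pile 0 and 7 times in pile 1, the 13th occurrence is mapped to pile 1 with relative position 3.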
int BCRexternalBWT::update_Pos_Pile( sortElement *tripla )
{
//I have to find the position of toFindSymbol in the correct partial BWT
//To find the pile where the posN-th occurrence is, we use tableOcc.
LetterNumber sumOccPileN = 0;
AlphabetSymbol currentPile = 0;
while ( ( sumOccPileN < tripla->posN ) && ( currentPile < sizeAlpha ) )
{
sumOccPileN += tableOcc[currentPile][tripla->pileN];
currentPile++;
}
if ( sumOccPileN >= tripla->posN ) //it means that the pile where toFindSymbol lies is currentPile-1; compute the relative position within it
{
currentPile--;
sumOccPileN = sumOccPileN - tableOcc[currentPile][tripla->pileN];
tripla->posN = tripla->posN - sumOccPileN;
tripla->pileN = currentPile;
}
else
std::cerr << "update_Pos_Pile: symbol " << ( int )tripla->pileN << " not found: " << tripla->posN << "occurrence.\n";
return 1;
}
int BCRexternalBWT::update_Pos_Pile_Blocks( LetterNumber *toRead, LetterNumber *numBlock, AlphabetSymbol currentPile, uchar toFindSymbol )
{
//I have to find the position of toFindSymbol in the correct block of the partial BWT
//To find the block in the pile where the posN-th occurrence is, we use vectorOcc.
/*
//Linear scanning
*numBlock = 0;
while ((vectorOcc[currentPile][alpha[(int)(toFindSymbol)]][*numBlock] < *toRead) && (*numBlock <=numBlocksInPartialBWT[currentPile] )) { //Added checks on numBlocks
(*numBlock)++;
}
assert (*numBlock <=numBlocksInPartialBWT[currentPile] );
*/
//Binary search for scanning
vector<LetterNumber>::iterator low;
low = lower_bound ( vectorOcc[currentPile][alpha[( int )( toFindSymbol )]].begin(), vectorOcc[currentPile][alpha[( int )( toFindSymbol )]].end(), *toRead ); // ^
*numBlock = ( LetterNumber )( low - vectorOcc[currentPile][alpha[( int )( toFindSymbol )]].begin() );
//assert (*numBlock <=numBlocksInPartialBWT[currentPile] );
if ( *numBlock > numBlocksInPartialBWT[currentPile] )
{
cerr << "Numblock size mismatch: " << *numBlock << " < "
<< numBlocksInPartialBWT[currentPile]
<< ". Aborting." << endl;
}
if ( ( *numBlock ) > 0 )
{
*toRead = *toRead - vectorOcc[currentPile][alpha[( int )( toFindSymbol )]][*numBlock - 1]; //vectorOcc is indexed by 0
}
return 1;
}
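//Recovers the sequence identifier (seqID) of every BWT position falling inside the [First,Last]
//interval of each searched k-mer. Sketch of the loop below: one triple is created per
//occurrence, and the mapping is followed forward (update_Pos_Pile + rankInverseManyByVector)
//until every triple reaches a terminator symbol; at that point its 1-based position in the
//terminator pile identifies the sequence (seqID = posN - 1).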
vector <int> BCRexternalBWT::recoverNSequenceForward( char const *file1, char const *fileOutBwt, SequenceNumber numKmersInput )
{
sortElement tripla;
SequenceNumber numTotKmers = 0;
for ( SequenceNumber g = 0 ; g < numKmersInput; g++ )
{
std::cerr << "Initialization for the k-mer of index " << LastVector[g].seqN << ". Number of occurrences " << LastVector[g].posN - FirstVector[g].posN + 1 << std::endl;
//Initialize triple
for ( LetterNumber j = FirstVector[g].posN ; j <= LastVector[g].posN; j++ ) //For each position between first and last
{
tripla.seqN = numTotKmers;
tripla.posN = j;
tripla.pileN = FirstVector[g].pileN;
vectTriple.push_back( tripla );
numTotKmers++;
}
}
std::cerr << "We want to compute the seqID of " << numTotKmers << " sequences." << std::endl;
quickSort( vectTriple );
uchar *toFindSymbols = new uchar[numTotKmers]; //Symbol to find for each kmers
SequenceNumber h;
for ( h = 0 ; h < numTotKmers; h++ )
toFindSymbols[h] = alphaInverse[vectTriple[h].pileN]; //The first time vectTriple[h].seqN == h
h = 0 ;
bool existDollars = false; //Set to true below if at least one symbol different from terminatorChar remains
while ( ( h < numTotKmers ) && ( existDollars != true ) )
{
if ( toFindSymbols[h] != terminatorChar )
existDollars = true; //There is at least one symbol different from terminatorChar
h++;
}
int result = 0;
SequenceNumber countDollars = 0;
while ( existDollars == true ) //While at least one symbol different from terminatorChar remains to be resolved
{
//cerr << "another round existDollars" << endl;
//Update the PileN where the number of occurrences is, for each h
//posN is the number of occurrences (absolute value) in the entire BWT
for ( h = 0 ; h < numTotKmers; h++ )
{
if ( toFindSymbols[vectTriple[h].seqN] != terminatorChar ) //if =terminatorChar, it means that we have already obtained the seqID
{
//cerr << "calling update_Pos_Pile" << endl;
result = update_Pos_Pile( &vectTriple[vectTriple[h].seqN] ); //posN is the number of occurrences (relative value) in the pileN-BWT-partial
checkIfEqual( result, 1 );
}
}
//Compute the rank inverse and inserts the number of read symbols into posN. Update posN
//cerr << "calling rankInverseManyByVector()";
result = rankInverseManyByVector ( file1, fileOutBwt, numTotKmers, toFindSymbols );
//cerr << "done." << endl;
checkIfEqual( result, 1 );
/*
LetterNumber readChar = 0;
for ( h = 0 ; h < numTotKmers; h++) {
if (toFindSymbols[h] != terminatorChar) {
readChar=findRankInBWTbyVector (file1, fileOutBwt, vectTriple[h].pileN, vectTriple[h].posN, toFindSymbols[h]);
assert(readChar!=0);
vectTriple[h].posN = readChar;
}
}
*/
quickSort( vectTriple );
//Update toFindSymbol and count the dollars
countDollars = 0;
for ( h = 0 ; h < numTotKmers; h++ )
{
if ( toFindSymbols[h] != terminatorChar )
{
toFindSymbols[h] = alphaInverse[vectTriple[h].pileN];
}
else
countDollars++;
}
//std::cerr << "countDollars " << countDollars << " ." << std::endl;
if ( countDollars >= numTotKmers )
{
existDollars = false; //The end!
}
}
vector <int> resultSeqId;
resultSeqId.resize( numTotKmers );
//The position is indexed by 1, the number of sequence by 0
for ( h = 0 ; h < numTotKmers; h++ )
{
resultSeqId[vectTriple[h].seqN] = vectTriple[h].posN - 1;
}
vectTriple.clear(); //Erase all elements of vector.
delete[] toFindSymbols;
return resultSeqId;
}
int BCRexternalBWT::recoverNSequenceForwardSequentially( char const *file1, char const *fileOutBwt, SequenceNumber numKmersInput )
{
//Compute the seqID sequentially
//Now, you must find seqN for each position between vectTriple[0].posN and vectTriple[1].posN of the BWT-partial vectTriple[0].pileN = vectTriple[1].pileN
for ( SequenceNumber g = 0 ; g < numKmersInput; g++ )
{
//Recover the number of the sequence seqN of the kmer one symbol at time in reverse order
uchar *sequence = new uchar[lengthRead + 2];
sortElement tripla;
std::cerr << "List of the seqID containing the k-mer with index" << g << ":" << std::endl;
for ( LetterNumber j = FirstVector[g].posN ; j <= LastVector[g].posN; j++ ) //For each position between first and last
{
for ( SequenceLength mmm = lengthRead + 2; mmm > 0; mmm-- )
sequence[mmm - 1] = '\0';
//Initialize tripla to find the sequence
tripla.seqN = g;
tripla.posN = j;
tripla.pileN = FirstVector[g].pileN;
SequenceLength lenSeq = 0;
//#ifdef DEBUG
// std::cerr << "Starting Tripla for the suffix: \tQ= " << (int)tripla.pileN << " P= " << tripla.posN << " N= " << tripla.seqN << std::endl;
//#endif
SequenceNumber numberOfSeq = recover1SequenceForward( file1, fileOutBwt, tripla, sequence, &lenSeq );
//#ifdef DEBUG
// std::cerr << " Computed suffix is " << sequence << "! It is long " << lenSeq << ". It belongs to " << numberOfSeq << " sequence of the collection" << std::endl;
//#endif
std::cerr << "pos in the SA=\t" << j << "\t SeqId=\t" << numberOfSeq << std::endl;
}
delete [] sequence;
}
return 1;
}
//Reconstruct 1 sequence backwards by threading through the LF-mapping and reading the characters off of F column.
SequenceNumber BCRexternalBWT::recover1SequenceForward( char const *file1, char const *fileOutBwt, sortElement tripla, uchar *sequence, SequenceLength *lenCheck )
{
//toFindSymbol is in the F column; it is in the pileN-BWT at position posN. So, it is the posN-th occurrence of alphaInverse[pileN] in F.
//So, toFindSymbol is the alphaInverse[pileN]
*lenCheck = 0;
uchar toFindSymbol = alphaInverse[tripla.pileN];
//LetterNumber rankFoundSymbol;
// if (verboseDecode == 1) {
// std::cerr << "The symbol is: " << toFindSymbol << "\n";
// std::cerr << "\nI have to find the position of the " << tripla.posN << " " << toFindSymbol << " in the whole BWT\n";
// }
sequence[0] = toFindSymbol;
//LetterNumber numcharWrite = fwrite (&toFindSymbol, sizeof(uchar), 1 , InfileOutDecode);
//assert( numcharWrite == 1); // we should always read the same number of characters
( *lenCheck )++;
while ( ( toFindSymbol != terminatorChar ) && ( *lenCheck <= lengthRead ) )
{
//posN is the number of occurrences (absolute value) in the entire BWT
int result = update_Pos_Pile( &tripla ); //posN is the number of occurrences (relative value) in the pileN-BWT-partial
checkIfEqual( result, 1 );
// if (verboseDecode == 1)
// std::cerr << "I have to find the position of the " << rankFoundSymbol << " occurrences of the symbol " << toFindSymbol << " in " << (int)tripla.pileN << " pile \n";
//I have to read the pileN until I find rankFoundSymbol symbols. The found value is posN, i.e. the position of the next symbol
LetterNumber readChar = 0;
if ( unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) != BeetlUnbwtParameters::USE_VECTOR_ON )
{
readChar = findRankInBWT ( file1, fileOutBwt, tripla.pileN, tripla.posN, toFindSymbol );
}
else
{
readChar = findRankInBWTbyVector ( file1, fileOutBwt, tripla.pileN, tripla.posN, toFindSymbol );
}
checkIfNotEqual( readChar, 0 );
tripla.posN = readChar;
// if (verboseDecode == 1)
// std::cerr << "The occurrence " << rankFoundSymbol << " of the symbol " << toFindSymbol << " is in position " << tripla.posN << "\n\n";
toFindSymbol = alphaInverse[tripla.pileN];
sequence[*lenCheck] = toFindSymbol;
// if (verboseDecode == 1) {
// std::cerr << "The symbol is: " << toFindSymbol << "\n";
// std::cerr << "I have to find the position of the " << tripla.posN << " " << toFindSymbol << " in the whole BWT\n";
// }
( *lenCheck )++;
}
//if (verboseDecode == 1)
// std::cerr << lenCheck << " " << lengthRead << "\n";
//if (verboseDecode==1) {
// std::cerr << "***********Found the $-sign in First column \t";
// std::cerr << "Q= " << (int)tripla.pileN << " P= " << tripla.posN << " N= " << tripla.seqN << std::endl;
//}
//The position is indexed by 1, the number of sequence by 0
return tripla.posN - 1;
}
//Inverse BWT by Forward direction of nText sequences, one sequence at a time, in lexicographic order.
//Reconstruct the sequences one at a time in forward order
//file1 is the input file
//fileOutBwt is the suffix of the auxiliary files for the partial BWTs
//fileOutDecode is the output, that is the texts
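//The "-end-pos" file is expected to begin with the number of sequences (SequenceNumber), a
//subSequenceCount byte and a hasRevComp byte, followed by one record per sequence (seqN plus a
//subSequenceNum byte, as read below); the relative position/pile of each terminator is no longer
//stored there, hence the assert(false) placeholder inside the loop.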
int BCRexternalBWT::decodeBCRnaiveForward( char const *file1, char const *fileOutBwt, char const *fileOutDecode )
{
LetterNumber numchar;
Filename fileEndPos( file1, "-end-pos" );
FILE *InFileEndPos; // input file of the end positions;
InFileEndPos = fopen( fileEndPos, "rb" );
if ( InFileEndPos == NULL )
{
std::cerr << "decodeBCRnaiveForward: could not open file " << fileEndPos << " !" << std::endl;
exit ( EXIT_FAILURE );
}
FILE *InfileOutDecode = fopen( fileOutDecode, "wb" );
if ( InfileOutDecode == NULL )
{
std::cerr << "decodeBCRnaiveForward: could not open file " << fileOutDecode << " !" << std::endl;
exit ( EXIT_FAILURE );
}
SequenceNumber numText = 0;
numchar = fread ( &numText, sizeof( SequenceNumber ), 1 , InFileEndPos );
checkIfEqual ( numchar, 1 );
checkIfEqual ( nText, numText ); // we should always read the same number of Texts of the bwt
uint8_t subSequenceCount = 0;
numchar = fread ( &subSequenceCount, sizeof( uint8_t ), 1 , InFileEndPos );
checkIfEqual ( numchar, 1 );
uint8_t hasRevComp = 0;
numchar = fread ( &hasRevComp, sizeof( uint8_t ), 1 , InFileEndPos );
checkIfEqual ( numchar, 1 );
sortElement triple;
std::cerr << "Recover the sequences of the collection in lexicographic order. A sequence at a time!" << std::endl;
assert( unbwtParams_ || searchParams_ );
if ( ( unbwtParams_ && unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) == BeetlUnbwtParameters::USE_VECTOR_ON )
|| searchParams_ )
{
std::cerr << "It is using the sampling of the BWT. It requires more memory!" << std::endl;
std::cerr << "In order to do this, it uses a sampling of the occurrences for each segment: " << DIMBLOCK << " size." << std::endl;
}
else
{
std::cerr << "It is not using the sampling of the BWT. It requires more time!" << std::endl;
}
for ( SequenceNumber i = 0; i < nText; i++ )
{
Logger::out() << "Decoding sequence " << i << endl;
numchar = fread ( &triple.seqN, sizeof( SequenceNumber ), 1 , InFileEndPos );
checkIfEqual( numchar, 1 ); // we should always read the same number of characters
// numchar = fread ( &triple.posN, sizeof( LetterNumber ), 1 , InFileEndPos ); //it is the relative position of the $ in the partial BWT
// checkIfEqual( numchar, 1 ); // we should always read the same number of characters
// numchar = fread ( &triple.pileN, sizeof( AlphabetSymbol ), 1 , InFileEndPos );
// checkIfEqual( numchar, 1 ); // we should always read the same number of characters
assert( false && "todo: recalculate posN and pileN which are not stored in end-pos file anymore" );
uint8_t subSequenceNum;
numchar = fread ( &subSequenceNum, sizeof( uint8_t ), 1 , InFileEndPos );
checkIfEqual( numchar, 1 ); // we should always read the same number of characters
// if (verboseDecode == 1)
// std::cerr << std::endl << "Starting Tripla: " << triple.seqN << " " << triple.posN << " " << (int)triple.pileN << "\n" << std::endl;
uchar *sequence = new uchar[lengthRead + 2];
for ( SequenceLength j = lengthRead + 2; j > 0; j-- )
sequence[j - 1] = '\0';
SequenceLength lenSeq = 0;
SequenceNumber numberOfSeq = recover1SequenceForward( file1, fileOutBwt, triple, sequence, &lenSeq );
checkIfEqual( numberOfSeq, triple.seqN );
// std::cerr << "The " << i+1 <<"-th/" << nText <<" computed sequence is " << sequence << "! It is long " << lenSeq << ". It belongs to " << numberOfSeq << " sequence of the collection" << std::endl;
if ( verboseDecode == 1 )
cerr << numberOfSeq << "\t" << sequence << endl;
SequenceLength numcharWrite = 0;
numcharWrite = fwrite ( sequence, sizeof( uchar ), lenSeq , InfileOutDecode );
checkIfEqual( numcharWrite , lenSeq ); // we should always read the same number of characters
numcharWrite = fwrite ( "\n", sizeof( char ), 1 , InfileOutDecode );
checkIfEqual( numcharWrite, 1 ); // we should always read the same number of characters
delete [] sequence;
}
fclose( InFileEndPos );
fclose( InfileOutDecode );
return true;
}
//Multiple Decoding the sequences (Build reverse sequence)
//Reconstruct m sequences backwards by threading through the FL-mapping and reading the characters off of L.
//file1 is the input file
//fileOutBWT is the suffix of the filename of the partial BWTs
//fileOut is the prefix of the lengthRead-filename (transpose texts: cyc.i)
//Inverse BWT by Backward direction of nText sequences at the same time by lengthRead iterations.
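//Illustrative sketch of the loop below (explanatory only, not part of the build):
//  for ( SequenceLength m = lengthRead; m > 0; m-- )
//      RecoverNsymbolsReverse[ByVector](...)   // one LF step per sequence, filling newSymb[0..nText-1]
//      write newSymb to the cyc.(m-1) file     // so cyc.i holds the i-th symbol of every sequence
//Reading the cyc.* files across i for a fixed sequence index therefore yields the original read.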
int BCRexternalBWT::decodeBCRmultipleReverse( char const *file1, char const *fileOutBwt, char const *fileOut, bool processQualities )
{
vectTriple.resize( nText );
//To compute the reversed sequences, we need the position of each $ in F
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
vectTriple[g].pileN = alpha[int( terminatorChar )]; //So the 0-pile
vectTriple[g].posN = g + 1;
vectTriple[g].seqN = g;
}
if ( verboseDecode == 1 )
{
std::cerr << "The Initial triples of $ in first column are!" << std::endl;
std::cerr << "Q ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << ( int )vectTriple[g].pileN << " ";
}
std::cerr << std::endl;
std::cerr << "P ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].posN << " ";
}
std::cerr << std::endl;
std::cerr << "N ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].seqN << " ";
}
std::cerr << std::endl;
}
uchar *newSymb = new uchar[nText];
uchar *newQual = processQualities ? ( new uchar[nText] ) : NULL;
//As the symbols are recovered in reverse order, the first symbol found is stored in the cyc.(length-1) file
//and the last symbol found in the cyc.0 file
assert( unbwtParams_ || searchParams_ );
if ( ( unbwtParams_ && unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) == BeetlUnbwtParameters::USE_VECTOR_ON )
|| searchParams_ )
{
std::cerr << "It is using the sampling of the BWT. It requires more memory!" << std::endl;
std::cerr << "In order to do this, it uses a sampling of the occurrences for each segment: " << DIMBLOCK << " size." << std::endl;
}
else
{
std::cerr << "It is not using the sampling of the BWT. It requires more time!" << std::endl;
}
for ( SequenceLength m = lengthRead ; m > 0 ; m-- )
{
Logger::out() << "Decoding cycle " << m << endl;
int resultNsymbol = -1;
assert( unbwtParams_ || searchParams_ );
if ( ( unbwtParams_ && unbwtParams_->getValue( BeetlUnbwtParameters::PARAMETER_USE_VECTOR ) == BeetlUnbwtParameters::USE_VECTOR_ON )
|| searchParams_ )
{
resultNsymbol = RecoverNsymbolsReverseByVector( file1, fileOutBwt, newSymb, newQual );
}
else
{
resultNsymbol = RecoverNsymbolsReverse ( file1, fileOutBwt, newSymb, newQual );
}
checkIfEqual ( resultNsymbol , 1 );
TmpFilename filename( fileOut, m - 1, "" );
FILE *InfileOutDecodeCyc = fopen( filename, "wb" );
if ( InfileOutDecodeCyc == NULL )
{
std::cerr << "decodeBCRmultipleReverse: could not open file " << filename << " !" << std::endl;
exit ( EXIT_FAILURE );
}
LetterNumber numcharWrite = fwrite ( newSymb, sizeof( uchar ), nText , InfileOutDecodeCyc );
checkIfEqual( numcharWrite, nText ); // we should always write the same number of characters
fclose( InfileOutDecodeCyc );
if ( processQualities )
{
TmpFilename qualFilename( fileOut, m - 1, ".qual" );
FILE *InfileOutDecodeCycQual = fopen( qualFilename, "wb" );
if ( InfileOutDecodeCycQual == NULL )
{
std::cerr << "decodeBCRmultipleReverse: could not open file " << qualFilename << " !" << std::endl;
exit ( EXIT_FAILURE );
}
LetterNumber numcharWriteQual = fwrite ( newQual, sizeof( uchar ), nText , InfileOutDecodeCycQual );
checkIfEqual( numcharWriteQual, nText ); // we should always write the same number of characters
fclose( InfileOutDecodeCycQual );
}
}
delete [] newSymb;
delete [] newQual;
return true;
}
//It is used to reconstruct m sequences backwards by threading through the FL-mapping and reading the characters off of L.
int BCRexternalBWT::RecoverNsymbolsReverse( char const *file1, char const *fileOutBwt, uchar *newSymb, uchar *newQual )
{
assert( string( fileOutBwt ) == "" && "todo: remove this parameter if it is always null" );
if ( newQual != 0 )
{
cerr << "TODO: Quality decompression not implemented for RecoverNsymbolsReverse" << endl;
exit( -1 );
}
LetterNumber toRead = 0;
LetterNumber *counters = new LetterNumber[sizeAlpha]; //it counts the number of each symbol into the i-Pile-BWT
SequenceNumber j = 0;
while ( j < nText )
{
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
{
counters[i] = 0;
}
AlphabetSymbol currentPile = vectTriple[j].pileN;
//if (verboseDecode == 1)
// std::cerr << "===Current BWT-partial= " << (int)currentPile << "\n";
Filename newfilename( file1, "-B0", currentPile, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "RecoverNsymbolsReverse: BWT file " << ( int )j << ": Error opening " << std::endl;
exit ( EXIT_FAILURE );
}
SequenceNumber k = j;
LetterNumber cont = 0; //number of the read symbols
uchar foundSymbol;
//SequenceLength lenCheck=0;
LetterNumber numberRead = 0;
while ( ( k < nText ) && ( vectTriple[k].pileN == currentPile ) )
{
if ( verboseDecode == 1 )
{
std::cerr << "Sequence number " << k << "\n";
std::cerr << "j-1: Q[" << k << "]=" << ( int )vectTriple[k].pileN << " P[" << k << "]=" << ( LetterNumber )vectTriple[k].posN << " N[" << k << "]=" << ( SequenceNumber )vectTriple[k].seqN << "\n";
}
//The symbol for the sequences seqN in F[posN] is the symbol
//symbol = alphaInverse[vectTriple[k].pileN];
//Now, I have to find the new symbol, it is in B[pileN] in position posN and so I can update pileN and posN
//For any character (of different sequences) in the same pile
foundSymbol = '\0';
//cont is the number of symbols already read!
toRead = vectTriple[k].posN - cont;
numberRead = rankManySymbols( *InFileBWT, counters, toRead, &foundSymbol );
if ( verboseDecode == 1 )
{
std::cerr << "toRead " << toRead << " Found Symbol is " << foundSymbol << "\n";
}
checkIfEqual ( toRead, numberRead );
cont += numberRead;
//I have to update the value in vectTriple[k].posN, it must contain the position of the symbol in F
//Symbol is
if ( verboseDecode == 1 )
std::cerr << "vectTriple[k].seqN = " << vectTriple[k].seqN << " Symbol = " << foundSymbol << "\n";
newSymb[vectTriple[k].seqN] = foundSymbol;
//PosN is
vectTriple[k].posN = counters[alpha[( int )foundSymbol]];
//if (verboseDecode == 1)
// std::cerr << "\nCompute PosN\nInit New P["<< k <<"]= " << vectTriple[k].posN <<std::endl;
for ( AlphabetSymbol g = 0 ; g < currentPile; g++ ) //I have to count in each pile g= 0... (currentPile-1)-pile
{
vectTriple[k].posN = vectTriple[k].posN + tableOcc[g][alpha[( int )foundSymbol]];
//if (verboseDecode == 1) {
// std::cerr << "g= " << (int)g << " symbol= " << (int)symbol << " alpha[symbol]= "<< (int)alpha[(int)symbol] <<std::endl;
// std::cerr << "Add New posN[k]=" << vectTriple[k].posN << " tableOcc[g][alpha[(int)symbol]] " << tableOcc[g][alpha[(int)symbol]] <<std::endl;
//}
}
//pileN is
//std::cerr << "\nCompute Pile\n";
vectTriple[k].pileN = alpha[( int )foundSymbol];
if ( verboseDecode == 1 )
std::cerr << "Result: j : Q[q]=" << ( int )vectTriple[k].pileN << " P[q]=" << ( LetterNumber )vectTriple[k].posN << " N[q]=" << ( SequenceNumber )vectTriple[k].seqN << std::endl << std::endl;
k++;
}
fclose( InFileBWT );
j = k;
}
delete [] counters;
if ( verboseDecode == 1 )
{
std::cerr << "NewSymbols " ;
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << newSymb[g] << " ";
}
std::cerr << std::endl;
std::cerr << "Before Sorting" << std::endl;
std::cerr << "Q ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << ( int )vectTriple[g].pileN << " ";
}
std::cerr << std::endl;
std::cerr << "P ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].posN << " ";
}
std::cerr << std::endl;
std::cerr << "N ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].seqN << " ";
}
std::cerr << std::endl;
}
quickSort( vectTriple );
if ( verboseDecode == 1 )
{
std::cerr << "After Sorting" << std::endl;
std::cerr << "Q ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << ( int )vectTriple[g].pileN << " ";
}
std::cerr << std::endl;
std::cerr << "P ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].posN << " ";
}
std::cerr << std::endl;
std::cerr << "N ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].seqN << " ";
}
std::cerr << std::endl;
}
return 1;
}
//It is used to reconstruct m sequences backwards by threading through the FL-mapping and reading the characters off of L.
int BCRexternalBWT::RecoverNsymbolsReverseByVector( char const *file1, char const *fileOutBwt, uchar *newSymb, uchar *newQual )
{
assert( string( fileOutBwt ) == "" && "todo: remove this parameter if it is always null" );
LetterNumber toRead = 0;
LetterNumber *counters = new LetterNumber[sizeAlpha]; //it counts the number of each symbol into the i-Pile-BWT
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
SequenceNumber j = 0;
while ( j < nText )
{
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
AlphabetSymbol currentPile = vectTriple[j].pileN;
//if (verboseDecode == 1)
// std::cerr << "===Current BWT-partial= " << (int)currentPile << "\n";
Filename newfilename( file1, "-B0", currentPile, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "RecoverNsymbolsReverseByVector: BWT file " << ( int )j << ": Error opening " << std::endl;
exit ( EXIT_FAILURE );
}
FILE *InFileBWTQual = 0;
if ( newQual )
{
Filename qualFilename( file1, "-Q0", currentPile );
InFileBWTQual = fopen( qualFilename, "rb" );
if ( InFileBWTQual == NULL )
{
std::cerr << "RecoverNsymbolsReverseByVector: BWT quality file " << ( int )currentPile << ": error opening" << std::endl;
exit ( EXIT_FAILURE );
}
}
LetterNumber currentReadPos = 0;
LetterNumber nextReadPos = 0;
SequenceNumber k = j;
//LetterNumber cont = 0; //number of the read symbols
uchar foundSymbol, foundQual;
//SequenceLength lenCheck=0;
while ( ( k < nText ) && ( vectTriple[k].pileN == currentPile ) )
{
if ( verboseDecode == 1 )
{
std::cerr << "Sequence number " << k << "\n";
std::cerr << "j-1: Q[" << k << "]=" << ( int )vectTriple[k].pileN << " P[" << k << "]=" << ( LetterNumber )vectTriple[k].posN << " N[" << k << "]=" << ( SequenceNumber )vectTriple[k].seqN << "\n";
}
//The symbol for the sequences seqN in F[posN] is the symbol
//symbol = alphaInverse[vectTriple[k].pileN];
//Now, I have to find the new symbol, it is in B[pileN] in position posN and so I can update pileN and posN
//For any character (of different sequences) in the same pile
foundSymbol = '\0';
foundQual = '\0';
//cont is the number of symbols already read!
//toRead = vectTriple[k].posN - cont;
nextReadPos = toRead = vectTriple[k].posN;
// always true: assert( currentReadPos == ftell( InFileBWT ) );
if ( toRead > currentReadPos && toRead - currentReadPos < DIMBLOCK )
{
// The next position is close enough to the last one, so we can avoid calling fseek
toRead -= currentReadPos;
}
else
{
LetterNumber numBlock = 0;
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
//std::cerr << "toRead is " << toRead << "\n";
if ( toRead > 0 )
{
//we need to know how many occurrences of each symbol there are up to the position toRead.
//if toRead > DIMBLOCK, we can use vectorOcc in order to find the occurrences in the blocks preceding the block where the position toRead is.
//First, we need to find the block where the toRead position is.
int result = findBlockToRead( counters, currentPile, &toRead, &numBlock );
checkIfEqual ( result , 1 );
//std::cerr << "numBlock: " << numBlock << " toRead " << toRead << "\n";
}
assert ( toRead <= DIMBLOCK ); //toRead may equal DIMBLOCK, because we may still need to read up to the foundSymbol character
//std::cerr << "Move file to the position " << numBlock*DIMBLOCK << "\n";
fseek ( InFileBWT, numBlock * DIMBLOCK, SEEK_SET );
if ( newQual )
fseek ( InFileBWTQual, numBlock * DIMBLOCK, SEEK_SET );
}
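//At this point counters[] holds the occurrences of each symbol up to the current file position
//(either carried over from the previous sequence or rebuilt from the block sampling by findBlockToRead),
//and toRead is the remaining offset inside the current DIMBLOCK-sized block; the call below scans
//those toRead symbols to complete the rank and to pick up foundSymbol (and its quality, if requested).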
LetterNumber numberRead = rankManySymbolsByVector( *InFileBWT, counters, toRead, &foundSymbol, newQual ? &foundQual : NULL, InFileBWTQual );
checkIfEqual ( toRead , numberRead );
//std::cerr << "foundSymbol " << (int)foundSymbol << "\n";
//cont += numberRead;
/*
std::cerr << "counters after FirstVector:\t";
for (AlphabetSymbol i = 0 ; i < sizeAlpha; i++)
std::cerr << " " << counters[i];
std::cerr << "\n";
*/
//numberRead = rankManySymbols(*InFileBWT, counters, toRead, &foundSymbol);
//std::cerr << "toRead " << toRead << " Found Symbol is " << foundSymbol << "\n";
//assert (toRead == numberRead);
//cont += numberRead;
//I have to update the value in vectTriple[k].posN, it must contain the position of the symbol in F
//Symbol is
if ( verboseDecode == 1 )
std::cerr << "vectTriple[k].seqN = " << vectTriple[k].seqN << " Symbol = " << foundSymbol << "\n";
newSymb[vectTriple[k].seqN] = foundSymbol;
if ( newQual )
{
newQual[vectTriple[k].seqN] = foundQual;
}
//PosN is
vectTriple[k].posN = counters[alpha[( int )foundSymbol]];
//if (verboseDecode == 1)
// std::cerr << "\nCompute PosN\nInit New P["<< k <<"]= " << vectTriple[k].posN <<std::endl;
for ( AlphabetSymbol g = 0 ; g < currentPile; g++ ) //I have to count in each pile g= 0... (currentPile-1)-pile
{
vectTriple[k].posN = vectTriple[k].posN + tableOcc[g][alpha[( int )foundSymbol]];
//if (verboseDecode == 1) {
// std::cerr << "g= " << (int)g << " symbol= " << (int)symbol << " alpha[symbol]= "<< (int)alpha[(int)symbol] <<std::endl;
// std::cerr << "Add New posN[k]=" << vectTriple[k].posN << " tableOcc[g][alpha[(int)symbol]] " << tableOcc[g][alpha[(int)symbol]] <<std::endl;
//}
}
//pileN is
//std::cerr << "\nCompute Pile\n";
vectTriple[k].pileN = alpha[( int )foundSymbol];
if ( verboseDecode == 1 )
std::cerr << "Result: j : Q[q]=" << ( int )vectTriple[k].pileN << " P[q]=" << ( LetterNumber )vectTriple[k].posN << " N[q]=" << ( SequenceNumber )vectTriple[k].seqN << std::endl << std::endl;
currentReadPos = nextReadPos;
k++;
}
fclose( InFileBWT );
if ( InFileBWTQual )
fclose( InFileBWTQual );
j = k;
}
delete [] counters;
if ( verboseDecode == 1 )
{
std::cerr << "NewSymbols " ;
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << ( char )newSymb[g] << " ";
}
std::cerr << std::endl;
std::cerr << "Before Sorting" << std::endl;
std::cerr << "Q ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << ( int )vectTriple[g].pileN << " ";
}
std::cerr << std::endl;
std::cerr << "P ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].posN << " ";
}
std::cerr << std::endl;
std::cerr << "N ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].seqN << " ";
}
std::cerr << std::endl;
}
quickSort( vectTriple );
if ( verboseDecode == 1 )
{
std::cerr << "After Sorting" << std::endl;
std::cerr << "Q ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << ( int )vectTriple[g].pileN << " ";
}
std::cerr << std::endl;
std::cerr << "P ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].posN << " ";
}
std::cerr << std::endl;
std::cerr << "N ";
for ( SequenceNumber g = 0 ; g < nText; g++ )
{
std::cerr << vectTriple[g].seqN << " ";
}
std::cerr << std::endl;
}
return 1;
}
//It is used to reconstruct 1 sequence backwards by threading through the FL-mapping and reading the characters off of L.
int BCRexternalBWT::Recover1symbolReverse( char const *file1, char const *fileOutBwt, uchar *newSymbol, sortElement *tripla )
{
assert( string( fileOutBwt ) == "" && "todo: remove this parameter if it is always null" );
LetterNumber toRead = 0;
LetterNumber *counters = new LetterNumber[sizeAlpha]; //it counts the number of each symbol into the i-Pile-BWT
for ( AlphabetSymbol i = 0 ; i < sizeAlpha; i++ )
counters[i] = 0;
AlphabetSymbol currentPile = tripla->pileN;
//if (verboseDecode == 1)
// std::cerr << "===Current BWT-partial= " << (int)currentPile << "\n";
Filename newfilename( file1, "-B0", currentPile, "" );
FILE *InFileBWT = fopen( newfilename, "rb" );
if ( InFileBWT == NULL )
{
std::cerr << "Recover1symbolReverse: BWT file " << ( int )currentPile << ": Error opening " << std::endl;
exit ( EXIT_FAILURE );
}
LetterNumber cont = 0; //number of the read symbols
uchar foundSymbol;
//if (verboseDecode == 1) {
// std::cerr << "j-1: Q["<<(int)currentPile<<"]=" << (int)tripla->pileN << " P["<<(int)currentPile<<"]=" << (LetterNumber)tripla->posN << " N["<<(int)currentPile<<"]=" << (SequenceNumber)tripla->seqN << "\n";
//}
//The symbol for the sequences seqN in F[posN] is the symbol
//symbol = alphaInverse[vectTriple[k].pileN];
//Now, I have to find the new symbol, it is in B[pileN] in position posN and so I can update pileN and posN
//For any character (of different sequences) in the same pile
foundSymbol = '\0';
//cont is the number of symbols already read!
toRead = tripla->posN - cont;
LetterNumber numberRead = 0;
numberRead = rankManySymbols( *InFileBWT, counters, toRead, &foundSymbol );
//std::cerr << "toRead " << toRead << "Found Symbol is " << foundSymbol << "\n";
checkIfEqual ( toRead , numberRead );
cont += numberRead;
//I have to update the value in tripla.posN, it must contain the position of the symbol in F
//Symbol is
//if (verboseDecode == 1)
// std::cerr << "tripla.seqN = " << tripla->seqN << " Symbol = " << foundSymbol << "\n";
*newSymbol = foundSymbol;
//PosN is
tripla->posN = counters[alpha[( int )foundSymbol]];
//if (verboseDecode == 1)
// std::cerr << "\nCompute PosN\nInit New P= " << (LetterNumber)tripla->posN <<std::endl;
for ( AlphabetSymbol g = 0 ; g < currentPile; g++ ) //I have to count in each pile g= 0... (currentPile-1)-pile
{
tripla->posN = tripla->posN + tableOcc[g][alpha[( int )foundSymbol]];
//if (verboseDecode == 1) {
// std::cerr << "g= " << (int)g << " symbol= " << (int)foundSymbol << " alpha[symbol]= "<< (int)alpha[(int)symbol] <<std::endl;
// std::cerr << "Add New posN[k]=" << vectTriple[k].posN << " tableOcc[g][alpha[(int)foundSymbol]] " << tableOcc[g][alpha[(int)foundSymbol]] <<std::endl;
//}
}
//pileN is
//std::cerr << "\nCompute Pile\n";
tripla->pileN = alpha[( int )foundSymbol];
//if (verboseDecode == 1)
// std::cerr << "Result: j : Q[q]=" << (int)tripla->pileN << " P[q]=" << (LetterNumber)tripla->posN << " N[q]=" << (SequenceNumber)tripla->seqN << std::endl << std::endl;
fclose( InFileBWT );
delete [] counters;
/*
if (verboseDecode==1) {
std::cerr << "NewSymbols " ;
std::cerr << *newSymbol << " ";
std::cerr << std::endl;
std::cerr << "Q ";
std::cerr << (int)tripla->pileN << " ";
std::cerr << std::endl;
std::cerr << "P ";
std::cerr << tripla->posN << " ";
std::cerr << std::endl;
std::cerr << "N ";
std::cerr << tripla->seqN << " ";
std::cerr << std::endl;
}
*/
return true;
}
BCRexternalBWT::~BCRexternalBWT()
{
}
<|start_filename|>src/metagenomics/Krona.hh<|end_filename|>
void printKronaHeader( ofstream &output )
{
output << "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">" << endl;
output << "<!-- adapted from http://krona.sourceforge.net/examples/mg-rast.krona.html -->" << endl;
output << "<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\">" << endl;
output << "<head>" << endl;
output << " <meta charset=\"utf-8\"/>" << endl;
output << " <base href=\"https://s3.amazonaws.com/metabeetl/krona/\" target=\"_blank\"/>" << endl;
output << " <link rel=\"shortcut icon\" href=\"favicon.ico\"/>" << endl;
output << " <script id=\"notfound\">window.onload=function(){document.body.innerHTML=\"Could not get resources from \\\"https://s3.amazonaws.com/metabeetl/krona\\\".\"}</script>" << endl;
output << " <script src=\"krona-2.0.js\"></script>" << endl;
output << " <title>Krona - all</title></head>" << endl;
output << " <body style=\"padding:0;position:relative\">" << endl;
output << " <img id=\"hiddenImage\" src=\"hidden.png\" style=\"display:none\">" << endl;
output << " <noscript>Javascript must be enabled to view this page.</noscript>" << endl;
output << " <div style=\"display:none\">" << endl;
output << " <krona collapse=\"false\" key=\"true\">" << endl;
output << " <attributes magnitude=\"magnitude\">" << endl;
output << " <attribute display=\"Abundance\">magnitude</attribute>" << endl;
output << " <attribute display=\"Rank\">rank</attribute>" << endl;
output << " <attribute display=\"Tax id\">taxid</attribute>" << endl;
output << " </attributes>" << endl;
}
void printKronaDatasets( ofstream &output, vector<int> &wordMinSize )
{
output << " <datasets>" << endl;
for ( unsigned int s ( 0 ); s < wordMinSize.size(); s++ )
output << " <dataset>" << wordMinSize[s] << "</dataset>" << endl;
output << " </datasets>" << endl;
}
void printKronaFooter( ofstream &output )
{
output << "</krona>" << endl;
output << "</div>" << endl;
output << "</body></html>" << endl;
}
void printKronaChildren( TAXMAP::iterator &iter, ofstream &output, int level, TAXMAP &taxInfo, unsigned int wordMinSizeCount )
{
bool dataAvailable = false;
for ( unsigned int s ( 0 ); s < wordMinSizeCount; s++ )
{
int magnitude = iter->second.wordCountPerSize_[s] + iter->second.wordCountPerSizeOfChildren_[s];
if ( magnitude != 0 ) dataAvailable = true;
}
if ( !dataAvailable ) return;
int id = iter->first;
string indent( level, ' ' );
output << indent << "<node name=\"" << iter->second.name_ /* << "(" << id << ")" */ << "\">" << endl;
output << indent << "<magnitude>";
for ( unsigned int s ( 0 ); s < wordMinSizeCount; s++ )
{
uint64_t magnitude = iter->second.wordCountPerSize_[s] + iter->second.wordCountPerSizeOfChildren_[s];
output << "<val>" << magnitude << "</val>";
}
output << "</magnitude>" << endl;
unsigned int taxLevel = iter->second.taxLevel_;
if ( taxLevel < taxLevelSize )
output << indent << "<rank><val>" << taxLevelNames[taxLevel] << "</val></rank>" << endl;
output << indent << "<taxid><val>" << id << "</val></taxid>" << endl;
// cerr << "krona id " << id << " " << level << endl;
for ( TAXMAP::iterator iter = taxInfo.begin() ; iter != taxInfo.end(); ++iter )
{
if ( iter->second.parentId_ == id && id != 0 )
{
printKronaChildren( iter, output, level + 2, taxInfo, wordMinSizeCount );
}
}
output << indent << "</node>" << endl;
}
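//Illustrative usage sketch (hypothetical variable names, not part of this header):
//  ofstream out( "krona.html" );
//  printKronaHeader( out );
//  printKronaDatasets( out, wordMinSize );                          // one <dataset> per word-length cutoff
//  TAXMAP::iterator root = taxInfo.find( 1 );                       // assumes taxon id 1 is the root
//  printKronaChildren( root, out, 4, taxInfo, wordMinSize.size() ); // 4 = initial indentation level
//  printKronaFooter( out );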
<|start_filename|>src/frontends/BeetlExtend.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "config.h"
#include "parameters/ExtendParameters.hh"
#include "search/Extender.hh"
using namespace std;
ExtendParameters params;
void printUsage()
{
params.printUsage();
}
void launchBeetlExtend()
{
Extender extender( params );
extender.run();
}
#include "libzoo/util/ColorText.hh"
int main( const int argc, const char **argv )
{
// Generated using: http://patorjk.com/software/taag/#p=display&f=Soft&t=BEETL%20extend
cout << ",-----. ,------.,------.,--------.,--. ,--. ,--. " << endl;
cout << "| |) /_ | .---'| .---''--. .--'| | ,---. ,--. ,--.,-' '-. ,---. ,--,--, ,-| | " << endl;
cout << "| .-. \\| `--, | `--, | | | | | .-. : \\ `' / '-. .-'| .-. :| \\' .-. | " << endl;
cout << "| '--' /| `---.| `---. | | | '--. \\ --. / /. \\ | | \\ --.| || |\\ `-' | " << endl;
cout << "`------' `------'`------' `--' `-----' `----''--' '--' `--' `----'`--''--' `---'" << endl;
cout << "Version " << PACKAGE_VERSION << endl;
cout << endl;
cout << "Command called:" << endl << " ";
for ( int i = 0; i < argc; ++i )
{
cout << " " << argv[i];
}
cout << "\n" << endl;
if ( !params.parseArgv( argc, argv ) || params["help"] == 1 || !params.chechRequiredParameters() )
{
printUsage();
exit( params["help"] == 0 );
}
// Use default parameter values where needed
params.commitDefaultValues();
// Checking for required parameters
if ( ! ( params["sequence numbers output filename"].isSet() || params["dollar positions output filename"].isSet() ) )
{
cerr << "Error: Missing or incorrect arguments: at least one output (-o or -p) is required\n" << endl;
printUsage();
exit( 1 );
}
// Launch
launchBeetlExtend();
return 0;
}
<|start_filename|>src/countWords/CountWords.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "CountWords.hh"
#include "Timer.hh"
#include "Tools.hh"
#include "config.h"
#include "libzoo/util/Logger.hh"
#include <fcntl.h>
#include <sstream>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#ifdef _OPENMP
# include <omp.h>
#endif //ifdef _OPENMP
using namespace std;
//#define DEBUG__FORCE_WRITE_COUNT
//#define DEBUG__SKIP_ITERATION0
//#define FIRST_CYCLE 14
CountWords::CountWords( bool inputACompressed,
bool inputBCompressed, char whichHandler,
int paramN, int paramK, const vector<string> &setA,
const vector<string> &setB, const vector<string> &setC,
const string &ncbiTax, bool testDB, unsigned int minWordLen, string subset
, const CompareParameters *compareParams
)
: numCycles_( paramK )
, minOcc_( paramN )
, setA_( setA )
, setB_( setB )
, setC_( setC )
, subset_( subset )
, compareParams_( compareParams )
, doPauseBetweenCycles_( compareParams_ ? ( *compareParams_ )["pause between cycles"] : false )
, doesPropagateBkptToSeqNumInSetA_( compareParams_ ? ( *compareParams_ )["generate seq num A"] : false )
, doesPropagateBkptToSeqNumInSetB_( compareParams_ ? ( *compareParams_ )["generate seq num B"] : false )
, noComparisonSkip_( compareParams_ ? ( *compareParams_ )["no comparison skip"] : false )
, bwtInRam_( compareParams_ ? ( *compareParams_ )["BWT in RAM"] : false )
, propagateSequence_( compareParams_ ? ( *compareParams_ )["propagate sequence"] : false )
, outputDirectory_( compareParams_ ? ( *compareParams_ )["output directory"] : string( "BeetlCompareOutput" ) )
, inBwtA_( alphabetSize )
, inBwtB_( alphabetSize )
, fsizeRatio_( 0 )
{
if ( compareParams_ == NULL )
{
// Legacy mode, used only by the OldBeetl executable: We create a CompareParameters structure with default values
compareParams_ = new CompareParameters;
// Three different modes, so a little parsing is needed (tumour-normal is not a legacy mode)
switch ( whichHandler )
{
case 's':
mode_ = BeetlCompareParameters::MODE_SPLICE;
break;
case 'r':
mode_ = BeetlCompareParameters::MODE_REFERENCE;
break;
case 'm':
mode_ = BeetlCompareParameters::MODE_METAGENOMICS;
break;
default:
assert( false && "unexpected compare mode" );
}
}
else
mode_ = static_cast<enum BeetlCompareParameters::Mode>( compareParams_->getValue( "mode" ) );
// set tool flags
inputACompressed_ = inputACompressed;
inputBCompressed_ = inputBCompressed;
// Check that output directory is empty. Create it if necessary.
mkdir( outputDirectory_.c_str(), 0750 );
if ( isDirectoryEmpty( outputDirectory_ ) == 0 )
{
Logger::error() << "Error: Output directory not empty" << endl;
assert( false && "output directory not empty" );
}
/*
Information for metagenomics search.
Only needed if countWords is used as a metagenome classifier
*/
//ncbiInfo_ should be the information about which files in the database have which taxonomy
ncbiInfo_ = ncbiTax;
//flag if the database of the metagenomics information should also be tested
//the testing of the database uses a lot of disk space so it should be handled carefully
testDB_ = testDB;
//only used in the metagenomics part. tests of taxonomy only after a certain suffix length is reached
minWordLen_ = minWordLen;
if ( testDB_ && mode_ != BeetlCompareParameters::MODE_METAGENOMICS )
{
cerr << "WARNING Database test is only available in metagenome Mode" << endl
<< "ABORTING Database test" << endl;
testDB_ = false;
}
}
void CountWords::run( void )
{
Timer timer;
LetterCountEachPile countsPerPileA;
LetterCountEachPile countsPerPileB;
const bool useThreadsForSubsets = true;
int cyclesToSkipComparisonFor = -1;
int previousComparisonDeactivationLength = 0;
#ifdef _OPENMP
// Use nested openmp parallelisation
omp_set_nested( 1 );
#endif
// Metagenomics-specific stuff
if ( mode_ == BeetlCompareParameters::MODE_METAGENOMICS )
initialiseMetagomeMode();
vector<RangeStoreExternal *> rangeStoresA;
vector<RangeStoreExternal *> rangeStoresB;
vector<int> pileToThread;
if ( !useThreadsForSubsets )
{
rangeStoresA.push_back( new RangeStoreExternal( propagateSequence_, "Intervals_setA" ) );
rangeStoresB.push_back( new RangeStoreExternal( propagateSequence_, "Intervals_setB" ) );
pileToThread.resize( alphabetSize, 0 ); // All piles point to thread 0
}
else
{
for ( int i = 0; i < 4; ++i )
{
ostringstream oss;
oss << "Intervals_subset" << i;
rangeStoresA.push_back( new RangeStoreExternal( propagateSequence_, oss.str() + "_setA" ) );
rangeStoresB.push_back( new RangeStoreExternal( propagateSequence_, oss.str() + "_setB" ) );
}
pileToThread.resize( alphabetSize, 0 );
pileToThread[whichPile['A']] = 0; // pile 'A' processed by thread 0
pileToThread[whichPile['C']] = 1; // pile 'C' processed by thread 1
pileToThread[whichPile['G']] = 2; // pile 'G' processed by thread 2
pileToThread[whichPile['T']] = 3; // pile 'T' processed by thread 3
pileToThread[whichPile['N']] = 1; // pile 'N' processed by thread 1
#ifdef USE_EXTRA_CHARACTER_Z
pileToThread[whichPile['Z']] = 2; // pile 'Z' processed by thread 2
#endif
}
// cout << "OMP threadCount: " << omp_get_num_threads() << endl;
//omp_set_num_threads( 2 * alphabetSize );
// We reproduce the same nested parallel distribution here than during the real processing loops, in order to load the BWT data in the most appropriate NUMA nodes
const int subsetThreadCount = rangeStoresA.size();
#if defined(_OPENMP)
omp_set_num_threads( subsetThreadCount * alphabetSize );
#endif
#pragma omp parallel for
for ( int threadNum = 0; threadNum < subsetThreadCount * alphabetSize; ++threadNum )
{
int subsetThreadNum = threadNum / alphabetSize; // for ( int subsetThreadNum = 0; subsetThreadNum < subsetThreadCount; ++subsetThreadNum )
int i = threadNum % alphabetSize; // for ( int i = 0; i < alphabetSize; ++i )
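// i.e. threadNum = subsetThreadNum * alphabetSize + pile, so every (subset thread, pile) pair is handled by its own OpenMP thread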
int datasetAorB;
switch ( subsetThreadNum )
{
// set A goes to same node as subsets 0&1: node 0
// We distribute the read&Count as follows: ACGT=>subset 0's cpus, others=>subset 1's cpus
case 0:
if ( alphabet[i] == 'A' || alphabet[i] == 'C' || alphabet[i] == 'G' || alphabet[i] == 'T' )
datasetAorB = 0;
else
continue;
break;
case 1:
if ( alphabet[i] == 'A' || alphabet[i] == 'C' || alphabet[i] == 'G' || alphabet[i] == 'T' )
continue;
else
datasetAorB = 0;
break;
// set B goes to same node as subsets 2&3: node 1
// We distribute the read&Count as follows: ACGT=>subset 2's cpus, others=>subset 3's cpus
case 2:
if ( alphabet[i] == 'A' || alphabet[i] == 'C' || alphabet[i] == 'G' || alphabet[i] == 'T' )
datasetAorB = 1;
else
continue;
break;
case 3:
if ( alphabet[i] == 'A' || alphabet[i] == 'C' || alphabet[i] == 'G' || alphabet[i] == 'T' )
continue;
else
datasetAorB = 1;
break;
default:
continue;
}
/*
omp_set_num_threads( alphabetSize );
#pragma omp parallel for
for ( int i = 0; i < alphabetSize; ++i )
*/
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
int pid = -1;
int numThreads = -1;
int processor = -1;
readProcSelfStat( pid, numThreads, processor );
#pragma omp critical (IO)
cerr << "Reading BWT: subsetThreadNum=" << subsetThreadNum << " i=" << i << " pid=" << pid << " numThreads=" << numThreads << " processor=" << processor << endl;
}
if ( !isDistributedProcessResponsibleForPile( i ) )
continue;
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
#pragma omp critical (IO)
{
Logger::out() << "i=" << i << ", alphabet[i]=" << alphabet[i] << ": ";
if ( datasetAorB == 0 )
Logger::out() << "setA[i]=" << setA_[i] << endl;
else
Logger::out() << "setB[i]=" << setB_[i] << endl;
}
}
if ( datasetAorB == 0 )
{
inBwtA_[i] = instantiateBwtPileReader( setA_[i], compareParams_->getStringValue( "use shm" ), bwtInRam_ );
#ifndef DEBUG__SKIP_ITERATION0
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
int pid = -1;
int numThreads = -1;
int processor = -1;
readProcSelfStat( pid, numThreads, processor );
#pragma omp critical (IO)
Logger::out() << "ReadAndCount set=A i=" << i << " pid=" << pid << " numThreads=" << numThreads << " processor=" << processor << endl;
}
inBwtA_[i]->readAndCount( countsPerPileA[i] );
#endif //ifndef DEBUG__SKIP_ITERATION0
}
else
{
inBwtB_[i] = instantiateBwtPileReader( setB_[i], compareParams_->getStringValue( "use shm" ), bwtInRam_ );
#ifndef DEBUG__SKIP_ITERATION0
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
int pid = -1;
int numThreads = -1;
int processor = -1;
readProcSelfStat( pid, numThreads, processor );
#pragma omp critical (IO)
Logger::out() << "ReadAndCount set=B i=" << i << " pid=" << pid << " numThreads=" << numThreads << " processor=" << processor << endl;
}
inBwtB_[i]->readAndCount( countsPerPileB[i] );
#endif //ifndef DEBUG__SKIP_ITERATION0
}
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
#pragma omp critical (IO)
{
if ( datasetAorB == 0 )
Logger::out() << "countsPerPileA[i=" << i << "]: " << countsPerPileA[i] << endl;
else
Logger::out() << "countsPerPileB[i=" << i << "]: " << countsPerPileB[i] << endl;
}
}
#ifndef DEBUG__SKIP_ITERATION0
// Share counts via files if distributed processing
#ifndef DEBUG__FORCE_WRITE_COUNT
if ( ( *compareParams_ )["4-way distributed"].isSet() )
#endif
{
if ( datasetAorB == 0 )
{
stringstream ss;
ss << "counts.A." << i;
ofstream ofs( ss.str() );
ofs << countsPerPileA[i] << endl;
}
else
{
stringstream ss;
ss << "counts.B." << i;
ofstream ofs( ss.str() );
ofs << countsPerPileB[i] << endl;
}
}
#endif //ifndef DEBUG__SKIP_ITERATION0
}
}
// Free bwt-B00, especially useful when keeping BWT in RAM
delete inBwtA_[0];
inBwtA_[0] = NULL;
delete inBwtB_[0];
inBwtB_[0] = NULL;
cerr << "Finished initialisation cycle, usage: " << timer << endl;
if ( doPauseBetweenCycles_ )
pauseBetweenCycles();
cerr << "Starting iteration " << 0 << ", time now: " << timer.timeNow();
cerr << "Starting iteration " << 0 << ", usage: " << timer << endl;
// Gather counts via files if distributed processing
#ifndef DEBUG__SKIP_ITERATION0
if ( ( *compareParams_ )["4-way distributed"].isSet() )
#endif
{
for ( int i = 0; i < alphabetSize; i++ )
{
{
stringstream ss;
ss << "counts.A." << i;
ifstream ifs( ss.str() );
ifs >> countsPerPileA[i];
}
{
stringstream ss;
ss << "counts.B." << i;
ifstream ifs( ss.str() );
ifs >> countsPerPileB[i];
}
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
#pragma omp critical (IO)
{
Logger::out() << "countsPerPileA[i=" << i << "]: " << countsPerPileA[i] << endl;
Logger::out() << "countsPerPileB[i=" << i << "]: " << countsPerPileB[i] << endl;
}
}
}
}
// Calculating fsizeRatio
{
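//Pile 0 of each BWT holds exactly one symbol per sequence (the character preceding each terminator),
//so fsizeRatio_ approximates the ratio of sequence counts between set A and set B; it is later passed
//to IntervalHandlerTumourNormal, presumably to normalise occurrence counts between the two sets.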
LetterNumber pile0LengthA = 0;
LetterNumber pile0LengthB = 0;
for ( int i( 0 ); i < alphabetSize; i++ )
{
pile0LengthA += countsPerPileA[0].count_[i];
pile0LengthB += countsPerPileB[0].count_[i];
}
fsizeRatio_ = double( pile0LengthA ) / double( pile0LengthB );
}
countsCumulativeA_ = countsPerPileA;
countsCumulativeB_ = countsPerPileB;
for ( int i( 1 ); i < alphabetSize; i++ )
{
countsCumulativeA_[i] += countsCumulativeA_[i - 1];
countsCumulativeB_[i] += countsCumulativeB_[i - 1];
}
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
Logger::out() << "cumulative counts A: " << endl;
countsCumulativeA_.print();
Logger::out() << "cumulative counts B: " << endl;
countsCumulativeB_.print();
}
/*
#pragma omp parallel for
for ( int subsetNum = 0; subsetNum < 4; subsetNum++ )
{
string subsetValues[] = { "A", "C", "G", "T" };
string threadSubset = subsetValues[subsetNum];
*/
#ifndef DEBUG__SKIP_ITERATION0
const int dontKnowIndex( whichPile[( int )dontKnowChar] );
// sort out first iter
string currentWord = "xx";
for ( int i( 1 ); i < alphabetSize; ++i )
{
const int subsetThreadNum = pileToThread[i];
if ( propagateSequence_ )
currentWord[1] = alphabet[i];
for ( int j( 1 ); j < alphabetSize; ++j )
{
Logger_if( LOG_FOR_DEBUGGING )
{
#pragma omp critical (IO)
{
Logger::out() << i << " " << j << " " << matchFlag << endl;
Logger::out() << ( countsCumulativeA_[i - 1].count_[j]
| ( matchFlag * ( countsPerPileB[i].count_[j] != 0 ) ) )
<< " " << ( countsCumulativeB_[i - 1].count_[j]
| ( matchFlag * ( countsPerPileA[i].count_[j] != 0 ) ) ) << endl;
}
}
if ( propagateSequence_ )
currentWord[0] = alphabet[j];
/*
if (!subset.empty() && subset[subset.size()-1] != alphabet[i])
{
Logger::out() << " skipped" << endl;
continue;
}
*/
if ( ( i != dontKnowIndex ) && ( j != dontKnowIndex ) ) // don't process ranges with N in them
{
if ( countsPerPileA[i].count_[j] != 0 )
rangeStoresA[subsetThreadNum]->addRange( Range( currentWord,
( countsCumulativeA_[i - 1].count_[j]
| ( matchFlag * ( LetterNumber )( countsPerPileB[i].count_[j] != 0 ) ) ),
countsPerPileA[i].count_[j], false )
, j, i, subset_, 1 );
if ( countsPerPileB[i].count_[j] != 0 )
rangeStoresB[subsetThreadNum]->addRange( Range( currentWord,
( countsCumulativeB_[i - 1].count_[j]
| ( matchFlag * ( countsPerPileA[i].count_[j] != 0 ) ) ),
countsPerPileB[i].count_[j], false )
, j, i, subset_, 1 );
} // ~if
} // ~for j
} // ~for i
#endif //ifndef DEBUG__SKIP_ITERATION0
// Get ready for next cycle (flush current files and delete next cycle's output files)
for ( auto rangeStore : rangeStoresA )
rangeStore->clear();
for ( auto rangeStore : rangeStoresB )
rangeStore->clear();
cerr << "Finished cycle 0, usage: " << timer << endl;
if ( doPauseBetweenCycles_ )
pauseBetweenCycles();
#ifdef DEBUG__SKIP_ITERATION0
// omp_set_num_threads( 1 ); // for debugging
#endif
#ifdef FIRST_CYCLE
const int firstCycle = FIRST_CYCLE;
#else
const int firstCycle = 1;
#endif
for ( int cycle = firstCycle; cycle <= numCycles_; ++cycle )
{
cerr << "Starting iteration " << cycle << ", time now: " << timer.timeNow();
cerr << "Starting iteration " << cycle << ", usage: " << timer << endl;
numRanges_ = 0;
numSingletonRanges_ = 0;
numNotSkippedA_ = 0;
numNotSkippedB_ = 0;
numSkippedA_ = 0;
numSkippedB_ = 0;
/*
#pragma omp parallel for
for ( int subsetThreadNum = 0; subsetThreadNum < subsetThreadCount; ++subsetThreadNum )
{
rangeStoresA[subsetThreadNum]->setCycleNum( cycle );
rangeStoresB[subsetThreadNum]->setCycleNum( cycle );
CountWords_parallelSubsetThread(
subsetThreadNum
, cycle
, *( rangeStoresA[subsetThreadNum] )
, *( rangeStoresB[subsetThreadNum] )
);
}
*/
// Sequential initialisation before the big parallel loop
for ( int subsetThreadNum = 0; subsetThreadNum < subsetThreadCount; ++subsetThreadNum )
{
rangeStoresA[subsetThreadNum]->setCycleNum( cycle );
rangeStoresB[subsetThreadNum]->setCycleNum( cycle );
}
#pragma omp parallel for
for ( int threadNum = 0; threadNum < subsetThreadCount * alphabetSize; ++threadNum )
{
const int subsetThreadNum = threadNum / alphabetSize; // for ( int subsetThreadNum = 0; subsetThreadNum < subsetThreadCount; ++subsetThreadNum )
//const int i = threadNum % alphabetSize; // for ( int i = 0; i < alphabetSize; ++i )
CountWords_parallelSubsetThread(
threadNum
, cycle
, *( rangeStoresA[subsetThreadNum] )
, *( rangeStoresB[subsetThreadNum] )
);
}
cerr << "Finished cycle " << cycle << ": ranges=" << numRanges_
<< " singletons=" << numSingletonRanges_
<< " usage: " << timer << endl;
cerr
<< " skippedA=" << numSkippedA_
<< " skippedB=" << numSkippedB_
<< " notSkippedA=" << numNotSkippedA_
<< " notSkippedB=" << numNotSkippedB_
<< endl;
if ( ( *compareParams_ )["no comparison skip"] == false )
{
if ( cyclesToSkipComparisonFor == -1 )
{
// if #intervals stopped increasing [by more than 2%], then it's time to start disabling comparisons
if ( numNotSkippedA_ < numRanges_ * 1.02 )
{
double ratio = numSkippedA_ / ( double )numNotSkippedA_;
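// The skipped/notSkipped ratio estimates how much work the comparison-skip optimisation is saving:
// a high ratio (skipping pays off) keeps it active (0 cycles of deactivation above 0.42), while
// progressively lower ratios disable the skip bookkeeping for progressively more cycles (up to 100).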
if ( ratio > 0.42 )
cyclesToSkipComparisonFor = 0;
else if ( ratio > 0.32 )
cyclesToSkipComparisonFor = 1;
else if ( ratio > 0.25 ) //(0.3*3/(0.7*3+0.49*2+0.34))
cyclesToSkipComparisonFor = 2;
else if ( ratio > 0.2 )
cyclesToSkipComparisonFor = 4;
else if ( ratio > 0.1 )
cyclesToSkipComparisonFor = 6;
else if ( ratio > 0.01 )
cyclesToSkipComparisonFor = 10;
else if ( ratio > 0.001 )
cyclesToSkipComparisonFor = 20;
else
cyclesToSkipComparisonFor = 100;
previousComparisonDeactivationLength = cyclesToSkipComparisonFor;
}
}
else
{
if ( noComparisonSkip_ )
{
// it was deactivated => time to reactivate?
--cyclesToSkipComparisonFor;
}
else
{
// it was active => time to deactivate?
double ratio = numSkippedA_ / ( double )numNotSkippedA_ / previousComparisonDeactivationLength;
if ( ratio > 0.42 )
cyclesToSkipComparisonFor = 0;
else if ( ratio > 0.32 )
cyclesToSkipComparisonFor = 1;
else if ( ratio > 0.25 ) //(0.3*3/(0.7*3+0.49*2+0.34))
cyclesToSkipComparisonFor = 2;
else if ( ratio > 0.2 )
cyclesToSkipComparisonFor = 4;
else if ( ratio > 0.1 )
cyclesToSkipComparisonFor = 6;
else if ( ratio > 0.01 )
cyclesToSkipComparisonFor = 10;
else if ( ratio > 0.001 )
cyclesToSkipComparisonFor = 20;
else
cyclesToSkipComparisonFor = 100;
previousComparisonDeactivationLength = cyclesToSkipComparisonFor;
}
}
if ( cyclesToSkipComparisonFor == 0 && numNotSkippedA_ <= numSingletonRanges_ )
{
// If the number of interval decreases, don't reactivate comparison skips
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "Interval count decreasing. ";
cyclesToSkipComparisonFor = 1;
}
if ( cyclesToSkipComparisonFor > 0 )
{
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "Disabling comparison skips for " << cyclesToSkipComparisonFor << " iterations" << endl;
noComparisonSkip_ = true;
}
else
{
Logger_if( LOG_SHOW_IF_VERBOSE )
{
if ( noComparisonSkip_ )
Logger::out() << "Re-activating comparison skips" << endl;
else
Logger::out() << "Keeping comparison skips active" << endl;
}
noComparisonSkip_ = false;
}
}
if ( doPauseBetweenCycles_ )
pauseBetweenCycles();
for ( auto rangeStore : rangeStoresA )
rangeStore->clear( false );
for ( auto rangeStore : rangeStoresB )
rangeStore->clear( false );
if ( numRanges_ == 0 ) break;
} // ~for c
// Clean up
for ( auto rangeStore : rangeStoresA )
delete rangeStore;
for ( auto rangeStore : rangeStoresB )
delete rangeStore;
rangeStoresA.clear();
rangeStoresB.clear();
for ( auto & bwtReader : inBwtA_ )
{
delete bwtReader;
bwtReader = 0;
}
for ( auto & bwtReader : inBwtB_ )
{
delete bwtReader;
bwtReader = 0;
}
// Metagenomics-specific stuff
if ( mode_ == BeetlCompareParameters::MODE_METAGENOMICS )
releaseMetagomeMode();
} // countWords::run()
void CountWords::CountWords_parallelSubsetThread(
const int threadNum
, const int cycle
, RangeStoreExternal &rangeStoreA
, RangeStoreExternal &rangeStoreB
)
{
string currentWord( cycle + 2, 'x' );
int subsetThreadNum = threadNum / alphabetSize; // for ( int subsetThreadNum = 0; subsetThreadNum < subsetThreadCount; ++subsetThreadNum )
int i = threadNum % alphabetSize; // for ( int i = 0; i < alphabetSize; ++i )
/*
{
int pid = -1;
int numThreads = -1;
int processor = -1;
readProcSelfStat( pid, numThreads, processor );
#pragma omp critical (IO)
cerr << "CountWords_parallelSubsetThread header cycle=" << cycle << " subsetThreadNum=" << subsetThreadNum << " pid=" << pid << " numThreads=" << numThreads << " processor=" << processor << endl;
usleep( 100 );
}
omp_set_num_threads( alphabetSize );
#pragma omp parallel for
for ( int i = 0; i < alphabetSize; ++i )
*/
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE )
{
int pid = -1;
int numThreads = -1;
int processor = -1;
readProcSelfStat( pid, numThreads, processor );
#pragma omp critical (IO)
Logger::out() << "CountWords_parallelSubsetThread cycle=" << cycle << " subsetThreadNum=" << subsetThreadNum << " i=" << i << " pid=" << pid << " numThreads=" << numThreads << " processor=" << processor << endl;
}
if ( i == 0 ) return;
if ( !isDistributedProcessResponsibleForPile( i ) )
return;
BwtReaderBase *inBwtA = inBwtA_[i]->clone();
BwtReaderBase *inBwtB = inBwtB_[i]->clone();
inBwtA->rewindFile();
inBwtB->rewindFile();
LetterCount countsSoFarA, countsSoFarB;
LetterNumber currentPosA, currentPosB;
RangeStoreExternal parallel_rA( rangeStoreA );
RangeStoreExternal parallel_rB( rangeStoreB );
inBwtA->rewindFile();
inBwtB->rewindFile();
currentPosA = 0;
currentPosB = 0;
countsSoFarA.clear();
countsSoFarA += countsCumulativeA_[i - 1];
countsSoFarB.clear();
countsSoFarB += countsCumulativeB_[i - 1];
for ( int j( 1 ); j < alphabetSize; ++j )
{
Logger_if( LOG_FOR_DEBUGGING ) Logger::out() << "positions - A: " << currentPosA << " B: " << currentPosB << endl;
parallel_rA.setPortion( i, j );
parallel_rB.setPortion( i, j );
TwoBwtBackTracker backTracker( inBwtA, inBwtB
, currentPosA, currentPosB
, parallel_rA, parallel_rB, countsSoFarA, countsSoFarB
, minOcc_, numCycles_, subset_, cycle + 1
, doesPropagateBkptToSeqNumInSetA_
, doesPropagateBkptToSeqNumInSetB_
, noComparisonSkip_
, propagateSequence_
);
switch ( mode_ )
{
case BeetlCompareParameters::MODE_REFERENCE:
{
IntervalHandlerReference intervalHandler( minOcc_ );
backTracker.process( i, currentWord, intervalHandler );
}
break;
case BeetlCompareParameters::MODE_METAGENOMICS:
{
IntervalHandlerMetagenome intervalHandler( minOcc_, setC_, mmappedCFiles_, fileNumToTaxIds_, testDB_, minWordLen_, numCycles_ );
intervalHandler.createOutputFile( subsetThreadNum, i, j, cycle + 1, outputDirectory_ );
backTracker.process( i, currentWord, intervalHandler );
}
break;
case BeetlCompareParameters::MODE_SPLICE:
{
IntervalHandlerSplice intervalHandler( minOcc_ );
backTracker.process( i, currentWord, intervalHandler );
}
break;
case BeetlCompareParameters::MODE_TUMOUR_NORMAL:
{
IntervalHandlerTumourNormal intervalHandler( minOcc_, fsizeRatio_ );
intervalHandler.createOutputFile( subsetThreadNum, i, j, cycle + 1, outputDirectory_ );
backTracker.process( i, currentWord, intervalHandler );
}
break;
default:
assert( false && "Unexpected mode" );
}
#pragma omp atomic
numRanges_ += backTracker.numRanges_;
#pragma omp atomic
numSingletonRanges_ += backTracker.numSingletonRanges_;
#pragma omp atomic
numNotSkippedA_ += backTracker.numNotSkippedA_;
#pragma omp atomic
numNotSkippedB_ += backTracker.numNotSkippedB_;
#pragma omp atomic
numSkippedA_ += backTracker.numSkippedA_;
#pragma omp atomic
numSkippedB_ += backTracker.numSkippedB_;
parallel_rA.deleteInputPortion( i, j );
parallel_rB.deleteInputPortion( i, j );
} // ~for j
//cerr << "Done i " << i <<endl;
parallel_rA.clear( false );
parallel_rB.clear( false );
delete inBwtA;
delete inBwtB;
} // ~for i
}
/*
@taxIdNames , file containing the taxonomic information about the database
loads the taxonomic information so it is known which file number corresponds to which taxa
fills up the vector fileNumToTaxIds;
This should be called before the comparison starts
*/
void CountWords::loadFileNumToTaxIds( const string &taxIdNames )
{
ifstream taxas( taxIdNames.c_str(), ios::in );
assert( taxas.good() && "Error opening taxonomy file" );
string line;
//each line should contain the fileNumber followed by the taxIds split up with one space character
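//Hypothetical example line (assuming the seven levels listed in the warning below, superkingdom..strain):
//  3 2 1224 1236 91347 543 561 562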
while ( getline( taxas, line ) )
{
if ( line.empty() || line[0] == '#' )
continue;
string originalLine( line );
if ( !line.empty() )
{
vector<int> taxIDs;
unsigned int fileNum = atoi( line.substr( 0, line.find( " " ) ).c_str() );
line = line.substr( line.find( " " ) + 1, line.length() );
while ( line.find( " " ) != string::npos )
{
taxIDs.push_back( atoi( line.substr( 0, line.find( " " ) ).c_str() ) );
line = line.substr( line.find( " " ) + 1, line.length() );
}
taxIDs.push_back( atoi( line.c_str() ) );
//test if all TaxIds were found
if ( taxIDs.size() < taxLevelSize )
cerr << "Tax Ids don't have enough taxonomic Information. Only " << taxIDs.size() << " could be found (" << originalLine << ")" << endl
<< "Will add unknown taxa until size is right" << endl;
else if ( taxIDs.size() > taxLevelSize )
cerr << "Tax Ids have too much taxonomic information (" << originalLine << ")" << endl
<< "Please note, that the taxonomic information about one file should be shown as: " << endl
<< "FileNumber Superkingdom Phylum Order Family Genus Species Strain " << endl;
taxIDs.resize( taxLevelSize );
fileNumToTaxIds_.push_back( taxIDs );
unsigned int test = fileNum + 1;
if ( test != fileNumToTaxIds_.size() )
cout << "Wrong filenumber " << fileNum << " " << fileNumToTaxIds_.size() << endl;
}
}
//cout << " fineNumToTaxIds " << fileNumToTaxIds.size() <<endl;
}
void CountWords::initialiseMetagomeMode()
{
loadFileNumToTaxIds( ncbiInfo_ );
if ( ( *compareParams_ )["mmap C files"] == true )
{
mmappedCFiles_.resize( alphabetSize );
for ( int i = 0; i < alphabetSize; i++ )
{
int fd = open( setC_[i].c_str(), O_RDONLY );
if ( fd == -1 )
{
#pragma omp critical (IO)
{
cout << "ERROR: Could not open File \"" << setC_[i] << "\"" << endl;
}
}
assert( sizeof( off_t ) >= 8 && "64 bits architecture required to hold large files" );
off_t fileSize = lseek( fd, 0, SEEK_END );
mmappedCFiles_[i] = ( char * )mmap( NULL, fileSize, PROT_READ, MAP_SHARED , fd, 0 );
if ( mmappedCFiles_[i] == ( void * ) - 1 )
{
perror ( "Error mmap " );
mmappedCFiles_[i] = NULL;
}
}
}
}
void CountWords::releaseMetagomeMode()
{
for ( unsigned int i = 0; i < alphabetSize; i++ )
{
if ( mmappedCFiles_.size() > i && mmappedCFiles_[i] != NULL )
{
int fd = open( setC_[i].c_str(), O_RDONLY );
off_t fileSize = lseek( fd, 0, SEEK_END );
munmap( mmappedCFiles_[i], fileSize );
}
}
}
bool CountWords::isDistributedProcessResponsibleForPile( const int pile )
{
if ( ( *compareParams_ )["4-way distributed"].isSet() )
{
// Delete own barrier file
// Wait until all barrier files are deleted
int processNum = ( *compareParams_ )["4-way distributed"];
switch ( processNum )
{
case 0: // A
if ( pile != 1 ) return false;
break;
case 1: // C+[^ACGNT]
if ( pile != 2 && pile != 0 && pile <= 5 ) return false;
break;
case 2: // G+N
if ( pile != 3 && pile != 4 ) return false;
break;
case 3: // T
if ( pile != 5 ) return false;
break;
}
#pragma omp critical (IO)
cout << "process num = " << processNum << " processing pile " << pile << endl;
}
return true;
}
<|start_filename|>src/metagenomics/findCertainTaxLevel.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "metaShared.hh"
#include "Krona.hh"
#include "../shared/Tools.hh"
#include<cassert>
#include<cstdlib>
#include<cstring>
#include<fstream>
#include<iostream>
#include<map>
#include<sstream>
#include<stdint.h>
#include<string>
#include<vector>
using namespace std;
struct NCBINode
{
string name_;
int id_;
int parentId_;
int taxLevel_;
int parentLevel_;
bool inUse_;
int magnitude_;
NCBINode()
: id_( 0 )
, parentId_( 0 )
, taxLevel_( 0 )
, parentLevel_( 0 )
, inUse_( 0 )
, magnitude_( 0 )
{}
};
struct SequenceInformation
{
unsigned short fileNum_;
string tag_;
string fileName_;
uint64_t giNumber_;
string giString_;
SequenceInformation() : fileNum_( 0 ), giNumber_( 0 ) {}
};
vector< SequenceInformation > seqs;
vector<SequenceInformation> loadSequenceInfo( string headerFile, string fileCounter );
void findNCBITaxa( string nodesDMP, string mergedDMP, string fileNumFile );
void printAncestors( const int id, const SequenceInformation &seqItem );
map<int, int> giToTaxId;
map<int, string> idToScientificName;
map<int, NCBINode> idToNode;
map<int, vector<NCBINode> > parentToChildren;
int extraTaxId = -2;
map< string, int > extraTaxNameToIdMap;
void parseNamesFile( const string &namesDMP )
{
ifstream ncbiNames( namesDMP );
string line;
vector<string> lineVector;
while ( getline( ncbiNames, line ) )
{
lineVector = splitString( line , "\t|\t" );
string name = lineVector[1];
// cout << "ncbiName >"<< name <<"<" <<endl;
int id = atoi( lineVector[0].c_str() );
// nameToId[name] = id;
if ( line.find( "scientific" ) != string::npos )
idToScientificName[id] = name;
}
}
void parseGiTotaxId( const string &giToTaxIdFName, const vector<int> &giNumbers )
{
map<int, bool> giNumbersPresent;
for ( int giNum : giNumbers )
giNumbersPresent[giNum] = true;
ifstream giToTaxFile( giToTaxIdFName );
string line;
clog << "GI numbers count: " << giNumbers.size() << endl;
while ( getline( giToTaxFile, line ) )
{
istringstream iss( line );
int giNumber = -1;// = atoi( lineVector[0].c_str() );
int taxId = -1; //atoi( lineVector[1].c_str() );
if ( iss >> giNumber >> taxId )
{
assert( taxId != -1 );
if ( giNumbersPresent.find( giNumber ) != giNumbersPresent.end() )
{
giToTaxId[giNumber] = taxId;
clog << "Found gi tax number->id (" << giToTaxId.size() << "/" << giNumbers.size() << "): " << giNumber << " " << taxId << endl;
}
}
}
clog << " got " << giToTaxId.size() << " GenBank Ids" << endl;
}
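//Derives a pseudo scientific name from a FASTA header tag; e.g. for the header format shown in loadSequenceInfo,
//"gi|15604717|ref|NC_000117.1| Chlamydia trachomatis D/UW-3/CX, complete genome" would yield "Chlamydia trachomatis D/UW-3/CX".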
string getPseudoScientificNameFromTag( string tag )
{
vector<string> tagSplit = splitString( tag, "|" );
string name = tagSplit[4];
size_t pos = name.find( ',' );
if ( pos != string::npos )
name = name.substr( 0, pos );
pos = name.find( " complete" );
if ( pos != string::npos )
name = name.substr( 0, pos );
pos = name.find( " main" );
if ( pos != string::npos )
name = name.substr( 0, pos );
pos = name.find( " chromosome" );
if ( pos != string::npos )
name = name.substr( 0, pos );
pos = name.find( " genome" );
if ( pos != string::npos )
name = name.substr( 0, pos );
pos = name.find( " strain " );
if ( pos != string::npos )
name = name.erase( pos, 7 );
if ( name[0] == ' ' )
name = name.substr( 1 );
return name;
}
int main( int argc, char **argv )
{
string namesFile;
string nodesFile;
string mergedFile;
string headerFile;
string output;
string fileCountFile;
string giToTaxIdFile;
if ( argc < 5 )
cout << "-nA namesfile, -nO nodesFile, [-nM mergedFile,] -nG giToTaxIdFile, -h headerFile, -f fileCountFile " << endl;
for ( int i( 0 ) ; i < argc ; i++ )
{
if ( strcmp( argv[i], "-nA" ) == 0 )
namesFile = argv[i + 1];
else if ( strcmp( argv[i] , "-nO" ) == 0 )
nodesFile = argv[i + 1];
else if ( strcmp( argv[i] , "-nM" ) == 0 )
mergedFile = argv[i + 1];
else if ( strcmp( argv[i], "-h" ) == 0 )
headerFile = argv[i + 1];
else if ( strcmp( argv[i], "-f" ) == 0 )
fileCountFile = argv[i + 1];
else if ( strcmp( argv[i], "-nG" ) == 0 )
giToTaxIdFile = argv[i + 1];
}
seqs = loadSequenceInfo( headerFile, fileCountFile );
vector<int> gis;
gis.push_back( seqs[0].giNumber_ );
for ( unsigned int i( 1 ); i < seqs.size(); i++ )
{
bool found = false;
for ( unsigned int j( 0 ); j < gis.size(); j++ )
{
if ( gis[j] == ( int )seqs[i].giNumber_ )
{
found = true;
break;
}
}
if ( ! found )
{
gis.push_back( seqs[i].giNumber_ );
// clog << seqs[i].giNumber_ << "|" ;
}
}
// clog << endl;
clog << "Gi Id size " << gis.size() << endl;
parseGiTotaxId( giToTaxIdFile, gis );
parseNamesFile( namesFile );
vector<int> extraGis;
for ( unsigned int i ( 0 ); i < seqs.size() ; i++ )
{
if ( giToTaxId.find( ( int )seqs[i].giNumber_ ) == giToTaxId.end() )
{
clog << "# No taxId for giNumber " << seqs[i].giNumber_ << " (seq " << i << ")" << endl;
// This is due to an incorrect gi number in the NCBI database. Trying to find a correct one by name
string name = getPseudoScientificNameFromTag( seqs[i].tag_ );
clog << " Trying identification by name: \"" << seqs[i].tag_ << "\" / \"" << name << "\"" << endl;
for ( auto & item : idToScientificName )
{
if ( !strcasecmp( name.c_str(), item.second.c_str() ) )
{
int taxId = item.first;
clog << " Found taxId matching by name: " << taxId << endl;
giToTaxId[seqs[i].giNumber_] = taxId;
clog << "Faking gi tax number->id (" << giToTaxId.size() << "/" << gis.size() << "): " << seqs[i].giNumber_ << " " << taxId << endl;
}
}
}
}
clog << "got " << giToTaxId.size() << " GI IDs " << endl;
findNCBITaxa( nodesFile, mergedFile, output );
// output newly created ids
{
ofstream ofs( "metaBeetlExtraNames.dmp" );
for ( const auto & item : extraTaxNameToIdMap )
ofs << item.second << "\t|\t" << item.first << "\t|\t" << "\t|\t" << "metaBeetl pseudo-scientific name" << "\t|\t" << endl;
}
return 0;
}
vector<SequenceInformation> loadSequenceInfo( string headerFile, string fileCounter )
{
vector<SequenceInformation> seqsInfo;
ifstream fileCount( fileCounter );
string line;
string fName;
while ( getline( fileCount, line ) )
{
SequenceInformation oneSeq;
unsigned short fileCNum = ( unsigned short ) strtoul( line.substr( 0, line.find( "," ) ).c_str(), NULL, 0 );
oneSeq.fileNum_ = fileCNum;
fName = line.substr( line.find( "," ) + 1 );
oneSeq.fileName_ = fName;
seqsInfo.push_back( oneSeq );
}
ifstream head( headerFile );
int tagCount( 0 );
while ( getline( head, line ) )
{
string fileCount = line.substr( 0, line.find( "," ) );
string tag = line.substr( line.find( "," ) + 1 );
string f = "G_" + fileCount;
string fr = "G_" + fileCount + "_rev";
int revCount( 0 );
int foundFile( 0 );
for ( unsigned int i ( 0 ); i < seqsInfo.size(); i++ )
{
// cout << seqsInfo[i].fileName_ << " f " << f << " fr " <<fr <<endl;
if ( seqsInfo[i].fileName_.compare( f ) == 0 || seqsInfo[i].fileName_.compare( fr ) == 0 )
{
revCount++;
// cout << seqsInfo[i].fileName_ << " f " << f << " fr " <<fr <<endl;
if ( i != seqsInfo[i].fileNum_ )
cerr << "wrong file number " << endl;
//1,gi|15604717|ref|NC_000117.1| Chlamydia trachomatis D/UW-3/CX, complete genome
seqsInfo[i].tag_ = tag;
vector<string> tagSplit = splitString( line, "|" );
seqsInfo[i].giNumber_ = atol( tagSplit[1].c_str() );
seqsInfo[i].giString_ = tagSplit[1];
tagCount++;
++foundFile;
}
}
if ( foundFile != 2 )
{
cerr << "Found " << foundFile << " File for " << line << endl;
cerr << "file should be " << f << " or " << fr << endl;
}
}
clog << "TagCount " << tagCount << endl;
clog << "Got Sequenceinformation " << seqsInfo.size() << endl;
return seqsInfo;
}
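// For every GI->taxId mapping, walks up the taxonomy from that taxId to the root, flagging each
// visited node as in use and incrementing its magnitude (later reported in the Krona output).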
void markUsefulNodes()
{
for ( auto & item : giToTaxId )
{
int taxId = item.second;
while ( taxId != idToNode[taxId].parentId_ )
{
idToNode[taxId].inUse_ = true;
++idToNode[taxId].magnitude_;
taxId = idToNode[taxId].parentId_;
}
++idToNode[taxId].magnitude_;
idToNode[taxId].inUse_ = true;
}
}
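// Recursively emits this node and its in-use children as nested Krona XML <node> elements,
// reporting the node name, magnitude and rank.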
void printFullTree_recursive( ofstream &output, const NCBINode &node, const double magnitude = 1.0, const int depth = 0 )
{
string indent( depth, ' ' );
// output << indent << "{ " << id << endl;
vector<NCBINode> &children = parentToChildren[node.id_];
// krona output
output << indent << "<node name=\"" << node.name_ << '(' << node.id_ << ")\">";
output << indent << "<magnitude><val>" << node.magnitude_ << "</val></magnitude>";
output << indent << "<rank><val>" << ( depth - 2 ) << '/' << node.taxLevel_ << "</val></rank>" << endl;
// count useful children
int usefulChildrenCount = 0;
for ( auto & childNode : children )
{
        if ( childNode.inUse_ )
++usefulChildrenCount;
}
for ( auto & childNode : children )
{
if ( childNode.inUse_ && childNode.id_ != node.id_ ) // Only because id 1 has itself as its parent
printFullTree_recursive( output, childNode, magnitude / usefulChildrenCount, depth + 1 );
}
// output << indent << "}" << endl;
output << indent << "</node>" << endl;
}
void printFullTree( )
{
ofstream treeStream( "tree_krona.html" );
printKronaHeader( treeStream );
printFullTree_recursive( treeStream, idToNode[1] );
printKronaFooter( treeStream );
}
void findNCBITaxa( string nodesDMP, string mergedDMP, string outputInfo )
{
vector<string> lineVector;
string line;
// map<string, int> nameToId;
ifstream ncbiNodes( nodesDMP );
NCBINode node;
while ( getline( ncbiNodes, line ) )
{
lineVector = splitString( line, "\t|\t" );
int id = atoi( lineVector[0].c_str() );
int parentId = atoi( lineVector[1].c_str() );
node.name_ = idToScientificName[id];
node.id_ = id;
if ( lineVector.size() > 2 )
{
int taxLevel = getTaxonomicLevel( lineVector[2] );
node.taxLevel_ = taxLevel;
}
else
cerr << line << endl;
node.parentId_ = parentId;
if ( idToNode.find( id ) != idToNode.end() )
cerr << "already has id " << id << " " << idToScientificName[id] << endl;
node.inUse_ = false;
idToNode[id] = node;
}
clog << "got ncbi names " << endl;
markUsefulNodes();
// Create reverse mapping
for ( auto & node : idToNode )
{
parentToChildren[ node.second.parentId_ ].push_back( node.second );
}
/*
//get parent levels, and change the taxLevel under species to strain
for ( map< int, NCBINode >::iterator it = idToNode.begin(); it != idToNode.end(); ++it )
{
NCBINode parent = idToNode[it->second.parentId_];
if ( parent.taxLevel_ == getTaxonomicLevel( "species" ) )
it->second.taxLevel_ = getTaxonomicLevel( "strain" );
else if ( parent.taxLevel_ == getTaxonomicLevel( "strain" ) )
it->second.taxLevel_ = getTaxonomicLevel( "subsubspecies" );
it->second.parentLevel_ = parent.taxLevel_;
}
*/
clog << "node to id " << idToNode.size() << endl;
/* ifstream mergedStream( mergedDMP.c_str(), ios::in);
while(mergedStream.good()) {
getline(mergedStream, line);
lineVector = split(line,"\t|\t");
int oldId = atoi(lineVector[0].c_str());
int newId = atoi(lineVector[1].c_str());
cerr << oldId << " " << newId <<endl;
NCBINode oldNode = idToNode[oldId];
cerr << "name " << oldNode.name_ <<endl;
idToNode[newId] = oldNode;
}*/
clog << "after merged out " << idToNode.size() << endl;
clog << "seq size " << seqs.size() << endl;
for ( unsigned int j ( 0 ) ; j < seqs.size(); j++ )
{
unsigned short fileNum = seqs[j].fileNum_;
//1,gi|15604717|ref|NC_000117.1| Chlamydia trachomatis D/UW-3/CX, complete genome
int gi = seqs[j].giNumber_;
clog << "fileNum " << fileNum << " (" << j << "/" << seqs.size() << "): gi=" << gi << ", taxId=" << giToTaxId[gi] << endl;
// if ( giToTaxId[gi] != 0 )
{
cout << fileNum;
printAncestors( giToTaxId[gi], seqs[j] );
cout << endl;
}
}
printFullTree();
}
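// Writes, on the cout line started by the caller, the taxonomy path of one sequence: one taxId per
// recognised rank, then any "no rank" ids collected below the lowest recognised rank, padded with
// zeros for ranks that could not be resolved.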
void printAncestors( const int lowestRankTaxId, const SequenceInformation &seqItem )
{
int id( lowestRankTaxId );
clog << "Ancestors for id " << id;
vector<int> taxIds( taxLevelSize );
vector<int> taxIds2; // whole taxonomy, including the "no rank" levels
vector<int> taxIds3; // same as taxIds, plus all the "no rank" levels below "sub-species"
bool knownRankReached = false;
if ( id != 0 )
{
const NCBINode *n;
do
{
n = &idToNode[id];
taxIds2.push_back( id );
if ( n->taxLevel_ < ( int )taxLevelSize )
{
taxIds[ n->taxLevel_ ] = id;
//taxIds3[ n->taxLevel_ ] = id;
knownRankReached = true;
}
else if ( !knownRankReached )
taxIds3.push_back( id );
clog << " => id " << id << "(lvl " << n->taxLevel_ << ")";
id = n->parentId_;
}
while ( n->taxLevel_ != 0 );
clog << endl;
}
unsigned int unusedLowestRanks = 0;
while ( unusedLowestRanks < taxLevelSize && taxIds[ taxLevelSize - unusedLowestRanks - 1] == 0 )
++unusedLowestRanks;
// Check if the lowest rank taxId is a node of the tax tree, in which case we create a sub-item in order to prevent also using it as a leaf
    if ( unusedLowestRanks != taxLevelSize ) // Equality only happens once, for an annoying genome that we need to discard
{
if ( parentToChildren.find( lowestRankTaxId ) != parentToChildren.end() )
{
auto &childrenList = parentToChildren[ lowestRankTaxId ];
bool atLeastOneInUse = false;
for ( const auto & child : childrenList )
atLeastOneInUse |= child.inUse_;
if ( atLeastOneInUse )
{
clog << "TaxId " << lowestRankTaxId << " seems to be both a leaf and a node of the taxonomic tree. Creating a new sub-id to use as a leaf" << endl;
string pseudoName = getPseudoScientificNameFromTag( seqItem.tag_ );
if ( extraTaxNameToIdMap.find( pseudoName ) == extraTaxNameToIdMap.end() )
{
extraTaxNameToIdMap[ pseudoName ] = extraTaxId;
taxIds3.insert( taxIds3.begin(), extraTaxId );
--extraTaxId;
}
else
{
taxIds3.insert( taxIds3.begin(), extraTaxNameToIdMap[ pseudoName ] );
}
//childrenList.push_back
}
}
}
#ifdef DEBUG_TAX_LEVELS
cout << " " << taxIds2.size();
cout << " " << ( taxLevelSize + taxIds3.size() - unusedLowestRanks );
for ( int k( 0 ) ; k < ( int )taxLevelSize ; ++k )
cout << " " << taxIds[k];
cout << " |";
for ( int k( taxIds2.size() - 1 ) ; k >= 0 ; --k )
cout << " " << taxIds2[k] << "(" << idToNode[taxIds2[k]].name_ << "|" << idToNode[taxIds2[k]].taxLevel_ << ")";
cout << " |XXX| ";
for ( int k( 0 ) ; k < ( int )taxLevelSize - unusedLowestRanks ; ++k )
cout << " " << taxIds[k] << "(" << idToNode[taxIds[k]].name_ << "|" << idToNode[taxIds[k]].taxLevel_ << ")";
cout << " |X| ";
for ( int k( taxIds3.size() - 1 ) ; k >= 0 ; --k )
cout << " " << taxIds3[k] << "(" << idToNode[taxIds3[k]].name_ << "|" << idToNode[taxIds3[k]].taxLevel_ << ")";
#else
// Final choice
for ( uint k( 0 ) ; k < taxLevelSize - unusedLowestRanks ; ++k )
cout << " " << taxIds[k];
for ( int k( taxIds3.size() - 1 ) ; k >= 0 ; --k )
cout << " " << taxIds3[k];
for ( uint k( 0 ) ; k < unusedLowestRanks - taxIds3.size(); ++k )
cout << " 0";
#endif // DEBUG_TAX_LEVELS
}
<|start_filename|>src/BCR/BWTCollection.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
/******************************************************************************
* It assumes that:
* - the length of each sequence is 100.
* - the alphabet is $ACGNTZ
******************************************************************************/
#include "BWTCollection.hh"
#include "BCRexternalBWT.hh"
namespace SXSI
{
/**
* Init bwt collection
*
* See BCRexternalBWT.h for more details.
*/
BWTCollection *BWTCollection::InitBWTCollection( const string &file1, const string &fileOut, const int mode, const CompressionFormatType outputCompression )
{
BWTCollection *result =
new BCRexternalBWT( file1, fileOut, mode, outputCompression );
return result;
}
}
<|start_filename|>src/libzoo/util/Logger.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "Logger.hh"
using namespace std;
NullStream nullStream;
int Logger::currentVerbosity = 1;
void Logger::setVerbosity( const string &verbosityString )
{
if ( verbosityString == "quiet" ||
verbosityString == "0" )
currentVerbosity = 0;
else if ( verbosityString == "normal" ||
verbosityString == "1" )
currentVerbosity = 1;
else if ( verbosityString == "verbose" ||
verbosityString == "2" )
currentVerbosity = 2;
else if ( verbosityString == "very-verbose" ||
verbosityString == "3" )
currentVerbosity = 3;
else if ( verbosityString == "debug" ||
verbosityString == "4" )
currentVerbosity = 4;
else
{
clog << "Warning: Invalid verbosity value. Setting to \"normal\"" << endl;
currentVerbosity = 1;
}
Logger_if( LOG_SHOW_IF_VERBOSE ) Logger::out() << "Setting logging to level " << currentVerbosity << endl;
}
<|start_filename|>src/libzoo/cli/ToolParameters.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BEETL_TOOL_PARAMETERS_HH
#define BEETL_TOOL_PARAMETERS_HH
#include <cstdlib>
#include <iostream>
#include <map>
#include <string>
#include <vector>
using std::string;
using std::vector;
using namespace std;
class ToolParameters;
enum { TYPE_STRING = 1, TYPE_CHOICE = 2, TYPE_INT = 4, TYPE_SWITCH = 8 };
enum { REQUIRED = 16, OPTIONAL = 0, ENVIRONMENT = 32 };
enum { AUTOMATED = 64, NOT_AUTOMATED = 0 };
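// These flags are bit values; an entry's 'flags' field presumably combines them with bitwise OR
// (e.g. TYPE_INT | REQUIRED), which the mask-based printing helpers below rely on.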
struct ParameterEntry
{
string stringId;
int numId;
string longCmdLineName;
string shortCmdLineName;
string description;
string defaultValue;
int flags;
const string *possibleValues;
ToolParameters *parent_;
string userValue;
int parsedValue;
ParameterEntry( string stringId, int numId, string longCmdLineName, string shortCmdLineName, string description, string defaultValue, int flags, const string *possibleValues, ToolParameters *parent )
: stringId( stringId )
, numId( numId )
, longCmdLineName( longCmdLineName )
, shortCmdLineName( shortCmdLineName )
, description( description )
, defaultValue( defaultValue )
, flags( flags )
, possibleValues( possibleValues )
, parent_( parent )
, userValue( "" )
, parsedValue( -1 )
{}
bool operator==( const int rhs );
bool operator==( const string &rhs );
operator int () const;
operator string () const;
ParameterEntry &operator=( const int rhs );
ParameterEntry &operator=( const string &rhs );
bool isSet() const;
void silentSet( const int val )
{
set( val, true );
}
private:
void set( const int val, const bool isSilent = false );
void set( const string &valString );
};
ostream &operator<<( std::ostream &os, const ParameterEntry &obj );
static const int MULTIPLE_OPTIONS = 99;
// options: --color
enum ColorFormat
{
COLOR_FORMAT_NEVER = 0,
COLOR_FORMAT_ALWAYS = 1,
COLOR_FORMAT_AUTO = 2
};
static const string colorLabels[] =
{
"never",
"always",
"auto",
"" // end marker
};
class ToolParameters
{
public:
virtual ~ToolParameters() {}
int getValue( const int key ) const;
int getValue( const string &key ) const;
string getStringValue( const int key ) const;
string getStringValue( const string &key ) const;
ParameterEntry &operator[]( const string &key )
{
return getEntry( key );
}
ParameterEntry operator[]( const string &key ) const
{
return getEntry( key );
}
ParameterEntry &operator[]( const int key )
{
return getEntry( key );
}
ParameterEntry operator[]( const int key ) const
{
return getEntry( key );
}
void print( std::ostream &os, const bool singleLine, const int flagMask = 0 ) const;
private:
ParameterEntry &getEntry( const string &key );
ParameterEntry getEntry( const string &key ) const;
ParameterEntry &getEntry( const int key );
ParameterEntry getEntry( const int key ) const;
protected:
void addDefaultVerbosityAndHelpEntries();
public:
void addEntry( int numId, string stringId, string longCmdLineName, string shortCmdLineName, string description, string defaultValue, int flags, const string *inputFormatLabels = NULL );
void printUsage() const;
bool parseArgv( const int argc, const char **argv );
bool chechRequiredParameters();
void commitDefaultValues();
void mergeWith( const ToolParameters &other );
vector<ParameterEntry> entries_;
private:
void setLoggerVerbosityAndTempDir();
void printUsageForCategory( int trueMask, int falseMask ) const;
map<string, int> stringIdMap; // Map stringId -> internalId
map<int, int> numIdMap; // Map numId -> internalId
};
#endif //ifndef BEETL_TOOL_PARAMETERS_HH
<|start_filename|>src/shared/SequenceExtractor.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**
**
**/
#ifndef SEQUENCE_EXTRACTOR_HH
#define SEQUENCE_EXTRACTOR_HH
#include "Types.hh"
#include <algorithm>
#include <fstream>
#include <sstream>
#include <vector>
using std::vector;
class SequenceExtractor
{
public:
SequenceExtractor();
void init( const string &seqNumFilename );
bool doWeExtractNextSequence();
private:
bool isActive_;
vector<SequenceNumber> seqNums_;
SequenceNumber index_;
SequenceNumber currentSeqNum_;
};
#endif // SEQUENCE_EXTRACTOR_HH
<|start_filename|>src/countWords/IntervalHandlerSplice.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "IntervalHandlerSplice.hh"
#include "libzoo/util/Logger.hh"
using namespace std;
//
// IntervalHandlerSplice member function declarations
//
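// Convention used in the comments below: dataset A is the RNA reads, dataset B is the genome.
// A splice breakpoint ("BKPT" line) is reported when an interval present in both datasets has
// strong A-only and B-only backward extensions but no extension common to both.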
void IntervalHandlerSplice::foundInBoth
( const int pileNum,
const LetterCount &countsThisRangeA,
const LetterCount &countsThisRangeB,
const Range &thisRangeA,
const Range &thisRangeB,
AlphabetFlag &propagateIntervalA,
AlphabetFlag &propagateIntervalB,
bool &isBreakpointDetected,
const int cycle
)
{
bool sharedPath( false );
LetterNumber maxSignalAOnly( 0 ), maxSignalBOnly( 0 );
for ( int l( 1 ); l < alphabetSize; l++ )
{
if ( ( countsThisRangeB.count_[l] == 0 )
&& ( countsThisRangeA.count_[l] > maxSignalAOnly ) )
maxSignalAOnly = countsThisRangeA.count_[l];
if ( ( countsThisRangeA.count_[l] == 0 )
&& ( countsThisRangeB.count_[l] > maxSignalBOnly ) )
maxSignalBOnly = countsThisRangeB.count_[l];
sharedPath |= ( ( countsThisRangeA.count_[l] > 0 )
&& ( countsThisRangeB.count_[l] > 0 ) );
// for a general shared interval, we only care about backward
// extensions in B (genome) that are common to A (RNA)
propagateIntervalA[l] = ( countsThisRangeA.count_[l] >= minOcc_ );
propagateIntervalB[l] = ( propagateIntervalA[l] &&
( countsThisRangeB.count_[l] >= 0 ) );
} // ~for l
if ( ( sharedPath == false )
&& ( maxSignalAOnly >= minOcc_ )
&& ( maxSignalBOnly >= minOcc_ ) )
{
        // At a breakpoint position, we *do* care about the B-only intervals
for ( int l( 1 ); l < alphabetSize; l++ )
propagateIntervalB[l] = ( countsThisRangeB.count_[l] >= minOcc_ );
isBreakpointDetected = true;
#pragma omp critical (IO)
{
Logger::out() << "BKPT ";
if ( thisRangeB.word_.empty() )
Logger::out() << alphabet[pileNum] << string( cycle - 1, 'x' ); // No propagated sequence => Print what we know of the sequence
else
Logger::out() << thisRangeB.word_;
Logger::out()
<< ' ' << countsThisRangeA.count_[0]
<< ':' << countsThisRangeA.count_[1]
<< ':' << countsThisRangeA.count_[2]
<< ':' << countsThisRangeA.count_[3]
<< ':' << countsThisRangeA.count_[4]
<< ':' << countsThisRangeA.count_[5]
<< ' ' << countsThisRangeB.count_[0]
<< ':' << countsThisRangeB.count_[1]
<< ':' << countsThisRangeB.count_[2]
<< ':' << countsThisRangeB.count_[3]
<< ':' << countsThisRangeB.count_[4]
<< ':' << countsThisRangeB.count_[5]
<< ' ' << ( thisRangeA.pos_ & matchMask )
<< ' ' << ( thisRangeB.pos_ & matchMask )
<< ' ' << thisRangeA.num_
<< ' ' << thisRangeB.num_
<< endl;
}
}
// don't bother with Ns
propagateIntervalA[whichPile[( int )dontKnowChar]] = false;
propagateIntervalB[whichPile[( int )dontKnowChar]] = false;
} // ~foundInBoth
void IntervalHandlerSplice::foundInAOnly
( const int pileNum,
const LetterCount &countsSoFarA,
const LetterCount &countsThisRangeA,
const char *bwtSubstring,
Range &thisRangeA,
AlphabetFlag &propagateIntervalA,
const int cycle
)
{
if ( countsThisRangeA.count_[0] > 0 )
{
#pragma omp critical (IO)
{
Logger::out() << "READ ";
if ( thisRangeA.word_.empty() )
Logger::out() << alphabet[pileNum]; // No propagated sequence
else
Logger::out() << thisRangeA.word_;
Logger::out()
<< ' ' << thisRangeA.pos_
<< ' ' << countsThisRangeA.count_[0]
<< ':' << countsThisRangeA.count_[1]
<< ':' << countsThisRangeA.count_[2]
<< ':' << countsThisRangeA.count_[3]
<< ':' << countsThisRangeA.count_[4]
<< ':' << countsThisRangeA.count_[5]
<< ' ' << countsSoFarA.count_[0]
<< endl;
}
}
// TBD print out IDs of discovered reads
for ( int l( 1 ); l < alphabetSize; l++ )
{
propagateIntervalA[l] = ( countsThisRangeA.count_[l] > 0 );
} // ~for l
// don't bother with Ns
propagateIntervalA[whichPile[( int )dontKnowChar]] = false;
} // ~foundInAOnly
void IntervalHandlerSplice::foundInBOnly
( const int pileNum,
const LetterCount &countsSoFarB,
const LetterCount &countsThisRangeB,
const char *bwtSubstring,
Range &thisRangeB,
AlphabetFlag &propagateIntervalB,
const int cycle
)
{
if ( countsThisRangeB.count_[0] > 0 )
{
#pragma omp critical (IO)
{
Logger::out() << "INBS ";
if ( thisRangeB.word_.empty() )
Logger::out() << alphabet[pileNum]; // No propagated sequence
else
Logger::out() << thisRangeB.word_;
Logger::out()
<< ' ' << thisRangeB.pos_
<< ' ' << countsThisRangeB.count_[0]
<< ':' << countsThisRangeB.count_[1]
<< ':' << countsThisRangeB.count_[2]
<< ':' << countsThisRangeB.count_[3]
<< ':' << countsThisRangeB.count_[4]
<< ':' << countsThisRangeB.count_[5]
<< ' ' << countsSoFarB.count_[0]
<< endl;
}
}
// TBD print out IDs of discovered reads
for ( int l( 1 ); l < alphabetSize; l++ )
{
propagateIntervalB[l] = ( countsThisRangeB.count_[l] > 0 );
} // ~for l
// don't bother with Ns
propagateIntervalB[whichPile[( int )dontKnowChar]] = false;
} // ~foundInBOnly
<|start_filename|>src/BCRext/BwtReader.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BwtReader.hh"
#include "BwtIndex.hh"
#include "BwtWriter.hh"
#include "CountWords.hh"
#include "LetterCount.hh"
#include <algorithm>
#include <cmath>
#include <cstdlib>
#include <cstring>
#include <sstream>
#ifndef DONT_USE_MMAP
# include <fcntl.h>
# include <sys/mman.h>
# include <sys/stat.h>
# include <sys/types.h>
#endif
using namespace std;
//#define DEBUG_RAC 1
//#define DEBUG_RAC_VERBOSE 1
//
// BwtReaderBase member function definitions
//
BwtReaderBase::BwtReaderBase( const string &filename ) :
filename_( filename )
, pFile_( fopen( filename.c_str(), "r" ) )
, buf_( ReadBufferSize )
{
if ( pFile_ == NULL )
{
cerr << "!! BwtReaderBase: failed to open file " << filename << endl;
exit( EXIT_FAILURE );
}
#ifdef DEBUG
cout << "BwtReaderBase: opened file " << filename << " " << pFile_ << endl;
#endif
}
BwtReaderBase::BwtReaderBase( const BwtReaderBase &obj ):
filename_( obj.filename_ )
, pFile_( fopen( obj.filename_.c_str(), "r" ) )
, buf_( obj.buf_ )
{
if ( pFile_ == NULL )
{
cerr << "!! BwtReaderBase: failed to re-open file " << filename_ << endl;
exit( EXIT_FAILURE );
}
#ifdef DEBUG
cout << "BwtReaderBase: re-opened file " << filename_ << " " << pFile_ << endl;
#endif
// Restore file to same position as before
fseek( pFile_, ftell( obj.pFile_ ), SEEK_SET );
} // ~ctor
BwtReaderBase::~BwtReaderBase()
{
if ( pFile_ )
fclose( pFile_ );
#ifdef DEBUG
cout << "BwtReaderBase: closed file " << pFile_ << endl;
#endif
}
LetterNumber BwtReaderBase::readAndCount( LetterCount &c )
{
// will call the relevant virtual function
return readAndCount( c, maxLetterNumber );
}
LetterNumber BwtReaderBase::readAndSend( BwtWriterBase &writer )
{
// will call the relevant virtual function
return readAndSend( writer, maxLetterNumber );
}
//
// BwtReaderASCII member function definitions
//
void BwtReaderASCII::rewindFile( void )
{
rewind( pFile_ );
currentPos_ = 0;
} // ~rewindFile
LetterNumber BwtReaderASCII::tellg( void ) const
{
return currentPos_;
} // ~tellg
int BwtReaderASCII::seek( const LetterNumber posInFile, const LetterNumber baseNumber )
{
assert( posInFile == baseNumber );
currentPos_ = baseNumber;
return fseek( pFile_, posInFile, SEEK_SET );
}
LetterNumber BwtReaderASCII::readAndCount( LetterCount &c, const LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR ASCII readAndCount " << numChars << " chars " << endl;
#endif
LetterNumber charsLeft( numChars ), charsToRead, charsRead;
while ( charsLeft > 0 )
{
charsToRead = ( ( charsLeft > ReadBufferSize ) ? ReadBufferSize : charsLeft );
charsRead = fread( buf_.data(), sizeof( char ), charsToRead, pFile_ );
#ifdef DEBUG
std::cout << "Reading " << charsRead << " chars ";
#endif
for ( LetterNumber i( 0 ); i < charsRead; i++ )
{
#ifdef DEBUG
std::cout << buf_[i];
#endif
assert( whichPile[( int )buf_[i]] != nv && "Letter not in alphabet" );
c.count_[whichPile[( int )buf_[i]]]++;
}
#ifdef DEBUG
std::cout << std::endl;
#endif
charsLeft -= charsRead;
if ( charsRead < charsToRead )
{
// did not get everything asked for! return num of chars actually found
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
} // ~if
} // ~while
currentPos_ += numChars;
return numChars;
} // ~int BwtReaderASCII::readAndCount( LetterCount& c, const LetterNumber numChars )
LetterNumber BwtReaderASCII::readAndSend( BwtWriterBase &writer, const LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR ASCII readAndSend " << numChars << " chars " << endl;
#endif
LetterNumber totalRead = 0;
    // read ReadBufferSize bytes at a time; if fewer bytes remain
    // to be read, only fetch the missing ones
LetterNumber readNextPass =
( ( numChars - totalRead ) < ReadBufferSize ) ? ( numChars - totalRead ) : ReadBufferSize;
// std::cout << "Reading " << numChars << " chars " << endl;
while ( totalRead < ( LetterNumber )numChars )
{
LetterNumber numRead = 0;
// try to read buffersize byte from file
numRead = fread( buf_.data(), sizeof ( uchar ), readNextPass, pFile_ );
totalRead += numRead;
if ( numRead == 0 ) break;
readNextPass =
( ( numChars - totalRead ) < ReadBufferSize ) ? ( numChars - totalRead ) : ReadBufferSize;
//std::cout << "next pass " << numRead << " chars " << endl;
// writer( buf_, numRead );
#define XXX 1
#ifdef XXX
LetterNumber charsLeft = numRead;
for ( LetterNumber counter = 0; counter < charsLeft; counter++ )
{
if ( buf_[counter] == lastChar_ )
{
runLength_++; // same char, increase runlength counter
}
else
{
// new char, print previous run
writer.sendRun( lastChar_, runLength_ );
// reset runlength to new char
lastChar_ = buf_[counter];
runLength_ = 1;
}
} // ~for
#endif
currentPos_ += numRead;
}
#ifdef XXX
writer.sendRun( lastChar_, runLength_ ); // send out last run
runLength_ = 0; // next call to his function will then again start a new run
#endif
return totalRead;
} // ~LetterNumber BwtReaderASCII::readAndSend( BwtWriterBase& writer, const LetterNumber numChars )
LetterNumber BwtReaderASCII::operator()( char *p, LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR ASCII () " << numChars << " chars " << endl;
#endif
// std::cout << "want " << numChars << " chars" << std::endl;
return fread( p, sizeof( char ), numChars, pFile_ );
} // ~operator()
//
// BwtReaderRunLengthBase member function definitions
//
BwtReaderRunLengthBase::BwtReaderRunLengthBase( const string &filename ):
BwtReaderBase( filename ),
lengths_( 256 ),
codes_( 256 ),
pBuf_( buf_.data() + ReadBufferSize ),
pBufMax_( buf_.data() + ReadBufferSize ),
finished_( false ),
lastChar_( notInAlphabet ),
runLength_( 0 ),
currentPos_( 0 ),
currentPosInFile_( 0 )
{
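    // Pre-compute the decoding tables for the run-length byte encoding used by this reader: the
    // high nibble stores (run length - 1), i.e. runs of 1..16, and the low nibble is the index
    // into 'alphabet'. For example, byte 0x21 decodes to a run of 3 of alphabet[1]
    // ('A' with the $ACGNTZ alphabet mentioned earlier in this package).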
for ( unsigned int i( 0 ); i < 256; i++ )
{
lengths_[i] = 1 + ( i >> 4 );
uint j = ( i & 0xF );
codes_[i] = ( j < alphabetSize ) ? alphabet[j] : notInAlphabet;
} // ~for i
} // ~ctor
BwtReaderRunLengthBase::BwtReaderRunLengthBase( const BwtReaderRunLengthBase &obj ):
BwtReaderBase( obj ),
lengths_( obj.lengths_ ),
codes_( obj.codes_ ),
pBuf_( obj.pBuf_ ),
pBufMax_( obj.pBufMax_ ),
finished_( obj.finished_ ),
lastChar_( obj.lastChar_ ),
runLength_( obj.runLength_ ),
currentPos_( obj.currentPos_ ),
currentPosInFile_( obj.currentPosInFile_ )
{
} // ~ctor
void BwtReaderRunLengthBase::rewindFile( void )
{
// rewind file and set all vars as per constructor
rewind( pFile_ );
runLength_ = 0;
pBuf_ = buf_.data() + ReadBufferSize;
pBufMax_ = buf_.data() + ReadBufferSize;
lastChar_ = notInAlphabet;
currentPos_ = 0;
currentPosInFile_ = 0;
finished_ = false;
} // ~rewindFile
LetterNumber BwtReaderRunLengthBase::tellg( void ) const
{
return currentPosInFile_;
} // ~tellg
int BwtReaderRunLengthBase::seek( const LetterNumber posInFile, const LetterNumber baseNumber )
{
currentPos_ = baseNumber;
currentPosInFile_ = posInFile;
runLength_ = 0;
pBuf_ = buf_.data() + ReadBufferSize;
pBufMax_ = buf_.data() + ReadBufferSize;
lastChar_ = notInAlphabet;
finished_ = false;
return fseek( pFile_, posInFile, SEEK_SET );
}
LetterNumber BwtReaderRunLengthBase::readAndCount( LetterCount &c, const LetterNumber numChars )
{
#ifdef DEBUG_RAC
std::cout << "BR RL readAndCount " << numChars << " chars " << endl;
std::cout << "Before: " << currentPos_ << " " << ftell( pFile_ ) << " ";
std::cout << c << endl;
#endif
LetterNumber charsLeft( numChars );
while ( charsLeft > runLength_ )
{
if (runLength_)
{
c.count_[whichPile[lastChar_]] += runLength_;
charsLeft -= runLength_;
}
#ifdef DEBUG_RAC
std::cout << "R&C: " << currentPos_ << " " << ftell( pFile_ ) << " " << charsLeft << " " << runLength_ << " " << lastChar_ << " " << c << endl;
#endif
if ( getRun() == false )
{
currentPos_ += ( numChars - charsLeft );
#ifdef DEBUG_RAC
std::cout << "After (end): " << currentPos_ << " " << ftell( pFile_ ) << " " << c << endl;
#endif
return ( numChars - charsLeft );
// assert(1==0);
} // ~if
} // ~while
c.count_[whichPile[lastChar_]] += charsLeft;
runLength_ -= charsLeft;
currentPos_ += numChars;
#ifdef DEBUG_RAC
std::cout << "After (not at end): " << currentPos_ << " " << ftell( pFile_ ) << " " << c << endl;
#endif
return numChars;
} // ~BwtReaderRunLengthBase::readAndCount( LetterCount& c, const LetterNumber numChars )
LetterNumber BwtReaderRunLengthBase::readAndSend( BwtWriterBase &writer, const LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR RL readAndSend " << numChars << " chars " << endl;
#endif
LetterNumber charsLeft( numChars );
while ( charsLeft > runLength_ )
{
// int fred(whichPile[lastChar_]);
writer.sendRun( lastChar_, runLength_ );
// c.count_[whichPile[lastChar_]]+=runLength_;
charsLeft -= runLength_;
if ( getRun() == false )
{
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
// assert(1==0);
} // ~if
} // ~while
writer.sendRun( lastChar_, charsLeft );
// c.count_[whichPile[lastChar_]]+=charsLeft;
runLength_ -= charsLeft;
currentPos_ += numChars;
return numChars;
} //~BwtReaderRunLengthBase::readAndSend(BwtWriterBase& writer, const LetterNumber numChars)
LetterNumber BwtReaderRunLengthBase::operator()( char *p, LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR RL () : asked for " << numChars << " " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
LetterNumber charsLeft( numChars );
// return fread( p, sizeof(char), numChars, pFile_ );
while ( charsLeft > runLength_ )
{
#ifdef DEBUG
std::cout << "BR RL () : setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
memset( p, lastChar_, runLength_ );
p += runLength_;
charsLeft -= runLength_;
if ( getRun() == false )
{
// runLength_=0;
#ifdef DEBUG
std::cout << "B read " << numChars - charsLeft << " out of "
<< numChars << std::endl;
#endif
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
} // ~if
} // ~while
#ifdef DEBUG
std::cout << "BR RL () : last try - setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
// runLength_=lengths_[lastChar_];
// lastChar_=codes_[lastChar_];
#ifdef DEBUG
std::cout << "BR RL () : last try - setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
memset( p, lastChar_, charsLeft );
runLength_ -= charsLeft;
#ifdef DEBUG
std::cout << "B delivered " << numChars << " " << charsLeft << " "
<< pFile_ << std::endl;
#endif
currentPos_ += numChars;
return numChars;
} // ~operator()
//
// BwtReaderRunLength member function definitions
//
BwtReaderRunLength::BwtReaderRunLength( const string &filename ):
BwtReaderRunLengthBase( filename )
{
} // ~ctor
BwtReaderRunLength::BwtReaderRunLength( const BwtReaderRunLength &obj ):
BwtReaderRunLengthBase( obj )
{
} // ~ctor
bool BwtReaderRunLength::getRun( void )
{
if ( pBuf_ == pBufMax_ )
{
if ( finished_ )
{
runLength_ = 0;
return false;
}
else
{
LetterNumber numRead( fread( buf_.data(), sizeof( uchar ), ReadBufferSize, pFile_ ) );
if ( numRead == 0 )
{
runLength_ = 0;
return false;
}
else if ( numRead < ReadBufferSize )
{
finished_ = true;
pBufMax_ = buf_.data() + numRead;
}
pBuf_ = buf_.data();
} // ~else
} // ~if
runLength_ = lengths_[*pBuf_];
lastChar_ = codes_[*pBuf_];
#ifdef DEBUG
cout << "Got run: " << runLength_ << " of " << lastChar_ << endl;
#endif
++pBuf_;
++currentPosInFile_;
return true;
} // ~getRun
//
// BwtReaderRunLengthV3 member function definitions
//
BwtReaderRunLengthV3::BwtReaderRunLengthV3( const string &filename ):
BwtReaderRunLengthBase( filename ),
symbolForRunLength1ForPile_( 0 ),
maxEncodedRunLengthForPile_( 0 ),
firstContinuationSymbol_( 0 ),
maxEncodedRunLengthMultiplierForContinuationSymbol_( 0 ),
firstDataByteInFile_( 0 ),
prefetchedByte_( 0 )
{
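    // RLE v3 file layout, as parsed below: a 6-byte magic ("BWT", 0x0D, 0x0A, 0x1A), a 2-byte
    // format version (must be 3), then a byte-code table of {base char, range length, first run
    // length} triplets until all 256 byte codes are assigned; the '+' base marks continuation
    // codes used to extend runs beyond the maximum directly-encodable length.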
// Check file header
uchar c;
c = fgetc( pFile_ );
assert( c == 'B' );
c = fgetc( pFile_ );
assert( c == 'W' );
c = fgetc( pFile_ );
assert( c == 'T' );
c = fgetc( pFile_ ); // \r\n sequence to check for invalid dos/unix format conversions
assert( c == 13 );
c = fgetc( pFile_ );
assert( c == 10 );
    c = fgetc( pFile_ ); // Ctrl-Z: makes some text viewers stop here; its non-ASCII value avoids confusion with text files
assert( c == 26 );
// Format version number, on 2 bytes to help identify endianness problems
uint16_t formatVersion;
assert( fread( &formatVersion, sizeof( formatVersion ), 1, pFile_ ) == 1 );
assert( formatVersion == 3 );
symbolForRunLength1ForPile_.resize( alphabetSize );
maxEncodedRunLengthForPile_.resize( alphabetSize );
firstContinuationSymbol_ = 0;
maxEncodedRunLengthMultiplierForContinuationSymbol_ = -1;
uint8_t base, rangeLength;
uint16_t firstRunLength;
uint16_t firstBytecode = 0;
while (firstBytecode < 256 )
{
assert( fread( &base, sizeof( base ), 1, pFile_ ) == 1 );
assert( fread( &rangeLength, sizeof( rangeLength ), 1, pFile_ ) == 1 );
assert( fread( &firstRunLength, sizeof( firstRunLength ), 1, pFile_ ) == 1 );
for (int i=0; i<rangeLength; ++i)
{
lengths_[ firstBytecode+i ] = firstRunLength+i;
codes_[ firstBytecode+i ] = base;
}
if (base != '+')
{
assert( firstRunLength == 1 ); // current restriction
symbolForRunLength1ForPile_[ whichPile[base] ] = firstBytecode;
maxEncodedRunLengthForPile_[ whichPile[base] ] = firstRunLength + rangeLength - 1;
}
else
{
assert( firstRunLength == 0 ); // current restriction
firstContinuationSymbol_ = firstBytecode;
maxEncodedRunLengthMultiplierForContinuationSymbol_ = firstRunLength + rangeLength - 1;
}
firstBytecode += rangeLength;
}
assert (firstBytecode == 256 );
currentPosInFile_ = firstDataByteInFile_ = ftell( pFile_ );
prefetchNextByte();
} // ~ctor
BwtReaderRunLengthV3::BwtReaderRunLengthV3( const BwtReaderRunLengthV3 &obj ):
BwtReaderRunLengthBase( obj ),
symbolForRunLength1ForPile_( obj.symbolForRunLength1ForPile_ ),
maxEncodedRunLengthForPile_( obj.maxEncodedRunLengthForPile_ ),
firstContinuationSymbol_( obj.firstContinuationSymbol_ ),
maxEncodedRunLengthMultiplierForContinuationSymbol_( obj.maxEncodedRunLengthMultiplierForContinuationSymbol_ ),
firstDataByteInFile_( obj.firstDataByteInFile_ ),
prefetchedByte_( obj.prefetchedByte_ )
{
} // ~ctor
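// Decodes one run: the first byte gives the character and an initial length; each following '+'
// continuation byte adds lengths_[byte] * multiplier to the run, with the multiplier growing
// geometrically, so arbitrarily long runs can be represented.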
bool BwtReaderRunLengthV3::getRun( void )
{
if ( prefetchedByte_ == EOF )
{
lastChar_ = notInAlphabet;
runLength_ = 0;
return false;
}
LetterNumber currentContinuationMultiplier = 0;
do
{
if (codes_[prefetchedByte_] != '+') // continuation code
{
lastChar_ = codes_[prefetchedByte_];
runLength_ = lengths_[prefetchedByte_];
currentContinuationMultiplier = maxEncodedRunLengthForPile_[ whichPile[lastChar_] ];
}
else
{
assert( currentContinuationMultiplier != 0 );
runLength_ += lengths_[prefetchedByte_] * currentContinuationMultiplier;
currentContinuationMultiplier *= maxEncodedRunLengthMultiplierForContinuationSymbol_ + 1;
}
++currentPosInFile_;
prefetchNextByte();
}
while (prefetchedByte_ != EOF && codes_[prefetchedByte_] == '+'); // continuation code
#ifdef DEBUG
cout << "Got run: " << runLength_ << " of " << lastChar_ << endl;
#endif
return true;
}
void BwtReaderRunLengthV3::prefetchNextByte()
{
if (prefetchedByte_ == EOF)
return;
if ( pBuf_ == pBufMax_ )
{
LetterNumber numRead = fread( buf_.data(), sizeof( uchar ), ReadBufferSize, pFile_ );
if ( numRead < ReadBufferSize )
{
pBufMax_ = buf_.data() + numRead;
if ( numRead == 0 )
{
prefetchedByte_ = EOF;
return;
}
}
pBuf_ = buf_.data();
}
prefetchedByte_ = *pBuf_;
++pBuf_;
// ++currentPosInFile_;
}
void BwtReaderRunLengthV3::rewindFile( void )
{
seek( firstDataByteInFile_, 0 );
}
LetterNumber BwtReaderRunLengthV3::tellg( void ) const
{
return currentPosInFile_;
}
int BwtReaderRunLengthV3::seek( const LetterNumber posInFile, const LetterNumber baseNumber )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "seek filename=" << filename_ << " posInFile=" << posInFile << " baseNumber=" << baseNumber << endl;
int ret = BwtReaderRunLengthBase::seek( posInFile, baseNumber );
prefetchedByte_ = 0;
prefetchNextByte();
return ret;
}
// BwtReaderIncrementalRunLength member function definitions
//
extern vector< vector<unsigned char> > ramFiles; // declared in BwtWriter; todo: move those to another a new header file
BwtReaderIncrementalRunLength::BwtReaderIncrementalRunLength( const string &filename ):
BwtReaderBase( filename ), runLength_( 0 ),
pBuf_( buf_.data() + ReadBufferSize ), pBufMax_( buf_.data() + ReadBufferSize ),
lastChar_( notInAlphabet ),
lastMetadata_( 0 ),
finished_( false ),
currentPos_ ( 0 ),
posInRamFile_ ( 0 )
{
// cout << "BwtReaderIncrementalRunLength: Opening " << filename << endl;
if ( fread( &fileNum_, sizeof( fileNum_ ), 1, pFile_ ) != 1 )
{
fileNum_ = -1;
finished_ = true;
}
// cout << " = file #" << fileNum_ << endl;
fileNum_ %= 5;
// cout << " => file #" << fileNum_ << endl;
assert( ( int )ramFiles.size() > fileNum_ );
for ( uint i( 0 ); i < 256; i++ )
{
lengths_[i] = 1 + ( i >> 4 );
uint j = ( i & 0xF );
codes_[i] = ( j < alphabetSize ) ? alphabet[j] : notInAlphabet;
} // ~for i
} // ~ctor
void BwtReaderIncrementalRunLength::rewindFile( void )
{
// rewind file and set all vars as per constructor
rewind( pFile_ );
runLength_ = 0;
pBuf_ = buf_.data() + ReadBufferSize;
pBufMax_ = buf_.data() + ReadBufferSize;
lastChar_ = notInAlphabet;
currentPos_ = 0;
finished_ = false;
} // ~rewindFile
LetterNumber BwtReaderIncrementalRunLength::tellg( void ) const
{
return currentPos_;
} // ~tellg
LetterNumber BwtReaderIncrementalRunLength::readAndCount( LetterCount &c, const LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR RL readAndCount " << numChars << " chars " << endl;
#endif
LetterNumber charsLeft( numChars );
while ( charsLeft > runLength_ )
{
// Below is not great design, at first call of this function it accesses an
// out-of-range array element. Fortunately it always adds zero to it! :)
c.count_[whichPile[lastChar_]] += runLength_;
charsLeft -= runLength_;
if ( getRun() == false )
{
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
// assert(1==0);
} // ~if
} // ~while
c.count_[whichPile[lastChar_]] += charsLeft;
runLength_ -= charsLeft;
currentPos_ += numChars;
return numChars;
} // ~BwtReaderIncrementalRunLength::readAndCount( LetterCount& c, const LetterNumber numChars )
LetterNumber BwtReaderIncrementalRunLength::readAndSend( BwtWriterBase &writer, const LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR RL readAndSend " << numChars << " chars " << endl;
#endif
bool isWriterIncremental = writer.isIncremental();
LetterNumber charsLeft( numChars );
while ( charsLeft > runLength_ )
{
if ( !isWriterIncremental )
writer.sendRun( lastChar_, runLength_ );
charsLeft -= runLength_;
if ( getRun() == false )
{
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
// assert(1==0);
} // ~if
} // ~while
runLength_ -= charsLeft;
currentPos_ += numChars;
if ( !isWriterIncremental )
writer.sendRun( lastChar_, charsLeft );
else
writer.sendRunOfPreExistingData( lastChar_, charsLeft, fileNum_, posInRamFile_, runLength_ );
return numChars;
} //~BwtReaderIncrementalRunLength::readAndSend(BwtWriterBase& writer, const LetterNumber numChars)
LetterNumber BwtReaderIncrementalRunLength::operator()( char *p, LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR RL () : asked for " << numChars << " " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
LetterNumber charsLeft( numChars );
// return fread( p, sizeof(char), numChars, pFile_ );
while ( charsLeft > runLength_ )
{
#ifdef DEBUG
std::cout << "BR RL () : setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
memset( p, lastChar_, runLength_ );
p += runLength_;
charsLeft -= runLength_;
if ( getRun() == false )
{
// runLength_=0;
#ifdef DEBUG
std::cout << "B read " << numChars - charsLeft << " out of "
<< numChars << std::endl;
#endif
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
} // ~if
} // ~while
#ifdef DEBUG
std::cout << "BR RL () : last try - setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
// runLength_=lengths_[lastChar_];
// lastChar_=codes_[lastChar_];
#ifdef DEBUG
std::cout << "BR RL () : last try - setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
memset( p, lastChar_, charsLeft );
runLength_ -= charsLeft;
#ifdef DEBUG
std::cout << "B delivered " << numChars << " " << charsLeft << " "
<< pFile_ << std::endl;
#endif
currentPos_ += numChars;
return numChars;
} // ~operator()
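// Each in-RAM run is stored as a (byteCode, metadata) pair. The metadata byte appears to encode,
// in its low 7 bits, the level of a sub-file to descend into (same pile, different insertion
// level) and, in its top bit, a Return flag that pops back to the calling file; byte code 0xFF is
// a pure control entry and is skipped.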
bool BwtReaderIncrementalRunLength::getRun( void )
{
again:
if ( lastMetadata_ != 0 )
{
if ( ( lastMetadata_ & ~0x80 ) != 0 ) // if there is a subcall, whether or not the Return flag is also present
{
// push current file onto stack
// we need a current position associated to each file
int calledFileLevel = lastMetadata_ & ~0x80;
assert( calledFileLevel < 128 );
int oldFileNum = fileNum_;
int newFileNum = 5 * calledFileLevel + ( fileNum_ % 5 );
if ( ( int )posInRamFiles_.size() <= newFileNum )
{
posInRamFiles_.resize( newFileNum + 1 );
}
posInRamFiles_[oldFileNum] = posInRamFile_;
posInRamFile_ = posInRamFiles_[newFileNum];
stackedFileNums_.push_back( oldFileNum );
fileNum_ = newFileNum;
}
else
{
// return bit (only) is set
assert( lastMetadata_ == 0x80 );
while ( ( lastMetadata_ & 0x80 ) != 0 && !stackedFileNums_.empty() )
{
// return from subfile
assert( !stackedFileNums_.empty() );
posInRamFiles_[fileNum_] = posInRamFile_;
fileNum_ = stackedFileNums_.back();
stackedFileNums_.pop_back();
posInRamFile_ = posInRamFiles_[fileNum_];
// retrieve the caller's lastMetadata to check its Return flag (the subcall may have been changed, but the return flag should persist)
lastMetadata_ = ramFiles[fileNum_][posInRamFile_ - 1];
}
}
}
if ( finished_ || fileNum_ == -1 || posInRamFile_ >= ramFiles[fileNum_].size() )
{
finished_ = true;
runLength_ = 0;
return false;
}
const unsigned char c = ramFiles[fileNum_][posInRamFile_];
lastMetadata_ = ramFiles[fileNum_][posInRamFile_ + 1];
if ( posInRamFile_ + 2 >= ramFiles[fileNum_].size() )
{
// Add a Return bit if we reach the end of the file, for easier processing afterwards
if ( fileNum_ > 5 )
{
lastMetadata_ |= 0x80;
}
}
#ifdef READ_DATA_FROM_FILES_FOR_DEBUGGING
if ( pBuf_ == pBufMax_ )
{
if ( finished_ )
{
runLength_ = 0;
return false;
}
else
{
LetterNumber numRead( fread( buf_.data(), sizeof( uchar ),
ReadBufferSize, pFile_ ) );
if ( numRead == 0 )
{
runLength_ = 0;
return false;
}
else if ( numRead < ReadBufferSize )
{
// finished_=true;
pBufMax_ = buf_.data() + numRead;
}
pBuf_ = buf_.data();
} // ~else
} // ~if
assert( c == *pBuf_ );
#endif //ifdef READ_DATA_FROM_FILES_FOR_DEBUGGING
runLength_ = lengths_[( int )c];
lastChar_ = codes_[( int )c];
#ifdef DEBUG
cout << "Got run: " << runLength_ << " of " << lastChar_ << endl;
#endif
pBuf_ += 2;
posInRamFile_ += 2;
if ( c == 0xFF )
goto again;
return true;
} // ~getRun
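// Re-packs the possibly fragmented in-RAM BWT of this pile into a single flat run-length vector:
// adjacent runs of the same letter are merged, runs longer than DEFRAGMENTATION_MAX_RUN_LENGTH are
// split across several bytes, and the now-unneeded higher-level ramFiles for this pile are freed.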
void BwtReaderIncrementalRunLength::defragment( void )
{
#define DEFRAGMENTATION_MAX_RUN_LENGTH 12
vector<unsigned char> newRamFile;
unsigned char prevLetter = 0;
unsigned char prevRunLength = 0;
assert ( runLength_ == 0 && "defragment shouldn't be called after any other operation" );
while ( getRun() )
{
if ( prevRunLength == 0 )
{
prevLetter = whichPile[( int )lastChar_];
prevRunLength = runLength_;//lastChar_ >> 4;
}
else
{
unsigned char newLetter = whichPile[( int )lastChar_];
unsigned char newRunLength = runLength_;
if ( ( newLetter != prevLetter ) ) // || (prevRunLength >= DEFRAGMENTATION_MAX_RUN_LENGTH))
{
newRamFile.push_back( prevLetter | ( ( prevRunLength - 1 ) << 4 ) );
newRamFile.push_back( 0 );
prevLetter = newLetter;
prevRunLength = newRunLength;
}
else
{
unsigned char totalRunLength = ( prevRunLength + newRunLength );
if ( totalRunLength <= DEFRAGMENTATION_MAX_RUN_LENGTH )
{
newRamFile.push_back( prevLetter | ( ( totalRunLength - 1 ) << 4 ) );
newRamFile.push_back( 0 );
prevRunLength = 0;
}
else
{
prevRunLength = totalRunLength;
do
{
newRamFile.push_back( prevLetter | ( DEFRAGMENTATION_MAX_RUN_LENGTH - 1 ) << 4 );
newRamFile.push_back( 0 );
prevRunLength -= DEFRAGMENTATION_MAX_RUN_LENGTH;
}
while ( prevRunLength > DEFRAGMENTATION_MAX_RUN_LENGTH );
}
}
}
}
if ( prevRunLength )
{
newRamFile.push_back( prevLetter | ( ( prevRunLength - 1 ) << 4 ) );
newRamFile.push_back( 0 );
}
size_t sizeBefore = 0;
for ( unsigned int i = fileNum_; i < ramFiles.size(); i += 5 )
{
vector<unsigned char> emptyVec;
sizeBefore += ramFiles[i].size();
// ramFiles[i].clear();
ramFiles[i].swap( emptyVec ); // deallocates vector memory
}
ramFiles[fileNum_].swap( newRamFile );
size_t sizeAfter = ramFiles[fileNum_].size();
Logger_if( LOG_SHOW_IF_VERBOSE )
{
Logger::out() << "defragment " << fileNum_ << " : size before= " << sizeBefore << " size after= " << sizeAfter << endl;
}
}
//
// BwtReaderHuffman member function definitions
//
#ifdef ACTIVATE_HUFFMAN
BwtReaderHuffman::BwtReaderHuffman( const string &filename ):
BwtReaderBase( filename ),
runLength_( 0 ),
lastChar_( notInAlphabet ),
bitsUsed_( 0 ),
finished_( false ),
nearlyFinished_( false ),
intCounter_( 0 ),
numSymbols_( 0 ),
maxSymbols_( 0 ),
queueCounter_( 1 ), // needed for the first call of getRun()
currentPos_( 0 )
{
// just make sure everything is fine
fseek( pFile_, 0, SEEK_END );
long fileSize( ftell( pFile_ ) );
// kill everything if file size is not a multiple of 4byte (~32 bit)
assert( ( fileSize % sizeof( unsigned int ) ) == 0 ); // read with == 4 byte
numInts_ = fileSize / sizeof( unsigned int ); // how many ints to read from file
//cerr << filename << ": " << fileSize << " bytes/"<< numInts_ << " blocks" << endl;
fseek( pFile_, 0, SEEK_SET );
//init arrays
for ( int i = 0; i < huffmanBufferSize; i++ )
{
symBuf[i] = 0;
runBuf[i] = 0;
}
soFar_.ull = 0;
toAdd_.ull = 0;
// init the token lookup table, does not need to be cleared when the file
// is rewind since its more or less static
// TODO: hardcode the token table? stays the same for each program call
unsigned int codeMask;
for ( unsigned int i( 0 ); i < numTokens; i++ )
{
tokenTable_[i] = 0xFF;
for ( unsigned int j( 0 ); j < numSingleCodes; j++ )
{
codeMask = ( 1 << singleCharLength[j] ) - 1; // (*2^3==) -1
if ( ( i & codeMask ) == singleCharCode[j] )
{
assert ( tokenTable_[i] == 0xFF );
tokenTable_[i] = ( j << 1 );
// cerr << "TT @ " << i << " is "<< itoa(tokenTable_[i],2) << endl;
}
} // ~for j
for ( unsigned int j( 0 ); j < numDoubleCodes; j++ )
{
codeMask = ( 1 << doubleCharLength[j] ) - 1;
if ( ( i & codeMask ) == doubleCharCode[j] )
{
assert ( tokenTable_[i] == 0xFF );
tokenTable_[i] = ( ( j << 1 ) | 0x1 );
// cerr << "TT @ " << i << " is "<< itoa(tokenTable_[i],2) << endl;
}
} // ~for j
// assert (tokenTable_[i]!=0xFF); some tokens can have no prefix
// that corresponds to a valid code
} // ~for i
} // ~ctor
void BwtReaderHuffman::rewindFile( void )
{
// rewind file and set all vars as per constructor
rewind( pFile_ );
runLength_ = 0;
lastChar_ = notInAlphabet;
currentPos_ = 0;
finished_ = false;
bitsUsed_ = 0;
soFar_.ull = 0;
numSymbols_ = 0;
queueCounter_ = 0;
maxSymbols_ = 0;
intCounter_ = 0;
firstRun_ = true; // fixes 3 Bit huffman error (BTL-17)
nearlyFinished_ = false;
for ( int i = 0; i < huffmanBufferSize; i++ )
{
symBuf[i] = 0;
runBuf[i] = 0;
}
} // ~rewindFile
LetterNumber BwtReaderHuffman::tellg( void ) const
{
return currentPos_;
} // ~tellg
LetterNumber BwtReaderHuffman::readAndCount( LetterCount &c, const LetterNumber numChars )
{
LetterNumber charsLeft( numChars );
while ( charsLeft > runLength_ )
{
// Below is not great design, at first call of this function it accesses an
// out-of-range array element. Fortunately it always adds zero to it! :)
c.count_[whichPile[lastChar_]] += runLength_;
charsLeft -= runLength_;
if ( getRun() == false )
{
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
} // ~if
} // ~while
c.count_[whichPile[lastChar_]] += charsLeft;
runLength_ -= charsLeft;
currentPos_ += numChars;
return numChars;
} // ~BwtReaderHuffman::readAndCount( LetterCount& c, const LetterNumber numChars )
LetterNumber BwtReaderHuffman::readAndSend( BwtWriterBase &writer, const LetterNumber numChars )
{
if ( numChars == 0 )
{
        return numChars; // exit directly
}
LetterNumber charsLeft( numChars );
while ( charsLeft > runLength_ )
{
writer.sendRun( lastChar_, runLength_ );
charsLeft -= runLength_;
if ( getRun() == false )
{
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
} // ~if
} // ~while
writer.sendRun( lastChar_, charsLeft );
runLength_ -= charsLeft;
currentPos_ += numChars;
return numChars;
} //~BwtReaderHuffman::readAndSend(BwtWriterBase& writer, const LetterNumber numChars)
bool BwtReaderHuffman::getRun( void )
{
numSymbols_ = -1; // needed for loops
// no more data available AND or buffer is also empty -> finished here
if ( finished_ && ( queueCounter_ > maxSymbols_ ) ) return false;
// there is still data, we only have to fill out buffer
if ( queueCounter_ > maxSymbols_ && !nearlyFinished_ )
{
unsigned int codeNum = 0;
unsigned int codeSize = 0;
unsigned int runLength = 0;
unsigned int elementsRead = 0;
toAdd_.ull = 0; // init ull AND both ui's with zeros
elementsRead = fread( &toAdd_.ui, sizeof ( unsigned int ), 1, pFile_ );
// try to read 32 bits in a row, if not return false
if ( elementsRead == 1 )
{
toAdd_.ull <<= bitsUsed_; // left shift of how many bits used, start 0
soFar_.ull |= toAdd_.ull; // glue both 32bit words
            bitsUsed_ += 32; // we have successfully read 32 bits
intCounter_++; // and we have used one int for that
}
if ( firstRun_ ) // first call, have to fill the integer buffer
{
toAdd_.ull = 0; // init
elementsRead = fread( &toAdd_.ui, sizeof ( unsigned int ), 1, pFile_ );
if ( elementsRead == 1 )
{
toAdd_.ull <<= bitsUsed_; // left shift of how many bits used, start 0
soFar_.ull |= toAdd_.ull; // glue both 32bit words together
                bitsUsed_ += 32; // we have successfully read 32 bits
intCounter_++; // and we have used one int for that
}
firstRun_ = false;
}
while ( bitsUsed_ > 32 ) // as long as we have some more bits than 1 int uses
{
codeNum = tokenTable_[soFar_.ui & tokenMask]; // get codenum
if ( ( codeNum & 0x1 ) == 0 ) // single code
{
codeNum >>= 1;
// we have just read the stop sign
if ( codeNum == finalCharCode )
{
nearlyFinished_ = true;
break;
}
codeSize = singleCharLength[codeNum]; // how large is the code
soFar_.ull >>= codeSize; // shift by these bits
bitsUsed_ -= codeSize; // substract bits
runLength = 1; // single run only
numSymbols_++; // new symbol in buffer
symBuf[numSymbols_] = alphabet[codeNum]; // add to buffer
runBuf[numSymbols_] = runLength; // ....
}// ~if
else // double code
{
codeNum >>= 1;
codeSize = doubleCharLength[codeNum];
soFar_.ull >>= codeSize;
bitsUsed_ -= codeSize;
runLength = getNum( intCounter_ );
numSymbols_++;
symBuf[numSymbols_] = alphabet[codeNum];
runBuf[numSymbols_] = runLength;
} // ~else.
} // ~while
if ( intCounter_ == ( numInts_ ) && !nearlyFinished_ )
{
while ( 1 )
{
codeNum = tokenTable_[soFar_.ui & tokenMask];
if ( ( codeNum & 0x1 ) == 0 )
{
codeNum >>= 1;
if ( codeNum == finalCharCode )
{
nearlyFinished_ = true;
break;
}
codeSize = singleCharLength[codeNum];
soFar_.ull >>= codeSize;
bitsUsed_ -= codeSize;
assert( bitsUsed_ > 0 );
runLength = 1;
numSymbols_++;
symBuf[numSymbols_] = alphabet[codeNum];
runBuf[numSymbols_] = runLength;
}// ~if
else
{
codeNum >>= 1;
codeSize = doubleCharLength[codeNum];
soFar_.ull >>= codeSize;
bitsUsed_ -= codeSize;
assert( bitsUsed_ > 0 );
int i( 0 );
runLength = getNum( i );
numSymbols_++;
symBuf[numSymbols_] = alphabet[codeNum];
runBuf[numSymbols_] = runLength;
} // ~else
} // ~while
} // ~if
//
maxSymbols_ = numSymbols_;
queueCounter_ = 0; // reset
numSymbols_ = -1; // reset for next run
}
if ( nearlyFinished_ && queueCounter_ > maxSymbols_ ) // GET THIS RIGHT
{
finished_ = true; // now we are really finished
return false;
}
else
{
runLength_ = runBuf[queueCounter_];
lastChar_ = symBuf[queueCounter_];
queueCounter_++;
return true;
}
} // ~getRun
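// Decodes a run length from the bit buffer: 4-bit values 0..14 map directly to run lengths 2..16;
// the escape value 15 switches to a 7-bits-per-byte varint (high bit = continuation) whose result
// is offset by 17, refilling the 64-bit buffer from the file when it runs low.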
unsigned int BwtReaderHuffman::getNum( int &i )
{
    unsigned int n( soFar_.ui & 0xF ); // only the last 4 bits -> 4 bits encoding the number
soFar_.ull >>= 4; // process shift
bitsUsed_ -= 4; // set counter
assert( bitsUsed_ > 0 );
    if ( n != 0xF ) // that would be exactly 15
{
n++;
n++;
}
else
{
n = 0;
int bitShift( 0 );
bool carryOn;
do
{
carryOn = ( ( soFar_.ui & 0x80 ) != 0 ); // test if 1000 0000 bit is set
            n |= ( ( soFar_.ui & 0x7F ) << bitShift ); // extract last 7 bits, shift by 0 in first iteration
bitShift += 7; // next iter will shift by 7
soFar_.ull >>= 8; // shift in the next 8 bits from left to right
bitsUsed_ -= 8; // we used 8 bit so far
assert( bitsUsed_ > 0 );
            if ( ( carryOn == true ) && ( bitsUsed_ < 8 ) ) // more data follows but the bit buffer is nearly empty: refill from file
{
toAdd_.ull = 0;
i++;
assert( fread( &toAdd_.ui, sizeof ( unsigned int ), 1, pFile_ ) == 1 );
toAdd_.ull <<= bitsUsed_;
soFar_.ull |= toAdd_.ull;
bitsUsed_ += 32;
} // ~if
}// ~while
while ( carryOn == true );
n += 17;
} // ~else
#ifdef DEBUG
cout << "getNum " << n << endl;
#endif
return n;
} // ~getNum
// just deprecated code to make everything compile
LetterNumber BwtReaderHuffman::operator()( char *p, LetterNumber numChars )
{
assert( 1 == 0 );
return -1;
} // ~operator()
#endif //ifdef ACTIVATE_HUFFMAN
//
// BwtReaderRunLengthRam member function definitions
//
BwtReaderRunLengthRam::BwtReaderRunLengthRam( const string &filename ):
BwtReaderBase( filename ),
runLength_( 0 ),
lastChar_( notInAlphabet ),
currentPos_( 0 ),
isClonedObject_( false )
{
// Find file size
fseek( pFile_, 0, SEEK_END );
size_t fileSize( ftell( pFile_ ) );
fseek( pFile_, 0, SEEK_SET );
if ( fileSize )
{
#ifdef DONT_USE_MMAP
#pragma omp critical (IO)
cerr << "Info: Using malloc'ed BWT" << endl;
// Allocate enough RAM to contain the whole file
fullFileBuf_ = ( char * ) malloc( fileSize );
assert( fullFileBuf_ != 0 && "Not enough RAM to load BWT" );
size_t ret = fread( fullFileBuf_, fileSize, 1, pFile_ );
assert( ret == 1 );
#else
#pragma omp critical (IO)
cerr << "Info: Using mmap'ed BWT" << endl;
int fd = open( filename.c_str(), O_RDONLY );
assert( fd >= 0 );
fullFileBuf_ = ( char * )mmap( NULL, fileSize, PROT_READ, MAP_SHARED /*| MAP_LOCKED | MAP_POPULATE*/, fd, 0 );
if ( fullFileBuf_ == ( void * ) - 1 )
{
perror( "Error: Map failed" );
assert( false );
exit( -1 );
}
//#define ACTIVATE_LOCKING
#ifdef ACTIVATE_LOCKING
if ( mlock( fullFileBuf_, fileSize ) != 0 )
{
ostringstream oss;
oss << "Error: Mlock failed for file " << filename << " with code " << errno;
#pragma omp critical (IO)
perror( oss.str().c_str() );
}
#endif // ACTIVATE_LOCKING
#endif // DONT_USE_MMAP
}
else
{
fullFileBuf_ = 0;
}
sizeOfFullFileBuf_ = fileSize;
posInFullFileBuf_ = 0;
// This file shouldn't be needed anymore
fclose( pFile_ );
pFile_ = NULL;
for ( unsigned int i( 0 ); i < 256; i++ )
{
lengths_[i] = 1 + ( i >> 4 );
uint j = ( i & 0xF );
codes_[i] = ( j < alphabetSize ) ? alphabet[j] : notInAlphabet;
} // ~for i
} // ~ctor
BwtReaderRunLengthRam::BwtReaderRunLengthRam( const BwtReaderRunLengthRam &obj ):
BwtReaderBase( obj.filename_ ),
runLength_( 0 ),
lastChar_( notInAlphabet ),
currentPos_( 0 ),
#ifndef DONT_USE_MMAP
mmapLength_( 0 ),
#endif
isClonedObject_( true )
{
fullFileBuf_ = obj.fullFileBuf_;
sizeOfFullFileBuf_ = obj.sizeOfFullFileBuf_;
posInFullFileBuf_ = 0;
// This file shouldn't be needed anymore
fclose( pFile_ );
pFile_ = NULL;
for ( unsigned int i( 0 ); i < 256; i++ )
{
lengths_[i] = 1 + ( i >> 4 );
uint j = ( i & 0xF );
codes_[i] = ( j < alphabetSize ) ? alphabet[j] : notInAlphabet;
}
} // ~copy ctor
BwtReaderRunLengthRam::~BwtReaderRunLengthRam()
{
if ( !isClonedObject_ )
#ifdef DONT_USE_MMAP
free( fullFileBuf_ );
#else
if ( fullFileBuf_ )
munmap( fullFileBuf_, mmapLength_ );
#endif
}
void BwtReaderRunLengthRam::rewindFile( void )
{
// rewind file and set all vars as per constructor
// rewind( pFile_ );
runLength_ = 0;
lastChar_ = notInAlphabet;
currentPos_ = 0;
posInFullFileBuf_ = 0;
} // ~rewindFile
LetterNumber BwtReaderRunLengthRam::tellg( void ) const
{
return currentPos_;
} // ~tellg
LetterNumber BwtReaderRunLengthRam::readAndCount( LetterCount &c, const LetterNumber numChars )
{
#ifdef DEBUG_RAC
std::cout << "BR RL readAndCount " << numChars << " chars " << endl;
std::cout << "Before: " << currentPos_ << " " << ftell( pFile_ ) << " " << c << endl;
#endif
LetterNumber charsLeft( numChars );
while ( charsLeft > runLength_ )
{
// Below is not great design, at first call of this function it accesses an
// out-of-range array element. Fortunately it always adds zero to it! :)
c.count_[whichPile[lastChar_]] += runLength_;
charsLeft -= runLength_;
#ifdef DEBUG_RAC
std::cout << "R&C: " << currentPos_ << " " << posInFullFileBuf_ << " " << charsLeft << " " << runLength_ << " " << lastChar_ << " " << c << endl;
#endif
if ( getRun() == false )
{
currentPos_ += ( numChars - charsLeft );
#ifdef DEBUG_RAC
std::cout << "After (end): " << currentPos_ << " " << posInFullFileBuf_ << " " << c << endl;
#endif
return ( numChars - charsLeft );
// assert(1==0);
} // ~if
} // ~while
c.count_[whichPile[lastChar_]] += charsLeft;
runLength_ -= charsLeft;
currentPos_ += numChars;
#ifdef DEBUG_RAC
std::cout << "After (not at end): " << currentPos_ << " " << posInFullFileBuf_ << " " << c << endl;
#endif
return numChars;
} // ~BwtReaderRunLengthRam::readAndCount( LetterCount& c, const LetterNumber numChars )
LetterNumber BwtReaderRunLengthRam::readAndSend( BwtWriterBase &writer, const LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR RL readAndSend " << numChars << " chars " << endl;
#endif
LetterNumber charsLeft( numChars );
while ( charsLeft > runLength_ )
{
// int fred(whichPile[lastChar_]);
writer.sendRun( lastChar_, runLength_ );
// c.count_[whichPile[lastChar_]]+=runLength_;
charsLeft -= runLength_;
if ( getRun() == false )
{
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
// assert(1==0);
} // ~if
} // ~while
writer.sendRun( lastChar_, charsLeft );
// c.count_[whichPile[lastChar_]]+=charsLeft;
runLength_ -= charsLeft;
currentPos_ += numChars;
return numChars;
} //~BwtReaderRunLengthRam::readAndSend(BwtWriterBase& writer, const LetterNumber numChars)
LetterNumber BwtReaderRunLengthRam::operator()( char *p, LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BR RL () : asked for " << numChars << " " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
LetterNumber charsLeft( numChars );
// return fread( p, sizeof(char), numChars, pFile_ );
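// Expand run-length pairs into the caller's buffer: each decoded run is written
// with memset, and any partial run left over is kept in runLength_/lastChar_
// for the next call.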
while ( charsLeft > runLength_ )
{
#ifdef DEBUG
std::cout << "BR RL () : setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
memset( p, lastChar_, runLength_ );
p += runLength_;
charsLeft -= runLength_;
if ( getRun() == false )
{
// runLength_=0;
#ifdef DEBUG
std::cout << "B read " << numChars - charsLeft << " out of "
<< numChars << std::endl;
#endif
currentPos_ += ( numChars - charsLeft );
return ( numChars - charsLeft );
} // ~if
} // ~while
#ifdef DEBUG
std::cout << "BR RL () : last try - setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
// runLength_=lengths_[lastChar_];
// lastChar_=codes_[lastChar_];
#ifdef DEBUG
std::cout << "BR RL () : last try - setting " << lastChar_ << " "
<< runLength_ << " " << pFile_ << std::endl;
#endif
memset( p, lastChar_, charsLeft );
runLength_ -= charsLeft;
#ifdef DEBUG
std::cout << "B delivered " << numChars << " " << charsLeft << " "
<< pFile_ << std::endl;
#endif
currentPos_ += numChars;
return numChars;
} // ~operator()
bool BwtReaderRunLengthRam::getRun( void )
{
if ( posInFullFileBuf_ == sizeOfFullFileBuf_ )
{
runLength_ = 0;
// cerr << "end reached at " << posInFullFileBuf_ << endl;
return false;
}
else
{
const unsigned char c = fullFileBuf_[posInFullFileBuf_];
runLength_ = lengths_[c];
lastChar_ = codes_[c];
++posInFullFileBuf_;
#ifdef DEBUG
cout << "Got run: " << runLength_ << " of " << lastChar_ << endl;
#endif
}
return true;
} // ~getRun
BwtReaderBase* instantiateBwtPileReader( const string &pileFilename, const string &useShm, const bool keepBwtInRam, const bool forceNotUseIndexClass )
{
if ( keepBwtInRam )
{
Logger::out() << "Info: the option to keep BWT in RAM has been temporarily de-activated" << endl;
}
// Detect BWT file type (ASCII/RLE, and which RLE version)
ifstream bwtFile( pileFilename.c_str() );
vector<char> buf( 8, 'A' ); // initialised with 'A's to default to ASCII if not enough data
bwtFile.read( buf.data(), 8 );
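// The first 8 bytes decide the format: an exact match against rleV3Header means
// RLE v3; otherwise, if every byte is a plain base character (A/C/G/T/N/$,
// including the 'A' padding above for short files), the file is treated as
// ASCII BWT; anything else is assumed to be the older RLE format.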
if ( equal( buf.begin(), buf.end(), rleV3Header.begin() ) )
{
// RLE_v3 detected
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "BWT file " << pileFilename << " detected as RLE version 3" << endl;
if ( readWriteCheck( (pileFilename+".idx").c_str(), false, false ) && !forceNotUseIndexClass )
{
Logger::out() << "Using indexed BWT file for " << pileFilename << endl;
return new BwtReaderIndex<BwtReaderRunLengthV3>( pileFilename, useShm );
}
else
{
return new BwtReaderRunLengthV3( pileFilename );
}
}
else
{
bool allAscii = true;
for ( const char c: buf )
{
switch ( toupper( c ) )
{
case 'A':
case 'C':
case 'G':
case 'T':
case 'N':
case '$':
break;
default:
allAscii = false;
}
}
if (allAscii)
{
// ASCII detected
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "BWT file " << pileFilename << " detected as ASCII" << endl;
return new BwtReaderASCII( pileFilename );
}
else
{
// old RLE detected
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "BWT file " << pileFilename << " detected as RLE" << endl;
if ( readWriteCheck( (pileFilename+".idx").c_str(), false, false ) && !forceNotUseIndexClass )
{
Logger::out() << "Using indexed BWT file for " << pileFilename << endl;
return new BwtReaderIndex<BwtReaderRunLength>( pileFilename, useShm );
}
else
{
return new BwtReaderRunLength( pileFilename );
}
}
}
assert( false && "unreachable code" );
return NULL;
}
vector <BwtReaderBase *> instantiateBwtPileReaders( const string &bwtPrefix, const string &useShm )
{
vector <BwtReaderBase *> inBwt( alphabetSize );
for ( int i( 0 ); i < alphabetSize; i++ )
{
stringstream filenameSS;
filenameSS << bwtPrefix << "-B0" << i;
string pileFilename = filenameSS.str();
inBwt[i] = instantiateBwtPileReader( pileFilename, useShm, false );
}
return inBwt;
}
<|start_filename|>src/metagenomics/BuildChromosomeBwt.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifdef HAVE_SEQAN
#include "../shared/Alphabet.hh"
#include <iostream>
#include <seqan/index.h>
using namespace std;
using namespace seqan;
void getFileName( const string &stem, const char code, const int pile,
string &fileName )
{
fileName = stem;
fileName += '-';
fileName += code;
fileName += '0';
assert( pile <= 9 );
fileName += ( char )( 48 + pile );
// cerr << "Made file name " << fileName << endl;
}
int main ( int numArgs, const char *args[] )
{
if ( numArgs != 3 )
{
cerr << "Usage: " << args[0] << "outputprefix fileToConvert" << endl;
exit( EXIT_FAILURE );
}
cerr << "metabeetl-db-makeBWTSkew inputFile " << args[1] << endl;
std::fstream fstrm;
fstrm.open( args[1], ::std::ios_base::in | ::std::ios_base::binary );
String<char> fasta_tag;
String<char> fasta_seq;
//Read the meta-information.
readMeta( fstrm, fasta_tag, Fasta() );
std::cout << "Tag: " << fasta_tag << "\n"; //prints "a test file"
//Read the sequence.
read( fstrm, fasta_seq, Fasta() );
fstrm.close();
for ( uint i( 0 ); i < length( fasta_seq ); i++ )
{
fasta_seq[i] = toupper( fasta_seq[i] );
if ( strchr( "ACGNT", fasta_seq[i] ) == NULL )
{
cerr << "Found invalid character " << fasta_seq[i]
<< " at position " << i << " in file "
<< args[1] << endl;
fasta_seq[i] = 'N';
}
}
appendValue( fasta_seq, '$' );
// std::cout <<"Seq: "<< fasta_seq << "\n"; //prints the sequence
// ModifiedString< ModifiedString< String<char>, ModView< FunctorComplement<char> > >, ModReverse > myMod(fasta_seq);
String<unsigned> sa;
///Build a suffix array using the Skew7 algorithm.
resize( sa, length( fasta_seq ) );
createSuffixArray( sa, fasta_seq, Skew7() );
char lastChar( notInAlphabet );
int fileNum;
string fileName;
FILE *pFile( NULL ), *pArrayFile( NULL );
cout << length( fasta_seq ) << endl;
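// For each suffix in suffix-array order, emit the preceding character of the
// text (i.e. the BWT) into the pile file selected by the suffix's first
// character, and write the suffix-array entry itself into the matching -A0x file.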
for ( uint i( 0 ); i < length( fasta_seq ); i++ )
{
if ( sa[i] > length( fasta_seq ) - 1 )
{
cerr << "sa bigger " << endl;
}
char thisChar = fasta_seq[sa[i]];
if ( thisChar != lastChar )
{
fileNum = whichPile[( int )thisChar];
assert( fileNum != nv );
string fileNameStem = "bwt_" + ( string )args[2];
getFileName( fileNameStem, 'B', fileNum, fileName );
cerr << "Opening new file " << fileName << endl;
if ( pFile != NULL )
{
fclose( pFile );
pFile = NULL;
}
pFile = fopen( fileName.c_str(), "w" );
assert( pFile != NULL );
getFileName( fileNameStem, 'A', fileNum, fileName );
cerr << "Opening new file " << fileName << endl;
if ( pArrayFile != NULL )
{
fclose( pArrayFile );
pArrayFile = NULL;
}
pArrayFile = fopen( fileName.c_str(), "w" );
assert( pArrayFile != NULL );
lastChar = thisChar;
}
thisChar = ( ( sa[i] == 0 ) ? '$' : fasta_seq[sa[i] - 1] );
fputc( thisChar, pFile );
fwrite( &sa[i], sizeof( unsigned ), 1, pArrayFile );
// cout << fasta_seq[sa[i]] << endl;
} // ~for i
cout << endl;
if ( pFile != NULL ) fclose ( pFile );
if ( pArrayFile != NULL ) fclose ( pArrayFile );
// for (int i(0);i<10;i++)
// cout << fasta_seq[sa[i]] << end
return 0;
} // ~main
#else //ifdef HAVE_SEQAN
#warning Compiled without Seqan library. Missing metagenomics tool.
#include <iostream>
int main ( int numArgs, const char *args[] )
{
std::cerr << "This tool is unavailable as BEETL was compiled without the Seqan library" << std::endl;
}
#endif //ifdef HAVE_SEQAN
<|start_filename|>src/shared/SeqReader.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "SeqReader.hh"
#include "libzoo/util/Logger.hh"
#include <cmath>
#include <cstdlib>
#include <cstring>
#include <iostream>
using namespace std;
//#define DEBUG 1
//
// SeqReaderBase member function definitions
//
SeqReaderBase::SeqReaderBase() {}
SeqReaderBase::~SeqReaderBase() {}
//
// SeqReaderFile member function definitions
//
SeqReaderFile::SeqReaderFile( FILE *pFile ) :
pFile_( pFile ), allRead_( false ), length_( -1 )
{
bufSeq_[0] = 0;
bufQual_[0] = 0;
bufName_[0] = 0;
}
SeqReaderFile::~SeqReaderFile() {}
SeqReaderFile *SeqReaderFile::getReader( FILE *pFile )
{
int i( fgetc( pFile ) );
char c( ( char )i ); // TBD check for error condition
ungetc( i, pFile );
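// Peek at the first character to pick a reader: '>' means FASTA, '@' means
// FASTQ, and any valid BWT alphabet character means a raw one-sequence-per-line file.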
if ( c == '>' )
{
return new SeqReaderFasta( pFile );
}
else if ( c == '@' )
{
return new SeqReaderFastq( pFile );
}
else if ( whichPile[i] != nv )
{
return new SeqReaderRaw( pFile );
}
else
{
Logger::error() << "Error: Unable to deduce file type from first char (char code = "
<< i << " )" << endl;
exit( EXIT_FAILURE );
} // ~else
} // ~getReader
void SeqReaderFile::rewindFile()
{
rewind( pFile_ );
}
const char *SeqReaderFile::thisSeq( void )
{
return bufSeq_;
}
const char *SeqReaderFile::thisQual( void )
{
return bufQual_;
}
const char *SeqReaderFile::thisName( void )
{
return bufName_;
}
bool SeqReaderFile::allRead( void ) const
{
return allRead_;
}
int SeqReaderFile::length( void ) const
{
return length_;
}
//
// SeqReaderRaw member function definitions
//
SeqReaderRaw::SeqReaderRaw( FILE *pFile ) : SeqReaderFile( pFile )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Creating SeqReaderRaw" << endl;
readNext();
if ( allRead() == true )
{
Logger::error() << "Error: No sequences in file!" << endl;
exit( EXIT_FAILURE );
}
else
{
length_ = strlen( bufSeq_ ) - 1;
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Deducing read length of " << length_ << endl;
}
}
SeqReaderRaw::~SeqReaderRaw() {}
void SeqReaderRaw::readNext( char *seqBuf )
{
// cout << "readNext" << endl;
if ( allRead_ == true )
{
Logger::error() << "Error: Tried to read an empty sequence stream" << endl;
exit( EXIT_FAILURE );
}
else if ( fgets( seqBuf ? : bufSeq_, maxSeqSize, pFile_ ) == NULL )
{
allRead_ = true;
}
else if ( ( length_ != -1 ) && ( ( ( int )strlen( seqBuf ? : bufSeq_ ) ) != length_ + 1 ) )
{
Logger::error() << "Error: Length of current sequence does not match length of first @pos " << ftell( pFile_ ) << endl;
exit( EXIT_FAILURE );
}
}
//
// SeqReaderFasta member function definitions
//
SeqReaderFasta::SeqReaderFasta( FILE *pFile ) : SeqReaderFile( pFile )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Creating SeqReaderFasta" << endl;
readNext();
if ( allRead() == true )
{
Logger::error() << "Error: No sequences in file!" << endl;
exit( EXIT_FAILURE );
}
else
{
length_ = strlen( bufSeq_ ) - 1;
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Deducing read length of " << length_ << endl;
}
}
SeqReaderFasta::~SeqReaderFasta() {}
void SeqReaderFasta::readNext( char *seqBuf )
{
if ( allRead_ == true )
{
Logger::error() << "Error: Error: Tried to read an empty sequence stream" << endl;
exit( EXIT_FAILURE );
}
else if ( fgets( bufName_, maxSeqSize, pFile_ ) == NULL )
{
allRead_ = true;
}
else
{
if ( bufName_[0] != '>' )
{
Logger::error() << "Error: Expected FASTA header, got " << bufName_ << endl;
exit( EXIT_FAILURE );
}
if ( fgets( seqBuf ? : bufSeq_, maxSeqSize, pFile_ ) == NULL )
{
Logger::error() << "Error: read FASTA header with no entry, incomplete file?" << endl;
exit( EXIT_FAILURE );
}
else if ( ( length_ != -1 ) && ( ( ( int )strlen( seqBuf ? : bufSeq_ ) ) != length_ + 1 ) )
//else if (strlen(seqBuf?:bufSeq_)!=length_)
{
Logger::error() << "Error: Length of current sequence does not match length of first @pos " << ftell( pFile_ ) << endl;
exit( EXIT_FAILURE );
}
}
}
//
// SeqReaderFastq member function definitions
//
SeqReaderFastq::SeqReaderFastq( FILE *pFile ) : SeqReaderFile( pFile )
{
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Creating SeqReaderFastq" << endl;
readNext();
if ( allRead() == true )
{
Logger::error() << "Error: No sequences in file!" << endl;
exit( EXIT_FAILURE );
}
else
{
length_ = strlen( bufSeq_ ) - 1;
Logger_if( LOG_SHOW_IF_VERY_VERBOSE ) Logger::out() << "Deducing read length of " << length_ << endl;
}
}
SeqReaderFastq::~SeqReaderFastq() {}
void SeqReaderFastq::readNext( char *seqBuf )
{
if ( allRead_ == true )
{
Logger::error() << "Error: Tried to read an empty sequence stream" << endl;
exit( EXIT_FAILURE );
}
else if ( fgets( bufName_, maxSeqSize, pFile_ ) == NULL )
{
allRead_ = true;
}
else
{
if ( bufName_[0] != '@' )
{
Logger::error() << "Error: Expected FASTQ header, got " << bufName_ << endl;
exit( EXIT_FAILURE );
}
if ( fgets( seqBuf ? : bufSeq_, maxSeqSize, pFile_ ) == NULL )
{
Logger::error() << "Error: read FASTA header with no entry, incomplete file?" << endl;
exit( EXIT_FAILURE );
}
else
{
if ( ( length_ != -1 ) && ( ( ( int )strlen( seqBuf ? : bufSeq_ ) ) != length_ + 1 ) )
//else if (strlen(seqBuf?:bufSeq_)!=length_)
{
Logger::error() << "Error: Length of current sequence does not match length of first at position " << ftell( pFile_ ) << endl;
exit( EXIT_FAILURE );
}
else if ( fgets( bufQual_, maxSeqSize, pFile_ ) == NULL )
{
Logger::error() << "Error: Could not read FASTQ quality spacer, incomplete file?" << endl;
exit( EXIT_FAILURE );
}
else if ( bufQual_[0] != '+' )
{
Logger::error() << "Error: Expected FASTQ quality spacer, got " << bufQual_ << endl;
exit( EXIT_FAILURE );
}
else if ( fgets( bufQual_, maxSeqSize, pFile_ ) == NULL )
{
Logger::error() << "Error: Could not read FASTQ quality string, incomplete file?" << endl;
exit( EXIT_FAILURE );
}
} // ~else
} // ~else
} // ~Fastq::readNext
<|start_filename|>src/backtracker/OneBwtBackTracker.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "OneBwtBackTracker.hh"
#include "libzoo/util/Logger.hh"
using namespace std;
OneBwtBackTracker::OneBwtBackTracker(
BwtReaderBase *inBwt,
LetterNumber &currentPos,
RangeStoreExternal &r,
LetterCount &countsSoFar,
const string &subset,
const int cycle,
const bool doesPropagateBkptToSeqNumInSet,
const bool noComparisonSkip,
const bool propagateSequence,
EndPosFile &endPosFile
)
: BackTrackerBase( subset, cycle, noComparisonSkip, propagateSequence )
, inBwt_( inBwt ),
currentPos_( currentPos ),
r_( r ),
countsSoFar_( countsSoFar ),
subset_( subset ),
cycle_( cycle ),
// numRanges_( 0 ),
// numSingletonRanges_( 0 ),
doesPropagateBkptToSeqNumInSet_( doesPropagateBkptToSeqNumInSet )
// noComparisonSkip_( noComparisonSkip )
, endPosFile_( endPosFile )
{
for ( int l( 0 ); l < alphabetSize; ++l )
propagateInterval_[l] = false;
}
void OneBwtBackTracker::process (
int pileNum,
string &thisWord,
IntervalHandlerBase &intervalHandler,
Range &thisRange
)
{
LetterCount countsThisRange;
bool notAtLast( true );
processSingletons(
pileNum
, notAtLast
, r_
, thisRange
, currentPos_
, inBwt_
, countsSoFar_
, countsThisRange
, intervalHandler
, propagateInterval_
, thisWord
, doesPropagateBkptToSeqNumInSet_
, ( IntervalHandler_FoundCallbackPtr )( &IntervalHandlerBase::foundInAOnly )
, endPosFile_
, 1
);
} // ~OneBwtBackTracker::process
<|start_filename|>src/BCRext/BwtIndex.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BWT_INDEX_HH
#define BWT_INDEX_HH
#include "BwtReader.hh"
#include "BwtWriter.hh"
#include <string>
#include <vector>
using std::string;
using std::vector;
//#define USE_COMPACT_STRUCTURES
#ifdef USE_COMPACT_STRUCTURES
# define LETTER_COUNT_CLASS LetterCountCompact
#else
# define LETTER_COUNT_CLASS LetterCount
#endif
const vector<char> indexV1Header = { 'B', 'W', 'I', 13, 10, 26, 1, 0 };
const vector<char> indexV2Header = { 'B', 'W', 'I', 13, 10, 26, 2, 0 };
template< class T >
class BwtReaderIndex : public T
{
public:
BwtReaderIndex( const string &filename, const string &optionalSharedMemoryPath );
// BwtReaderIndex( const BwtReaderIndex & );
BwtReaderIndex( const BwtReaderIndex &obj ) :
T( obj ),
indexFilename_( obj.indexFilename_ ),
pIndexFile_( obj.pIndexFile_ ),
indexPosBwt_( obj.indexPosBwt_ ),
indexPosFile_( obj.indexPosFile_ ),
indexCount_( obj.indexCount_ ),
indexNext_( obj.indexNext_ )
{
assert( pIndexFile_ == NULL ); // If it's not NULL, we may try to fclose it multiple times
}
virtual ~BwtReaderIndex() {}
virtual BwtReaderIndex *clone() const
{
return new BwtReaderIndex( *this );
};
virtual LetterNumber readAndCount( LetterCount &c, const LetterNumber numChars );
virtual LetterNumber readAndSend( BwtWriterBase &writer, const LetterNumber numChars )
{
assert( 1 == 0 );
}
virtual LetterNumber operator()( char *p, LetterNumber numChars )
{
return T::operator()( p, numChars );
// assert( 1 == 0 );
}
virtual void rewindFile( void );
// virtual LetterNumber tellg( void ) const;
void initIndex( const string &optionalSharedMemoryPath );
// bool getRun(void);
protected:
string indexFilename_;
FILE *pIndexFile_;
vector<LetterNumber> indexPosBwt0_;
vector<LetterNumber> indexPosFile0_;
vector<LETTER_COUNT_CLASS> indexCount0_;
// Pointers to the same structure, used in case of mmapped files
LetterNumber *indexPosBwt_;
LetterNumber *indexPosFile_;
LETTER_COUNT_CLASS *indexCount_;
uint32_t indexSize_;
uint32_t indexNext_;
};
void buildIndex( BwtReaderBase *reader, FILE *pFile, const int indexBinSize );
#endif //ifndef BWT_INDEX_HH
<|start_filename|>src/parameters/ExtendParameters.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BEETL_EXTEND_PARAMETERS_HH
#define BEETL_EXTEND_PARAMETERS_HH
#include "libzoo/cli/ToolParameters.hh"
namespace BeetlExtendParameters
{
} // namespace BeetlExtendParameters
class ExtendParameters : public ToolParameters
{
public:
ExtendParameters()
{
using namespace BeetlExtendParameters;
addEntry( -1, "intervals filename", "--intervals", "-i", "Input file: intervals to extend", "", TYPE_STRING | REQUIRED );
addEntry( -1, "bwt filename prefix", "--bwt-prefix", "-b", "Input BWT index files prefix", "", TYPE_STRING | REQUIRED );
addEntry( -1, "sequence numbers output filename", "--output-seqnum", "-o", "Destination file to output sequence numbers", "", TYPE_STRING );
addEntry( -1, "dollar positions output filename", "--output-dollar-pos", "-p", "Destination file to output BWT positions of dollar signs", "", TYPE_STRING );
addEntry( -1, "propagate sequence", "--propagate-sequence", "", "Propagate and output sequence with each BWT range (slower)", "", TYPE_SWITCH );
addDefaultVerbosityAndHelpEntries();
}
};
#endif //ifndef BEETL_EXTEND_PARAMETERS_HH
<|start_filename|>src/shared/Timer.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "Timer.hh"
#include <cstdlib>
Timer::Timer( void )
{
if ( gettimeofday( &lastTime_, NULL ) != 0 )
exit( EXIT_FAILURE );
if ( getrusage( RUSAGE_SELF, &lastUsage_ ) != 0 )
exit( EXIT_FAILURE );
} // ~Timer::Timer( void )
std::ostream &Timer::print( std::ostream &os )
{
if ( gettimeofday( &thisTime_, NULL ) != 0 )
exit( EXIT_FAILURE );
if ( getrusage( RUSAGE_SELF, &thisUsage_ ) != 0 )
exit( EXIT_FAILURE );
static double elapsedActual, elapsedUser, elapsedSystem;
elapsedUser = thisUsage_.ru_utime.tv_sec - lastUsage_.ru_utime.tv_sec
+ ( thisUsage_.ru_utime.tv_usec
- ( double ) lastUsage_.ru_utime.tv_usec ) / 1000000;
elapsedSystem = thisUsage_.ru_stime.tv_sec - lastUsage_.ru_stime.tv_sec
+ ( thisUsage_.ru_stime.tv_usec
- ( double ) lastUsage_.ru_stime.tv_usec ) / 1000000;
elapsedActual = thisTime_.tv_sec - lastTime_.tv_sec + ( thisTime_.tv_usec
- ( double ) lastTime_.tv_usec ) / 1000000;
os << "User: " << elapsedUser << "s System: " << elapsedSystem
<< "s Actual: " << elapsedActual << "s Efficiency: "
<< ( ( elapsedActual == 0 ) ? 0 : ( ( elapsedUser + elapsedSystem )
* 100.0 / elapsedActual ) ) << '\045'; // only way to print % liked by both Intel and gcc!
lastTime_ = thisTime_;
lastUsage_ = thisUsage_;
return os;
} // ~std::ostream& Timer::print( std::ostream& os )
std::ostream &operator<<( std::ostream &os, Timer &timer )
{
return timer.print( os );
} // ~std::ostream& operator<<( std::ostream& os, Timer& timer )
const char *Timer::timeNow( void ) const
{
time_t tt( time( NULL ) );
return ctime( &tt );
} // ~const char* Timer::timeNow( void ) const
<|start_filename|>src/countWords/IntervalHandlerMetagenome.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_INTERVALHANDLER_METAGENOME_HH
#define INCLUDED_INTERVALHANDLER_METAGENOME_HH
#include "Alphabet.hh"
#include "IntervalHandlerBase.hh"
#include "LetterCount.hh"
#include "RangeStore.hh"
#include "Tools.hh"
#include "Types.hh"
#include <cstdlib>
#include <fstream>
#include <unistd.h>
using std::vector;
//
// IntervalHandler
//
// Idea here is that different algorithms can be implemented by defining
// new subclasses of IntervalHandler
struct IntervalHandlerMetagenome : public IntervalHandlerBase
{
IntervalHandlerMetagenome( unsigned int minOcc,
vector<string> &filenamesCSet,
const vector<char *> &mmappedCFiles,
vector< vector< int> > &fileNumToTaxIds,
bool testDB,
uint minWordLength,
uint maxWordLength );
virtual ~IntervalHandlerMetagenome();
virtual void foundInBoth
( const int pileNum,
const LetterCount &countsThisRangeA,
const LetterCount &countsThisRangeB,
const Range &thisRangeA,
const Range &thisRangeB,
AlphabetFlag &propagateIntervalA,
AlphabetFlag &propagateIntervalB,
bool &isBreakpointDetected,
const int cycle
);
virtual void foundInAOnly
( const int pileNum,
const LetterCount &countsSoFarA,
const LetterCount &countsThisRangeA,
const char *bwtSubstring,
Range &thisRangeA,
AlphabetFlag &propagateIntervalA,
const int cycle
);
virtual void foundInBOnly
( const int pileNum,
const LetterCount &countsSoFarB,
const LetterCount &countsThisRangeB,
const char *bwtSubstring,
Range &thisRangeB,
AlphabetFlag &propagateIntervalB,
const int cycle
);
vector<bool> intervalInSameTaxa( vector<uint> &sharedTaxIds, vector<MetagFileNumRefType> &fileNumbers );
void getFileNumbersForRange( const unsigned int &pileNum, const LetterNumber &bwtPosition, const uint &num, vector<MetagFileNumRefType> &fileNumbers );
const LetterNumber minOcc_;
// setC of the merging algorithm from Tony:
// for each BWT position there should be an (unsigned short) fileNumber indicating which file the suffix came from
vector<int> cSetFileDescs_;
vector<off_t> posInFile_;
const vector<char *> mmappedCFiles_;
// for each fileNumber there should be the same number of taxIds; this can stop at any level and will be padded with zeros
vector< vector< int> > &fileNumToTaxIds_;
bool testDB_;
// minimal word length. Not strictly needed, but speeds the algorithm up
// because the fileNumbers and taxa do not have to be checked before the minimal wordLength is reached
uint minWordLength_;
uint maxWordLength_;
// void createOutputFile( const int subsetThreadNum, const int i, const int j, const int cycle );
// std::ofstream outFile_;
};
#endif
<|start_filename|>src/errors/WitnessReader.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_WITNESSREADER_HH
#define INCLUDED_WITNESSREADER_HH
#include "BwtReader.hh"
#include "IntervalHandlerBase.hh"
#include "LetterCount.hh"
#include "RangeStore.hh"
#include "Types.hh"
#include "libzoo/util/Logger.hh"
#include <string>
using namespace std;
class WitnessReader
{
public:
WitnessReader(
const string &lcpFileName,
const string &bwtFileName,
int witnessLength,
int minimumSupport,
bool rleBWT
);
virtual ~WitnessReader();
int currentWitnessCount() const;
LetterCount TotalCountSoFar();
int currentWitnessBlockStart() const;
LetterCount currentWitnessSupport();
bool nextWitnessBlock( LetterCount &lc );
void test();
private:
FILE *pFile_;
BwtReaderBase *bwtReader_;
int lcpBuf_[ReadBufferSize];
int filledTo_;
int at_;
int lastBlockEnd_;
int filePos_;
int witnessLength_;
int minimumSupport_;
int lastLcpBlockSupport_;
LetterCount totalCountSoFar_;
void refill_();
bool nextCandidateLcpBlock_();
};
#endif
<|start_filename|>src/libzoo/util/TemporaryFilesManager.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef TEMPORARY_FILES_MANAGER_HH
#define TEMPORARY_FILES_MANAGER_HH
#include <cstdio>
#include <inttypes.h>
#include <memory>
#include <stdlib.h>
#include <string>
#include <vector>
using std::string;
using std::vector;
using std::shared_ptr;
//#define DISABLE_WRITES_AND_REMOVES
class TemporaryFilesManager
{
private:
TemporaryFilesManager() : ramLimitMB_( 0 ), tempPathWasCreated_( false ) {}
TemporaryFilesManager( TemporaryFilesManager const & ); // no impl to avoid copies of singleton
void operator=( TemporaryFilesManager const & ); // no impl to avoid copies of singleton
public:
static TemporaryFilesManager &get()
{
static TemporaryFilesManager singleton;
return singleton;
}
void setTempPath( const string &path, const bool createUniqueSubDirectory = true );
void setRamLimit( const size_t ramLimit );
void addFilename( const string &filename );
void cleanupAllFiles(); // Delete all existing temporary files
void cleanup(); // Delete all existing temporary files and temp subdirectory
string tempPath_;
size_t ramLimitMB_;
private:
string tempPathParent_;
bool tempPathWasCreated_;
vector<string> filenames_;
};
static const int TempFileBufSize( 32768 );
class TemporaryFile
{
public:
TemporaryFile( )
: f_( NULL )
#ifdef BUFFERED_WRITE_TEST_VERSION
, buf_( TempFileBufSize )
, p_( &buf_[0] )
, pBufMax_( p_ + TempFileBufSize )
#endif
{}
TemporaryFile( const char *filename, const char *mode );
virtual ~TemporaryFile() {}
static TemporaryFile *fopen( const char *filename, const char *mode )
{
#ifdef DISABLE_WRITES_AND_REMOVES
if ( mode[0] == 'w' )
return new TemporaryFile( "tmp", mode );
#endif
TemporaryFile *result = new TemporaryFile( filename, mode );
if ( result->f_ )
return result;
else
{
delete result;
return NULL;
}
}
void open( const char *filename, const char *mode );
virtual size_t read( void *ptr, size_t size, size_t nmemb )
{
return ::fread( ptr, size, nmemb, f_ );
}
#ifdef BUFFERED_WRITE_TEST_VERSION
void flushBuffer( void )
{
::fwrite( &buf_[0], 1, p_ - &buf_[0], f_ );
p_ = &buf_[0];
}
virtual size_t write( const void *ptr, size_t size, size_t nmemb )
{
#ifdef DISABLE_WRITES_AND_REMOVES
return size ? nmemb : 0;
#endif
char *pIn( ( char * )ptr );
for ( size_t i( 0 ); i < size * nmemb; i++ )
{
if ( p_ == pBufMax_ ) flushBuffer();
*p_++ = *pIn++;
}
return nmemb;
}
#else
virtual size_t write( const void *ptr, size_t size, size_t nmemb )
{
#ifdef DISABLE_WRITES_AND_REMOVES
return size ? nmemb : 0;
#endif
return ::fwrite( ptr, size, nmemb, f_ );
}
#endif
virtual size_t tell()
{
// TBD - gives wrong answers for buffered write
return ::ftell( f_ );
}
void flush()
{
if ( f_ )
::fflush( f_ );
}
virtual void close()
{
if ( f_ )
{
#ifdef BUFFERED_WRITE_TEST_VERSION
flushBuffer();
#endif
::fclose( f_ );
}
delete this;
}
int fileno()
{
if ( f_ )
return ::fileno( f_ );
else
return -1;
}
virtual bool eof()
{
if ( f_ )
return ::feof( f_ );
else
return true;
}
friend size_t fread( void *ptr, size_t size, size_t nmemb, TemporaryFile *stream )
{
return stream->read( ptr, size, nmemb );
}
friend size_t fwrite( const void *ptr, size_t size, size_t nmemb, TemporaryFile *stream )
{
return stream->write( ptr, size, nmemb );
}
friend size_t ftell( TemporaryFile *stream )
{
return stream->tell();
}
friend void fflush( TemporaryFile *stream )
{
stream->flush();
}
friend void fclose( TemporaryFile *stream )
{
stream->close();
}
friend int fileno( TemporaryFile *stream )
{
return stream->fileno();
}
friend bool feof( TemporaryFile *stream )
{
return stream->eof();
}
static bool remove( const char *filename );
protected:
static string getFullFilename( const string &filename );
FILE *f_;
#ifdef BUFFERED_WRITE_TEST_VERSION
vector<char> buf_;
char *p_;
char *pBufMax_;
#endif //ifdef BUFFERED_WRITE_TEST_VERSION
};
struct NoInitChar
{
char value;
NoInitChar() {} // do nothing, especially not an initialisation
};
class NoInitCharVector
{
public:
NoInitCharVector()
: data( NULL )
, capacity_( 0 )
, size_( 0 )
{}
~NoInitCharVector()
{
free( data );
}
void reserve( size_t n )
{
data = ( char * )malloc( n );
capacity_ = n;
}
void resize( size_t n )
{
size_ = n;
}
size_t capacity() const
{
return capacity_;
}
size_t size() const
{
return size_;
}
char *data;
private:
size_t capacity_;
size_t size_;
};
// RAM file with disk fallback
class TemporaryRamFile : public TemporaryFile
{
public:
TemporaryRamFile( ) : TemporaryFile(), currentPos_( 0 ) {}
TemporaryRamFile( const char *filename, const char *mode, const uint64_t maxRAM = 0 );
virtual ~TemporaryRamFile();
static TemporaryRamFile *fopen( const char *filename, const char *mode, const uint64_t maxRAM = 0 );
static bool remove( const char *filename );
virtual void close();
virtual size_t read( void *ptr, size_t size, size_t nmemb );
virtual size_t write( const void *ptr, size_t size, size_t nmemb );
virtual size_t tell()
{
return currentPos_;
}
virtual bool eof();
private:
const string filename_;
const string mode_;
shared_ptr< NoInitCharVector > buf_;
size_t currentPos_;
// char localBuf_[1024*1024];
};
#endif // TEMPORARY_FILES_MANAGER_HH
<|start_filename|>src/shared/EndPosFile.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "EndPosFile.hh"
#include <cassert>
#include <fstream>
using namespace std;
EndPosFile::EndPosFile( const string &bwtFilenamePrefix )
: file_( bwtFilenamePrefix + "-end-pos" )
, sequenceGroupCount_( 0 )
, sequenceCountInGroup_( 0 )
, hasRevComp_( 0 )
{
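// The -end-pos file starts with a small header (sequence group count, sequences
// per group, reverse-complement flag) followed by one (SequenceNumber, uint8_t)
// record per '$' sign; see the seekg arithmetic in convertDollarNumToSequenceNum below.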
file_.read( reinterpret_cast< char * >( &sequenceGroupCount_ ), sizeof( SequenceNumber ) );
file_.read( reinterpret_cast< char * >( &sequenceCountInGroup_ ), sizeof( uint8_t ) );
file_.read( reinterpret_cast< char * >( &hasRevComp_ ), sizeof( uint8_t ) );
// assert( file_.good() );
dollarSignCount_ = sequenceGroupCount_ * sequenceCountInGroup_ * ( hasRevComp_ ? 2 : 1 );
}
SequenceNumber EndPosFile::convertDollarNumToSequenceNum( const SequenceNumber dollarNum )
//SequenceNumber EndPosFile_convertDollarNumToSequenceNum( const SequenceNumber dollarNum )
{
assert( file_.good() && "Error: -end-pos file not readable" );
assert( dollarNum < dollarSignCount_ );
/*
if ( dollarPos >= numDollarEntries )
{
cout << "Warning: dollarPos " << dollarPos << " >= numDollarEntries " << numDollarEntries << endl;
// continue;
dollarPos %= numDollarEntries;
}
*/
file_.seekg( sizeof( SequenceNumber ) + 2 * sizeof( uint8_t ) + ( dollarNum ) * ( sizeof( SequenceNumber ) + sizeof( uint8_t ) ) );
SequenceNumber sequenceGroupNum;
file_.read( reinterpret_cast< char * >( &sequenceGroupNum ), sizeof( SequenceNumber ) );
uint8_t positionInGroup;
file_.read( reinterpret_cast< char * >( &positionInGroup ), sizeof( uint8_t ) );
SequenceNumber sequenceNum = sequenceGroupNum + positionInGroup * sequenceGroupCount_;
return sequenceNum;
}
<|start_filename|>src/backtracker/IntervalHandlerBase.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "IntervalHandlerBase.hh"
#include <sstream>
#include <sys/types.h>
#include <sys/stat.h>
using namespace std;
void IntervalHandlerBase::countString( char *bwtSubstring, int length, LetterCount &countsThisRange )
{
for ( int i = 0; i < length; i++ )
countsThisRange += bwtSubstring[i];
}
void IntervalHandlerBase::createOutputFile( const int subsetThreadNum, const int i, const int j, const int cycle, const string &outputDirectory )
{
#define CONCATENATE_J_PILES
// if ( cycle >= ( int )minWordLength_ )
{
ostringstream filename;
#ifdef CONCATENATE_J_PILES
filename << outputDirectory << ( outputDirectory.empty() ? "" : "/" ) << "cycle" << cycle << ".subset" << subsetThreadNum << "." << i;
outFile_.open( filename.str(), ( j == 1 ) ? ios::out : ios::app );
#else
filename << outputDirectory << ( outputDirectory.empty() ? "" : "/" ) << "cycle" << cycle << ".subset" << subsetThreadNum << "." << i << "." << j;
outFile_.open( filename.str() );
#endif
}
}
<|start_filename|>src/countWords/IntervalHandlerReference.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "IntervalHandlerReference.hh"
#include "libzoo/util/Logger.hh"
using namespace std;
//
// IntervalHandlerReference member function declarations
//
void IntervalHandlerReference::foundInBoth
( const int pileNum,
const LetterCount &countsThisRangeA,
const LetterCount &countsThisRangeB,
const Range &thisRangeA,
const Range &thisRangeB,
AlphabetFlag &propagateIntervalA,
AlphabetFlag &propagateIntervalB,
bool &isBreakpointDetected,
const int cycle
)
{
bool significantNonRef( false );
// LetterNumber maxSignalAOnly(0), maxSignalBOnly(0);
if ( thisRangeB.num_ > 1 )
{
// if k-mer is not unique in B (reference),
// propagate all B and all A that matches B
for ( int l( 1 ); l < alphabetSize; l++ )
{
if ( countsThisRangeB.count_[l] > 0 )
{
propagateIntervalB[l] = true;
if ( countsThisRangeA.count_[l] > 0 )
propagateIntervalA[l] = true;
else
propagateIntervalA[l] = false;
} // ~if
else
{
propagateIntervalA[l] = false;
propagateIntervalB[l] = false;
} // ~else
} // ~for
} // ~if
else
{
if ( thisRangeB.num_ != 1 )
{
cerr << "thisRandB is no 1. Aborting." << endl;
}
for ( int l( 1 ); l < alphabetSize; l++ )
{
if ( countsThisRangeB.count_[l] > 0 )
{
propagateIntervalB[l] = true;
if ( countsThisRangeA.count_[l] > 0 )
propagateIntervalA[l] = true;
else
propagateIntervalA[l] = false;
} // ~if
else
{
propagateIntervalB[l] = false;
if ( countsThisRangeA.count_[l] > minOcc_ )
{
propagateIntervalA[l] = true;
significantNonRef = true;
}
else
propagateIntervalA[l] = false;
} // ~else
} // ~for
} // ~else
if ( significantNonRef == true )
{
isBreakpointDetected = true;
if ( !thisRangeB.word_.empty() )
{
#pragma omp critical (IO)
Logger::out()
<< "BKPT"
<< ' ' << thisRangeB.word_
<< ' ' << ( thisRangeB.pos_ & matchMask )
<< ' ' << countsThisRangeA.count_[0]
<< ':' << countsThisRangeA.count_[1]
<< ':' << countsThisRangeA.count_[2]
<< ':' << countsThisRangeA.count_[3]
<< ':' << countsThisRangeA.count_[4]
<< ':' << countsThisRangeA.count_[5]
<< ':' << countsThisRangeB.count_[0]
<< ':' << countsThisRangeB.count_[1]
<< ':' << countsThisRangeB.count_[2]
<< ':' << countsThisRangeB.count_[3]
<< ':' << countsThisRangeB.count_[4]
<< ':' << countsThisRangeB.count_[5]
<< endl;
}
else
{
#pragma omp critical (IO)
Logger::out()
<< "BKPT"
<< ' ' << alphabet[pileNum]
<< ' ' << countsThisRangeA.count_[0]
<< ':' << countsThisRangeA.count_[1]
<< ':' << countsThisRangeA.count_[2]
<< ':' << countsThisRangeA.count_[3]
<< ':' << countsThisRangeA.count_[4]
<< ':' << countsThisRangeA.count_[5]
<< ':' << countsThisRangeB.count_[0]
<< ':' << countsThisRangeB.count_[1]
<< ':' << countsThisRangeB.count_[2]
<< ':' << countsThisRangeB.count_[3]
<< ':' << countsThisRangeB.count_[4]
<< ':' << countsThisRangeB.count_[5]
<< ' ' << ( thisRangeA.pos_ & matchMask )
<< ' ' << ( thisRangeB.pos_ & matchMask )
<< endl;
}
}
// don't bother with Ns
propagateIntervalA[whichPile[( int )dontKnowChar]] = false;
propagateIntervalB[whichPile[( int )dontKnowChar]] = false;
} // ~foundInBoth
void IntervalHandlerReference::foundInAOnly
( const int pileNum,
const LetterCount &countsSoFarA,
const LetterCount &countsThisRangeA,
const char *bwtSubstring,
Range &thisRangeA,
AlphabetFlag &propagateIntervalA,
const int cycle
)
{
bool significantPath( false );
for ( int l( 1 ); l < alphabetSize; l++ )
{
if ( countsThisRangeA.count_[l] >= minOcc_ )
{
significantPath = true;
propagateIntervalA[l] = true;
} // ~if
else
{
propagateIntervalA[l] = false;
} // ~else
} // ~for l
if ( significantPath == false )
#pragma omp critical (IO)
{
Logger::out() << "READ ";
if ( thisRangeA.word_.empty() )
Logger::out() << alphabet[pileNum]; // No propagated sequence
else
Logger::out() << thisRangeA.word_;
Logger::out() << " " << thisRangeA.pos_;
for ( int l( 0 ); l < alphabetSize; l++ )
Logger::out() << ( ( l == 0 ) ? " " : ":" ) << countsThisRangeA.count_[l];
Logger::out() << endl;
}
#ifdef OLD
// For now this is same as for Splice - continue until all reads found
if ( countsThisRangeA.count_[0] > 0 )
#pragma omp critical (IO)
{
Logger::out() << "READ " << thisRangeA.word_;
Logger::out() << " " << thisRangeA.pos_;
for ( int l( 0 ); l < alphabetSize; l++ )
Logger::out() << ( ( l == 0 ) ? " " : ":" ) << countsThisRangeA.count_[l];
Logger::out() << endl;
}
// TBD print out IDs of discovered reads
for ( int l( 1 ); l < alphabetSize; l++ )
{
propagateIntervalA[l] = ( countsThisRangeA.count_[l] > 0 );
} // ~for l
#endif
// don't bother with Ns
propagateIntervalA[whichPile[( int )dontKnowChar]] = false;
} // ~foundInAOnly
void IntervalHandlerReference::foundInBOnly
( const int pileNum,
const LetterCount &countsSoFarB,
const LetterCount &countsThisRangeB,
const char *bwtSubstring,
Range &thisRangeB,
AlphabetFlag &propagateIntervalB,
const int cycle
)
{
// TBD
}
<|start_filename|>src/frontends/BeetlUnbwt.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BeetlUnbwt.hh"
#include "BCRexternalBWT.hh"
#include "config.h"
#include "parameters/UnbwtParameters.hh"
#include "libzoo/cli/Common.hh"
#include "libzoo/util/Logger.hh"
#include <cstdlib>
#include <iostream>
#include <sstream>
#include <string.h>
using namespace std;
using namespace BeetlUnbwtParameters;
UnbwtParameters params;
void launchBeetlUnbwt()
{
const string &inputFilename = params["input filename prefix"];
const string &outputFilename = params["output filename"];
Logger::out() << "\nLaunching the following configuration of Beetl-unbwt:" << endl;
params.print( Logger::out(), false );
Logger::out() << endl;
int bcrMode = 1 ; // 1=decode BWT
CompressionFormatType outputCompression = compressionIncrementalRunLength; // not used
BCRexternalBWT bwt( ( char * )inputFilename.c_str(), ( char * )outputFilename.c_str(), bcrMode, outputCompression, &params );
// automatically calls: result = unbuildBCR( file1, fileOutBwt, intermediateCycFiles, fileOutput );
}
void printUsage()
{
params.printUsage();
cout << "Notes:" << endl;
cout << " Input must be a set of ASCII-encoded BWT files (not run-length-encoded)" << endl;
cout << " Fastq output requires {input}-Q0x quality files to be present" << endl;
cout << endl;
}
int main( const int argc, const char **argv )
{
// Generated using: http://patorjk.com/software/taag/#p=display&f=Soft&t=BEETL%20unbwt
cout << ",-----. ,------.,------.,--------.,--. ,-----. ,--. ,--.,--------. " << endl;
cout << "| |) /_ | .---'| .---''--. .--'| | ,--.,--.,--,--, | |) /_ | | | |'--. .--' " << endl;
cout << "| .-. \\| `--, | `--, | | | | | || || \\| .-. \\| |.'.| | | | " << endl;
cout << "| '--' /| `---.| `---. | | | '--. ' '' '| || || '--' /| ,'. | | | " << endl;
cout << "`------' `------'`------' `--' `-----' `----' `--''--'`------' '--' '--' `--' " << endl;
cout << "Version " << PACKAGE_VERSION << endl;
cout << endl;
cout << "Command called:" << endl << " ";
for ( int i = 0; i < argc; ++i )
{
cout << " " << argv[i];
}
cout << "\n" << endl;
if ( !params.parseArgv( argc, argv ) || params["help"] == 1 || !params.chechRequiredParameters() )
{
printUsage();
exit( params["help"] == 0 );
}
// Use default parameter values where needed
params.commitDefaultValues();
// Auto-detection of missing arguments
if ( !params["output format"].isSet() )
{
const string &filename = params["output filename"];
string fileFormat = detectFileFormat( filename );
if ( fileFormat.empty() )
{
cerr << "Error: file format not recognised for " << filename << endl;
exit( -1 );
}
params["output format"] = fileFormat;
}
checkFileFormat( params["output filename"], params["output format"] );
if ( !params["input format"].isSet() )
{
const string &bwtPrefix = params["input filename prefix"];
vector<string> filenames;
bool isBwtCompressed;
string availableFileLetters;
detectInputBwtProperties( bwtPrefix, filenames, isBwtCompressed, availableFileLetters );
if ( filenames.size() < 2 )
{
cerr << "Error: too few input files detected (run with -vv for more details)" << endl;
exit( -1 );
}
if ( isBwtCompressed )
{
cerr << "Error: BWT files don't seem to be in ASCII format (they probably got created as run-length-encoded)" << endl;
exit( -1 );
}
else
{
params["input format"] = "BWT_ASCII";
}
// Check that {prefix}-Q0* files are present if output is fastq
if ( params["output format"] == "fastq" )
{
assert( strchr( availableFileLetters.c_str(), 'Q' ) && "{input}-Q0x files cannot be found (those qualities are required for fastq output)" );
}
}
// Launch
launchBeetlUnbwt();
return 0;
}
<|start_filename|>src/BCRext/BwtIndex.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BwtIndex.hh"
#include "BwtReader.hh"
#include "libzoo/util/Logger.hh"
#include <algorithm>
#include <unistd.h>
#include <sys/types.h>
#ifndef DONT_USE_MMAP
# include <fcntl.h>
# include <sys/mman.h>
# include <sys/stat.h>
# include <sys/types.h>
#endif
using namespace std;
template< class T >
BwtReaderIndex<T>::BwtReaderIndex( const string &filename, const string &optionalSharedMemoryPath ):
T( filename ),
indexFilename_( filename + ".idx" ),
// isNextIndex_( false ),
pIndexFile_( NULL )
{
// current_.clear();
initIndex( optionalSharedMemoryPath );
}
template< class T >
void BwtReaderIndex<T>::rewindFile( void )
{
// rewind file and set all vars as per constructor
// current_.clear();
indexNext_ = 0;
// initIndex();
T::rewindFile();
} // ~rewindFile
template< class T >
LetterNumber BwtReaderIndex<T>::readAndCount( LetterCount &c, const LetterNumber numChars )
{
#ifdef DEBUG_RAC
std::cout << "BR RLI readAndCount " << numChars << " chars " << endl;
std::cout << "Before: " << currentPos_ << " " << ftell( T::pFile_ ) << " ";
std::cout << c << endl;;
#endif
LetterNumber charsLeft( numChars );
uint32_t indexLast;
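// Strategy: if the requested interval spans one or more index points, jump
// straight to the last such point (using the pre-computed counts and file
// offsets), then fall back to the underlying reader for the remaining characters.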
#ifdef DEBUG_RAC
if ( indexNext_ != indexSize_ )
assert( currentPos_ <= indexPosBwt_[indexNext_] );
#endif
// gotcha: numChars can be set to maxLetterNumber so no expressions should
// add to it - wraparound issues!
// if indexLast==indexPosBwtSize we know we have gone past last index point
// or that none are present at all
if ( ( indexNext_ != indexSize_ )
&& ( numChars > ( indexPosBwt_[indexNext_] - T::currentPos_ ) ) )
{
// count interval spans at least one index point
// how many index points does the count interval span?
indexLast = indexNext_;
while ( ( indexLast != indexSize_ )
&& ( numChars > ( indexPosBwt_[indexLast] - T::currentPos_ ) ) )
{
indexLast++;
}
indexLast--;
if ( indexNext_ <= indexLast )
{
// more than one index point in count interval - can use index
if ( ! ( T::currentPos_ == 0 && charsLeft >= indexPosBwt_[indexNext_] ) )
charsLeft -= T::readAndCount( c, indexPosBwt_[indexNext_] - T::currentPos_ );
else
{
charsLeft -= indexPosBwt_[0];
c += indexCount_[0];
if ( indexNext_ == indexLast )
T::seek( indexPosFile_[0], indexPosBwt_[0] );
}
// assert(T::currentPos_==indexNext_);
if ( indexNext_ != indexLast )
{
charsLeft -= ( indexPosBwt_[indexLast] - indexPosBwt_[indexNext_] );
// update counts and also indexNext_
while ( ++indexNext_ <= indexLast )
{
c += indexCount_[indexNext_];
#ifdef DEBUG_RAC_VERBOSE
std::cout << indexNext_ << " " << indexPosBwt_[indexNext_] << " " << indexPosFile_[indexNext_] << " " << indexCount_[indexNext_] << endl;
#endif
} //
// skip to last index point and reset buffers
T::seek( indexPosFile_[indexLast], indexPosBwt_[indexLast] );
}
else
{
assert( T::currentPos_ == indexPosBwt_[indexLast] );
++indexNext_;
}
/*
T::runLength_ = 0;
T::pBuf_ = T::buf_ + ReadBufferSize;
T::pBufMax_ = T::buf_ + ReadBufferSize;
*/
} // if more than one index point
// if we're in this clause we've gone past at least one index
indexLast++;
assert( indexLast <= indexSize_ );
}
#ifdef DEBUG_RAC
std::cout << "After (RLI) skip: " << T::currentPos_ << " " << ftell( T::pFile_ ) << " " << c << endl;
#endif
// now read as normal until done
charsLeft -= T::readAndCount( c, charsLeft );
// assert(T::currentPos_==desiredPos);
#ifdef DEBUG_RAC
std::cout << "After (RLI) final read: " << T::currentPos_ << " " << ftell( T::pFile_ ) << " " << c << endl;
#endif
return ( numChars - charsLeft );
}
template< class T >
void BwtReaderIndex<T>::initIndex( const string &optionalSharedMemoryPath )
{
indexNext_ = 0;
bool useSharedMemory = !optionalSharedMemoryPath.empty();
string shmFilename1, shmFilename2, shmFilename3;
if ( useSharedMemory )
{
string filenameWithoutSlash = T::filename_;
std::replace( filenameWithoutSlash.begin(), filenameWithoutSlash.end(), '/', '_' );
shmFilename1 = optionalSharedMemoryPath + "/BeetlIndexPosFile_" + filenameWithoutSlash;
shmFilename2 = optionalSharedMemoryPath + "/BeetlIndexCount_" + filenameWithoutSlash;
shmFilename3 = optionalSharedMemoryPath + "/BeetlIndexPosBwt_" + filenameWithoutSlash;
if ( readWriteCheck( shmFilename1.c_str(), false, false ) )
{
// Load vectors from shared memory
{
cerr << "Info: Using mmap'ed index " << shmFilename1 << endl;
int fd = open( shmFilename1.c_str(), O_RDONLY );
assert( fd >= 0 );
off_t fileSize = lseek( fd, 0, SEEK_END );
lseek( fd, 0, SEEK_SET );
char *mmappedFile = ( char * )mmap( NULL, fileSize, PROT_READ, MAP_SHARED /*| MAP_LOCKED | MAP_POPULATE*/, fd, 0 );
if ( mmappedFile == ( void * ) - 1 )
{
perror( "Error: Map failed" );
assert( false );
}
indexSize_ = *reinterpret_cast<uint32_t *>( mmappedFile );
indexPosFile_ = reinterpret_cast<LetterNumber *>( mmappedFile + sizeof( indexSize_ ) );
close( fd );
}
{
int fd = open( shmFilename2.c_str(), O_RDONLY );
assert( fd >= 0 );
off_t fileSize = lseek( fd, 0, SEEK_END );
lseek( fd, 0, SEEK_SET );
char *mmappedFile = ( char * )mmap( NULL, fileSize, PROT_READ, MAP_SHARED /*| MAP_LOCKED | MAP_POPULATE*/, fd, 0 );
if ( mmappedFile == ( void * ) - 1 )
{
perror( "Error: Map failed" );
assert( false );
}
assert( indexSize_ == *reinterpret_cast<uint32_t *>( mmappedFile ) );
indexCount_ = reinterpret_cast<LETTER_COUNT_CLASS *>( mmappedFile + sizeof( indexSize_ ) );
close( fd );
}
{
int fd = open( shmFilename3.c_str(), O_RDONLY );
assert( fd >= 0 );
off_t fileSize = lseek( fd, 0, SEEK_END );
lseek( fd, 0, SEEK_SET );
char *mmappedFile = ( char * )mmap( NULL, fileSize, PROT_READ, MAP_SHARED /*| MAP_LOCKED | MAP_POPULATE*/, fd, 0 );
if ( mmappedFile == ( void * ) - 1 )
{
perror( "Error: Map failed" );
assert( false );
}
assert( indexSize_ == *reinterpret_cast<uint32_t *>( mmappedFile ) );
indexPosBwt_ = reinterpret_cast<LetterNumber *>( mmappedFile + sizeof( indexSize_ ) );
close( fd );
}
return;
}
}
LetterNumber currentPosBwt( 0 );
uint8_t unusedAlphabetEntries( 0 );
if ( pIndexFile_ != NULL ) fclose( pIndexFile_ );
pIndexFile_ = fopen( indexFilename_.c_str(), "r" );
if ( pIndexFile_ == NULL )
{
// Logger::error() << "Error opening index file " << indexFilename_;
// exit( -1 );
}
else
{
// read file header
bool isIndexV2 = false;
uint8_t sizeOfAlphabet = 0;
uint8_t sizeOfLetterNumber = 0;
uint16_t sizeOfLetterCountCompact = 0;
vector<char> buf( indexV1Header.size() );
fread( buf.data(), indexV1Header.size(), 1, pIndexFile_ );
if ( equal( buf.begin(), buf.end(), indexV1Header.begin() ) )
{
// index v1 detected
fread( &sizeOfAlphabet, sizeof( uint8_t ), 1, pIndexFile_ );
fread( &sizeOfLetterNumber, sizeof( uint8_t ), 1, pIndexFile_ );
fread( &sizeOfLetterCountCompact, sizeof( uint16_t ), 1, pIndexFile_ );
}
else if ( equal( buf.begin(), buf.end(), indexV2Header.begin() ) )
{
// index v2 detected
isIndexV2 = true;
fread( &sizeOfAlphabet, sizeof( uint8_t ), 1, pIndexFile_ );
fread( &sizeOfLetterNumber, sizeof( uint8_t ), 1, pIndexFile_ );
sizeOfLetterCountCompact = sizeof( LetterCountCompact ); // unused in index v2
}
else
{
// default value from previous header-less format
sizeOfAlphabet = 7;
sizeOfLetterNumber = 8;
sizeOfLetterCountCompact = 4*sizeOfAlphabet;
rewind( pIndexFile_ );
}
if ( sizeOfAlphabet > alphabetSize )
{
Logger::error() << "WARNING: Index file " << indexFilename_ << " was built with alphabetSize == " << (int)sizeOfAlphabet << " whereas the current tools are using alphabetSize == " << alphabetSize << ".\n => You should rebuild the index files with beetl-index (or rebuild the tools using the same data widths (specified in Types.hh))." << endl;
unusedAlphabetEntries = sizeOfAlphabet - alphabetSize;
}
else if ( sizeOfAlphabet < alphabetSize )
{
Logger::error() << "ERROR: Index file " << indexFilename_ << " was built with alphabetSize == " << (int)sizeOfAlphabet << " whereas the current tools are using alphabetSize == " << alphabetSize << ".\n => You should rebuild the index files with beetl-index (or rebuild the tools using the same data widths (specified in Types.hh))." << endl;
exit( -1 );
}
if ( sizeOfLetterNumber != sizeof( LetterNumber ) )
{
Logger::error() << "ERROR: Index file " << indexFilename_ << " was built with sizeof(LetterNumber) == " << (int)sizeOfLetterNumber << " whereas the current tools are using sizeof(LetterNumber) == " << sizeof( LetterNumber ) << ".\n => You should rebuild the index files with beetl-index (or rebuild the tools using the same data widths (specified in Types.hh))." << endl;
exit( -1 );
}
if ( sizeOfLetterCountCompact != sizeof( LetterCountCompact ) + 4 * unusedAlphabetEntries ) // allow 32 bits per unused entry to be automatically ignored
{
Logger::error() << "ERROR: Index file " << indexFilename_ << " was built with sizeof(LetterCountCompact) == " << sizeOfLetterCountCompact << " whereas the current tools are using sizeof(LetterCountCompact) == " << sizeof( LetterCountCompact ) << " + " << unusedAlphabetEntries << "unused alphabet entries.\n => You should rebuild the index files with beetl-index (or rebuild the tools using the same data widths (specified in Types.hh))." << endl;
exit( -1 );
}
indexPosFile0_.push_back( 0 );
while ( fread( &indexPosFile0_.back(), sizeof( LetterNumber ), 1, pIndexFile_ ) == 1 )
{
indexCount0_.push_back( LETTER_COUNT_CLASS() );
if (!isIndexV2)
{
// In Index v1, counts were always stored using compact 32 bits values, which now need to be scaled to LETTER_COUNT_CLASS
for (int i=0; i<alphabetSize; ++i)
{
assert ( fread( &indexCount0_.back().count_[i], sizeof( uint32_t ), 1, pIndexFile_ ) == 1 );
}
uint32_t unusedEntry;
for (int i=0; i<unusedAlphabetEntries; ++i)
{
assert ( fread( &unusedEntry, sizeof( uint32_t ), 1, pIndexFile_ ) == 1 );
}
}
else
{
for (int i=0; i<alphabetSize; ++i)
{
int byteCount;
assert ( fread( &byteCount, 1, 1, pIndexFile_ ) == 1 );
if (byteCount)
{
#ifdef USE_COMPACT_STRUCTURES
if ( byteCount > sizeof(LetterNumberCompact) )
{
Logger::error() << "ERROR: Index file " << indexFilename_ << " contains large values. BEETL needs to be built without USE_COMPACT_STRUCTURES in BwtIndex.hh." << endl;
exit( -1 );
}
#endif
assert ( fread( &indexCount0_.back().count_[i], byteCount, 1, pIndexFile_ ) == 1 );
}
}
}
for ( int i( 0 ); i < alphabetSize; i++ )
currentPosBwt += indexCount0_.back().count_[i];
indexPosBwt0_.push_back( currentPosBwt );
#ifdef DEBUG_RAC_VERBOSE
cout << indexPosBwt0_.back() << " " << indexPosFile0_.back() << " " << indexCount0_.back() << endl;
#endif
// skip unused alphabet entries, and check that they were indeed useless
for (int i=0; i<unusedAlphabetEntries; ++i)
{
uint32_t unusedEntry;
assert( fread( &unusedEntry, sizeof( uint32_t ), 1, pIndexFile_ ) == 1 );
assert( unusedEntry == 0 && "Error: Trying to ignore an index entry, which contains a non-zero value" );
}
indexPosFile0_.push_back( 0 );
} // ~while
indexPosFile0_.pop_back();
fclose( pIndexFile_ );
pIndexFile_ = NULL;
} // ~if
indexSize_ = indexPosBwt0_.size();
assert( indexSize_ == indexPosFile0_.size() );
assert( indexSize_ == indexCount0_.size() );
// rewindFile();
indexPosBwt_ = indexPosBwt0_.data();
indexPosFile_ = indexPosFile0_.data();
indexCount_ = indexCount0_.data();
// Save vectors to shared memory
if ( useSharedMemory && !indexPosBwt0_.empty() )
{
{
ofstream os( shmFilename1 );
if ( !os.good() )
{
cerr << "Error creating " << shmFilename1 << endl;
exit( -1 );
}
os.write( reinterpret_cast<const char *>( &indexSize_ ), sizeof( indexSize_ ) );
os.write( reinterpret_cast<const char *>( indexPosFile0_.data() ), indexSize_ * sizeof( indexPosFile0_[0] ) );
}
{
ofstream os( shmFilename2 );
os.write( reinterpret_cast<const char *>( &indexSize_ ), sizeof( indexSize_ ) );
os.write( reinterpret_cast<const char *>( indexCount0_.data() ), indexSize_ * sizeof( indexCount0_[0] ) );
}
{
ofstream os( shmFilename3 );
os.write( reinterpret_cast<const char *>( &indexSize_ ), sizeof( indexSize_ ) );
os.write( reinterpret_cast<const char *>( indexPosBwt0_.data() ), indexSize_ * sizeof( indexPosBwt0_[0] ) );
}
}
} // ~initIndex
// Index creation
void buildIndex( BwtReaderBase *reader0, FILE *pIndexFile, const int indexBinSize )
{
BwtReaderRunLengthBase *reader = dynamic_cast< BwtReaderRunLengthBase* >( reader0 );
const int runsPerChunk( indexBinSize );
int runsThisChunk( 0 );
LetterCount countsThisChunk;
LetterNumber runsSoFar( 0 ), chunksSoFar( 0 );
bool lastRun = false;
if (reader == NULL)
{
Logger::out() << "Warning: cannot index file " << reader0->filename_ << endl;
return;
}
reader->currentPos_ = 0;
// Write file header
assert( fwrite( indexV2Header.data(), indexV2Header.size(), 1, pIndexFile ) == 1 );
uint8_t sizeOfAlphabet = alphabetSize;
uint8_t sizeOfLetterNumber = sizeof( LetterNumber );
fwrite( &sizeOfAlphabet, sizeof( uint8_t ), 1, pIndexFile );
fwrite( &sizeOfLetterNumber, sizeof( uint8_t ), 1, pIndexFile );
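// Each index point written below consists of the position in the compressed BWT file (LetterNumber), followed, for each alphabet letter, by a 1-byte length and that many bytes of the per-chunk count (matching the v2 reading code above)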
while ( !lastRun )
{
lastRun = !reader->getRun();
if (!lastRun)
{
runsSoFar++;
runsThisChunk++;
countsThisChunk.count_[whichPile[reader->lastChar_]] += reader->runLength_;
assert( countsThisChunk.count_[whichPile[reader->lastChar_]] >= reader->runLength_ && "Error: Overflow in buildIndex" );
reader->currentPos_ += reader->runLength_;
}
if ( runsThisChunk == runsPerChunk || lastRun )
{
#ifdef DEBUG_RAC
cout << reader->currentPos_ << " " << runsSoFar << " " << countsThisChunk << endl;
#endif
// don't bother writing this as can deduce by summing countsThisChunk
// assert
// ( fwrite( &reader->currentPos_, sizeof( LetterNumber ), 1, pIndexFile ) == 1 );
LetterNumber posInFile = reader->tellg();
assert
( fwrite( &posInFile, sizeof( LetterNumber ), 1, pIndexFile ) == 1 );
// In index format v2, we write each LetterCount independently, encoding the number of bytes as first byte
for (int i=0; i<alphabetSize; ++i)
{
LetterNumber val = countsThisChunk.count_[i];
int bytesNeeded = 0;
while (val >> (8*bytesNeeded))
++bytesNeeded;
assert( fwrite( &bytesNeeded, 1, 1, pIndexFile ) == 1 );
if (bytesNeeded)
assert( fwrite( &val, bytesNeeded, 1, pIndexFile ) == 1 );
}
chunksSoFar++;
runsThisChunk = 0;
countsThisChunk.clear();
}
}
cout << "buildIndex: read " << reader->currentPos_ << " bases compressed into " << runsSoFar << " runs" << " over " << reader->tellg() << " bytes." << endl;
cout << "buildIndex: generated " << chunksSoFar << " index points." << endl;
} // ~buildIndex
// Explicit template instantiations
template class BwtReaderIndex<BwtReaderRunLength>;
template class BwtReaderIndex<BwtReaderRunLengthV3>;
<|start_filename|>src/libzoo/cli/Common.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "Common.hh"
#include <cassert>
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <iostream>
#include <sstream>
#include <unistd.h>
using namespace std;
bool beginsWith( const string &s, const string &prefix )
{
return s.compare( 0, prefix.size(), prefix ) == 0;
}
bool endsWith( const string &s, const string &suffix )
{
return s.size() >= suffix.size()
&& s.compare( s.size() - suffix.size(), suffix.size(), suffix ) == 0;
}
string detectFileFormat( const string &inputFilename )
{
if ( endsWith( inputFilename, ".fa" ) || endsWith( inputFilename, ".fasta" ) )
{
return fileFormatLabels[FILE_FORMAT_FASTA];
}
else if ( endsWith( inputFilename, ".fastq" ) )
{
return fileFormatLabels[FILE_FORMAT_FASTQ];
}
else if ( endsWith( inputFilename, ".fq" ) )
{
return fileFormatLabels[FILE_FORMAT_FASTQ];
}
else if ( endsWith( inputFilename, ".seq" ) )
{
return fileFormatLabels[FILE_FORMAT_SEQ];
}
else if ( endsWith( inputFilename, ".bcl" ) )
{
return fileFormatLabels[FILE_FORMAT_BCL];
}
else if ( endsWith( inputFilename, ".cyc" ) || endsWith( inputFilename, "cyc." ) || beginsWith( inputFilename, "cyc." ) )
{
return fileFormatLabels[FILE_FORMAT_CYC];
}
return "";
}
void checkFileFormat( const string &inputFilename, const string &fileFormat )
{
if ( fileFormat.empty() )
{
cerr << "Error: filename suffix not recognised for " << inputFilename << ". (You may try to run with --help, as we usually provide options such as --input-format to specify the file format)" << endl;
exit( EXIT_FAILURE );
}
}
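// Matches either "-x <value>" / "--xxx <value>" (value taken from the next argv entry) or the "--xxx=<value>" form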
bool isNextArgument( const string &shortPrefix, const string &longPrefix, const int argc, const char **argv, int &i, string *argValue )
{
string arg( argv[i] );
if ( arg == shortPrefix || arg == longPrefix )
{
if ( argValue )
{
if ( argc <= i + 1 )
{
cerr << "Error: Too few arguments after " << arg << endl;
exit( 1 );
}
*argValue = string( argv[++i] );
}
return true;
}
if ( argValue
&& beginsWith( arg, longPrefix )
&& arg.size() > longPrefix.size()
&& arg[longPrefix.size()] == '='
&& !longPrefix.empty()
)
{
*argValue = arg.substr( longPrefix.size() + 1 );
return true;
}
return false;
}
bool isNextArgumentInt( const string &shortPrefix, const string &longPrefix, const int argc, const char **argv, int &i, int *argValue )
{
string s;
if ( isNextArgument( shortPrefix, longPrefix, argc, argv, i, &s ) )
{
stringstream ss;
ss << s;
ss >> *argValue;
return true;
}
return false;
}
bool parseNextArgument( const string &shortPrefix, const string &longPrefix, const int argc, const char **argv, int &i, ToolParameters &toolParams, const unsigned toolParamKey )
{
string argValue;
if ( isNextArgument( shortPrefix, longPrefix, argc, argv, i, &argValue ) )
{
toolParams[toolParamKey] = argValue;
return true;
}
return false;
}
/*
bool string2bool( const string &str )
{
if (str == "true") return true;
if (str == "false") return false;
if (str == "1") return true;
if (str == "0") return false;
cerr << "Error: Wrong boolean value \"" << str << "\"" << endl;
exit( 1 );
}
*/
void launchBeetl( const string ¶ms )
{
ostringstream path;
// Use the same path as the current executable if we manage to extract it
// Note: "/proc/self/exe" only exists on linux, but launching executables is a temporary solution anyway
char selfPath[1024];
ssize_t len = ::readlink( "/proc/self/exe", selfPath, sizeof( selfPath ) - 1 );
if ( len != -1 )
{
selfPath[len] = '\0';
char *lastSlash = strrchr( selfPath, '/' );
if ( lastSlash )
{
*( lastSlash + 1 ) = 0;
path << selfPath;
}
}
string oldBeetl = path.str() + "OldBeetl";
FILE *f = fopen( oldBeetl.c_str(), "rb" );
if ( !f )
{
oldBeetl = path.str() + "../OldBeetl";
f = fopen( oldBeetl.c_str(), "rb" );
}
if ( f ) fclose( f ); // guard against fclose(NULL) when OldBeetl could not be found
stringstream command;
command << oldBeetl << " " << params;
cout << "Launching command:" << endl;
cout << " " << command.str() << endl;
int ret = system( command.str().c_str() );
if ( ret == -1 )
{
cerr << "Error in fork while launching Beetl" << endl;
exit( 1 );
}
else
{
if ( WIFSIGNALED( ret ) || !WIFEXITED( ret ) )
{
exit( -1 );
}
int retChild = WEXITSTATUS( ret );
if ( retChild != 0 )
{
exit( retChild );
}
}
}
bool doesFileExist( const string &filename )
{
return access( filename.c_str(), F_OK ) == 0;
}
<|start_filename|>src/parameters/CompareParameters.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef BEETL_COMPARE_PARAMETERS_HH
#define BEETL_COMPARE_PARAMETERS_HH
#include "libzoo/cli/ToolParameters.hh"
#include <string>
namespace BeetlCompareParameters
{
// options: mode splice/reference/metagenomics
enum Mode
{
MODE_TUMOUR_NORMAL,
MODE_SPLICE,
MODE_REFERENCE,
MODE_METAGENOMICS,
MODE_COUNT,
};
static const string modeLabels[] =
{
"tumour-normal",
"splice",
"reference",
"metagenomics",
"" // end marker
};
// options: report minlength off/on
enum ReportMinlength
{
REPORT_MINLENGTH_OFF,
REPORT_MINLENGTH_ON,
REPORT_MINLENGTH_COUNT
};
static const string reportMinlengthLabels[] =
{
"off",
"on",
"" // end marker
};
// options: input format
enum InputFormat
{
INPUT_FORMAT_BWT_ASCII,
INPUT_FORMAT_BWT_RLE,
INPUT_FORMAT_DETECT,
INPUT_FORMAT_COUNT
};
static const string inputFormatLabels[] =
{
"bwt_ascii",
"bwt_rle",
"detect",
"" // end marker
};
// Option container
enum CompareOptions
{
COMPARE_OPTION_MODE,
COMPARE_OPTION_REPORT_MINLENGTH,
COMPARE_OPTION_COUNT
};
} // namespace BeetlCompareParameters
class CompareParameters : public ToolParameters
{
public:
CompareParameters()
{
using namespace BeetlCompareParameters;
addEntry( COMPARE_OPTION_MODE, "mode", "--mode", "-m", "See \"Mode\" note below", "", TYPE_CHOICE | REQUIRED, modeLabels );
addEntry( -1, "input setA", "--inputA", "-a", "Input filename prefix for Set A (such as \"prefix-B0[0-6]\" are Set A's BWT files)", "", TYPE_STRING | REQUIRED );
addEntry( -1, "input setB", "--inputB", "-b", "Input filename prefix for Set B (such as \"prefix-B0[0-6]\" are Set B's BWT files)", "", TYPE_STRING | REQUIRED );
addEntry( -1, "output directory", "--output", "-o", "Output directory", "BeetlCompareOutput", TYPE_STRING | REQUIRED );
addEntry( -1, "max length", "--max-length", "-k", "Maximal k-mer length (number of analysis cycles)", "100", TYPE_INT );
addEntry( -1, "min occ", "--min-occ", "-n", "Minimum number of occurrences (coverage)", "2", TYPE_INT );
addEntry( -1, "inputA format", "--inputA-format", "", "", "detect", TYPE_CHOICE | REQUIRED, inputFormatLabels );
addEntry( -1, "inputB format", "--inputB-format", "", "", "detect", TYPE_CHOICE | REQUIRED, inputFormatLabels );
addEntry( -1, "subset", "--subset", "", "Restrict computation to this suffix - Used for distributed computing", "", TYPE_STRING );
addEntry( -1, "4-way distributed", "--4-way", "", "4-way distributed process number (0-3)", "", TYPE_INT );
addEntry( -1, "generate seq num A", "--generate-seq-numA", "", "Propagate breakpoints to output read numbers. Requires {inputA}-end-pos file", "", TYPE_SWITCH );
addEntry( -1, "generate seq num B", "--generate-seq-numB", "", "Propagate breakpoints to output read numbers. Requires {inputB}-end-pos file", "", TYPE_SWITCH );
addEntry( -1, "memory limit MB", "--memory-limit", "-M", "RAM constraint in MB", "smallest of ulimit -v and /proc/meminfo", TYPE_INT | REQUIRED );
addEntry( -1, "no comparison skip", "--no-comparison-skip", "", "Don't skip already processed comparisons (slower, but smoother output)", "", TYPE_SWITCH );
addEntry( -1, "pause between cycles", "--pause-between-cycles", "", "Wait for a key press after each cycle", "", TYPE_SWITCH );
addEntry( -1, "BWT in RAM", "--bwt-in-ram", "", "Keep BWT in RAM for faster processing", "", TYPE_SWITCH );
addEntry( -1, "propagate sequence", "--propagate-sequence", "", "Propagate and output sequence with each BWT range (slower)", "", TYPE_SWITCH );
// addEntry( -1, "setB metadata", "--genome-metadata", "-c", "For Metagenomics mode only: Input filename \"extended\" prefix for Set B's metadata (for files \"prefix[0-6]\")", "${inputB}-C0", TYPE_STRING );
addEntry( -1, "taxonomy", "--taxonomy", "-t", "For Metagenomics mode only: Input filename for Set B's taxonomy information", "", TYPE_STRING );
addEntry( -1, "min kmer length", "--min-kmer-length", "-w", "For Metagenomics mode only: Minimum k-mer length", "50", TYPE_INT );
addEntry( COMPARE_OPTION_REPORT_MINLENGTH, "report min length", "--report-min-length", "-d", "For Metagenomics mode only: Report the minimal needed word length for the different taxa in the database", "off", TYPE_CHOICE, reportMinlengthLabels );
addEntry( -1, "mmap C files", "--mmap-c-files", "", "Memory-mapping -C0* files may lead to performance improvements", "", TYPE_SWITCH );
addDefaultVerbosityAndHelpEntries();
}
};
#endif //ifndef BEETL_COMPARE_PARAMETERS_HH
<|start_filename|>src/BCRext/BwtWriter.cpp<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#include "BwtWriter.hh"
#include "LetterCount.hh"
#include "Tools.hh"
#include "libzoo/util/Logger.hh"
#include <cstdlib>
#include <cstring>
using namespace std;
//#define DEBUG 1
#define LOCAL_DEBUG 0
//
// BwtWriterBase member function definitions
//
BwtWriterFile::BwtWriterFile( const string &fileName ) : pFile_( fopen( fileName.c_str(), "wb" ) )
{
#ifdef DEBUG
cout << "BwtWriterFile opened file " << fileName << " " << pFile_ << endl;
#endif
readWriteCheck( fileName.c_str(), 1 ); // setvbuf( pFile_, NULL, _IOFBF, 262144);
}
BwtWriterFile::~BwtWriterFile()
{
fclose( pFile_ );
#ifdef DEBUG
cout << "BwtWriterFile: closed file " << pFile_ << endl;
#endif
}
void BwtWriterFile::flush()
{
fflush( pFile_ );
}
//
// BwtWriterASCII member function definitions
//
BwtWriterASCII::BwtWriterASCII( const string &fileName ) : BwtWriterFile( fileName ), lastChar_( notInAlphabet )
{
#ifdef DEBUG
cout << "BW ASCII ctor" << endl;
#endif
}
BwtWriterASCII::~BwtWriterASCII()
{
#ifdef DEBUG
cout << "BW ASCII dtor" << endl;
#endif
}
void BwtWriterASCII::operator()( const char *p, LetterNumber numChars )
{
#ifdef DEBUG
cout << "BW ASCII () - " << *p << " " << numChars << endl;
#endif
size_t bytesWritten = fwrite( p, sizeof( char ), numChars, pFile_ );
if ( bytesWritten != ( size_t )numChars )
{
cerr << "Unable to write " << numChars
<< " chars. Aborting." << endl;
exit( EXIT_FAILURE );
}
if ( numChars > 0 )
lastChar_ = p[numChars - 1];
} // ~operator()
void BwtWriterASCII::sendRun( char c, LetterNumber runLength )
{
if ( runLength )
{
for ( LetterNumber i( 0 ); i < runLength; i++ ) fputc( c, pFile_ );
lastChar_ = c;
}
}
char BwtWriterASCII::getLastChar()
{
return lastChar_;
}
//
// BwtWriterRunLengthBase member function definitions
//
BwtWriterRunLengthBase::~BwtWriterRunLengthBase()
{
if ( runLength_ != 0 ) encodeRun( lastChar_, runLength_ );
if ( pBuf_ != buf_ )
{
size_t bytesWritten = fwrite( buf_, sizeof( char ), ( pBuf_ - buf_ ), pFile_ );
if ( bytesWritten != ( size_t )( pBuf_ - buf_ ) )
{
cerr << "Unable to write " << ( pBuf_ - buf_ )
<< " chars. Aborting." << endl;
exit( EXIT_FAILURE );
}
#ifdef REPORT_COMPRESSION_RATIO
bytesWritten_ += ( LetterNumber )( pBuf_ - buf_ );
#endif
}
#ifdef REPORT_COMPRESSION_RATIO
Logger_if( LOG_FOR_DEBUGGING ) Logger::out()
<< "BwtWriterRunLengthBase: received "
<< charsReceived_ << " chars, sent "
<< bytesWritten_ << " bytes, compression "
<< ( ( double )8 * bytesWritten_ ) / ( charsReceived_ )
<< " bits per char " << std::endl;
#endif
#ifdef GENERATE_RLE_HISTOGRAM
for ( auto & kv : histogram_ )
{
cout << "histogram:\t" << kv.first.first << "\t" << kv.first.second << "\t" << kv.second << endl;
}
#endif
}
void BwtWriterRunLengthBase::operator()( const char *p, LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BW RL () - " << *p << " " << numChars << " state: " << lastChar_ << " " << runLength_ << endl;
#endif
for ( LetterNumber i( 0 ); i < numChars; i++ )
{
if ( ( *p ) == lastChar_ )
{
runLength_++;
}
else
{
if ( runLength_ > 0 )
{
encodeRun( lastChar_, runLength_ );
} // ~if
runLength_ = 1;
lastChar_ = *p;
} // ~else
p++;
} // ~for
// assert(fwrite( p, sizeof(char), numChars, pFile_ )==numChars);
} // ~operator()
void BwtWriterRunLengthBase::flushBuffer()
{
size_t bufLength = pBuf_ - buf_;
#ifdef REPORT_COMPRESSION_RATIO
bytesWritten_ += bufLength;
#endif
size_t bytesWritten = fwrite( buf_, sizeof( char ), bufLength, pFile_ );
if ( bytesWritten != ( size_t )bufLength )
{
cerr << "Unable to write " << bufLength
<< " chars. Aborting." << endl;
exit( EXIT_FAILURE );
}
pBuf_ = buf_;
}
void BwtWriterRunLengthBase::sendChar( char c )
{
*pBuf_ = c;
if ( ++pBuf_ == pBufMax_ )
flushBuffer();
}
void BwtWriterRunLengthBase::encodeRun( char c, LetterNumber runLength )
{
#ifdef DEBUG
std::cout << "BW RL encodeRun - sending run " << c << " " << runLength << " " << pFile_
<< std::endl;
#endif
#ifdef GENERATE_RLE_HISTOGRAM
++histogram_[ make_pair( c, runLength ) ];
#endif
#ifdef REPORT_COMPRESSION_RATIO
charsReceived_ += runLength;
#endif
const unsigned char charIndex( whichPile[( int )c] );
if ( charIndex == nv )
{
cerr << "Char |" << c << "| is not part of the alphabet. Aborting." << endl;
exit( EXIT_FAILURE );
}
assert( charIndex >> baseFieldWidthInBits_ == 0 );
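// Each output byte packs the pile index into its low bits and part of (runLength - 1) into its high bits; runs longer than the length field allows are emitted as repeated max-length bytes followed by one byte carrying the remainder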
uchar outCode = charIndex | lengthFieldMask_;
runLength--;
const LetterNumber numMaxChars( runLength >> lengthFieldWidthInBits_ );
for ( LetterNumber i( 0 ); i < numMaxChars; i++ )
{
sendChar( outCode );
}
runLength -= numMaxChars << lengthFieldWidthInBits_;
assert( runLength >> lengthFieldWidthInBits_ == 0 );
outCode = charIndex | ( static_cast<uchar>( runLength ) << baseFieldWidthInBits_ );
sendChar( outCode );
#ifdef DEBUG
std::cout << "B sending " << ( unsigned int )outCode << " " << pFile_ << std::endl;
#endif
} // ~encodeRun
void BwtWriterRunLengthBase::sendRun( char c, LetterNumber runLength )
{
#ifdef DEBUG
std::cout << "BW RL sendRun - sending run " << c << " " << runLength << " " << endl;
#endif
if ( runLength != 0 )
{
if ( c == lastChar_ )
{
runLength_ += runLength;
}
else
{
if ( runLength_ != 0 ) encodeRun( lastChar_, runLength_ );
lastChar_ = c;
runLength_ = runLength;
}
}
} // ~sendRun
char BwtWriterRunLengthBase::getLastChar()
{
return lastChar_;
}
void BwtWriterRunLengthBase::flush()
{
if ( runLength_ != 0 )
{
encodeRun( lastChar_, runLength_ );
lastChar_ = notInAlphabet;
runLength_ = 0;
}
flushBuffer();
fflush( pFile_ );
}
//
// BwtWriterRunLengthV2 member function definitions
//
BwtWriterRunLengthV2::BwtWriterRunLengthV2( const string &fileName )
: BwtWriterRunLengthBase( fileName, 3 )
{
symbolForRunLength1ForPile_.resize( alphabetSize );
maxEncodedRunLengthForPile_.resize( alphabetSize );
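// The 256 byte codes are partitioned per base: A/C/G/T each cover run lengths 1-63, N covers only 1 and $ covers 1-3, using up exactly 256 codes (checked below)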
LetterNumber bytecode = 0;
symbolForRunLength1ForPile_[ whichPile['A'] ] = bytecode;
bytecode += ( maxEncodedRunLengthForPile_[ whichPile['A'] ] = 63 );
symbolForRunLength1ForPile_[ whichPile['C'] ] = bytecode;
bytecode += ( maxEncodedRunLengthForPile_[ whichPile['C'] ] = 63 );
symbolForRunLength1ForPile_[ whichPile['G'] ] = bytecode;
bytecode += ( maxEncodedRunLengthForPile_[ whichPile['G'] ] = 63 );
symbolForRunLength1ForPile_[ whichPile['T'] ] = bytecode;
bytecode += ( maxEncodedRunLengthForPile_[ whichPile['T'] ] = 63 );
symbolForRunLength1ForPile_[ whichPile['N'] ] = bytecode;
bytecode += ( maxEncodedRunLengthForPile_[ whichPile['N'] ] = 1 );
symbolForRunLength1ForPile_[ whichPile['$'] ] = bytecode;
bytecode += ( maxEncodedRunLengthForPile_[ whichPile['$'] ] = 3 );
assert( bytecode == 256 );
// Send file header (inspired by the PNG format)
fputc( 137, pFile_ ); // non-ASCII to avoid confusion with text files
fputc( 'B', pFile_ );
fputc( 'W', pFile_ );
fputc( 'T', pFile_ );
fputc( 13, pFile_ ); // \r\n sequence to check for invalid dos/unix format conversions
fputc( 10, pFile_ );
fputc( 26, pFile_ ); // Ctrl-Z, making some text viewers stop here
fputc( 2, pFile_ ); // Format version number
/*
// Send conversion table: 256 entries with { base: 1 char, run length: 1 byte }
for (int i=1; i<=63; ++i)
{
fputc( 'A', pFile_ );
fputc( i, pFile_ );
}
for (int i=1; i<=63; ++i)
{
fputc( 'C', pFile_ );
fputc( i, pFile_ );
}
for (int i=1; i<=63; ++i)
{
fputc( 'G', pFile_ );
fputc( i, pFile_ );
}
for (int i=1; i<=63; ++i)
{
fputc( 'T', pFile_ );
fputc( i, pFile_ );
}
for (int i=1; i<=1; ++i)
{
fputc( 'N', pFile_ );
fputc( i, pFile_ );
}
for (int i=1; i<=3; ++i)
{
fputc( '$', pFile_ );
fputc( i, pFile_ );
}
*/
}
BwtWriterRunLengthV2::~BwtWriterRunLengthV2()
{
// We need to override this call from the base class' destructor, in order to use our own encodeRun
if ( runLength_ != 0 )
{
encodeRun( lastChar_, runLength_ );
runLength_ = 0;
}
}
void BwtWriterRunLengthV2::encodeRun( char c, LetterNumber runLength )
{
assert( runLength > 0 );
#ifdef DEBUG
std::cout << "BW RL encodeRun - sending run " << c << " " << runLength << " " << pFile_
<< std::endl;
#endif
#ifdef GENERATE_RLE_HISTOGRAM
++histogram_[ make_pair( c, runLength ) ];
#endif
#ifdef REPORT_COMPRESSION_RATIO
charsReceived_ += runLength;
#endif
const unsigned char charIndex( whichPile[( int )c] );
if ( charIndex == nv )
{
cerr << "Char |" << c << "| is not part of the alphabet. Aborting." << endl;
exit( EXIT_FAILURE );
}
const uchar symbolForRunLength1 = symbolForRunLength1ForPile_[charIndex];
const LetterNumber maxEncodedRunLength = maxEncodedRunLengthForPile_[charIndex];
uchar outCode = symbolForRunLength1 + maxEncodedRunLength - 1;
LetterNumber countAtFullLength = runLength / maxEncodedRunLength;
for ( LetterNumber i = 0; i < countAtFullLength; ++i )
{
sendChar( outCode );
}
runLength %= maxEncodedRunLength;
if ( runLength > 0 )
{
outCode = symbolForRunLength1 + runLength - 1;
sendChar( outCode );
}
}
//
// BwtWriterRunLengthV3 member function definitions
//
BwtWriterRunLengthV3::BwtWriterRunLengthV3( const string &fileName )
: BwtWriterRunLengthBase( fileName, 3 )
{
symbolForRunLength1ForPile_.resize( alphabetSize );
maxEncodedRunLengthForPile_.resize( alphabetSize );
// Send file header (inspired by the PNG format)
fputc( 'B', pFile_ );
fputc( 'W', pFile_ );
fputc( 'T', pFile_ );
fputc( 13, pFile_ ); // \r\n sequence to check for invalid dos/unix format conversions
fputc( 10, pFile_ );
fputc( 26, pFile_ ); // Ctrl-Z, making some text viewers stop here and being non-ASCII to avoid confusion with text files
// Format version number, on 2 bytes to help identify endianness problems
uint16_t formatVersion = 3;
assert( fwrite( &formatVersion, sizeof( formatVersion ), 1, pFile_ ) == 1 );
// Initialise conversion table: enough ranges to cover 256 entries, following the format { base: 1 char, range length: 1 byte, first run length: 2 bytes }
uint16_t bytecode = 0;
bytecode = initialiseCodeRange( 'A', 58, 1, bytecode );
bytecode = initialiseCodeRange( 'C', 58, 1, bytecode );
bytecode = initialiseCodeRange( 'G', 58, 1, bytecode );
bytecode = initialiseCodeRange( 'T', 58, 1, bytecode );
bytecode = initialiseCodeRange( 'N', 4, 1, bytecode );
bytecode = initialiseCodeRange( '$', 4, 1, bytecode );
bytecode = initialiseCodeRange( '+', 16, 0, bytecode );
assert( bytecode == 256 );
}
uint16_t BwtWriterRunLengthV3::initialiseCodeRange( const uint8_t base, const uint8_t rangeLength, const uint16_t firstRunLength, const uint8_t firstBytecode )
// returns next available bytecode, as uint16 to reach value 256
{
assert( fwrite( &base, sizeof( base ), 1, pFile_ ) == 1 );
assert( fwrite( &rangeLength, sizeof( rangeLength ), 1, pFile_ ) == 1 );
assert( fwrite( &firstRunLength, sizeof( firstRunLength ), 1, pFile_ ) == 1 );
if (base != '+')
{ // Common case for "normal" bases
symbolForRunLength1ForPile_[ whichPile[base] ] = firstBytecode;
maxEncodedRunLengthForPile_[ whichPile[base] ] = rangeLength;
}
else
{ // Special case for continuation symbol
firstContinuationSymbol_ = firstBytecode;
maxEncodedRunLengthMultiplierForContinuationSymbol_ = rangeLength;
}
return (uint16_t)firstBytecode + rangeLength;
}
BwtWriterRunLengthV3::~BwtWriterRunLengthV3()
{
// We need to override this call from the base class' destructor, in order to use our own encodeRun
if ( runLength_ != 0 )
{
encodeRun( lastChar_, runLength_ );
runLength_ = 0;
}
}
void BwtWriterRunLengthV3::encodeRun( char c, LetterNumber runLength )
{
assert( runLength > 0 );
LetterNumber runLengthMinus1 = runLength - 1;
#ifdef DEBUG
std::cout << "BW RL encodeRun - sending run " << c << " " << runLength << " " << pFile_
<< std::endl;
#endif
#ifdef GENERATE_RLE_HISTOGRAM
++histogram_[ make_pair( c, runLength ) ];
#endif
#ifdef REPORT_COMPRESSION_RATIO
charsReceived_ += runLength;
#endif
const unsigned char charIndex( whichPile[( int )c] );
if ( charIndex == nv )
{
cerr << "Char |" << c << "| is not part of the alphabet. Aborting." << endl;
exit( EXIT_FAILURE );
}
const uchar symbolForRunLength1 = symbolForRunLength1ForPile_[charIndex];
const LetterNumber maxEncodedRunLength = maxEncodedRunLengthForPile_[charIndex];
// First char, encoded according to table
LetterNumber symbolOffset = runLengthMinus1 % maxEncodedRunLength;
uchar outCode = symbolForRunLength1 + symbolOffset;
sendChar( outCode );
runLengthMinus1 /= maxEncodedRunLength;
// Subsequent chars, encoded in base `maxEncodedRunLengthMultiplierForContinuationSymbol_`, little endian
while (runLengthMinus1 > 0)
{
symbolOffset = runLengthMinus1 % maxEncodedRunLengthMultiplierForContinuationSymbol_;
outCode = firstContinuationSymbol_ + symbolOffset;
sendChar( outCode );
runLengthMinus1 /= maxEncodedRunLengthMultiplierForContinuationSymbol_;
}
}
//
// BwtWriterIncrementalRunLength member function definitions
//
vector< vector<unsigned char> > ramFiles( 1000 ); //TODO: make this '1000' dynamic (the array resize just needs to be put in an openmp critical section)
BwtWriterIncrementalRunLength::BwtWriterIncrementalRunLength( const string &fileName )
: BwtWriterFile( fileName )
, runLength_( 0 ), pBuf_( buf_ ), pBufMax_( buf_ + ReadBufferSize ), lastChar_( notInAlphabet )
#ifdef REPORT_COMPRESSION_RATIO
, charsReceived_( 0 ), bytesWritten_( 0 )
#endif
, fileNumInReader_( 0 )
, filePosInReader_( 0 )
, remainingRunLengthInReader_( 0 )
, lastFileReturnNeeded_( false )
// , onHoldUntilNextReturn_data_( 0 )
, onHoldUntilNextReturn_letter_( 0 )
, onHoldUntilNextReturn_runLength_( 0 )
, onHoldUntilNextReturn_metadata_( 0 )
{
// assert( ramFiles.size() == nextFileNum_ );
// cout << "BwtWriterIncrementalRunLength: Opening " << fileName << endl;
// todo: remove this hack, which is here to make sure the ramFiles%5 keep pointing to the correct alphabet BWT
int firstDigitPos = fileName.size() - 1;
while ( firstDigitPos >= 0 && ( fileName[firstDigitPos] >= '0' && fileName[firstDigitPos] <= '9' ) ) // skip any prefix like in "new_1"
--firstDigitPos;
++firstDigitPos;
unsigned int letterNum = atoi( fileName.c_str() + firstDigitPos );
extern unsigned int debugCycle;
extern unsigned int lastDefragCycle;
assert( letterNum > 0 && "This class doesn't store pile 0" );
if ( lastDefragCycle == 0 )
fileNum_ = letterNum - 1 + 5 * ( debugCycle - 1 );
else
fileNum_ = letterNum - 1 + 5 * ( debugCycle - lastDefragCycle + 1 );
// cout << " = file #" << fileNum_ << endl;
assert( fwrite( &fileNum_, sizeof( fileNum_ ), 1, pFile_ ) == 1 );
/*
#p.ragma omp critical
if( fileNum_ >= ramFiles.size() )
{
ramFiles.resize( fileNum_ + 1 );
}
*/
// ramFileLengths.resize( ramFiles.size() );
fileNumInReader_ = fileNum_ % ( alphabetSize - 1 ); // todo: improve this modulo
filePosInReader_ = 0;
}
BwtWriterIncrementalRunLength::~BwtWriterIncrementalRunLength()
{
if ( runLength_ != 0 ) encodeRun( lastChar_, runLength_ );
terminateLastInsertion();
if ( pBuf_ != buf_ )
{
size_t bytesWritten = fwrite( buf_, sizeof( char ), ( pBuf_ - buf_ ), pFile_ );
if ( bytesWritten != ( size_t )( pBuf_ - buf_ ) )
{
cerr << "Unable to write " << ( pBuf_ - buf_ )
<< " chars. Aborting." << endl;
exit( -1 );
}
#ifdef REPORT_COMPRESSION_RATIO
bytesWritten_ += ( LetterNumber )( pBuf_ - buf_ );
#endif
}
#ifdef REPORT_COMPRESSION_RATIO
#ifndef SEND_DATA_TO_FILES_FOR_DEBUGGING
bytesWritten_ = ramFiles[fileNum_].size();
#endif //ifndef SEND_DATA_TO_FILES_FOR_DEBUGGING
Logger_if( LOG_FOR_DEBUGGING ) Logger::out()
<< "BwtWriterIncrementalRunLength: received "
<< charsReceived_ << " chars, sent "
<< bytesWritten_ << " bytes, compression "
<< ( ( double )8 * bytesWritten_ ) / ( charsReceived_ )
<< " bits per char " << std::endl;
#endif
}
void BwtWriterIncrementalRunLength::operator()( const char *p, LetterNumber numChars )
{
#ifdef DEBUG
std::cout << "BW RL () - " << *p << " " << numChars << " state: " << lastChar_ << " " << runLength_ << endl;
#endif
for ( LetterNumber i( 0 ); i < numChars; i++ )
{
if ( ( *p ) == lastChar_ )
{
runLength_++;
}
else
{
if ( runLength_ > 0 )
{
encodeRun( lastChar_, runLength_ );
} // ~if
runLength_ = 1;
lastChar_ = *p;
} // ~else
p++;
} // ~for
} // ~operator()
void BwtWriterIncrementalRunLength::terminateLastInsertion()
{
if ( !ramFiles[fileNum_].empty() )
{
size_t lastPos = ramFiles[fileNum_].size() - 1;
unsigned char lastMetadata = ramFiles[fileNum_][lastPos];
if ( lastMetadata )
{
assert( lastMetadata & 0x80 );
assert( onHoldUntilNextReturn_letter_ == 0 );
assert( onHoldUntilNextReturn_runLength_ == 0 );
assert( onHoldUntilNextReturn_metadata_ == 0 );
return;
}
if ( onHoldUntilNextReturn_metadata_ )
{
lastMetadata = onHoldUntilNextReturn_metadata_;
onHoldUntilNextReturn_metadata_ = 0;
}
lastMetadata |= 0x80; // return bit
if ( onHoldUntilNextReturn_letter_ || onHoldUntilNextReturn_runLength_ )
{
assert( onHoldUntilNextReturn_runLength_ > 0 );
ramFiles[fileNum_].push_back( ( onHoldUntilNextReturn_runLength_ - 1 ) << 4 | onHoldUntilNextReturn_letter_ );
ramFiles[fileNum_].push_back( lastMetadata );
onHoldUntilNextReturn_letter_ = 0;
onHoldUntilNextReturn_runLength_ = 0;
}
else
{
ramFiles[fileNum_][lastPos] = lastMetadata;
}
}
}
void BwtWriterIncrementalRunLength::sendChar( unsigned char c, unsigned char metadata )
{
assert( metadata == 0 );
uint fnum = fileNumInReader_;
size_t &fpos = filePosInReader_;
if ( LOCAL_DEBUG )
{
clog << "Inserting " << ( unsigned int )c << " in file " << fnum << " pos " << fpos << ", remainingRunLengthInReader=" << remainingRunLengthInReader_ << " fileNum=" << fileNum_ << " ramFiles[fileNum_].size()=" << ramFiles[fileNum_].size() << " ramFiles[fnum].size()=" << ramFiles[fnum].size() << endl;
}
if ( lastFileReturnNeeded_ )
{
// New insertion starting => we terminate the last insertion properly
terminateLastInsertion();
lastFileReturnNeeded_ = false;
assert( onHoldUntilNextReturn_letter_ == 0 );
assert( onHoldUntilNextReturn_runLength_ == 0 );
assert( onHoldUntilNextReturn_metadata_ == 0 );
}
// Continuation of an already started insertion
if ( fileNum_ < 5 || ( !ramFiles[fileNum_].empty() && ( ramFiles[fileNum_].back() & 0x80 ) == 0 ) )
{
if ( LOCAL_DEBUG ) clog << " Continuation of an already started insertion" << endl;
assert( ( fileNum_ < 5 || fpos != 0 ) && "todo: insertion at the start of a file" );
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
// Insertion at the start of a file
if ( fpos == 0 )
{
assert( fnum < 5 && "Only first cycle files may be empty" );
if ( LOCAL_DEBUG ) clog << " Insertion at the start of a file" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
if ( ramFiles[fnum].empty() )
{
ramFiles[fnum].push_back( 0xFF ); // Special byte meaning that we'll ignore this base (but not its associated metadata)
ramFiles[fnum].push_back( replacementMetadata );
fpos = 2;
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
else if ( ramFiles[fnum][0] == 0xFF )
{
fpos = 2;
}
else
{
assert( false && "todo" );
}
}
// Insertion of a letter that can be added to the run-length-encoded char
unsigned char count1 = 1 + ( ramFiles[fnum][fpos - 2] >> 4 );
unsigned char count2 = 1 + ( c >> 4 );
unsigned char letter1 = ramFiles[fnum][fpos - 2] & 0x0F;
unsigned char letter2 = c & 0x0F;
if ( ( letter1 == letter2 ) && ( count1 + count2 <= 16 ) && ( ( ramFiles[fnum][fpos - 1] & ~0x80 ) != ( fileNum_ / 5 ) ) )
{
// we can combine the 2 items
if ( LOCAL_DEBUG ) clog << " Insertion of a letter that can be added to the run-length-encoded char" << endl;
ramFiles[fnum][fpos - 2] = ( ( count1 + count2 - 1 ) << 4 ) | letter1;
return;
}
// Insertion of a letter between 2 letters
if ( remainingRunLengthInReader_ == 0 )
{
if ( LOCAL_DEBUG ) clog << " Insertion of a letter between 2 letters" << endl;
// - where no insertion was already present
if ( ramFiles[fnum][fpos - 1] == 0 )
{
if ( LOCAL_DEBUG ) clog << " - where no insertion was already present" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
ramFiles[fnum][fpos - 1] = replacementMetadata;
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
// - where an insertion was already present, but no return
if ( ramFiles[fnum][fpos - 1] != 0 && ( ramFiles[fnum][fpos - 1] & 0x80 ) == 0 )
{
if ( LOCAL_DEBUG ) clog << " - where an insertion was already present, but no return" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
if ( ramFiles[fnum][fpos - 1] != replacementMetadata )
{
onHoldUntilNextReturn_metadata_ = ramFiles[fnum][fpos - 1];
ramFiles[fnum][fpos - 1] = replacementMetadata;
}
else
{
// - where the insertion point refers to the current newest file - e.g. in the case of an insertion in the middle of a run-length-encoded char followed by readAndSend the rest of the char
if ( LOCAL_DEBUG ) clog << " - where the insertion point refers to the current newest file" << endl;
// in this case we continue the previous insertion
assert( ramFiles[fileNum_].back() & 0x80 );
onHoldUntilNextReturn_metadata_ = ramFiles[fileNum_].back() & ~0x80;
ramFiles[fileNum_].back() = 0;
}
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
// - where a return was already present
if ( ramFiles[fnum][fpos - 1] == 0x80 )
{
if ( LOCAL_DEBUG ) clog << " - where a return was already present" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
ramFiles[fnum][fpos - 1] = replacementMetadata | 0x80; // the return stays
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
// - where an insertion+return was already present
if ( ( ramFiles[fnum][fpos - 1] & ~0x80 ) != 0 && ( ramFiles[fnum][fpos - 1] & 0x80 ) != 0 )
{
if ( LOCAL_DEBUG ) clog << " - where an insertion+return was already present" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
if ( ( ramFiles[fnum][fpos - 1] & ~0x80 ) != replacementMetadata )
{
onHoldUntilNextReturn_metadata_ = ramFiles[fnum][fpos - 1] & ~0x80;
ramFiles[fnum][fpos - 1] = replacementMetadata | 0x80; // the return stays
}
else
{
// - where the insertion point refers to the current newest file - e.g. in the case of an insertion in the middle of a run-length-encoded char followed by readAndSend the rest of the char
if ( LOCAL_DEBUG ) clog << " - where the insertion point refers to the current newest file" << endl;
// in this case we continue the previous insertion
assert( ramFiles[fileNum_].back() & 0x80 );
onHoldUntilNextReturn_metadata_ = ramFiles[fileNum_].back() & ~0x80;
ramFiles[fileNum_].back() = 0;
}
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
assert( false && "Should never reach here" );
}
// Insertion of a letter in the middle of a run-length-encoded char
if ( remainingRunLengthInReader_ != 0 )
{
if ( LOCAL_DEBUG ) clog << " Insertion of a letter in the middle of a run-length-encoded char" << endl;
// - where no insertion was already present
if ( ramFiles[fnum][fpos - 1] == 0 )
{
if ( LOCAL_DEBUG ) clog << " - where no insertion was already present" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
onHoldUntilNextReturn_letter_ = letter1;
onHoldUntilNextReturn_runLength_ = remainingRunLengthInReader_;
assert( remainingRunLengthInReader_ < count1 );
ramFiles[fnum][fpos - 2] = ( ( count1 - remainingRunLengthInReader_ - 1 ) << 4 ) | letter1;
ramFiles[fnum][fpos - 1] = replacementMetadata;
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
// - where an insertion was already present, but no return
if ( ramFiles[fnum][fpos - 1] != 0 && ( ramFiles[fnum][fpos - 1] & 0x80 ) == 0 )
{
if ( LOCAL_DEBUG ) clog << " - where an insertion was already present, but no return" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
// - if the insertion point is the current newest file
if ( ramFiles[fnum][fpos - 1] == replacementMetadata )
{
if ( LOCAL_DEBUG ) clog << " - if the insertion point is the current newest file" << endl;
assert( ramFiles[fileNum_].back() != 0 ); // we should already have properly inserted the return, possibly with a jump, and we'll recycle those
onHoldUntilNextReturn_metadata_ = ramFiles[fileNum_].back(); // todo: & ~0x80?
ramFiles[fileNum_].pop_back();
assert( ( ramFiles[fileNum_].back() & 0x0F ) == letter1 );
assert( ( ramFiles[fileNum_].back() >> 4 ) + 1 > ( int )remainingRunLengthInReader_ );
unsigned char newCount = ( ramFiles[fileNum_].back() >> 4 ) + 1 - remainingRunLengthInReader_;
onHoldUntilNextReturn_letter_ = letter1;
onHoldUntilNextReturn_runLength_ = remainingRunLengthInReader_;
ramFiles[fileNum_].pop_back();
ramFiles[fileNum_].push_back( ( newCount - 1 ) << 4 | letter1 );
ramFiles[fileNum_].push_back( 0 );
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
// - if the insertion point is an older file
else
{
if ( LOCAL_DEBUG ) clog << " - if the insertion point is an older file" << endl;
onHoldUntilNextReturn_letter_ = letter1;
onHoldUntilNextReturn_runLength_ = remainingRunLengthInReader_;
onHoldUntilNextReturn_metadata_ = ramFiles[fnum][fpos - 1];
assert( remainingRunLengthInReader_ < count1 );
ramFiles[fnum][fpos - 2] = ( ( count1 - remainingRunLengthInReader_ - 1 ) << 4 ) | letter1;
ramFiles[fnum][fpos - 1] = replacementMetadata;
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
}
// - where a return was already present
if ( ramFiles[fnum][fpos - 1] == 0x80 )
{
if ( LOCAL_DEBUG ) clog << " - where a return was already present" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
onHoldUntilNextReturn_letter_ = letter1;
onHoldUntilNextReturn_runLength_ = remainingRunLengthInReader_;
assert( remainingRunLengthInReader_ < count1 );
ramFiles[fnum][fpos - 2] = ( ( count1 - remainingRunLengthInReader_ - 1 ) << 4 ) | letter1;
ramFiles[fnum][fpos - 1] = replacementMetadata | 0x80; // the return stays
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
// - where an insertion+return was already present
if ( ( ramFiles[fnum][fpos - 1] & ~0x80 ) != 0 && ( ramFiles[fnum][fpos - 1] & 0x80 ) != 0 )
{
if ( LOCAL_DEBUG ) clog << " - where an insertion+return was already present" << endl;
unsigned char replacementMetadata = fileNum_ / 5;
assert( ( replacementMetadata & 0x80 ) == 0 && "Error: there may be too many cycles for this algorithm, which overwrote the Return bit" );
// - if the insertion point is the current newest file
if ( ( ramFiles[fnum][fpos - 1] & ~0x80 ) == replacementMetadata )
{
if ( LOCAL_DEBUG ) clog << " - if the insertion point is the current newest file" << endl;
assert( ramFiles[fileNum_].back() != 0 ); // we should already have properly inserted the return, possibly with a jump, and we'll recycle those
onHoldUntilNextReturn_metadata_ = ramFiles[fileNum_].back(); // todo: & ~0x80?
ramFiles[fileNum_].pop_back();
assert( ( ramFiles[fileNum_].back() & 0x0F ) == letter1 );
assert( ( ramFiles[fileNum_].back() >> 4 ) + 1 > ( int )remainingRunLengthInReader_ );
unsigned char newCount = ( ramFiles[fileNum_].back() >> 4 ) + 1 - remainingRunLengthInReader_;
onHoldUntilNextReturn_letter_ = letter1;
onHoldUntilNextReturn_runLength_ = remainingRunLengthInReader_;
ramFiles[fileNum_].pop_back();
ramFiles[fileNum_].push_back( ( newCount - 1 ) << 4 | letter1 );
ramFiles[fileNum_].push_back( 0 );
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
// - if the insertion point is an older file
else
{
if ( LOCAL_DEBUG ) clog << " - if the insertion point is an older file" << endl;
onHoldUntilNextReturn_letter_ = letter1;
onHoldUntilNextReturn_runLength_ = remainingRunLengthInReader_;
onHoldUntilNextReturn_metadata_ = ramFiles[fnum][fpos - 1] & ~0x80;
assert( remainingRunLengthInReader_ < count1 );
ramFiles[fnum][fpos - 2] = ( ( count1 - remainingRunLengthInReader_ - 1 ) << 4 ) | letter1;
ramFiles[fnum][fpos - 1] = replacementMetadata | 0x80; // the return stays
ramFiles[fileNum_].push_back( c );
ramFiles[fileNum_].push_back( metadata );
return;
}
}
assert( false && "Should never reach here" );
}
}
void BwtWriterIncrementalRunLength::encodeRun( char c, LetterNumber runLength )
{
#ifdef DEBUG
std::cout << "BW RL encodeRun - sending run " << c << " " << runLength << " " << pFile_
<< std::endl;
#endif
#ifdef REPORT_COMPRESSION_RATIO
charsReceived_ += runLength;
#endif
int charIndex( whichPile[( int )c] );
if ( charIndex == nv )
{
cerr << "Char is not part of the alphabet. Aborting." << endl;
exit( -1 );
}
uchar outCode( 0xF0 | ( ( uchar )charIndex ) );
runLength--;
const LetterNumber numMaxChars( runLength >> 4 );
for ( LetterNumber i( 0 ); i < numMaxChars; i++ )
{
sendChar( outCode, 0 );
}
runLength &= ( LetterNumber )0xF;
outCode = ( ( ( uchar )runLength ) << 4 );
outCode |= charIndex;
// assert(((uint)outCode)<256);
// assert(fwrite( &outCode, sizeof(char), 1, pFile_ )==1);
sendChar( outCode, 0 );
#ifdef DEBUG
std::cout << "B sending " << ( uint )outCode << " " << pFile_ << std::endl;
#endif
} // ~encodeRun
void BwtWriterIncrementalRunLength::sendRun( char c, LetterNumber runLength )
{
assert( false );
#ifdef DEBUG
std::cout << "BW RL sendRun - sending run " << c << " " << runLength << " " << endl;
#endif
if ( runLength != 0 )
{
if ( c == lastChar_ )
{
runLength_ += runLength;
}
else
{
if ( runLength_ != 0 ) encodeRun( lastChar_, runLength_ );
lastChar_ = c;
runLength_ = runLength;
}
}
} // ~sendRun
// sendRunOfPreExistingData is expected to be called by reader's readAndSend
void BwtWriterIncrementalRunLength::sendRunOfPreExistingData( char c, LetterNumber runLength, int fileNum, size_t posInRamFile, LetterNumber remainingRunLength )
{
if ( runLength == 0 )
{
return;
}
if ( runLength_ != 0 && ( fileNumInReader_ != ( uint )fileNum || filePosInReader_ != posInRamFile || remainingRunLengthInReader_ != remainingRunLength ) )
{
encodeRun( lastChar_, runLength_ );
lastChar_ = notInAlphabet;
runLength_ = 0;
}
// We shouldn't need to write anything
// For debugging, we just want to check that the data is already present and advance our cursor
if ( fileNum >= 0 )
{
fileNumInReader_ = fileNum;
filePosInReader_ = posInRamFile;
remainingRunLengthInReader_ = remainingRunLength;
}
lastFileReturnNeeded_ = true;
} // ~sendRunOfPreExistingData
// Huffman implementation
#ifdef ACTIVATE_HUFFMAN
BwtWriterHuffman::~BwtWriterHuffman() // destructor
{
sendRun( lastChar_, runLength_ ); //gets last normal chars from buffer
sendRun( notInAlphabet, 1 ); // send termination char
BwtWriterHuffman::emptyBuffer();
while ( bitsUsed_ > 0 )
{
assert( fwrite( &soFar_.ui, sizeof( unsigned int ), 1, pFile_ ) == 1 );
#ifdef REPORT_COMPRESSION_RATIO
bytesWritten_ += ( LetterNumber ) sizeof( unsigned int );
#endif
#ifdef DEBUG
cout << endl;
for ( unsigned int i( 1 ); i != 0; i <<= 1 )
cout << ( ( soFar_.ui & i ) ? '1' : '0' );
cout << endl;
#endif
bitsUsed_ -= 32;
}
#ifdef REPORT_COMPRESSION_RATIO
Logger_if( LOG_FOR_DEBUGGING ) Logger::out()
<< "BwtWriterHuffman: received "
<< charsReceived_ << " chars, sent "
<< bytesWritten_ << " bytes, compression "
<< ( ( double )8 * bytesWritten_ ) / ( charsReceived_ )
<< " bits per char " << std::endl;
#endif
} // ~BwtWriterHuffman()
void BwtWriterHuffman::operator()( const char *p, LetterNumber numChars )
{
for ( LetterNumber i( 0 ); i < numChars; i++ )
{
if ( ( *p ) == lastChar_ )
{
runLength_++;
}
else
{
if ( runLength_ > 0 )
{
sendRun( lastChar_, runLength_ );
} // ~if
runLength_ = 1;
lastChar_ = *p;
} // ~else
p++;
} // ~for
sendRun( lastChar_, runLength_ );
runLength_ = 0;
} // ~operator()
void BwtWriterHuffman::sendToken( unsigned long long code, LetterNumber length )
{
toAdd_.ull = code;
toAdd_.ull <<= bitsUsed_; // left shift to the next free position
soFar_.ull |= toAdd_.ull; // update so far
bitsUsed_ += length;
if ( bitsUsed_ > 32 ) // if we have more than 32bit / 4 byte
{
assert( fwrite( &soFar_.ui, sizeof ( unsigned int ), 1, pFile_ ) == 1 );
#ifdef REPORT_COMPRESSION_RATIO
bytesWritten_ += ( LetterNumber ) sizeof( unsigned int );
#endif
#ifdef DEBUG
for ( unsigned int i( 1 ); i != 0; i <<= 1 )
cout << ( ( soFar_.ui & i ) ? '1' : '0' );
cout << endl;
#endif
soFar_.ull >>= 32; // shift rest to the right border
bitsUsed_ -= 32; // update bits used
} //
} // ~sendToken()
void BwtWriterHuffman::sendRun( char c, LetterNumber runLength )
{
#ifdef REPORT_COMPRESSION_RATIO
charsReceived_ += runLength;
#endif
if ( runLength > 0 )
{
for ( LetterNumber i( 0 ); i < runLength; i++ )
{
if ( huffmanBufferPos == huffmanWriterBufferSize - 1 )
{
symBuf[huffmanBufferPos] = c;
processBuffer( huffmanWriterBufferSize );
}
else
{
symBuf[huffmanBufferPos] = c;
huffmanBufferPos++;
} // ~else
} // ~for
}
} // ~sendRun
void BwtWriterHuffman::emptyBuffer( void )
{
processBuffer( huffmanBufferPos );
}
void BwtWriterHuffman::processBuffer( int itemsToPrint )
{
// cerr << pFile_ << "PROCESSING BUFFER" << endl;
if ( itemsToPrint > 0 )
{
char localLastChar = 0;
LetterNumber localRunLength = 0;
for ( int i( 0 ); i < itemsToPrint; i++ )
{
// cerr << pFile_ << " accessing char at " << i << endl;
if ( symBuf[i] == localLastChar )
{
localRunLength++;
}
else
{
if ( localRunLength > 0 )
{
// get number of this char, 0-5
int charIndex( whichPile[( int ) localLastChar] );
assert( charIndex != nv ); // crash if not from alphabet
if ( localRunLength == 1 ) // single run only
{
sendToken( singleCharCode[charIndex],
singleCharLength[charIndex] );
}// ~if
else
{
sendToken( doubleCharCode[charIndex],
doubleCharLength[charIndex] );
sendNum( localRunLength );
} //~else
} // ~if
localRunLength = 1;
localLastChar = symBuf[i];
} // ~else
} // ~for
// process last entry of the buffer
int charIndex( whichPile[( int ) localLastChar] ); // get number of this char, 0-5
if ( ( int )localLastChar > 0 && whichPile[( int ) localLastChar] < alphabetSize )
{
assert( charIndex != nv ); // crash if not from alphabet
if ( localRunLength == 1 ) // single run only
{
sendToken( singleCharCode[charIndex], singleCharLength[charIndex] );
}// ~if
else
{
sendToken( doubleCharCode[charIndex], doubleCharLength[charIndex] );
sendNum( localRunLength );
} //~else
}
huffmanBufferPos = 0; // reset counter
}
} // ~processBuffer
void BwtWriterHuffman::sendNum( LetterNumber runLength )
{
if ( runLength < 17 ) // max 16
{
runLength--;
runLength--;
numBuf_.ui = runLength; // set new run length
sendToken( numBuf_.ull, 4 ); // write one token, encoding for the runlength
}// ~if
else // larger than 16 -> 2 byte
{
runLength -= 17; // substract 16 + 1
numBuf_.ui = 0xF; // escape value 1111 marks run lengths larger than 16
sendToken( numBuf_.ull, 4 ); // send the 4-bit escape marker
do
{
numBuf_.ui = runLength; // set unsigned int to remaining runlength
numBuf_.ui &= 0x7F; // AND with 111|1111
// set the continuation bit 1000|0000 when more run-length bits remain (runLength > 0x7F)
numBuf_.ui |= 0x80 * ( runLength > 0x7F );
sendToken( numBuf_.ull, 8 );
runLength >>= 7;
}// ~while
while ( runLength != 0 );
} // ~else
} // sendNum
#endif //ifdef ACTIVATE_HUFFMAN
//
// BwtWriterImplicit member function definitions
//
BwtWriterImplicit::~BwtWriterImplicit()
{
if ( inSAP_ == true )
{
flushSAP();
}
else if ( lastChar_ != notInAlphabet )
{
pWriter_->sendRun( lastChar_, lastRun_ );
}
delete pWriter_;
}
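// Flush the letter counts accumulated over a SAP interval: the letter that opened the interval (firstSAP_) is sent first, then the remaining letters in alphabet order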
void BwtWriterImplicit::flushSAP( void )
{
assert( alphabet[firstSAP_] == lastChar_ );
if ( countSAP_.count_[firstSAP_] > 0 ) pWriter_->sendRun( alphabet[firstSAP_], countSAP_.count_[firstSAP_] );
for ( int i( 0 ); i < alphabetSize; i++ )
{
if ( ( i != firstSAP_ ) && ( countSAP_.count_[i] > 0 ) ) pWriter_->sendRun( alphabet[i], countSAP_.count_[i] );
}
}
void BwtWriterImplicit::operator()( const char *p, LetterNumber numChars )
{
for ( LetterNumber i( 0 ); i < numChars; i++, p++ )
{
if ( islower( *p ) )
{
if ( inSAP_ == false )
{
countSAP_.clear();
assert ( lastChar_ != notInAlphabet );
firstSAP_ = whichPile[( int )lastChar_];
assert( firstSAP_ != nv );
countSAP_.count_[firstSAP_] += lastRun_;
inSAP_ = true;
} // ~if
countSAP_ += *p;
} // ~if
else
{
if ( inSAP_ == true )
{
flushSAP();
inSAP_ = false;
}
else if ( lastChar_ != notInAlphabet )
{
pWriter_->sendRun( lastChar_, lastRun_ );
}
lastChar_ = *p;
lastRun_ = 1;
}
}
}
void BwtWriterImplicit::sendRun( char c, LetterNumber runLength )
{
if ( islower( c ) )
{
if ( inSAP_ == false )
{
countSAP_.clear();
assert ( lastChar_ != notInAlphabet );
firstSAP_ = whichPile[( int )lastChar_];
assert( firstSAP_ != nv );
countSAP_.count_[firstSAP_] += lastRun_;
inSAP_ = true;
} // ~if
countSAP_.count_[whichPile[( int )c]] += runLength;
}
else
{
if ( inSAP_ == true )
{
flushSAP();
inSAP_ = false;
}
else if ( lastChar_ != notInAlphabet )
{
pWriter_->sendRun( lastChar_, lastRun_ );
}
lastChar_ = c;
lastRun_ = runLength;
}
// (*pWriter_).sendRun(toupper(c), runLength);
}
#ifdef XXX
void BwtWriterImplicit::operator()( const char *p, LetterNumber numChars )
{
// could be smarter about this
char c;
for ( LetterNumber i( 0 ); i < numChars; i++, p++ )
{
c = toupper( *p );
( *pWriter_ )( &c, 1 );
}
}
void BwtWriterImplicit::sendRun( char c, LetterNumber runLength )
{
( *pWriter_ ).sendRun( toupper( c ), runLength );
}
#endif
<|start_filename|>src/backtracker/TwoBwtBackTracker.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef TWO_BWT_BACKTRACKER_HH
#define TWO_BWT_BACKTRACKER_HH
#include "BackTrackerBase.hh"
class TwoBwtBackTracker : public BackTrackerBase
{
public:
TwoBwtBackTracker( BwtReaderBase *inBwtA, BwtReaderBase *inBwtB,
LetterNumber &currentPosA, LetterNumber &currentPosB,
RangeStoreExternal &rA, RangeStoreExternal &rB,
LetterCount &countsSoFarA, LetterCount &countsSoFarB,
int minOcc, const int maxLength, const string &subset, const int cycle,
const bool doesPropagateBkptToSeqNumInSetA,
const bool doesPropagateBkptToSeqNumInSetB,
const bool noComparisonSkip,
const bool propagateSequence );
void process (
int pileNum
, string &thisWord
, IntervalHandlerBase &intervalHandler_
);
BwtReaderBase *inBwtA_;
BwtReaderBase *inBwtB_;
LetterNumber &currentPosA_;
LetterNumber &currentPosB_;
RangeStoreExternal &rA_;
RangeStoreExternal &rB_;
LetterCount &countsSoFarA_;
LetterCount &countsSoFarB_;
int minOcc_;
int maxLength_;
const string &subset_;
const int cycle_;
// LetterNumber numRanges_;
// LetterNumber numSingletonRanges_;
LetterNumber numNotSkippedA_;
LetterNumber numNotSkippedB_;
LetterNumber numSkippedA_;
LetterNumber numSkippedB_;
AlphabetFlag propagateIntervalA_;
AlphabetFlag propagateIntervalB_;
const bool doesPropagateBkptToSeqNumInSetA_;
const bool doesPropagateBkptToSeqNumInSetB_;
// const bool noComparisonSkip_;
};
#endif // TWO_BWT_BACKTRACKER_HH
<|start_filename|>src/shared/SeqReader.hh<|end_filename|>
/**
** Copyright (c) 2011-2014 Illumina, Inc.
**
** This file is part of the BEETL software package,
** covered by the "BSD 2-Clause License" (see accompanying LICENSE file)
**
** Citation: <NAME>, <NAME> and <NAME>
** Lightweight BWT Construction for Very Large String Collections.
** Proceedings of CPM 2011, pp.219-231
**
**/
#ifndef INCLUDED_SEQREADER_HH
#define INCLUDED_SEQREADER_HH
#include "Alphabet.hh"
#include "Config.hh"
#include "Types.hh"
#include <cassert>
#include <cstdio>
#include <string>
class SeqReaderBase
{
public:
SeqReaderBase();
virtual ~SeqReaderBase();
virtual void readNext( char *seqBuf = NULL ) = 0;
virtual const char *thisSeq( void ) = 0;
virtual const char *thisQual( void ) = 0;
virtual const char *thisName( void ) = 0;
virtual bool allRead( void ) const = 0;
virtual int length( void ) const = 0;
}; // ~class SeqReaderBase
class SeqReaderFile : public SeqReaderBase
{
public:
static SeqReaderFile *getReader( FILE *pFile );
SeqReaderFile( FILE *pFile );
virtual ~SeqReaderFile();
virtual void readNext( char *seqBuf = NULL ) = 0;
virtual const char *thisSeq( void );
virtual const char *thisQual( void );
virtual const char *thisName( void );
virtual bool allRead( void ) const;
virtual int length( void ) const;
void rewindFile();
protected:
FILE *pFile_;
char bufSeq_[1 + maxSeqSize];
char bufQual_[1 + maxSeqSize];
char bufName_[1 + maxSeqSize];
bool allRead_;
int length_;
}; // ~class SeqReaderFile
class SeqReaderRaw: public SeqReaderFile
{
public:
SeqReaderRaw( FILE *pFile );
virtual ~SeqReaderRaw();
virtual void readNext( char *seqBuf = NULL );
// virtual const char* thisSeq( void );
// virtual const char* thisQual( void );
// virtual const char* thisName( void );
// virtual bool allRead( void ) const=0;
};
class SeqReaderFasta: public SeqReaderFile
{
public:
SeqReaderFasta( FILE *pFile );
virtual ~SeqReaderFasta();
virtual void readNext( char *seqBuf = NULL );
// virtual const char* thisSeq( void );
// virtual const char* thisQual( void );
// virtual const char* thisName( void );
// virtual bool allRead( void ) const=0;
};
class SeqReaderFastq: public SeqReaderFile
{
public:
SeqReaderFastq( FILE *pFile );
virtual ~SeqReaderFastq();
virtual void readNext( char *seqBuf = NULL );
// virtual const char* thisSeq( void );
// virtual const char* thisQual( void );
// virtual const char* thisName( void );
// virtual bool allRead( void ) const=0;
};
#endif
| ndaniel/BEETL |
<|start_filename|>ouster_viz/include/ouster/lidar_scan.h<|end_filename|>
/**
* @file
* @brief Holds lidar data by field in column-major order
*/
#pragma once
#include <Eigen/Eigen>
#include <iterator>
#include <utility>
#include <vector>
namespace ouster {
struct LidarScan {
using Data = Eigen::Array<double, Eigen::Dynamic, 6>;
using Point = Eigen::Array<double, 1, 6>;
const ssize_t W;
const ssize_t H;
Data data_;
LidarScan(size_t w, size_t h) : W(w), H(h), data_{w * h, 6} {};
Eigen::Ref<Eigen::ArrayXd> x() { return data_.col(0); }
Eigen::Ref<Eigen::ArrayXd> y() { return data_.col(1); }
Eigen::Ref<Eigen::ArrayXd> z() { return data_.col(2); }
Eigen::Ref<Eigen::ArrayXd> intensity() { return data_.col(3); }
Eigen::Ref<Eigen::ArrayXd> noise() { return data_.col(4); }
Eigen::Ref<Eigen::ArrayXd> range() { return data_.col(5); }
struct iterator;
iterator begin() { return iterator(0, &this->data_); }
static inline Point make_val(float x, float y, float z, float intensity,
uint32_t, uint16_t, uint8_t, uint16_t noise,
uint32_t range) {
Point p;
p << x, y, z, intensity, noise, range;
return p;
}
// Minimal set of operations to support os1_util.h:batch_to_iter; not really
// a proper iterator. Remove when Eigen support for STL iterators lands.
struct iterator {
using iterator_category = std::output_iterator_tag;
using value_type = LidarScan::Point;
using difference_type = void;
using pointer = void;
using reference = void;
inline iterator operator++() {
idx_++;
return *this;
}
inline Data::RowXpr operator*() { return data_->row(idx_); }
inline Data::RowXpr operator[](int i) { return data_->row(idx_ + i); }
friend iterator operator+(iterator lhs, int i) {
return iterator{lhs.idx_ + i, lhs.data_};
}
friend bool operator==(const iterator& lhs, const iterator& rhs) {
return lhs.idx_ == rhs.idx_;
}
friend bool operator!=(const iterator& lhs, const iterator& rhs) {
return !(lhs == rhs);
}
private:
iterator(int idx, Data* data) : idx_{idx}, data_{data} {}
int idx_;
Data* data_;
friend class LidarScan;
};
};
}
| wonjin/ouster_example |
<|start_filename|>utils.c<|end_filename|>
#include "utils.h"
void
shuffle(int *array, size_t n)
{
size_t i;
if (n > 1)
{
for (i = 0; i < n - 1; i++)
{
size_t j = i + rand() / (RAND_MAX / (n - i) + 1);
int t = array[j];
array[j] = array[i];
array[i] = t;
}
}
}
<|start_filename|>list_utils.c<|end_filename|>
#include "list_utils.h"
#include <stdio.h>
#include <stdlib.h>
extern struct config conf;
int
list_length(Elem *ptr)
{
int l = 0;
while (ptr)
{
l = l + 1;
ptr = ptr->next;
}
return l;
}
/* add element to the head of the list */
void
list_push(Elem **ptr, Elem *e)
{
if (!e)
{
return;
}
e->prev = NULL;
e->next = *ptr;
if (*ptr)
{
(*ptr)->prev = e;
}
*ptr = e;
}
/* add element to the end of the list */
void
list_append(Elem **ptr, Elem *e)
{
Elem *tmp = *ptr;
if (!e)
{
return;
}
if (!tmp)
{
*ptr = e;
return;
}
while (tmp->next)
{
tmp = tmp->next;
}
tmp->next = e;
e->prev = tmp;
e->next = NULL;
}
/* remove and return last element of list */
Elem*
list_shift(Elem **ptr)
{
Elem *tmp = (ptr) ? *ptr : NULL;
if (!tmp)
{
return NULL;
}
while (tmp->next)
{
tmp = tmp->next;
}
if (tmp->prev)
{
tmp->prev->next = NULL;
}
else
{
*ptr = NULL;
}
tmp->next = NULL;
tmp->prev = NULL;
return tmp;
}
/* remove and return first element of list */
Elem*
list_pop(Elem **ptr)
{
Elem *tmp = (ptr) ? *ptr : NULL;
if (!tmp)
{
return NULL;
}
if (tmp->next)
{
tmp->next->prev = NULL;
}
*ptr = tmp->next;
tmp->next = NULL;
tmp->prev = NULL;
return tmp;
}
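/* split the list into n chunks of roughly equal length (the last chunk takes the remainder) */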
void
list_split(Elem *ptr, Elem **chunks, int n)
{
if (!ptr)
{
return;
}
int len = list_length (ptr), k = len / n, i = 0, j = 0;
while (j < n)
{
i = 0;
chunks[j] = ptr;
if (ptr)
{
ptr->prev = NULL;
}
while (ptr != NULL && ((++i < k) || (j == n-1)))
{
ptr = ptr->next;
}
if (ptr)
{
ptr = ptr->next;
if (ptr && ptr->prev) {
ptr->prev->next = NULL;
}
}
j++;
}
}
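/* unlink and return the n-th element (0-indexed), or NULL if the list is shorter */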
Elem *
list_get(Elem **ptr, size_t n)
{
Elem *tmp = *ptr;
size_t i = 0;
if (!tmp)
{
return NULL;
}
while (tmp && i < n)
{
tmp = tmp->next;
i = i + 1;
}
if (!tmp)
{
return NULL;
}
if (tmp->prev)
{
tmp->prev->next = tmp->next;
}
else
{
*ptr = tmp->next;
}
if (tmp->next)
{
tmp->next->prev = tmp->prev;
}
tmp->prev = NULL;
tmp->next = NULL;
return tmp;
}
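/* unlink and return the sublist of elements at positions [s, e], or NULL if the list is too short */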
Elem *
list_slice(Elem **ptr, size_t s, size_t e)
{
Elem *tmp = (ptr) ? *ptr : NULL, *ret = NULL;
size_t i = 0;
if (!tmp)
{
return NULL;
}
while (i < s && tmp)
{
tmp = tmp->next;
i = i + 1;
}
if (!tmp)
{
return NULL;
}
// set head of new list
ret = tmp;
while (i < e && tmp)
{
tmp = tmp->next;
i = i + 1;
}
if (!tmp)
{
return NULL;
}
// cut slice and return
if (ret->prev)
{
ret->prev->next = tmp->next;
}
else
{
*ptr = tmp->next;
}
if (tmp->next) tmp->next->prev = ret->prev;
ret->prev = NULL;
tmp->next = NULL;
return ret;
}
/* concat chunk of elements to the end of the list */
void
list_concat(Elem **ptr, Elem *chunk)
{
Elem *tmp = (ptr) ? *ptr : NULL;
if (!tmp)
{
*ptr = chunk;
return;
}
while (tmp->next != NULL)
{
tmp = tmp->next;
}
tmp->next = chunk;
if (chunk)
{
chunk->prev = tmp;
}
}
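/* rebuild the list from every chunk except chunks[avoid], which is left disconnected */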
void
list_from_chunks(Elem **ptr, Elem **chunks, int avoid, int len)
{
int next = (avoid + 1) % len;
if (!(*ptr) || !chunks || !chunks[next])
{
return;
}
// Disconnect avoided chunk
Elem *tmp = chunks[avoid];
if (tmp) {
tmp->prev = NULL;
}
while (tmp && tmp->next != NULL && tmp->next != chunks[next])
{
tmp = tmp->next;
}
if (tmp)
{
tmp->next = NULL;
}
// Link rest starting from next
tmp = *ptr = chunks[next];
if (tmp)
{
tmp->prev = NULL;
}
while (next != avoid && chunks[next] != NULL)
{
next = (next + 1) % len;
while (tmp && tmp->next != NULL && tmp->next != chunks[next])
{
if (tmp->next)
{
tmp->next->prev = tmp;
}
tmp = tmp->next;
}
if (tmp)
{
tmp->next = chunks[next];
}
if (chunks[next])
{
chunks[next]->prev = tmp;
}
}
if (tmp)
{
tmp->next = NULL;
}
}
void
print_list(Elem *ptr)
{
if (!ptr)
{
printf("(empty)\n");
return;
}
while (ptr != NULL)
{
printf("%p ", (void*)ptr);
ptr = ptr->next;
}
printf("\n");
}
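/* mark every element of the buffer as unused (set=-2) and clear its links */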
void
initialize_list(Elem *src, ul sz, ul offset)
{
unsigned int j = 0;
for (j = 0; j < (sz / sizeof(Elem)) - offset; j++)
{
src[j].set = -2;
src[j].delta = 0;
src[j].prev = NULL;
src[j].next = NULL;
}
}
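/* link up to n stride-aligned elements, picked at random from the buffer, into a list headed at ptr (chosen elements are marked set=-1) */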
void
pick_n_random_from_list(Elem *ptr, ul stride, ul sz, ul offset, ul n)
{
unsigned int count = 1, i = 0;
unsigned int len = ((sz - (offset * sizeof(Elem))) / stride);
Elem *e = ptr;
e->prev = NULL;
e->set = -1;
ul *array = (ul*) calloc (len, sizeof(ul));
for (i = 1; i < len - 1; i++)
{
array[i] = i * (stride / sizeof(Elem));
}
for (i = 1; i < len - 1; i++)
{
size_t j = i + rand() / (RAND_MAX / (len - i) + 1);
int t = array[j];
array[j] = array[i];
array[i] = t;
}
for (i = 1; i < len && count < n; i++)
{
if (ptr[array[i]].set == -2)
{
e->next = &ptr[array[i]];
ptr[array[i]].prev = e;
ptr[array[i]].set = -1;
e = e->next;
count++;
}
}
free (array);
e->next = NULL;
}
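/* relink the stride-aligned elements that are not part of any found eviction set (set < 0) into a new list and mark them unused again */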
void
rearrange_list(Elem **ptr, ul stride, ul sz, ul offset)
{
unsigned int len = (sz / sizeof(Elem)) - offset, i = 0;
Elem *p = *ptr;
if (!p)
{
return;
}
unsigned int j = 0, step = stride / sizeof(Elem);
for (i = step; i < len - 1; i += step)
{
if (p[i].set < 0)
{
p[i].set = -2;
p[i].prev = &p[j];
p[j].next = &p[i];
j = i;
}
}
p[0].prev = NULL;
p[j].next = NULL;
while (p && p->set > -1)
{
p = p->next;
}
*ptr = p;
if (p)
{
p->set = -2;
p->prev = NULL;
}
}
void
list_set_id(Elem *ptr, int id)
{
while (ptr)
{
ptr->set = id;
ptr = ptr->next;
}
}
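/* greedily build a conflict set in *out: candidates not yet evicted by *out are added to it; the remaining (evicted) candidates are returned in *ptr */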
void
generate_conflict_set(Elem **ptr, Elem **out)
{
Elem *candidate = NULL, *res = NULL;
int ret = 0;
while (*ptr) // or while size |out| == limit
{
candidate = list_pop (ptr);
if (conf.ratio > 0.0)
{
ret = tests (*out, (char*)candidate, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*out, (char*)candidate, conf.rounds, conf.threshold, conf.traverse);
}
if (!ret)
{
// no conflict, add element
list_push (out, candidate);
}
else
{
// conflict, candidate goes to list of victims
list_push (&res, candidate);
}
}
*ptr = res;
}
<|start_filename|>private_structs.h<|end_filename|>
#ifndef private_structs_H
#define private_structs_H
#include "public_structs.h"
#ifdef THREAD_COUNTER
struct params_t {
pthread_mutex_t lock;
uint64_t counter;
};
#endif
#endif /* private_structs_H */
<|start_filename|>Makefile<|end_filename|>
CC = clang
CFLAGS += -std=gnu11 -Wall -pedantic -Wextra -fPIC -O3
LDFLAGS += -lm
evsets_dir := $(dir $(abspath $(lastword $(MAKEFILE_LIST))))
RPATH=-Wl,-R -Wl,${evsets_dir}
default: all
OBJS := list_utils.o hist_utils.o micro.o cache.o utils.o algorithms.o evsets_api.o
all: main.c libevsets.so
${CC} ${CFLAGS} ${RPATH} ${LDFLAGS} $^ -o evsets
libevsets.so: ${OBJS}
${CC} ${CFLAGS} -shared ${LDFLAGS} $^ -o libevsets.so
%.o: %.c %.h public_structs.h private_structs.h
${CC} ${CFLAGS} -c -o $@ $<
counter: CFLAGS += -DTHREAD_COUNTER
counter: LDFLAGS += -pthread -lpthread
counter: all
clean:
rm -f *.o libevsets.so evsets
<|start_filename|>cache.c<|end_filename|>
#include "cache.h"
#include "micro.h"
#include "hist_utils.h"
#include "public_structs.h"
#ifdef THREAD_COUNTER
#include <pthread.h>
#include <signal.h>
#endif
#ifdef THREAD_COUNTER
void* counter_thread()
{
while (1)
{
__asm__ volatile ("mfence");
params.counter++;
__asm__ volatile ("mfence");
}
pthread_exit(NULL);
}
static inline uint64_t
clock_thread()
{
uint64_t ret;
__asm__ volatile ("mfence");
ret = params.counter;
__asm__ volatile ("mfence");
return ret;
}
int
create_counter()
{
	if (pthread_create (&thread, NULL, counter_thread, &params))
{
printf("[!] Error: thread counter\n");
return 1;
}
return 0;
}
void destroy_counter()
{
pthread_kill(thread, 0);
}
#endif
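/*
 * Eviction strategies: different patterns for traversing the candidate
 * linked list (repeated accesses, small look-ahead windows, or plain
 * sequential access) intended to defeat the replacement policy of the
 * targeted microarchitecture.
 */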
inline
void
traverse_list_skylake(Elem *ptr)
{
while (ptr && ptr->next && ptr->next->next)
{
maccess (ptr);
maccess (ptr->next);
maccess (ptr->next->next);
maccess (ptr);
maccess (ptr->next);
maccess (ptr->next->next);
ptr = ptr->next;
}
}
inline
void
traverse_list_asm_skylake(Elem *ptr)
{
__asm__ volatile
(
"test %%rcx, %%rcx;"
"jz out;"
"loop:"
"movq (%%rcx), %%rax;"
"test %%rax, %%rax;"
"jz out;"
"movq (%%rax), %%rax;"
"test %%rax, %%rax;"
"jz out;"
"movq (%%rax), %%rax;"
"movq (%%rcx), %%rcx;"
"movq (%%rcx), %%rax;"
"movq (%%rax), %%rax;"
"test %%rcx, %%rcx;"
"jnz loop;"
"out:"
: // no output
: "c" (ptr)
: "cc", "memory"
);
}
inline
void
traverse_list_asm_haswell(Elem *ptr)
{
__asm__ volatile
(
"test %%rcx, %%rcx;"
"jz out2;"
"loop2:"
"movq (%%rcx), %%rax;"
"test %%rax, %%rax;"
"jz out2;"
"movq (%%rax), %%rax;"
"movq (%%rcx), %%rcx;"
"movq (%%rcx), %%rax;"
"test %%rcx, %%rcx;"
"jnz loop2;"
"out2:"
: // no output
: "c" (ptr)
: "cc", "memory"
);
}
inline
void
traverse_list_asm_simple(Elem *ptr)
{
__asm__ volatile
(
"loop3:"
"test %%rcx, %%rcx;"
"jz out3;"
"movq (%%rcx), %%rcx;"
"jmp loop3;"
"out3:"
: // no output
: "c" (ptr)
: "cc", "memory"
);
}
inline
void
traverse_list_haswell(Elem *ptr)
{
while (ptr && ptr->next)
{
maccess (ptr);
maccess (ptr->next);
maccess (ptr);
maccess (ptr->next);
ptr = ptr->next;
}
}
inline
void
traverse_list_simple(Elem *ptr)
{
while (ptr)
{
maccess (ptr);
ptr = ptr->next;
}
}
inline
void
traverse_list_rrip(Elem *ptr)
{
	Elem *p = ptr, *s = ptr;
	while (ptr)
	{
		p = ptr;
		maccess (ptr);
		maccess (ptr);
		maccess (ptr);
		maccess (ptr);
		ptr = ptr->next;
	}
	if (!p)
	{
		return; // empty list
	}
	while (p != s)
	{
		maccess (p);
		maccess (p);
		p = p->prev;
	}
	maccess (p);
	maccess (p);
}
inline
void
traverse_list_to_n(Elem *ptr, int n)
{
while (ptr && n-- > 0)
{
maccess (ptr);
ptr = ptr->next;
}
}
inline
void
traverse_list_time (Elem *ptr, void (*trav)(Elem*))
{
size_t time;
trav (ptr);
while (ptr)
{
// time = rdtsc();
time = rdtscfence();
maccess (ptr);
ptr->delta += rdtscfence() - time;
// ptr->delta += rdtscp() - time;
ptr = ptr->next;
}
}
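/* prime the victim, traverse the candidate set with the selected strategy,
 * then time a reload of the victim; returns the measured access latency */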
int
test_set(Elem *ptr, char *victim, void (*trav)(Elem*))
{
maccess (victim);
maccess (victim);
maccess (victim);
maccess (victim);
trav (ptr);
maccess (victim + 222); // page walk
size_t delta, time;
#ifndef THREAD_COUNTER
// time = rdtsc();
time = rdtscfence();
maccess (victim);
// delta = rdtscp() - time;
delta = rdtscfence() - time;
#else
time = clock_thread();
maccess (victim);
delta = clock_thread() - time;
#endif
return delta;
}
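/* average each element's access time over rep timed traversals; returns
 * nonzero if more than `ways` elements exceed the threshold, i.e. the list
 * contains a self-evicting (congruent) subset */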
int
test_and_time(Elem *ptr, int rep, int threshold, int ways, void (*trav)(Elem*))
{
int i = 0, count = 0;
Elem *tmp = ptr;
while (tmp)
{
tmp->delta = 0;
tmp = tmp->next;
}
for (i = 0; i < rep; i++)
{
tmp = ptr;
traverse_list_time (tmp, trav);
}
while (ptr)
{
ptr->delta = (float)ptr->delta / rep;
if (ptr->delta > (unsigned)threshold)
{
count++;
}
ptr = ptr->next;
}
return count > ways;
}
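/* eviction test: average the victim's reload latency over rep runs
 * (ignoring outliers above 800 cycles); nonzero means the set evicts it */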
int
tests_avg(Elem *ptr, char *victim, int rep, int threshold, void (*trav)(Elem*))
{
	int i = 0, ret = 0, delta = 0;
Elem *vic = (Elem*)victim;
vic->delta = 0;
for (i=0; i < rep; i++)
{
delta = test_set (ptr, victim, trav);
if (delta < 800) vic->delta += delta;
}
ret = (float)vic->delta / rep;
return ret > threshold;
}
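/* histogram-based eviction test: nonzero if more than rep*ratio of the
 * measured reload latencies exceed the threshold */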
int
tests(Elem *ptr, char *victim, int rep, int threshold, float ratio, void (*trav)(Elem*))
{
int i = 0, ret = 0, delta, hsz = rep * 100;
struct histogram *hist;
if ((hist = (struct histogram*) calloc (hsz, sizeof(struct histogram)))
== NULL)
{
return 0;
}
for (i=0; i < rep; i++)
{
delta = test_set (ptr, victim, trav);
hist_add (hist, hsz, delta);
}
ret = hist_q (hist, hsz, threshold);
free (hist);
return ret > (int)(rep * ratio);
}
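/* estimate the eviction threshold by timing cached vs. flushed accesses to
 * the victim; returns a threshold between the two averages (weighted towards
 * the cached time), or -1 on error */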
int
calibrate(char *victim, struct config *conf)
{
size_t delta, time, t_flushed, t_unflushed;
struct histogram *flushed, *unflushed;
int i, ret, hsz = conf->cal_rounds * 100;
flushed = (struct histogram*) calloc (hsz, sizeof(struct histogram));
unflushed = (struct histogram*) calloc (hsz, sizeof(struct histogram));
if (flushed == NULL || unflushed == NULL)
{
return -1;
}
for (i=0; i < conf->cal_rounds; i++)
{
maccess (victim);
maccess (victim);
maccess (victim);
maccess (victim);
maccess (victim + 222); // page walk
#ifndef THREAD_COUNTER
// time = rdtsc();
time = rdtscfence();
maccess (victim);
// delta = rdtscp() - time;
delta = rdtscfence() - time;
#else
time = clock_thread();
maccess (victim);
delta = clock_thread() - time;
#endif
hist_add (unflushed, hsz, delta);
}
t_unflushed = hist_avg (unflushed, hsz);
for (i=0; i < conf->cal_rounds; i++)
{
maccess (victim); // page walk
flush (victim);
#ifndef THREAD_COUNTER
// time = rdtsc();
time = rdtscfence();
maccess (victim);
// delta = rdtscp() - time;
delta = rdtscfence() - time;
#else
time = clock_thread();
maccess (victim);
delta = clock_thread() - time;
#endif
hist_add (flushed, hsz, delta);
}
t_flushed = hist_avg (flushed, hsz);
ret = hist_min (flushed, hsz);
if (conf->flags & FLAG_VERBOSE)
{
printf("\tflushed: min %d, mode %d, avg %f, max %d, std %.02f, q %d (%.02f)\n",
hist_min (flushed, hsz), hist_mode(flushed, hsz),
hist_avg (flushed, hsz), hist_max (flushed, hsz),
hist_std (flushed, hsz, hist_avg (flushed, hsz)),
hist_q (flushed, hsz, ret),
(double) hist_q (flushed, hsz, ret) / conf->cal_rounds);
printf("\tunflushed: min %d, mode %d, avg %f, max %d, std %.02f, q %d (%.02f)\n",
hist_min (unflushed, hsz), hist_mode(unflushed, hsz),
hist_avg (unflushed, hsz), hist_max (unflushed, hsz),
hist_std (unflushed, hsz, hist_avg (unflushed, hsz)),
hist_q (unflushed, hsz, ret),
(double) hist_q (unflushed, hsz, ret) / conf->cal_rounds);
}
free (unflushed);
free (flushed);
if (t_flushed < t_unflushed)
{
return -1;
} else {
return (t_flushed + t_unflushed * 2) / 3;
}
}
<|start_filename|>micro.h<|end_filename|>
#ifndef micro_H
#define micro_H
#include <stdlib.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include "cache.h"
#define LINE_BITS 6
#define PAGE_BITS 12
#define LINE_SIZE (1 << LINE_BITS)
#define PAGE_SIZE2 (1 << PAGE_BITS)
typedef unsigned long long int ul;
inline
void
flush(void *p)
{
__asm__ volatile ("clflush 0(%0)" : : "c" (p) : "rax");
}
inline
uint64_t
rdtsc()
{
unsigned a, d;
__asm__ volatile ("cpuid\n"
"rdtsc\n"
"mov %%edx, %0\n"
"mov %%eax, %1\n"
: "=r" (a), "=r" (d)
:: "%rax", "%rbx", "%rcx", "%rdx");
return ((uint64_t)a << 32) | d;
}
inline
uint64_t
rdtscp()
{
unsigned a, d;
__asm__ volatile("rdtscp\n"
"mov %%edx, %0\n"
"mov %%eax, %1\n"
"cpuid\n"
: "=r" (a), "=r" (d)
:: "%rax", "%rbx", "%rcx", "%rdx");
return ((uint64_t)a << 32) | d;
}
inline
uint64_t
rdtscfence()
{
uint64_t a, d;
__asm__ volatile ("lfence");
__asm__ volatile ("rdtsc" : "=a" (a), "=d" (d) : :);
__asm__ volatile ("lfence");
return ((d<<32) | a);
}
inline
void
maccess(void* p)
{
__asm__ volatile ("movq (%0), %%rax\n" : : "c" (p) : "rax");
}
ul vtop(ul vaddr);
ul ptos(ul paddr, ul slicebits);
void recheck(Elem *ptr, char *victim, bool err, struct config *conf);
int filter(Elem **ptr, char *victim, int n, int m, struct config *conf);
#endif /* micro_H */
<|start_filename|>public_structs.h<|end_filename|>
#ifndef public_structs_H
#define public_structs_H
#define ALGORITHM_NAIVE 0
#define ALGORITHM_GROUP 1
#define ALGORITHM_BINARY 2
#define ALGORITHM_LINEAR 3
#define ALGORITHM_NAIVE_OPTIMISTIC 4
#define STRATEGY_HASWELL 0
#define STRATEGY_SKYLAKE 1
#define STRATEGY_ASMSKY 3
#define STRATEGY_ASMHAS 4
#define STRATEGY_ASM 5
#define STRATEGY_RRIP 10
#define STRATEGY_SIMPLE 2
#define FLAG_VERBOSE (1<<0)
#define FLAG_NOHUGEPAGES (1<<1)
#define FLAG_CALIBRATE (1<<2)
#define FLAG_RETRY (1<<3)
#define FLAG_BACKTRACKING (1<<4)
#define FLAG_IGNORESLICE (1<<5)
#define FLAG_FINDALLCOLORS (1<<6)
#define FLAG_FINDALLCONGRUENT (1<<7)
#define FLAG_VERIFY (1<<8)
#define FLAG_DEBUG (1<<9)
#define FLAG_CONFLICTSET (1<<10)
typedef struct elem
{
struct elem *next;
struct elem *prev;
int set;
size_t delta;
char pad[32]; // up to 64B
} Elem;
struct
config
{
int rounds, cal_rounds;
int stride;
int cache_size;
int buffer_size;
int cache_way;
int cache_slices;
int threshold;
int algorithm;
int strategy;
int offset;
int con, noncon; // only for debug
void (*traverse)(Elem*);
double ratio;
int flags;
};
#endif /* public_structs_H */
<|start_filename|>evsets_api.h<|end_filename|>
#ifndef evsets_api_H
#define evsets_api_H
#include "public_structs.h"
int init_evsets(struct config *conf);
int find_evsets();
int get_num_evsets();
Elem* get_evset(int id);
void close_evsets();
#endif /* evsets_api_H */
<|start_filename|>hist_utils.h<|end_filename|>
#ifndef hist_utils_H
#define hist_utils_H
#include <stdlib.h>
struct histogram
{
int val;
int count;
};
float hist_avg(struct histogram *hist, int len);
int hist_mode(struct histogram *hist, int len);
int hist_min(struct histogram *hist, int len);
int hist_max(struct histogram *hist, int len);
int hist_q(struct histogram *hist, int len, int threshold);
double hist_variance(struct histogram *hist, int len, int mean);
double hist_std(struct histogram *hist, int len, int mean);
void hist_print(struct histogram *hist, int len);
void hist_add(struct histogram *hist, int len, size_t val);
#endif /* hist_utils_H */
<|start_filename|>micro.c<|end_filename|>
#include "micro.h"
#include "cache.h"
#include <fcntl.h>
#include <unistd.h>
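/* translate a virtual address into a physical address via /proc/self/pagemap
 * (requires privileges to read PFNs); returns -1 on read failure */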
ul
vtop(ul vaddr)
{
int fd = open ("/proc/self/pagemap", O_RDONLY);
if (fd < 0)
{
return -1;
}
unsigned long paddr = -1;
unsigned long index = (vaddr / PAGE_SIZE2) * sizeof(paddr);
if (pread (fd, &paddr, sizeof(paddr), index) != sizeof(paddr))
{
return -1;
}
close (fd);
paddr &= 0x7fffffffffffff;
return (paddr << PAGE_BITS) | (vaddr & (PAGE_SIZE2-1));
}
unsigned int
count_bits(ul n)
{
unsigned int count = 0;
while (n)
{
n &= (n-1) ;
count++;
}
return count;
}
unsigned int
nbits(ul n)
{
unsigned int ret = 0;
n = n >> 1;
while (n > 0)
{
n >>= 1;
ret++;
}
return ret;
}
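/* map a physical address to its LLC slice using the complex-addressing hash
 * functions (masks from Maurice et al.), supporting up to 8 slices */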
ul
ptos(ul paddr, ul slices)
{
unsigned long long ret = 0;
unsigned long long mask[3] = {0x1b5f575440ULL, 0x2eb5faa880ULL, 0x3cccc93100ULL}; // according to Maurice et al.
int bits = nbits(slices) - 1;
switch (bits)
{
case 3:
ret = (ret << 1) | (unsigned long long)(count_bits(mask[2] & paddr) % 2);
case 2:
ret = (ret << 1) | (unsigned long long)(count_bits(mask[1] & paddr) % 2);
case 1:
ret = (ret << 1) | (unsigned long long)(count_bits(mask[0] & paddr) % 2);
default:
break;
}
return ret;
}
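/* verification helper (Linux, root): recompute the cache set and slice of
 * every element via the pagemap and report how many are congruent with the
 * victim */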
void
recheck(Elem *ptr, char *victim, bool err, struct config *conf)
{
unsigned int cache_sets = conf->cache_size / LINE_SIZE / conf->cache_way / conf->cache_slices;
ul vpaddr = 0, paddr = 0, vcacheset = 0, cacheset = 0, vslice = 0, slice = 0;
ul num = 0;
bool verified = true;
if (victim)
{
vpaddr = vtop ((ul)victim);
vcacheset = (vpaddr >> LINE_BITS) & (cache_sets - 1);
vslice = ptos (vpaddr, conf->cache_slices);
}
else
{
vpaddr = vtop ((ul)ptr);
vcacheset = (vpaddr >> LINE_BITS) & (cache_sets - 1);
vslice = ptos (vpaddr, conf->cache_slices);
}
if (vpaddr == 0xffffffffffffffff)
{
printf("[!] Page map not supported. Can't verify set.\n");
return;
}
else
{
if (!err)
{
printf("[+] Verify eviction set (only in Linux with root):\n");
if (victim)
{
printf(" - victim pfn: 0x%llx, cache set: 0x%llx, slice: ",
vpaddr, vcacheset);
if (!(conf->flags & FLAG_IGNORESLICE))
{
printf("0x%llx\n", vslice);
}
else
{
printf("???\n");
}
}
}
}
while (ptr)
{
paddr = vtop ((ul)ptr);
cacheset = (paddr >> LINE_BITS) & (cache_sets - 1);
slice = ptos (paddr, conf->cache_slices);
if (!err)
{
printf(" - element pfn: 0x%llx, cache set: 0x%llx, slice: ",
paddr, cacheset);
if (!(conf->flags & FLAG_IGNORESLICE))
{
printf("0x%llx\n", slice);
}
else
{
printf("???\n");
}
}
ptr = ptr->next;
if (vcacheset == cacheset && ((vslice == slice) || (conf->flags & FLAG_IGNORESLICE)))
{
num++;
}
else
{
verified = false;
}
}
if (verified && !err)
{
printf("[+] Verified!\n");
}
else
{
printf("[-] Num. congruent addresses: %llu\n", num);
}
}
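/* debug helper: keep at most n congruent and m non-congruent elements
 * (with respect to the victim's cache set and slice) and drop the rest */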
int
filter(Elem **ptr, char *victim, int n, int m, struct config *conf)
{
unsigned int cache_sets = conf->cache_size / LINE_SIZE / conf->cache_way / conf->cache_slices;
Elem *tmp = *ptr, *prev = NULL;
ul vpaddr = vtop ((ul)victim), vslice = ptos (vpaddr, conf->cache_slices);
ul vcacheset = (vpaddr >> LINE_BITS) & (cache_sets - 1);
ul cacheset = 0, paddr = 0, slice = 0;
if (vpaddr == 0xffffffffffffffff)
{
printf("[!] Page map not supported. Can't verify set.\n");
return 1;
}
while (tmp)
{
paddr = vtop ((ul)tmp);
slice = ptos (paddr, conf->cache_slices);
cacheset = (paddr >> LINE_BITS) & (cache_sets - 1);
if (vcacheset == cacheset && ((vslice == slice) || (conf->flags & FLAG_IGNORESLICE)))
{
if (n > 0)
{
if (prev == NULL)
{
*ptr = prev = tmp;
tmp->prev = NULL;
}
else
{
prev->next = tmp;
tmp->prev = prev;
prev = tmp;
}
n--;
}
}
else if (m > 0)
{
if (prev == NULL)
{
*ptr = prev = tmp;
tmp->prev = NULL;
}
else
{
prev->next = tmp;
tmp->prev = prev;
prev = tmp;
}
m--;
}
tmp = tmp->next;
}
if (prev)
{
prev->next = NULL;
}
else
{
*ptr = NULL;
return 1;
}
return 0;
}
<|start_filename|>algorithms.c<|end_filename|>
#include "algorithms.h"
#include "list_utils.h"
#include "utils.h"
#include "public_structs.h"
#include <math.h>
#include <stdio.h>
#define MAX_REPS_BACK 100
extern struct config conf;
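/*
 * Naive reduction: while the candidate set still evicts the victim, remove
 * one element at a time; when eviction breaks, re-append the last removed
 * element (optionally backtracking). Requires a number of memory accesses
 * quadratic in the initial set size.
 */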
int
naive_eviction(Elem **ptr, Elem **can, char *victim)
{
Elem *candidate = NULL;
int len = 0, cans = 0, i = 0, fail = 0, ret = 0, repeat = 0;
len = list_length (*ptr);
cans = list_length (*can);
while (len > conf.cache_way)
{
if (conf.ratio > 0.0)
{
ret = tests (*ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
if (ret)
{
candidate = list_pop (ptr);
list_push (can, candidate);
fail = 0;
}
else if (!cans)
{
break;
}
else
{
// candidate is part of the eviction set, put it back at the end
candidate = list_pop (can);
list_append (ptr, candidate);
if (fail)
{
// step back in decision binary tree by readding previous candidate
if (!(conf.flags & FLAG_BACKTRACKING) || repeat > MAX_REPS_BACK)
{
break;
}
repeat++;
if (conf.flags & FLAG_VERBOSE)
{
printf("\tbacktrack one step\n");
}
}
fail = 1;
}
len = list_length (*ptr);
cans = list_length (*can);
if ((conf.flags & FLAG_VERBOSE) && !(i++ % 300))
{
printf("\teset=%d, removed=%d (%d)\n", len, cans, len+cans);
}
}
if (conf.flags & FLAG_VERBOSE)
{
printf("\teset=%d, removed=%d (%d)\n", len, cans, len+cans);
}
if (conf.ratio > 0.0)
{
ret = tests (*ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
if (ret)
{
// Not fully reduced (exceed backtrack steps)
if (len > conf.cache_way)
{
return 1;
}
}
else
{
return 1;
}
return 0;
}
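/*
 * Optimistic variant: pop one candidate and test the remainder; if eviction
 * still works the candidate is discarded, otherwise it is congruent and kept.
 * Stops as soon as cache_way congruent elements have been collected.
 */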
int
naive_eviction_optimistic(Elem **ptr, Elem **can, char *victim)
{
Elem *candidate = NULL, *es = NULL;
int len = 0, cans = 0, elen = 0, i = 0, ret = 0;
len = list_length (*ptr);
while (elen < conf.cache_way && len > conf.cache_way)
{
candidate = list_pop (ptr);
if (conf.ratio > 0.0)
{
ret = tests (*ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
if (ret)
{
// list still is an eviction set of victim
// discard candidate
list_push (can, candidate);
}
else
{
// candidate is congruent, keep it
elen++;
if (!es)
{
// pointer to eviction set sublist
es = candidate;
}
list_append (ptr, candidate);
}
len = list_length (*ptr);
cans = list_length (*can);
if ((conf.flags & FLAG_VERBOSE) && !(i++ % 300))
{
printf("\teset=%d, removed=%d (%d)\n", len, cans, len+cans);
}
}
if (conf.flags & FLAG_VERBOSE)
{
printf("\teset=%d, removed=%d (%d)\n", len, cans, len+cans);
}
list_concat (can, *ptr);
if (elen < conf.cache_way)
{
*ptr = NULL;
return 1;
}
else
{
es->prev->next = NULL;
es->prev = NULL;
*ptr = es;
}
if (conf.ratio > 0.0)
{
ret = tests (*ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
return !ret;
}
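/*
 * Group-testing reduction: split the candidate set into cache_way+1 chunks
 * and search for a chunk whose removal still leaves an eviction set for the
 * victim; discard it and repeat until only cache_way elements remain,
 * backtracking over discarded chunks when the reduction gets stuck.
 */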
int
gt_eviction(Elem **ptr, Elem **can, char *victim)
{
// Random chunk selection
Elem **chunks = (Elem**) calloc (conf.cache_way + 1, sizeof (Elem*));
if (!chunks)
{
return 1;
}
int *ichunks = (int*) calloc (conf.cache_way + 1, sizeof (int)), i;
if (!ichunks)
{
free (chunks);
return 1;
}
int len = list_length (*ptr), cans = 0;
// Calculate length: h = log(a/(a+1), a/n)
double sz = (double)conf.cache_way / len;
double rate = (double)conf.cache_way / (conf.cache_way + 1);
int h = ceil(log(sz) / log(rate)), l = 0;
// Backtrack record
Elem **back = (Elem**) calloc (h * 2, sizeof (Elem*)); // TODO: check height bound
if (!back)
{
free (chunks);
free (ichunks);
return 1;
}
int repeat = 0;
do {
for (i=0; i < conf.cache_way + 1; i++)
{
ichunks[i] = i;
}
shuffle (ichunks, conf.cache_way + 1);
// Reduce
while (len > conf.cache_way)
{
list_split (*ptr, chunks, conf.cache_way + 1);
int n = 0, ret = 0;
// Try paths
do
{
list_from_chunks (ptr, chunks, ichunks[n], conf.cache_way + 1);
n = n + 1;
if (conf.ratio > 0.0)
{
ret = tests (*ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
}
while (!ret && (n < conf.cache_way + 1));
// If find smaller eviction set remove chunk
if (ret && n <= conf.cache_way)
{
back[l] = chunks[ichunks[n-1]]; // store ptr to discarded chunk
cans += list_length (back[l]); // add length of removed chunk
len = list_length (*ptr);
if (conf.flags & FLAG_VERBOSE)
{
printf("\tlvl=%d: eset=%d, removed=%d (%d)\n", l, len, cans, len+cans);
}
l = l + 1; // go to next lvl
}
// Else, re-add last removed chunk and try again
else if (l > 0)
{
list_concat (ptr, chunks[ichunks[n-1]]); // recover last case
l = l - 1;
cans -= list_length (back[l]);
list_concat (ptr, back[l]);
back[l] = NULL;
len = list_length (*ptr);
goto mycont;
}
else
{
list_concat (ptr, chunks[ichunks[n-1]]); // recover last case
break;
}
}
break;
mycont:
if (conf.flags & FLAG_VERBOSE)
{
printf("\tbacktracking step\n");
}
} while (l > 0 && repeat++ < MAX_REPS_BACK && (conf.flags & FLAG_BACKTRACKING));
// recover discarded elements
for (i = 0; i < h * 2; i++)
{
list_concat (can, back[i]);
}
free (chunks);
free (ichunks);
free (back);
int ret = 0;
if (conf.ratio > 0.0)
{
ret = tests (*ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
if (ret)
{
if (len > conf.cache_way)
{
return 1;
}
}
else
{
return 1;
}
return 0;
}
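/*
 * Same group-testing reduction, but without a designated victim: uses
 * test_and_time() to detect a self-evicting subset, reducing the candidate
 * set down to cache_way+1 congruent elements.
 */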
int
gt_eviction_any(Elem **ptr, Elem **can)
{
Elem **chunks = (Elem**) calloc (conf.cache_way + 2, sizeof (Elem*));
if (!chunks)
{
return 1;
}
// Random chunk selection
int *ichunks = (int*) calloc (conf.cache_way + 2, sizeof (int)), i;
if (!ichunks)
{
free (chunks);
return 1;
}
int len = list_length (*ptr), cans = 0;
// Calculate length: h = log(a/(a+1), a/n)
double sz = (double)(conf.cache_way + 1) / len;
double rate = (double)(conf.cache_way + 1) / (conf.cache_way + 2);
int h = ceil(log(sz) / log(rate)), l = 0;
// Backtrack record
Elem **back = calloc (h * 2, sizeof (Elem*)); // TODO: check height bound
if (!back)
{
free (chunks);
free (ichunks);
return 1;
}
int repeat = 0;
do {
for (i=0; i < conf.cache_way + 2; i++)
{
ichunks[i] = i;
}
shuffle (ichunks, conf.cache_way + 2);
while (len > conf.cache_way + 1)
{
list_split (*ptr, chunks, conf.cache_way + 2);
int n = 0, ret = 0;
do
{
list_from_chunks (ptr, chunks, ichunks[n], conf.cache_way + 2);
n = n + 1;
}
while (!(ret = (test_and_time (*ptr, conf.rounds, conf.threshold, conf.cache_way, conf.traverse))) && (n < conf.cache_way + 2));
// If find smaller eviction set remove chunk
if (ret && n <= conf.cache_way + 1)
{
back[l] = chunks[ichunks[n-1]]; // store ptr to discarded chunk
cans += list_length (back[l]); // add length of removed chunk
len = list_length (*ptr);
if (conf.flags & FLAG_VERBOSE)
{
printf("\tlvl=%d: eset=%d, removed=%d (%d)\n", l, len, cans, len+cans);
}
l = l + 1; // go to next lvl
}
// Else, re-add last removed chunk and try again
else if (l > 0)
{
list_concat (ptr, chunks[ichunks[n-1]]); // recover last case
l = l - 1;
cans -= list_length (back[l]);
list_concat (ptr, back[l]);
back[l] = NULL;
len = list_length (*ptr);
goto mycont;
}
else
{
break;
}
}
break;
mycont:
if (conf.flags & FLAG_VERBOSE)
{
printf("\tbacktracking step\n");
}
}
while (l > 0 && repeat++ < MAX_REPS_BACK && (conf.flags & FLAG_BACKTRACKING));
// recover discarded elements
for (i = 0; i < h * 2; i++)
{
list_concat (can, back[i]);
}
free (chunks);
free (ichunks);
free (back);
if (test_and_time (*ptr, conf.rounds, conf.threshold, conf.cache_way, conf.traverse))
{
if (len > conf.cache_way + 1)
{
return 1;
}
}
else
{
return 1;
}
return 0;
}
int
binary_eviction(Elem **ptr, Elem **can, char *victim)
{
	// admittedly inefficient implementation with lists...
	// is there a good way to add backtracking?
int olen = list_length (*ptr), len, cans, count = 0, i = 0, ret = 0;
double x = 0, pivot = 0, laste = 0, lastn = 0;
Elem *positive = NULL;
while (count < conf.cache_way)
{
x = 1;
laste = (double)olen;
lastn = 0;
pivot = 0;
i = 1;
while (fabs(lastn - laste) > 1 && x < olen)
{
i = i << 1;
pivot = ceil (x * (olen - conf.cache_way + 1) / i);
*can = list_slice (ptr, conf.cache_way - 2 + (unsigned int)pivot + 1, olen - 1);
len = list_length (*ptr);
cans = list_length (*can);
if (conf.ratio > 0.0)
{
ret = tests (*ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
if (ret)
{
laste = pivot;
x = 2 * x - 1;
}
else
{
lastn = pivot;
x = 2 * x + 1;
}
if (conf.flags & FLAG_VERBOSE)
{
printf("\telem==%d eset=%d res=%d (%d)\n", count, len, cans, len+cans);
}
list_concat (ptr, *can);
*can = NULL;
}
if (pivot + conf.cache_way > olen)
{
printf("[-] Something wrong, quitting\n");
return 1;
}
positive = list_get (ptr, conf.cache_way - 2 + (unsigned int)laste);
list_push (ptr, positive); // re-arrange list for next round (element to head)
count = count + 1;
}
*can = list_slice (ptr, conf.cache_way, len+cans-1);
if (conf.ratio > 0.0)
{
ret = tests (*ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (*ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
return !ret;
}
<|start_filename|>browser/index.html<|end_filename|>
<!doctype html>
<html>
<head>
<title>Cache Eviction Sets</title>
<script src="/main.js"></script>
<script>
function log(...s){
output.innerText += s + '\n';
}
function clearLog(){
output.innerText = '';
}
function getConf() {
let f = document.getElementById('config');
let b = parseInt(f.b.value), offset = parseInt(f.offset.value), assoc = parseInt(f.assoc.value), stride = parseInt(f.stride.value), conflict = (f.conflict.value === "yes");
return {
B : isNaN(b) ? 6000 : b,
ASSOC : isNaN(assoc) ? 16 : assoc,
OFFSET : isNaN(offset) ? 63 : offset,
        STRIDE : isNaN(stride) ? 4096 : stride,
CONFLICT : conflict,
};
}
</script>
</head>
<body>
<h2>JS/Wasm implementation of group-testing reduction for finding minimal eviction sets</h2>
<p>We implement the threshold group-testing based reduction in JS and Wasm for efficiently finding eviction sets of minimal size. For more details see our paper: <a href="https://vwzq.net/papers/evictionsets18.pdf">"Theory and Practice of Finding Eviction Sets"</a> (published in SP'2019).</p>
<p>Tested on <i>Chrome 74.0.3729.75 with V8 7.4</i> with the <code>--allow-natives-syntax --experimental-wasm-bigint</code> flags. The natives syntax flag is only required for validating the resulting JS offsets. Wasm BigInt support will be enabled by default soon.</p>
<p>
Source code:
<ul>
<li>Source code for validation of JS eviction sets will be published soon</li>
<li>Command line tool in C: <a href="https://github.com/cgvwzq/evsets">https://github.com/cgvwzq/evsets</a></li>
<li><a href="https://vwzq.net/slides/2019-rootedcon_extended.pdf">Slides</a> with some details about Wasm implementation</li>
</ul>
</p>
<p><i>(pepe vila on 2019/04/17)</i></p>
<hr>
<form id="config" action="#" onsubmit="start(getConf());return false">
<label>Blocks: </label><input type="number" name="b" value="6000" min="0"><br>
<label>Associativity: </label><input type="number" name="assoc" value="16" min="4"><br>
<label>Offset: </label><input type="number" name="offset" value="63" min="0" max="64"><br>
<label>Stride: </label><input type="number" name="stride" value="4096" min="64" step="64"><br>
<p><label>Find all: </label>
<input type="radio" name="conflict" value="yes">Yes</input>
<input type="radio" name="conflict" checked value="no">No</input>
</p>
<p>
<input type="submit" value="Start">
<input type="button" onclick="clearLog();" value="Clear">
</p>
</form>
<hr>
<h3>log:</h3>
<code id="output" style="display:block; width:80%; word-wrap:break-word;"></code>
</body>
</html>
<|start_filename|>browser/wasmWorker.js<|end_filename|>
self.onmessage = function(evt) {
const {module, memory, cb} = evt.data;
const instance = new WebAssembly.Instance(module, {env: {mem: memory}});
if (cb) {
let fn = new Function('instance', 'mem', cb);
fn(instance, memory);
}
}
<|start_filename|>utils.h<|end_filename|>
#ifndef utils_H
#define utils_H
#include <stdlib.h>
void shuffle(int *array, size_t n);
#endif /* utils_H */
<|start_filename|>evsets_api.c<|end_filename|>
#include <stdio.h>
#include <time.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <stdint.h>
#include <stdlib.h>
#include <fcntl.h>
#include <getopt.h>
#include <string.h>
#ifdef __MACH__
#include <mach/vm_statistics.h>
#endif
#include "evsets_api.h"
#include "list_utils.h"
#include "hist_utils.h"
#include "utils.h"
#include "cache.h"
#include "micro.h"
#include "algorithms.h"
#define MAX_REPS 50
struct config conf;
static Elem **evsets = NULL;
static int num_evsets = 0;
static int colors = 0;
static char *probe = NULL;
static char *pool = NULL;
static ul pool_sz = 0;
static ul sz = 0;
int
init_evsets(struct config *conf_ptr)
{
// save config
memcpy(&conf, conf_ptr, sizeof(struct config));
#ifdef THREAD_COUNTER
if (create_counter ())
{
return 1;
}
#endif /* THREAD_COUNTER */
sz = conf.buffer_size * conf.stride;
pool_sz = 128 << 20;
if (sz > pool_sz)
{
printf("[!] Error: not enough space\n");
return 1;
}
if (conf.flags & FLAG_NOHUGEPAGES)
{
pool = (char*) mmap (NULL, pool_sz, PROT_READ|PROT_WRITE,
MAP_PRIVATE|MAP_ANONYMOUS, 0, 0);
probe = (char*) mmap (NULL, pool_sz, PROT_READ|PROT_WRITE,
MAP_PRIVATE|MAP_ANONYMOUS, 0, 0); // add MAP_HUGETLB for testing effect of cache sets
}
else
{
#ifdef MAP_HUGETLB
pool = (char*) mmap (NULL, pool_sz, PROT_READ|PROT_WRITE,
MAP_PRIVATE|MAP_ANONYMOUS|MAP_HUGETLB, 0, 0);
probe = (char*) mmap (NULL, pool_sz, PROT_READ|PROT_WRITE,
MAP_PRIVATE|MAP_ANONYMOUS|MAP_HUGETLB, 0, 0);
#elif defined VM_FLAGS_SUPERPAGE_SIZE_2MB
/* Mac OS X specific: the file descriptor used for creating MAP_ANON
regions can be used to pass some Mach VM flags */
pool = (char*) mmap (NULL, pool_sz, PROT_READ|PROT_WRITE,
MAP_PRIVATE|MAP_ANON, VM_FLAGS_SUPERPAGE_SIZE_2MB, 0);
probe = (char*) mmap (NULL, pool_sz, PROT_READ|PROT_WRITE,
MAP_PRIVATE|MAP_ANON, VM_FLAGS_SUPERPAGE_SIZE_2MB, 0);
#endif
}
if (pool == MAP_FAILED || probe == MAP_FAILED)
{
printf("[!] Error: allocation\n");
return 1;
}
printf ("[+] %llu MB buffer allocated at %p (%llu blocks)\n",
sz >> 20, (void*)&pool[conf.offset << 6], sz/sizeof(Elem));
if (conf.stride < 64 || conf.stride % 64 != 0)
{
printf("[!] Error: invalid stride\n");
goto err;
}
// Set eviction strategy
switch (conf.strategy)
{
case STRATEGY_HASWELL:
conf.traverse = &traverse_list_haswell;
break;
case STRATEGY_SKYLAKE:
conf.traverse = &traverse_list_skylake;
break;
case STRATEGY_ASMSKY:
conf.traverse = &traverse_list_asm_skylake;
break;
case STRATEGY_ASMHAS:
conf.traverse = &traverse_list_asm_haswell;
break;
case STRATEGY_ASM:
conf.traverse = &traverse_list_asm_simple;
break;
case STRATEGY_RRIP:
conf.traverse = &traverse_list_rrip;
break;
case STRATEGY_SIMPLE:
default:
conf.traverse = &traverse_list_simple;
break;
}
colors = conf.cache_size / conf.cache_way / conf.stride;
evsets = calloc (colors, sizeof(Elem*));
if (!evsets)
{
printf("[!] Error: allocate\n");
goto err;
}
return 0;
err:
munmap (probe, pool_sz);
munmap (pool, pool_sz);
#ifdef THREAD_COUNTER
destroy_counter ();
#endif /* THREAD_COUNTER */
return 1;
}
void
close_evsets()
{
free (evsets);
munmap (probe, pool_sz);
munmap (pool, pool_sz);
#ifdef THREAD_COUNTER
destroy_counter ();
#endif /* THREAD_COUNTER */
}
int get_num_evsets() {
return num_evsets;
}
Elem* get_evset(int id) {
if (id >= num_evsets) {
return NULL;
}
return evsets[id];
}
int
find_evsets()
{
char *victim = NULL;
Elem *ptr = NULL;
Elem *can = NULL;
victim = &probe[conf.offset << 6];
*victim = 0; // touch line
int seed = time (NULL);
srand (seed);
if (conf.flags & FLAG_CALIBRATE)
{
conf.threshold = calibrate (victim, &conf);
printf("[+] Calibrated Threshold = %d\n", conf.threshold);
}
else
{
printf("[+] Default Threshold = %d\n", conf.threshold);
}
if (conf.threshold < 0)
{
printf("[!] Error: calibration\n");
return 1;
}
if (conf.algorithm == ALGORITHM_LINEAR)
{
victim = NULL;
}
clock_t tts, tte;
int rep = 0;
tts = clock();
pick:
ptr = (Elem*)&pool[conf.offset << 6];
initialize_list (ptr, pool_sz, conf.offset);
// Conflict set incompatible with ANY case (don't needed)
if ((conf.flags & FLAG_CONFLICTSET) && (conf.algorithm != ALGORITHM_LINEAR))
{
pick_n_random_from_list (ptr, conf.stride, pool_sz, conf.offset, conf.buffer_size);
generate_conflict_set (&ptr, &can);
printf ("[+] Compute conflict set: %d\n", list_length (can));
victim = (char*)ptr;
ptr = can; // new conflict set
while (victim && !tests (ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse))
{
victim = (char*)(((Elem*)victim)->next);
}
can = NULL;
}
else
{
pick_n_random_from_list (ptr, conf.stride, pool_sz, conf.offset, conf.buffer_size);
if (list_length (ptr) != conf.buffer_size)
{
printf("[!] Error: broken list\n");
return 1;
}
}
int ret = 0;
if (conf.flags & FLAG_DEBUG)
{
conf.flags |= FLAG_VERIFY;
conf.flags &= ~(FLAG_FINDALLCOLORS | FLAG_FINDALLCONGRUENT);
printf ("[+] Filter: %d congruent, %d non-congruent addresses\n", conf.con, conf.noncon);
ret = filter (&ptr, victim, conf.con, conf.noncon, &conf);
if (ret && (conf.flags & FLAG_RETRY))
{
return 1;
}
}
if (conf.algorithm == ALGORITHM_LINEAR)
{
ret = test_and_time (ptr, conf.rounds, conf.threshold, conf.cache_way, conf.traverse);
}
else if (victim)
{
if (conf.ratio > 0.0)
{
ret = tests (ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
}
if ((victim || conf.algorithm == ALGORITHM_LINEAR) && ret)
{
printf("[+] Initial candidate set evicted victim\n");
// rep = 0;
}
else
{
printf ("[!] Error: invalid candidate set\n");
if ((conf.flags & FLAG_RETRY) && rep < MAX_REPS)
{
rep++;
goto pick;
}
else if (rep >= MAX_REPS)
{
printf ("[!] Error: exceeded max repetitions\n");
}
if (conf.flags & FLAG_VERIFY)
{
recheck (ptr, victim, true, &conf);
}
return 1;
}
clock_t ts, te;
int len = 0;
int id = num_evsets;
// Iterate over all colors of conf.offset
do
{
printf ("[+] Created linked list structure (%d elements)\n",
list_length (ptr));
// Search
switch (conf.algorithm)
{
case ALGORITHM_NAIVE:
printf("[+] Starting naive reduction...\n");
ts = clock();
ret = naive_eviction (&ptr, &can, victim);
te = clock();
break;
case ALGORITHM_NAIVE_OPTIMISTIC:
printf("[+] Starting optimistic naive reduction...\n");
ts = clock();
ret = naive_eviction_optimistic (&ptr, &can, victim);
te = clock();
break;
case ALGORITHM_GROUP:
printf("[+] Starting group reduction...\n");
ts = clock();
ret = gt_eviction (&ptr, &can, victim);
te = clock();
break;
case ALGORITHM_BINARY:
printf("[+] Starting binary group reduction...\n");
ts = clock();
ret = binary_eviction (&ptr, &can, victim);
te = clock();
break;
case ALGORITHM_LINEAR:
printf("[+] Starting linear reduction...\n");
ts = clock();
ret = gt_eviction_any (&ptr, &can);
te = clock();
break;
}
tte = clock();
len = list_length (ptr);
if (ret)
{
printf("[!] Error: optimal eviction set not found (length=%d)\n", len);
}
else
{
printf("[+] Reduction time: %f seconds\n", ((double)(te-ts))/CLOCKS_PER_SEC);
printf("[+] Total execution time: %f seconds\n", ((double)(tte-tts))/CLOCKS_PER_SEC);
// Re-Check that it's an optimal eviction set
if (conf.algorithm != ALGORITHM_LINEAR)
{
printf("[+] (ID=%d) Found minimal eviction set for %p (length=%d): ",
id, (void*)victim, len);
print_list (ptr);
}
else
{
printf("[+] (ID=%d) Found a minimal eviction set (length=%d): ", id, len);
print_list (ptr);
}
evsets[id] = ptr;
num_evsets += 1;
}
if (conf.flags & FLAG_VERIFY)
{
recheck(ptr, victim, ret, &conf);
}
if (ret && (conf.flags & FLAG_RETRY))
{
if (rep < MAX_REPS)
{
list_concat (&ptr, can);
can = NULL;
rep++;
if (!(conf.flags & FLAG_CONFLICTSET) && !(conf.flags & FLAG_FINDALLCOLORS))
{
// select a new initial set
printf ("[!] Error: repeat, pick a new set\n");
goto pick;
}
else
{
// reshuffle list or change victim?
printf ("[!] Error: try new victim\n");
goto next;
// continue;
}
}
else
{
printf ("[!] Error: exceeded max repetitions\n");
}
}
else if (!ret)
{
rep = 0;
}
else
{
list_concat (&ptr, can);
can = NULL;
}
// Remove rest of congruent elements
list_set_id (evsets[id], id);
ptr = can;
if (conf.flags & FLAG_FINDALLCONGRUENT)
{
Elem *e = NULL, *head = NULL, *done = NULL, *tmp = NULL;
int count = 0, t = 0;
while (ptr)
{
e = list_pop(&ptr);
if (conf.ratio > 0.0)
{
t = tests (evsets[id], (char*)e, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
t = tests_avg (evsets[id], (char*)e, conf.rounds, conf.threshold, conf.traverse);
}
if (t)
{
// create list of congruents
e->set = id;
count++;
list_push (&head, e);
}
else
{
list_push (&done, e);
}
}
if (tmp)
{
tmp->next = NULL;
}
printf ("[+] Found %d more congruent elements from set id=%d\n", count, id);
list_concat (&evsets[id], head);
ptr = done;
}
if (!(conf.flags & FLAG_FINDALLCOLORS))
{
break;
}
printf ("----------------------\n");
id = id + 1;
if (id == colors || !ptr ||
((conf.flags & FLAG_CONFLICTSET) && !victim) ||
(!(conf.flags & FLAG_CONFLICTSET) && victim >= probe + pool_sz - conf.stride))
{
printf ("[+] Found all eviction sets in buffer\n");
break;
}
next:
// Find victim for different color. Only for specific algorithms.
if (conf.algorithm != ALGORITHM_LINEAR)
{
int s = 0, ret = 0, ret2 = 0;
do
{
if (!(conf.flags & FLAG_CONFLICTSET))
{
victim += conf.stride;
*victim = 0;
}
else
{
victim = (char*)((Elem*)victim)->next;
}
// Check again. Better reorganize this mess.
if (((conf.flags & FLAG_CONFLICTSET) && !victim) ||
(!(conf.flags & FLAG_CONFLICTSET) && victim >= probe + pool_sz - conf.stride))
{
break;
}
// New victim is not evicted by previous eviction sets
for (ret = 0, s = 0; s < id && !ret; s++)
{
if (conf.ratio > 0.0)
{
ret = tests (evsets[s], victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret = tests_avg (evsets[s], victim, conf.rounds, conf.threshold, conf.traverse);
}
}
if (!ret)
{
// Rest of initial eviction set can evict victim
if (conf.ratio > 0.0)
{
ret2 = tests (ptr, victim, conf.rounds, conf.threshold, conf.ratio, conf.traverse);
}
else
{
ret2 = tests_avg (ptr, victim, conf.rounds, conf.threshold, conf.traverse);
}
}
}
while ((list_length (ptr) > conf.cache_way) && !ret2 && (((conf.flags & FLAG_CONFLICTSET) && victim) ||
(!(conf.flags & FLAG_CONFLICTSET) && (victim < (probe + pool_sz - conf.stride)))));
if (ret2)
{
printf ("[+] Found new victim %p\n", (void*)victim);
}
else
{
printf ("[!] Error: couldn't find more victims\n");
return 1;
}
}
can = NULL;
}
while (((conf.flags & FLAG_FINDALLCOLORS) && id < colors) || ((conf.flags & FLAG_RETRY) && rep < MAX_REPS));
return ret;
}
<|start_filename|>browser/finder.js<|end_filename|>
self.importScripts('/utils.js', '/evset.js');
// Send log to main thread
function log(...args) {
self.postMessage({type: 'log', str: args});
}
// Constants
const P = 4096;
const VERBOSE = false;
const NOLOG = false;
const THRESHOLD = 60;
const RESULTS = [];
// global vars to refactor
var first, next, n;
self.onmessage = async function start(evt) {
// Parse settings
let {B, CONFLICT, OFFSET, ASSOC, STRIDE} = evt.data.conf;
// Prepare wasm instance
const {module, memory} = evt.data;
const instance = new WebAssembly.Instance(module, {env: {mem: memory}});
// Memory view
const view = new DataView(memory.buffer);
if (!NOLOG) log('Prepare new evset');
const evset = new EvSet(view, B, P*2, P, ASSOC, STRIDE, OFFSET);
first = true, next = CONFLICT;
n = 0;
const RETRY = 10;
await new Promise(r => setTimeout(r, 10)); // timeout to allow counter
do {
let r = 0;
while (!cb(instance, evset, CONFLICT) && ++r < RETRY && evset.victim) {
if (VERBOSE) log('retry');
first = false;
}
if (r < RETRY) {
RESULTS.push(evset.refs); // save eviction set
evset.refs = evset.del.slice();
evset.del = [];
evset.relink(); // from new refs
next = CONFLICT;
if (VERBOSE) log('Find next (', evset.refs.length, ')');
}
else
{
next = CONFLICT;
}
} while (CONFLICT && evset.vics.length > 0 && evset.refs.length > ASSOC);
log('Found ' + RESULTS.length + ' different eviction sets');
log('EOF');
postMessage({type:'eof'});
}
function cb(instance, evset, findall) {
let {wasm_hit, wasm_miss} = instance.exports;
const REP = 6;
const T = 1000;
const CLOCK = 256; // hardcoded offset in wasm
const VICTIM = evset.victim|0;
const PTR = evset.ptr|0;
function runCalibration(title, hit, miss, warm) {
for (let i=0; i<T; i++) {
hit(VICTIM);
miss(VICTIM, 0);
}
if (!warm) {
// real run
let t_hit = hit(VICTIM);
let t_miss = miss(VICTIM, PTR);
// output
if (VERBOSE) log ('--- ' + title + ' ---');
if (VERBOSE) log ('Hit:\t' + (Array.isArray(t_hit) ? stats(t_hit) : t_hit));
if (VERBOSE) log ('Miss:\t' + (Array.isArray(t_miss) ? stats(t_miss) : t_miss));
if (VERBOSE) log ('-----------');
// calc threshold
if (Array.isArray(t_hit)) {
t_hit = stats(t_hit).median;
}
if (Array.isArray(t_miss)) {
t_miss = stats(t_miss).median;
}
if (t_hit > t_miss) {
return 0;
} else {
return ((Number(t_miss) + Number(t_hit) * 2) / 3);
}
}
}
const wasmMeasureOpt = {
hit : function hit(vic) {
let t, total = [];
for (let i=0; i<REP; i++) {
t = wasm_hit(vic);
total.push(Number(t));
}
return total;
},
miss : function miss(vic, ptr) {
let t, total = [];
for (let i=0; i<REP; i++) {
t = wasm_miss(vic, ptr);
total.push(Number(t));
}
return total;
}
}
if (first) {
runCalibration('Wasm measure opt', wasmMeasureOpt.hit, wasmMeasureOpt.miss, true);
if (!THRESHOLD) {
log('Error: calibrating');
return false;
}
log('Calibrated threshold: ' + THRESHOLD);
if (findall) {
log('Creating conflict set...');
evset.genConflictSet(wasmMeasureOpt.miss, THRESHOLD);
log('Done: ' + evset.refs.length);
first = false;
}
}
if (next) {
let t;
do {
evset.victim = evset.vics.pop();
if (VERBOSE) log('\ttry victim', evset.victim);
let e = 0;
while (evset.victim && e < RESULTS.length) {
if (median(wasmMeasureOpt.miss(evset.victim, RESULTS[e][0])) >= THRESHOLD) {
if (VERBOSE) log('\tanother, this belongs to a previous eviction set');
evset.victim = evset.vics.pop();
}
e += 1;
}
t = median(wasmMeasureOpt.miss(evset.victim, evset.ptr));
} while (evset.victim && t < THRESHOLD);
if (!evset.victim) {
if (VERBOSE) log('No more victims');
return false;
}
next = false;
}
if (VERBOSE) log ('Starting reduction...');
evset.groupReduction(wasmMeasureOpt.miss, THRESHOLD);
if (evset.refs.length === evset.assoc) {
if (!NOLOG) log('Victim addr: ' + evset.victim);
if (!NOLOG) log('Eviction set: ' + evset.refs);
evset.del = evset.del.flat();
return true;
} else {
while (evset.del.length > 0) {
evset.relinkChunk();
}
if (VERBOSE) log('Failed: ' + evset.refs.length);
return false;
}
}
<|start_filename|>browser/virt_to_phys.c<|end_filename|>
/* from https://github.com/cgvwzq/evsets/blob/master/micro.c */
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#define PAGE_BITS 12
#define LINE_BITS 6
#define SLICE_BITS 3
#define SET_BITS 10
#define PAGE_SIZE2 (1 << PAGE_BITS)
#define LINE_SIZE (1 << LINE_BITS)
#define CACHE_SLICES (1 << SLICE_BITS)
#define CACHE_SETS (1 << SET_BITS)
unsigned long long
vtop(unsigned pid, unsigned long long vaddr)
{
char path[1024];
sprintf (path, "/proc/%u/pagemap", pid);
int fd = open (path, O_RDONLY);
if (fd < 0)
{
return -1;
}
unsigned long long paddr = -1;
unsigned long long index = (vaddr / PAGE_SIZE2) * sizeof(paddr);
if (pread (fd, &paddr, sizeof(paddr), index) != sizeof(paddr))
{
return -1;
}
close (fd);
paddr &= 0x7fffffffffffff;
return (paddr << PAGE_BITS) | (vaddr & (PAGE_SIZE2-1));
}
unsigned int
count_bits(unsigned long long n)
{
unsigned int count = 0;
while (n)
{
n &= (n-1) ;
count++;
}
return count;
}
unsigned int
nbits(unsigned long long n)
{
unsigned int ret = 0;
n = n >> 1;
while (n > 0)
{
n >>= 1;
ret++;
}
return ret;
}
unsigned long long
ptos(unsigned long long paddr, unsigned long long bits)
{
unsigned long long ret = 0;
unsigned long long mask[3] = {0x1b5f575440ULL, 0x2eb5faa880ULL, 0x3cccc93100ULL}; // according to Maurice et al.
switch (bits)
{
case 3:
ret = (ret << 1) | (unsigned long long)(count_bits(mask[2] & paddr) % 2);
case 2:
ret = (ret << 1) | (unsigned long long)(count_bits(mask[1] & paddr) % 2);
case 1:
ret = (ret << 1) | (unsigned long long)(count_bits(mask[0] & paddr) % 2);
default:
break;
}
return ret;
}
void
check(unsigned int pid, unsigned long long *virtual_addresses, unsigned int length)
{
unsigned int cache_sets = 1024;
unsigned long long paddr = 0, cacheset = 0, slice = 0;
for (unsigned int i = 0; i < length; i++)
{
paddr = vtop (pid, virtual_addresses[i]);
cacheset = (paddr >> LINE_BITS) & (CACHE_SETS - 1);
slice = ptos (paddr, SLICE_BITS);
printf(" - element pfn: 0x%llx, cache set: 0x%llx, slice: 0x%llx\n", paddr, cacheset, slice);
}
}
int
main(int argc, char **argv)
{
unsigned int i = 0;
if (argc < 3)
{
printf ("[!] Use: %s pid 0x1 0x2 0x3 ...\n", argv[0]);
return 1;
}
unsigned int pid = atoi (argv[1]);
unsigned int len = argc - 2;
unsigned long long *addrs = malloc (sizeof(unsigned long long)*len);
char *eos = argv[argc-1] + strlen(argv[argc-1]);
if (!addrs)
{
printf ("[!] Err: allocate\n");
return 1;
}
for (i = 2; i < argc; i++)
{
addrs[i-2] = strtoull (argv[i], &eos, 16);
}
check (pid, addrs, len);
}
<|start_filename|>cache.h<|end_filename|>
#ifndef cache_H
#define cache_H
#include <stdlib.h>
#include <stdint.h>
#ifdef THREAD_COUNTER
#include <pthread.h>
#endif
#include "private_structs.h"
#ifdef THREAD_COUNTER
static pthread_t thread;
static struct params_t params;
void* counter_thread();
static inline uint64_t clock_thread();
int create_counter();
void destroy_counter();
#endif /* THREAD_COUNTER */
void traverse_list_skylake(Elem *ptr);
void traverse_list_haswell(Elem *ptr);
void traverse_list_simple(Elem *ptr);
void traverse_list_asm_skylake(Elem *ptr);
void traverse_list_asm_haswell(Elem *ptr);
void traverse_list_asm_simple(Elem *ptr);
void traverse_list_rrip(Elem *ptr);
void traverse_list_to_n(Elem *ptr, int n);
void traverse_list_time(Elem *ptr, void (*trav)(Elem*));
int test_set(Elem *ptr, char *victim, void (*trav)(Elem*));
int tests(Elem *ptr, char *victim, int rep, int threshold, float ratio, void (*trav)(Elem*));
int tests_avg(Elem *ptr, char *victim, int rep, int threshold, void (*trav)(Elem*));
int test_and_time(Elem *ptr, int rep, int threshold, int ways, void (*trav)(Elem*));
int calibrate(char *victim, struct config *conf);
#endif /* cache_H */
<|start_filename|>list_utils.h<|end_filename|>
#ifndef list_utils_H
#define list_utils_H
#include <stdlib.h>
#include "cache.h"
#include "micro.h"
int list_length(Elem *ptr);
Elem *list_pop(Elem **ptr);
Elem *list_shift(Elem **ptr);
void list_push(Elem **ptr, Elem *e);
void list_append(Elem **ptr, Elem *e);
void list_split(Elem *ptr, Elem **chunks, int n);
Elem *list_slice(Elem **ptr, size_t s, size_t e);
Elem *list_get(Elem **ptr, size_t n);
void list_concat(Elem **ptr, Elem *chunk);
void list_from_chunks(Elem **ptr, Elem **chunks, int avoid, int len);
void list_set_id(Elem *ptr, int id);
void print_list(Elem *ptr);
//void initialize_random_list(Elem *ptr, ul offset, ul sz, Elem *base);
void initialize_list(Elem *ptr, ul sz, ul offset);
void pick_n_random_from_list(Elem *src, ul stride, ul sz, ul offset, ul n);
void rearrange_list(Elem **ptr, ul stride, ul sz, ul offset);
void generate_conflict_set(Elem **ptr, Elem **out);
#endif /* list_utils_H */
<|start_filename|>browser/utils.js<|end_filename|>
// Statistics
function stats(data) {
return {
'min' : Math.min.apply(0, data),
'max' : Math.max.apply(0, data),
'mean' : mean(data),
'median' : median(data),
'std': std(data),
'mode' : mode(data),
'toString' : function() {
return `{min: ${this.min.toFixed(2)},\tmax: ${this.max.toFixed(2)},\tmean: ${this.mean.toFixed(2)},\tmedian: ${this.median.toFixed(2)},\tstd: ${this.std.toFixed(2)},\tmode: ${this.mode.map(e => e.toFixed(2))}}`;
}
};
}
function min(arr) {
return Math.min.apply(0, arr);
}
function mean(arr) {
return arr.reduce((a,b) => a+b) / arr.length;
}
function median(arr) {
arr.sort((a,b) => a-b);
return (arr.length % 2) ? arr[(arr.length / 2) | 0] : mean([arr[arr.length/2 - 1], arr[arr.length / 2]]);
}
function mode(arr) {
var counter = {};
var mode = [];
var max = 0;
for (var i in arr) {
if (!(arr[i] in counter)) {
counter[arr[i]] = 0;
}
counter[arr[i]]++;
if (counter[arr[i]] == max) {
mode.push(arr[i]);
} else if (counter[arr[i]] > max) {
max = counter[arr[i]];
mode = [arr[i]];
}
}
return mode;
}
function variance(arr) {
var x = mean(arr);
return arr.reduce((pre, cur) => pre + ((cur - x)**2)) / (arr.length - 1);
}
function std(arr) {
return Math.sqrt(variance(arr));
}
// Overload
Function.prototype.toSource = function() {
return this.toString().slice(this.toString().indexOf('{')+1,-1);
}
Object.defineProperty(Array.prototype, 'chunk', {
value: function(n){
let results = [];
let ceiled = this.length%n;
let k = Math.ceil(this.length/n);
let q = Math.floor(this.length/n);
		let c = 0, i = 0;
		for (; i<ceiled; i++) {
			results[i] = this.slice(c, c+k);
			c += k;
		}
		for (; i<n; i++) {
			results[i] = this.slice(c, c+q);
			c += q;
		}
return results;
}
});
// OptimizationStatus
function optimizationStatusToString(status) {
/* from https://github.com/v8/v8/blob/master/src/runtime/runtime.h */
let o = [];
if (status & (1<<0)) o.push('kIsFunction');
if (status & (1<<1)) o.push('kNeverOptimize');
if (status & (1<<2)) o.push('kAlwaysOptimize');
if (status & (1<<3)) o.push('kMaybeDeopted');
if (status & (1<<4)) o.push('kOptimized');
if (status & (1<<5)) o.push('kTurboFanned');
if (status & (1<<6)) o.push('kInterpreted');
if (status & (1<<7)) o.push('kMarkedForOptimization');
if (status & (1<<8)) o.push('kMarkedForConcurrentOptimization');
if (status & (1<<9)) o.push('kOptimizingConccurently');
if (status & (1<<10)) o.push('kIsExecuting');
if (status & (1<<11)) o.push('kTopmostFrameIsTurboFanned');
if (status & (1<<12)) o.push('kLiteMode');
return o.join("|");
}
// Lists
<|start_filename|>browser/main.js<|end_filename|>
async function start(config) {
const BM = 128*1024*1024; // Eviction buffer
const WP = 64*1024; // A WebAssembly page has a constant size of 64KB
	const SZ = BM/WP; // number of wasm pages (the 128 MB size is hardcoded in the wasm module)
// Shared memory
const memory = new WebAssembly.Memory({initial: SZ, maximum: SZ, shared: true});
// Clock thread
const resp = await fetch('/clock.wasm');
const bin = await resp.arrayBuffer();
const module = new WebAssembly.Module(bin);
const clock = new Worker('/wasmWorker.js');
clock.postMessage({"module": module, "memory": memory});
// Finder thread
const resp2 = await fetch('/poc.wasm');
const bin2 = await resp2.arrayBuffer();
const module2 = new WebAssembly.Module(bin2);
const finder = new Worker('/finder.js');
finder.onmessage = function handle(evt) {
let msg = evt.data;
switch (msg.type) {
case 'log':
log (...msg.str);
msg.str.map(e => %DebugPrint(e)); // used for verification
break;
case 'eof':
clock.terminate();
finder.terminate();
default:
}
};
finder.postMessage({"module": module2, "memory": memory, "conf": config});
return false;
}
<|start_filename|>algorithms.h<|end_filename|>
#ifndef algorithms_H
#define algorithms_H
#include "cache.h"
int naive_eviction(Elem **ptr, Elem **can, char *victim);
int naive_eviction_optimistic(Elem **ptr, Elem **can, char *victim);
int gt_eviction(Elem **ptr, Elem **can, char *victim);
int gt_eviction_any(Elem **ptr, Elem **can);
int binary_eviction(Elem **ptr, Elem **can, char *victim);
#endif /* algorithms_H */
<|start_filename|>browser/evset.js<|end_filename|>
function EvSet(view, nblocks, start=8192, victim=4096, assoc=16, stride=4096, offset=0) {
const RAND = true;
/* private methods */
this.genIndices = function (view, stride) {
let arr = [], j = 0;
for (let i=(stride)/4; i < (view.byteLength-this.start)/4; i += stride/4) {
arr[j++] = this.start + this.offset + i*4;
}
arr.unshift(this.start + this.offset);
return arr;
}
this.randomize = function (arr) {
for (let i = arr.length; i; i--) {
var j = Math.floor(Math.random() * i | 0) | 0;
[arr[i - 1], arr[j]] = [arr[j], arr[i - 1]];
}
return arr;
}
this.indicesToLinkedList = function (buf, indices) {
if (indices.length == 0) {
this.ptr = 0;
return;
}
let pre = this.ptr = indices[0];
for (let i=1; i<indices.length; i++) {
view.setUint32(pre, indices[i], true);
pre = indices[i];
}
view.setUint32(pre, 0, true);
}
this.init = function() {
let indx = this.genIndices(view, stride);
if (RAND) indx = this.randomize(indx);
indx.splice(nblocks, indx.length); // select nblocks elements
this.indicesToLinkedList(view, indx);
return indx;
}
/* end-of-private */
/* properties */
this.start = start;
this.offset = (offset&0x3f)<<6;
this.victim = victim+this.offset;
view.setUint32(this.victim, 0, true); // lazy alloc
this.assoc = assoc;
this.ptr = 0;
this.refs = this.init();
this.del = [];
this.vics = [];
/* end-of-properties */
/* public methods */
this.unlinkChunk = function unlinkChunk(chunk) {
let s = this.refs.indexOf(chunk[0]), f = this.refs.indexOf(chunk[chunk.length-1]);
view.setUint32(this.refs[f], 0, true);
this.refs.splice(s, chunk.length); // splice chunk indexes
if (this.refs.length === 0) { // empty list
this.ptr = 0;
} else if (s === 0) { // removing first chunk
this.ptr = this.refs[0];
} else if (s > this.refs.length-1) { // removing last chunk
view.setUint32(this.refs[this.refs.length-1], 0, true);
} else { // removing middle chunk
view.setUint32(this.refs[s-1], this.refs[s], true);
}
this.del.push(chunk); // right
}
this.relinkChunk = function relinkChunk() {
let chunk = this.del.pop(); // right
if (chunk === undefined) {
return;
}
this.ptr = chunk[0];
if (this.refs.length > 0) {
view.setUint32(chunk[chunk.length-1], this.refs[0], true);
}
if (typeof(chunk) === 'number') {
this.refs.unshift(chunk); // left
} else {
this.refs.unshift(...chunk); // left
}
}
this.groupReduction = function groupReduction(miss, threshold) {
const MAX = 20;
let i = 0, r = 0;
while (this.refs.length > this.assoc) {
let m = this.refs.chunk(this.assoc+1);
let found = false;
for (let c in m) {
this.unlinkChunk(m[c]);
let t = median(miss(this.victim, this.ptr));
if (t < threshold) {
this.relinkChunk();
} else {
found = true;
break;
}
}
if (!found) {
r += 1;
if (r < MAX) {
this.relinkChunk();
if (this.del.length === 0) break;
} else {
while (this.del.length > 0) {
this.relinkChunk();
}
break;
}
}
if (VERBOSE) if (!(i++ % 100)) print('\tremaining size: ', this.refs.length);
}
}
this.linkElement = function linkElement(e) {
if (e === undefined) return;
this.ptr = e;
if (this.refs.length > 0) {
view.setUint32(e, this.refs[0], true);
} else {
view.setUint32(e, 0, true);
}
this.refs.unshift(e); // left
}
this.relink = function () {
        this.indicesToLinkedList(view, this.refs);
}
this.genConflictSet = function (miss, threshold) {
let indices = this.refs; // copy original indices
this.refs = [];
this.vics = [];
let pre = this.ptr = indices[0], i = 0, e, l = indices.length;
for (i=0; i<Math.min(l, 800); i++) {
e = indices.pop();
this.linkElement(e);
}
while (indices.length > 0) {
e = indices.pop();
view.setUint32(e, 0, true); // chrome's COW
let t = miss(e, this.ptr);
if (Array.isArray(t)) {
t = median(t);
}
if (t < threshold) {
this.linkElement(e);
} else {
this.vics.push(e);
// break;
}
}
first = true;
}
/* end-of-public */
}
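Editor's sketch of driving EvSet (assumptions: `memory` is the shared WebAssembly.Memory from main.js, `probe(victim, ptr)` is a timing callback returning measured access times, `threshold` separates cache hits from misses, and the Array.prototype.chunk / median / VERBOSE / print helpers used above are defined elsewhere in the repository):
const view = new DataView(memory.buffer);
const ev = new EvSet(view, 4096);          // 4096 candidate blocks, default geometry
// Reduce the candidates to roughly `assoc` addresses that still evict ev.victim.
ev.groupReduction(probe, threshold);
// ev.refs now holds the reduced eviction set; ev.ptr is the head of its linked list.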
<|start_filename|>hist_utils.c<|end_filename|>
#include "hist_utils.h"
#include <stdio.h>
#include <math.h>
void
hist_add(struct histogram *hist, int len, size_t val)
{
    int j = val;
    if (j < 800) // remove outliers
{
while (hist[j % len].val > 0 && hist[j % len].val != (int)val) {
j++;
}
hist[j % len].val = val;
hist[j % len].count++;
}
}
float
hist_avg(struct histogram *hist, int len) {
float total = 0;
int i = 0, n = 0;
for (i=0; i < len; i++) {
if (hist[i].val > 0) {
total += hist[i].val * hist[i].count;
n += hist[i].count;
}
}
return (float)(total / n);
}
int
hist_mode(struct histogram *hist, int len)
{
int i, max = 0, mode = 0;
for (i=0; i < len; i++)
{
if (hist[i].count > max)
{
max = hist[i].count;
mode = hist[i].val;
}
}
return mode;
}
int
hist_min(struct histogram *hist, int len)
{
int i, min = 99999;
for (i=0; i < len; i++)
{
if (hist[i].count > 0 && hist[i].val < min)
{
min = hist[i].val;
}
}
return min;
}
int
hist_max(struct histogram *hist, int len)
{
    int i, max = 0;
for (i=0; i < len; i++)
{
if (hist[i].count > 0 && hist[i].val > max)
{
max = hist[i].val;
}
}
return max;
}
double
hist_variance(struct histogram *hist, int len, int mean)
{
int i, count = 0;
double sum = 0;
for (i=0; i < len; i++)
{
if (hist[i].count > 0) {
sum += pow((double)(hist[i].val - mean), 2.0) * hist[i].count;
count += hist[i].count;
}
}
return sum / count;
}
double
hist_std(struct histogram *hist, int len, int mean)
{
return sqrt(hist_variance (hist, len, mean));
}
// count number of misses
int
hist_q(struct histogram *hist, int len, int threshold)
{
int i = 0, count = 0;
for (i=0; i < len; i++)
{
if (hist[i].count > 0 && hist[i].val > threshold)
{
count += hist[i].count;
}
}
return count;
}
void
hist_print(struct histogram *hist, int len)
{
int i = 0;
for (i=0; i < len; i++)
{
if (hist[i].count > 0)
{
printf("%d(%d) ", hist[i].val, hist[i].count);
}
}
printf("\n");
}
| cgvwzq/evsets |
<|start_filename|>Makefile<|end_filename|>
BABEL ?= ./node_modules/.bin/babel
BABEL-NODE ?= ./node_modules/.bin/babel-node
BROWSER-RUN ?= ./node_modules/.bin/browser-run
BROWSERIFY ?= ./node_modules/.bin/browserify
DOCCO ?= ./node_modules/docco/bin/docco
ESLINT ?= ./node_modules/.bin/eslint
FAUCET ?= ./node_modules/.bin/faucet
HTTPSERVE ?= ./node_modules/.bin/http-server
SED ?= sed
all: node_modules
FORCE:
node_modules: package.json package-lock.json
npm install
.PHONY: browser-check
browser-check: node_modules
$(BROWSERIFY) -t babelify tests/tests.js | $(BROWSER-RUN) -p 8022
.PHONY: serve
serve: node_modules
$(HTTPSERVE) -p 8080
.PHONY: clean
clean:
-rm -rf node_modules
.PHONY: docs
docs:
$(DOCCO) --css=stylesheets/docco.css src/pluggable.js
.PHONY: watchjs
watchjs: node_modules
$(BABEL) --out-file=./dist/pluggable.js --watch=src/pluggable.js
.PHONY: dist
dist: node_modules
npm run build
.PHONY: check
check: dist
$(BABEL-NODE) tests/tests.js | $(FAUCET)
.PHONY: release
release:
$(SED) -ri s/\"version\":\ \"[0-9]\+\.[0-9]\+\.[0-9]\+\"/\"version\":\ \"$(VERSION)\"/ package.json
$(SED) -i "s/(Unreleased)/(`date +%Y-%m-%d`)/" CHANGES.md
make docs
make dist
.PHONY: eslint
eslint: node_modules
$(ESLINT) src/*.js
<|start_filename|>rollup.config.js<|end_filename|>
import babel from '@rollup/plugin-babel';
import resolve from '@rollup/plugin-node-resolve';
const plugins = [
resolve(),
babel({
"plugins": [
'@babel/plugin-proposal-optional-chaining'
],
"presets": [[ "@babel/preset-env" ]]
})
];
export default [
{
input: 'src/pluggable.js',
output: {
name: 'pluggable',
sourcemap: true,
file: 'dist/pluggable.js',
format: 'umd'
},
plugins
}
];
<|start_filename|>docs/pluggable.html<|end_filename|>
<!DOCTYPE html>
<html>
<head>
<title>pluggable.js</title>
<meta http-equiv="content-type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, target-densitydpi=160dpi, initial-scale=1.0; maximum-scale=1.0; user-scalable=0;">
<link rel="stylesheet" media="all" href="docco.css" />
</head>
<body>
<div id="container">
<div id="background"></div>
<ul class="sections">
<li id="title">
<div class="annotation">
<h1>pluggable.js</h1>
</div>
</li>
<li id="section-1">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-1">¶</a>
</div>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-comment">/*
____ __ __ __ _
/ __ \/ /_ __ ___ ___ ____ _/ /_ / /__ (_)____
/ /_/ / / / / / __ \/ __ \/ __/ / __ \/ / _ \ / / ___/
/ ____/ / /_/ / /_/ / /_/ / /_/ / /_/ / / __/ / (__ )
/_/ /_/\__,_/\__, /\__, /\__/_/_.___/_/\___(_)_/ /____/
/____//____/ /___/
*/</span></pre></div></div>
</li>
<li id="section-2">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-2">¶</a>
</div>
<p>Pluggable.js lets you make your JavaScript code pluggable while still
keeping sensitive objects and data private through closures.</p>
</div>
</li>
<li id="section-3">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-3">¶</a>
</div>
<p><code>wrappedOverride</code> creates a partially applied wrapper function
that makes sure to set the proper super method when the
overriding method is called. This is done to enable
chaining of plugin methods, all the way up to the
original method.</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-function"><span class="hljs-keyword">function</span> <span class="hljs-title">wrappedOverride</span> (<span class="hljs-params">key, value, super_method, default_super, ...args</span>) </span>{
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> super_method === <span class="hljs-string">"function"</span>) {
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> <span class="hljs-keyword">this</span>.__super__ === <span class="hljs-string">"undefined"</span>) {
<span class="hljs-comment">/* We're not on the context of the plugged object.
* This can happen when the overridden method is called via
* an event handler or when it's a constructor.
*
* In this case, we simply tack on the __super__ obj.
*/</span>
<span class="hljs-keyword">this</span>.__super__ = default_super;
}
<span class="hljs-keyword">this</span>.__super__[key] = super_method.bind(<span class="hljs-keyword">this</span>);
}
<span class="hljs-keyword">return</span> value.apply(<span class="hljs-keyword">this</span>, args);
}</pre></div></div>
</li>
<li id="section-4">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-4">¶</a>
</div>
<p>The <code>PluginSocket</code> class contains the plugin architecture, and gets
created whenever <code>pluggable.enable(obj);</code> is called on the object
that you want to make pluggable.
You can also see it as the thing into which the plugins are plugged.
It takes two parameters, first, the object being made pluggable, and
then the name by which the pluggable object may be referenced on the
<strong>super</strong> object (inside overrides).</p>
</div>
<div class="content"><div class='highlight'><pre><span class="hljs-class"><span class="hljs-keyword">class</span> <span class="hljs-title">PluginSocket</span> </span>{
<span class="hljs-keyword">constructor</span> (plugged, name) {
<span class="hljs-keyword">this</span>.name = name;
<span class="hljs-keyword">this</span>.plugged = plugged;
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> <span class="hljs-keyword">this</span>.plugged.__super__ === <span class="hljs-string">'undefined'</span>) {
<span class="hljs-keyword">this</span>.plugged.__super__ = {};
} <span class="hljs-keyword">else</span> <span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> <span class="hljs-keyword">this</span>.plugged.__super__ === <span class="hljs-string">'string'</span>) {
<span class="hljs-keyword">this</span>.plugged.__super__ = { <span class="hljs-string">'__string__'</span>: <span class="hljs-keyword">this</span>.plugged.__super__ };
}
<span class="hljs-keyword">this</span>.plugged.__super__[name] = <span class="hljs-keyword">this</span>.plugged;
<span class="hljs-keyword">this</span>.plugins = {};
<span class="hljs-keyword">this</span>.initialized_plugins = [];
}</pre></div></div>
</li>
<li id="section-5">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-5">¶</a>
</div>
<p><code>_overrideAttribute</code> overrides an attribute on the original object
(the thing being plugged into).</p>
<p>If the attribute being overridden is a function, then the original
function will still be available via the <code>__super__</code> attribute.</p>
<p>If the same function is being overridden multiple times, then
the original function will be available at the end of a chain of
functions, starting from the most recent override, all the way
back to the original function, each being referenced by the
previous’ <strong>super</strong> attribute.</p>
<p>For example:</p>
<p><code>plugin2.MyFunc.__super__.myFunc => plugin1.MyFunc.__super__.myFunc => original.myFunc</code></p>
</div>
<div class="content"><div class='highlight'><pre> _overrideAttribute (key, plugin) {
<span class="hljs-keyword">const</span> value = plugin.overrides[key];
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> value === <span class="hljs-string">"function"</span>) {
<span class="hljs-keyword">const</span> default_super = {};
default_super[<span class="hljs-keyword">this</span>.name] = <span class="hljs-keyword">this</span>.plugged;
<span class="hljs-keyword">const</span> super_method = <span class="hljs-keyword">this</span>.plugged[key];
<span class="hljs-keyword">this</span>.plugged[key] = <span class="hljs-function"><span class="hljs-keyword">function</span> (<span class="hljs-params">...args</span>) </span>{
<span class="hljs-keyword">return</span> wrappedOverride.apply(<span class="hljs-keyword">this</span>, [key, value, super_method, default_super, ...args]);
}
} <span class="hljs-keyword">else</span> {
<span class="hljs-keyword">this</span>.plugged[key] = value;
}
}
_extendObject (obj, attributes) {
<span class="hljs-keyword">if</span> (!obj.prototype.__super__) {
obj.prototype.__super__ = {};
obj.prototype.__super__[<span class="hljs-keyword">this</span>.name] = <span class="hljs-keyword">this</span>.plugged;
}
<span class="hljs-keyword">for</span> (<span class="hljs-keyword">const</span> [key, value] <span class="hljs-keyword">of</span> <span class="hljs-built_in">Object</span>.entries(attributes)) {
<span class="hljs-keyword">if</span> (key === <span class="hljs-string">'events'</span>) {
obj.prototype[key] = <span class="hljs-built_in">Object</span>.assign(value, obj.prototype[key]);
} <span class="hljs-keyword">else</span> <span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> value === <span class="hljs-string">'function'</span>) {</pre></div></div>
</li>
<li id="section-6">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-6">¶</a>
</div>
<p>We create a partially applied wrapper function, that
makes sure to set the proper super method when the
overriding method is called. This is done to enable
chaining of plugin methods, all the way up to the
original method.</p>
</div>
<div class="content"><div class='highlight'><pre> <span class="hljs-keyword">const</span> default_super = {};
default_super[<span class="hljs-keyword">this</span>.name] = <span class="hljs-keyword">this</span>.plugged;
<span class="hljs-keyword">const</span> super_method = obj.prototype[key];
obj.prototype[key] = <span class="hljs-function"><span class="hljs-keyword">function</span> (<span class="hljs-params">...args</span>) </span>{
<span class="hljs-keyword">return</span> wrappedOverride.apply(<span class="hljs-keyword">this</span>, [key, value, super_method, default_super, ...args]);
}
} <span class="hljs-keyword">else</span> {
obj.prototype[key] = value;
}
}
}</pre></div></div>
</li>
<li id="section-7">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-7">¶</a>
</div>
<p>Plugins can specify dependencies (by means of the
<code>dependencies</code> list attribute), which refer to other plugins
that will be initialized first, before the plugin itself gets initialized.</p>
<p>If <code>strict_plugin_dependencies</code> is set to <code>false</code> (on the object being
made pluggable), then no error will be thrown if any of these plugins aren’t
available.</p>
</div>
<div class="content"><div class='highlight'><pre> loadPluginDependencies (plugin) {
plugin.dependencies?.forEach(<span class="hljs-function"><span class="hljs-params">name</span> =></span> {
<span class="hljs-keyword">const</span> dep = <span class="hljs-keyword">this</span>.plugins[name];
<span class="hljs-keyword">if</span> (dep) {
<span class="hljs-keyword">if</span> (dep.dependencies?.includes(plugin.__name__)) {
<span class="hljs-comment">/* <span class="hljs-doctag">FIXME:</span> circular dependency checking is only one level deep. */</span>
<span class="hljs-keyword">throw</span> <span class="hljs-string">"Found a circular dependency between the plugins \""</span>+
plugin.__name__+<span class="hljs-string">"\" and \""</span>+name+<span class="hljs-string">"\""</span>;
}
<span class="hljs-keyword">this</span>.initializePlugin(dep);
} <span class="hljs-keyword">else</span> {
<span class="hljs-keyword">this</span>.throwUndefinedDependencyError(
<span class="hljs-string">"Could not find dependency \""</span>+name+<span class="hljs-string">"\" "</span>+
<span class="hljs-string">"for the plugin \""</span>+plugin.__name__+<span class="hljs-string">"\". "</span>+
<span class="hljs-string">"If it's needed, make sure it's loaded by require.js"</span>);
}
});
}
throwUndefinedDependencyError (msg) {
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">this</span>.plugged.strict_plugin_dependencies) {
<span class="hljs-keyword">throw</span> msg;
} <span class="hljs-keyword">else</span> {
<span class="hljs-keyword">if</span> (<span class="hljs-built_in">console</span>.warn) {
<span class="hljs-built_in">console</span>.warn(msg);
} <span class="hljs-keyword">else</span> {
<span class="hljs-built_in">console</span>.log(msg);
}
}
}</pre></div></div>
</li>
<li id="section-8">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-8">¶</a>
</div>
<p><code>applyOverrides</code> is called by initializePlugin. It applies any
and all overrides of methods or Backbone views and models that
are defined on any of the plugins.</p>
</div>
<div class="content"><div class='highlight'><pre> applyOverrides (plugin) {
<span class="hljs-built_in">Object</span>.keys(plugin.overrides || {}).forEach(<span class="hljs-function"><span class="hljs-params">key</span> =></span> {
<span class="hljs-keyword">const</span> override = plugin.overrides[key];
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> override === <span class="hljs-string">"object"</span>) {
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> <span class="hljs-keyword">this</span>.plugged[key] === <span class="hljs-string">'undefined'</span>) {
<span class="hljs-keyword">this</span>.throwUndefinedDependencyError(
<span class="hljs-string">`Plugin "<span class="hljs-subst">${plugin.__name__}</span>" tried to override "<span class="hljs-subst">${key}</span>" but it's not found.`</span>);
} <span class="hljs-keyword">else</span> {
<span class="hljs-keyword">this</span>._extendObject(<span class="hljs-keyword">this</span>.plugged[key], override);
}
} <span class="hljs-keyword">else</span> {
<span class="hljs-keyword">this</span>._overrideAttribute(key, plugin);
}
});
}</pre></div></div>
</li>
<li id="section-9">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-9">¶</a>
</div>
<p><code>initializePlugin</code> applies the overrides (if any) defined on all
the registered plugins and then calls the initialize method of the plugin</p>
</div>
<div class="content"><div class='highlight'><pre> initializePlugin (plugin) {
<span class="hljs-keyword">if</span> (!<span class="hljs-built_in">Object</span>.keys(<span class="hljs-keyword">this</span>.allowed_plugins).includes(plugin.__name__)) {
<span class="hljs-comment">/* Don't initialize disallowed plugins. */</span>
<span class="hljs-keyword">return</span>;
}
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">this</span>.initialized_plugins.includes(plugin.__name__)) {
<span class="hljs-comment">/* Don't initialize plugins twice, otherwise we get
* infinite recursion in overridden methods.
*/</span>
<span class="hljs-keyword">return</span>;
}
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> plugin.enabled === <span class="hljs-string">'boolean'</span> && plugin.enabled ||
plugin.enabled?.(<span class="hljs-keyword">this</span>.plugged) ||
plugin.enabled == <span class="hljs-literal">null</span>) { <span class="hljs-comment">// isNil</span>
<span class="hljs-built_in">Object</span>.assign(plugin, <span class="hljs-keyword">this</span>.properties);
<span class="hljs-keyword">if</span> (plugin.dependencies) {
<span class="hljs-keyword">this</span>.loadPluginDependencies(plugin);
}
<span class="hljs-keyword">this</span>.applyOverrides(plugin);
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> plugin.initialize === <span class="hljs-string">"function"</span>) {
plugin.initialize.bind(plugin)(<span class="hljs-keyword">this</span>);
}
<span class="hljs-keyword">this</span>.initialized_plugins.push(plugin.__name__);
}
}</pre></div></div>
</li>
<li id="section-10">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-10">¶</a>
</div>
<p><code>registerPlugin</code> registers (or inserts, if you’d like) a plugin,
by adding it to the <code>plugins</code> map on the PluginSocket instance.</p>
</div>
<div class="content"><div class='highlight'><pre> registerPlugin (name, plugin) {
<span class="hljs-keyword">if</span> (name <span class="hljs-keyword">in</span> <span class="hljs-keyword">this</span>.plugins) {
<span class="hljs-keyword">throw</span> <span class="hljs-keyword">new</span> <span class="hljs-built_in">Error</span>(<span class="hljs-string">'Error: Plugin name '</span>+name+<span class="hljs-string">' is already taken'</span>);
}
plugin.__name__ = name;
<span class="hljs-keyword">this</span>.plugins[name] = plugin;
}</pre></div></div>
</li>
<li id="section-11">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-11">¶</a>
</div>
<p><code>initializePlugins</code> should get called once all plugins have been
registered. It will then iterate through all the plugins, calling
<code>initializePlugin</code> for each.
The passed in properties variable is an object with attributes and methods
which will be attached to the plugins.</p>
</div>
<div class="content"><div class='highlight'><pre> initializePlugins (properties={}, whitelist=[], blacklist=[]) {
<span class="hljs-keyword">if</span> (!<span class="hljs-built_in">Object</span>.keys(<span class="hljs-keyword">this</span>.plugins).length) {
<span class="hljs-keyword">return</span>;
}
<span class="hljs-keyword">this</span>.properties = properties;
<span class="hljs-keyword">this</span>.allowed_plugins = {};
<span class="hljs-keyword">for</span> (<span class="hljs-keyword">const</span> [key, plugin] <span class="hljs-keyword">of</span> <span class="hljs-built_in">Object</span>.entries(<span class="hljs-keyword">this</span>.plugins)) {
<span class="hljs-keyword">if</span> ((!whitelist.length || whitelist.includes(key)) && !blacklist.includes(key)) {
<span class="hljs-keyword">this</span>.allowed_plugins[key] = plugin;
}
}
<span class="hljs-built_in">Object</span>.values(<span class="hljs-keyword">this</span>.allowed_plugins).forEach(<span class="hljs-function"><span class="hljs-params">o</span> =></span> <span class="hljs-keyword">this</span>.initializePlugin(o));
}
}
<span class="hljs-function"><span class="hljs-keyword">function</span> <span class="hljs-title">enable</span> (<span class="hljs-params">object, name, attrname</span>) </span>{</pre></div></div>
</li>
<li id="section-12">
<div class="annotation">
<div class="pilwrap ">
<a class="pilcrow" href="#section-12">¶</a>
</div>
<p>Call the <code>enable</code> method to make an object pluggable</p>
<p>It takes three parameters:</p>
<ul>
<li><code>object</code>: The object that gets made pluggable.</li>
<li><code>name</code>: The string name by which the now pluggable object
may be referenced on the <strong>super</strong> obj (in overrides).
The default value is “plugged”.</li>
<li><code>attrname</code>: The string name of the attribute on the now
pluggable object, which refers to the PluginSocket instance
that gets created.</li>
</ul>
</div>
<div class="content"><div class='highlight'><pre> <span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> attrname === <span class="hljs-string">"undefined"</span>) {
attrname = <span class="hljs-string">"pluginSocket"</span>;
}
<span class="hljs-keyword">if</span> (<span class="hljs-keyword">typeof</span> name === <span class="hljs-string">'undefined'</span>) {
name = <span class="hljs-string">'plugged'</span>;
}
object[attrname] = <span class="hljs-keyword">new</span> PluginSocket(object, name);
<span class="hljs-keyword">return</span> object;
}
<span class="hljs-keyword">export</span> {
enable
};
<span class="hljs-keyword">export</span> <span class="hljs-keyword">default</span> {
enable
};</pre></div></div>
</li>
</ul>
</div>
</body>
</html>
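Editor's sketch pulling the API documented above into one runnable example (the `closured` object, the plugin name and the import path are illustrative):
import pluggable from './dist/pluggable.js';

const closured = { greet () { return 'hello'; } };
pluggable.enable(closured, 'app');                 // attaches closured.pluginSocket

closured.pluginSocket.registerPlugin('shouty', {
    overrides: {
        // The original method stays reachable through this.__super__.greet
        greet () { return this.__super__.greet().toUpperCase(); }
    },
    initialize (socket) {
        // socket.plugged is the object that was made pluggable
    }
});
closured.pluginSocket.initializePlugins();
closured.greet();                                  // "HELLO"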
<|start_filename|>dist/pluggable.js<|end_filename|>
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = global || self, factory(global.pluggable = {}));
}(this, (function (exports) { 'use strict';
function _typeof(obj) {
"@babel/helpers - typeof";
if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
_typeof = function (obj) {
return typeof obj;
};
} else {
_typeof = function (obj) {
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};
}
return _typeof(obj);
}
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
}
function _defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
function _createClass(Constructor, protoProps, staticProps) {
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
if (staticProps) _defineProperties(Constructor, staticProps);
return Constructor;
}
function _slicedToArray(arr, i) {
return _arrayWithHoles(arr) || _iterableToArrayLimit(arr, i) || _unsupportedIterableToArray(arr, i) || _nonIterableRest();
}
function _arrayWithHoles(arr) {
if (Array.isArray(arr)) return arr;
}
function _iterableToArrayLimit(arr, i) {
var _i = arr && (typeof Symbol !== "undefined" && arr[Symbol.iterator] || arr["@@iterator"]);
if (_i == null) return;
var _arr = [];
var _n = true;
var _d = false;
var _s, _e;
try {
for (_i = _i.call(arr); !(_n = (_s = _i.next()).done); _n = true) {
_arr.push(_s.value);
if (i && _arr.length === i) break;
}
} catch (err) {
_d = true;
_e = err;
} finally {
try {
if (!_n && _i["return"] != null) _i["return"]();
} finally {
if (_d) throw _e;
}
}
return _arr;
}
function _unsupportedIterableToArray(o, minLen) {
if (!o) return;
if (typeof o === "string") return _arrayLikeToArray(o, minLen);
var n = Object.prototype.toString.call(o).slice(8, -1);
if (n === "Object" && o.constructor) n = o.constructor.name;
if (n === "Map" || n === "Set") return Array.from(o);
if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);
}
function _arrayLikeToArray(arr, len) {
if (len == null || len > arr.length) len = arr.length;
for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];
return arr2;
}
function _nonIterableRest() {
throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
}
/*
____ __ __ __ _
/ __ \/ /_ __ ___ ___ ____ _/ /_ / /__ (_)____
/ /_/ / / / / / __ \/ __ \/ __/ / __ \/ / _ \ / / ___/
/ ____/ / /_/ / /_/ / /_/ / /_/ / /_/ / / __/ / (__ )
/_/ /_/\__,_/\__, /\__, /\__/_/_.___/_/\___(_)_/ /____/
/____//____/ /___/
*/
  // Pluggable.js lets you make your JavaScript code pluggable while still
  // keeping sensitive objects and data private through closures.
// `wrappedOverride` creates a partially applied wrapper function
// that makes sure to set the proper super method when the
// overriding method is called. This is done to enable
// chaining of plugin methods, all the way up to the
// original method.
function wrappedOverride(key, value, super_method, default_super) {
if (typeof super_method === "function") {
if (typeof this.__super__ === "undefined") {
/* We're not on the context of the plugged object.
* This can happen when the overridden method is called via
* an event handler or when it's a constructor.
*
* In this case, we simply tack on the __super__ obj.
*/
this.__super__ = default_super;
}
this.__super__[key] = super_method.bind(this);
}
for (var _len = arguments.length, args = new Array(_len > 4 ? _len - 4 : 0), _key = 4; _key < _len; _key++) {
args[_key - 4] = arguments[_key];
}
return value.apply(this, args);
} // The `PluginSocket` class contains the plugin architecture, and gets
// created whenever `pluggable.enable(obj);` is called on the object
// that you want to make pluggable.
// You can also see it as the thing into which the plugins are plugged.
// It takes two parameters, first, the object being made pluggable, and
// then the name by which the pluggable object may be referenced on the
// __super__ object (inside overrides).
var PluginSocket = /*#__PURE__*/function () {
function PluginSocket(plugged, name) {
_classCallCheck(this, PluginSocket);
this.name = name;
this.plugged = plugged;
if (typeof this.plugged.__super__ === 'undefined') {
this.plugged.__super__ = {};
} else if (typeof this.plugged.__super__ === 'string') {
this.plugged.__super__ = {
'__string__': this.plugged.__super__
};
}
this.plugged.__super__[name] = this.plugged;
this.plugins = {};
this.initialized_plugins = [];
} // `_overrideAttribute` overrides an attribute on the original object
// (the thing being plugged into).
//
// If the attribute being overridden is a function, then the original
// function will still be available via the `__super__` attribute.
//
// If the same function is being overridden multiple times, then
// the original function will be available at the end of a chain of
// functions, starting from the most recent override, all the way
// back to the original function, each being referenced by the
// previous' __super__ attribute.
//
// For example:
//
// `plugin2.MyFunc.__super__.myFunc => plugin1.MyFunc.__super__.myFunc => original.myFunc`
_createClass(PluginSocket, [{
key: "_overrideAttribute",
value: function _overrideAttribute(key, plugin) {
var value = plugin.overrides[key];
if (typeof value === "function") {
var default_super = {};
default_super[this.name] = this.plugged;
var super_method = this.plugged[key];
this.plugged[key] = function () {
for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
args[_key2] = arguments[_key2];
}
return wrappedOverride.apply(this, [key, value, super_method, default_super].concat(args));
};
} else {
this.plugged[key] = value;
}
}
}, {
key: "_extendObject",
value: function _extendObject(obj, attributes) {
var _this = this;
if (!obj.prototype.__super__) {
obj.prototype.__super__ = {};
obj.prototype.__super__[this.name] = this.plugged;
}
var _loop = function _loop() {
var _Object$entries$_i = _slicedToArray(_Object$entries[_i], 2),
key = _Object$entries$_i[0],
value = _Object$entries$_i[1];
if (key === 'events') {
obj.prototype[key] = Object.assign(value, obj.prototype[key]);
} else if (typeof value === 'function') {
// We create a partially applied wrapper function, that
// makes sure to set the proper super method when the
// overriding method is called. This is done to enable
// chaining of plugin methods, all the way up to the
// original method.
var default_super = {};
default_super[_this.name] = _this.plugged;
var super_method = obj.prototype[key];
obj.prototype[key] = function () {
for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
args[_key3] = arguments[_key3];
}
return wrappedOverride.apply(this, [key, value, super_method, default_super].concat(args));
};
} else {
obj.prototype[key] = value;
}
};
for (var _i = 0, _Object$entries = Object.entries(attributes); _i < _Object$entries.length; _i++) {
_loop();
}
    } // Plugins can specify dependencies (by means of the
    // `dependencies` list attribute), which refer to other plugins
    // that will be initialized first, before the plugin itself gets initialized.
//
// If `strict_plugin_dependencies` is set to `false` (on the object being
// made pluggable), then no error will be thrown if any of these plugins aren't
// available.
}, {
key: "loadPluginDependencies",
value: function loadPluginDependencies(plugin) {
var _plugin$dependencies,
_this2 = this;
(_plugin$dependencies = plugin.dependencies) === null || _plugin$dependencies === void 0 ? void 0 : _plugin$dependencies.forEach(function (name) {
var dep = _this2.plugins[name];
if (dep) {
var _dep$dependencies;
if ((_dep$dependencies = dep.dependencies) !== null && _dep$dependencies !== void 0 && _dep$dependencies.includes(plugin.__name__)) {
/* FIXME: circular dependency checking is only one level deep. */
throw "Found a circular dependency between the plugins \"" + plugin.__name__ + "\" and \"" + name + "\"";
}
_this2.initializePlugin(dep);
} else {
_this2.throwUndefinedDependencyError("Could not find dependency \"" + name + "\" " + "for the plugin \"" + plugin.__name__ + "\". " + "If it's needed, make sure it's loaded by require.js");
}
});
}
}, {
key: "throwUndefinedDependencyError",
value: function throwUndefinedDependencyError(msg) {
if (this.plugged.strict_plugin_dependencies) {
throw msg;
} else {
if (console.warn) {
console.warn(msg);
} else {
console.log(msg);
}
}
} // `applyOverrides` is called by initializePlugin. It applies any
// and all overrides of methods or Backbone views and models that
// are defined on any of the plugins.
}, {
key: "applyOverrides",
value: function applyOverrides(plugin) {
var _this3 = this;
Object.keys(plugin.overrides || {}).forEach(function (key) {
var override = plugin.overrides[key];
if (_typeof(override) === "object") {
if (typeof _this3.plugged[key] === 'undefined') {
_this3.throwUndefinedDependencyError("Plugin \"".concat(plugin.__name__, "\" tried to override \"").concat(key, "\" but it's not found."));
} else {
_this3._extendObject(_this3.plugged[key], override);
}
} else {
_this3._overrideAttribute(key, plugin);
}
});
} // `initializePlugin` applies the overrides (if any) defined on all
// the registered plugins and then calls the initialize method of the plugin
}, {
key: "initializePlugin",
value: function initializePlugin(plugin) {
var _plugin$enabled;
if (!Object.keys(this.allowed_plugins).includes(plugin.__name__)) {
/* Don't initialize disallowed plugins. */
return;
}
if (this.initialized_plugins.includes(plugin.__name__)) {
/* Don't initialize plugins twice, otherwise we get
* infinite recursion in overridden methods.
*/
return;
}
if (typeof plugin.enabled === 'boolean' && plugin.enabled || (_plugin$enabled = plugin.enabled) !== null && _plugin$enabled !== void 0 && _plugin$enabled.call(plugin, this.plugged) || plugin.enabled == null) {
// isNil
Object.assign(plugin, this.properties);
if (plugin.dependencies) {
this.loadPluginDependencies(plugin);
}
this.applyOverrides(plugin);
if (typeof plugin.initialize === "function") {
plugin.initialize.bind(plugin)(this);
}
this.initialized_plugins.push(plugin.__name__);
}
} // `registerPlugin` registers (or inserts, if you'd like) a plugin,
// by adding it to the `plugins` map on the PluginSocket instance.
}, {
key: "registerPlugin",
value: function registerPlugin(name, plugin) {
if (name in this.plugins) {
throw new Error('Error: Plugin name ' + name + ' is already taken');
}
plugin.__name__ = name;
this.plugins[name] = plugin;
} // `initializePlugins` should get called once all plugins have been
// registered. It will then iterate through all the plugins, calling
// `initializePlugin` for each.
// The passed in properties variable is an object with attributes and methods
// which will be attached to the plugins.
}, {
key: "initializePlugins",
value: function initializePlugins() {
var _this4 = this;
var properties = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var whitelist = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
var blacklist = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : [];
if (!Object.keys(this.plugins).length) {
return;
}
this.properties = properties;
this.allowed_plugins = {};
for (var _i2 = 0, _Object$entries2 = Object.entries(this.plugins); _i2 < _Object$entries2.length; _i2++) {
var _Object$entries2$_i = _slicedToArray(_Object$entries2[_i2], 2),
key = _Object$entries2$_i[0],
plugin = _Object$entries2$_i[1];
if ((!whitelist.length || whitelist.includes(key)) && !blacklist.includes(key)) {
this.allowed_plugins[key] = plugin;
}
}
Object.values(this.allowed_plugins).forEach(function (o) {
return _this4.initializePlugin(o);
});
}
}]);
return PluginSocket;
}();
function enable(object, name, attrname) {
// Call the `enable` method to make an object pluggable
//
// It takes three parameters:
// - `object`: The object that gets made pluggable.
// - `name`: The string name by which the now pluggable object
// may be referenced on the __super__ obj (in overrides).
// The default value is "plugged".
// - `attrname`: The string name of the attribute on the now
// pluggable object, which refers to the PluginSocket instance
// that gets created.
if (typeof attrname === "undefined") {
attrname = "pluginSocket";
}
if (typeof name === 'undefined') {
name = 'plugged';
}
object[attrname] = new PluginSocket(object, name);
return object;
}
var pluggable = {
enable: enable
};
exports.default = pluggable;
exports.enable = enable;
Object.defineProperty(exports, '__esModule', { value: true });
})));
//# sourceMappingURL=pluggable.js.map
<|start_filename|>tests/app.js<|end_filename|>
const app = {};
import pluggable from '../dist/pluggable.js';
function initialize (whitelist=[], blacklist=[]) {
app.pluginSocket.initializePlugins({}, whitelist, blacklist);
}
function registerPlugin (name, plugin) {
app.pluginSocket.registerPlugin(name, plugin);
}
function getPluginSocket () {
// Normally this wouldn't be exposed, but we do so here for testing
// purposes.
return app.pluginSocket;
}
function getClosuredApp () {
// Normally this wouldn't be exposed, but we do so here for testing
// purposes.
return app;
}
// Calling `pluggable.enable` on the private `app` object will make it
// pluggable. Additionally, it will get the `pluginSocket` attribute, which
// refers to the object that the plugins get plugged into.
function makePluggable () {
pluggable.enable(app);
}
export {
initialize,
registerPlugin,
getClosuredApp,
getPluginSocket,
makePluggable
};
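Editor's sketch of exercising the helpers exported above (the plugin name, the `label` attribute and the import path are made up for illustration):
import { makePluggable, registerPlugin, initialize, getClosuredApp } from './app.js';

makePluggable();                        // gives the private app its pluginSocket
registerPlugin('labeller', {
    initialize (socket) {
        // socket.plugged is the closured app object
        socket.plugged.label = 'plugged-in';
    }
});
initialize();                           // empty whitelist/blacklist: run all plugins
getClosuredApp().label;                 // "plugged-in"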
| jcbrand/pluggable.js |
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/NetworkManager.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.util.Log;
import java.util.List;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import ru.yandex.qatools.wifitool.utils.WifiConfigurationBuilder;
/**
* Sets up network.
*/
class NetworkManager {
static final int NO_ID = -1;
@Nonnull
private final WifiManager mWifiManager;
@Inject
NetworkManager(WifiManager wifiManager) {
mWifiManager = wifiManager;
}
/**
* Get pre-configured wifi network or add a new one.
* @param params Network ssid and security parameters.
* @return NetworkId of a network.
* @throws Exception if network could not be set up.
*/
int createNetwork(Params params) throws Exception {
Log.d(Tag.NAME, "Get connected network id...");
int connectedNetId = getConnectedNetworkWithSsid(params.quotedSsid);
if (connectedNetId != NO_ID) {
Log.d(Tag.NAME, "Configured network found. It is connected");
return connectedNetId;
}
int netId = getConfiguredNetworkId(params);
if (netId == NO_ID) {
Log.d(Tag.NAME, "Configured network not found");
} else {
            // An updated network configuration is unable to connect on some devices.
            // Removing the network and adding it again is quite fast and reliable enough.
Log.d(Tag.NAME, "Configured network found. It is not connected");
Log.d(Tag.NAME, "Removing configured network");
if (!mWifiManager.removeNetwork(netId)) {
throw new IllegalStateException("Unable to remove existing network");
}
}
return addNetwork(params);
}
private int getConnectedNetworkWithSsid(String maskedSsid) {
WifiInfo connectionInfo = mWifiManager.getConnectionInfo();
if (connectionInfo != null && maskedSsid.equals(connectionInfo.getSSID())) {
return connectionInfo.getNetworkId();
} else {
return NO_ID;
}
}
private int getConfiguredNetworkId(Params params) {
List<WifiConfiguration> networks = mWifiManager.getConfiguredNetworks();
for (WifiConfiguration network : networks) {
if (params.quotedSsid.equals(network.SSID)) {
return network.networkId;
}
}
return NO_ID;
}
private int addNetwork(Params params) {
Log.d(Tag.NAME, "Adding network");
WifiConfiguration wfc = WifiConfigurationBuilder.create(params);
int result = mWifiManager.addNetwork(wfc);
if (result == NO_ID) {
throw new IllegalStateException("Could not add network");
}
Log.d(Tag.NAME, "Network added");
return result;
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/Injector.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import javax.inject.Singleton;
import dagger.Component;
@Singleton
@Component(modules = IntentModule.class)
interface Injector {
void inject(WifiIntentService context);
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/utils/ConnectivityChecker.java<|end_filename|>
package ru.yandex.qatools.wifitool.utils;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.util.Log;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import ru.yandex.qatools.wifitool.Tag;
public class ConnectivityChecker {
@Nonnull
private final ConnectivityManager mConnectivityManager;
@Nonnull
private final WifiManager mWifiManager;
@Inject
public ConnectivityChecker(ConnectivityManager connectivityManager, WifiManager wifiManager) {
mConnectivityManager = connectivityManager;
mWifiManager = wifiManager;
}
public boolean isWifiNetworkConnected(int netId) {
NetworkInfo network = mConnectivityManager.getActiveNetworkInfo();
if (network == null) {
return false;
}
if (network.getType() != ConnectivityManager.TYPE_WIFI) {
return false;
}
WifiInfo wifiInfo = mWifiManager.getConnectionInfo();
if (wifiInfo == null) {
Log.d(Tag.NAME, "Wifi is not connected");
return false;
}
Log.d(Tag.NAME, "Wifi supplicant state: " + wifiInfo.getSupplicantState());
return network.isConnected() && wifiInfo.getNetworkId() == netId;
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/package-info.java<|end_filename|>
@ParametersAreNonnullByDefault
package ru.yandex.qatools.wifitool;
import javax.annotation.ParametersAreNonnullByDefault;
<|start_filename|>wifitool/src/test/java/ru/yandex/qatools/wifitool/ParamsValidatorTest.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static ru.yandex.qatools.wifitool.TestData.SOME_PASS;
import static ru.yandex.qatools.wifitool.TestData.SOME_SSID;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class ParamsValidatorTest {
private static final String SOME_STRING = "Foo";
private static final int NEGATIVE = -1;
@Test
public void noSsid_Invalid() throws Exception {
Params params = new Params(null, SOME_PASS, ParamNames.SECURITY_WPA, 0, 0);
assertFalse("SSID absence must be invalid",
new ParamsValidator(params).isValid);
}
@Test
    public void noSecurity_NoPassword_Valid() throws Exception {
        Params params = new Params(SOME_SSID, null, null, 0, 0);
assertTrue("No password must be valid with unsecure WiFi",
new ParamsValidator(params).isValid);
}
@Test
public void hasSecurity_NoPassword_Invalid() throws Exception {
Params params = new Params(SOME_SSID, null, ParamNames.SECURITY_WPA, 0, 0);
assertFalse("Password must be specified when securityString is specified",
new ParamsValidator(params).isValid);
}
@Test
public void hasSecurity_HasPassword_Valid() throws Exception {
Params params = new Params(SOME_SSID, SOME_PASS, ParamNames.SECURITY_WPA, 0, 0);
assertTrue("Security with password must be valid",
new ParamsValidator(params).isValid);
}
@Test
public void unknownSecurity_Invalid() throws Exception {
Params params = new Params(SOME_SSID, SOME_PASS, SOME_STRING, 0, 0);
assertFalse("Only WEP|WPA securityString can be valid",
new ParamsValidator(params).isValid);
}
@Test
public void negativeRetryCount_Invalid() throws Exception {
Params params = new Params(SOME_SSID, SOME_PASS, ParamNames.SECURITY_WPA, NEGATIVE, 0);
assertFalse("Retry count must be greater than or equal to 0",
new ParamsValidator(params).isValid);
}
@Test
public void negativeRetryDelay_Invalid() throws Exception {
Params params = new Params(SOME_SSID, SOME_PASS, ParamNames.SECURITY_WPA, 0, NEGATIVE);
assertFalse("Retry delay must be greater than or equal to 0",
new ParamsValidator(params).isValid);
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/Tag.java<|end_filename|>
package ru.yandex.qatools.wifitool;
public class Tag {
public static final String NAME = "WifiTool";
static final String SUCCESS = NAME + ":Success";
static final String FAIL = NAME + ":Fail";
private Tag() {}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/ParamsValidator.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nonnull;
class ParamsValidator {
@Nonnull
private static final List<String> VALID_SECURITY = new ArrayList<>();
static {
VALID_SECURITY.add(null);
VALID_SECURITY.add(ParamNames.SECURITY_WEP);
VALID_SECURITY.add(ParamNames.SECURITY_WPA);
}
final String message;
final boolean isValid;
ParamsValidator(Params params) {
if (params.retryCount < 0) {
message = "Retry count must be greater than or equal to 0";
isValid = false;
return;
}
if (params.retryDelay < 0) {
message = "Retry delay must be greater than or equal to 0";
isValid = false;
return;
}
if (params.quotedSsid == null) {
message = "ssid not specified";
isValid = false;
return;
}
if (!VALID_SECURITY.contains(params.securityString)) {
message = "Unknown securityString value '" + params.securityString + "'";
isValid = false;
return;
}
if (params.pass == null && params.securityString != null) {
message = "pass not specified while securityString is '" + params.securityString + "'";
isValid = false;
return;
}
isValid = true;
message = null;
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/utils/WifiStates.java<|end_filename|>
package ru.yandex.qatools.wifitool.utils;
import android.net.wifi.WifiManager;
import android.util.SparseArray;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class WifiStates {
@Nonnull
private static final SparseArray<String> mWifiStates = new SparseArray<>();
static {
mWifiStates.append(WifiManager.WIFI_STATE_DISABLED, "WIFI_STATE_DISABLED");
mWifiStates.append(WifiManager.WIFI_STATE_DISABLING, "WIFI_STATE_DISABLING");
mWifiStates.append(WifiManager.WIFI_STATE_ENABLED, "WIFI_STATE_ENABLED");
mWifiStates.append(WifiManager.WIFI_STATE_ENABLING, "WIFI_STATE_ENABLING");
mWifiStates.append(WifiManager.WIFI_STATE_UNKNOWN, "WIFI_STATE_UNKNOWN");
}
private WifiStates() {}
@Nullable
public static String getName(int wifiState) {
return mWifiStates.get(wifiState);
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/ConnectivityMonitor.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.net.ConnectivityManager;
import android.net.wifi.WifiManager;
import android.util.Log;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import bolts.CancellationToken;
import bolts.Task;
import bolts.TaskCompletionSource;
import ru.yandex.qatools.wifitool.utils.ConnectivityChecker;
import static junit.framework.Assert.assertNotNull;
/**
* Waits for network to get connected.
* Checks connectivity on ConnectivityManager and WifiManager broadcasts.
*/
class ConnectivityMonitor {
@Nonnull
private final IntentFilter mIntentFilter = new IntentFilter();
@Nonnull
private final Context mContext;
@Nonnull
private final ConnectivityChecker mConnectivityChecker;
@Nullable
private BroadcastReceiver mBroadcastReceiver;
@Nonnull
private final TaskCompletionSource<Void> mCompletion = new TaskCompletionSource<>();
@Inject
ConnectivityMonitor(Context context, ConnectivityChecker connectivityChecker) {
mContext = context;
mConnectivityChecker = connectivityChecker;
mIntentFilter.addAction(WifiManager.WIFI_STATE_CHANGED_ACTION);
mIntentFilter.addAction(ConnectivityManager.CONNECTIVITY_ACTION);
}
/**
* Wait for network to get connectivity.
*/
@Nonnull
Task<Void> wait(final int netId, final CancellationToken timeoutToken) {
if (mConnectivityChecker.isWifiNetworkConnected(netId)) {
Log.d(Tag.NAME, "Wifi network is connected");
return Task.forResult(null);
}
register(netId);
timeoutToken.register(() -> {
if (mCompletion.getTask().isCompleted()) {
return;
}
Log.d(Tag.NAME, "Connectivity check timed out");
mCompletion.setError(new Exception("Connectivity check timed out"));
});
return mCompletion.getTask().continueWithTask(
task -> {
unregister();
return task;
}
);
}
private synchronized void register(final int netId) {
Log.d(Tag.NAME, "Register network status receiver");
mBroadcastReceiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (mConnectivityChecker.isWifiNetworkConnected(netId)) {
Log.d(Tag.NAME, "Network has been connected");
mCompletion.trySetResult(null);
}
}
};
mContext.registerReceiver(mBroadcastReceiver, mIntentFilter);
}
private synchronized void unregister() {
Log.d(Tag.NAME, "Unregister network status receiver");
assertNotNull("Receiver must be registered before unregister",
mBroadcastReceiver);
mContext.unregisterReceiver(mBroadcastReceiver);
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/utils/WifiManagerException.java<|end_filename|>
package ru.yandex.qatools.wifitool.utils;
public class WifiManagerException extends RuntimeException {
public WifiManagerException(String message) {
super(message);
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/NoDisplayActivity.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.app.Activity;
import android.os.Bundle;
public class NoDisplayActivity extends Activity {
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
finish();
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/utils/WifiConfigurationBuilder.java<|end_filename|>
package ru.yandex.qatools.wifitool.utils;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiConfiguration.AuthAlgorithm;
import android.net.wifi.WifiConfiguration.KeyMgmt;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import ru.yandex.qatools.wifitool.Params;
public class WifiConfigurationBuilder {
private WifiConfigurationBuilder() {
}
@Nonnull
public static WifiConfiguration create(Params params) {
WifiConfiguration wfc = new WifiConfiguration();
wfc.SSID = params.quotedSsid;
wfc.status = WifiConfiguration.Status.DISABLED;
wfc.priority = 100;
switch (params.security) {
case NONE:
setupUnsecure(wfc);
break;
case WEP:
setupWep(params, wfc);
break;
case WPA:
setupWpa(params, wfc);
break;
case UNKNOWN:
default:
throw new IllegalArgumentException("Unknown security value " + params.security +
". Argument was " + params.securityString);
}
return wfc;
}
private static void setupUnsecure(WifiConfiguration wfc) {
wfc.allowedKeyManagement.clear();
wfc.allowedKeyManagement.set(KeyMgmt.NONE);
wfc.allowedAuthAlgorithms.clear();
}
private static void setupWep(Params params, WifiConfiguration wfc) {
wfc.allowedKeyManagement.set(KeyMgmt.NONE);
wfc.allowedAuthAlgorithms.set(AuthAlgorithm.OPEN);
wfc.allowedAuthAlgorithms.set(AuthAlgorithm.SHARED);
if (isHexString(params.pass)) {
wfc.wepKeys[0] = params.pass;
} else {
wfc.wepKeys[0] = StringValues.enquote(params.pass);
}
wfc.wepTxKeyIndex = 0;
}
private static void setupWpa(Params params, WifiConfiguration wfc) {
wfc.preSharedKey = StringValues.enquote(params.pass);
}
private static boolean isHexString(@Nullable String pass) {
return pass != null && pass.matches("[0-9a-fA-F]+");
}
}
<|start_filename|>wifitool/src/test/java/ru/yandex/qatools/wifitool/TestData.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.content.Context;
import android.net.wifi.WifiConfiguration;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.Shadows;
import org.robolectric.shadows.ShadowApplication;
import java.util.List;
import javax.annotation.Nonnull;
import edu.emory.mathcs.backport.java.util.Arrays;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static ru.yandex.qatools.wifitool.utils.StringValues.enquote;
class TestData {
static final int NET_ID = 100;
static final String SOME_SSID = "Foo";
static final String SOME_PASS = "<PASSWORD>";
static final String SOME_SSID_QUOTED = enquote(SOME_SSID);
static final String NO_PASS = null;
static final String NO_SECURITY = null;
private static final int NO_RETRIES = 0;
private static final int NO_DELAY = 0;
@Nonnull
static final Params UNSECURE_PARAMS =
new Params(SOME_SSID, NO_PASS, NO_SECURITY, NO_RETRIES, NO_DELAY);
@Mock
@Nonnull
WifiManager wifiManager;
@Mock
private WifiInfo activeWifi;
@Mock
private WifiConfiguration configuredWifi;
TestData() {
MockitoAnnotations.initMocks(this);
}
@Nonnull
static WifiManager mockWifiManager() {
ShadowApplication shadowApplication = Shadows.shadowOf(RuntimeEnvironment.application);
WifiManager wifiManager = mock(WifiManager.class);
shadowApplication.setSystemService(Context.WIFI_SERVICE, wifiManager);
return wifiManager;
}
void whenNetworkIsConfigured() {
configuredWifi.networkId = NET_ID;
configuredWifi.SSID = SOME_SSID_QUOTED;
List<WifiConfiguration> configurations =
Arrays.asList(new WifiConfiguration[]{configuredWifi});
doReturn(configurations).when(wifiManager).getConfiguredNetworks();
}
void whenNetworkIsConnected() {
doReturn(NET_ID).when(activeWifi).getNetworkId();
doReturn(SOME_SSID_QUOTED).when(activeWifi).getSSID();
doReturn(activeWifi).when(wifiManager).getConnectionInfo();
}
}
<|start_filename|>wifitool/src/test/java/ru/yandex/qatools/wifitool/ConnectivityMonitorTest.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.content.Intent;
import android.net.ConnectivityManager;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.RuntimeEnvironment;
import org.robolectric.annotation.Config;
import javax.annotation.Nonnull;
import bolts.CancellationToken;
import bolts.CancellationTokenSource;
import bolts.Task;
import ru.yandex.qatools.wifitool.utils.ConnectivityChecker;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertTrue;
import static org.mockito.Mockito.doReturn;
import static org.robolectric.Shadows.shadowOf;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class ConnectivityMonitorTest {
@Nonnull
private final ConnectivityMonitor mConnectivityMonitor;
@Mock
private ConnectivityChecker mConnectivityChecker;
@Mock
private CancellationToken mCancellationToken;
public ConnectivityMonitorTest() {
MockitoAnnotations.initMocks(this);
mConnectivityMonitor = new ConnectivityMonitor(RuntimeEnvironment.application,
mConnectivityChecker);
}
@Test
public void whenConnected_Succeeds() throws InterruptedException {
whenConnected();
Task<Void> waitTask = mConnectivityMonitor.wait(TestData.NET_ID, mCancellationToken);
assertSucceeds(waitTask);
}
@Test
public void whenNotConnected_CancelFails() throws InterruptedException {
whenNotConnected();
CancellationTokenSource tokenSource = new CancellationTokenSource();
Task<Void> waitTask = mConnectivityMonitor.wait(TestData.NET_ID, tokenSource.getToken());
tokenSource.cancel();
assertFails(waitTask);
}
@Test
public void whenNotConnected_ConnectionBroadcastSucceeds() throws InterruptedException {
whenNotConnected();
Task<Void> waitTask = mConnectivityMonitor.wait(TestData.NET_ID, mCancellationToken);
whenConnected();
sendBroadcast();
assertSucceeds(waitTask);
}
private void sendBroadcast() {
Intent intent = new Intent();
intent.setAction(ConnectivityManager.CONNECTIVITY_ACTION);
shadowOf(RuntimeEnvironment.application).sendBroadcast(intent);
}
private void assertSucceeds(Task<Void> waitTask) throws InterruptedException {
waitTask.waitForCompletion();
assertFalse(waitTask.isFaulted());
}
private void assertFails(Task<Void> waitTask) throws InterruptedException {
waitTask.waitForCompletion();
assertTrue(waitTask.isFaulted());
}
private void whenConnected() {
doReturn(true).when(mConnectivityChecker).isWifiNetworkConnected(TestData.NET_ID);
}
private void whenNotConnected() {
doReturn(false).when(mConnectivityChecker).isWifiNetworkConnected(TestData.NET_ID);
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/ParamNames.java<|end_filename|>
package ru.yandex.qatools.wifitool;
/**
* Created by kzaikin on 19.08.16.
*/
class ParamNames {
static final String SSID = "ssid";
static final String PASS = "<PASSWORD>";
static final String SECURITY = "security";
static final String RETRY_COUNT = "retry_count";
static final String RETRY_DELAY = "retry_delay";
static final String SECURITY_WEP = "WEP";
static final String SECURITY_WPA = "WPA";
private ParamNames() {
}
}
<|start_filename|>wifitool/src/test/java/ru/yandex/qatools/wifitool/ConnectorTest.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.net.wifi.WifiManager;
import junit.framework.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import javax.annotation.Nonnull;
import bolts.CancellationToken;
import bolts.Task;
import ru.yandex.qatools.wifitool.utils.ConnectivityChecker;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static ru.yandex.qatools.wifitool.TestData.NET_ID;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class ConnectorTest {
@Nonnull
private final TestData mData;
@Mock
ConnectivityMonitor mConnectivityMonitor;
@Mock
ConnectivityChecker mConnectivityChecker;
@Mock
NetworkManager mNetworkManager;
private Connector mConnector;
public ConnectorTest() {
MockitoAnnotations.initMocks(this);
mData = new TestData();
mConnector = new Connector(mData.wifiManager, mConnectivityMonitor, mNetworkManager,
mConnectivityChecker);
}
@Test(expected = IllegalStateException.class)
public void unknownWifi_Throws() throws InterruptedException {
doReturn(WifiManager.WIFI_STATE_UNKNOWN).when(mData.wifiManager).getWifiState();
Task<Void> task = mConnector.connect(TestData.UNSECURE_PARAMS);
task.waitForCompletion();
}
@Test
public void enabledWifi_Succeeds() throws Exception {
doReturn(WifiManager.WIFI_STATE_ENABLED).when(mData.wifiManager).getWifiState();
doReturn(true).when(mConnectivityChecker).isWifiNetworkConnected(NET_ID);
doReturn(NET_ID).when(mNetworkManager).createNetwork(any(Params.class));
doReturn(Task.forResult(null)).when(mConnectivityMonitor)
.wait(anyInt(), any(CancellationToken.class));
Task<Void> task = mConnector.connect(TestData.UNSECURE_PARAMS);
task.waitForCompletion();
Assert.assertFalse("Should connect but got\n" + task.getError(),
task.isFaulted());
}
@Test
public void disabledWifi_EnablesWifi() throws Exception {
when(mData.wifiManager.getWifiState())
.thenReturn(WifiManager.WIFI_STATE_DISABLED);
Task<Void> task = mConnector.connect(TestData.UNSECURE_PARAMS);
task.waitForCompletion();
verify(mData.wifiManager).setWifiEnabled(true);
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/WifiIntentService.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.app.IntentService;
import android.content.Intent;
import android.util.Log;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import javax.inject.Provider;
import bolts.Continuation;
/**
* An {@link IntentService} subclass for handling asynchronous task requests in
* a service on a separate handler thread.
*/
public class WifiIntentService extends IntentService {
@Inject
Provider<RetryConnector> mRetryConnectorProvider;
public WifiIntentService() {
super("WifiIntentService");
Injector component = DaggerInjector.builder().intentModule(new IntentModule(this)).build();
component.inject(this);
}
@Override
protected void onHandleIntent(Intent intent) {
try {
handle(intent);
} catch (InterruptedException e) {
Log.e(Tag.FAIL, "Process got interrupted", e);
Thread.currentThread().interrupt();
}
}
void handle(Intent intent) throws InterruptedException {
Params params = Params.create(intent);
ParamsValidator validator = new ParamsValidator(params);
if (validator.isValid) {
Log.d(Tag.NAME, "Start connection");
RetryConnector retryConnector = mRetryConnectorProvider.get();
retryConnector.connect(params)
.continueWith(reportResult())
.waitForCompletion();
} else {
Log.i(Tag.FAIL, validator.message);
logUsage();
}
}
@Nonnull
private Continuation<Void, Void> reportResult() {
return task -> {
if (task.isFaulted()) {
Log.i(Tag.FAIL, task.getError().getMessage());
} else {
Log.i(Tag.SUCCESS, "Connected");
}
return null;
};
}
private void logUsage() {
Log.d(Tag.NAME, "Enable WiFi on device and connect to WiFi network.\n" +
"As soon as WiFi network is connected and IP is obtained, " + Tag.SUCCESS +
" is logged\n" +
"When connection fails, " + Tag.FAIL + " is logged.\n" +
"Possible failure reasons: WiFi can not be enabled, " +
"WiFi network can not be connected, IP can not be obtained." +
"Usage:\n" +
"adb shell am broadcast\n" +
" -n ru.yandex.qatools.wifitool/.Connect\n" +
" -e " + ParamNames.SSID + " SSID\n" +
" -e " + ParamNames.SECURITY + " [WEP|WPA]\n" +
" -e " + ParamNames.PASS + " password \n" +
" -e " + ParamNames.RETRY_COUNT + " number of connection retries. Default is 0\n" +
" -e " + ParamNames.RETRY_DELAY + " retry delay in milliseconds. " +
"Default is 10000\n" +
"Examples:\n" +
"adb shell am broadcast " +
" -n ru.yandex.qatools.wifitool/.Connect" +
" -e " + ParamNames.SSID + " SecureNet" +
" -e " + ParamNames.SECURITY + " WPA" +
" -e " + ParamNames.PASS + " <PASSWORD>" +
" -e " + ParamNames.RETRY_COUNT + " 3" +
" -e " + ParamNames.RETRY_DELAY + " 5\n" +
"adb shell am broadcast " +
" -n ru.yandex.qatools.wifitool/.Connect" +
" -e " + ParamNames.SSID + " UnsecureNet"
);
}
}
<|start_filename|>wifitool/src/test/java/ru/yandex/qatools/wifitool/NetworkManagerTest.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.net.wifi.WifiConfiguration;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import javax.annotation.Nonnull;
import static junit.framework.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn;
import static ru.yandex.qatools.wifitool.NetworkManager.NO_ID;
import static ru.yandex.qatools.wifitool.TestData.NET_ID;
import static ru.yandex.qatools.wifitool.TestData.UNSECURE_PARAMS;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class NetworkManagerTest {
@Nonnull
private final TestData mData;
@Nonnull
private NetworkManager mNetworkManager;
public NetworkManagerTest() {
MockitoAnnotations.initMocks(this);
mData = new TestData();
mNetworkManager = new NetworkManager(mData.wifiManager);
}
@Test
public void networkIsConfiguredAndConnected_ReturnsId() throws Exception {
mData.whenNetworkIsConnected();
assertEquals(NET_ID, mNetworkManager.createNetwork(UNSECURE_PARAMS));
}
@Test
public void networkIsConfiguredAndNotConnected_CanBeRemoved_ReturnsId() throws Exception {
mData.whenNetworkIsConfigured();
whenNetworkCanBeRemoved();
whenNetworkCanBeAdded();
assertEquals(NET_ID, mNetworkManager.createNetwork(UNSECURE_PARAMS));
}
@Test(expected = Exception.class)
public void networkIsConfiguredAndNotConnected_CantUpdate_Throws() throws Exception {
mData.whenNetworkIsConfigured();
assertEquals(NET_ID, mNetworkManager.createNetwork(UNSECURE_PARAMS));
}
@Test
public void networkIsNotConfigured_CanAdd_ReturnsId() throws Exception {
whenNetworkCanBeAdded();
assertEquals(NET_ID, mNetworkManager.createNetwork(UNSECURE_PARAMS));
}
@Test(expected = Exception.class)
public void networkIsNotConfigured_CantAdd_Throws() throws Exception {
whenNetworkCanNotBeAdded();
mNetworkManager.createNetwork(UNSECURE_PARAMS);
}
private void whenNetworkCanBeRemoved() {
doReturn(true).when(mData.wifiManager).removeNetwork(NET_ID);
}
private void whenNetworkCanBeAdded() {
doReturn(NET_ID).when(mData.wifiManager).addNetwork(any(WifiConfiguration.class));
}
private void whenNetworkCanNotBeAdded() {
doReturn(NO_ID).when(mData.wifiManager).addNetwork(any(WifiConfiguration.class));
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/IntentModule.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.wifi.WifiManager;
import javax.annotation.Nonnull;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
import static android.content.Context.WIFI_SERVICE;
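/**
* Dagger module that exposes the application {@link Context} and the
* system-level {@link WifiManager} and {@link ConnectivityManager} services
* as singletons for injection.
*/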
@Module
class IntentModule {
@Nonnull
private final Context mContext;
IntentModule(Context context) {
mContext = context;
}
@Provides
@Singleton
Context provideContext() {
return mContext;
}
@Provides
@Singleton
WifiManager provideWifiManager() {
return (WifiManager) mContext.getSystemService(WIFI_SERVICE);
}
@Provides
@Singleton
ConnectivityManager provideConnectivityManager() {
return (ConnectivityManager) mContext.getSystemService(Context.CONNECTIVITY_SERVICE);
}
}
<|start_filename|>wifitool/src/test/java/ru/yandex/qatools/wifitool/RetryConnectorTest.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.content.Context;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import javax.annotation.Nonnull;
import javax.inject.Provider;
import bolts.Task;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static ru.yandex.qatools.wifitool.TestData.NO_PASS;
import static ru.yandex.qatools.wifitool.TestData.NO_SECURITY;
import static ru.yandex.qatools.wifitool.TestData.SOME_SSID;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class RetryConnectorTest {
private Provider<Connector> mConnectorProvider;
@Mock
private Connector mConnector;
@Mock
private Context mContext;
public RetryConnectorTest() {
MockitoAnnotations.initMocks(this);
// mockito fails to mock javax.inject.Provider
mConnectorProvider = new Provider<Connector>() {
@Override
public Connector get() {
return mConnector;
}
};
}
@Test
public void zeroRetries_Connects() throws InterruptedException {
Params params = getRetryParams(0);
getRetryConnector().connect(params).waitForCompletion();
verify(mConnector).connect(any(Params.class));
}
@Nonnull
private RetryConnector getRetryConnector() {
return new RetryConnector(mConnectorProvider);
}
@Test
public void oneRetry_OnSuccessfulConnect_Connects1Time() throws InterruptedException {
whenConnectionSucceed();
Params params = getRetryParams(1);
getRetryConnector().connect(params).waitForCompletion();
verify(mConnector).connect(any(Params.class));
}
private void whenConnectionSucceed() {
Task<Object> successfulTask = Task.forResult(null);
doReturn(successfulTask).when(mConnector).connect(any(Params.class));
}
@Test
public void oneRetry_OnFailedConnect_Connects2Times() throws InterruptedException {
whenConnectionFail();
Params params = getRetryParams(1);
getRetryConnector().connect(params).waitForCompletion();
verify(mConnector, times(2)).connect(any(Params.class));
}
private void whenConnectionFail() {
Task<Object> faultedTask = Task.forError(new Exception());
doReturn(faultedTask).when(mConnector).connect(any(Params.class));
}
@Nonnull
private Params getRetryParams(int retryCount) {
return new Params(SOME_SSID, NO_PASS, NO_SECURITY, retryCount, 100);
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/utils/package-info.java<|end_filename|>
@ParametersAreNonnullByDefault
package ru.yandex.qatools.wifitool.utils;
import javax.annotation.ParametersAreNonnullByDefault;
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/Params.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.content.Intent;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import ru.yandex.qatools.wifitool.utils.StringValues;
/**
* Request parameters.
*/
public class Params {
@Nullable
public final String quotedSsid;
@Nullable
public final String pass;
@Nullable
public final String securityString;
@Nonnull
public final Security security;
final int retryCount;
final int retryDelay;
Params(@Nullable String ssid, @Nullable String pass, @Nullable String security,
int retryCount, int retryDelay) {
this.quotedSsid = StringValues.enquote(ssid);
this.pass = pass;
this.security = makeSecurity(security, pass);
this.securityString = security;
this.retryCount = retryCount;
this.retryDelay = retryDelay;
}
@Nonnull
static Params create(Intent intent) {
String ssid = intent.getStringExtra(ParamNames.SSID);
String pass = intent.getStringExtra(ParamNames.PASS);
String security = intent.getStringExtra(ParamNames.SECURITY);
int retryCount = getIntExtra(intent, ParamNames.RETRY_COUNT, 0);
int retryDelay = getIntExtra(intent, ParamNames.RETRY_DELAY, 10000);
return new Params(ssid, pass, security, retryCount, retryDelay);
}
private static int getIntExtra(Intent intent, String name, int defaultValue) {
if (intent.hasExtra(name)) {
return Integer.parseInt(intent.getStringExtra(name));
} else {
return defaultValue;
}
}
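/**
* Infers the {@link Security} mode from the request parameters: an explicit
* "WPA" or "WEP" value is honored case-insensitively, a missing value means
* WPA when a password is present and NONE otherwise, and any other value
* maps to UNKNOWN.
*/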
@Nonnull
static Security makeSecurity(@Nullable String security, @Nullable String pass) {
if (security == null) {
if (pass == null) {
return Security.NONE;
} else {
return Security.WPA;
}
}
if (ParamNames.SECURITY_WPA.equalsIgnoreCase(security)) {
return Security.WPA;
}
if (ParamNames.SECURITY_WEP.equalsIgnoreCase(security)) {
return Security.WEP;
}
return Security.UNKNOWN;
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/RetryConnector.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.util.Log;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import javax.inject.Provider;
import bolts.Task;
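/**
* Wraps {@link Connector} in a retry loop: the connection attempt is repeated
* until it succeeds or the configured retry count is exhausted, waiting the
* configured retry delay between failed attempts.
*/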
class RetryConnector {
@Nonnull
private Provider<Connector> mConnectorFactory;
private int mAttempt = 0;
private boolean mNeedRetry = true;
@Inject
RetryConnector(Provider<Connector> connectorProvider) {
mConnectorFactory = connectorProvider;
}
@Nonnull
Task<Void> connect(final Params params) throws InterruptedException {
Task<Void> task = Task.forResult(null);
return task.continueWhile(
() -> canRetry(params),
attempt -> {
Log.d(Tag.NAME, "Attempt " + mAttempt);
mAttempt++;
Connector connector = mConnectorFactory.get();
return connector.connect(params)
.continueWithTask(connection -> {
boolean faulted = connection.isFaulted();
mNeedRetry = faulted;
if (canRetry(params)) {
Log.d(Tag.NAME, "Attempt failed: " +
connection.getError().getMessage());
Log.d(Tag.NAME, "Delay for " + params.retryDelay + " ms");
return Task.delay(params.retryDelay);
} else {
return connection;
}
});
}
);
}
private boolean canRetry(Params params) {
return mNeedRetry && mAttempt <= params.retryCount;
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/Security.java<|end_filename|>
package ru.yandex.qatools.wifitool;
public enum Security {
NONE,
WEP,
WPA,
UNKNOWN
}
<|start_filename|>wifitool/src/test/java/ru/yandex/qatools/wifitool/ParamsTest.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.content.Intent;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import static org.junit.Assert.assertEquals;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class ParamsTest {
@Test
public void retryCount_IsParsed() throws Exception {
Intent intent = new Intent();
intent.putExtra(ParamNames.RETRY_COUNT, "100");
assertEquals(100, Params.create(intent).retryCount);
}
@Test
public void retryDelay_IsParsed() throws Exception {
Intent intent = new Intent();
intent.putExtra(ParamNames.RETRY_DELAY, "50");
assertEquals(50, Params.create(intent).retryDelay);
}
@Test
public void ssid_IsParsed() throws Exception {
Intent intent = new Intent();
intent.putExtra(ParamNames.SSID, TestData.SOME_SSID);
assertEquals("\"" + TestData.SOME_SSID + "\"", Params.create(intent).quotedSsid);
}
@Test
public void securityWpa_IsParsed() throws Exception {
Intent intent = new Intent();
intent.putExtra(ParamNames.SECURITY, "WPA");
assertEquals(Security.WPA, Params.create(intent).security);
}
@Test
public void securityWep_IsParsed() throws Exception {
Intent intent = new Intent();
intent.putExtra(ParamNames.SECURITY, "WEP");
assertEquals(Security.WEP, Params.create(intent).security);
}
@Test
public void password_IsParsed() throws Exception {
Intent intent = new Intent();
intent.putExtra(ParamNames.PASS, TestData.SOME_PASS);
assertEquals(TestData.SOME_PASS, Params.create(intent).pass);
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/utils/StringValues.java<|end_filename|>
package ru.yandex.qatools.wifitool.utils;
import javax.annotation.Nullable;
public class StringValues {
private StringValues() {}
@Nullable
public static String enquote(@Nullable String value) {
return value == null ? null : '\"' + value + '\"';
}
}
<|start_filename|>wifitool/src/test/java/ru/yandex/qatools/wifitool/WifiIntentServiceTest.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.content.Intent;
import android.net.wifi.WifiManager;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowLog;
import org.robolectric.util.ServiceController;
import javax.annotation.Nonnull;
import javax.inject.Provider;
import bolts.Continuation;
import bolts.Task;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.core.Is.is;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn;
import static ru.yandex.qatools.wifitool.TestData.SOME_SSID;
@RunWith(RobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class WifiIntentServiceTest {
private final WifiIntentService mIntentService;
@Mock
private Task<Void> mContinuationTask;
@Mock
private Task<Void> mConnectTask;
@Nonnull
private WifiManager mWifiManager = TestData.mockWifiManager();
@Mock
private RetryConnector mRetryConnector;
public WifiIntentServiceTest() throws InterruptedException {
MockitoAnnotations.initMocks(this);
ServiceController<WifiIntentService> controller =
Robolectric.buildService(WifiIntentService.class);
controller.create();
mIntentService = controller.get();
doReturn(mConnectTask).when(mRetryConnector).connect(any(Params.class));
doReturn(mContinuationTask).when(mConnectTask).continueWith(any(Continuation.class));
// mockito fails to mock javax.inject.Provider
mIntentService.mRetryConnectorProvider = new Provider<RetryConnector>() {
@Override
public RetryConnector get() {
return mRetryConnector;
}
};
}
@Test
public void invalidIntentParams_Fails() throws Exception {
Intent intent = new Intent();
mIntentService.onHandleIntent(intent);
Assert.assertThat(ShadowLog.getLogsForTag(Tag.FAIL), is(not(empty())));
}
@Test
public void validIntentParams_WifiUnknown_Fails() throws Exception {
Intent intent = new Intent();
intent.putExtra(ParamNames.SSID, SOME_SSID);
doReturn(WifiManager.WIFI_STATE_UNKNOWN).when(mWifiManager).getWifiState();
mIntentService.onHandleIntent(intent);
Assert.assertThat("Actual logs: " + ShadowLog.getLogs(),
ShadowLog.getLogsForTag(Tag.FAIL), is(not(empty())));
}
@Test
public void validIntentParams_WifiEnabled_Succeeds() throws Exception {
Intent intent = new Intent();
intent.putExtra(ParamNames.SSID, SOME_SSID);
doReturn(WifiManager.WIFI_STATE_ENABLED).when(mWifiManager).getWifiState();
mIntentService.onHandleIntent(intent);
Assert.assertThat("Actual logs: " + ShadowLog.getLogs(),
ShadowLog.getLogsForTag(Tag.FAIL), is(not(empty())));
}
}
<|start_filename|>wifitool/src/main/java/ru/yandex/qatools/wifitool/Connector.java<|end_filename|>
package ru.yandex.qatools.wifitool;
import android.net.wifi.WifiManager;
import android.util.Log;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import bolts.CancellationTokenSource;
import bolts.Continuation;
import bolts.Task;
import ru.yandex.qatools.wifitool.utils.ConnectivityChecker;
import ru.yandex.qatools.wifitool.utils.WifiManagerException;
import ru.yandex.qatools.wifitool.utils.WifiStates;
/**
* Runs sequence of actions required for connection.
*/
class Connector {
private static final int ENABLE_WIFI_TIMEOUT = 1000;
private static final long CONNECTIVITY_TIMEOUT = 5000;
@Nonnull
private final WifiManager mWifiManager;
@Nonnull
private final ConnectivityMonitor mConnectivityMonitor;
@Nonnull
private final NetworkManager mNetworkManager;
@Nonnull
private final ConnectivityChecker mConnectivityChecker;
@Inject
Connector(WifiManager wifiManager, ConnectivityMonitor connectivityMonitor,
NetworkManager networkManager, ConnectivityChecker connectivityChecker) {
mWifiManager = wifiManager;
mConnectivityMonitor = connectivityMonitor;
mNetworkManager = networkManager;
mConnectivityChecker = connectivityChecker;
}
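/**
* Connects to the WiFi network described by {@code params}: ensures WiFi is
* enabled, obtains (or creates) the network id, connects to that network and
* waits for connectivity, cancelling after the connectivity timeout.
* Throws {@link IllegalStateException} when the WiFi state is unknown.
*/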
@Nonnull
Task<Void> connect(final Params params) {
int wifiState = mWifiManager.getWifiState();
Log.d(Tag.NAME, WifiStates.getName(wifiState));
switch (wifiState) {
case WifiManager.WIFI_STATE_ENABLING:
case WifiManager.WIFI_STATE_DISABLED:
case WifiManager.WIFI_STATE_DISABLING:
case WifiManager.WIFI_STATE_ENABLED:
return enableWifi(params)
.onSuccess(getNetworkId())
.onSuccess(connectNetwork())
.onSuccessTask(waitConnectivity());
case WifiManager.WIFI_STATE_UNKNOWN:
default:
throw new IllegalStateException("WiFi state unknown. Please inspect the device");
}
}
@Nonnull
private Task<Params> enableWifi(final Params params) {
if (mWifiManager.getWifiState() == WifiManager.WIFI_STATE_ENABLED) {
return Task.forResult(params);
}
Log.d(Tag.NAME, "Setting WiFi enabled");
mWifiManager.setWifiEnabled(true);
return Task.delay(ENABLE_WIFI_TIMEOUT).continueWith(task -> {
int wifiState = mWifiManager.getWifiState();
Log.d(Tag.NAME, WifiStates.getName(wifiState));
if (wifiState == WifiManager.WIFI_STATE_ENABLED) {
return params;
}
throw new IllegalStateException("WiFi could not be enabled. Now " +
WifiStates.getName(wifiState));
});
}
/**
* Get existing network id or add network and get its id.
*
* @return Network id.
*/
@Nonnull
private Continuation<Params, Integer> getNetworkId() {
return task -> mNetworkManager.createNetwork(task.getResult());
}
@Nonnull
private Continuation<Integer, Integer> connectNetwork() {
return task -> {
Integer netId = task.getResult();
if (mConnectivityChecker.isWifiNetworkConnected(netId)) {
return netId;
}
if (!mWifiManager.disconnect()) {
throw new WifiManagerException("Could not disconnect WiFi");
}
if (!mWifiManager.enableNetwork(netId, true)) {
throw new WifiManagerException("Could not enable a configured network");
}
if (!mWifiManager.reconnect()) {
throw new WifiManagerException("Could not connect to a configured network");
}
return netId;
};
}
private Continuation<Integer, Task<Void>> waitConnectivity() {
return task -> {
CancellationTokenSource ts = new CancellationTokenSource();
ts.cancelAfter(CONNECTIVITY_TIMEOUT);
return mConnectivityMonitor.wait(task.getResult(), ts.getToken());
};
}
}
| artkoshelev/android-wifitool |
<|start_filename|>simple-web-app/src/main/java/org/mitre/web/HomeController.java<|end_filename|>
/*******************************************************************************
* Copyright 2014 The MITRE Corporation
* and the MIT Kerberos and Internet Trust Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.mitre.web;
import java.security.Principal;
import java.util.Locale;
import java.util.Set;
import javax.annotation.Resource;
import org.mitre.openid.connect.client.OIDCAuthenticationFilter;
import org.mitre.openid.connect.client.SubjectIssuerGrantedAuthority;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
/**
* Handles requests for the application home page.
*/
@Controller
public class HomeController {
private static final Logger logger = LoggerFactory.getLogger(HomeController.class);
// filter reference so we can get class names and things like that.
@Autowired
private OIDCAuthenticationFilter filter;
@Resource(name = "namedAdmins")
private Set<SubjectIssuerGrantedAuthority> admins;
/**
* Simply selects the home view to render by returning its name.
*/
@RequestMapping(value = "/", method = RequestMethod.GET)
public String home(Locale locale, Model model, Principal p) {
model.addAttribute("issuerServiceClass", filter.getIssuerService().getClass().getSimpleName());
model.addAttribute("serverConfigurationServiceClass", filter.getServerConfigurationService().getClass().getSimpleName());
model.addAttribute("clientConfigurationServiceClass", filter.getClientConfigurationService().getClass().getSimpleName());
model.addAttribute("authRequestOptionsServiceClass", filter.getAuthRequestOptionsService().getClass().getSimpleName());
model.addAttribute("authRequestUriBuilderClass", filter.getAuthRequestUrlBuilder().getClass().getSimpleName());
model.addAttribute("admins", admins);
return "home";
}
@RequestMapping("/user")
@PreAuthorize("hasRole('ROLE_USER')")
public String user(Principal p) {
return "user";
}
@RequestMapping("/open")
public String open(Principal p) {
return "open";
}
@RequestMapping("/admin")
@PreAuthorize("hasRole('ROLE_ADMIN')")
public String admin(Model model, Principal p) {
model.addAttribute("admins", admins);
return "admin";
}
@RequestMapping("/login")
public String login(Principal p) {
return "login";
}
}
<|start_filename|>simple-web-app/src/main/webapp/WEB-INF/views/admin.jsp<|end_filename|>
<%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %>
<%@ taglib prefix="security" uri="http://www.springframework.org/security/tags" %>
<%@ taglib prefix="o" tagdir="/WEB-INF/tags"%>
<o:header title="Admin"/>
<o:topbar pageName="Admin"/>
<div class="container-fluid main">
<div class="row-fluid">
<div class="span10 offset1">
<h1>Hello ${ userInfo.name }</h1>
<div>
<p>This page requires that the user be logged in with a valid account and the <code>ROLE_ADMIN</code> Spring Security authority.
If you are reading this page, <span class="text-success">you are currently logged in as an administrator</span>.</p>
<p>The authorization provider will assign your account a set of authorities depending on how it's configured.
Your current login has the following Spring Security authorities:</p>
<ul>
<security:authentication property="authorities" var="authorities" />
<c:forEach items="${authorities}" var="auth">
<li><code>${ auth }</code></li>
</c:forEach>
</ul>
</div>
<div>
<h3>Administrators</h3>
<p>Logged in users are assigned the <code>ROLE_USER</code> authority by default, but the following users
(identified by issuer/subject pairs) will also be given <code>ROLE_ADMIN</code>:</p>
<table class="table table-striped table-hover span4">
<tr>
<th>Issuer</th>
<th>Subject</th>
</tr>
<c:forEach items="${ admins }" var="admin">
<tr>
<td>${ admin.issuer }</td>
<td>${ admin.subject }</td>
</tr>
</c:forEach>
</table>
</div>
</div>
</div>
</div>
<o:footer />
<|start_filename|>simple-web-app/src/main/webapp/WEB-INF/views/home.jsp<|end_filename|>
<%@ taglib uri="http://java.sun.com/jsp/jstl/core" prefix="c" %>
<%@ taglib prefix="security" uri="http://www.springframework.org/security/tags" %>
<%@ taglib prefix="o" tagdir="/WEB-INF/tags"%>
<%@ page session="false" %>
<o:header title="Home"/>
<o:topbar pageName="Home"/>
<div class="container-fluid main">
<div class="row-fluid">
<div class="span10 offset1">
<h1>
Hello world!
</h1>
<div>
<p class="well">
<security:authorize access="hasRole('ROLE_USER')">
<b><span class="text-success">You are currently logged in.</span></b>
</security:authorize>
<security:authorize access="!hasRole('ROLE_USER')">
<b><span class="text-error">You are <em>NOT</em> currently logged in.</span></b>
</security:authorize>
</p>
<p>This example application is configured with several pages requiring different levels of access.
This page does not require the user to be logged in. Use the tabs in the navbar above to navigate to
pages with different access requirements.
</p>
<ul>
<li><a href="user">User</a>, requires the user to be logged in with the <code>ROLE_USER</code> Spring Security authority.</li>
<li><a href="admin">Admin</a>, requires the user to be logged in with the <code>ROLE_ADMIN</code> Spring Security authority.
See below for the currently configured list of admin accounts.</li>
<security:authorize access="hasRole('ROLE_USER')">
<li><a href="j_spring_security_logout">Logout</a>, log out directly and return to this page.</li>
</security:authorize>
<security:authorize access="!hasRole('ROLE_USER')">
<li><a href="login">Log in</a>, log in directly and return to this page.</li>
</security:authorize>
</ul>
</div>
<div>
<h3>Client Filter Configuration</h3>
<p>This authorization filter for this client has been configured with the following components:</p>
<ul>
<li>Issuer service: <code>${ issuerServiceClass }</code></li>
<li>Server configuration service: <code>${ serverConfigurationServiceClass }</code></li>
<li>Client configuration service: <code>${ clientConfigurationServiceClass }</code></li>
<li>Auth request options service: <code>${ authRequestOptionsServiceClass }</code></li>
<li>Auth request URI builder: <code>${ authRequestUriBuilderClass }</code></li>
</ul>
</div>
<div>
<h3>Administrators</h3>
<p>Logged in users are assigned the <code>ROLE_USER</code> authority by default, but the following users
(identified by issuer/subject pairs) will also be given <code>ROLE_ADMIN</code>:</p>
<table class="table table-striped table-hover span4">
<tr>
<th>Issuer</th>
<th>Subject</th>
</tr>
<c:forEach items="${ admins }" var="admin">
<tr>
<td>${ admin.issuer }</td>
<td>${ admin.subject }</td>
</tr>
</c:forEach>
</table>
</div>
</div>
</div>
</div>
<o:footer /> | hlisnikovsky/mitre-local |
<|start_filename|>AYPopupPickerView/AYPopupPickerView.h<|end_filename|>
//
// AYPopupPickerView.h
// AYPopupPickerView
//
// Created by JerryYu on 2020/8/14.
// Copyright © 2020 DolphinBro. All rights reserved.
//
#import <Foundation/Foundation.h>
//! Project version number for AYPopupPickerView.
FOUNDATION_EXPORT double AYPopupPickerViewVersionNumber;
//! Project version string for AYPopupPickerView.
FOUNDATION_EXPORT const unsigned char AYPopupPickerViewVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <AYPopupPickerView/PublicHeader.h>
| jobinsjohn/AYPopupPickerView |
<|start_filename|>lib/src/os/environment.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'dart:async' show Future;
import 'dart:io' show Directory, Platform;
import 'package:flutter/services.dart' show MethodChannel;
/// Provides access to environment variables.
///
/// See: https://developer.android.com/reference/android/os/Environment
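///
/// A minimal usage sketch (Android only; the values shown are illustrative):
///
/// ```dart
/// final state = await Environment.externalStorageState;
/// if (state == Environment.MEDIA_MOUNTED) {
///   final dir = await Environment.externalStorageDirectory;
///   print('External storage is mounted at ${dir.path}');
/// }
/// ```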
abstract class Environment {
static const MethodChannel _channel =
MethodChannel('flutter_android/Environment');
/// Standard directory in which to place any audio files that should be in the regular list of music for the user.
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_MUSIC
static const String DIRECTORY_MUSIC = "Music";
/// Standard directory in which to place any audio files that should be in the list of podcasts that the user can select (not as regular music).
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_PODCASTS
static const String DIRECTORY_PODCASTS = "Podcasts";
/// Standard directory in which to place any audio files that should be in the list of ringtones that the user can select (not as regular music).
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_RINGTONES
static const String DIRECTORY_RINGTONES = "Ringtones";
/// Standard directory in which to place any audio files that should be in the list of alarms that the user can select (not as regular music).
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_ALARMS
static const String DIRECTORY_ALARMS = "Alarms";
/// Standard directory in which to place any audio files that should be in the list of notifications that the user can select (not as regular music).
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_NOTIFICATIONS
static const String DIRECTORY_NOTIFICATIONS = "Notifications";
/// Standard directory in which to place pictures that are available to the user.
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_PICTURES
static const String DIRECTORY_PICTURES = "Pictures";
/// Standard directory in which to place movies that are available to the user.
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_MOVIES
static const String DIRECTORY_MOVIES = "Movies";
/// Standard directory in which to place files that have been downloaded by the user.
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_DOWNLOADS
static const String DIRECTORY_DOWNLOADS = "Download";
/// The traditional location for pictures and videos when mounting the device as a camera.
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_DCIM
static const String DIRECTORY_DCIM = "DCIM";
/// Standard directory in which to place documents that have been created by the user.
///
/// See: https://developer.android.com/reference/android/os/Environment#DIRECTORY_DOCUMENTS
static const String DIRECTORY_DOCUMENTS = "Documents";
/// Unknown storage state, such as when a path isn't backed by known storage media.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_UNKNOWN
static const String MEDIA_UNKNOWN = "unknown";
/// Storage state if the media is not present.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_REMOVED
static const String MEDIA_REMOVED = "removed";
/// Storage state if the media is present but not mounted.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_UNMOUNTED
static const String MEDIA_UNMOUNTED = "unmounted";
/// Storage state if the media is present and being disk-checked.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_CHECKING
static const String MEDIA_CHECKING = "checking";
/// Storage state if the media is present but is blank or is using an unsupported filesystem.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_NOFS
static const String MEDIA_NOFS = "nofs";
/// Storage state if the media is present and mounted at its mount point with read/write access.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_MOUNTED
static const String MEDIA_MOUNTED = "mounted";
/// Storage state if the media is present and mounted at its mount point with read-only access.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_MOUNTED_READ_ONLY
static const String MEDIA_MOUNTED_READ_ONLY = "mounted_ro";
/// Storage state if the media is present not mounted, and shared via USB mass storage.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_SHARED
static const String MEDIA_SHARED = "shared";
/// Storage state if the media was removed before it was unmounted.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_BAD_REMOVAL
static const String MEDIA_BAD_REMOVAL = "bad_removal";
/// Storage state if the media is present but cannot be mounted.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_UNMOUNTABLE
static const String MEDIA_UNMOUNTABLE = "unmountable";
/// Storage state if the media is in the process of being ejected.
///
/// See: https://developer.android.com/reference/android/os/Environment#MEDIA_EJECTING
static const String MEDIA_EJECTING = "ejecting";
/// Return the user data directory.
///
/// See: https://developer.android.com/reference/android/os/Environment#getDataDirectory()
static Future<Directory> get dataDirectory async {
assert(Platform.isAndroid);
return Directory(await _channel.invokeMethod('getDataDirectory') as String);
}
/// Return the download/cache content directory.
///
/// See: https://developer.android.com/reference/android/os/Environment#getDownloadCacheDirectory()
static Future<Directory> get downloadCacheDirectory async {
assert(Platform.isAndroid);
return Directory(
await _channel.invokeMethod('getDownloadCacheDirectory') as String);
}
/// Return the primary shared/external storage directory.
///
/// This directory may not currently be accessible if it has been mounted by
/// the user on their computer, has been removed from the device, or some
/// other problem has happened. You can determine its current state with
/// [externalStorageState].
///
/// See: https://developer.android.com/reference/android/os/Environment#getExternalStorageDirectory()
static Future<Directory> get externalStorageDirectory async {
assert(Platform.isAndroid);
return Directory(
await _channel.invokeMethod('getExternalStorageDirectory') as String);
}
/// Returns the current state of the primary shared/external storage media.
///
/// See: https://developer.android.com/reference/android/os/Environment#getExternalStorageState()
static Future<String> get externalStorageState async {
assert(Platform.isAndroid);
return await _channel.invokeMethod('getExternalStorageState')
as String; // TODO: enum values
}
/// Return root of the "system" partition holding the core Android OS. Always
/// present and mounted read-only.
///
/// See: https://developer.android.com/reference/android/os/Environment#getRootDirectory()
static Future<Directory> get rootDirectory async {
assert(Platform.isAndroid);
return Directory(await _channel.invokeMethod('getRootDirectory') as String);
}
/// Returns whether the primary shared/external storage media is emulated.
///
/// See: https://developer.android.com/reference/android/os/Environment#isExternalStorageEmulated()
static Future<bool> get isExternalStorageEmulated async {
assert(Platform.isAndroid);
return await _channel.invokeMethod('isExternalStorageEmulated') as bool;
}
/// Returns whether the primary shared/external storage media is physically
/// removable.
///
/// See: https://developer.android.com/reference/android/os/Environment#isExternalStorageRemovable()
static Future<bool> get isExternalStorageRemovable async {
assert(Platform.isAndroid);
return await _channel.invokeMethod('isExternalStorageRemovable') as bool;
}
}
<|start_filename|>lib/src/database/exceptions.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'package:flutter/services.dart' show PlatformException;
/// An exception indicating that a cursor is out of bounds.
///
/// See: https://developer.android.com/reference/android/database/CursorIndexOutOfBoundsException
class CursorIndexOutOfBoundsException extends PlatformException {
final int index;
final int size;
CursorIndexOutOfBoundsException(this.index, this.size);
}
/// An exception that indicates there was an error with SQL parsing or
/// execution.
///
/// See: https://developer.android.com/reference/android/database/SQLException
class SQLException extends PlatformException {}
<|start_filename|>lib/src/os/build.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'dart:io' show Platform;
/// Information about the current build, extracted from system properties.
///
/// See: https://developer.android.com/reference/android/os/Build
abstract class Build {
/// Value used for when a build property is unknown.
///
/// See: https://developer.android.com/reference/android/os/Build#UNKNOWN
static const String UNKNOWN = "unknown";
/// Returns the version string for the radio firmware.
///
/// May return `null` (if, for instance, the radio is not currently on).
///
/// See: https://developer.android.com/reference/android/os/Build#getRadioVersion()
static Future<String> get radioVersion {
assert(Platform.isAndroid);
return null; // TODO
}
/// Gets the hardware serial number, if available.
///
/// See: https://developer.android.com/reference/android/os/Build#getSerial()
static Future<String> get serial {
assert(Platform.isAndroid);
return null; // TODO
}
}
<|start_filename|>android/src/main/java/com/github/drydart/flutter_android/SharedPreferencesHandler.java<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
package com.github.drydart.flutter_android;
import android.content.Context;
import android.content.SharedPreferences;
import androidx.annotation.NonNull;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel.Result;
/** SharedPreferencesHandler */
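/*
* Only the "getAll" call is currently handled; it requires "name" and "mode"
* arguments and returns the preference map for that file. A rough sketch of
* the corresponding Dart-side invocation (the preference file name below is
* purely illustrative; mode 0 corresponds to Context.MODE_PRIVATE):
*
*   const channel = MethodChannel('flutter_android/SharedPreferences');
*   final prefs = await channel.invokeMethod('getAll', {'name': 'settings', 'mode': 0});
*/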
class SharedPreferencesHandler extends FlutterMethodCallHandler {
static final String CHANNEL = "flutter_android/SharedPreferences";
SharedPreferencesHandler(final @NonNull FlutterPlugin.FlutterPluginBinding binding) {
super(binding);
}
@Override
public void onMethodCall(final MethodCall call, final Result result) {
assert(call != null);
assert(result != null);
final Context context = this.binding.getApplicationContext();
assert(context != null);
assert(call.method != null);
switch (call.method) {
case "getAll": {
final String name = getRequiredArgument(call, "name");
final int mode = getRequiredArgument(call, "mode");
final SharedPreferences prefs = context.getSharedPreferences(name, mode);
result.success(prefs.getAll());
break;
}
default: {
result.notImplemented();
}
}
}
}
<|start_filename|>test/android_database_test.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'package:flutter_test/flutter_test.dart';
import 'package:flutter_android/android_database.dart';
void main() {
group('android_database.DatabaseUtils', () {
test('DatabaseUtils.dumpCurrentRowToString', () {
// TODO
});
test('DatabaseUtils.dumpCurrentRowToStringBuffer', () {
// TODO
});
test('DatabaseUtils.dumpCursorToString', () {
// TODO
});
});
group('android_database.MatrixCursor', () {
test('MatrixCursor.empty', () {
final Cursor cursor = MatrixCursor.empty();
expect(cursor, isEmpty);
expect(cursor.position, equals(-1));
expect(cursor.isBeforeFirst, isTrue);
for (var row in cursor) {} // ignore: unused_local_variable
expect(cursor.position, equals(0));
expect(cursor.isAfterLast, isTrue);
});
test('MatrixCursor.from', () {
final Cursor cursor = MatrixCursor.from(
columns: <String>['a', 'b', 'c'],
rows: <List<dynamic>>[
[1, 2, 3],
[4, 5, 6],
[7, 8, 9]
],
);
expect(cursor, isNotEmpty);
expect(cursor.position, equals(-1));
expect(cursor.isBeforeFirst, isTrue);
for (var row in cursor) {
expect(row, hasLength(3));
expect(row.keys, contains('a'));
expect(row.keys, contains('b'));
expect(row.keys, contains('c'));
}
expect(cursor.position, equals(3));
expect(cursor.isAfterLast, isTrue);
});
});
}
<|start_filename|>lib/src/content/exceptions.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'package:flutter/services.dart' show PlatformException;
/// Thrown when a call to `Context.startActivity(Intent)` or one of its variants
/// fails because an `Activity` can not be found to execute the given [Intent].
///
/// See: https://developer.android.com/reference/android/content/ActivityNotFoundException
class ActivityNotFoundException extends PlatformException {}
<|start_filename|>lib/src/os/user_manager.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'dart:io' show Platform;
/// Manages users and user details on a multi-user system.
///
/// See: https://developer.android.com/reference/android/os/UserManager
class UserManager {
/// Specifies if a user is disallowed from adding and removing accounts.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_MODIFY_ACCOUNTS
static const String DISALLOW_MODIFY_ACCOUNTS = "no_modify_accounts";
/// Specifies if a user is disallowed from changing Wi-Fi access points.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_WIFI
static const String DISALLOW_CONFIG_WIFI = "no_config_wifi";
/// Specifies if a user is disallowed from changing the device language.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_LOCALE
static const String DISALLOW_CONFIG_LOCALE = "no_config_locale";
/// Specifies if a user is disallowed from installing applications.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_INSTALL_APPS
static const String DISALLOW_INSTALL_APPS = "no_install_apps";
/// Specifies if a user is disallowed from uninstalling applications.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_UNINSTALL_APPS
static const String DISALLOW_UNINSTALL_APPS = "no_uninstall_apps";
/// Specifies if a user is disallowed from turning on location sharing.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_SHARE_LOCATION
static const String DISALLOW_SHARE_LOCATION = "no_share_location";
/// Specifies if airplane mode is disallowed on the device.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_AIRPLANE_MODE
static const String DISALLOW_AIRPLANE_MODE = "no_airplane_mode";
/// Specifies if a user is disallowed from configuring brightness.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_BRIGHTNESS
static const String DISALLOW_CONFIG_BRIGHTNESS = "no_config_brightness";
/// Specifies if ambient display is disallowed for the user.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_AMBIENT_DISPLAY
static const String DISALLOW_AMBIENT_DISPLAY = "no_ambient_display";
/// Specifies if a user is disallowed from changing screen off timeout.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_SCREEN_TIMEOUT
static const String DISALLOW_CONFIG_SCREEN_TIMEOUT =
"no_config_screen_timeout";
/// Specifies if a user is disallowed from enabling the "Unknown Sources" setting, that allows installation of apps from unknown sources.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_INSTALL_UNKNOWN_SOURCES
static const String DISALLOW_INSTALL_UNKNOWN_SOURCES =
"no_install_unknown_sources";
/// Specifies if a user is disallowed from configuring bluetooth.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_BLUETOOTH
static const String DISALLOW_CONFIG_BLUETOOTH = "no_config_bluetooth";
/// Specifies if bluetooth is disallowed on the device.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_BLUETOOTH
static const String DISALLOW_BLUETOOTH = "no_bluetooth";
/// Specifies if outgoing bluetooth sharing is disallowed on the device.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_BLUETOOTH_SHARING
static const String DISALLOW_BLUETOOTH_SHARING = "no_bluetooth_sharing";
/// Specifies if a user is disallowed from transferring files over USB.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_USB_FILE_TRANSFER
static const String DISALLOW_USB_FILE_TRANSFER = "no_usb_file_transfer";
/// Specifies if a user is disallowed from configuring user credentials.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_CREDENTIALS
static const String DISALLOW_CONFIG_CREDENTIALS = "no_config_credentials";
/// When set on the primary user this specifies if the user can remove other users.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_REMOVE_USER
static const String DISALLOW_REMOVE_USER = "no_remove_user";
/// Specifies if managed profiles of this user can be removed, other than by its profile owner.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_REMOVE_MANAGED_PROFILE
static const String DISALLOW_REMOVE_MANAGED_PROFILE =
"no_remove_managed_profile";
/// Specifies if a user is disallowed from enabling or accessing debugging features.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_DEBUGGING_FEATURES
static const String DISALLOW_DEBUGGING_FEATURES = "no_debugging_features";
/// Specifies if a user is disallowed from configuring a VPN.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_VPN
static const String DISALLOW_CONFIG_VPN = "no_config_vpn";
/// Specifies if a user is disallowed from enabling or disabling location providers.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_LOCATION
static const String DISALLOW_CONFIG_LOCATION = "no_config_location";
/// Specifies if date, time and timezone configuring is disallowed.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_DATE_TIME
static const String DISALLOW_CONFIG_DATE_TIME = "no_config_date_time";
/// Specifies if a user is disallowed from configuring Tethering & portable hotspots.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_TETHERING
static const String DISALLOW_CONFIG_TETHERING = "no_config_tethering";
/// Specifies if a user is disallowed from resetting network settings from Settings.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_NETWORK_RESET
static const String DISALLOW_NETWORK_RESET = "no_network_reset";
/// Specifies if a user is disallowed from factory resetting from Settings.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_FACTORY_RESET
static const String DISALLOW_FACTORY_RESET = "no_factory_reset";
/// Specifies if a user is disallowed from adding new users.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_ADD_USER
static const String DISALLOW_ADD_USER = "no_add_user";
/// Specifies if a user is disallowed from adding managed profiles.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_ADD_MANAGED_PROFILE
static const String DISALLOW_ADD_MANAGED_PROFILE = "no_add_managed_profile";
/// Specifies if a user is disallowed from disabling application verification.
///
/// See: https://developer.android.com/reference/android/os/UserManager#ENSURE_VERIFY_APPS
static const String ENSURE_VERIFY_APPS = "ensure_verify_apps";
/// Specifies if a user is disallowed from configuring cell broadcasts.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_CELL_BROADCASTS
static const String DISALLOW_CONFIG_CELL_BROADCASTS =
"no_config_cell_broadcasts";
/// Specifies if a user is disallowed from configuring mobile networks.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CONFIG_MOBILE_NETWORKS
static const String DISALLOW_CONFIG_MOBILE_NETWORKS =
"no_config_mobile_networks";
/// Specifies if a user is disallowed from modifying applications in Settings or launchers.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_APPS_CONTROL
static const String DISALLOW_APPS_CONTROL = "no_control_apps";
/// Specifies if a user is disallowed from mounting physical external media.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_MOUNT_PHYSICAL_MEDIA
static const String DISALLOW_MOUNT_PHYSICAL_MEDIA = "no_physical_media";
/// Specifies if a user is disallowed from adjusting microphone volume.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_UNMUTE_MICROPHONE
static const String DISALLOW_UNMUTE_MICROPHONE = "no_unmute_microphone";
/// Specifies if a user is disallowed from adjusting the master volume.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_ADJUST_VOLUME
static const String DISALLOW_ADJUST_VOLUME = "no_adjust_volume";
/// Specifies that the user is not allowed to make outgoing phone calls.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_OUTGOING_CALLS
static const String DISALLOW_OUTGOING_CALLS = "no_outgoing_calls";
/// Specifies that the user is not allowed to send or receive SMS messages.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_SMS
static const String DISALLOW_SMS = "no_sms";
/// Specifies if the user is not allowed to have fun.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_FUN
static const String DISALLOW_FUN = "no_fun";
/// Specifies that windows besides app windows should not be created.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CREATE_WINDOWS
static const String DISALLOW_CREATE_WINDOWS = "no_create_windows";
/// Specifies that system error dialogs for crashed or unresponsive apps should not be shown.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_SYSTEM_ERROR_DIALOGS
static const String DISALLOW_SYSTEM_ERROR_DIALOGS = "no_system_error_dialogs";
/// Specifies if what is copied in the clipboard of this profile can be pasted in related profiles.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_CROSS_PROFILE_COPY_PASTE
static const String DISALLOW_CROSS_PROFILE_COPY_PASTE =
"no_cross_profile_copy_paste";
/// Specifies if the user is not allowed to use NFC to beam out data from apps.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_OUTGOING_BEAM
static const String DISALLOW_OUTGOING_BEAM = "no_outgoing_beam";
/// User restriction to disallow setting a wallpaper.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_SET_WALLPAPER
static const String DISALLOW_SET_WALLPAPER = "no_set_wallpaper";
/// Specifies if the user is not allowed to reboot the device into safe boot mode.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_SAFE_BOOT
static const String DISALLOW_SAFE_BOOT = "no_safe_boot";
/// Specifies if a user is not allowed to use cellular data when roaming.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_DATA_ROAMING
static const String DISALLOW_DATA_ROAMING = "no_data_roaming";
/// Specifies if a user is not allowed to change their icon.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_SET_USER_ICON
static const String DISALLOW_SET_USER_ICON = "no_set_user_icon";
/// Specifies that the managed profile is not allowed to have unified lock screen challenge with the primary user.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_UNIFIED_PASSWORD
static const String DISALLOW_UNIFIED_PASSWORD = "no_unified_password";
/// Allows apps in the parent profile to handle web links from the managed profile.
///
/// See: https://developer.android.com/reference/android/os/UserManager#ALLOW_PARENT_PROFILE_APP_LINKING
static const String ALLOW_PARENT_PROFILE_APP_LINKING =
"allow_parent_profile_app_linking";
/// Specifies if a user is not allowed to use Autofill Services.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_AUTOFILL
static const String DISALLOW_AUTOFILL = "no_autofill";
/// Specifies if user switching is blocked on the current user.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_USER_SWITCH
static const String DISALLOW_USER_SWITCH = "no_user_switch";
/// Specifies whether the user can share file / picture / data from the primary user into the managed profile, either by sending them from the primary side, or by picking up data within an app in the managed profile.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_SHARE_INTO_MANAGED_PROFILE
static const String DISALLOW_SHARE_INTO_MANAGED_PROFILE =
"no_sharing_into_profile";
/// Specifies whether the user is allowed to print.
///
/// See: https://developer.android.com/reference/android/os/UserManager#DISALLOW_PRINTING
static const String DISALLOW_PRINTING = "no_printing";
/// Application restriction key that is used to indicate the pending arrival of real restrictions for the app.
///
/// See: https://developer.android.com/reference/android/os/UserManager#KEY_RESTRICTIONS_PENDING
static const String KEY_RESTRICTIONS_PENDING = "restrictions_pending";
/// Error result indicating that this user is not allowed to add other users on this device.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_CREATION_FAILED_NOT_PERMITTED
static const int USER_CREATION_FAILED_NOT_PERMITTED = 1;
/// Error result indicating that no more users can be created on this device.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_CREATION_FAILED_NO_MORE_USERS
static const int USER_CREATION_FAILED_NO_MORE_USERS = 2;
/// Indicates user operation is successful.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_OPERATION_SUCCESS
static const int USER_OPERATION_SUCCESS = 0;
/// Indicates user operation failed for unknown reason.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_OPERATION_ERROR_UNKNOWN
static const int USER_OPERATION_ERROR_UNKNOWN = 1;
/// Indicates user operation failed because target user is a managed profile.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_OPERATION_ERROR_MANAGED_PROFILE
static const int USER_OPERATION_ERROR_MANAGED_PROFILE = 2;
/// Indicates user operation failed because maximum running user limit has been reached.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_OPERATION_ERROR_MAX_RUNNING_USERS
static const int USER_OPERATION_ERROR_MAX_RUNNING_USERS = 3;
/// Indicates user operation failed because the target user is in the foreground.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_OPERATION_ERROR_CURRENT_USER
static const int USER_OPERATION_ERROR_CURRENT_USER = 4;
/// Indicates user operation failed because device has low data storage.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_OPERATION_ERROR_LOW_STORAGE
static const int USER_OPERATION_ERROR_LOW_STORAGE = 5;
/// Indicates user operation failed because maximum user limit has been reached.
///
/// See: https://developer.android.com/reference/android/os/UserManager#USER_OPERATION_ERROR_MAX_USERS
static const int USER_OPERATION_ERROR_MAX_USERS = 6;
/// Returns whether this device supports multiple users with their own login
/// and customizable space.
///
/// See: https://developer.android.com/reference/android/os/UserManager#supportsMultipleUsers()
static Future<bool> get supportsMultipleUsers {
assert(Platform.isAndroid);
return null; // TODO
}
}
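// A minimal sketch (not part of this library's API) of how the
// `supportsMultipleUsers` getter could be backed by a platform channel once
// the TODO above is resolved. It assumes `MethodChannel` from
// package:flutter/services.dart; the channel and method names are assumptions
// chosen purely for illustration.
//
//   static const MethodChannel _channel =
//       MethodChannel('flutter_android/os/UserManager');
//
//   static Future<bool> get supportsMultipleUsers async {
//     assert(Platform.isAndroid);
//     return await _channel.invokeMethod<bool>('supportsMultipleUsers');
//   }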
<|start_filename|>lib/src/app/exceptions.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'package:flutter/services.dart' show PlatformException;
import '../os/parcel.dart' show Parcel;
import '../os/parcelable.dart' show Parcelable;
/// Thrown when authentication is needed from the end user before viewing the
/// content.
///
/// This exception is only appropriate where there is a concrete action the user
/// can take to authorize and make forward progress, such as confirming or
/// entering authentication credentials, or granting access via other means.
///
/// See: https://developer.android.com/reference/android/app/AuthenticationRequiredException
/// See: https://github.com/aosp-mirror/platform_frameworks_base/blob/master/core/java/android/app/AuthenticationRequiredException.java
class AuthenticationRequiredException extends PlatformException
with Parcelable {
@override
String get parcelableCreator => "android.app.AuthenticationRequiredException";
@override
void writeToParcel(final Parcel parcel, [final int flags = 0]) {
throw UnimplementedError(); // TODO: https://github.com/aosp-mirror/platform_frameworks_base/blob/master/core/java/android/app/AuthenticationRequiredException.java#L79
}
}
<|start_filename|>lib/src/os/system_clock.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
/// Core timekeeping facilities.
///
/// See: https://developer.android.com/reference/android/os/SystemClock
abstract class SystemClock {
/// Returns milliseconds running in the current thread.
///
/// See: https://developer.android.com/reference/android/os/SystemClock#currentThreadTimeMillis()
int get currentThreadTimeMillis {
return null; // TODO
}
/// Returns milliseconds since boot, including time spent in sleep.
///
/// See: https://developer.android.com/reference/android/os/SystemClock#elapsedRealtime()
int get elapsedRealtime {
return null; // TODO
}
/// Returns nanoseconds since boot, including time spent in sleep.
///
/// See: https://developer.android.com/reference/android/os/SystemClock#elapsedRealtimeNanos()
int get elapsedRealtimeNanos {
return null; // TODO
}
/// Sets the current wall time, in milliseconds.
///
/// Requires the calling process to have appropriate permissions.
///
/// See: https://developer.android.com/reference/android/os/SystemClock#setCurrentTimeMillis(long)
set currentTimeMillis(final int millis) {
  // TODO
}
/// Waits a given number of milliseconds (of [uptimeMillis]) before returning.
///
/// See: https://developer.android.com/reference/android/os/SystemClock#sleep(long)
void sleep(final int ms) {
return null; // TODO
}
/// Returns milliseconds since boot, not counting time spent in deep sleep.
///
/// See: https://developer.android.com/reference/android/os/SystemClock#uptimeMillis()
int get uptimeMillis {
return null; // TODO
}
}
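// Usage sketch (assumes an implementation of the TODO getters above):
// measuring how long an operation takes with the monotonic clocks, which are
// unaffected by wall-clock adjustments. `clock` and `doWork` are hypothetical
// placeholders.
//
//   final int started = clock.elapsedRealtime; // includes time in deep sleep
//   await doWork();
//   final int elapsedMs = clock.elapsedRealtime - started;
//
// Using `uptimeMillis` instead would exclude time spent in deep sleep, as
// documented above.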
<|start_filename|>lib/src/hardware/sensor.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'package:flutter/foundation.dart' show required;
import 'sensor_event.dart' show SensorEvent;
import 'sensor_manager.dart' show SensorManager;
/// Represents a sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor
class Sensor {
/// A constant describing an accelerometer sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_ACCELEROMETER
static const int TYPE_ACCELEROMETER = 1;
/// A constant string describing an accelerometer sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_ACCELEROMETER
static const String STRING_TYPE_ACCELEROMETER =
"android.sensor.accelerometer";
/// A constant describing a magnetic field sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_MAGNETIC_FIELD
static const int TYPE_MAGNETIC_FIELD = 2;
/// A constant string describing a magnetic field sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_MAGNETIC_FIELD
static const String STRING_TYPE_MAGNETIC_FIELD =
"android.sensor.magnetic_field";
/// A constant describing a gyroscope sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_GYROSCOPE
static const int TYPE_GYROSCOPE = 4;
/// A constant string describing a gyroscope sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_GYROSCOPE
static const String STRING_TYPE_GYROSCOPE = "android.sensor.gyroscope";
/// A constant describing a light sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_LIGHT
static const int TYPE_LIGHT = 5;
/// A constant string describing a light sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_LIGHT
static const String STRING_TYPE_LIGHT = "android.sensor.light";
/// A constant describing a pressure sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_PRESSURE
static const int TYPE_PRESSURE = 6;
/// A constant string describing a pressure sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_PRESSURE
static const String STRING_TYPE_PRESSURE = "android.sensor.pressure";
/// A constant describing a proximity sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_PROXIMITY
static const int TYPE_PROXIMITY = 8;
/// A constant string describing a proximity sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_PROXIMITY
static const String STRING_TYPE_PROXIMITY = "android.sensor.proximity";
/// A constant describing a gravity sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_GRAVITY
static const int TYPE_GRAVITY = 9;
/// A constant string describing a gravity sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_GRAVITY
static const String STRING_TYPE_GRAVITY = "android.sensor.gravity";
/// A constant describing a linear acceleration sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_LINEAR_ACCELERATION
static const int TYPE_LINEAR_ACCELERATION = 10;
/// A constant string describing a linear acceleration sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_LINEAR_ACCELERATION
static const String STRING_TYPE_LINEAR_ACCELERATION =
"android.sensor.linear_acceleration";
/// A constant describing a rotation vector sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_ROTATION_VECTOR
static const int TYPE_ROTATION_VECTOR = 11;
/// A constant string describing a rotation vector sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_ROTATION_VECTOR
static const String STRING_TYPE_ROTATION_VECTOR =
"android.sensor.rotation_vector";
/// A constant describing a relative humidity sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_RELATIVE_HUMIDITY
static const int TYPE_RELATIVE_HUMIDITY = 12;
/// A constant string describing a relative humidity sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_RELATIVE_HUMIDITY
static const String STRING_TYPE_RELATIVE_HUMIDITY =
"android.sensor.relative_humidity";
/// A constant describing an ambient temperature sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_AMBIENT_TEMPERATURE
static const int TYPE_AMBIENT_TEMPERATURE = 13;
/// A constant string describing an ambient temperature sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_AMBIENT_TEMPERATURE
static const String STRING_TYPE_AMBIENT_TEMPERATURE =
"android.sensor.ambient_temperature";
/// A constant describing an uncalibrated magnetic field sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_MAGNETIC_FIELD_UNCALIBRATED
static const int TYPE_MAGNETIC_FIELD_UNCALIBRATED = 14;
/// A constant string describing an uncalibrated magnetic field sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_MAGNETIC_FIELD_UNCALIBRATED
static const String STRING_TYPE_MAGNETIC_FIELD_UNCALIBRATED =
"android.sensor.magnetic_field_uncalibrated";
/// A constant describing an uncalibrated rotation vector sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_GAME_ROTATION_VECTOR
static const int TYPE_GAME_ROTATION_VECTOR = 15;
/// A constant string describing an uncalibrated rotation vector sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_GAME_ROTATION_VECTOR
static const String STRING_TYPE_GAME_ROTATION_VECTOR =
"android.sensor.game_rotation_vector";
/// A constant describing an uncalibrated gyroscope sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_GYROSCOPE_UNCALIBRATED
static const int TYPE_GYROSCOPE_UNCALIBRATED = 16;
/// A constant string describing an uncalibrated gyroscope sensor type.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_GYROSCOPE_UNCALIBRATED
static const String STRING_TYPE_GYROSCOPE_UNCALIBRATED =
"android.sensor.gyroscope_uncalibrated";
/// A constant describing a significant motion trigger sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_SIGNIFICANT_MOTION
static const int TYPE_SIGNIFICANT_MOTION = 17;
/// A constant string describing a significant motion trigger sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_SIGNIFICANT_MOTION
static const String STRING_TYPE_SIGNIFICANT_MOTION =
"android.sensor.significant_motion";
/// A constant describing a step detector sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_STEP_DETECTOR
static const int TYPE_STEP_DETECTOR = 18;
/// A constant string describing a step detector sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_STEP_DETECTOR
static const String STRING_TYPE_STEP_DETECTOR =
"android.sensor.step_detector";
/// A constant describing a step counter sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_STEP_COUNTER
static const int TYPE_STEP_COUNTER = 19;
/// A constant string describing a step counter sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_STEP_COUNTER
static const String STRING_TYPE_STEP_COUNTER = "android.sensor.step_counter";
/// A constant describing a geo-magnetic rotation vector.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_GEOMAGNETIC_ROTATION_VECTOR
static const int TYPE_GEOMAGNETIC_ROTATION_VECTOR = 20;
/// A constant string describing a geo-magnetic rotation vector.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_GEOMAGNETIC_ROTATION_VECTOR
static const String STRING_TYPE_GEOMAGNETIC_ROTATION_VECTOR =
"android.sensor.geomagnetic_rotation_vector";
/// A constant describing a heart rate monitor.
///
/// The reported value is the heart rate in beats per minute.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_HEART_RATE
static const int TYPE_HEART_RATE = 21;
/// A constant string describing a heart rate monitor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_HEART_RATE
static const String STRING_TYPE_HEART_RATE = "android.sensor.heart_rate";
/// A constant describing a pose sensor with 6 degrees of freedom.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_POSE_6DOF
static const int TYPE_POSE_6DOF = 28;
/// A constant string describing a pose sensor with 6 degrees of freedom.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_POSE_6DOF
static const String STRING_TYPE_POSE_6DOF = "android.sensor.pose_6dof";
/// A constant describing a stationary detect sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_STATIONARY_DETECT
static const int TYPE_STATIONARY_DETECT = 29;
/// A constant string describing a stationary detection sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_STATIONARY_DETECT
static const String STRING_TYPE_STATIONARY_DETECT =
"android.sensor.stationary_detect";
/// A constant describing a motion detect sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_MOTION_DETECT
static const int TYPE_MOTION_DETECT = 30;
/// A constant string describing a motion detection sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_MOTION_DETECT
static const String STRING_TYPE_MOTION_DETECT =
"android.sensor.motion_detect";
/// A constant describing a motion detect sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_HEART_BEAT
static const int TYPE_HEART_BEAT = 31;
/// A constant string describing a heart beat sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_HEART_BEAT
static const String STRING_TYPE_HEART_BEAT = "android.sensor.heart_beat";
/// A constant describing a low latency off-body detect sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_LOW_LATENCY_OFFBODY_DETECT
static const int TYPE_LOW_LATENCY_OFFBODY_DETECT = 34;
/// A constant string describing a low-latency offbody detector sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_LOW_LATENCY_OFFBODY_DETECT
static const String STRING_TYPE_LOW_LATENCY_OFFBODY_DETECT =
"android.sensor.low_latency_offbody_detect";
/// A constant describing an uncalibrated accelerometer sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_ACCELEROMETER_UNCALIBRATED
static const int TYPE_ACCELEROMETER_UNCALIBRATED = 35;
/// A constant string describing an uncalibrated accelerometer sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#STRING_TYPE_ACCELEROMETER_UNCALIBRATED
static const String STRING_TYPE_ACCELEROMETER_UNCALIBRATED =
"android.sensor.accelerometer_uncalibrated";
/// A constant describing all sensor types.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_ALL
static const int TYPE_ALL = -1;
/// The lowest sensor type vendor defined sensors can use.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#TYPE_DEVICE_PRIVATE_BASE
static const int TYPE_DEVICE_PRIVATE_BASE = 65536;
/// Events are reported at a constant rate which is set by the rate parameter
/// of `SensorManager#registerListener(SensorEventListener, Sensor, int)`.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#REPORTING_MODE_CONTINUOUS
static const int REPORTING_MODE_CONTINUOUS = 0;
/// Events are reported only when the value changes.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#REPORTING_MODE_ON_CHANGE
static const int REPORTING_MODE_ON_CHANGE = 1;
/// Events are reported in one-shot mode.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#REPORTING_MODE_ONE_SHOT
static const int REPORTING_MODE_ONE_SHOT = 2;
/// Events are reported as described in the description of the sensor.
///
/// See: https://developer.android.com/reference/android/hardware/Sensor#REPORTING_MODE_SPECIAL_TRIGGER
static const int REPORTING_MODE_SPECIAL_TRIGGER = 3;
/// An internal handle.
final int key;
/// See: https://developer.android.com/reference/android/hardware/Sensor#getName()
final String name;
/// See: https://developer.android.com/reference/android/hardware/Sensor#getType()
final int type;
const Sensor({
@required this.key,
this.name,
this.type,
});
/// Subscribes to this sensor's event stream.
///
/// See: https://developer.android.com/reference/android/hardware/SensorManager#registerListener(android.hardware.SensorEventListener,%20android.hardware.Sensor,%20int)
Future<Stream<SensorEvent>> subscribe(
{int samplingPeriodUs, int maxReportLatencyUs}) async {
final events = await SensorManager.registerListener(this,
samplingPeriodUs: samplingPeriodUs,
maxReportLatencyUs: maxReportLatencyUs);
return events.receiveBroadcastStream().map(
(dynamic event) => _streamEventToSensorEvent(event.cast<double>()));
}
@override
String toString() => '[Sensor (type: $type)]';
SensorEvent _streamEventToSensorEvent(final List<double> list) {
return SensorEvent(
accuracy: null, // TODO
sensor: this,
timestamp: null, // TODO
values: list,
);
}
}
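// Usage sketch (illustrative only): subscribing to accelerometer events. In
// practice the Sensor instance would come from SensorManager; the literal
// `key` below is a placeholder assumption.
//
//   final sensor = Sensor(key: 0, type: Sensor.TYPE_ACCELEROMETER);
//   final events = await sensor.subscribe(samplingPeriodUs: 20000);
//   events.listen((SensorEvent event) => print(event.values));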
<|start_filename|>lib/src/os/stat_fs.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
/// Retrieve overall information about the space on a filesystem.
///
/// This is a wrapper for Unix `statvfs()`.
///
/// See: https://developer.android.com/reference/android/os/StatFs
class StatFs {
/// The filesystem path being examined.
final String path;
/// Construct a new `StatFs` for looking at the stats of the filesystem at
/// path.
///
/// Upon construction, the stat of the file system will be performed, and the
/// values retrieved available from the methods on this class.
///
/// See: https://developer.android.com/reference/android/os/StatFs#StatFs(java.lang.String)
StatFs(this.path);
/// The number of blocks that are free on the file system and available to
/// applications.
///
/// See: https://developer.android.com/reference/android/os/StatFs#getAvailableBlocksLong()
int get availableBlocks {
return null; // TODO
}
/// The number of bytes that are free on the file system and available to
/// applications.
///
/// See: https://developer.android.com/reference/android/os/StatFs#getAvailableBytes()
int get availableBytes {
return null; // TODO
}
/// The total number of blocks on the file system.
///
/// See: https://developer.android.com/reference/android/os/StatFs#getBlockCountLong()
int get blockCount {
return null; // TODO
}
/// The size, in bytes, of a block on the file system.
///
/// See: https://developer.android.com/reference/android/os/StatFs#getBlockSizeLong()
int get blockSize {
return null; // TODO
}
/// The total number of blocks that are free on the file system, including
/// reserved blocks (that are not available to normal applications).
///
/// See: https://developer.android.com/reference/android/os/StatFs#getFreeBlocksLong()
int get freeBlocks {
return null; // TODO
}
/// The number of bytes that are free on the file system, including reserved
/// blocks (that are not available to normal applications).
///
/// See: https://developer.android.com/reference/android/os/StatFs#getFreeBytes()
int get freeBytes {
return null; // TODO
}
/// The total number of bytes supported by the file system.
///
/// See: https://developer.android.com/reference/android/os/StatFs#getTotalBytes()
int get totalBytes {
return null; // TODO
}
}
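// Usage sketch (assumes the TODO getters above are implemented): reporting how
// much of a filesystem is still available to applications. The path is only an
// example.
//
//   final stats = StatFs('/data');
//   final percentFree = 100 * stats.availableBytes / stats.totalBytes;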
<|start_filename|>lib/src/content/content_values.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import '../os/parcel.dart' show Parcel;
import '../os/parcelable.dart' show Parcelable;
/// This class is used to store a set of values that the [ContentResolver] can
/// process.
///
/// See: https://developer.android.com/reference/android/content/ContentValues
/// See: https://github.com/aosp-mirror/platform_frameworks_base/blob/master/core/java/android/content/ContentValues.java
class ContentValues with Parcelable {
final Map<String, dynamic> map = const <String, dynamic>{};
/// Creates an empty set of values using the default initial size.
///
/// See: https://developer.android.com/reference/android/content/ContentValues#ContentValues()
ContentValues(); // FIXME: https://github.com/dart-lang/sdk/issues/40982
@override
String get parcelableCreator => "android.content.ContentValues";
@override
void writeToParcel(final Parcel parcel, [final int flags = 0]) {
parcel.writeInt(map.length);
parcel.writeArrayMap(map);
}
}
<|start_filename|>lib/src/app/notification.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'dart:ui' show Color;
import 'package:flutter/widgets.dart' show Icon;
import '../os/bundle.dart' show Bundle;
import '../os/parcel.dart' show Parcel;
import '../os/parcelable.dart' show Parcelable;
import 'notification_action.dart' show NotificationAction;
/// Represents how a persistent notification is to be presented to the user
/// using the [NotificationManager].
///
/// The [NotificationBuilder] has been added to make it easier to construct
/// notifications.
///
/// See: https://developer.android.com/reference/android/app/Notification
/// See: https://github.com/aosp-mirror/platform_frameworks_base/blob/master/core/java/android/app/Notification.java
class Notification with Parcelable {
/// See: https://developer.android.com/reference/android/app/Notification#BADGE_ICON_LARGE
static const int BADGE_ICON_LARGE = 2;
/// See: https://developer.android.com/reference/android/app/Notification#BADGE_ICON_NONE
static const int BADGE_ICON_NONE = 0;
/// See: https://developer.android.com/reference/android/app/Notification#BADGE_ICON_SMALL
static const int BADGE_ICON_SMALL = 1;
/// See: https://developer.android.com/reference/android/app/Notification#VISIBILITY_PRIVATE
static const int VISIBILITY_PRIVATE = 0;
/// See: https://developer.android.com/reference/android/app/Notification#VISIBILITY_PUBLIC
static const int VISIBILITY_PUBLIC = 1;
/// See: https://developer.android.com/reference/android/app/Notification#VISIBILITY_SECRET
static const int VISIBILITY_SECRET = -1;
/// Array of all [NotificationAction] structures attached to this notification.
///
/// See: https://developer.android.com/reference/android/app/Notification#actions
final List<NotificationAction> actions;
/// One of the predefined notification categories.
///
/// See: https://developer.android.com/reference/android/app/Notification#category
final String category;
/// Accent color (an ARGB integer) to be applied.
///
/// See: https://developer.android.com/reference/android/app/Notification#color
final Color color;
/// Additional semantic data to be carried around.
///
/// See: https://developer.android.com/reference/android/app/Notification#extras
final Bundle extras;
/// Any additional flags.
///
/// See: https://developer.android.com/reference/android/app/Notification#flags
final int flags;
/// If the icon in the status bar is to have more than one level, you can set
/// this.
///
/// See: https://developer.android.com/reference/android/app/Notification#iconLevel
final int iconLevel;
/// The number of events that this notification represents.
///
/// See: https://developer.android.com/reference/android/app/Notification#number
final int number;
/// Replacement version of this notification whose content will be shown in an
/// insecure context such as atop a secure keyguard.
///
/// See: https://developer.android.com/reference/android/app/Notification#publicVersion
final Notification publicVersion;
/// The sphere of visibility of this notification.
///
/// See: https://developer.android.com/reference/android/app/Notification#visibility
final int visibility;
/// A timestamp related to this notification, in milliseconds since the epoch.
///
/// See: https://developer.android.com/reference/android/app/Notification#when
final int when;
/// Constructs a [Notification] object.
Notification({
this.actions,
this.category,
this.color,
this.extras,
this.flags,
this.iconLevel,
this.number,
this.publicVersion,
this.visibility,
this.when,
});
/// Returns what icon should be shown for this notification if it is being
/// displayed in a [Launcher] that supports badging.
///
/// See: https://developer.android.com/reference/android/app/Notification#getBadgeIconType()
int getBadgeIconType() => null; // TODO
/// Returns the id of the channel this notification posts to.
///
/// See: https://developer.android.com/reference/android/app/Notification#getChannelId()
String getChannelId() => null; // TODO
/// Get the key used to group this notification into a cluster or stack with
/// other notifications on devices which support such rendering.
///
/// See: https://developer.android.com/reference/android/app/Notification#getGroup()
String getGroup() => null; // TODO
/// Returns which type of notifications in a group are responsible for audibly
/// alerting the user.
///
/// See: https://developer.android.com/reference/android/app/Notification#getGroupAlertBehavior()
int getGroupAlertBehavior() => null; // TODO
/// The large icon shown in this notification's content view.
///
/// See: https://developer.android.com/reference/android/app/Notification#getLargeIcon()
Icon getLargeIcon() => null; // TODO
/// Returns the settings text.
///
/// See: https://developer.android.com/reference/android/app/Notification#getSettingsText()
String getSettingsText() => null; // TODO
/// Returns the id that this notification supersedes, if any.
///
/// See: https://developer.android.com/reference/android/app/Notification#getShortcutId()
String getShortcutId() => null; // TODO
/// The small icon representing this notification in the status bar and
/// content view.
///
/// See: https://developer.android.com/reference/android/app/Notification#getSmallIcon()
Icon getSmallIcon() => null; // TODO
/// Get a sort key that orders this notification among other notifications
/// from the same package.
///
/// See: https://developer.android.com/reference/android/app/Notification#getSortKey()
String getSortKey() => null; // TODO
/// Returns the duration from posting after which this notification should be
/// canceled by the system, if it's not canceled already.
///
/// See: https://developer.android.com/reference/android/app/Notification#getTimeoutAfter()
int getTimeoutAfter() => null; // TODO
@override
String get parcelableCreator => "android.app.Notification";
@override
void writeToParcel(final Parcel parcel, [final int flags = 0]) {
throw UnimplementedError(); // TODO: https://github.com/aosp-mirror/platform_frameworks_base/blob/master/core/java/android/app/Notification.java#L1818
}
}
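// Construction sketch (not an official example): building a notification value
// object with public visibility and a timestamp in milliseconds since the
// epoch, matching the field documentation above.
//
//   final notification = Notification(
//     category: 'msg',
//     visibility: Notification.VISIBILITY_PUBLIC,
//     when: DateTime.now().millisecondsSinceEpoch,
//   );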
<|start_filename|>lib/src/os/bundle.dart<|end_filename|>
/* This is free and unencumbered software released into the public domain. */
import 'parcel.dart' show Parcel;
import 'parcelable.dart' show Parcelable;
/// A mapping from [String] keys to various [Parcelable] values.
///
/// See: https://developer.android.com/reference/android/os/Bundle
/// See: https://github.com/aosp-mirror/platform_frameworks_base/blob/master/core/java/android/os/Bundle.java
class Bundle with Parcelable {
/// The canonical empty bundle.
///
/// See: https://developer.android.com/reference/android/os/Bundle#EMPTY
///
// ignore: non_constant_identifier_names
static final Bundle EMPTY = Bundle();
/// See: https://developer.android.com/reference/android/os/Bundle#CREATOR
//static const Parcelable.Creator<Bundle> CREATOR = null; // TODO
final Map<String, dynamic> mappings = Map.identity();
/// Constructs a new, empty [Bundle].
///
/// See: https://developer.android.com/reference/android/os/Bundle#Bundle()
Bundle();
/// Returns whether the mapping of this [Bundle] is empty.
///
/// See: https://developer.android.com/reference/android/os/BaseBundle#isEmpty()
bool get isEmpty => mappings.isEmpty;
/// Reports whether the bundle contains any parcelled file descriptors.
///
/// See: https://developer.android.com/reference/android/os/Bundle#hasFileDescriptors()
bool get hasFileDescriptors => false;
/// Returns the number of mappings contained in this [Bundle].
///
/// See: https://developer.android.com/reference/android/os/BaseBundle#size()
int get size => mappings.length;
/// Removes all elements from the mapping of this [Bundle].
///
/// See: https://developer.android.com/reference/android/os/Bundle#clear()
void clear() => mappings.clear();
/// Removes any entry with the given key from the mapping of this [Bundle].
///
/// See: https://developer.android.com/reference/android/os/Bundle#remove(java.lang.String)
void remove(final String key) => mappings.remove(key);
/// Returns whether the given key is contained in the mapping of this [Bundle].
///
/// See: https://developer.android.com/reference/android/os/BaseBundle#containsKey(java.lang.String)
bool containsKey(final String key) => mappings.containsKey(key);
/// Returns the entry with the given key as an object.
///
/// See: https://developer.android.com/reference/android/os/BaseBundle#get(java.lang.String)
dynamic get(final String key) => mappings[key];
/// Returns the entry with the given key.
///
/// See: https://developer.android.com/reference/android/os/BaseBundle#getString(java.lang.String)
String getString(final String key) => get(key);
/// Inserts a string value into the mapping of this [Bundle], replacing any
/// existing value for the given key.
///
/// See: https://developer.android.com/reference/android/os/BaseBundle#putString(java.lang.String,%20java.lang.String)
void putString(final String key, final String value) => mappings[key] = value;
@override
String get parcelableCreator => "android.os.Bundle";
@override
void writeToParcel(final Parcel parcel, [final int flags = 0]) {
throw UnimplementedError(); // TODO: https://github.com/aosp-mirror/platform_frameworks_base/blob/master/core/java/android/os/BaseBundle.java#L1556
}
}
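// Usage sketch: the string accessors above are implemented, so a Bundle can
// already be used as a simple string-keyed map.
//
//   final bundle = Bundle();
//   bundle.putString('greeting', 'hello');
//   assert(bundle.containsKey('greeting'));
//   assert(bundle.getString('greeting') == 'hello');
//   assert(bundle.size == 1);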
| gitter-badger/flutter_android |
<|start_filename|>src/core/Grid.js<|end_filename|>
var Node = require('./Node');
/**
* The Grid class, which serves as the encapsulation of the layout of the nodes.
* @constructor
* @param {number} width Number of columns of the grid.
* @param {number} height Number of rows of the grid.
* @param {Array.<Array.<(number|boolean)>>} [matrix] - A 0-1 matrix
 * representing the walkable status of the nodes (0 or false for walkable).
 * If the matrix is not supplied, all the nodes will be walkable.
 */
function Grid(width, height, matrix) {
/**
* The number of columns of the grid.
* @type number
*/
this.width = width;
/**
* The number of rows of the grid.
* @type number
*/
this.height = height;
this.matrix = matrix;
/**
* A 2D array of nodes.
*/
this.nodes = this._buildNodes(width, height, matrix);
}
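// Example (illustrative only): a 3x3 grid whose centre cell is blocked.
// 0 (or false) marks a walkable cell, anything truthy marks an obstacle.
//
//   var grid = new Grid(3, 3, [
//       [0, 0, 0],
//       [0, 1, 0],
//       [0, 0, 0]
//   ]);
//   grid.isWalkableAt(1, 1); // => false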
/**
* Build and return the nodes.
* @private
* @param {number} width
* @param {number} height
* @param {Array.<Array.<number|boolean>>} [matrix] - A 0-1 matrix representing
* the walkable status of the nodes.
* @see Grid
*/
Grid.prototype._buildNodes = function(width, height, matrix) {
var i, j,
nodes = new Array(height),
row;
for (i = 0; i < height; ++i) {
nodes[i] = new Array(width);
for (j = 0; j < width; ++j) {
nodes[i][j] = new Node(j, i, 0); // z == 0 in 2D
}
}
if (matrix === undefined) {
matrix = [];
for (i = 0; i < height; ++i) {
matrix.push([]);
for (j = 0; j < width; ++j) {
matrix[i][j] = 0; // 0 => walkable
}
}
this.matrix = matrix;
}
if (matrix.length !== height || matrix[0].length !== width) {
throw new Error('Matrix size does not fit');
}
for (i = 0; i < height; ++i) {
for (j = 0; j < width; ++j) {
if (!matrix[i][j]) { // 0 => walkable
var n = nodes[i][j];
// Add neighbors if they are walkable
if(i!=0 && !matrix[i-1][j]) n.neighbors.push(nodes[i-1][j]);
if(i!=height-1 && !matrix[i+1][j]) n.neighbors.push(nodes[i+1][j]);
if(j!=0 && !matrix[i][j-1]) n.neighbors.push(nodes[i][j-1]);
if(j!=width-1 && !matrix[i][j+1]) n.neighbors.push(nodes[i][j+1]);
}
}
}
return nodes;
};
Grid.prototype.getNodeAt = function(x, y) {
return this.nodes[y][x];
};
/**
* Determine whether the node at the given position is walkable.
* (Also returns false if the position is outside the grid.)
* @param {number} x - The x coordinate of the node.
* @param {number} y - The y coordinate of the node.
* @return {boolean} - The walkability of the node.
*/
Grid.prototype.isWalkableAt = function(x, y) {
return this.isInside(x, y) && this.matrix[y][x]==0;
};
/**
* Determine whether the position is inside the grid.
 * XXX: `grid.isInside(x, y)` is weird to read.
* It should be `(x, y) is inside grid`, but I failed to find a better
* name for this method.
* @param {number} x
* @param {number} y
* @return {boolean}
*/
Grid.prototype.isInside = function(x, y) {
return (x >= 0 && x < this.width) && (y >= 0 && y < this.height);
};
/**
* Set whether the node on the given position is walkable.
* NOTE: throws exception if the coordinate is not inside the grid.
* @param {number} x - The x coordinate of the node.
* @param {number} y - The y coordinate of the node.
* @param {boolean} walkable - Whether the position is walkable.
*/
Grid.prototype.setWalkableAt = function(x, y, walkable) {
//this.nodes[y][x].walkable = walkable;
this.matrix[y][x] = walkable ? 0 : 1;
this.nodes = this._buildNodes(this.width,this.height,this.matrix);
};
/**
* Get the neighbors of the given node.
*
* offsets diagonalOffsets:
* +---+---+---+ +---+---+---+
* | | 0 | | | 0 | | 1 |
* +---+---+---+ +---+---+---+
* | 3 | | 1 | | | | |
* +---+---+---+ +---+---+---+
* | | 2 | | | 3 | | 2 |
* +---+---+---+ +---+---+---+
*
* When allowDiagonal is true, if offsets[i] is valid, then
* diagonalOffsets[i] and
* diagonalOffsets[(i + 1) % 4] is valid.
* @param {Node} node
* @param {boolean} allowDiagonal
* @param {boolean} dontCrossCorners
*/
Grid.prototype.getNeighbors = function(node) {
return node.neighbors;
};
/**
* Get a clone of this grid.
* @return {Grid} Cloned grid.
*/
Grid.prototype.clone = function() {
var i, j,
width = this.width,
height = this.height,
thisNodes = this.nodes,
newGrid = new Grid(width, height),
row;
for (i = 0; i < height; ++i) {
for (j = 0; j < width; ++j) {
// Must use the Node objects generated by newGrid! Otherwise the pathfinding algos won't be able to compare endNode===someNode
var n = newGrid.getNodeAt(j,i);
var oldNode = this.getNodeAt(j,i);
n.neighbors = [];
for(var k=0; k<oldNode.neighbors.length; k++)
n.neighbors.push(newGrid.getNodeAt(oldNode.neighbors[k].x, oldNode.neighbors[k].y));
}
}
return newGrid;
};
module.exports = Grid;
<|start_filename|>src/finders/AStarFinderMinTurns.js<|end_filename|>
var Heap = require('../core/Heap');
var Util = require('../core/Util');
var Heuristic = require('../core/Heuristic');
/**
* A* path-finder that considers the turns made during the path.
* based upon https://github.com/bgrins/javascript-astar
* @constructor
* @param {object} opt
* @param {function} opt.heuristic Heuristic function to estimate the distance (defaults to manhattan).
* @param {integer} opt.weight Weight to apply to the heuristic to allow for suboptimal paths, in order to speed up the search.
 * @param {integer} opt.turnAngleWeight Weight to apply to the turn value, to make the algorithm take paths with fewer turns.
*/
function AStarFinderMinTurns(opt) {
opt = opt || {};
this.heuristic = opt.heuristic || Heuristic.manhattan;
this.weight = opt.weight || 1;
this.turnAngleWeight = opt.turnAngleWeight || 1;
}
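// Usage sketch (assumes a Grid built as in core/Grid.js): a higher
// turnAngleWeight penalises direction changes, trading extra length for
// straighter paths.
//
//   var finder = new AStarFinderMinTurns({ turnAngleWeight: 2 });
//   var path = finder.findPath(
//       grid.getNodeAt(0, 0), grid.getNodeAt(4, 4), grid.nodes);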
/**
 * Find and return the path.
* @return {Array.<[number, number]>} The path, including both start and
* end positions.
*/
AStarFinderMinTurns.prototype.findPath = function(startNode, endNode, nodes) {
var openList = new Heap(function(nodeA, nodeB) {
return nodeA.f - nodeB.f;
}),
heuristic = this.heuristic,
weight = this.weight,
turnAngleWeight = this.turnAngleWeight,
abs = Math.abs, SQRT2 = Math.SQRT2,
node, neighbors, neighbor, i, l, x, y, z, ng;
// set the `g` and `f` value of the start node to be 0
startNode.g = 0;
startNode.f = 0;
// push the start node into the open list
openList.push(startNode);
startNode.opened = true;
// while the open list is not empty
while (!openList.empty()) {
// pop the position of node which has the minimum `f` value.
node = openList.pop();
node.closed = true;
// if reached the end position, construct the path and return it
if (node === endNode) {
return Util.backtrace(endNode);
}
// get neighbors of the current node
neighbors = node.neighbors;
for (i = 0, l = neighbors.length; i < l; ++i) {
neighbor = neighbors[i];
if (neighbor.closed) {
continue;
}
x = neighbor.x;
y = neighbor.y;
z = neighbor.z;
// Get the angle between the current node line and the neighbor line
// cos(theta) = a.dot(b) / (len(a)*len(b))
var angle = 0;
if(node.parent){
var ax = x - node.x,
ay = y - node.y,
az = z - node.z,
bx = node.x - node.parent.x,
by = node.y - node.parent.y,
bz = node.z - node.parent.z;
var cosTheta = (ax*bx + ay*by + az*bz) /
    ( Math.sqrt(ax*ax + ay*ay + az*az) * Math.sqrt(bx*bx + by*by + bz*bz) );
// Clamp to [-1, 1] to guard against floating-point rounding before acos
angle = Math.abs( Math.acos( Math.min(1, Math.max(-1, cosTheta)) ) );
}
// get the distance between current node and the neighbor
// and calculate the next g score
ng = node.g + Math.sqrt(Math.pow(x - node.x,2) + Math.pow(y - node.y,2) + Math.pow(z-node.z,2)) + angle*turnAngleWeight;
// check if the neighbor has not been inspected yet, or
// can be reached with smaller cost from the current node
if (!neighbor.opened || ng < neighbor.g) {
neighbor.g = ng;
neighbor.h = neighbor.h || weight * heuristic(abs(x - endNode.x), abs(y - endNode.y), abs(z - endNode.z));
neighbor.f = neighbor.g + neighbor.h;
neighbor.parent = node;
if (!neighbor.opened) {
openList.push(neighbor);
neighbor.opened = true;
} else {
// the neighbor can be reached with smaller cost.
// Since its f value has been updated, we have to
// update its position in the open list
openList.updateItem(neighbor);
}
}
} // end for each neighbor
} // end while not open list empty
// fail to find the path
return [];
};
module.exports = AStarFinderMinTurns;
<|start_filename|>src/core/Node.js<|end_filename|>
var idCounter = 0;
/**
* A node in grid.
* This class holds some basic information about a node and custom
* attributes may be added, depending on the algorithms' needs.
* @constructor
* @param {number} x - The x coordinate of the node
* @param {number} y - The y coordinate of the node
* @param {number} z - The z coordinate of the node
*/
function Node(x, y, z) {
/**
* The x coordinate of the node on the grid.
* @type number
*/
this.x = x;
/**
* The y coordinate of the node on the grid.
* @type number
*/
this.y = y;
/**
* The z coordinate of the node on the grid.
* @type number
*/
this.z = z;
/**
* Neighboring nodes that are walkable from this node.
* @type array
*/
this.neighbors = [];
};
module.exports = Node;
<|start_filename|>visual/js/view.js<|end_filename|>
/**
* The pathfinding visualization.
* It uses raphael.js to show the grids.
*/
var View = {
nodeSize: 30, // width and height of a single node, in pixel
nodeStyle: {
normal: {
fill: 'white',
'stroke-opacity': 0.2, // the border
},
blocked: {
fill: 'grey',
'stroke-opacity': 0.2,
},
start: {
fill: '#0d0',
'stroke-opacity': 0.2,
},
end: {
fill: '#e40',
'stroke-opacity': 0.2,
},
opened: {
fill: '#98fb98',
'stroke-opacity': 0.2,
},
closed: {
fill: '#afeeee',
'stroke-opacity': 0.2,
},
failed: {
fill: '#ff8888',
'stroke-opacity': 0.2,
},
jumptest: {
fill: '#e5e5e5',
'stroke-opacity': 0.2,
},
},
nodeColorizeEffect: {
duration: 50,
},
nodeZoomEffect: {
duration: 200,
transform: 's1.2', // scale by 1.2x
transformBack: 's1.0',
},
pathStyle: {
stroke: 'yellow',
'stroke-width': 3,
},
supportedOperations: ['opened', 'closed', 'jumptest'],
init: function(opts) {
this.numCols = opts.numCols;
this.numRows = opts.numRows;
this.paper = Raphael('draw_area');
this.$stats = $('#stats');
},
/**
* Generate the grid asynchronously.
 * This method can be a very expensive task.
 * Therefore, in order not to block the rendering of the browser UI,
* I decomposed the task into smaller ones. Each will only generate a row.
*/
generateGrid: function(callback) {
var i, j, x, y,
rect,
createRowTask, sleep, tasks,
nodeSize = this.nodeSize,
normalStyle = this.nodeStyle.normal,
numCols = this.numCols,
numRows = this.numRows,
paper = this.paper,
rects = this.rects = [],
$stats = this.$stats;
paper.setSize(numCols * nodeSize, numRows * nodeSize);
createRowTask = function(rowId) {
return function(done) {
rects[rowId] = [];
for (j = 0; j < numCols; ++j) {
x = j * nodeSize;
y = rowId * nodeSize;
rect = paper.rect(x, y, nodeSize, nodeSize);
rect.attr(normalStyle);
rects[rowId].push(rect);
}
$stats.text(
'generating grid ' +
Math.round((rowId + 1) / numRows * 100) + '%'
);
done(null);
};
};
sleep = function(done) {
setTimeout(function() {
done(null);
}, 0);
};
tasks = [];
for (i = 0; i < numRows; ++i) {
tasks.push(createRowTask(i));
tasks.push(sleep);
}
async.series(tasks, function() {
if (callback) {
callback();
}
});
},
setStartPos: function(gridX, gridY) {
var coord = this.toPageCoordinate(gridX, gridY);
if (!this.startNode) {
this.startNode = this.paper.rect(
coord[0],
coord[1],
this.nodeSize,
this.nodeSize
).attr(this.nodeStyle.normal)
.animate(this.nodeStyle.start, 1000);
} else {
this.startNode.attr({ x: coord[0], y: coord[1] }).toFront();
}
},
setEndPos: function(gridX, gridY) {
var coord = this.toPageCoordinate(gridX, gridY);
if (!this.endNode) {
this.endNode = this.paper.rect(
coord[0],
coord[1],
this.nodeSize,
this.nodeSize
).attr(this.nodeStyle.normal)
.animate(this.nodeStyle.end, 1000);
} else {
this.endNode.attr({ x: coord[0], y: coord[1] }).toFront();
}
},
/**
* Set the attribute of the node at the given coordinate.
*/
setAttributeAt: function(gridX, gridY, attr, value) {
var color, nodeStyle = this.nodeStyle;
switch (attr) {
case 'walkable':
color = value ? nodeStyle.normal.fill : nodeStyle.blocked.fill;
this.setWalkableAt(gridX, gridY, value);
break;
case 'opened':
this.colorizeNode(this.rects[gridY][gridX], nodeStyle.opened.fill);
this.setCoordDirty(gridX, gridY, true);
break;
case 'closed':
this.colorizeNode(this.rects[gridY][gridX], nodeStyle.closed.fill);
this.setCoordDirty(gridX, gridY, true);
break;
case 'jumptest':
this.colorizeNode(this.rects[gridY][gridX], nodeStyle.jumptest.fill);
this.setCoordDirty(gridX, gridY, true);
break;
case 'parent':
// XXX: Maybe draw a line from this node to its parent?
// This would be expensive.
break;
default:
console.error('unsupported operation: ' + attr + ':' + value);
return;
}
},
colorizeNode: function(node, color) {
node.animate({
fill: color
}, this.nodeColorizeEffect.duration);
},
zoomNode: function(node) {
node.toFront().attr({
transform: this.nodeZoomEffect.transform,
}).animate({
transform: this.nodeZoomEffect.transformBack,
}, this.nodeZoomEffect.duration);
},
setWalkableAt: function(gridX, gridY, value) {
var node, i, blockedNodes = this.blockedNodes;
if (!blockedNodes) {
blockedNodes = this.blockedNodes = new Array(this.numRows);
for (i = 0; i < this.numRows; ++i) { // one entry per grid row (indexed by gridY)
blockedNodes[i] = [];
}
}
node = blockedNodes[gridY][gridX];
if (value) {
// clear blocked node
if (node) {
this.colorizeNode(node, this.rects[gridY][gridX].attr('fill'));
this.zoomNode(node);
setTimeout(function() {
node.remove();
}, this.nodeZoomEffect.duration);
blockedNodes[gridY][gridX] = null;
}
} else {
// draw blocked node
if (node) {
return;
}
node = blockedNodes[gridY][gridX] = this.rects[gridY][gridX].clone();
this.colorizeNode(node, this.nodeStyle.blocked.fill);
this.zoomNode(node);
}
},
clearFootprints: function() {
var i, x, y, coord, coords = this.getDirtyCoords();
for (i = 0; i < coords.length; ++i) {
coord = coords[i];
x = coord[0];
y = coord[1];
this.rects[y][x].attr(this.nodeStyle.normal);
this.setCoordDirty(x, y, false);
}
},
clearBlockedNodes: function() {
var i, j, blockedNodes = this.blockedNodes;
if (!blockedNodes) {
return;
}
for (i = 0; i < this.numRows; ++i) {
for (j = 0 ;j < this.numCols; ++j) {
if (blockedNodes[i][j]) {
blockedNodes[i][j].remove();
blockedNodes[i][j] = null;
}
}
}
},
drawPath: function(path) {
if (!path.length) {
return;
}
var svgPath = this.buildSvgPath(path);
this.path = this.paper.path(svgPath).attr(this.pathStyle);
},
/**
* Given a path, build its SVG represention.
*/
buildSvgPath: function(path) {
var i, strs = [], size = this.nodeSize;
strs.push('M' + (path[0][0] * size + size / 2) + ' ' +
(path[0][1] * size + size / 2));
for (i = 1; i < path.length; ++i) {
strs.push('L' + (path[i][0] * size + size / 2) + ' ' +
(path[i][1] * size + size / 2));
}
return strs.join('');
},
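// Worked example: with the default nodeSize of 30, the path
// [[0, 0], [1, 0], [1, 1]] is centred in each cell and serialized as
// 'M15 15L45 15L45 45'.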
clearPath: function() {
if (this.path) {
this.path.remove();
}
},
/**
* Helper function to convert the page coordinate to grid coordinate
*/
toGridCoordinate: function(pageX, pageY) {
return [
Math.floor(pageX / this.nodeSize),
Math.floor(pageY / this.nodeSize)
];
},
/**
* helper function to convert the grid coordinate to page coordinate
*/
toPageCoordinate: function(gridX, gridY) {
return [
gridX * this.nodeSize,
gridY * this.nodeSize
];
},
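// Worked example with the default nodeSize of 30:
//   toGridCoordinate(95, 35)  // => [3, 1]
//   toPageCoordinate(3, 1)    // => [90, 30]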
showStats: function(opts) {
var texts = [
'length: ' + Math.round(opts.pathLength * 100) / 100,
'time: ' + opts.timeSpent + 'ms',
'operations: ' + opts.operationCount
];
$('#stats').show().html(texts.join('<br>'));
},
setCoordDirty: function(gridX, gridY, isDirty) {
var x, y,
numRows = this.numRows,
numCols = this.numCols,
coordDirty;
if (this.coordDirty === undefined) {
coordDirty = this.coordDirty = [];
for (y = 0; y < numRows; ++y) {
coordDirty.push([]);
for (x = 0; x < numCols; ++x) {
coordDirty[y].push(false);
}
}
}
this.coordDirty[gridY][gridX] = isDirty;
},
getDirtyCoords: function() {
var x, y,
numRows = this.numRows,
numCols = this.numCols,
coordDirty = this.coordDirty,
coords = [];
if (coordDirty === undefined) {
return [];
}
for (y = 0; y < numRows; ++y) {
for (x = 0; x < numCols; ++x) {
if (coordDirty[y][x]) {
coords.push([x, y]);
}
}
}
return coords;
},
};
| gbaiden/a |
<|start_filename|>src/glb_viewer.js<|end_filename|>
import {ArcballCamera} from "arcball_camera";
import {Controller} from "ez_canvas_controller";
import {mat4, vec3} from "gl-matrix";
import {uploadGLBModel} from "./glb_import.js";
import {GLBShaderCache} from "./glb_shader_cache.js";
(async () => {
if (navigator.gpu === undefined) {
document.getElementById("webgpu-canvas").setAttribute("style", "display:none;");
document.getElementById("no-webgpu").setAttribute("style", "display:block;");
return;
}
var adapter = await navigator.gpu.requestAdapter();
if (!adapter) {
document.getElementById("webgpu-canvas").setAttribute("style", "display:none;");
document.getElementById("no-webgpu").setAttribute("style", "display:block;");
return;
}
var device = await adapter.requestDevice();
var glbFile =
await fetch(
"https://www.dl.dropboxusercontent.com/s/7ndj8pfjhact7lz/DamagedHelmet.glb?dl=1")
.then(res => res.arrayBuffer().then(buf => uploadGLBModel(buf, device)));
var canvas = document.getElementById("webgpu-canvas");
var context = canvas.getContext("webgpu");
var swapChainFormat = "bgra8unorm";
context.configure(
{device: device, format: swapChainFormat, usage: GPUTextureUsage.RENDER_ATTACHMENT});
var depthTexture = device.createTexture({
size: {width: canvas.width, height: canvas.height, depth: 1},
format: "depth24plus-stencil8",
usage: GPUTextureUsage.RENDER_ATTACHMENT
});
var renderPassDesc = {
colorAttachments: [{view: undefined, loadValue: [0.3, 0.3, 0.3, 1]}],
depthStencilAttachment: {
view: depthTexture.createView(),
depthLoadValue: 1,
depthStoreOp: "store",
stencilLoadValue: 0,
stencilStoreOp: "store"
}
};
var viewParamsLayout = device.createBindGroupLayout({
entries: [{binding: 0, visibility: GPUShaderStage.VERTEX, buffer: {type: "uniform"}}]
});
var viewParamBuf = device.createBuffer(
{size: 4 * 4 * 4, usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST});
var viewParamsBindGroup = device.createBindGroup(
{layout: viewParamsLayout, entries: [{binding: 0, resource: {buffer: viewParamBuf}}]});
var shaderCache = new GLBShaderCache(device);
var renderBundles = glbFile.buildRenderBundles(
device, shaderCache, viewParamsLayout, viewParamsBindGroup, swapChainFormat);
const defaultEye = vec3.set(vec3.create(), 0.0, 0.0, 1.0);
const center = vec3.set(vec3.create(), 0.0, 0.0, 0.0);
const up = vec3.set(vec3.create(), 0.0, 1.0, 0.0);
var camera = new ArcballCamera(defaultEye, center, up, 2, [canvas.width, canvas.height]);
var proj = mat4.perspective(
mat4.create(), 50 * Math.PI / 180.0, canvas.width / canvas.height, 0.1, 1000);
var projView = mat4.create();
var controller = new Controller();
controller.mousemove = function(prev, cur, evt) {
if (evt.buttons == 1) {
camera.rotate(prev, cur);
} else if (evt.buttons == 2) {
camera.pan([cur[0] - prev[0], prev[1] - cur[1]]);
}
};
controller.wheel = function(amt) {
camera.zoom(amt * 0.5);
};
controller.pinch = controller.wheel;
controller.twoFingerDrag = function(drag) {
camera.pan(drag);
};
controller.registerForCanvas(canvas);
var animationFrame = function() {
var resolve = null;
var promise = new Promise(r => resolve = r);
window.requestAnimationFrame(resolve);
return promise
};
requestAnimationFrame(animationFrame);
// Setup onchange listener for file uploads
var glbBuffer = null;
document.getElementById("uploadGLB").onchange =
function uploadGLB() {
var reader = new FileReader();
reader.onerror = function() {
alert("error reading GLB file");
};
reader.onload = function() {
glbBuffer = reader.result;
};
reader.readAsArrayBuffer(this.files[0]);
}
var fpsDisplay = document.getElementById("fps");
var numFrames = 0;
var totalTimeMS = 0;
while (true) {
await animationFrame();
if (glbBuffer != null) {
glbFile = await uploadGLBModel(glbBuffer, device);
renderBundles = glbFile.buildRenderBundles(
device, shaderCache, viewParamsLayout, viewParamsBindGroup, swapChainFormat);
camera =
new ArcballCamera(defaultEye, center, up, 2, [canvas.width, canvas.height]);
glbBuffer = null;
}
var start = performance.now();
renderPassDesc.colorAttachments[0].view = context.getCurrentTexture().createView();
var commandEncoder = device.createCommandEncoder();
projView = mat4.mul(projView, proj, camera.camera);
var upload = device.createBuffer({
size: 4 * 4 * 4,
usage: GPUBufferUsage.MAP_WRITE | GPUBufferUsage.COPY_SRC,
mappedAtCreation: true
});
new Float32Array(upload.getMappedRange()).set(projView);
upload.unmap();
commandEncoder.copyBufferToBuffer(upload, 0, viewParamBuf, 0, 4 * 4 * 4);
var renderPass = commandEncoder.beginRenderPass(renderPassDesc);
renderPass.executeBundles(renderBundles);
renderPass.endPass();
device.queue.submit([commandEncoder.finish()]);
await device.queue.onSubmittedWorkDone();
var end = performance.now();
numFrames += 1;
totalTimeMS += end - start;
fpsDisplay.innerHTML = `Avg. FPS ${Math.round(1000.0 * numFrames / totalTimeMS)}`;
}
})();
<|start_filename|>src/glb_shader_cache.js<|end_filename|>
export class GLBShaderCache {
constructor(device)
{
this.device = device;
this.shaderCache = {};
}
getShader(hasNormals, hasUVs, hasColorTexture)
{
var shaderID = "glb";
if (hasNormals) {
shaderID += "n";
}
if (hasUVs) {
shaderID += "uv";
}
if (hasColorTexture) {
shaderID += "colortex";
}
if (!(shaderID in this.shaderCache)) {
var shaderSource = generateGLTFShader(hasNormals, hasUVs, hasColorTexture);
this.shaderCache[shaderID] = this.device.createShaderModule({code: shaderSource});
}
return this.shaderCache[shaderID];
}
}
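// Cache-key note (derived from getShader above): the key is "glb" plus a
// suffix per enabled feature, so each shader permutation is compiled once and
// reused afterwards. Example, assuming `device` is a GPUDevice:
//
//   var cache = new GLBShaderCache(device);
//   cache.getShader(true, true, false);  // compiled, cached under "glbnuv"
//   cache.getShader(true, true, false);  // returned from the cache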
function generateGLTFShader(hasNormals, hasUVs, hasColorTexture)
{
var typeDefs =
`
type float2 = vec2<f32>;
type float3 = vec3<f32>;
type float4 = vec4<f32>;
`;
var vertexInputStruct =
`
struct VertexInput {
[[location(0)]] position: float3;
`;
var vertexOutputStruct =
`
struct VertexOutput {
[[builtin(position)]] position: float4;
`;
if (hasNormals) {
vertexInputStruct +=
`
[[location(1)]] normal: float3;
`;
vertexOutputStruct +=
`
[[location(1)]] normal: float3;
`;
}
if (hasUVs) {
vertexInputStruct +=
`
[[location(2)]] uv: float2;
`;
vertexOutputStruct +=
`
[[location(2)]] uv: float2;
`;
}
vertexInputStruct += '};';
vertexOutputStruct += '};';
var vertexUniformParams =
`
struct Mat4Uniform {
m: mat4x4<f32>;
};
[[group(0), binding(0)]]
var<uniform> view_proj: Mat4Uniform;
[[group(1), binding(0)]]
var<uniform> node_transform: Mat4Uniform;
`;
var vertexStage = vertexInputStruct + vertexOutputStruct + vertexUniformParams +
`
[[stage(vertex)]]
fn vertex_main(vin: VertexInput) -> VertexOutput {
var vout: VertexOutput;
vout.position = view_proj.m * node_transform.m * float4(vin.position, 1.0);
`;
if (hasNormals) {
vertexStage +=
`
vout.normal = vin.normal;
`;
}
if (hasUVs) {
vertexStage +=
`
vout.uv = vin.uv;
`;
}
vertexStage +=
`
return vout;
}`;
var fragmentParams =
`
struct MaterialParams {
base_color_factor: float4;
emissive_factor: float4;
metallic_factor: f32;
roughness_factor: f32;
};
[[group(2), binding(0)]]
var<uniform> material: MaterialParams;
`;
if (hasColorTexture) {
fragmentParams +=
`
[[group(2), binding(1)]]
var base_color_sampler: sampler;
[[group(2), binding(2)]]
var base_color_texture: texture_2d<f32>;
`;
}
var fragmentStage = fragmentParams +
`
fn linear_to_srgb(x: f32) -> f32 {
if (x <= 0.0031308) {
return 12.92 * x;
}
return 1.055 * pow(x, 1.0 / 2.4) - 0.055;
}
[[stage(fragment)]]
fn fragment_main(fin: VertexOutput) -> [[location(0)]] float4 {
var color = float4(material.base_color_factor.xyz, 1.0);
`;
if (hasUVs && hasColorTexture) {
fragmentStage +=
`
var texture_color = textureSample(base_color_texture, base_color_sampler, fin.uv);
if (texture_color.a < 0.001) {
discard;
}
color = float4(material.base_color_factor.xyz * texture_color.xyz, 1.0);
`;
}
fragmentStage +=
`
color.x = linear_to_srgb(color.x);
color.y = linear_to_srgb(color.y);
color.z = linear_to_srgb(color.z);
color.w = 1.0;
return color;
}
`;
return typeDefs + vertexStage + fragmentStage;
}
| Twinklebear/webgpu-gltf |
<|start_filename|>src/@rocketseat/gatsby-theme-docs/styles/theme.js<|end_filename|>
export default {
colors: {
primary: '#F5F5DC',
background: '#141414',
shape: `#4d4d4d`,
title: `#FFFFFF`,
text: `#FFFFFF`,
components: {
blockquote: {
background: `#feebc8`,
text: `#2d3748`,
},
},
},
};
| 0xsunil/balajis-almanack |
<|start_filename|>model/RNN.lua<|end_filename|>
local RNN = {}
function RNN.rnn(input_size, rnn_size, n)
-- there are n+1 inputs (hiddens on each layer and x)
local inputs = {}
table.insert(inputs, nn.Identity()()) -- x
for L = 1,n do
table.insert(inputs, nn.Identity()()) -- prev_h[L]
end
local x, input_size_L
local outputs = {}
for L = 1,n do
local prev_h = inputs[L+1]
if L == 1 then x = inputs[1] else x = outputs[L-1] end
if L == 1 then input_size_L = input_size else input_size_L = rnn_size end
-- RNN tick
local i2h = nn.Linear(input_size_L, rnn_size)(x)
local h2h = nn.Linear(rnn_size, rnn_size)(prev_h)
local next_h = nn.Tanh()(nn.CAddTable(){i2h, h2h})
table.insert(outputs, next_h)
end
return nn.gModule(inputs, outputs)
end
return RNN
| dwhit/recipes |
<|start_filename|>module/video_group.js<|end_filename|>
// Video links
module.exports = (query, request) => {
const data = {
groupId: query.id,
offset: query.offset || 0,
needUrl: true,
resolution: query.res || 1080
}
return request(
'POST', `https://music.163.com/weapi/videotimeline/videogroup/get`, data, {
crypto: 'weapi',
cookie: query.cookie,
proxy: query.proxy
}
)
}
| hty101101/API |
<|start_filename|>test/EventTarget.spec.js<|end_filename|>
/**
* Created by mgobbi on 05/04/2017.
*/
import {EventDispatcher} from "../src/events/EventTarget";
var assert = require("chai").assert;
describe('EventTarget', function () {
before(function () {
this.jsdom = require('jsdom-global')()
})
after(function () {
this.jsdom()
})
var Event, CustomEvent, dispatcher;
beforeEach(function () {
Event = window.Event;
CustomEvent = window.CustomEvent;
dispatcher = new EventDispatcher();
})
it('dispatcher: it is a object', function () {
assert.isObject(dispatcher);
});
it('returns an object with the exposed functions', function () {
assert.isFunction(dispatcher.addEventListener);
assert.isFunction(dispatcher.removeEventListener);
assert.isFunction(dispatcher.dispatchEvent);
});
it('dispatchEvent dispatches correctly to 1 listener', function () {
var params = [12345, 'text', {a: 1}];
dispatcher.addEventListener("a", _ => assert.ok("ok"));
dispatcher.dispatchEvent(new Event("a"));
dispatcher.addEventListener("b", e => assert.equal(e.detail, params, "ok"));
dispatcher.dispatchEvent(new CustomEvent("b", {detail: params}));
});
it('dispatcher dispatches correctly to multiple listeners', function () {
var params = [12345, 'text', {a: 2}];
dispatcher.addEventListener("b", e => {
var a = 0;
assert.equal(e.detail, params)
});
dispatcher.addEventListener("b", e => {
var a = 1;
assert.equal(e.detail, params)
});
dispatcher.addEventListener("b", e => {
var a = 2;
assert.equal(e.detail, params)
});
dispatcher.addEventListener("b", e => assert.equal(e.detail, params));
dispatcher.dispatchEvent(new CustomEvent("b", {detail: params}));
});
it('dispatcher preventDefault', function () {
dispatcher.addEventListener("b", e => {
return false;
});
dispatcher.addEventListener("a", e => {
});
assert.isFalse(dispatcher.dispatchEvent(new Event("b")));
assert.isTrue(dispatcher.dispatchEvent(new Event("a")));
});
it('addEventListener accepts a Listener object', function () {
var listener = {
prop: "prop",
handleEvent: function (e) {
assert.ok("passa per il listener");
assert.equal(listener, this, "non mantiene il contesto");
assert.equal(listener.prop, this.prop, "non è lo stesso oggetto");
assert.equal(e.type, "a");
assert.equal(e.target, dispatcher, "non è lo stesso target")
}
};
dispatcher.addEventListener("a", listener);
dispatcher.dispatchEvent(new Event("a"));
});
it('dispatcher connects the same listener only once', function () {
let i = 0;
const listener = e => i = i + 1;
dispatcher.addEventListener("b", listener);
dispatcher.addEventListener("b", listener);
dispatcher.addEventListener("b", listener);
dispatcher.addEventListener("b", listener);
dispatcher.dispatchEvent(new Event("b"));
assert.equal(i, 1, "");
});
it('removeEventListener correctly removes the listener', function () {
let i = 0;
const listener = e => i = i + 1;
dispatcher.addEventListener("b", listener);
dispatcher.removeEventListener("b", listener);
dispatcher.dispatchEvent(new Event("b"));
assert.equal(i, 0, "");
});
it('removeEventListener: there are no listeners left', function () {
let i = 0;
const listener = e => i = i + 1;
dispatcher.removeEventListener("b", listener);
dispatcher.dispatchEvent(new Event("b"));
assert.equal(i, 0, "");
});
it('removeEventListener: that listener is not registered', function () {
let i = 0;
const listener = e => i = i + 1;
dispatcher.addEventListener("b", listener);
dispatcher.removeEventListener("b", function () {
});
dispatcher.dispatchEvent(new Event("b"));
assert.equal(i, 1, "");
});
it('removeEventListener with no listeners of that type', function () {
let i = 0;
const listener = e => i = i + 1;
dispatcher.addEventListener("a", listener);
dispatcher.removeEventListener("b", listener);
dispatcher.dispatchEvent(new Event("a"));
assert.equal(i, 1, "");
});
it('removeEventListener with multiple listeners', function () {
let i = 0;
const listener = e => i = i + 1;
dispatcher.addEventListener("b", listener);
dispatcher.addEventListener("b", _ => 3);
dispatcher.addEventListener("b", _ => _);
dispatcher.removeEventListener("b", listener);
dispatcher.dispatchEvent(new Event("b"));
assert.equal(i, 0, "");
});
});
<|start_filename|>test/CustomElementHandler.spec.js<|end_filename|>
/**
* Created by mgobbi on 05/04/2017.
*/
import {AHandler} from "../src/display/AHandler";
import {EventDispatcher} from "../src/events/EventTarget";
import {Disposable} from "../src/display/Disposable";
import _noop from "../src/internal/_noop";
import {CustomElementHandler} from "../src/display/CustomElementHandler";
var assert = require("chai").assert;
describe('CustomElementHandler', function () {
before(function () {
this.jsdom = require('jsdom-global')()
});
after(function () {
this.jsdom()
});
function Mediator() {
}
function MediatorB() {
}
let handler, definitions = {"my-component": Mediator, "my-component-b": MediatorB};
beforeEach(() => {
handler = new CustomElementHandler({definitions});
});
it('CustomElementHandler is an instance of AHandler', function () {
assert.instanceOf(handler, AHandler);
});
it('handler.selector,handler.REGISTERED_ELEMENTS', function () {
assert.isObject(handler.REGISTERED_ELEMENTS, "REGISTERED_ELEMENTS is not a {}");
assert.instanceOf(handler.dispatcher, EventDispatcher, "dispatcher is not an EventDispatcher");
assert.equal(handler.definitions, definitions, "definitions does not match the provided definitions");
});
it('handler.getDefinition', function () {
const div = document.createElement("my-component");
assert.equal(handler.getDefinition(div), Mediator)
});
it('handler.inCache', function () {
handler.REGISTERED_ELEMENTS["my-component"] = true;
assert.isTrue(handler.inCache("my-component"));
assert.isFalse(handler.inCache("my-component-b"));
});
it('handler.updateCache', function () {
handler.updateCache("my-component");
assert.isTrue(handler.REGISTERED_ELEMENTS["my-component"]);
});
it('handler.hasMediator', function () {
const div = document.createElement("my-component");
assert.isTrue(handler.hasMediator(div));
handler.updateCache("my-component");
assert.isFalse(handler.hasMediator(div));
const divB = document.createElement("my-component-b");
assert.isTrue(handler.hasMediator(divB));
});
//FIXME https://github.com/jsdom/jsdom/issues/1030
it('handler.create', function () {
let div = document.createElement("my-component");
let div2 = document.createElement("my-component");
let div3 = document.createElement("div");
window.HTMLElement=function HTMLElement() {
}
window.customElements = {
define(tagName, Clazz) {
assert.equal(tagName, "my-component");
const instance = new Clazz();
assert.instanceOf(instance, MyMediator);
assert.equal(instance.dispatcher, handler.dispatcher);
}
};
class MyMediator extends window.HTMLElement {
constructor(dispatcher) {
super();
this.dispatcher = dispatcher;
}
};
handler.create(div, MyMediator);
handler.create(div2, MyMediator);
assert.isTrue(handler.REGISTERED_ELEMENTS["my-component"]);
assert.throws(()=>{
handler.create(div3,MyMediator)
},Error);
});
it('handler.getAllElements', function () {
let container1 = document.createElement("div");
let container2 = document.createElement("my-component");
let div = document.createElement("my-component-b");
let div2 = document.createElement("my-component");
container1.appendChild(div);
div.appendChild(div2);
let childrenNoRoot=handler.getAllElements(container1);
assert.sameMembers(childrenNoRoot,[div,div2]);
container2.appendChild(div);
let childrenRoot=handler.getAllElements(container2);
assert.sameMembers(childrenRoot,[container2,div,div2]);
});
});
<|start_filename|>sample/redux-like/client/todo-add.js<|end_filename|>
/**
* Created by mgobbi on 12/12/2016.
*/
define(function (require) {
var Actions = require("./actions");
return function (node, dispatcher) {
var input = node.querySelector("input");
var button = node.querySelector("button");
button.addEventListener("click", function (e) {
dispatcher.dispatchEvent(new CustomEvent(Actions.ADD_TODO, {detail: input.value}));
})
};
});
<|start_filename|>sample/custom-element/client/my-counter.js<|end_filename|>
/**
* Created by mgobbi on 03/02/2016.
*/
define(function () {
function Module(dispatcher) {
this.dispatcher=dispatcher;
this.count = 0;
}
Module.prototype = Object.create(HTMLElement.prototype);
Module.prototype.constructor = Module;
Object.assign(Module.prototype , {
update: function () {
this.count++;
this.innerHTML = this.count;
},
connectedCallback: function () {
this.innerHTML = this.count;
this.dispatcher.addEventListener("create-element", function (e) {
var thumb_id = e.detail;
var id = this.getAttribute("data-id");
if (thumb_id === id) {
this.update();
}
}.bind(this))
},
disconnectedCallback: function () {
}
})
return Module;
});
<|start_filename|>sample/custom-element/client/definitions.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
define(function () {
"use strict";
return {
"components-library": "./client/components-library",
"my-thumbnail": "./client/my-thumbnail",
"my-template": "./client/my-template",
"my-counter": "./client/my-counter",
"autocomplete-element": "./client/autocomplete-element",
"bar-element": "./client/bar-element",
"maps-element": "./client/maps-element"
}
});
<|start_filename|>src/display/Robo.js<|end_filename|>
/* @flow */
import {DomWatcher} from "./DomWatcher";
import {AMDLoader} from "../net/Loader";
import {MediatorHandler} from "./MediatorHandler";
import flatten from "../internal/_flatten";
export class Robo {
constructor(options) {
let {definitions, loader = new AMDLoader(), root = document.body} = options;
this.definitions = definitions;
this.loader = loader;
this.root = root;
this.handler = options.handler || new MediatorHandler({definitions});
this.watcher = options.watcher || new DomWatcher(root, this.handler);
this.watcher.onAdded.connect(this.getMediators.bind(this));
this.watcher.onRemoved.connect(this.removeMediators.bind(this));
this.init();
}
init() {
const nodes= [this.root].map(this.handler.getAllElements.bind(this.handler));
this.promise = this.getMediators(nodes);
}
getMediators(nodes) {
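// Flatten the incoming node lists, keep only nodes that have a definition and
// no mediator yet, then load each module and create (and cache) its mediator.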
nodes = flatten(nodes);
const promises = nodes.filter(this.handler.hasMediator.bind(this.handler))
.map(node => {
const definition = this.handler.getDefinition(node);
return this.loader.load(definition)
.then(Mediator => this.handler.create(node, Mediator));
});
return Promise.all(promises);
}
removeMediators(nodes) {
nodes.forEach(this.handler.destroy.bind(this.handler));
}
dispose() {
this.watcher.dispose();
this.handler.dispose();
this.watcher = null;
this.handler = null;
this.definitions = null;
this.loader = null;
this.root = null;
this.promise = null;
}
}
<|start_filename|>sample/systemjs/client/definitions.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
module.exports = {
"my-custom-element": "client/my-custom-element",
"foo-element": "client/foo-element",
"bar-element": "client/bar-element"
}
<|start_filename|>sample/systemjs/client/my-custom-element.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
function handler(e) {
e.currentTarget.parentNode.removeChild(e.currentTarget)
}
function Mediator(node) {
node.addEventListener("click", handler, false);
return function () {
node.removeEventListener("click", handler)
}
}
module.exports = Mediator;
<|start_filename|>src/display/DomWatcher.js<|end_filename|>
import {Signal} from "../events/Signal";
import flatten from "../internal/_flatten";
import unique from "../internal/_unique";
export class DomWatcher {
constructor(root, handler) {
this.onAdded = new Signal();
this.onRemoved = new Signal();
this.root = root;
this.handler = handler;
this.init();
}
init() {
this.observer = new MutationObserver(this.handleMutations.bind(this));
this.observer.observe(this.root, {
attributes: false,//true
childList: true,
characterData: false,
subtree: true
});
}
handleMutations(mutations) {
mutations.forEach(mutation => {
this.updateNodes(mutation.removedNodes, this.onRemoved);
this.updateNodes(mutation.addedNodes, this.onAdded);
});
}
_parseNodes(nodes) {
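// Flatten the mutation's NodeList, keep element nodes only, expand each to the
// mediator-able elements it contains, then flatten and de-duplicate the result.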
nodes = flatten(nodes);
nodes = nodes.filter(node => node.querySelectorAll)
.map(this.handler.getAllElements.bind(this.handler))
.filter(nodes => nodes.length > 0);
nodes = flatten(nodes);
nodes = unique(nodes);
return nodes;
}
updateNodes(nodes, signal) {
nodes = this._parseNodes(nodes);
if (nodes.length > 0) {
signal.emit(nodes);
}
}
dispose() {
this.observer.disconnect();
this.onAdded.disconnectAll();
this.onRemoved.disconnectAll();
this.observer = null;
this.onAdded = null;
this.onRemoved = null;
}
}
<|start_filename|>src/events/Signal.js<|end_filename|>
export class Signal {
constructor() {
this.listenerBoxes = [];
this.listenersNeedCloning = false;
}
getNumListeners() {
return this.listenerBoxes.length;
}
connect(slot, scope) {
this.registerListener(slot, scope, false);
}
connectOnce(slot, scope) {
this.registerListener(slot, scope, true);
}
disconnect(slot, scope) {
if (this.listenersNeedCloning) {
this.listenerBoxes = this.listenerBoxes.slice();
this.listenersNeedCloning = false;
}
for (let i = this.listenerBoxes.length; i--;) {
if (this.listenerBoxes[i].listener === slot && this.listenerBoxes[i].scope === scope) {
this.listenerBoxes.splice(i, 1);
return;
}
}
}
disconnectAll() {
for (let i = this.listenerBoxes.length; i--;) {
this.disconnect(this.listenerBoxes[i].listener, this.listenerBoxes[i].scope);
}
}
emit(...args) {
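// Mark the list as in use so that a disconnect() triggered from inside a
// listener clones listenerBoxes instead of mutating the array being iterated.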
this.listenersNeedCloning = true;
this.listenerBoxes.forEach(({scope, listener, once}) => {
if (once) {
this.disconnect(listener, scope);
}
listener.apply(scope, args);
});
this.listenersNeedCloning = false;
}
registerListener(listener, scope, once) {
const _listeners = this.listenerBoxes.filter(box => box.listener === listener && box.scope === scope);
if (!_listeners.length) {
if (this.listenersNeedCloning) {
this.listenerBoxes = this.listenerBoxes.slice();
}
this.listenerBoxes.push({listener, scope, once});
} else {
//
const addOnce_add = _listeners.find(box => box.once && !once);
const add_addOnce = _listeners.find(box => once && !box.once);
if (addOnce_add) {
throw new Error("You cannot addOnce() then try to add() the same listener " +
"without removing the relationship first.");
}
if (add_addOnce) {
throw new Error("You cannot add() then addOnce() the same listener " +
"without removing the relationship first.");
}
}
}
}
<|start_filename|>test/MediatorHandler.spec.js<|end_filename|>
/**
* Created by mgobbi on 05/04/2017.
*/
import {MediatorHandler} from "../src/display/MediatorHandler";
import {AHandler} from "../src/display/AHandler";
import {EventDispatcher} from "../src/events/EventTarget";
import {Disposable} from "../src/display/Disposable";
import _noop from "../src/internal/_noop";
var assert = require("chai").assert;
describe('MediatorHandler', function () {
before(function () {
this.jsdom = require('jsdom-global')()
});
after(function () {
this.jsdom()
});
function Mediator() {
}
function MediatorB() {
}
let handler, definitions = {a: Mediator, b: MediatorB};
beforeEach(() => {
handler = new MediatorHandler({definitions});
});
it('AHandler is an instance of AHandler', function () {
const ahandler = new AHandler({definitions});
assert.instanceOf(ahandler, AHandler);
assert.instanceOf(ahandler.dispatcher, EventDispatcher, "dispatcher is not an EventDispatcher");
assert.isUndefined(ahandler.getDefinition());
assert.isUndefined(ahandler.inCache());
assert.isUndefined(ahandler.updateCache());
assert.isUndefined(ahandler.hasMediator());
assert.isUndefined(ahandler.create());
assert.isUndefined(ahandler.getAllElements());
assert.isUndefined(ahandler.destroy());
assert.isUndefined(ahandler.dispose());
});
it('MediatorHandler is an instance of AHandler', function () {
assert.instanceOf(handler, AHandler);
});
it('handler.selector,handler.MEDIATORS_CACHE', function () {
assert.equal(handler.selector, "data-mediator", "selector is not data-mediator");
assert.lengthOf(handler.MEDIATORS_CACHE, 0, "MEDIATORS_CACHE is not an empty []");
assert.instanceOf(handler.dispatcher, EventDispatcher, "dispatcher is not an EventDispatcher");
assert.equal(handler.definitions, definitions, "definitions does not match the provided definitions");
});
it('handler.getDefinition', function () {
const div = document.createElement("div");
div.dataset.mediator = "a";
assert.equal(handler.getDefinition(div), Mediator)
});
it('handler.inCache', function () {
const div = document.createElement("div");
const div2 = document.createElement("div");
div.dataset.mediator = "a";
const disposable = new Disposable({node: div, dispose: _ => _, mediatorId: "123"});
handler.MEDIATORS_CACHE.push(disposable);
assert.isTrue(handler.inCache(div));
assert.isFalse(handler.inCache(div2));
});
it('handler.updateCache', function () {
const div = document.createElement("div");
div.dataset.mediator = "a";
const disposable = new Disposable({node: div, dispose: _ => _, mediatorId: "123"});
handler.updateCache(disposable);
assert.lengthOf(handler.MEDIATORS_CACHE, 1);
assert.equal(handler.MEDIATORS_CACHE[0], disposable);
});
it('handler.hasMediator', function () {
const div = document.createElement("div");
div.dataset.mediator = "a";
assert.isTrue(handler.hasMediator(div));
const disposable = new Disposable({node: div, dispose: _ => _, mediatorId: "123"});
handler.updateCache(disposable);
assert.isFalse(handler.hasMediator(div));
const divB = document.createElement("div");
divB.dataset.mediator = "b";
assert.isTrue(handler.hasMediator(divB));
});
it('handler.create', function () {
const container = document.createElement("div");
let div = document.createElement("div");
div.dataset.mediator = "a";
const dispose = _ => {
};
const Mediator = _ => dispose;
//without a parentNode
let disposable = handler.create(div, Mediator);
assert.equal(disposable.mediatorId, div.getAttribute("mediatorid"));
assert.equal(disposable.dispose, _noop);
assert.equal(disposable.node, div);
assert.equal(handler.MEDIATORS_CACHE[0], disposable);
//with a parentNode
const div2 = document.createElement("div");
div2.dataset.mediator = "b";
container.appendChild(div2);
disposable = handler.create(div2, Mediator);
assert.equal(disposable.mediatorId, div2.getAttribute("mediatorid"));
assert.equal(disposable.dispose, dispose);
assert.equal(disposable.node, div2);
assert.equal(handler.MEDIATORS_CACHE[1], disposable);
});
it('handler.getAllElements', function () {
const container = document.createElement("div");
let div = document.createElement("div");
div.dataset.mediator = "a";
const div2 = document.createElement("div");
div2.dataset.mediator = "b";
container.appendChild(div);
container.appendChild(div2);
let nodes = handler.getAllElements(container);
assert.equal(nodes[0], div);
assert.equal(nodes[1], div2);
//including the container
container.dataset.mediator = "c";
nodes = handler.getAllElements(container);
assert.equal(nodes[0], container);
assert.equal(nodes[1], div);
assert.equal(nodes[2], div2);
});
it('MediatorHandler.disposeMediator', function () {
const div = document.createElement("div");
div.dataset.mediator = "a";
const dispose = () => {
assert.ok("il dispose viene chiamato")
};
const disposable = new Disposable({node: div, dispose, mediatorId: "123"});
MediatorHandler.disposeMediator(disposable);
assert.isNull(disposable.node)
});
it('handler._destroy', function () {
});
it('handler.destroy', function () {
});
it('handler.dispose', function () {
});
});
<|start_filename|>sample/redux-like/client/actions.js<|end_filename|>
/**
* Created by mgobbi on 12/12/2016.
*/
define(function () {
return {
"ADD_TODO": "add-todo"
, "SET_VISIBILITY_FILTER": "set-visibility-filter"
, "TOGGLE_TODO": "toggle-todo"
};
});
<|start_filename|>src/internal/_compose.js<|end_filename|>
/**
* Created by mgobbi on 17/03/2017.
*/
// Performs right-to-left composition of one or more functions.
import _arity from "./_arity";
import reduce from "./_reduce";
import _pipe from "./_pipe";
export default function (...fns) {
fns.reverse();
const head = fns[0];
const tail = fns.slice(1);
return _arity(head.length, reduce(_pipe, head, tail));
}
<|start_filename|>src/display/AHandler.js<|end_filename|>
/**
* Created by marco.gobbi on 21/01/2015.
*/
import {EventTarget} from "../events/EventTarget";
export class AHandler {
constructor(params) {
let {definitions , dispatcher = new EventTarget()} = params;
this.definitions = definitions;
this.dispatcher = dispatcher;
}
getDefinition() {
// do nothing.
}
inCache() {
// do nothing.
}
updateCache() {
// do nothing.
}
hasMediator() {
// do nothing.
}
create() {
// do nothing.
}
getAllElements() {
// do nothing.
}
destroy() {
// do nothing.
}
dispose() {
// do nothing.
}
}
<|start_filename|>src/display/next-uid.js<|end_filename|>
/**
* Created by mgobbi on 31/03/2017.
*/
const REG_EXP = /[xy]/g;
const STRING = "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx";
/**
*
* @return {string}
*/
export const nextUid = () => {
return STRING.replace(REG_EXP, c => {
let r = Math.random() * 16 | 0;
let v = c === "x" ? r : (r & 0x3 | 0x8);
return v.toString(16);
});
};
<|start_filename|>sample/custom-element/client/components-library.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
define(function (require, exports, module) {
function Module(dispatcher) {
this.dispatcher = dispatcher;
}
Module.prototype = Object.create(HTMLElement.prototype);
Module.prototype.constructor = Module;
Object.assign(Module.prototype, {
handleLoaded: function (e) {
var thumbnails = JSON.parse(e.currentTarget.responseText);
console.log(this,this.innerHTML)
this.innerHTML = thumbnails.reduce(function (prev, curr) {
return prev.concat("<my-thumbnail id='" + curr + "'>" + curr + "</my-thumbnail>");
}, "<div class='thumbnails'>").concat("</div>");
},
connectedCallback: function () {
console.log("attached my-custom-element", this)
var xhr = new XMLHttpRequest();
xhr.open("GET", "components.json");
xhr.onload = function (e) {
this.handleLoaded(e);
}.bind(this);
xhr.send();
},
disconnectedCallback: function () {
console.log("deattached my-custom-element", this)
}
})
module.exports = Module;
});
<|start_filename|>src/internal/_for-each.js<|end_filename|>
/**
* Created by mgobbi on 20/04/2017.
*/
import curry from "./_curry";
export default curry(function (fn, list) {
const len = list.length;
let idx = 0;
while (idx < len) {
fn(list[idx]);
idx += 1;
}
return list;
});
<|start_filename|>sample/custom-element/client/autocomplete-element.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
define(function (require, exports, module) {
function FooElement(dispatcher) {
this.dispatcher = dispatcher;
}
FooElement.prototype = Object.create(HTMLElement.prototype);
FooElement.prototype.constructor = FooElement;
Object.assign(FooElement.prototype, {
connectedCallback: function () {
console.log("attached foo element", this);
var input = document.createElement("input");
this.appendChild(input);
var autocomplete = new google.maps.places.Autocomplete(input);
google.maps.event.addListener(autocomplete, "place_changed", function (e) {
this.dispatcher.dispatchEvent(new CustomEvent("place-changed", {detail:autocomplete.getPlace().geometry.location}));
}.bind(this));
},
disconnectedCallback: function () {
console.log("deattached foo element", this)
}
});
module.exports = FooElement;
});
<|start_filename|>sample/custom-element/client/my-template.js<|end_filename|>
/**
* Created by mgobbi on 03/02/2016.
*/
define(function () {
function Module(dispatcher) {
this.dispatcher = dispatcher;
this.dispatcher.addEventListener("create-element", this.handleElementAdded.bind(this));
}
Module.prototype = Object.create(HTMLElement.prototype);
Module.prototype.constructor = Module;
Object.assign(Module.prototype , {
handleElementAdded: function (e) {
var id = e.detail;
this.appendChild(document.createElement(id));
},
connectedCallback: function () {
},
disconnectedCallback: function () {
}
})
return Module;
});
<|start_filename|>sample/amd/client/Application.js<|end_filename|>
/**
* Created by marco.gobbi on 18/12/2014.
*/
define(function (require) {
"use strict";
var rjs = require("robojs");
var definitions = require("./definitions");
function Application() {
rjs.bootstrap({
definitions: definitions
}).promise.catch(function(e){
console.log(e);
})
}
!function setHandlers() {
document.querySelector(".add-button").addEventListener("click", function handler() {
var element = document.createElement("div");
element.innerHTML = "<div data-mediator='foo-element'>foo! <div data-mediator='bar-element'>bar!</div></div>";//.clone();
document.body.appendChild(element.firstElementChild);
});
}();
return Application();
});
<|start_filename|>test/Robo.spec.js<|end_filename|>
/**
* Created by mgobbi on 05/04/2017.
*/
import {Robo} from "../src/display/Robo";
import {MediatorHandler} from "../src/display/MediatorHandler";
import {AMDLoader, CustomLoader} from "../src/net/Loader";
import {DomWatcher} from "../src/display/DomWatcher";
import {Disposable} from "../src/display/Disposable";
import _noop from "../src/internal/_noop";
var assert = require("chai").assert;
require('./libs/MutationObserver');
describe('Robo', function () {
before(function () {
this.jsdom = require('jsdom-global')()
});
let definitions;
let robo;
after(function () {
this.jsdom()
});
beforeEach(function () {
document.body.innerHTML="";
definitions = {
"a": _ => {
}, "b": _ => {
}
};
robo = new Robo({definitions});
});
it('robo.definitions are definitions', function () {
const definitions2 = {"a": 1, "b": 2};
const robo = new Robo({definitions: definitions2});
assert.equal(robo.definitions, definitions2);
});
it('default root is body', function () {
assert.equal(robo.root, document.body);
});
it('default loader is an AMDLoader', function () {
assert.instanceOf(robo.loader, AMDLoader);
});
it('default handler is a MediatorHandler', function () {
assert.instanceOf(robo.handler, MediatorHandler);
});
it('default watcher is a DomWatcher', function () {
assert.instanceOf(robo.watcher, DomWatcher);
});
it('robo.getMediators returns promise of mediators', function (done) {
window.require = (id, resolve, reject) => {
resolve(id[0]);
};
const div1 = document.createElement("div");
const div2 = document.createElement("div");
const div3 = document.createElement("div");
div1.setAttribute("data-mediator", "a");
div2.setAttribute("data-mediator", "b");
document.body.appendChild(div1);
document.body.appendChild(div2);
document.body.appendChild(div3);
robo.getMediators(document.body.querySelectorAll("*")).then(([a, b]) => {
assert.instanceOf(a, Disposable);
assert.instanceOf(b, Disposable);
assert.equal(a.node, div1);
assert.equal(b.node, div2);
assert.equal(a.mediatorId, div1.getAttribute("mediatorid"));
assert.equal(b.mediatorId, div2.getAttribute("mediatorid"));
assert.equal(a.dispose, _noop);
assert.equal(b.dispose, _noop);
done();
}).catch(_ => {
console.error(_);
assert.fail(_);
done();
})
});
it('robo.removeMediators removes mediators and updates the cache', function (done) {
window.require = (id, resolve, reject) => {
resolve(id[0]);
};
const div1 = document.createElement("div");
const div2 = document.createElement("div");
const div3 = document.createElement("div");
div1.setAttribute("data-mediator", "a");
div2.setAttribute("data-mediator", "b");
document.body.appendChild(div1);
document.body.appendChild(div2);
document.body.appendChild(div3);
robo.getMediators(document.body.querySelectorAll("*"))
.then(_ => {
robo.removeMediators([div1]);
assert.lengthOf(robo.handler.MEDIATORS_CACHE, 1);
robo.removeMediators([div3]);
assert.lengthOf(robo.handler.MEDIATORS_CACHE, 1);
robo.removeMediators(Array.from(document.body.querySelectorAll("*")));
assert.lengthOf(robo.handler.MEDIATORS_CACHE, 0);
done();
}).catch(_ => {
console.error(_);
assert.fail(_);
done();
})
});
});
<|start_filename|>package.json<|end_filename|>
{
"name": "robojs",
"version": "6.0.0",
"license": "MIT",
"repository": "https://github.com/marcog83/RoboJS",
"author": "<NAME>",
"main": "dist/robojs.min.js",
"jsnext:main": "robojs.js",
"module": "robojs.js",
"types": "src/type-definitions/robojs.d.ts",
"scripts": {
"build:babel-it": "cross-env NODE_ENV=production node_modules/.bin/babel ./dist/robojs.js --out-file ./dist/robojs.js",
"build:bundle": "node_modules/.bin/rollup -c",
"build:uglify": "node_modules/.bin/uglifyjs --compress unsafe --mangle -o dist/robojs.min.js dist/robojs.js",
"build": "cross-env NODE_ENV=production npm run build:bundle && npm run build:babel-it && npm run build:uglify",
"release:major": "shelljs-release major",
"release:minor": "shelljs-release minor",
"release:patch": "shelljs-release patch",
"test": "cross-env NODE_ENV=test nyc mocha --recursive test",
"coverage": "cross-env NODE_ENV=test nyc report --reporter=text-lcov | coveralls",
"nsp": "nsp check",
"docs": "typedoc --includeDeclarations --out docs src/type-definitions"
},
"devDependencies": {
"babel-cli": "^6.26.0",
"babel-core": "^6.26.3",
"babel-plugin-istanbul": "^4.1.6",
"babel-preset-env": "^1.7.0",
"babel-preset-import-export": "^1.0.2",
"chai": "^4.1.2",
"coveralls": "^3.0.2",
"cross-env": "^5.2.0",
"eslint": "^5.2.0",
"hoek": "^6.0.0",
"jsdom": "^14.0.0",
"jsdom-global": "^3.0.2",
"mocha": "^6.0.0",
"mochawesome": "^3.0.2",
"nyc": "^13.2.0",
"rollup": "^1.0.0",
"rollup-plugin-node-resolve": "^5.0.0",
"shelljs-release": "^0.3.0",
"uglify-js": "^3.4.5"
},
"nyc": {
"require": [
"babel-register"
],
"reporter": [
"lcov",
"text"
],
"exclude": [
"src/internal/_root.js",
"test/**/*.*"
],
"sourceMap": false,
"instrument": false
}
}
<|start_filename|>src/internal/_isArrayLike.js<|end_filename|>
import _isArray from "./_isArray";
function _isString(x) {
return Object.prototype.toString.call(x) === "[object String]";
}
export default function (x) {
const isArray = Array.isArray || _isArray;
if (!x) {
return false;
}
if (isArray(x)) {
return true;
}
if ("object" !== typeof x) {
return false;
}
if (_isString(x)) {
return false;
}
if (x.nodeType === 1) {
return !!x.length;
}
if (x.length === 0) {
return true;
}
if (x.length > 0) {
return x.hasOwnProperty(0) && x.hasOwnProperty(x.length - 1);
}
return false;
}
<|start_filename|>sample/amd/client/my-custom-element.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
define(function (require, exports, module) {
function Mediator(node, dispatcher) {
console.log(node);
return function(){
console.log("removed",node);
}
}
module.exports = Mediator;
});
<|start_filename|>test/bootstrap.spec.js<|end_filename|>
/**
* Created by mgobbi on 05/04/2017.
*/
import {bootstrap} from "../src/index";
import {Loader} from "../src/net/Loader";
import {Robo} from "../src/display/Robo";
var assert = require("chai").assert;
require('./libs/MutationObserver');
describe('bootstrap', function () {
before(function () {
this.jsdom = require('jsdom-global')()
})
after(function () {
this.jsdom()
})
it('it is a function', function () {
assert.isFunction(bootstrap);
});
it('arity 1', function () {
assert.lengthOf(bootstrap, 1);
});
it('returns an instance of Robo', function () {
assert.instanceOf(bootstrap({definitions: {}}), Robo, "it does not return an instance of Robo");
});
it('a custom loader can be passed', function () {
const loader = new Loader();
const robo = bootstrap({definitions: {}, loader});
assert.equal(robo.loader, loader, "it does not use the provided loader");
});
it('a custom root can be passed', function () {
const root = document.createElement("div");
const robo = bootstrap({definitions: {}, root });
assert.equal(robo.root, root, "it does not use the provided root");
});
it('bootstrap: dispose is called', function () {
const robo = bootstrap({definitions: {} });
robo.dispose();
assert.ok("dispose does not throw");
});
});
<|start_filename|>src/display/Disposable.js<|end_filename|>
import _noop from "../internal/_noop";
export class Disposable {
constructor({mediatorId = "", node = null, dispose = _noop}={}) {
this.mediatorId = mediatorId;
this.node = node;
this.dispose = dispose;
}
}
<|start_filename|>rollup.config.js<|end_filename|>
/**
* Created by marcogobbi on 07/05/2017.
*/
export default {
input: 'robojs.js',
output: {
format: 'es',
file: 'dist/robojs.js'
, name: 'robojs'
, exports: 'named'
}
, plugins: []
};
<|start_filename|>test/Loader.spec.js<|end_filename|>
/**
* Created by mgobbi on 05/04/2017.
*/
import {Loader, AMDLoader, CustomLoader} from "../src/net/Loader";
var assert = require("chai").assert;
describe('Loader', function () {
before(function () {
this.jsdom = require('jsdom-global')()
})
after(function () {
this.jsdom()
})
beforeEach(() => {
})
it('Loader is an instance of Loader', function () {
assert.instanceOf(new Loader(), Loader);
});
it('the returned object has a load method', function () {
var loader = new CustomLoader(_ => _);
assert.instanceOf(loader.load(""), Promise, "load() does not return a Promise");
assert.isFunction(loader.load, "load is not a function");
});
it('AMDLoader is an instance of Loader', function () {
const loader = new AMDLoader();
assert.instanceOf(loader, Loader);
});
it('AMDLoader uses require correctly', function (done) {
const loader = new AMDLoader();
const id = "my-id";
window.require = (ids, resolve, reject) => {
assert.equal(ids[0], id, "id does not match");
assert.isFunction(resolve, "resolve is not a function");
assert.isFunction(reject, "reject is not a function");
done();
};
loader.load(id)
});
it('CustomLoader is an instance of Loader', function (done) {
const myid = "my-id";
const fn = (id, resolve, reject) => {
assert.equal(id, myid, "id does not match");
assert.isFunction(resolve, "resolve is not a function");
assert.isFunction(reject, "reject is not a function");
done();
};
const loader = new CustomLoader(fn);
assert.instanceOf(loader, Loader);
assert.equal(loader.fn, fn);
loader.load(myid);
});
});
<|start_filename|>dist/robojs.js<|end_filename|>
(function (global, factory) {
if (typeof define === "function" && define.amd) {
define("robojs", ["exports"], factory);
} else if (typeof exports !== "undefined") {
factory(exports);
} else {
var mod = {
exports: {}
};
factory(mod.exports);
global.robojs = mod.exports;
}
})(this, function (exports) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) {
return typeof obj;
} : function (obj) {
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};
function _possibleConstructorReturn(self, call) {
if (!self) {
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
return call && (typeof call === "object" || typeof call === "function") ? call : self;
}
function _inherits(subClass, superClass) {
if (typeof superClass !== "function" && superClass !== null) {
throw new TypeError("Super expression must either be null or a function, not " + typeof superClass);
}
subClass.prototype = Object.create(superClass && superClass.prototype, {
constructor: {
value: subClass,
enumerable: false,
writable: true,
configurable: true
}
});
if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass;
}
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
}
var _createClass = function () {
function defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
return function (Constructor, protoProps, staticProps) {
if (protoProps) defineProperties(Constructor.prototype, protoProps);
if (staticProps) defineProperties(Constructor, staticProps);
return Constructor;
};
}();
var Loader = function () {
function Loader() {
_classCallCheck(this, Loader);
}
_createClass(Loader, [{
key: "load",
value: function load(id) {
var _this = this;
return new Promise(function (resolve, reject) {
return _this.onComplete(id, resolve, reject);
});
}
}, {
key: "onComplete",
value: function onComplete() {
//not implemented
}
}]);
return Loader;
}();
var AMDLoader = function (_Loader) {
_inherits(AMDLoader, _Loader);
function AMDLoader() {
_classCallCheck(this, AMDLoader);
return _possibleConstructorReturn(this, (AMDLoader.__proto__ || Object.getPrototypeOf(AMDLoader)).apply(this, arguments));
}
_createClass(AMDLoader, [{
key: "onComplete",
value: function onComplete(id, resolve, reject) {
window.require([id], resolve, reject);
}
}]);
return AMDLoader;
}(Loader);
var CustomLoader = function (_Loader2) {
_inherits(CustomLoader, _Loader2);
function CustomLoader(fn) {
_classCallCheck(this, CustomLoader);
var _this3 = _possibleConstructorReturn(this, (CustomLoader.__proto__ || Object.getPrototypeOf(CustomLoader)).call(this));
_this3.fn = fn;
return _this3;
}
_createClass(CustomLoader, [{
key: "onComplete",
value: function onComplete(id, resolve, reject) {
this.fn(id, resolve, reject);
}
}]);
return CustomLoader;
}(Loader);
var _root = (typeof self === "undefined" ? "undefined" : _typeof(self)) === "object" && self.self === self && self || (typeof global === "undefined" ? "undefined" : _typeof(global)) === "object" && global.global === global && global || window || self;
var EventDispatcher = function () {
function EventDispatcher() {
_classCallCheck(this, EventDispatcher);
this.listeners_ = {};
}
_createClass(EventDispatcher, [{
key: "addEventListener",
value: function addEventListener(type, handler) {
// let listeners_type = this.listeners_[type];
if (!this.listeners_[type]) {
this.listeners_[type] = [];
}
if (!this.listeners_[type].includes(handler)) {
this.listeners_[type].push(handler);
}
}
}, {
key: "removeEventListener",
value: function removeEventListener(type, handler) {
var listeners_type = this.listeners_[type];
if (listeners_type === undefined) return;
for (var i = 0, l; l = listeners_type[i]; i++) {
if (l === handler) {
listeners_type.splice(i, 1);
break;
}
}if (!listeners_type.length) {
delete this.listeners_[type];
}
}
}, {
key: "dispatchEvent",
value: function dispatchEvent(event) {
// Since we are using DOM Event objects we need to override some of the
// properties and methods so that we can emulate this correctly.
var self = this;
event.__defineGetter__("target", function () {
return self;
});
var type = event.type;
var prevented = 0;
var listeners_type = this.listeners_[type];
if (listeners_type === undefined) return true;
var handlers = listeners_type.concat();
handlers.map(function (handler) {
return handler.handleEvent ? handler.handleEvent.bind(handler) : handler;
}).forEach(function (handler) {
prevented = handler(event) === false;
});
return !prevented && !event.defaultPrevented;
}
}]);
return EventDispatcher;
}();
//
var _EventTarget = _root.EventTarget;
try {
new _EventTarget();
} catch (e) {
_EventTarget = EventDispatcher;
}
var EventTarget = _EventTarget;
var Signal = function () {
function Signal() {
_classCallCheck(this, Signal);
this.listenerBoxes = [];
this.listenersNeedCloning = false;
}
_createClass(Signal, [{
key: "getNumListeners",
value: function getNumListeners() {
return this.listenerBoxes.length;
}
}, {
key: "connect",
value: function connect(slot, scope) {
this.registerListener(slot, scope, false);
}
}, {
key: "connectOnce",
value: function connectOnce(slot, scope) {
this.registerListener(slot, scope, true);
}
}, {
key: "disconnect",
value: function disconnect(slot, scope) {
if (this.listenersNeedCloning) {
this.listenerBoxes = this.listenerBoxes.slice();
this.listenersNeedCloning = false;
}
for (var i = this.listenerBoxes.length; i--;) {
if (this.listenerBoxes[i].listener === slot && this.listenerBoxes[i].scope === scope) {
this.listenerBoxes.splice(i, 1);
return;
}
}
}
}, {
key: "disconnectAll",
value: function disconnectAll() {
for (var i = this.listenerBoxes.length; i--;) {
this.disconnect(this.listenerBoxes[i].listener, this.listenerBoxes[i].scope);
}
}
}, {
key: "emit",
value: function emit() {
var _this4 = this;
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
this.listenersNeedCloning = true;
this.listenerBoxes.forEach(function (_ref) {
var scope = _ref.scope,
listener = _ref.listener,
once = _ref.once;
if (once) {
_this4.disconnect(listener, scope);
}
listener.apply(scope, args);
});
this.listenersNeedCloning = false;
}
}, {
key: "registerListener",
value: function registerListener(listener, scope, once) {
var _listeners = this.listenerBoxes.filter(function (box) {
return box.listener === listener && box.scope === scope;
});
if (!_listeners.length) {
if (this.listenersNeedCloning) {
this.listenerBoxes = this.listenerBoxes.slice();
}
this.listenerBoxes.push({ listener: listener, scope: scope, once: once });
} else {
//
var addOnce_add = _listeners.find(function (box) {
return box.once && !once;
});
var add_addOnce = _listeners.find(function (box) {
return once && !box.once;
});
if (addOnce_add) {
throw new Error("You cannot addOnce() then try to add() the same listener " + "without removing the relationship first.");
}
if (add_addOnce) {
throw new Error("You cannot add() then addOnce() the same listener " + "without removing the relationship first.");
}
}
}
}]);
return Signal;
}();
var _isArray = function _isArray(val) {
return val != null && val.length >= 0 && Object.prototype.toString.call(val) === "[object Array]";
};
function _isString(x) {
return Object.prototype.toString.call(x) === "[object String]";
}
function _isArrayLike(x) {
var isArray = Array.isArray || _isArray;
if (!x) {
return false;
}
if (isArray(x)) {
return true;
}
if ("object" !== (typeof x === "undefined" ? "undefined" : _typeof(x))) {
return false;
}
if (_isString(x)) {
return false;
}
if (x.nodeType === 1) {
return !!x.length;
}
if (x.length === 0) {
return true;
}
if (x.length > 0) {
return x.hasOwnProperty(0) && x.hasOwnProperty(x.length - 1);
}
return false;
}
/**
* Created by mgobbi on 12/04/2017.
*/
function flatten(arr) {
return Array.from(arr).reduce(function (flat, toFlatten) {
if (_isArrayLike(toFlatten)) {
toFlatten = Array.from(toFlatten);
}
return flat.concat(Array.isArray(toFlatten) ? flatten(toFlatten) : toFlatten);
}, []);
}
function unique(arrArg) {
return arrArg.filter(function (elem, pos, arr) {
return arr.indexOf(elem) === pos;
});
}
var DomWatcher = function () {
function DomWatcher(root, handler) {
_classCallCheck(this, DomWatcher);
this.onAdded = new Signal();
this.onRemoved = new Signal();
this.root = root;
this.handler = handler;
this.init();
}
_createClass(DomWatcher, [{
key: "init",
value: function init() {
this.observer = new MutationObserver(this.handleMutations.bind(this));
this.observer.observe(this.root, {
attributes: false, //true
childList: true,
characterData: false,
subtree: true
});
}
}, {
key: "handleMutations",
value: function handleMutations(mutations) {
var _this5 = this;
mutations.forEach(function (mutation) {
_this5.updateNodes(mutation.removedNodes, _this5.onRemoved);
_this5.updateNodes(mutation.addedNodes, _this5.onAdded);
});
}
}, {
key: "_parseNodes",
value: function _parseNodes(nodes) {
nodes = flatten(nodes);
nodes = nodes.filter(function (node) {
return node.querySelectorAll;
}).map(this.handler.getAllElements.bind(this.handler)).filter(function (nodes) {
return nodes.length > 0;
});
nodes = flatten(nodes);
nodes = unique(nodes);
return nodes;
}
}, {
key: "updateNodes",
value: function updateNodes(nodes, signal) {
nodes = this._parseNodes(nodes);
if (nodes.length > 0) {
signal.emit(nodes);
}
}
}, {
key: "dispose",
value: function dispose() {
this.observer.disconnect();
this.onAdded.disconnectAll();
this.onRemoved.disconnectAll();
this.observer = null;
this.onAdded = null;
this.onRemoved = null;
}
}]);
return DomWatcher;
}();
var _noop = function _noop(_) {
return _;
};
/**
* Created by mgobbi on 31/03/2017.
*/
var REG_EXP = /[xy]/g;
var STRING = "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx";
/**
*
* @return {string}
*/
var nextUid = function nextUid() {
return STRING.replace(REG_EXP, function (c) {
var r = Math.random() * 16 | 0;
var v = c === "x" ? r : r & 0x3 | 0x8;
return v.toString(16);
});
};
/**
* Created by marco.gobbi on 21/01/2015.
*/
var AHandler = function () {
function AHandler(params) {
_classCallCheck(this, AHandler);
var definitions = params.definitions,
_params$dispatcher = params.dispatcher,
dispatcher = _params$dispatcher === undefined ? new EventTarget() : _params$dispatcher;
this.definitions = definitions;
this.dispatcher = dispatcher;
}
_createClass(AHandler, [{
key: "getDefinition",
value: function getDefinition() {
// do nothing.
}
}, {
key: "inCache",
value: function inCache() {
// do nothing.
}
}, {
key: "updateCache",
value: function updateCache() {
// do nothing.
}
}, {
key: "hasMediator",
value: function hasMediator() {
// do nothing.
}
}, {
key: "create",
value: function create() {
// do nothing.
}
}, {
key: "getAllElements",
value: function getAllElements() {
// do nothing.
}
}, {
key: "destroy",
value: function destroy() {
// do nothing.
}
}, {
key: "dispose",
value: function dispose() {
// do nothing.
}
}]);
return AHandler;
}();
var Disposable = function Disposable() {
var _ref2 = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
_ref2$mediatorId = _ref2.mediatorId,
mediatorId = _ref2$mediatorId === undefined ? "" : _ref2$mediatorId,
_ref2$node = _ref2.node,
node = _ref2$node === undefined ? null : _ref2$node,
_ref2$dispose = _ref2.dispose,
dispose = _ref2$dispose === undefined ? _noop : _ref2$dispose;
_classCallCheck(this, Disposable);
this.mediatorId = mediatorId;
this.node = node;
this.dispose = dispose;
};
var MediatorHandler = function (_AHandler) {
_inherits(MediatorHandler, _AHandler);
function MediatorHandler(params) {
_classCallCheck(this, MediatorHandler);
var _this6 = _possibleConstructorReturn(this, (MediatorHandler.__proto__ || Object.getPrototypeOf(MediatorHandler)).call(this, params));
_this6.MEDIATORS_CACHE = [];
return _this6;
}
_createClass(MediatorHandler, [{
key: "getDefinition",
value: function getDefinition(node) {
return this.definitions[node.getAttribute(this.selector)];
}
}, {
key: "inCache",
value: function inCache(node) {
return !!this.MEDIATORS_CACHE.find(function (disposable) {
return disposable.node === node;
});
}
}, {
key: "updateCache",
value: function updateCache(disposable) {
this.MEDIATORS_CACHE.push(disposable); //[mediatorId] = disposeFunction;
return this.MEDIATORS_CACHE;
}
}, {
key: "hasMediator",
value: function hasMediator(node) {
return !!this.getDefinition(node) && !this.inCache(node);
}
}, {
key: "create",
value: function create(node, Mediator) {
var mediatorId = nextUid();
node.setAttribute("mediatorid", mediatorId);
var dispose = _noop;
if (node.parentNode) {
dispose = Mediator(node, this.dispatcher) || _noop;
}
var disposable = new Disposable({
mediatorId: mediatorId,
node: node,
dispose: dispose
});
this.updateCache(disposable);
return disposable;
}
}, {
key: "getAllElements",
value: function getAllElements(node) {
var nodes = Array.from(node.querySelectorAll("[" + this.selector + "]")).slice(0);
if (node.getAttribute(this.selector)) {
nodes.unshift(node);
}
return nodes;
}
}, {
key: "_destroy",
value: function _destroy(node) {
var l = this.MEDIATORS_CACHE.length;
for (var i = 0; i < l; i++) {
var disposable = this.MEDIATORS_CACHE[i];
if (disposable && (!disposable.node || disposable.node === node)) {
MediatorHandler.disposeMediator(disposable);
this.MEDIATORS_CACHE[i] = null;
}
}
return this.MEDIATORS_CACHE.filter(function (i) {
return i;
});
}
}, {
key: "destroy",
value: function destroy(node) {
this.MEDIATORS_CACHE = this._destroy(node);
return this.MEDIATORS_CACHE;
}
}, {
key: "dispose",
value: function dispose() {
this.MEDIATORS_CACHE.forEach(MediatorHandler.disposeMediator);
this.MEDIATORS_CACHE = null;
this.dispatcher = null;
}
}, {
key: "selector",
get: function get() {
return "data-mediator";
}
}], [{
key: "disposeMediator",
value: function disposeMediator(disposable) {
disposable.dispose();
disposable.node = null;
}
}]);
return MediatorHandler;
}(AHandler);
var Robo = function () {
function Robo(options) {
_classCallCheck(this, Robo);
var definitions = options.definitions,
_options$loader = options.loader,
loader = _options$loader === undefined ? new AMDLoader() : _options$loader,
_options$root = options.root,
root = _options$root === undefined ? document.body : _options$root;
this.definitions = definitions;
this.loader = loader;
this.root = root;
this.handler = options.handler || new MediatorHandler({ definitions: definitions });
this.watcher = options.watcher || new DomWatcher(root, this.handler);
this.watcher.onAdded.connect(this.getMediators.bind(this));
this.watcher.onRemoved.connect(this.removeMediators.bind(this));
this.init();
}
_createClass(Robo, [{
key: "init",
value: function init() {
var nodes = [this.root].map(this.handler.getAllElements.bind(this.handler));
this.promise = this.getMediators(nodes);
}
}, {
key: "getMediators",
value: function getMediators(nodes) {
var _this7 = this;
nodes = flatten(nodes);
var promises = nodes.filter(this.handler.hasMediator.bind(this.handler)).map(function (node) {
var definition = _this7.handler.getDefinition(node);
return _this7.loader.load(definition).then(function (Mediator) {
return _this7.handler.create(node, Mediator);
});
});
return Promise.all(promises);
}
}, {
key: "removeMediators",
value: function removeMediators(nodes) {
nodes.forEach(this.handler.destroy.bind(this.handler));
}
}, {
key: "dispose",
value: function dispose() {
this.watcher.dispose();
this.handler.dispose();
this.watcher = null;
this.handler = null;
this.definitions = null;
this.loader = null;
this.root = null;
this.promise = null;
}
}]);
return Robo;
}();
var CustomElementHandler = function (_AHandler2) {
_inherits(CustomElementHandler, _AHandler2);
function CustomElementHandler(params) {
_classCallCheck(this, CustomElementHandler);
var _this8 = _possibleConstructorReturn(this, (CustomElementHandler.__proto__ || Object.getPrototypeOf(CustomElementHandler)).call(this, params));
_this8.REGISTERED_ELEMENTS = {};
return _this8;
}
_createClass(CustomElementHandler, [{
key: "updateCache",
value: function updateCache(id) {
this.REGISTERED_ELEMENTS[id] = true;
return this.REGISTERED_ELEMENTS;
}
}, {
key: "inCache",
value: function inCache(id) {
return !!this.REGISTERED_ELEMENTS[id];
}
}, {
key: "getDefinition",
value: function getDefinition(node) {
return this.definitions[node.tagName.toLowerCase()];
}
}, {
key: "create",
value: function create(node, Mediator) {
var tagName = "";
var dispatcher = this.dispatcher;
if (!this.inCache(node.tagName.toLowerCase())) {
tagName = node.tagName.toLowerCase();
if (!tagName.match(/-/gim)) {
throw new Error("The name of a custom element must contain a dash (-). So <x-tags>, <my-element>, and <my-awesome-app> are all valid names, while <tabs> and <foo_bar> are not.");
}
window.customElements.define(tagName, function (_Mediator) {
_inherits(_class, _Mediator);
function _class() {
_classCallCheck(this, _class);
return _possibleConstructorReturn(this, (_class.__proto__ || Object.getPrototypeOf(_class)).call(this, dispatcher));
}
return _class;
}(Mediator));
this.updateCache(tagName);
}
return new Disposable();
}
}, {
key: "hasMediator",
value: function hasMediator(node) {
var id = node.tagName.toLowerCase();
return !!this.getDefinition(node) && !this.inCache(id);
}
}, {
key: "getAllElements",
value: function getAllElements(node) {
var _children = Array.from(node.querySelectorAll("*")).filter(function (el) {
return el.tagName.match(/-/gim);
});
var root = [];
if (node.tagName.match(/-/gim)) {
root = [node];
}
return root.concat(_children);
}
}]);
return CustomElementHandler;
}(AHandler);
//
var bootstrap = function bootstrap(options) {
return new Robo(options);
};
exports.bootstrap = bootstrap;
exports.Loader = Loader;
exports.AMDLoader = AMDLoader;
exports.CustomLoader = CustomLoader;
exports.EventTarget = EventTarget;
exports.Signal = Signal;
exports.DomWatcher = DomWatcher;
exports.MediatorHandler = MediatorHandler;
exports.Robo = Robo;
exports.CustomElementHandler = CustomElementHandler;
});
<|start_filename|>src/internal/_curryN.js<|end_filename|>
/**
* Created by mgobbi on 20/04/2017.
*/
import _arity from "./_arity";
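// Accumulates the arguments received so far; once `length` arguments have been
// gathered the wrapped fn is invoked, otherwise a further-curried function is returned.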
function _curryN(length, received, fn) {
return function() {
const combined = [];
let argsIdx = 0;
let left = length;
let combinedIdx = 0;
while (combinedIdx < received.length || argsIdx < arguments.length) {
let result;
if (combinedIdx < received.length ) {
result = received[combinedIdx];
} else {
result = arguments[argsIdx];
argsIdx += 1;
}
combined[combinedIdx] = result;
left -= 1;
combinedIdx += 1;
}
return left <= 0 ? fn.apply(this, combined)
: _arity(left, _curryN(length, combined, fn));
};
}
export default _curryN;
<|start_filename|>src/internal/_curry1.js<|end_filename|>
/**
* Created by mgobbi on 20/04/2017.
*/
export default function (fn) {
return function f1() {
if (arguments.length === 0 ) {
return f1;
} else {
return fn.apply(this, arguments);
}
};
}
<|start_filename|>sample/jquery-sortable/sortable-item.js<|end_filename|>
/**
* Created by marcogobbi on 30/03/2017.
*/
define(function () {
return function (node) {
console.trace("attach", node);
return function () {
console.log("dispose", node.getAttribute("mediatorid"));
}
};
});
<|start_filename|>sample/amd/client/definitions.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
define(function () {
"use strict";
return {
"my-custom-element": "client/my-custom-element",
"foo-element": "client/foo-element",
"bar-element": "client/bar-element"
}
});
<|start_filename|>sample/redux-like/client/todo-app.js<|end_filename|>
/**
* Created by mgobbi on 12/12/2016.
*/
define(function (require) {
var Actions = require("./actions");
return function (node, dispatcher) {
var state = {
visibilityFilter: 'SHOW_ALL',
todos: []
};
dispatcher.addEventListener(Actions.ADD_TODO, function (e) {
var text = e.detail;
state = Object.assign({}, state, {
todos: [ {
text: text,
completed: false
}].concat(state.todos)
});
dispatcher.dispatchEvent(new CustomEvent("state", {detail:state}));
});
dispatcher.addEventListener(Actions.SET_VISIBILITY_FILTER, function (e) {
var visibilityFilter = e.detail;
state = Object.assign({}, state, {
visibilityFilter: visibilityFilter
});
dispatcher.dispatchEvent(new CustomEvent("state", {detail:state}));
});
dispatcher.addEventListener(Actions.TOGGLE_TODO, function (e) {
var toggleIndex = e.detail;
state = Object.assign({}, state, {
todos: state.todos.map((todo, index) => {
if (index === toggleIndex) {
return Object.assign({}, todo, {
completed: !todo.completed
})
}
return todo
})
});
dispatcher.dispatchEvent(new CustomEvent("state", {detail:state}));
})
};
});
<|start_filename|>test/internal/arity.spec.js<|end_filename|>
import {arity} from "../../src/internal/index";
var assert = require("chai").assert;
describe('arity', function () {
it('returns correct arguments', function () {
arity(0, function fn() {
assert.equal(arguments.length, 0)
})();
//
arity(1, function fn() {
assert.equal(arguments.length, 1)
})(1);
//
arity(2, function fn() {
assert.equal(arguments.length, 2)
})(1, 2);
//
arity(3, function fn() {
assert.equal(arguments.length, 3)
})(1, 2, 3);
//
arity(4, function fn() {
assert.equal(arguments.length, 4)
})(1, 2, 3, 4);
//
arity(5, function fn() {
assert.equal(arguments.length, 5)
})(1, 2, 3, 4, 5);
//
arity(6, function fn() {
assert.equal(arguments.length, 6)
})(1, 2, 3, 4, 5, 6);
//
arity(7, function fn() {
assert.equal(arguments.length, 7)
})(1, 2, 3, 4, 5, 6, 7);
//
arity(8, function fn() {
assert.equal(arguments.length, 8)
})(1, 2, 3, 4, 5, 6, 7, 8);
//
arity(9, function fn() {
assert.equal(arguments.length, 9)
})(1, 2, 3, 4, 5, 6, 7, 8, 9);
//
arity(10, function fn() {
assert.equal(arguments.length, 10)
})(1, 2, 3, 4, 5, 6, 7, 8, 9, 0);
});
it('accept only numbers', function () {
assert.throws(() => {
arity("1", _ => _);
}, TypeError);
});
it('has length 2', function () {
assert.lengthOf(arity, 2);
});
});
<|start_filename|>docs/index.html<|end_filename|>
<!doctype html>
<html class="default no-js">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>robojs</title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="assets/css/main.css">
</head>
<body>
<header>
<div class="tsd-page-toolbar">
<div class="container">
<div class="table-wrap">
<div class="table-cell" id="tsd-search" data-index="assets/js/search.js" data-base=".">
<div class="field">
<label for="tsd-search-field" class="tsd-widget search no-caption">Search</label>
<input id="tsd-search-field" type="text" />
</div>
<ul class="results">
<li class="state loading">Preparing search index...</li>
<li class="state failure">The search index is not available</li>
</ul>
<a href="index.html" class="title">robojs</a>
</div>
<div class="table-cell" id="tsd-widgets">
<div id="tsd-filter">
<a href="#" class="tsd-widget options no-caption" data-toggle="options">Options</a>
<div class="tsd-filter-group">
<div class="tsd-select" id="tsd-filter-visibility">
<span class="tsd-select-label">All</span>
<ul class="tsd-select-list">
<li data-value="public">Public</li>
<li data-value="protected">Public/Protected</li>
<li data-value="private" class="selected">All</li>
</ul>
</div>
<input type="checkbox" id="tsd-filter-inherited" checked />
<label class="tsd-widget" for="tsd-filter-inherited">Inherited</label>
<input type="checkbox" id="tsd-filter-externals" checked />
<label class="tsd-widget" for="tsd-filter-externals">Externals</label>
<input type="checkbox" id="tsd-filter-only-exported" />
<label class="tsd-widget" for="tsd-filter-only-exported">Only exported</label>
</div>
</div>
<a href="#" class="tsd-widget menu no-caption" data-toggle="menu">Menu</a>
</div>
</div>
</div>
</div>
<div class="tsd-page-title">
<div class="container">
<ul class="tsd-breadcrumb">
<li>
<a href="globals.html">Globals</a>
</li>
</ul>
<h1> robojs</h1>
</div>
</div>
</header>
<div class="container container-main">
<div class="row">
<div class="col-8 col-content">
<div class="tsd-panel tsd-typography">
<p>RoboJS is a library that aims to dynamically load JS modules depending on how the DOM is composed.
Add a node to the DOM and a JS module will be loaded!
Remove a node and the JS module will be disposed!
Not another frontend framework, but a tool that lets you manage the association between DOM and JS in less than 4k gzipped.</p>
<p><a href="https://nodei.co/npm/robojs/"><img src="https://nodei.co/npm/robojs.png" alt="NPM"></a>
<a href="https://greenkeeper.io/"><img src="https://badges.greenkeeper.io/marcog83/RoboJS.svg" alt="Greenkeeper badge"></a>
<a href="https://travis-ci.org/marcog83/RoboJS"><img src="https://travis-ci.org/marcog83/RoboJS.svg?branch=master" alt="Build Status"></a>
<a href="https://codebeat.co/projects/github-com-marcog83-robojs-master"><img src="https://codebeat.co/badges/04be77bb-9247-4988-8499-3711bcbe1485" alt="codebeat badge"></a>
<a href="https://codeclimate.com/github/marcog83/RoboJS/maintainability"><img src="https://api.codeclimate.com/v1/badges/73702f345d75cdc37cb7/maintainability" alt="Maintainability"></a>
<a href="https://codeclimate.com/github/marcog83/RoboJS/test_coverage"><img src="https://api.codeclimate.com/v1/badges/73702f345d75cdc37cb7/test_coverage" alt="Test Coverage"></a>
<a href="https://coveralls.io/github/marcog83/RoboJS?branch=master"><img src="https://coveralls.io/repos/github/marcog83/RoboJS/badge.svg?branch=master" alt="Coverage Status"></a>
<a href="http://inch-ci.org/github/marcog83/RoboJS"><img src="http://inch-ci.org/github/marcog83/RoboJS.svg?branch=master" alt="Inline docs"></a>
<a href="https://nodesecurity.io/orgs/marcog83/projects/c8621ee6-fb99-4e5f-ae62-265279409532"><img src="https://nodesecurity.io/orgs/marcog83/projects/c8621ee6-fb99-4e5f-ae62-265279409532/badge" alt="NSP Status"></a>
<a href="https://david-dm.org/marcog83/RoboJS?type=dev"><img src="https://david-dm.org/marcog83/RoboJS/dev-status.svg" alt="devDependencies Status"></a></p>
<h1 id="the-idea-behind-the-code">The idea behind the code</h1>
<p>To understand how and why I decided to write this tool, please read this <a href="https://github.com/marcog83/RoboJS/wiki/RoboJS-::-the-idea-behind-the-code">post</a></p>
<h1 id="quick-demo">Quick Demo</h1>
<p>A quick demo can be found <a href="http://marcog83.github.io/RoboJS/">HERE</a>. It simulates a bunch of modules loaded from the server and a page in which to place them (on the right).</p>
<h1 id="installation">Installation</h1>
<pre><code class="lang-javascript">
npm install robojs
</code></pre>
<h1 id="how-it-works-">How it works.</h1>
<p><code>robojs</code> will iterate the DOM trying to match component ids with <code>data-mediator</code> attributes.
Each time it finds a match, a request is sent to load the right script.
The first time the script is loaded from the network, while subsequent requests are served from cache.
<code>MutationObserver</code> is used to handle DOM changes: when they happen, <code>robojs</code> iterates over the newly added nodes.</p>
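<p>As a minimal sketch (assuming the default <code>document.body</code> root and the AMD loader; the definition id and module path below are only examples), adding a matching node after <code>bootstrap</code> is enough to get its module loaded:</p>
<pre><code class="lang-javascript">import {bootstrap} from "robojs"
bootstrap({definitions: {"my-mediator": "component/mediator"}});
// later, somewhere in your code
var el = document.createElement("div");
el.setAttribute("data-mediator", "my-mediator"); // picked up by the MutationObserver
document.body.appendChild(el); // "component/mediator" is loaded and executed with `el`
</code></pre>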
<h1 id="usage">Usage</h1>
<p>You set a <code>data-mediator</code> attribute with an ID (whatever you want)</p>
<pre><code class="lang-html"> <span class="hljs-tag"><<span class="hljs-name">div</span> <span class="hljs-attr">data-mediator</span>=<span class="hljs-string">"my-mediator"</span>></span>a-2<span class="hljs-tag"></<span class="hljs-name">div</span>></span>
</code></pre>
<p>in <code>definitions.js</code> you define a Map where the key is an ID , and the value is the file to request in order to register the element.</p>
<pre><code class="lang-json"> {
<span class="hljs-attr">"my-mediator"</span>: <span class="hljs-string">"component/mediator"</span>
}
</code></pre>
<p>By default <code>robojs</code> assumes the presence of an AMD loader like <code>RequireJS</code> in order to request the component and its dependencies.
For example, "component/mediator" looks like the following:</p>
<pre><code class="lang-javascript"><span class="hljs-comment">//mediator.js</span>
define(<span class="hljs-function"><span class="hljs-keyword">function</span>(<span class="hljs-params"></span>)</span>{
<span class="hljs-keyword">return</span> <span class="hljs-function"><span class="hljs-keyword">function</span> <span class="hljs-title">Mediator</span>(<span class="hljs-params">node</span>)</span>{
<span class="hljs-comment">//</span>
}
})
</code></pre>
<p>When <code>robojs</code> finds a match between a <code>data-mediator</code> attribute and an ID from <code>definitions.js</code>,
it will load the <code>component/mediator.js</code> file and execute the <code>Mediator</code> function.
The <code>node</code> parameter is a reference to the DOM element.</p>
<pre><code class="lang-javascript"><span class="hljs-keyword">import</span> {bootstrap} <span class="hljs-keyword">from</span> <span class="hljs-string">"robojs"</span>
<span class="hljs-keyword">const</span> definitions={
<span class="hljs-string">"my-mediator"</span>: <span class="hljs-string">"component/mediator"</span>
}
<span class="hljs-comment">//basic usage</span>
bootstrap({definitions}) <span class="hljs-comment">// return {dispose,promise}</span>
</code></pre>
<p>You can store the Object returned by the bootstrap function and use it later. </p>
<pre><code class="lang-javascript"><span class="hljs-keyword">import</span> {bootstrap} <span class="hljs-keyword">from</span> <span class="hljs-string">"robojs"</span>
<span class="hljs-keyword">const</span> definitions={
<span class="hljs-string">"my-mediator"</span>: <span class="hljs-string">"component/mediator"</span>
};
<span class="hljs-keyword">var</span> application=bootstrap({definitions}) <span class="hljs-comment">// return {dispose:Function,promise:Promise<any>}</span>
<span class="hljs-comment">//you can handle when every Mediators in page are executed</span>
application.promise.then(<span class="hljs-function"><span class="hljs-keyword">function</span>(<span class="hljs-params"></span>)</span>{
<span class="hljs-built_in">console</span>.log(<span class="hljs-string">"all mediators loaded"</span>)
}).catch(<span class="hljs-function"><span class="hljs-keyword">function</span>(<span class="hljs-params">e</span>)</span>{
<span class="hljs-built_in">console</span>.log(<span class="hljs-string">"something went wrong"</span>,e);
})
<span class="hljs-comment">//later in your code you can dispose the RoboJS instance.</span>
application.dispose();
</code></pre>
<h1 id="mediator-function-">Mediator Function.</h1>
<p>Mediator is the context where your logic for a specific element runs. It is a simple function.
When a <code>data-mediator</code> attribute matches an ID from the component definitions, the <code>Mediator</code> function is called and may return a function.</p>
<p>The returned function is called later, when the module is disposed.
The <code>Mediator</code> function takes two parameters, <code>node</code> and <code>dispatcher</code>: <code>node</code> is a reference to the DOM element,
<code>dispatcher</code> is a reference to <code>EventDispatcher</code> Object.</p>
<pre><code class="lang-javascript">
<span class="hljs-function"><span class="hljs-keyword">function</span> <span class="hljs-title">Mediator</span>(<span class="hljs-params">node,dispacther</span>) </span>{
<span class="hljs-keyword">return</span> <span class="hljs-function"><span class="hljs-keyword">function</span>(<span class="hljs-params"></span>)</span>{
<span class="hljs-comment">// destroy everything, es. handlers</span>
}
}
</code></pre>
<h1 id="loader-object">Loader Object</h1>
<p>The default loader is <code>AMD</code> based, which means that by default any module should be exported as AMD.
You can customize the script loading strategy by passing a function to <code>Loader</code>.</p>
<p>For instance, if you use the <code>SystemJS</code> module loader, you can do something like the following.</p>
<pre><code class="lang-javascript"><span class="hljs-keyword">import</span> {bootstrap,Loader} <span class="hljs-keyword">from</span> <span class="hljs-string">"robojs"</span>
<span class="hljs-keyword">const</span> definitions={
<span class="hljs-string">"my-mediator"</span>: <span class="hljs-string">"component/mediator"</span>
};
<span class="hljs-comment">//this is the strategy used to load external modules</span>
<span class="hljs-function"><span class="hljs-keyword">function</span> <span class="hljs-title">loaderFn</span>(<span class="hljs-params">id, resolve, reject</span>) </span>{
System.import(id).then(resolve).catch(reject)
}
bootstrap({definitions,<span class="hljs-attr">loader</span>:Loader(loaderFn)})
</code></pre>
<p>If you use the ES2015 <code>import</code> statement, you can do something different.
You don't need to load the <code>Mediator</code> from an external file, but can simply retrieve the <code>Mediator</code> function from the <code>definitions</code> Map.</p>
<pre><code class="lang-javascript"><span class="hljs-keyword">import</span> {bootstrap,Loader} <span class="hljs-keyword">from</span> <span class="hljs-string">"robojs"</span>
<span class="hljs-keyword">import</span> Mediator <span class="hljs-keyword">from</span> <span class="hljs-string">"./component/mediator"</span>;
<span class="hljs-keyword">const</span> definitions={
<span class="hljs-string">"my-mediator"</span>: Mediator
};
<span class="hljs-comment">//this is the strategy used to get Mediator from definitions</span>
<span class="hljs-function"><span class="hljs-keyword">function</span> <span class="hljs-title">loaderFn</span>(<span class="hljs-params">id, resolve, reject</span>) </span>{
resolve(definitions[id]);
}
bootstrap({definitions,<span class="hljs-attr">loader</span>:Loader(loaderFn)});
</code></pre>
<h3 id="eventdispatcher-object-">EventDispatcher Object.</h3>
<p>The <code>EventDispatcher</code> can be your messaging System. It dispatches and listens to <code>Events</code> from your Application.
It's meant to be a Singleton in your application. Every robojs instance has one. </p>
<p>You can get a new instance of EventDispatcher by calling <code>makeDispatcher</code> function</p>
<pre><code class="lang-javascript"><span class="hljs-keyword">import</span> {makeDispatcher} <span class="hljs-keyword">from</span> <span class="hljs-string">"robojs"</span>
<span class="hljs-keyword">var</span> myNewEventDispatcher=makeDispatcher();
</code></pre>
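<p>A possible usage sketch (the event name and payload are illustrative only): since every Mediator of a robojs instance receives the same dispatcher, two mediators can talk to each other through it.</p>
<pre><code class="lang-javascript">// inside a Mediator: function Mediator(node, dispatcher) { ... }
dispatcher.addEventListener("place-changed", function (e) {
    console.log("payload", e.detail);
});
// in another Mediator, emit the event as a CustomEvent
dispatcher.dispatchEvent(new CustomEvent("place-changed", {detail: {lat: 0, lng: 0}}));
</code></pre>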
<h2 id="configurations">Configurations</h2>
<p>Using <code>RequireJS</code></p>
<pre><code class="lang-javascript">
requirejs.config({
<span class="hljs-attr">paths</span>: {
<span class="hljs-attr">robojs</span>: <span class="hljs-string">"../../dist/robojs"</span>
}
});
<span class="hljs-comment">//</span>
<span class="hljs-built_in">require</span>([<span class="hljs-string">"robojs"</span>],({bootstrap})=>{
<span class="hljs-keyword">const</span> definitions={
<span class="hljs-string">"my-mediator"</span>: <span class="hljs-string">"component/mediator"</span>
}
bootstrap({definitions});
});
</code></pre>
<p>Using <code>ES2015</code></p>
<pre><code class="lang-javascript"><span class="hljs-keyword">import</span> {bootstrap} <span class="hljs-keyword">from</span> <span class="hljs-string">"robojs"</span>
<span class="hljs-keyword">const</span> definitions={
<span class="hljs-string">"my-mediator"</span>: <span class="hljs-string">"component/mediator"</span>
};
bootstrap({definitions});
</code></pre>
<p>Using <code>SystemJS</code></p>
<pre><code class="lang-javascript">
System.config({
<span class="hljs-attr">defaultJSExtensions</span>: <span class="hljs-literal">true</span>,
<span class="hljs-attr">paths</span>:{
<span class="hljs-attr">robojs</span>:<span class="hljs-string">"../../dist/robojs"</span>
}
});
<span class="hljs-comment">//</span>
System.import(<span class="hljs-string">"robojs"</span>).then(<span class="hljs-function">(<span class="hljs-params">{bootstrap}</span>)=></span>{
<span class="hljs-keyword">const</span> definitions={
<span class="hljs-string">"my-mediator"</span>: <span class="hljs-string">"component/mediator"</span>
};
bootstrap({definitions});
});
</code></pre>
<h1 id="dependencies">Dependencies</h1>
<p>no dependencies</p>
<h3 id="build-project">Build project</h3>
<p>You can run the npm script named <code>build</code>.</p>
<pre><code>npm <span class="hljs-keyword">run</span><span class="bash"> build</span>
</code></pre><h3 id="test-project">Test project</h3>
<p>From the test folder you can run the npm script named <code>test</code>.</p>
<pre><code>npm <span class="hljs-keyword">run</span><span class="bash"> <span class="hljs-built_in">test</span></span>
</code></pre><h3 id="polyfills">Polyfills</h3>
<p>If you need to support old browsers, you need a polyfill for:</p>
<ol>
<li><a href="https://github.com/megawac/MutationObserver.js">MutationObserver</a> by megawac.</li>
<li><a href="https://github.com/webcomponents">Webcomponents</a>. If you use custom element extension.</li>
</ol>
</div>
</div>
<div class="col-4 col-menu menu-sticky-wrap menu-highlight">
<nav class="tsd-navigation primary">
<ul>
<li class="globals ">
<a href="globals.html"><em>Globals</em></a>
</li>
<li class="label tsd-is-external">
<span>Internals</span>
</li>
<li class=" tsd-kind-external-module">
<a href="modules/_src_type_definitions_robojs_d_.html">"src/type-<wbr>definitions/robojs.d"</a>
</li>
<li class="label tsd-is-external">
<span>Externals</span>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="modules/_node_modules__types_estree_index_d_.html">"node_<wbr>modules/@types/estree/index.d"</a>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="modules/_node_modules__types_node_index_d_.html">"node_<wbr>modules/@types/node/index.d"</a>
</li>
<li class=" tsd-kind-external-module tsd-is-external">
<a href="modules/_node_modules__types_node_inspector_d_.html">"node_<wbr>modules/@types/node/inspector.d"</a>
</li>
</ul>
</nav>
<nav class="tsd-navigation secondary menu-sticky">
<ul class="before-current">
</ul>
</nav>
</div>
</div>
</div>
<footer class="with-border-bottom">
<div class="container">
<h2>Legend</h2>
<div class="tsd-legend-group">
<ul class="tsd-legend">
<li class="tsd-kind-module"><span class="tsd-kind-icon">Module</span></li>
<li class="tsd-kind-object-literal"><span class="tsd-kind-icon">Object literal</span></li>
<li class="tsd-kind-variable"><span class="tsd-kind-icon">Variable</span></li>
<li class="tsd-kind-function"><span class="tsd-kind-icon">Function</span></li>
<li class="tsd-kind-function tsd-has-type-parameter"><span class="tsd-kind-icon">Function with type parameter</span></li>
<li class="tsd-kind-index-signature"><span class="tsd-kind-icon">Index signature</span></li>
<li class="tsd-kind-type-alias"><span class="tsd-kind-icon">Type alias</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-enum"><span class="tsd-kind-icon">Enumeration</span></li>
<li class="tsd-kind-enum-member"><span class="tsd-kind-icon">Enumeration member</span></li>
<li class="tsd-kind-property tsd-parent-kind-enum"><span class="tsd-kind-icon">Property</span></li>
<li class="tsd-kind-method tsd-parent-kind-enum"><span class="tsd-kind-icon">Method</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-interface"><span class="tsd-kind-icon">Interface</span></li>
<li class="tsd-kind-interface tsd-has-type-parameter"><span class="tsd-kind-icon">Interface with type parameter</span></li>
<li class="tsd-kind-constructor tsd-parent-kind-interface"><span class="tsd-kind-icon">Constructor</span></li>
<li class="tsd-kind-property tsd-parent-kind-interface"><span class="tsd-kind-icon">Property</span></li>
<li class="tsd-kind-method tsd-parent-kind-interface"><span class="tsd-kind-icon">Method</span></li>
<li class="tsd-kind-index-signature tsd-parent-kind-interface"><span class="tsd-kind-icon">Index signature</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-class"><span class="tsd-kind-icon">Class</span></li>
<li class="tsd-kind-class tsd-has-type-parameter"><span class="tsd-kind-icon">Class with type parameter</span></li>
<li class="tsd-kind-constructor tsd-parent-kind-class"><span class="tsd-kind-icon">Constructor</span></li>
<li class="tsd-kind-property tsd-parent-kind-class"><span class="tsd-kind-icon">Property</span></li>
<li class="tsd-kind-method tsd-parent-kind-class"><span class="tsd-kind-icon">Method</span></li>
<li class="tsd-kind-accessor tsd-parent-kind-class"><span class="tsd-kind-icon">Accessor</span></li>
<li class="tsd-kind-index-signature tsd-parent-kind-class"><span class="tsd-kind-icon">Index signature</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-constructor tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited constructor</span></li>
<li class="tsd-kind-property tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited property</span></li>
<li class="tsd-kind-method tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited method</span></li>
<li class="tsd-kind-accessor tsd-parent-kind-class tsd-is-inherited"><span class="tsd-kind-icon">Inherited accessor</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-property tsd-parent-kind-class tsd-is-protected"><span class="tsd-kind-icon">Protected property</span></li>
<li class="tsd-kind-method tsd-parent-kind-class tsd-is-protected"><span class="tsd-kind-icon">Protected method</span></li>
<li class="tsd-kind-accessor tsd-parent-kind-class tsd-is-protected"><span class="tsd-kind-icon">Protected accessor</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-property tsd-parent-kind-class tsd-is-private"><span class="tsd-kind-icon">Private property</span></li>
<li class="tsd-kind-method tsd-parent-kind-class tsd-is-private"><span class="tsd-kind-icon">Private method</span></li>
<li class="tsd-kind-accessor tsd-parent-kind-class tsd-is-private"><span class="tsd-kind-icon">Private accessor</span></li>
</ul>
<ul class="tsd-legend">
<li class="tsd-kind-property tsd-parent-kind-class tsd-is-static"><span class="tsd-kind-icon">Static property</span></li>
<li class="tsd-kind-call-signature tsd-parent-kind-class tsd-is-static"><span class="tsd-kind-icon">Static method</span></li>
</ul>
</div>
</div>
</footer>
<div class="container tsd-generator">
<p>Generated using <a href="http://typedoc.org/" target="_blank">TypeDoc</a></p>
</div>
<div class="overlay"></div>
<script src="assets/js/main.js"></script>
<script>if (location.protocol == 'file:') document.write('<script src="assets/js/search.js"><' + '/script>');</script>
</body>
</html>
<|start_filename|>sample/systemjs/client/Application.js<|end_filename|>
/**
* Created by marco.gobbi on 18/12/2014.
*/
define(function (require) {
"use strict";
var rjs = require("robojs");
var definitions = require("./definitions");
function loaderFn(id, resolve, reject) {
return System.import(id).then(resolve).catch(reject)
}
function Application() {
var application= rjs.bootstrap({
definitions: definitions
, loader: new rjs.CustomLoader(loaderFn)
});
function handler() {
var element = document.createElement("div");
element.innerHTML = "<div data-mediator='foo-element'>foo! <div data-mediator='bar-element'>bar!</div></div>";//.clone();
document.body.appendChild(element.firstElementChild);
}
function dispose() {
application.dispose();
}
document.querySelector(".add-button").addEventListener("click",handler);
document.querySelector(".dispose-button").addEventListener("click",dispose );
}
return Application();
});
<|start_filename|>src/events/EventTarget.js<|end_filename|>
import root from "../internal/_root";
export class EventDispatcher {
constructor() {
this.listeners_ = {};
}
addEventListener(type, handler) {
// let listeners_type = this.listeners_[type];
if (!this.listeners_[type]) {
this.listeners_[type] = [];
}
if (!this.listeners_[type].includes(handler)) {
this.listeners_[type].push(handler);
}
}
removeEventListener(type, handler) {
let listeners_type = this.listeners_[type];
if (listeners_type === undefined) return;
for (let i = 0, l; l = listeners_type[i]; i++)
if (l === handler) {
listeners_type.splice(i, 1);
break;
}
if (!listeners_type.length) {
delete this.listeners_[type];
}
}
dispatchEvent(event) {
// Since we are using DOM Event objects we need to override some of the
// properties and methods so that we can emulate this correctly.
const self = this;
event.__defineGetter__("target", function () {
return self;
});
const type = event.type;
let prevented = 0;
let listeners_type = this.listeners_[type];
if (listeners_type === undefined) return true;
let handlers = listeners_type.concat();
handlers
.map(handler => handler.handleEvent ? handler.handleEvent.bind(handler) : handler)
.forEach(handler => {
prevented = handler(event) === false;
});
return !prevented && !event.defaultPrevented;
}
}
//
let _EventTarget = root.EventTarget;
try {
new _EventTarget();
} catch (e) {
_EventTarget = EventDispatcher;
}
export const EventTarget = _EventTarget;
<|start_filename|>src/internal/_map.js<|end_filename|>
/**
* Created by mgobbi on 20/04/2017.
*/
import curry from "./_curry";
export default curry((fn, list) => {
// return Array.from(list).map(fn);
let idx = 0;
const length = list.length;
const result = [];
for(idx;idx<length;idx++){
result[idx]=fn(list[idx]);
}
return result;
});
<|start_filename|>sample/custom-element/config.js<|end_filename|>
/**
* Created by marco.gobbi on 10/11/2014.
*/
requirejs.config({
paths: {
robojs: "../../dist/robojs"
}
});
require(["./polyfills","require"],function(_,require){
require(["./application"], function (app) {app()});
})
<|start_filename|>sample/jquery-sortable/config.js<|end_filename|>
/**
* Created by marco.gobbi on 10/11/2014.
*/
requirejs.config({
paths: {
robojs: "../../dist/robojs.min"
}
});
require(["./application"], function () {});
<|start_filename|>sample/redux-like/client/todo-list.js<|end_filename|>
/**
* Created by mgobbi on 12/12/2016.
*/
define(function (require) {
return function (node, dispatcher) {
var visibilityFilters = {
SHOW_ALL: function () {
return true;
}
, SHOW_COMPLETED: function (todo) {
return todo.completed;
}
, SHOW_ACTIVE: function (todo) {
return !todo.completed;
}
};
dispatcher.addEventListener("state", function (e) {
var state = e.detail;
var filterFn = visibilityFilters[state.visibilityFilter];
node.innerHTML = state.todos
.reduce(function (prev, todo, i) {
if (!filterFn(todo))return prev;
return prev.concat(`<li data-mediator="todo-thumb" data-index="${i}" class="${todo.completed ? 'completed' : ''}"><a href="#">${todo.text}</a></li>`)
}, "<ul>").concat("</ul>")
})
};
});
<|start_filename|>sample/redux-like/client/application.js<|end_filename|>
/**
* Created by marco.gobbi on 18/12/2014.
*/
define(function (require) {
"use strict";
var rjs = require("robojs");
var definitions = {
"todo-app": "client/todo-app"
,"todo-add": "client/todo-add"
,"todo-list": "client/todo-list"
,"todo-thumb": "client/todo-thumb"
,"todo-filter": "client/todo-filter"
};
return function () {
rjs.bootstrap({
definitions: definitions
}).promise.catch(function (e) {
console.log(e);
});
};
});
<|start_filename|>test/Signal.spec.js<|end_filename|>
/**
* Created by mgobbi on 05/04/2017.
*/
import {Signal} from "../src/events/Signal";
var assert = require("chai").assert;
describe('Signal', function () {
it('it is a function', function () {
assert.instanceOf(new Signal(),Signal);
});
it('returns an object exposing the expected functions', function () {
let signal = new Signal();
/*
connect,
connectOnce,
disconnect,
disconnectAll,
emit
* */
assert.isFunction(signal.connect);
assert.isFunction(signal.connectOnce);
assert.isFunction(signal.disconnect);
assert.isFunction(signal.disconnectAll);
assert.isFunction(signal.emit);
});
it('emit dispatches correctly to 1 listener', function () {
var params = [12345, 'text', {a: 1}];
let signal = new Signal();
signal.connect(_params => assert.equal(_params, params));
signal.emit(params);
});
it('emit dispatches correctly to multiple listeners', function () {
var params = [12345, 'text', {a: 1}];
let signal = new Signal();
signal.connect(_params => assert.equal(_params, params));
signal.connect(_params => assert.equal(_params, params));
signal.connect(_params => assert.equal(_params, params));
signal.connect(_params => assert.equal(_params, params));
signal.emit(params);
});
it('connect connects the same listener/scope only once', function () {
let signal = new Signal();
const listener = _ => _;
signal.connect(listener);
signal.connect(listener);
signal.connect(listener);
signal.connect(listener);
assert.lengthOf(signal.listenerBoxes, 1, "");
});
it('connect connects listeners with different scopes', function () {
let signal = new Signal();
const scope = {};
const scope2 = {a: 1};
const listener = _ => _;
signal.connect(listener);
signal.connect(listener, scope);
signal.connect(listener, scope2);
//
signal.connect(listener);
signal.connect(listener, scope2);
signal.connect(listener, scope);
assert.lengthOf(signal.listenerBoxes, 3, "");
});
it('disconnect correctly removes the listener', function () {
let signal = new Signal();
const listener = _ => assert.fail("[the listener was called]", "[the listener should have been removed]", "slots are not removed correctly");
signal.connect(listener);
signal.disconnect(listener);
signal.emit({});
assert.isOk(true, 'the listener has been removed');
});
it('disconnect removes listeners with different scopes', function () {
let signal = new Signal();
const scope = {};
const scope2 = {a: 1};
const listener = function (p) {
assert.equal(this, scope, "lo scope disconnesso è ancora in giro");
};
signal.connect(listener, scope2);
signal.connect(listener, scope);
signal.disconnect(listener, scope2);
assert.lengthOf(signal.listenerBoxes, 1, "");
signal.emit({});
});
it('disconnectAll removes all the listeners', function () {
let signal = new Signal();
const scope = {};
const scope2 = {a: 1};
const listener = function (p) {
assert.fail("doveva essere eliminato");
};
signal.connect(listener, scope2);
signal.connect(listener);
signal.connect(listener, scope);
signal.connect(_ => assert.fail("doveva essere eliminato"), scope);
signal.disconnectAll();
signal.emit({});
assert.lengthOf(signal.listenerBoxes, 0, "");
});
it("getNumListeners", function () {
let signal = new Signal();
const scope2 = {a: 1};
const listener = function (p) {
};
signal.connect(listener, scope2);
signal.connect(listener);
signal.emit({});
assert.equal(signal.getNumListeners(), 2, "");
})
//
it("connectOnce: un solo listener", function () {
let signal = new Signal();
const listener = function (p) {
};
signal.connectOnce(listener);
signal.connectOnce(listener);
// signal.emit({});
assert.equal(signal.getNumListeners(), 1, "");
})
it("connectOnce:emit una sola volta", function () {
let signal = new Signal();
var i = 0;
const listener = function (p) {
i = p;
};
signal.connectOnce(listener);
signal.connectOnce(listener);
signal.emit(7);
assert.equal(signal.getNumListeners(), 0, "once emit has been called the listener is removed");
assert.equal(i, 7, "the emitted value is passed correctly");
})
it("connectOnce:emit connect dentro listener!!!bomba", function () {
let signal = new Signal();
var i = 0;
const listener = function (p) {
signal.connect(k=>{
i=k;
});
i=p;
};
signal.connect(listener);
signal.emit(7);
signal.emit(8);
assert.equal(signal.getNumListeners(), 3);
assert.equal(i, 8, "the emitted value is passed correctly");
})
it("connectOnce: no connectOnce e connect", function () {
let signal = new Signal();
const listener = function (p) {
};
signal.connectOnce(listener);
try {
signal.connect(listener);
assert.fail("no! deve andare in errore");
} catch (e) {
assert.ok("sì! va in error");
}
})
it("connectOnce: no connect e connectOnce", function () {
let signal = new Signal();
const listener = function (p) {
};
signal.connect(listener);
try {
signal.connectOnce(listener);
assert.fail("no! deve andare in errore");
} catch (e) {
assert.ok("sì! va in error");
}
})
});
<|start_filename|>src/internal/_arity.js<|end_filename|>
/**
* Created by mgobbi on 20/04/2017.
*/
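// Returns a wrapper with the requested formal arity (wrapper.length === arity) that delegates to `fn`.
// Wrapper factories are cached per arity in FUNCTIONS; a non-numeric arity throws a TypeError.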
const arityFn=(function (FUNCTIONS) {
return (arity, fn) => {
if (typeof arity !== 'number') {
throw new TypeError('Expected arity to be a number, got ' + arity);
}
if (!FUNCTIONS[arity]) {
let params = [];
for (let i = 0; i < arity; i++) {
params.push('_' + i);
}
FUNCTIONS[arity] = new Function(
'fn',
'return function arity' + arity + ' (' + params.join(', ') + ') { return fn.apply(this, arguments); }'
);
}
return FUNCTIONS[arity](fn);
};
})({});
export default arityFn;
<|start_filename|>src/internal/_unique.js<|end_filename|>
export default function (arrArg) {
return arrArg.filter(function (elem, pos, arr) {
return arr.indexOf(elem) === pos;
});
}
<|start_filename|>sample/custom-element/polyfills.js<|end_filename|>
define(function () {
// Polyfill for creating CustomEvents on IE9/10/11
// code pulled from:
// https://github.com/d4tocchini/customevent-polyfill
// https://developer.mozilla.org/en-US/docs/Web/API/CustomEvent#Polyfill
try {
var ce = new window.CustomEvent('test');
ce.preventDefault();
if (ce.defaultPrevented !== true) {
// IE has problems with .preventDefault() on custom events
// http://stackoverflow.com/questions/23349191
throw new Error('Could not prevent default');
}
} catch(e) {
var CustomEvent = function(event, params) {
var evt, origPrevent;
params = params || {
bubbles: false,
cancelable: false,
detail: undefined
};
evt = document.createEvent("CustomEvent");
evt.initCustomEvent(event, params.bubbles, params.cancelable, params.detail);
origPrevent = evt.preventDefault;
evt.preventDefault = function () {
origPrevent.call(this);
try {
Object.defineProperty(this, 'defaultPrevented', {
get: function () {
return true;
}
});
} catch(e) {
this.defaultPrevented = true;
}
};
return evt;
};
CustomEvent.prototype = window.Event.prototype;
window.CustomEvent = CustomEvent; // expose definition to window
}
if (!Object.assign) {
Object.defineProperty(Object, 'assign', {
enumerable: false,
configurable: true,
writable: true,
value: function(target) {
'use strict';
if (target === undefined || target === null) {
throw new TypeError('Cannot convert first argument to object');
}
var to = Object(target);
for (var i = 1; i < arguments.length; i++) {
var nextSource = arguments[i];
if (nextSource === undefined || nextSource === null) {
continue;
}
nextSource = Object(nextSource);
var keysArray = Object.keys(Object(nextSource));
for (var nextIndex = 0, len = keysArray.length; nextIndex < len; nextIndex++) {
var nextKey = keysArray[nextIndex];
var desc = Object.getOwnPropertyDescriptor(nextSource, nextKey);
if (desc !== undefined && desc.enumerable) {
to[nextKey] = nextSource[nextKey];
}
}
}
return to;
}
});
}
});
<|start_filename|>src/net/Loader.js<|end_filename|>
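// Base Loader: `load(id)` wraps `onComplete` in a Promise.
// Subclasses implement `onComplete(id, resolve, reject)` to actually fetch the module:
// AMDLoader delegates to window.require, CustomLoader to a user-provided function.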
export class Loader {
load(id) {
return new Promise((resolve, reject) => this.onComplete(id, resolve, reject));
}
onComplete() {
//not implemented
}
}
export class AMDLoader extends Loader {
onComplete(id, resolve, reject) {
window.require([id], resolve, reject);
}
}
export class CustomLoader extends Loader {
constructor(fn) {
super();
this.fn = fn;
}
onComplete(id, resolve, reject) {
this.fn(id, resolve, reject);
}
}
<|start_filename|>test/internal/noop.spec.js<|end_filename|>
import {noop} from "../../src/internal/index";
var assert = require("chai").assert;
describe('noop', function () {
it('returns its first argument', function () {
assert.equal(noop(undefined), undefined);
assert.equal(noop('foo'), 'foo');
assert.equal(noop('foo', 'bar'), 'foo');
});
it('has length 1', function () {
assert.lengthOf(noop, 1);
});
});
<|start_filename|>src/index.js<|end_filename|>
export {Loader, AMDLoader, CustomLoader} from "./net/Loader";
export {EventTarget} from "./events/EventTarget";
export {Signal} from "./events/Signal";
export {DomWatcher} from "./display/DomWatcher";
export {MediatorHandler} from "./display/MediatorHandler";
import {Robo} from "./display/Robo";
export {Robo} from "./display/Robo";
export {CustomElementHandler} from "./display/CustomElementHandler";
//
export const bootstrap = options => {
return new Robo(options);
};
<|start_filename|>sample/custom-element/client/my-thumbnail.js<|end_filename|>
/**
* Created by mgobbi on 03/02/2016.
*/
define(function (require) {
function Module(dispatcher) {
this.dispatcher = dispatcher;
this.counter = document.createElement("my-counter");
this.counter.setAttribute("data-id", this.id);
this.addEventListener("click", function () {
this.dispatcher.dispatchEvent(new CustomEvent("create-element", {detail:this.id}));
}.bind(this));
}
Module.prototype = Object.create(HTMLElement.prototype);
Module.prototype.constructor = Module;
Object.assign(Module.prototype, {
connectedCallback: function () {
this.appendChild(this.counter);
},
disconnectedCallback: function () {
console.log("deattached my-custom-element", this)
}
});
return Module;
});
<|start_filename|>sample/multiple-context/client/application.js<|end_filename|>
/**
* Created by marco.gobbi on 18/12/2014.
*/
define(function (require) {
"use strict";
var rjs = require("robojs");
var definitions = {
"search-map": "client/search-map"
};
return function () {
rjs.bootstrap({
definitions: definitions
}).promise.catch(function (e) {
console.log(e);
});
};
});
<|start_filename|>src/display/CustomElementHandler.js<|end_filename|>
/**
* Created by marcogobbi on 07/05/2017.
*/
import {AHandler} from "./AHandler";
import {Disposable} from "./Disposable";
export class CustomElementHandler extends AHandler {
constructor(params) {
super(params);
this.REGISTERED_ELEMENTS = {};
}
updateCache(id) {
this.REGISTERED_ELEMENTS[id] = true;
return this.REGISTERED_ELEMENTS;
}
inCache(id) {
return !!this.REGISTERED_ELEMENTS[id];
}
getDefinition(node) {
return this.definitions[node.tagName.toLowerCase()];
}
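// Registers the node's tag name as a custom element exactly once: the Mediator class is extended
// so that every created instance receives the shared dispatcher in its constructor.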
create(node, Mediator) {
let tagName = "";
let dispatcher = this.dispatcher;
if (!this.inCache(node.tagName.toLowerCase())) {
tagName = node.tagName.toLowerCase();
if (!tagName.match(/-/gim)) {
throw new Error("The name of a custom element must contain a dash (-). So <x-tags>, <my-element>, and <my-awesome-app> are all valid names, while <tabs> and <foo_bar> are not.");
}
window.customElements.define(tagName, class extends Mediator {
constructor() {
super(dispatcher);
}
});
this.updateCache(tagName);
}
return new Disposable();
}
hasMediator(node) {
let id = node.tagName.toLowerCase();
return !!this.getDefinition(node) && !this.inCache(id);
}
getAllElements(node) {
const _children = Array.from(node.querySelectorAll("*")).filter(function (el) {
return el.tagName.match(/-/gim);
});
let root=[];
if(node.tagName.match(/-/gim)){
root=[node];
}
return root.concat(_children);
}
}
<|start_filename|>sample/multiple-context/client/map-panel.js<|end_filename|>
/**
* Created by mgobbi on 12/12/2016.
*/
define(function () {
return function (node, dispatcher) {
var map = new google.maps.Map(node, {
zoom: 8,
center: {lat: -34.397, lng: 150.644}
});
dispatcher.addEventListener("place-changed",function(e){
var center=e.detail;
map.setCenter(center);
})
};
});
<|start_filename|>sample/redux-like/client/todo-thumb.js<|end_filename|>
/**
* Created by mgobbi on 12/12/2016.
*/
define(function (require) {
var Actions = require("./actions");
return function (node, dispatcher) {
function handler() {
dispatcher.dispatchEvent(new CustomEvent(Actions.TOGGLE_TODO, {detail: index}));
}
var index = parseInt(node.dataset.index);
node.addEventListener("click", handler);
return function () {
node.removeEventListener("click", handler);
}
};
});
<|start_filename|>test/Disposable.spec.js<|end_filename|>
import {Disposable} from "../src/display/Disposable";
import _noop from "../src/internal/_noop";
var assert = require("chai").assert;
describe('Disposable', function () {
before(function () {
this.jsdom = require('jsdom-global')()
});
after(function () {
this.jsdom()
});
function myDispose() {
}
let disposable;
beforeEach(() => {
disposable = new Disposable();
});
it('Disposable is a instanceof Disposable', function () {
assert.instanceOf(disposable, Disposable);
});
it('handler default', function () {
assert.equal(disposable.mediatorId, "");
assert.isNull(disposable.node);
assert.equal(disposable.dispose, _noop);
});
it('handler.mediatorId', function () {
const config = {mediatorId: "mediatorId"};
disposable = new Disposable(config);
assert.equal(disposable.mediatorId, config.mediatorId);
});
it('handler.node', function () {
const myNode = {a: 1, b: 2};
const config = {node: myNode};
disposable = new Disposable(config);
assert.equal(disposable.node, config.node);
});
it('handler.dispose', function () {
const config = {dispose: myDispose};
disposable = new Disposable(config);
assert.equal(disposable.dispose, config.dispose);
});
});
<|start_filename|>sample/systemjs/config.js<|end_filename|>
/**
* Created by marcogobbi on 07/05/2017.
*/
System.config({
defaultJSExtensions: true,
paths:{
robojs:"../../dist/robojs.min"
}
});
System.import("./client/Application");
<|start_filename|>src/internal/_curry.js<|end_filename|>
/**
* Created by mgobbi on 14/03/2017.
*/
import _arity from "./_arity";
import _curryN from "./_curryN";
import _curry1 from "./_curry1";
export default function (fn) {
const length = fn.length;
if (length === 1) {
return _curry1(fn);
}
return _arity(length, _curryN(length, [], fn));
}
<|start_filename|>src/internal/index.js<|end_filename|>
export {default as arity} from "./_arity";
export {default as noop} from "./_noop";
export {default as compose} from "./_compose";
export {default as curry} from "./_curry";
export {default as curry1} from "./_curry1";
export {default as curryN} from "./_curryN";
export {default as filter} from "./_filter";
export {default as find} from "./_find";
export {default as flatten} from "./_flatten";
export {default as forEach} from "./_for-each";
export {default as isArrayLike} from "./_isArrayLike";
export {default as map} from "./_map";
export {default as pluck} from "./_pluck";
export {default as pipe} from "./_pipe";
export {default as reduce} from "./_reduce";
export {default as unique} from "./_unique";
export {default as root} from "./_root";
<|start_filename|>sample/multiple-context/client/autocomplete-bar.js<|end_filename|>
/**
* Created by mgobbi on 12/12/2016.
*/
define(function (require) {
return function (node, dispatcher) {
var autocomplete = new google.maps.places.Autocomplete(node);
google.maps.event.addListener(autocomplete, "place_changed", function (e) {
dispatcher.dispatchEvent(new CustomEvent("place-changed", {detail:autocomplete.getPlace().geometry.location}));
});
};
});
<|start_filename|>src/internal/_filter.js<|end_filename|>
/**
* Created by mgobbi on 20/04/2017.
*/
import curry from "./_curry";
export default curry((fn, list) => {
let idx = 0;
const len = list.length;
const result = [];
while (idx < len) {
if (fn(list[idx])) {
result[result.length] = list[idx];
}
idx += 1;
}
return result;
// return Array.from(list).filter(fn);
});
<|start_filename|>sample/multiple-context/client/search-map.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
define(function (require) {
var rjs = require("robojs");
return function (node, dispatcher) {
var definitions = {
"autocomplete-bar": "client/autocomplete-bar"
, "map-panel": "client/map-panel"
, "button-dispose": "client/button-dispose"
};
// create a new EventDispatcher for each search-map component.
// By default a new instance of EventDispatcher is created when MediatorHandler is invoked.
// In this case we need a reference of dispatcher which listen to 'dispose-component' event.
var componentDispatcher = new rjs.EventTarget();
var robojs = rjs.bootstrap({
handler:new rjs.MediatorHandler({definitions:definitions,dispatcher: componentDispatcher})
, root: node
});
componentDispatcher.addEventListener("dispose-component", function () {
node.parentNode.removeChild(node);
});
return function () {
console.log("DISPOSE");
robojs.dispose();
}
}
});
<|start_filename|>sample/systemjs/client/foo-element.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
function FooElement(node, dispatcher) {
function handler(e) {
e.currentTarget.parentElement.removeChild(e.currentTarget);
e.stopPropagation();
}
node.addEventListener("click", handler);
return function () {
node.removeEventListener("click", handler);
console.log("destroyed")
}
}
module.exports = FooElement;
<|start_filename|>src/display/MediatorHandler.js<|end_filename|>
/**
* Created by marco.gobbi on 21/01/2015.
*/
import noop from "../internal/_noop";
import {nextUid} from "./next-uid";
import {AHandler} from "./AHandler";
import {Disposable} from "./Disposable";
export class MediatorHandler extends AHandler {
constructor(params) {
super(params);
this.MEDIATORS_CACHE = [];
}
get selector() {
return "data-mediator";
}
getDefinition(node) {
return this.definitions[node.getAttribute(this.selector)];
}
inCache(node) {
return !!this.MEDIATORS_CACHE.find((disposable) => disposable.node === node);
}
updateCache(disposable) {
this.MEDIATORS_CACHE.push(disposable);//[mediatorId] = disposeFunction;
return this.MEDIATORS_CACHE;
}
hasMediator(node) {
return !!this.getDefinition(node) && !this.inCache(node);
}
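// Creates the Mediator for a node: tags it with a generated "mediatorid" attribute, invokes the
// Mediator function (only when the node is attached; its optional return value becomes the dispose
// callback) and caches the resulting Disposable.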
create(node, Mediator) {
const mediatorId = nextUid();
node.setAttribute("mediatorid", mediatorId);
let dispose = noop;
if (node.parentNode) {
dispose = Mediator(node, this.dispatcher) || noop;
}
let disposable = new Disposable({
mediatorId,
node,
dispose
});
this.updateCache(disposable);
return disposable;
}
getAllElements(node) {
const nodes = Array.from(node.querySelectorAll(`[${this.selector}]`)).slice(0);
if (node.getAttribute(this.selector)) {
nodes.unshift(node);
}
return nodes;
}
static disposeMediator(disposable) {
disposable.dispose();
disposable.node = null;
}
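// Disposes every cached mediator bound to the given node (or whose node reference was already
// cleared) and returns the cache without the disposed entries.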
_destroy(node) {
const l = this.MEDIATORS_CACHE.length;
for (let i = 0; i < l; i++) {
let disposable = this.MEDIATORS_CACHE[i];
if (disposable && (!disposable.node || disposable.node === node)) {
MediatorHandler.disposeMediator(disposable);
this.MEDIATORS_CACHE[i] = null;
}
}
return this.MEDIATORS_CACHE.filter(i => i);
}
destroy(node) {
this.MEDIATORS_CACHE = this._destroy(node);
return this.MEDIATORS_CACHE;
}
dispose() {
this.MEDIATORS_CACHE.forEach(MediatorHandler.disposeMediator);
this.MEDIATORS_CACHE = null;
this.dispatcher = null;
}
}
<|start_filename|>sample/custom-element/application.js<|end_filename|>
/**
* Created by marco.gobbi on 18/12/2014.
*/
define(function (require) {
"use strict";
var rjs = require("robojs");
var definitions = require("./client/definitions");
function Application() {
rjs.bootstrap({
definitions: definitions,
loader: new rjs.AMDLoader(),
handler:new rjs.CustomElementHandler({definitions:definitions})
}).promise.catch(function (e) {
console.log(e);
})
}
return Application;
});
<|start_filename|>sample/custom-element/client/bar-element.js<|end_filename|>
define(function (require, exports, module) {
function BarElement(dispatcher) {
this.dispatcher = dispatcher;
console.log("created bar element", this)
}
BarElement.prototype = Object.create(HTMLElement.prototype);
BarElement.prototype.constructor = BarElement;
Object.assign(BarElement.prototype, {
connectedCallback: function () {
console.log("attached bar element", this)
},
disconnectedCallback: function () {
console.log("deattached bar element", this)
}
})
module.exports = BarElement;
});
<|start_filename|>sample/redux-like/config.js<|end_filename|>
/**
* Created by marco.gobbi on 10/11/2014.
*/
requirejs.config({
paths: {
robojs: "../../dist/robojs"
}
});
require(["./client/application"], function (main) {
main();
});
<|start_filename|>test/DomWatcher.spec.js<|end_filename|>
/**
* Created by mgobbi on 05/04/2017.
*/
import {MediatorHandler} from "../src/display/MediatorHandler";
import {DomWatcher} from "../src/display/DomWatcher";
import {Signal} from "../src/events/Signal";
var assert = require("chai").assert;
require('./libs/MutationObserver');
describe('DomWatcher', function () {
before(function () {
this.jsdom = require('jsdom-global')()
});
after(function () {
this.jsdom()
});
let watcher, handler, mutations;
const definitions = {};
let div1;
let div2;
let div3;
beforeEach(() => {
handler = new MediatorHandler({definitions});
document.body.innerHTML = "";
watcher = new DomWatcher(document.body, handler);
div1 = document.createElement("div");
div2 = document.createElement("div");
div3 = document.createElement("div");
div1.setAttribute("data-mediator", "a");
div2.setAttribute("data-mediator", "b");
document.body.appendChild(div1);
document.body.appendChild(div2);
document.body.appendChild(div3);
mutations = [{
addedNodes: [document.body.querySelectorAll("*")]
, removedNodes: [document.body.querySelectorAll("*")]
}]
});
it('DomWatcher is instanceOf DomWatcher', function () {
assert.instanceOf(watcher, DomWatcher);
});
it('the returned object has 3 properties, 2 Signals and dispose', function () {
assert.instanceOf(watcher.onAdded, Signal, "onAdded is not a Signal");
assert.instanceOf(watcher.onRemoved, Signal, "onRemoved is not a Signal");
assert.isFunction(watcher.dispose, "dispose is not a function");
watcher.dispose();
});
it("watcher.handleMutations", () => {
watcher.onAdded.connect(nodes=>{
assert.equal(div1,nodes[0]);
assert.equal(div2,nodes[1]);
assert.lengthOf(nodes,2);
});
watcher.onRemoved.connect(nodes=>{
assert.equal(div1,nodes[0]);
assert.equal(div2,nodes[1]);
assert.lengthOf(nodes,2);
});
watcher.handleMutations(mutations)
})
});
<|start_filename|>test/internal/is-array-like.spec.js<|end_filename|>
import {isArrayLike} from "../../src/internal/index";
import _isArray from "../../src/internal/_isArray";
var assert = require("chai").assert;
describe('isArrayLike', function () {
it('is true for Arrays', function () {
assert.equal(isArrayLike([]), true);
assert.equal(isArrayLike([1, 2, 3, 4]), true);
assert.equal(isArrayLike([null]), true);
});
it('is true for arguments', function () {
function test() {
return isArrayLike(arguments);
}
assert.equal(test(), true, "nessun argomento");
assert.equal(test(1, 2, 3), true, "n argomenti");
assert.equal(test(null), true, "null argomento");
});
it('is false for Strings', function () {
assert.equal(isArrayLike(''), false);
assert.equal(isArrayLike(new String("asd")), false);
assert.equal(isArrayLike('abcdefg'), false);
});
it('is true for arbitrary objects with numeric length, if extreme indices are defined', function () {
var obj1 = {length: 0};
var obj2 = {0: 'something', length: 0};
var obj3 = {0: void 0, length: 0};
var obj4 = {0: 'zero', 1: 'one', length: 2};
var obj5 = {0: 'zero', length: 2};
var obj6 = {1: 'one', length: 2};
assert.equal(isArrayLike(obj1), true);
assert.equal(isArrayLike(obj2), true);
assert.equal(isArrayLike(obj3), true);
assert.equal(isArrayLike(obj4), true);
assert.equal(isArrayLike(obj5), false);
assert.equal(isArrayLike(obj6), false);
});
it('is false for everything else', function () {
assert.equal(isArrayLike(undefined), false);
assert.equal(isArrayLike(null), false);
assert.equal(isArrayLike(123), false);
assert.equal(isArrayLike({}), false);
assert.equal(isArrayLike({a:1}), false);
assert.equal(isArrayLike(false), false);
assert.equal(isArrayLike(function () {
}), false);
});
it('has length 1', function () {
assert.lengthOf(isArrayLike, 1);
assert.equal(_isArray([]), true);
});
it('is array polyfill', function () {
var _isarray=Array.isArray;
Array.isArray=null;
assert.equal(isArrayLike({}), false);
assert.equal(_isArray([]), true);
Array.isArray=_isarray;
});
});
<|start_filename|>sample/jquery-sortable/application.js<|end_filename|>
/**
* Created by marco.gobbi on 18/12/2014.
*/
define(function (require) {
"use strict";
var rjs = require("robojs");
function Application() {
/**
*
* @type {function}
* bootstrap is a sugar function to hide internal dependencies.
* A MediatorsBuilder is created.
* MediatorsBuilder will iterate the DOM trying to match definitions keys with custom elements tag name.
* @return {Promise}.
* Promise is meant to be resolved when every mediators are loaded.
*
*/
rjs.bootstrap({
definitions: {
"sortable-item":"./sortable-item"
}
}).promise.catch(function(e){
console.log(e);
})
}
return Application();
});
<|start_filename|>sample/custom-element/client/maps-element.js<|end_filename|>
/**
* Created by marco.gobbi on 09/12/2014.
*/
define(function (require, exports, module) {
function FooElement(dispatcher) {
this.dispatcher = dispatcher;
}
FooElement.prototype = Object.create(HTMLElement.prototype);
FooElement.prototype.constructor = FooElement;
Object.assign(FooElement.prototype, {
connectedCallback: function () {
console.log("map")
var map = new google.maps.Map(this, {
zoom: 8,
center: {lat: -34.397, lng: 150.644}
});
this.dispatcher.addEventListener("place-changed", function (e) {
var center = e.detail;
map.setCenter(center);
})
},
disconnectedCallback: function () {
console.log("deattached foo element", this)
}
});
module.exports = FooElement;
});
<|start_filename|>src/internal/_reduce.js<|end_filename|>
/**
* Created by marcogobbi on 20/04/2017.
*/
import curry from "./_curry";
export default curry(function (xf, acc, list) {
let idx = 0;
const len = list.length;
while (idx < len) {
acc = xf(acc, list[idx]);
idx += 1;
}
return acc;
});
<|start_filename|>test/libs/MutationObserver.js<|end_filename|>
global.MutationObserver = function (callback) {
this.callback = callback;
};
global.MutationObserver.prototype = {
disconnect() {
}
, observe(root, spec) {
this.callback(global.mutations || [{addedNodes: [], removedNodes: []}]);
}
}
<|start_filename|>sample/redux-like/client/todo-filter.js<|end_filename|>
/**
* Created by mgobbi on 12/12/2016.
*/
define(function (require) {
var Actions = require("./actions");
return function (node, dispatcher) {
function handler() {
dispatcher.dispatchEvent(new CustomEvent(Actions.SET_VISIBILITY_FILTER,{detail: node.value}));
}
node.addEventListener("change", handler);
return function () {
node.removeEventListener("change", handler);
}
};
});
<|start_filename|>src/internal/_flatten.js<|end_filename|>
/**
* Created by mgobbi on 12/04/2017.
*/
import _isArrayLike from "./_isArrayLike";
function flatten(arr) {
return Array.from(arr).reduce(function (flat, toFlatten) {
if(_isArrayLike(toFlatten)){
toFlatten=Array.from(toFlatten);
}
return flat.concat(Array.isArray(toFlatten) ? flatten(toFlatten) : toFlatten);
}, []);
}
// function flatten(list) {
// // return list.reduce((acc, val) => acc.concat(val), []);
// let value, jlen, j;
// const result = [];
// let idx = 0;
// const ilen = list.length;
//
// while (idx < ilen) {
// if (_isArrayLike(list[idx])) {
// value = flatten(list[idx]);
// j = 0;
// jlen = value.length;
// while (j < jlen) {
// result[result.length] = value[j];
// j += 1;
// }
// } else {
// result[result.length] = list[idx];
// }
// idx += 1;
// }
// return result;
// }
export default flatten;
<|start_filename|>src/internal/_pluck.js<|end_filename|>
/**
* Created by mgobbi on 20/04/2017.
*/
import map from "./_map";
import curry from "./_curry";
export default curry(function (p, list) {
return map(function (obj) {
return obj[p];
}, list);
}); | marcog83/RoboJS |
<|start_filename|>hermione/module_templates/__IMPLEMENTED_BASE__.json<|end_filename|>
{
"info": "Base files with implemented example",
"input_info": [
["project_name", "My Project", "Enter your project name"],
["project_start_date", "01/01/21", "Enter the date your project started"]
]
} | RodrigoATorres/hermione |
<|start_filename|>tableviews/src/main/kotlin/no/tornadofx/fxsamples/tableviews/DemoTableView.kt<|end_filename|>
package no.tornadofx.fxsamples.tableviews
import javafx.collections.FXCollections
import javafx.scene.layout.GridPane
import tornadofx.*
class DemoTableView : View() {
override val root = GridPane()
val mapTableContent = mapOf(Pair("item 1", 5), Pair("item 2", 10), Pair("item 3", 6))
init {
with (root) {
row {
vbox {
label("Tableview from a map")
tableview(FXCollections.observableArrayList<Map.Entry<String, Int>>(mapTableContent.entries)) {
readonlyColumn("Item", Map.Entry<String, Int>::key)
readonlyColumn("Count", Map.Entry<String, Int>::value)
resizeColumnsToFitContent()
}
}
}
}
}
}
<|start_filename|>itemviewmodel/withFXproperties/src/main/kotlin/no/tornadofx/fxsamples/withfxproperties/views/ItemViewModelWithFxMainView.kt<|end_filename|>
package no.tornadofx.fxsamples.withfxproperties.views
import tornadofx.*
class ItemViewModelWithFxMainView : View("Person Editor") {
override val root = hbox {
add<PersonList>()
add<PersonEditor>()
}
}
<|start_filename|>treeviews/src/main/kotlin/no/tornado/fxsample/treeviews/TreeViewApp.kt<|end_filename|>
package no.tornado.fxsample.treeviews
import javafx.application.Application
import tornadofx.App
import tornadofx.importStylesheet
class TreeViewApp : App() {
override val primaryView = DemoTreeViews::class
init {
importStylesheet(Styles::class)
}
}
fun main(args: Array<String>) {
Application.launch(TreeViewApp::class.java, *args)
}
<|start_filename|>spring-example/src/main/kotlin/no/tornadofx/fxsample/springexample/HelloBean.kt<|end_filename|>
package no.tornadofx.fxsample.springexample
import org.springframework.stereotype.Component
/**
* Created by ronsmits on 11/03/2017.
*/
@Component class HelloBean {
fun helloworld() : String = "Hello by di()"
}
<|start_filename|>login/src/main/kotlin/no/tornado/fxsample/login/LoginApp.kt<|end_filename|>
package no.tornado.fxsample.login
import javafx.stage.Stage
import tornadofx.*
class LoginApp : App(LoginScreen::class, Styles::class) {
val loginController: LoginController by inject()
override fun start(stage: Stage) {
super.start(stage)
loginController.init()
}
}
fun main(args: Array<String>) {
launch<LoginApp>(args)
}
<|start_filename|>spring-example/src/main/kotlin/no/tornadofx/fxsample/springexample/SpringExampleView.kt<|end_filename|>
package no.tornadofx.fxsample.springexample
import tornadofx.*
class SpringExampleView : View() {
val bean : HelloBean by di()
override val root = vbox {
label(bean.helloworld()).paddingAll=20
}
}
<|start_filename|>itemviewmodel/withFXproperties/src/main/kotlin/no/tornadofx/fxsamples/withfxproperties/withfxpropertiesApp.kt<|end_filename|>
package no.tornadofx.fxsamples.withfxproperties
import javafx.application.Application
import no.tornadofx.fxsamples.withfxproperties.views.ItemViewModelWithFxMainView
import tornadofx.App
class WithFXPropertiesApp : App(ItemViewModelWithFxMainView::class)
fun main(args: Array<String>) {
Application.launch(WithFXPropertiesApp::class.java, *args)
}
<|start_filename|>workspace/src/main/kotlin/no/tornado/fxsample/workspace/views.kt<|end_filename|>
package no.tornado.fxsample.workspace
import javafx.beans.property.SimpleBooleanProperty
import javafx.scene.control.TextArea
import tornadofx.*
import java.io.IOException
import java.io.OutputStream
import java.nio.charset.Charset
import java.util.*
class TextEditorFragment(val documentViewModel: DocumentViewModel) : Fragment(){
override val root = pane {
title = documentViewModel.title.value
textarea (documentViewModel.text) {
            this.prefWidthProperty().bind(this@TextEditorFragment.root.widthProperty());
            this.prefHeightProperty().bind(this@TextEditorFragment.root.heightProperty());
}
}
init {
documentViewModel.title.addListener { w, o, n ->
this.title = n
}
}
override val deletable = SimpleBooleanProperty(false)
override val closeable = SimpleBooleanProperty( true)
override val savable = documentViewModel.dirty
override val refreshable = documentViewModel.dirty
override fun onSave() {
documentViewModel.commit()
}
override fun onRefresh() {
documentViewModel.rollback()
}
}
class EmptyView : View() {
val controller: EditorController by inject()
override val root = label(controller.quote())
}
/**
* TextAreaOutputStream
*
* Binds an output stream to a textarea
*/
class TextAreaOutputStream(val textArea: TextArea): OutputStream() {
/**
     * This doesn't support multibyte character streams such as UTF-8
*/
@Throws(IOException::class)
override fun write(b: Int) {
throw UnsupportedOperationException()
}
/**
* Supports multibyte characters by converting the array buffer to String
*/
@Throws(IOException::class)
override fun write(b: ByteArray, off: Int, len: Int) {
// redirects data to the text area
textArea.appendText(String(Arrays.copyOf(b, len), Charset.defaultCharset()))
// scrolls the text area to the end of data
textArea.scrollTop = java.lang.Double.MAX_VALUE
}
}
<|start_filename|>itemviewmodel/withpojos/src/main/kotlin/no/tornadofx/fxsamples/withpojo/controller/controller.kt<|end_filename|>
package no.tornadofx.fxsamples.withpojo.controller
import no.tornadofx.fxsamples.withpojo.model.Category
import no.tornadofx.fxsamples.withpojo.model.CategoryModel
import no.tornadofx.fxsamples.withpojo.model.Entry
import no.tornadofx.fxsamples.withpojo.model.EntryModel
import tornadofx.Controller
class MainController : Controller() {
val categoryModel = CategoryModel()
val entryModel = EntryModel()
val categories = listOf(
Category("a", 0),
Category("b", 1),
Category("c", 2),
Category("d", 3)
)
val entries = listOf(
listOf(
Entry("a", "aaa", "a writer"),
Entry("a1", "aaa1", "another writer"),
Entry("a2", "aaa2", "an amateur writer"),
Entry("a3", "aaa3", "a screen writer"),
Entry("a4", "aaa4", "the writer")),
listOf(
Entry("b", "bbb", "a writer")),
listOf(
Entry("c", "ccc", "a writer")),
listOf(
Entry("d", "ddd", "a writer"))
)
}
<|start_filename|>forms/src/main/kotlin/no/tornado/fxsample/forms/CustomerApp.kt<|end_filename|>
package no.tornado.fxsample.forms
import javafx.application.Application
import tornadofx.App
class CustomerApp : App(CustomerForm::class, Styles::class)
fun main(args: Array<String>) {
Application.launch(CustomerApp::class.java, *args)
}
<|start_filename|>treeviews/src/main/kotlin/no/tornado/fxsample/treeviews/Models.kt<|end_filename|>
package no.tornado.fxsample.treeviews
data class Group(val name: String, val children: List<Group>? = null)
sealed class PersonTreeItem(open val name: String)
object TreeRoot : PersonTreeItem("Departments")
data class Department(override val name: String): PersonTreeItem(name)
data class Person(override val name: String, val department: String) : PersonTreeItem(name)
val group = Group("Parent",
listOf(
Group("Child 1"),
Group("Child 2"),
Group("Child 3", listOf(
Group("Grand child 3.1",
listOf(
Group("Great grandchild 3.1.1"),
Group("Great grandchild 3.1.2"))))
),
Group("Child 4"))
)
val persons = listOf(
Person("<NAME>", "Marketing"),
Person("<NAME>", "Customer Service"),
Person("<NAME>", "IT Help Desk"),
Person("<NAME>", "Customer Service"),
Person("<NAME>", "Marketing"),
Person("<NAME>", "IT Help Desk"),
Person("<NAME>", "Customer Service"))
<|start_filename|>spring-example/src/main/kotlin/no/tornadofx/fxsample/springexample/SpringExampleApp.kt<|end_filename|>
package no.tornadofx.fxsample.springexample
import javafx.application.Application
import org.springframework.context.support.ClassPathXmlApplicationContext
import tornadofx.*
import kotlin.reflect.KClass
/**
* Created by ronsmits on 11/03/2017.
*/
class SpringExampleApp : App(SpringExampleView::class) {
init {
val springContext = ClassPathXmlApplicationContext("beans.xml")
FX.dicontainer = object : DIContainer {
override fun <T : Any> getInstance(type: KClass<T>): T = springContext.getBean(type.java)
}
}
}
fun main(args: Array<String>) {
Application.launch(SpringExampleApp::class.java, *args)
}
<|start_filename|>tableviews/src/main/kotlin/no/tornadofx/fxsamples/tableviews/TableViewApp.kt<|end_filename|>
package no.tornadofx.fxsamples.tableviews
import javafx.application.Application
import tornadofx.App
class TableViewApp : App() {
override val primaryView = DemoTableView::class
}
fun main(args: Array<String>) {
Application.launch(TableViewApp::class.java, *args)
}
<|start_filename|>workspace/src/main/kotlin/no/tornado/fxsample/workspace/styles.kt<|end_filename|>
package no.tornado.fxsample.workspace
import javafx.scene.paint.Color
import tornadofx.Stylesheet
import tornadofx.cssclass
import tornadofx.px
/**
* Created by miguelius on 04/09/2017.
*/
class Styles : Stylesheet() {
companion object {
val wrapper by cssclass()
val consola by cssclass()
}
init {
root {
prefHeight = 600.px
prefWidth = 800.px
}
textArea and consola {
baseColor= Color.BLACK
fontFamily = "Consolas"
textFill = Color.LIGHTGRAY
}
}
}
<|start_filename|>workspace/src/main/kotlin/no/tornado/fxsample/workspace/WorkspaceApp.kt<|end_filename|>
package no.tornado.fxsample.workspace
import javafx.application.Application
import tornadofx.App
import tornadofx.importStylesheet
/**
* Created by miguelius on 04/09/2017.
*/
class WorkspaceApp : App() {
override val primaryView = DemoWorkspace::class
init {
importStylesheet(Styles::class)
}
}
fun main(args: Array<String>) {
Application.launch(WorkspaceApp::class.java, *args)
}
<|start_filename|>login/src/main/kotlin/no/tornado/fxsample/login/Styles.kt<|end_filename|>
package no.tornado.fxsample.login
import tornadofx.*
class Styles : Stylesheet() {
companion object {
val loginScreen by cssclass()
}
init {
loginScreen {
padding = box(15.px)
vgap = 7.px
hgap = 10.px
}
}
}
<|start_filename|>login/src/main/kotlin/no/tornado/fxsample/login/SecureScreen.kt<|end_filename|>
package no.tornado.fxsample.login
import javafx.application.Platform
import javafx.geometry.Pos
import javafx.scene.text.Font
import tornadofx.*
class SecureScreen : View("Secure Screen") {
val loginController: LoginController by inject()
override val root = borderpane {
setPrefSize(800.0, 600.0)
top {
label(title) {
font = Font.font(22.0)
}
}
center {
vbox(spacing = 15) {
alignment = Pos.CENTER
label("If you can see this, you are successfully logged in!")
hbox {
alignment = Pos.CENTER
button("Logout") {
setOnAction {
loginController.logout()
}
}
button("Exit") {
setOnAction {
Platform.exit()
}
}
}
}
}
}
} | AzarguNazari/tornadofx-samples |
<|start_filename|>block_ad/params.json<|end_filename|>
{
"name": "Ad.js",
"tagline": "An ad blocker.",
"body": "ad.js\r\n=================\r\n\r\n####功能####\r\n_免越狱_ 去除视频广告、部分网页广告\r\n\r\n####适用于####\r\n 适用系统:iOS 7.0 + \r\n 广告屏蔽:爱奇艺(新增,仅支持网页版视频)、PPTV、优酷、土豆、乐视、湖南电视台、搜狐视频、腾讯视频、谷歌广告、百度广告、淘宝广告等\r\n\r\n####如何设置####\r\n 1. 打开 设置 - 无线局域网; \r\n 2. 点击已经连接成功的无线网络名称右侧的 ⓘ 按钮; \r\n 3. 进入界面后下拉至底部,在“HTTP 代理”一栏中点击“自动”标签; \r\n 4. 在 URL 框中输入:[http://cdce.cf/ad.js]; \r\n 5. 点击屏幕左上角【 <无线局域网 】返回即可保存设置。\r\n\r\n####拥有自己的ad.js <sub>(非必需操作)</sub>####\r\n 1. 你并不需要有一个服务器,只需一个GitHub帐号,即可拥有自己的ad.js; \r\n 2. 登录后点击页面右上角【fork】按钮; \r\n 3. 编辑ad.js文件,并保存; \r\n 4. 您的ad.js网址为:[http://xxx.github.io/ad.js/ad.js]([xxx]为你的GitHub帐号用户名)。设置方法同【如何设置】,将URL框中的网址改为以上网址; \r\n 5. 如果设置完成后需要修改ad.js,关闭设备的http代理,重复第3-4步。 \r\n ===========================================================\r\n 注:第3步不可跳过,如不想对ad.js修改,在文件末尾加上一个空格保存亦可;\r\n 您的ad.js不会随cdce.cf自动更新,请自行修改文件更新ad.js\r\n\r\n####如何关闭####\r\n 在“HTTP 代理”一栏中点击“关闭”标签。返回保存即可。\r\n\r\n####编辑ad.js####\r\n<a href=\"../../edit/gh-pages/ad.js\">〔点击此处打开〕</a>\r\n\r\n####项目主页####\r\n<http://cdce.cf/>\r\n\r\n####报告问题####\r\n<https://github.com/MikeWang000000/ad.js/issues>",
"note": "Don't delete this file! It's used internally to help with page regeneration."
}
<|start_filename|>block_ad/index.html<|end_filename|>
<!DOCTYPE html>
<html lang="en-us">
<head>
<meta charset="UTF-8">
<title>Ad.js by webcoding</title>
<meta name="keywords" content="去广告,免越狱,iOS,广告屏蔽,adblocker,ad.js"/>
<meta name="description" content="免越狱屏蔽视频广告、部分网页广告: 爱奇艺、PPTV、优酷、土豆、乐视、湖南电视台、搜狐视频、腾讯视频、谷歌广告、百度广告、淘宝广告等" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" type="text/css" href="stylesheets/normalize.css" media="screen">
<link rel="stylesheet" type="text/css" href="stylesheets/stylesheet.css" media="screen">
<link rel="stylesheet" type="text/css" href="stylesheets/github-light.css" media="screen">
<script>"https:"!=window.location.protocol&&(window.location.href="https:"+window.location.href.substring(window.location.protocol.length));</script>
<script>var sc=0;</script>
</head>
<body>
<section class="page-header">
<h1 class="project-name">ad.js</h1>
<h2 class="project-tagline">
状态:
<script src="//sc.cdce.cf/sc.js?id=57a0695996f92"></script>
<script>if(sc!=1){x='<b style="color:green">●</b> 已开启';}else{x='<b style="color:red">●</b> 未开启';}document.write(x);</script>
<noscript><b style="color:grey">●</b> 未知</noscript>
[<a href="javascript:location.reload()" style="color:white">↻</a>]
</h2>
<a href="https://github.com/webcoding/js_block" class="btn">View on GitHub</a>
<a href="https://github.com/webcoding/js_block/zipball/master" class="btn">Download .zip</a>
<a href="https://github.com/webcoding/js_block/tarball/master" class="btn">Download .tar.gz</a>
</section>
<section class="main-content">
<h1>
<a id="adjs" class="anchor" href="#adjs" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>ad.js</h1>
<h4>
<a id="功能" class="anchor" href="#%E5%8A%9F%E8%83%BD" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>功能</h4>
<p><em>免越狱</em> 去除视频广告、部分网页广告</p>
<h4>
<a id="适用于" class="anchor" href="#%E9%80%82%E7%94%A8%E4%BA%8E" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>适用于</h4>
<pre><code>适用系统:iOS 7.0 +
广告屏蔽:爱奇艺(新增,仅支持网页版视频)、PPTV、优酷、土豆、乐视、湖南电视台、搜狐视频、腾讯视频、谷歌广告、百度广告、淘宝广告等
</code></pre>
<h4>
<a id="如何设置" class="anchor" href="#%E5%A6%82%E4%BD%95%E8%AE%BE%E7%BD%AE" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>如何设置</h4>
<pre><code>1. 打开 设置 - 无线局域网;
2. 点击已经连接成功的无线网络名称右侧的 ⓘ 按钮;
3. 进入界面后下拉至底部,在“HTTP 代理”一栏中点击“自动”标签;
4. 在 URL 框中输入:[http://cdce.cf/ad.js];
5. 点击屏幕左上角【 <无线局域网 】返回即可保存设置。
</code></pre>
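<!--
  The URL above goes into iOS's automatic proxy configuration field, which expects a
  proxy auto-config (PAC) script. As a rough, illustrative sketch (the host pattern and
  proxy address below are assumptions, not the actual ad.js rules), such a script looks like:

  function FindProxyForURL(url, host) {
    if (shExpMatch(host, "*.example-ads.com")) {
      return "PROXY 127.0.0.1:8888"; // send ad requests to a dead proxy
    }
    return "DIRECT";
  }
-->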
<h4>
<a id="拥有自己的adjs-非必需操作" class="anchor" href="#%E6%8B%A5%E6%9C%89%E8%87%AA%E5%B7%B1%E7%9A%84adjs-%E9%9D%9E%E5%BF%85%E9%9C%80%E6%93%8D%E4%BD%9C" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>拥有自己的ad.js <sub>(非必需操作)</sub>
</h4>
<pre><code>1. 你并不需要有一个服务器,只需一个GitHub帐号,即可拥有自己的ad.js;
2. 登录后点击页面右上角【fork】按钮;
3. 编辑ad.js文件,并保存;
4. 您的ad.js网址为:[http://xxx.github.io/ad.js/ad.js]([xxx]为你的GitHub帐号用户名)。设置方法同【如何设置】,将URL框中的网址改为以上网址;
5. 如果设置完成后需要修改ad.js,关闭设备的http代理,重复第3-4步。
===========================================================
注:第3步不可跳过,如不想对ad.js修改,在文件末尾加上一个空格保存亦可;
您的ad.js不会随cdce.cf自动更新,请自行修改文件更新ad.js
</code></pre>
<h4>
<a id="如何关闭" class="anchor" href="#%E5%A6%82%E4%BD%95%E5%85%B3%E9%97%AD" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>如何关闭</h4>
<pre><code>在“HTTP 代理”一栏中点击“关闭”标签。返回保存即可。
</code></pre>
<h4>测试</h4>
<p>可通过直接打开此页面,来查看是否已开启 <a href="https://github.com/webcoding/js_block">https://github.com/webcoding/js_block</a></p>
<h4>
<a id="编辑adjs" class="anchor" href="#%E7%BC%96%E8%BE%91adjs" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>编辑ad.js</h4>
<p><a href="../../edit/gh-pages/ad.js">〔点击此处打开〕</a></p>
<h4>
<a id="项目主页" class="anchor" href="#%E9%A1%B9%E7%9B%AE%E4%B8%BB%E9%A1%B5" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>项目主页</h4>
<p><a href="http://cdce.cf/">http://cdce.cf/</a></p>
<h4>
<a id="报告问题" class="anchor" href="#%E6%8A%A5%E5%91%8A%E9%97%AE%E9%A2%98" aria-hidden="true"><span aria-hidden="true" class="octicon octicon-link"></span></a>报告问题</h4>
<p><a href="https://github.com/MikeWang000000/ad.js/issues">https://github.com/MikeWang000000/ad.js/issues</a></p>
<footer class="site-footer">
<span class="site-footer-owner"><a href="https://github.com/webcoding/ad.js">Ad.js</a> is maintained by <a href="https://github.com/webcoding">webcoding</a>.</span>
<span class="site-footer-credits">This page was generated by <a href="https://pages.github.com">GitHub Pages</a> using the <a href="https://github.com/jasonlong/cayman-theme">Cayman theme</a> by <a href="https://twitter.com/jasonlong"><NAME></a>.</span>
</footer>
</section>
</body>
</html>
<|start_filename|>block_res/ssltest/index.html<|end_filename|>
<!-- source: https://www.ssllabs.com/ssltest/viewMyClient.html -->
<!DOCTYPE html>
<html>
<head>
<title>Qualys SSL Labs - Projects / SSL Client Test</title>
<meta charset="utf-8" />
<meta http-equiv="refresh" content="0;url=https://www.ssllabs.com/ssltest/viewMyClient.html">
<link href="css/ssllabs.css" rel="styleSheet" type="text/css">
<link href="css/report.css" rel="styleSheet" type="text/css">
<link href="css/main.css" rel="styleSheet" type="text/css">
<script type="text/javascript" src="../zepto.min.js"></script>
</head>
<body>
<div id="page">
<div id="header">
<div id="logo">
<a href="/index.html" rel="noreferrer"><img src="https://ssllabs.com/images/qualys-ssl-labs-logo.png" width="341" height="55" alt="SSL Labs logo" title="SSL Labs logo"></a>
</div>
<div id="navigation">
<a class="link" href="/index.html" rel="noreferrer">Home</a>
<a class="link" href="/projects/index.html" rel="noreferrer">Projects</a>
<a class="link" href="https://www.qualys.com" rel="noreferrer">Qualys.com</a>
<a class="link" href="/about/contact.html" rel="noreferrer">Contact</a>
</div>
<br clear="all" />
</div>
<div id="breadcrumbs">
<b>You are here: </b>
<a href="/index.html">Home</a> > <a href="/projects/index.html">Projects</a> > SSL Client Test
</div>
<div id="main">
<div class="floatLeft">
<div class="reportTitle report-title-style">SSL/TLS Capabilities of Your Browser</div>
<div class="reportTime width-650"><b>User Agent:</b> Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3000.4 Safari/537.36</div>
</div>
<div class="floatRight"><br><a href="/ssltest/clients.html">Other User Agents »</a></div>
<br clear="all" />
<div align="center">
<noscript>
<div id=warningBox class="warning-box-line-height">
<b>Without JavaScript, the following tests will not run:</b>
(1) Accurate protocol support, (2) SSL 3 POODLE vulnerability, (3) mixed content handling,
(4) TLS authentication bug in Apple's products, and (5) the FREAK attack.
Please enable JavaScript for best results.
</ul>
</div>
<br>
</noscript>
<div id="mixedCSSHiddenDiv1" class="display-none"></div>
<div id="mixedCSSHiddenDiv2" class="display-none"></div>
<iframe src="http://plaintext.ssllabs.com/plaintext/frame.html" class="display-none" width="0" height="0"></iframe>
<div id="hiddenImagesDiv" class="display-none"></div>
<div id="protocolSupportDiv" class="display-none"></div>
<div class="reportSection" id="protocolTestDiv">
<div class="sectionTitle" id="protocolTestHeading">Protocol Support</div>
<div class="sectionBody">
<div id="protocolTestMsg"><img src=images/progress-indicator.gif width=16 height=16> Please wait, checking protocol support...</div>
<div id="protocolTestMsgNotes"></div>
</div>
</div>
<div class="reportSection" id="jamTestDiv">
<div class="sectionTitle">Logjam Vulnerability</div>
<div class="sectionBody">
<div id="jamTestMsg"><img src=images/progress-indicator.gif width=16 height=16> Please wait, checking if your user agent is vulnerable...</div>
<div id="jamTestMsgNotes">
For more information about the Logjam attack, please go to
<a href="https://weakdh.org">weakdh.org</a>.<br> To test manually, click <a href="https://www.ssllabs.com:10445/">here</a>. Your user agent is not vulnerable if it fails to connect to the site.</div>
</div>
</div>
<div class="reportSection" id="freakTestDiv">
<div class="sectionTitle">FREAK Vulnerability</div>
<div class="sectionBody">
<div id="freakTestMsg"><img src=images/progress-indicator.gif width=16 height=16> Please wait, checking if your user agent is vulnerable...</div>
<div id="freakTestMsgNotes">
For more information about the FREAK attack, please go to
<a href="https://www.freakattack.com">www.freakattack.com</a>.<br> To test manually, click <a href="https://www.ssllabs.com:10444/">here</a>. Your user agent is not vulnerable if it fails to connect to the site.</div>
</div>
</div>
<div class="reportSection" id="ssl3TestDiv">
<div class="sectionTitle">POODLE Vulnerability</div>
<div class="sectionBody">
<div id="ssl3TestMsg"><img src=images/progress-indicator.gif width=16 height=16> Please wait, checking if your user agent is vulnerable...</div>
<div id="ssl3TestMsgNotes">For more information about the POODLE attack, please read <a href="https://community.qualys.com/blogs/securitylabs/2014/10/15/ssl-3-is-dead-killed-by-the-poodle-attack">this blog post</a>.</div>
</div>
</div>
<div class="reportSection" id="ssl2TestDiv">
<div class="sectionTitle">SSL 2 Protocol Support</div>
<div class="sectionBody">
<div id="ssl2TestMsg">
<font color="red">Your user agent supports SSL 2. You should upgrade.</font>
</div>
<div id="ssl2TestMsgNotes">SSL 2 is a very old, obsolete, and insecure version of the SSL protocol. You can usually disable this protocol version in configuration, but modern clients don't support it at all. This really means that you should upgrade your software to
a better version.</div>
</div>
</div>
<script type="text/javascript" src="/includes/viewClient.js"></script>
<input id="mainsitehost" type="hidden" value=www.ssllabs.com>
<input id="plaintextSiteHost" type="hidden" value=plaintext.ssllabs.com>
<div class="reportSection" id="appleTestDiv">
<div class="sectionTitle">iOS and OS X TLS Authentication Vulnerability</div>
<div class="sectionBody">
<div id="appleTestMsg"><img src=images/progress-indicator.gif width=16 height=16> Please wait, checking if your user agent is vulnerable...</div>
<div id="appleTestMsgNotes">To test manually, <a href="https://www.ssllabs.com:10443">click here</a>. If your user agent refuses to connect, you are not vulnerable. This test requires a connection to the SSL Labs server on port 10443. A strict outbound firewall might
interfere. You should test Safari running on iOS or OS X. Chrome and Firefox are not vulnerable, even when running on a vulnerable operating system.
<a href="https://community.qualys.com/blogs/securitylabs/2014/02/24/ssl-labs-testing-for-apples-tls-authentication-bug"><b>MORE »</b></a></div>
<script type="text/javascript" src="/includes/viewClient-appleTest.js"></script>
</div>
</div>
<div class="reportSection">
<div class="sectionTitle">Protocol Features</div>
<div class="sectionBody">
<img class="tIcon" src="images/icon-protocol.png" width="65" height="50">
<table class="reportTable">
<thead>
<tr>
<td class="tableHead" colspan="2" align="left">Protocols</td>
</tr>
</thead>
<tbody>
<tr class="tableRow">
<td class="tableLeft" id="protocol_tls1_2_label">TLS 1.2</td>
<td class="tableRight" id="protocol_tls1_2">Yes*</td>
</tr>
<tr class="tableRow">
<td class="tableLeft">TLS 1.1</td>
<td class="tableRight" id="protocol_tls1_1">Yes*</td>
</tr>
<tr class="tableRow">
<td class="tableLeft">TLS 1.0</td>
<td class="tableRight" id="protocol_tls1">Yes*</td>
</tr>
<tr class="tableRow">
<td class="tableLeft" id="protocol_ssl3_label">SSL 3</td>
<td class="tableRight" id="protocol_ssl3">Yes*</td>
</tr>
<tr class="tableRow">
<td class="tableLeft" id="protocol_ssl2_label">SSL 2</td>
<td class="tableRight" id="protocol_ssl2">No</td>
</tr>
<noscript>
<tr class="tableRow">
<td colspan=2 align=left>
<span class="color666666">(*) Without JavaScript, this test reliably detects only the highest supported protocol.</span>
</td>
</tr>
</noscript>
</tbody>
</table>
<br><br>
<img class="tIcon" src="images/icon-cipher.png" width="65" height="50" alt="">
<table class="reportTable">
<thead>
<tr>
<td class="tableHead" colspan="3" align="left">Cipher Suites (in order of preference)</td>
</tr>
</thead>
<tr>
<td class="tableLeft">TLS_GREASE_9A (<code>0x9a9a</code>)</td>
<td class="tableRight">-</td>
</tr>
<tr>
<td class="tableLeft">TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 (<code>0xc02b</code>) <span class="dhParams color-green">Forward Secrecy</span>
</td>
<td class="tableRight">128</td>
</tr>
<tr>
<td class="tableLeft">TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 (<code>0xc02f</code>) <span class="dhParams color-green">Forward Secrecy</span>
</td>
<td class="tableRight">128</td>
</tr>
<tr>
<td class="tableLeft">TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 (<code>0xc02c</code>) <span class="dhParams color-green">Forward Secrecy</span>
</td>
<td class="tableRight">256</td>
</tr>
<tr>
<td class="tableLeft">TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 (<code>0xc030</code>) <span class="dhParams color-green">Forward Secrecy</span>
</td>
<td class="tableRight">256</td>
</tr>
<tr>
<td class="tableLeft">TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 (<code>0xcca9</code>) <span class="dhParams color-green">Forward Secrecy</span>
</td>
<td class="tableRight">256</td>
</tr>
<tr>
<td class="tableLeft">TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 (<code>0xcca8</code>) <span class="dhParams color-green">Forward Secrecy</span>
</td>
<td class="tableRight">256</td>
</tr>
<tr>
<td class="tableLeft">TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA (<code>0xc013</code>) <span class="dhParams color-green">Forward Secrecy</span>
</td>
<td class="tableRight">128</td>
</tr>
<tr>
<td class="tableLeft">TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA (<code>0xc014</code>) <span class="dhParams color-green">Forward Secrecy</span>
</td>
<td class="tableRight">256</td>
</tr>
<tr>
<td class="tableLeft">TLS_RSA_WITH_AES_128_GCM_SHA256 (<code>0x9c</code>)
</td>
<td class="tableRight">128</td>
</tr>
<tr>
<td class="tableLeft">TLS_RSA_WITH_AES_256_GCM_SHA384 (<code>0x9d</code>)
</td>
<td class="tableRight">256</td>
</tr>
<tr>
<td class="tableLeft">TLS_RSA_WITH_AES_128_CBC_SHA (<code>0x2f</code>)
</td>
<td class="tableRight">128</td>
</tr>
<tr>
<td class="tableLeft">TLS_RSA_WITH_AES_256_CBC_SHA (<code>0x35</code>)
</td>
<td class="tableRight">256</td>
</tr>
<tr>
<td class="tableLeft">TLS_RSA_WITH_3DES_EDE_CBC_SHA (<code>0xa</code>)
</td>
<td class="tableRight">112</td>
</tr>
<tr class="tableRow">
<td colspan=2 align=left>
<span class="color666666">(1) When a browser supports SSL 2, its SSL 2-only suites are shown
only on the very first connection to this site. To see the suites,
close all browser windows, then open this exact page directly. Don't refresh.</span>
</td>
</tr>
</table>
<br><br>
<img class="tIcon" src="images/icon-protocol-details.png" width="65" height="50" alt="">
<table class="reportTable">
<thead>
<tr>
<td class="tableHead" colspan="2" align="left">Protocol Details</td>
</tr>
</thead>
<tbody>
<tr class="tableRow">
<td class="tableLabel" width="250">Server Name Indication (SNI)</td>
<td class="tableCell">Yes</td>
</tr>
<tr class="tableRow">
<td class="tableLabel">Secure Renegotiation</td>
<td class="tableCell">Yes</td>
</tr>
<tr class="tableRow">
<td class="tableLabel">
<font color=green>TLS compression</font>
</td>
<td class="tableCell">
<font color=green>No</font>
</td>
</tr>
<tr class="tableRow">
<td class="tableLabel">Session tickets</td>
<td class="tableCell">Yes</td>
</tr>
<tr class="tableRow">
<td class="tableLabel">OCSP stapling</td>
<td class="tableCell">Yes</td>
</tr>
<tr class="tableRow">
<td class="tableLabel">Signature algorithms</td>
<td class="tableCell">
SHA256/ECDSA, Unknown (0x8)/Unknown (0x4), SHA256/RSA, SHA384/ECDSA, Unknown (0x8)/Unknown (0x5), SHA384/RSA, Unknown (0x8)/Unknown (0x6), SHA512/RSA, SHA1/RSA </td>
</tr>
<tr class="tableRow">
<td class="tableLabel">Elliptic curves</td>
<td class="tableCell">
tls_grease_1a1a, x25519, secp256r1, secp384r1 </td>
</tr>
<tr class="tableRow">
<td class="tableLabel">Next Protocol Negotiation</td>
<td class="tableCell">No</td>
</tr>
<tr class="tableRow">
<td class="tableLabel" width="250">Application Layer Protocol Negotiation</td>
<td class="tableCell">Yes <span class="greySmall">
h2
http/1.1
</td>
</tr>
<tr class="tableRow">
<td class="tableLabel"><font color=green>SSL 2 handshake compatibility</font></td>
<td class="tableCell"><font color=green>No</font></td>
</tr>
</tbody>
</table>
<br>
</div>
</div>
<div class="reportSection display-none" id="mixedDiv">
<div class="sectionTitle">Mixed Content Handling</div>
<div class="sectionBody">
<img class="tIcon" src="images/icon-misc.png" width="65" height="50">
<table class="reportTable">
<thead>
<tr>
<td class="tableHead" colspan="3" align="left">Mixed Content Tests</td>
</tr>
</thead>
<tbody>
<tr class="tableRow">
<td class="tableLeft">Images</td>
<td class="tableRight width-100">Passive</td>
<td class="tableRight width-100" id="mixedImages">Testing...</td>
</tr>
<tr class="tableRow">
<td class="tableLeft">CSS</td>
<td class="tableRight">Active</td>
<td class="tableRight" id="mixedCssLink">Testing...</td>
</tr>
<tr class="tableRow">
<td class="tableLeft">Scripts</td>
<td class="tableRight">Active</td>
<td class="tableRight" id="mixedScripts">Testing...</td>
</tr>
<tr class="tableRow">
<td class="tableLeft">XMLHttpRequest</td>
<td class="tableRight">Active</td>
<td class="tableRight" id="mixedXhr">Testing...</td>
</tr>
<tr class="tableRow">
<td class="tableLeft">WebSockets</td>
<td class="tableRight">Active</td>
<td class="tableRight" id="mixedWebSockets">Testing...</td>
</tr>
<tr class="tableRow">
<td class="tableLeft">Frames</td>
<td class="tableRight">Active</td>
<td class="tableRight" id="mixedFrame">Testing...</td>
</tr>
<tr class="tableRow">
<td colspan="3" align="left">
<span class="color666666">(1) These tests might cause a mixed content warning in your browser. That's expected.<br></span>
<span class="color666666">(2) If you see a failed test, try to reload the page. If the error persists, please get in touch.</span>
</td>
</tr>
</tbody>
</table>
<br>
<table class="reportTable report-table-margin">
<thead>
<tr>
<td class="tableHead" colspan="3" align="left">Related Functionality</td>
</tr>
</thead>
<tbody>
<tr class="tableRow">
<td class="tableLeft">Upgrade Insecure Requests request header (<a href="https://w3c.github.io/webappsec/specs/upgrade/">more info</a>)</td>
<td class="tableRight">Yes</td>
</tr>
</tbody>
</table>
</div>
</div>
</div>
</div>
<script type="text/javascript" src="viewClient-clientTest.js"></script>
</div>
</div>
</div>
<div id="pageEnd">
<div id="copyright">
<table width=1050 border=0 cellpadding=5 cellspacing=0>
<tr>
<td class="footer">
Copyright © 2009-2017 <a href="https://www.qualys.com">Qualys, Inc</A>. All Rights Reserved.
</td>
<td align=right class="footer">
<a href="https://www.ssllabs.com/about/terms.html" rel="noreferrer">Terms and Conditions</a>
</td>
</tr>
</table>
</div>
</div>
</body>
</html>
<|start_filename|>block_res/ssltest/css/main.css<|end_filename|>
.mainPhoto {
width: 1100px;
height: 320px;
background: #2d63a5 url(../images/main-background.jpg);
margin: 0 auto;
padding: 50px;
-moz-box-sizing: border-box;
box-sizing: border-box;
position: relative;
}
.mainPhoto > h2 {
font-size: 380%;
line-height: 100%;
letter-spacing: -1px;
font-weight: bold;
color: #fff;
max-width: 400px;
margin: 0;
text-shadow: 2px 2px 0 #000000;
}
.mainPhoto > h3 {
font-size: 200%;
line-height: 130%;
font-weight: normal;
color: #fff;
max-width: 450px;
margin: 0;
margin-top: 20px;
text-shadow: 2px 2px 0 #000000;
}
.featured {
width: 360px;
right: 20px;
top: 18px;
position: absolute;
margin-right: 20px;
}
a.featuredLink {
display: block;
margin: 2px;
background: url(../images/black40.png);
font-size: 170%;
line-height: 100%;
font-weight: normal;
width: 350px;
height: 69px;
-moz-box-sizing: border-box;
box-sizing: border-box;
padding: 15px;
padding-left: 25px;
text-decoration: none;
color: #fff;
text-shadow: 2px 2px 0 #000000;
}
a.featuredLink:hover {
background: url(../images/black50.png);
}
a.featuredLink img {
float: left;
display: block;
margin-right: 20px;
}
a.featuredLink > .d {
font-size: 60%;
font-weight: normal;
display: block;
margin-top: 0px;
}
.bookImage {
border: 1px solid #bbbbbb;
margin-right:10px;
margin-top: 5px;
}
.spanFont {
font-weight: bold;
font-size: 11px;
}
.footer {
font-size: 12px;
}
/*ssltest/viewClient.vm*/
.floatLeft {
float: left;
}
/*ssltest/viewClient.vm*/
.floatRight {
float:right;
font-weight: bold;
font-size: 20px;
}
.clientsJsonLink {
float:left;
margin-left: 10px;
margin-top: 25px;
text-decoration: underline;
}
#multiTable th {
cursor: pointer;
cursor: hand;
}
.greyColor {
color:grey;
font-size: 12px;
}
.partnerLogo {
float: right;
padding-right: 30px;
padding-top: 10px;
}
#serverTestPara {
font-size: 16px;
line-height: 24px;
}
#serverTestHideResults {
font-size: 12px;
color: black;
font-weight: normal;
text-align: left;
}
.boxStyle{
float:left;
width:327px;
margin-right:20px;
height:340px;
}
.boxStyleRight{
float:left;
width:327px;
height:340px;
}
#appleTestDiv {
display: none;
}
#statusDetailMsg {
font-size: 14px;
color: #666666;
padding-top: 5px;
}
.certMismatch {
font-size: 0.7em;
}
.invalidHost {
font-weight: normal;
font-size: 0.7em;
}
#knownIssues {
font-size: 1.2em;
}
#commonError {
font-size: 1.2em;
}
#gradeA {
margin-bottom: 8px;
}
.Aplus{
font-size: 0.75em;
}
.Aminus{
font-size: 0.80em;
}
.zeroMargin {
margin:0px;
}
.serverKeyCert {
float: left;
width: 825px;
}
.downloadIcon {
float: left;
margin-left: 8px;
margin-top: 5px;
}
.blackColor{
color: black;
}
.colorF88017 {
color: #f88017 !important;
}
.colorRed {
color : red !important;
}
.color666666 {
color: #666666 !important;
}
.color-green {
color : green !important;
}
.tableCellRight {
width: 75px;
text-align: right;
}
.tableCellCenter {
width: 125px;
text-align: center;
padding-left: 10px;
padding-right: 10px;
font-size: 11px;
}
#no-trust-path {
text-align: center;
}
.cursor-help{
cursor : help;
}
.font-10{
font-size: 10px;
}
.font-11{
font-size: 11px;
}
.drown-test {
font-size: 12px;
color: grey;
line-height: 18px;
}
/*ssltest/viewClient.vm*/
.display-none{
display: none;
}
.infoBox {
border: 1px solid #bbbbbb;
padding: 5px;
background: #fffacd;
margin-top: 10px;
font-weight: bold;
color: #222222;
}
.highlightBox {
border: 1px solid #888888;
padding: 5px;
background: #7ed84d;
margin-top: 10px;
font-weight: bold;
color: #222222;
}
.noticeBox {
border: 1px solid #bbbbbb;
padding: 5px;
background: #CCEEFF;
margin-top: 10px;
font-weight: bold;
color: #222222;
}
.warningBox {
border: 1px solid #bbbbbb;
padding: 5px;
background: #FFCF79;
margin-top: 10px;
font-weight: bold;
color: #222222;
}
.errorBox {
border: 1px solid #bbbbbb;
padding: 5px;
background: #FFCCCB;
margin-top: 10px;
font-weight: bold;
color: #222222;
}
#drownTable th {
color: #009ddf;
border-bottom: 2px solid #c6d2d4;
text-align: left;
font-size: 12px;
padding-top: 4px;
padding-bottom: 2px;
}
#drownTable td {
text-align: left;
font-size: 12px;
padding-top: 4px;
padding-bottom: 2px;
}
.warning-box-margin {
margin-top: 10px;
}
.text-align-left {
text-align: left;
}
/*ssltest/viewClient.vm*/
.report-title-style{
color: black;
padding-top: 10px;
}
/*ssltest/viewClient.vm*/
.width-650{
width: 650px;
}
/*ssltest/viewClient.vm*/
.browser-handshake-fail{
font-size: 0.7em;
color: #666666;
}
/*ssltest/viewClient.vm*/
.warning-box-line-height{
line-height: 145px;
}
/*ssltest/viewClient.vm*/
#protocolTestDiv, #ssl2TestDiv, #ssl3TestDiv, #appleTestDiv, #freakTestDiv, #jamTestDiv {
display: none;
}
#protocolTestMsg, #ssl2TestMsg, #ssl3TestMsg, #appleTestMsg, #freakTestMsg, #jamTestMsg {
font-size: 16px;
font-weight: bold;
}
#protocolTestMsgNotes, #ssl2TestMsgNotes, #ssl3TestMsgNotes, #appleTestMsgNotes, #freakTestMsgNotes, #jamTestMsgNotes {
color: grey;
font-size: 14px;
}
.width-100{
width : 100px;
}
.report-table-margin{
margin-left: 75px !important;
}
.settings-message{
width: 800px;
color: grey;
font-size: 12px;
}
.category_width_0{
width:0px;
}
.category_width_3{
width:3px;
}
.category_width_6{
width:6px;
}
.category_width_9{
width:9px;
}
.category_width_12{
width:12px;
}
.category_width_15{
width:15px;
}
.category_width_18{
width:18px;
}
.category_width_21{
width:21px;
}
.category_width_24{
width:24px;
}
.category_width_27{
width:27px;
}
.category_width_30{
width:30px;
}
.category_width_33{
width:33px;
}
.category_width_36{
width:36px;
}
.category_width_39{
width:39px;
}
.category_width_42{
width:42px;
}
.category_width_45{
width:45px;
}
.category_width_48{
width:48px;
}
.category_width_51{
width:51px;
}
.category_width_54{
width:54px;
}
.category_width_57{
width:57px;
}
.category_width_60{
width:60px;
}
.category_width_63{
width:63px;
}
.category_width_66{
width:66px;
}
.category_width_69{
width:69px;
}
.category_width_72{
width:72px;
}
.category_width_75{
width:75px;
}
.category_width_78{
width:78px;
}
.category_width_81{
width:81px;
}
.category_width_84{
width:84px;
}
.category_width_87{
width:87px;
}
.category_width_90{
width:90px;
}
.category_width_93{
width:93px;
}
.category_width_96{
width:96px;
}
.category_width_99{
width:99px;
}
.category_width_102{
width:102px;
}
.category_width_105{
width:105px;
}
.category_width_108{
width:108px;
}
.category_width_111{
width:111px;
}
.category_width_114{
width:114px;
}
.category_width_117{
width:117px;
}
.category_width_120{
width:120px;
}
.category_width_123{
width:123px;
}
.category_width_126{
width:126px;
}
.category_width_129{
width:129px;
}
.category_width_132{
width:132px;
}
.category_width_135{
width:135px;
}
.category_width_138{
width:138px;
}
.category_width_141{
width:141px;
}
.category_width_144{
width:144px;
}
.category_width_147{
width:147px;
}
.category_width_150{
width:150px;
}
.category_width_153{
width:153px;
}
.category_width_156{
width:156px;
}
.category_width_159{
width:159px;
}
.category_width_162{
width:162px;
}
.category_width_165{
width:165px;
}
.category_width_168{
width:168px;
}
.category_width_171{
width:171px;
}
.category_width_174{
width:174px;
}
.category_width_177{
width:177px;
}
.category_width_180{
width:180px;
}
.category_width_183{
width:183px;
}
.category_width_186{
width:186px;
}
.category_width_189{
width:189px;
}
.category_width_192{
width:192px;
}
.category_width_195{
width:195px;
}
.category_width_198{
width:198px;
}
.category_width_201{
width:201px;
}
.category_width_204{
width:204px;
}
.category_width_207{
width:207px;
}
.category_width_210{
width:210px;
}
.category_width_213{
width:213px;
}
.category_width_216{
width:216px;
}
.category_width_219{
width:219px;
}
.category_width_222{
width:222px;
}
.category_width_225{
width:225px;
}
.category_width_228{
width:228px;
}
.category_width_231{
width:231px;
}
.category_width_234{
width:234px;
}
.category_width_237{
width:237px;
}
.category_width_240{
width:240px;
}
.category_width_243{
width:243px;
}
.category_width_246{
width:246px;
}
.category_width_249{
width:249px;
}
.category_width_252{
width:252px;
}
.category_width_255{
width:255px;
}
.category_width_258{
width:258px;
}
.category_width_261{
width:261px;
}
.category_width_264{
width:264px;
}
.category_width_267{
width:267px;
}
.category_width_270{
width:270px;
}
.category_width_273{
width:273px;
}
.category_width_276{
width:276px;
}
.category_width_279{
width:279px;
}
.category_width_282{
width:282px;
}
.category_width_285{
width:285px;
}
.category_width_288{
width:288px;
}
.category_width_291{
width:291px;
}
.category_width_294{
width:294px;
}
.category_width_297{
width:297px;
}
.category_width_300{
width:300px;
}
/*ssltest/clients.html*/
.h {
color: green;
}
.w {
color: #F88017;
}
.e {
color: red;
}
/*projects/documentation/*/
.openssl-book-style{
margin-right: 10px; margin-top: 5px;
}
.minisite-header-3pixel{
padding-bottom: 3px;
}
.minisite-header{
padding-bottom: 1px;
}
/*ssltest/blacklistcheck.html*/
#donot-scan-list {
font-size: 16px;
line-height: 24px;
}
/*about/activityLog.vm*/
#activityTable th {
font-size: 13px;
}
#activityTable td {
font-size: 13px;
}
<|start_filename|>block_spider/spider_c_dkey/crawler.js<|end_filename|>
#! /usr/bin/env node
const request = require('request');
var crawler = function(){
request('http://localhost:3000/key?_=' + (+new Date), function(error, response, body){
request('http://localhost:3000/price?key=' + body, function(error, response, body){
if(!error && response.statusCode == 200){
                console.log(body); // show the price response returned for this key
}
})
})
}
setInterval(crawler, 1000);
<|start_filename|>block_spider/spider_e_mkey/encodes/e2.js<|end_filename|>
module.exports = {
encode: function(i){
return (i%2) ? i+2 : i+4;
},
decode: function(i){
return (i%2) ? i-2 : i-4;
}
};
<|start_filename|>github_rank/index.js<|end_filename|>
#!/usr/bin/env node
var argv = process.argv;
argv.shift();
console.log(argv);
var user = argv[1]
var Crawler = require("crawler");
var jsdom = require('jsdom');
var c = new Crawler({
jQuery: jsdom,
maxConnections : 100,
forceUTF8: true,
// incomingEncoding: 'gb2312',
// This will be called for each crawled page
callback : function (error, result, $) {
var td = $("a[href='https://github.com/" + user + "']").closest('tr').find('td')
for(var i = 0; i< td.length; i++) {
var item = $(td).eq(i).text();
console.log(item)
}
process.exit()
}
});
c.queue('http://githubrank.com/');
<|start_filename|>block_spider/spider_e_mkey/encodes/e3.js<|end_filename|>
module.exports = {
encode: function(item){
return item+5;
},
decode: function(item){
return item-5;
}
};
<|start_filename|>block_res/ssltest/css/report.css<|end_filename|>
.highlight {
color: green;
}
.warning {
color: #F88017;
}
.error {
color: red;
}
.moreInfo {
font-size: 11px;
font-weight: bold;
}
#certificateMismatch li {
font-size: 20px;
line-height: 30px;
font-weight: bold;
}
#warningBeta {
margin: 20px;
margin-bottom: 0px;
text-align: center;
padding: 5px;
font-size: 12px;
font-weight: bold;
xborder: 1px solid #bbbbbb;
background: #009ddf;
color: #ffffff;
}
#warningBox {
margin-bottom: 0px;
text-align: center;
padding: 15px;
font-size: 18px;
border: 1px solid #bbbbbb;
background: #fffacd;
}
.testDate {
font-size: 12px;
line-height: 24px;
}
.grayText {
font-size: 12px;
line-height: 24px;
color : #666666;
}
.dhParams {
font-size: 10px;
color : #666666;
}
.dhParamsNoColour {
font-size: 10px;
}
.grayCode {
font: 12px/20px monospace;
color : #666666;
}
.reportTitle {
margin: 0px;
margin-bottom: 5px;
font: 26px/28px Arial, Helvetica, sans-serif;
color: #666666;
font-weight: bold;
}
.reportTime {
margin-bottom: 20px;
color : #666666;
}
.reportInfo {
font-size: 10px;
color: #666666;
}
.url {
font-weight: bold;
color: #444444;
}
.ip {
font-size: 18px;
color: #666666;
}
.box {
border: 3px solid #a6c2c4;
background: #fdfdfd;
text-align: left;
}
.boxHead {
color: #009ddf;
font-weight: bold;
padding-bottom: 5px;
vertical-align: middle;
border-bottom: 2px solid #c6d2d4;
font-size: 14px;
}
.boxContent {
padding: 15px;
font-size: 12px;
}
.box p {
margin: 6px 0px;
}
a:link {
color: #005ccf;
}
.box a {
color: #005ccf;
}
.box a:hover {
text-decoration: underline;
}
.box .rating {
float: right;
font-weight: bold;
width: 15px;
text-align: left;
padding-right: 10px;
}
.submitBox {
padding: 15px;
background: #dbe7f7;
text-align: center;
vertical-align: middle;
margin-bottom: 20px;
font-size: 16px;
font-weight: bold;
color: #009ddf;
}
.submitBox input {
font-size: 16px;
padding: 4px;
}
.submitError {
text-align: center;
font-size: 12px;
line-height: 14px;
font-weight: bold;
color: #cc0000;
margin-top: 10px;
}
.reportSection {
width: 940px;
border: 4px solid #a6c2c4;
background: #fdfdfd;
/*margin-top: 20px;*/
margin-bottom: 20px;
text-align: center;
}
.bannerSection {
width: 940px;
border: 1px solid #c0c0c0;
background: #fdfdfd;
margin-top: 20px;
margin-bottom: 20px;
text-align: center;
padding: 3px;
}
.sectionTitle {
text-align: left;
font: 23px/28px Arial, Helvetica, sans-serif;
font-weight: bold;
color: #888888;
padding: 16px;
background: #f7f7f7;
border-bottom: 1px solid #c6d2d4;
}
.sectionBody {
margin: 20px 15px;
}
#chart {
margin-top: 20px;
margin-right: 70px;
float: right;
width: 450px;
height: 200px;
}
.chartScale {
position: absolute;
margin-left: 110px;
width: 360px;
height: 162px;
z-index: 100;
}
.chartBody {
position: relative;
width: 450px;
z-index: 200;
}
.chartScaleDiv {
width: 1px;
margin-right: 59px;
height: 162px;
float: left;
background: #bbbbbb;
}
.chartScaleLabelRow {
position: absolute;
top: 160px;
left: -29px;
}
.chartScaleLabel {
width: 60px;
float: left;
text-align: left;
color: #999999;
text-align: center;
}
.chartRow {
height: 28px;
clear: both;
margin-top: 10px;
margin-bottom: 10px;
}
.chartValue {
float: right;
display: block;
line-height: 28px;
font-weight: bold;
margin-left: 15px;
}
.chartLabel {
font-size: 11px;
float: left;
display: block;
margin-right: 10px;
line-height: 28px;
width: 100px;
text-align: right;
font-weight: bold;
}
.chartBar_g{
float: left;
text-align: right;
height: 28px;
background: url('../images/chart-green.gif') repeat-x;
}
.chartBar_a {
float: left;
text-align: right;
height: 28px;
background: url('../images/chart-amber.gif') repeat-x;
}
.chartBar_r {
float: left;
text-align: right;
height: 28px;
background: url('../images/chart-red.gif') repeat-x;
}
#rating {
text-align: center;
padding: 0px 0px;
margin-bottom: 0px;
float: left;
width: 240px;
}
.ratingTitle {
font-size: 11px;
font-weight: bold;
font-size: 14px;
color: #999999;
}
.rating_g {
font-family: Arial, Helvetica, sans-serif;
text-align: center;
margin: 15px auto;
width: 128px;
height: 128px;
font-size: 100px;
line-height: 128px;
font-weight: bold;
color: #ffffff;
background: url('../images/rating-green.gif') no-repeat;
}
.rating_a {
font-family: Arial, Helvetica, sans-serif;
text-align: center;
margin: 15px auto;
width: 128px;
height: 128px;
font-size: 100px;
line-height: 128px;
font-weight: bold;
color: #ffffff;
background: url('../images/rating-amber.gif') no-repeat;
}
.rating_r {
font-family: Arial, Helvetica, sans-serif;
text-align: center;
margin: 15px auto;
width: 128px;
height: 128px;
font-size: 100px;
line-height: 128px;
font-weight: bold;
color: #ffffff;
background: url('../images/rating-red.gif') no-repeat;
}
.rating_r_icon {
font-family: Arial, Helvetica, sans-serif;
text-align: center;
display:inline;
font-size: 12px;
font-weight: bold;
color: #ffffff;
background: url('../images/rating-red.gif') no-repeat;
background-size: 100% 100%;
}
.belowGrade {
line-height: 16px;
font-weight: normal;
font-size: 11px;
color: grey;
}
.percentage_g {
font-weight: bold;
font-size: 28px;
color: #7ed84d;
}
.percentage_a {
font-weight: bold;
font-size: 28px;
color: #ffa100;
}
.percentage_r {
font-weight: bold;
font-size: 28px;
color: #ff553e;
}
table.reportTable {
width: 810px;
margin: 0px;
padding: 0px;
margin-top: 0px;
font-size: 12px;
line-height: 20px;
margin-right: 10px;
}
.tableHead {
color: #009ddf;
font-weight: bold;
padding-bottom: 5px;
vertical-align: middle;
border-bottom: 2px solid #c6d2d4;
font-size: 13px;
}
.tableSubHead {
color: #009ddf;
font-weight: bold;
padding-bottom: 5px;
vertical-align: middle;
padding-top: 15px;
border-bottom: 1px solid #c6d2d4;
font-size: 12px;
}
.tableLeft {
text-align: left;
padding: 3px 0px;
border-bottom: 1px solid #f0f0f0;
vertical-align: middle;
}
.tableRight {
width: 50px;
text-align: right;
padding: 3px 0px;
border-bottom: 1px solid #f0f0f0;
vertical-align: middle;
}
.tableLabel {
text-align: left;
padding: 3px 0px;
padding-right: 10px;
color: #444444;
border-bottom: 1px solid #f0f0f0;
vertical-align: middle;
font-size: 12px;
font-weight: bold;
width: 250px;
}
.tableLabelTop {
text-align: left;
padding: 3px 0px;
padding-right: 10px;
color: #444444;
border-bottom: 1px solid #f0f0f0;
vertical-align: top;
font-size: 12px;
font-weight: bold;
width: 180;
}
.tableLabelRight {
text-align: right;
padding: 3px 0px;
padding-right: 15px;
color: #444444;
border-bottom: 1px solid #f0f0f0;
vertical-align: middle;
font-size: 12px;
font-weight: bold;
}
.tableCell {
text-align: left;
padding: 3px 0px;
border-bottom: 1px solid #f0f0f0;
vertical-align: middle;
font-size: 12px;
word-wrap: break-word;
word-break: break-all;
word-break: break-word;
-webkit-hyphens: auto;
-moz-hyphens: auto;
hyphens: auto;
}
.tableIcon {
text-align: right;
padding: 0px;
padding-left: 15px;
vertical-align: top;
width: 20px;
}
.tableIcon img {
vertical-align: middle;
margin-top: 2px;
}
.fleft {
float: left;
}
.fright {
float: right;
}
.black {
color: #000000;
}
.g {
color: #4ec83d;
}
.a {
color: #ffa100;
}
.r {
color: #ef251e;
}
.b {
color: #009ddf;
}
#urlInput {
width: 500px;
font-size: 24px;
margin-bottom: 20px;
padding: 4px;
}
#urlSubmit {
font-size: 24px;
}
.intro {
font-size: 11px;
line-height: 16px;
width: 400px;
text-align: center;
margin: 0px auto;
margin-top: 20px;
}
.tIcon {
float: left;
margin-right: 10px;
}
.note {
position: absolute;
left: 0px;
top: 0px;
width: 300px;
border: 2px solid #ffffff;
z-index: 1000;
}
.noteBack {
border: 2px solid #c6d2d4;
background: #dddfe0;
}
.noteHead {
color: #ffffff;
padding: 2px 6px;
font-weight: bold;
}
.noteHead.green {
background: url('../images/chart-green.gif') repeat-x;
}
.noteHead.amber {
background: url('../images/chart-amber.gif') repeat-x;
}
.noteHead.red {
background: url('../images/chart-red.gif') repeat-x;
}
.noteBody {
font-size: 11px;
padding: 8px 10px;
text-align: left;
}
#multiTable {
border: 3px solid #a6c2c4;
width: 1040px;
}
#multiTable th {
padding: 14px;
background: #CFECEC;
border: 1px solid #a6c2c4;
font-size: 18px;
}
#multiTable td {
border: 1px solid #CFECEC;
}
.greySmall {
color : #666666;
font-size: 10px;
}
.greenSmall {
color : green;
font-size: 10px;
}
.orangeSmall {
color : #F88017;
font-size: 10px;
}
.redSmall {
color : red;
font-size: 10px;
}
img {
border: none;
}
table {
border-collapse: collapse;
}
div.icon-download
{
width: 12px;
height: 12px;
background: url('../images/download.png');
background-repeat: no-repeat;
background-size: 12px 12px;
}
<|start_filename|>block_res/index.html<|end_filename|>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>测试 https 加载资源</title>
<link rel="stylesheet" href="http://necolas.github.io/normalize.css/5.0.0/normalize.css">
<style>
iframe{display: block;width: 400px; height: 200px;}
</style>
<script src="http://zeptojs.com/zepto.min.js"></script>
<script src="zepto.min.js"></script>
</head>
<body>
<h3>https 加载各种资源</h3>
<p>使用 // 格式,在 https 协议下,会自动加载 // 对应的 https 资源</p>
<dl>
<dt>全站 https 需要部署的项目:</dt>
<dd>
<pre>
m.iqianggou.com
stats-m.iqianggou.com
m.haoshiqi.net
jingxuan.haoshiqi.net
以上站点及 app 内,会加载一些外部的静态资源,如用于协议、帮助等页面
通过iframe 加载外部 http资源,会被阻止加载
www.iqianggou.com
www.haoshiqi.com
www.doweidu.com
static.haoshiqi.net
<!-- www.iqianggou.com/iqg/help/help.html -->
还有一部分资源(图片、js、css等)放在【七牛】上,也需要处理
img1.iqianggou.com ==> oc5ac7rq3.bkt.clouddn.com
img1.haoshiqi.net ==> 7xs7z4.com1.z0.glb.clouddn.com
static.doweidu.com ==> 7xr4hg.com1.z0.glb.clouddn.com
www 站对 com-iqianggou-img-agc.qiniudn.com 的引用作废掉,处理下改引用 static.doweidu.com
除此外,还要处理部分图片及连接资源的 url
百度及高德地图的 api
统计相关的 js等
还有融云上传图片 hsq-img.image.alimmdn.com
另,使用 iframe 加载外部页面时,外部页面必须不能设置 X-Frame-Options,否则加载受限
</pre>
</dd>
</dl>
<ol>
<li>加载 字体 资源,使用// 自动支持 ✔️</li>
<li>请求 本站 api,通过 location.protocol 判断自动选择 ✔️</li>
<li>加载 css/js 资源,必须使用 https。大部分脚本 ⚠️ 已判断处理,会加载对应的 https资源
<p>百度统计,使用// 自动支持;growing 脚本也自动支持。支持加载 ✔️</p>
<script>
if (!window._hmt) window._hmt = [];
_hmt.push(['_setAutoPageview', false]);
(function() {
var hm = document.createElement("script");
hm.src = "//hm.baidu.com/hm.js?dc887238fee1088e443acb020cd664f2";
var s = document.getElementsByTagName("script")[0];
s.parentNode.insertBefore(hm, s);
})();
</script>
</li>
<li>
<h3>加载 img 资源 ⚠️</h3>
<p>加载非 https图片,会给个警告,但网页可以正常显示图片</p>
<img src="http://img-agc.iqianggou.com/f8f330abb4489745bda8ec3a36a6202e!180x180" alt="七牛">
<img src="http://img1.iqianggou.com/assets/images/logo.png" alt="七牛">
<img src="http://jingxuan.haoshiqi.net/assets/img/join-talent-bg.jpg" height="300" alt="非七牛">
</li>
<li>
<h3>加载 SVG 资源呢✔️</h3>
<p>SVG 所引用的命名空间会被影响吗?完全不受影响</p>
<p>svg1</p>
<svg width="540" height="200" viewBox="0 0 270 100" style="margin:0">
<defs>
<radialGradient id="radial" cx="50%" cy="50%" fx="25%" fy="25%">
<stop offset="0%" stop-color="#60bafc"></stop>
<stop offset="50%" stop-color="#409adc"></stop>
<stop offset="100%" stop-color="#005a9c"></stop>
</radialGradient>
<path id="curve" d="M18,60 C100,10 180,110 264,60" fill="none"></path>
</defs>
<circle cx="138" cy="50" r="38" fill="url(#radial)" stroke="#005a9c"></circle>
<text font-family="Verdana" font-size="20" fill="#ff9900"><textPath xmlns:xlink="http://www.w3.org/1999/xlink" xlink:href="#curve" method="stretch" style="visibility: visible;">Scalable <tspan fill="white">Vector</tspan> Graphics</textPath></text>
</svg>
<p>svg2</p>
<svg width="100%" height="100%" version="1.1" xmlns="http://www.w3.org/2000/svg">
<ellipse cx="240" cy="100" rx="220" ry="30" style="fill:yellow"/>
<ellipse cx="220" cy="100" rx="190" ry="20" style="fill:white"/>
</svg>
</li>
<li>
<h3>加载 iframe 资源 ❌</h3>
<p>iframe 加载非 https 资源会被浏览器拦截,并报错,必须为 https 资源</p>
<iframe src="http://www.iqianggou.com/mobile/agreement.html" width="" height=""></iframe>
</li>
<li>
<h3>ajax请求 http资源 ❌</h3>
<p>请求 api,必须 https 资源,否则报错</p>
<p>GEO百度城市 api,需要升级为 https api</p>
<button id='baidu'>API,请求http资源 ❌</button>
<p>高德定位 api,需要升级为 https api或使用//</p>
<button id='gaode'>API,请求升级为// ✔️</button>
</li>
<li>请求 websocket 接口ws://,待测试,理论上会要求使用 wss ❌</li>
</ol>
<script>
function getId(id){
return document.getElementById(id);
}
var btnBaidu = getId('baidu');
var btnGaode = getId('gaode');
var testHttpApi = function(protocol){
protocol = protocol || '';
$.ajax({
type: 'GET',
url: protocol + '//api.v3.iqianggou.com/api/appconfig',
// data: { name: 'Zepto.js' },
dataType: 'json',
timeout: 3000,
// context: $('body'),
success: function(data){
// Supposing this JSON payload was received:
// {"project": {"id": 42, "html": "<div>..." }}
// append the HTML to context object.
console.log(data);
alert('Ajax success!');
},
error: function(xhr, type){
alert('Ajax error!');
}
})
}
btnBaidu.onclick = function(){
// 请求 http api资源,随便一个测试即可
// http://api.v3.iqianggou.com/api/appconfig GET
console.log('baidu')
testHttpApi('http:')
}
btnGaode.onclick = function(){
console.log('gaode');
testHttpApi()
}
</script>
</body>
</html>
<|start_filename|>block_spider/spider_b_key/server.js<|end_filename|>
#! /usr/bin/env node
const http = require('http');

// hostname/port for the demo server (assumed defaults, matching the other demo servers in this repo)
const hostname = '127.0.0.1';
const port = 3000;
// == 1 ==
var accepturl = '/price';
// var accepturl = '/pricea';
var regKey = /[?]key=(.*)?/;
var getKey = function(input){
var result = regKey.exec(input);
return result && result[1];
}
// == 2 ==
//The accepted key could be kept in a database, so it can be updated online at any time (unlike changing the URL, which would require a release).
var acceptKey = "3";
const server = http.createServer((req, res) => {
res.statusCode = 200;
res.setHeader('Content-Type', 'text/plain');
var key = getKey(req.url);
if(key == acceptKey){
        res.end('real price\n');
}else{
res.end('fake price\n');
}
});
server.listen(port, hostname, () => {
console.log(`Server running at http://${hostname}:${port}`);
})
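
// A minimal client-side check, assuming the defaults above (illustrative only):
//
//   const http = require('http');
//   http.get('http://127.0.0.1:3000/price?key=3', (res) => {
//     res.on('data', (chunk) => console.log(chunk.toString())); // real price vs. fake price
//   });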
<|start_filename|>block_ad/test_open.js<|end_filename|>
var sc=1;
<|start_filename|>block_spider/spider_e_mkey/encodes/e1.js<|end_filename|>
module.exports = {
encode: function(item){
return item-3;
},
    decode: function(item){
        // In a browser (where `document` exists) this restores the -3 offset applied by encode;
        // in a non-browser environment (e.g. a Node crawler) the +3 is skipped,
        // so the key decodes to the wrong value (an anti-crawler trap).
        return item+3*(+!(typeof document === 'undefined'));
    }
};
<|start_filename|>block_res/ssltest/css/ssllabs.css<|end_filename|>
body {
font: normal 12px Arial, Helvetica, sans-serif;
line-height: 180%;
color: #222222;
margin: 0px 0px;
background: #f0f0f0;
text-align: center;
}
.footnote {
color:gray;
font-size: 11px;
}
#pageEnd {
width: 1100px;
background: #ffffff;
margin: auto;
text-align: left;
padding-bottom: 10px;
border-left: 1px solid #BCD4DD;
border-right: 1px solid #BCD4DD;
border-bottom: 1px solid #BCD4DD;
}
#copyright {
width: 1100px;
padding: 10px;
padding-left: 20px;
padding-bottom: 0px;
}
#breadcrumbs {
margin-top: 6px;
margin-left: 30px;
}
#page {
width: 1100px;
background: #ffffff;
margin: auto;
text-align: left;
padding-bottom: 10px;
border-left: 1px solid #BCD4DD;
border-right: 1px solid #BCD4DD;
border-bottom: 1px solid #BCD4DD;
}
#main {
padding: 30px;
padding-top: 10px;
}
.newsTitle {
font-size: 13px;
font-weight: bold;
color: #444444;
margin: 0px;
}
.projectTitle {
color: #444444;
font-family: Arial, Helvetica, sans-serif;
font-weight: bold;
font-size: 13px;
line-height: 20px;
}
.projectTitle2 {
font-size: 16px;
color: #444444;
font-family: Arial, Helvetica, sans-serif;
font-weight: bold;
padding-top: 25px;
padding-bottom: 0px;
line-height: 0px;
}
p {
font-size: 13px;
line-height: 20px;
}
li {
font-size: 13px;
line-height: 20px;
}
.newsDate {
padding: 0px;
margin: 0px;
color: gray;
}
a {
color: #005ccf;
}
h1 {
color: #444444;
font-weight: bold;
padding-bottom: 5px;
font: 27px/28px Arial, Helvetica, sans-serif;
font-weight: bold;
letter-spacing: -0.04em;
}
h2 {
font-size: 16px;
color: #444444;
font-family: Arial, Helvetica, sans-serif;
font-weight: bold;
padding-top: 10px;
}
h3 {
color: #58585a;
font-size: 110%;
font-weight: bold;
margin-top: 0px;
margin-bottom: 0px;
}
#header {
width: 1100px;
padding-top: 8px;
padding-bottom: 8px;
border-top: 18px solid #de1d0b;
border-bottom: 1px solid #BCD4DD;
}
#mini-header {
width: 1100px;
padding-top: 8px;
padding-bottom: 8px;
border-top: 18px solid #de1d0b;
border-bottom: 1px solid #BCD4DD;
margin-bottom: 20px;
}
#dev-header {
padding: 10px 20px;
font-weight: bold;
background-color: #fffacd;
}
#logo {
float: left;
margin: 0px;
padding: 4px;
margin-left: 14px;
padding-top: 6px;
padding-bottom: 6px;
}
#logo-powered-by {
float: right;
padding: 10px;
padding-right: 30px;
}
#navigation {
float: right;
margin-top: 10px;
margin-right: 10px;
font-family: Arial, Helvetica, sans-sefif;
font-size: 15px;
font-weight: bold;
}
#navigation .link {
color: #444444;
text-decoration: none;
padding-right: 25px;
}
#navigation .link:hover {
text-decoration: underline;
}
#threeColumns {
margin: 20px;
margin-top: 0px;
padding-top: 20px;
}
#pageHeader {
padding: 30px 30px 0px 30px;
clear: both;
}
a.newsLink {
display: block;
font-family: Arial, Helvetica, sans-serif;
margin-top: 0px;
margin-bottom: 5px;
color: #58585a;
}
a.newsLink:hover {
color: #a70b16;
text-decoration: underline;
}
<|start_filename|>block_spider/spider_e_mkey/encodes/e4.js<|end_filename|>
module.exports = {
encode: function(item){
return item*2;
},
decode: function(item){
return item/2;
}
};
<|start_filename|>block_res/server.js<|end_filename|>
#! /usr/bin/env node
const http = require('http');
const fs = require('fs');
const path = require('path');
const hostname = '127.0.0.1';
const port = 3000;
// var pageHtml = require('./test.html');
const server = http.createServer((req, res) => {
res.statusCode = 200;
var url = req.url === '/' ? '/test.html' : req.url;
res.setHeader('Content-Type', 'text/html');
var filename = path.resolve(__dirname, 'content' + url);
    //Content Security Policy (CSP)
    //See https://content-security-policy.com/
res.setHeader('Content-Security-Policy', "default-src 'self' 'unsafe-inline' 'unsafe-eval' *.iqianggou.com hm.baidu.com *.baidustatic.com pos.baidu.com dn-growing.qbox.me data: api.growingio.com;font-src at.alicdn.com;");
// 'unsafe-inline' 'unsafe-eval'
// script-src hm.baidu.com dn-growing.qbox.me *.iqianggou.com;
// img-src *;
// connect-src *.iqianggou.com api.growingio.com;");
if(fs.existsSync(filename)){
fs.readFile(filename, (err, data) => {
//For testing: simulate various kinds of ad injection here (inline scripts, external references, etc.)
//data is a Buffer; to inject anything it must be converted to text first. You can also experiment directly in test.html.
// console.log(data);
res.end(data);
});
return;
}else{
res.end('err\n');
}
});
server.listen(port, hostname, () => {
console.log(`Server running at http://${hostname}:${port}`);
})
<|start_filename|>block_spider/spider_e_mkey/server.js<|end_filename|>
#! /usr/bin/env node
'use strict'
// const config = require('./web.config.js');
const useEval = true; //config.useEval;
const http = require('http');
const fs = require('fs');
const path = require('path');
const port = 3000;
var accepturl = '/data';
var allEncodeList = [];
fs.readdir(path.resolve(__dirname, './encodes'), (err, files) =>{
files.map((file)=>{
allEncodeList.push(require(path.resolve(__dirname, './encodes/'+ file)));
});
})
//Generate a random time offset
function GetRandomNum(Min,Max){
var Range = Max - Min;
var Rand = Math.random();
return(Min + Math.round(Rand * Range));
}
var diffDate = GetRandomNum(2, 13) * 37000;
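// Per-process random offset (74–481 seconds) mixed into the timestamp so the key cannot be derived from the clock alone.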
var createKey = function(callback, justkey){
//This is based on a timestamp, but a timestamp alone is trivially broken: the server-side value is deterministic
//and its offset from the client clock is fixed, so an attacker can simply work it out.
//To avoid that, a random offset is mixed in. The open question is when that offset should change — is this approach even workable?
//Do the /key and /data requests always arrive as a pair? Apparently that cannot be guaranteed.
var tempDate = +new Date + diffDate;//Math.floor(Math.random()*10);
var key = (Math.floor(tempDate/10000)); //numeric operations are applied to this later, so keep it as a number, not a string
if(justkey){
// console.log(key);
return key;
}
var encodeDecodeList = allEncodeList.slice().sort((a,b)=>Math.random() - 0.5); // the comparator must return a number for the shuffle to work
key = encodeDecodeList[1].encode(encodeDecodeList[0].encode(key));
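// The key goes through two randomly chosen encoders; the matching decoders are inlined as source text
// into the script returned to the client below, which applies them in the opposite order.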
var source = `
(function(){
// debugger;
var decode1 = ${encodeDecodeList[1].decode.toString()};
var decode2 = ${encodeDecodeList[0].decode.toString()};
var key = ${JSON.stringify( key.toString().split('').map((item) => { return item.charCodeAt() }) )};
// console.log(key);
var result = key.map( (item)=>String.fromCharCode(item) ).join('');
key=+result;
key=decode1(key);
key=decode2(key);
${callback}(key);
})();
`;
if(useEval){
var code = JSON.stringify( source.split('').map((item) =>{ return item.charCodeAt() }) );
return `
eval(${code}.map(item=>String.fromCharCode(item)).join(''));
`;
}
return source;
}
var regKey = /[?]key=(.*)?/;
var getKey = function(input){
var result = regKey.exec(input);
return result && result[1];
};
var regCallback = /[?]callback=([^&]*)?/;
var getCallback = function(input){
var result = regCallback.exec(input);
return result && result[1];
};
const server = http.createServer((req, res) => {
res.statusCode = 200;
var url = req.url === '/' ? '/index.html' : req.url;
res.setHeader('Content-Type', 'text/html');
var filename = path.resolve(__dirname, 'content' + url);
// console.log(filename)
// console.log(fs.existsSync(filename))
if(fs.existsSync(filename)){
fs.readFile(filename, (err, data) => {
res.end(data);
});
return;
}
else{
//Demo constraint: at most about 20 seconds may elapse between the two requests
if(req.url.startsWith('/key')){
res.setHeader('Content-Type', 'application/javascript');
res.end(createKey(getCallback(req.url)));
return;
}
if(req.url.startsWith('/data')){
var key = getKey(req.url);
var acceptKey = createKey('', true);
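// The key is the timestamp bucketed into 10-second slots, so accepting acceptKey or acceptKey-1
// gives the client a validity window of roughly 10–20 seconds.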
if(key == acceptKey || key == acceptKey -1){
res.end('1\n'); //real data
}else{
res.end('0\n'); //fake data
}
}
}
});
// Listening on a hostname restricts the server to that hostname only; it would then be unreachable by IP
// const hostname = '127.0.0.1';
// server.listen(port, hostname, () => {
// console.log(`Server running at http://${hostname}:${port}`);
// })
server.listen(port, () => {
console.log(`Server running at http://127.0.0.1:${port}`);
})
<|start_filename|>block_res/ssltest/viewClient-clientTest.js<|end_filename|>
var mainsitehost = document.getElementById("mainsitehost").value;
var plaintextSiteHost = document.getElementById("plaintextSiteHost").value;
var highest_protocol = -1;
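// Each protocol test below loads a tiny image from a dedicated port that (presumably) only accepts that
// protocol version; highest_protocol keeps track of the best version that connected successfully.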
function find_xhr() {
// http://stackoverflow.com/questions/305894/best-practice-for-detecting-ajax-xmlhttprequestsupport
var xhr = null;
try { xhr = new XMLHttpRequest(); } catch (e) {}
try { xhr = new ActiveXObject("Microsoft.XMLHTTP"); } catch (e) {}
try { xhr = new ActiveXObject("Msxml2.XMLHTTP"); } catch (e) {}
return (xhr!=null);
}
// If there's a hashtag in the URL, remove it. We need a clean
// URL in order to detect a message from the frame.
// Commented to avoid multiple redirect issue in IE
/* if (location.href.indexOf('#') != -1) {
location = 'viewMyClient.html';
}*/
var time = new Date().getTime();
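// Cache-busting timestamp appended to every test resource URL below.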
// Add a plaintext image to the page.
var img = jQuery('<img />');
jQuery(img).bind({
load: function() {
jQuery('#mixedImages').text('Yes');
jQuery('#mixedImages').css('color', '#F88017');
},
error: function() {
jQuery('#mixedImages').text('No');
}
});
jQuery('#hiddenImagesDiv').append(img);
try {
jQuery(img).attr('src', 'http://'+plaintextSiteHost+'/plaintext/1x1-transparent.png?t=' + time);
} catch(err) {
// Strict mixed content restrictions might create
// an error here and break the entire page.
jQuery('#mixedImages').text('No');
}
// Test SSL 2
jQuery('#protocol_ssl2').text('Testing...');
var img = jQuery('<img />');
jQuery(img).bind({
load: function() {
jQuery('#protocol_ssl2').text('Yes');
jQuery('#protocol_ssl2').css('color', 'red');
jQuery('#protocol_ssl2_label').css('color', 'red');
jQuery('#ssl2TestDiv').css('display', 'block');
if ((highest_protocol == -1)||(highest_protocol < 0x0200)) {
highest_protocol = 0x0200;
}
},
error: function() {
jQuery('#protocol_ssl2').text('No');
jQuery('#protocol_ssl2').css('color', 'black');
}
});
jQuery('#hiddenImagesDiv').append(img);
jQuery(img).attr('src','https://'+mainsitehost+':10200/1x1-transparent.png?t=' + time);
// Test Logjam
var img = jQuery('<img />');
jQuery(img).bind({
load: function() {
jQuery('#jamTestMsg').text('Your user agent is vulnerable. Upgrade as soon as possible.');
jQuery('#jamTestMsg').css('color', 'red');
},
error: function() {
jQuery('#jamTestMsg').text('Your user agent is not vulnerable.');
}
});
jQuery('#hiddenImagesDiv').append(img);
jQuery(img).attr('src','https://'+mainsitehost+':10445/ssl-labs-logo.gif?t=' + time);
// Test FREAK
var img = jQuery('<img />');
jQuery(img).bind({
load: function() {
jQuery('#freakTestMsg').text('Your user agent is vulnerable. Upgrade as soon as possible.');
jQuery('#freakTestMsg').css('color', 'red');
},
error: function() {
jQuery('#freakTestMsg').text('Your user agent is not vulnerable.');
}
});
jQuery('#hiddenImagesDiv').append(img);
jQuery(img).attr('src','https://'+mainsitehost+':10444/ssl-labs-logo.gif?t=' + time);
// Test SSL 3
jQuery('#protocol_ssl3').text('Testing...');
var img = jQuery('<img />');
jQuery(img).bind({
load: function() {
jQuery('#protocol_ssl3').text('Yes');
jQuery('#protocol_ssl3').css('color', 'red');
jQuery('#protocol_ssl3_label').css('color', 'red');
jQuery('#ssl3TestMsg').text('Your user agent is vulnerable. You should disable SSL 3.');
jQuery('#ssl3TestMsg').css('color', 'red');
if ((highest_protocol == -1)||(highest_protocol < 0x0300)) {
highest_protocol = 0x0300;
}
},
error: function() {
jQuery('#protocol_ssl3').text('No');
jQuery('#protocol_ssl3').css('color', 'black');
jQuery('#ssl3TestMsg').text('Your user agent is not vulnerable.');
}
});
jQuery('#hiddenImagesDiv').append(img);
jQuery(img).attr('src','https://'+mainsitehost+':10300/1x1-transparent.png?t=' + time);
// Test TLS 1.0
jQuery('#protocol_tls1').text('Testing...');
var img = jQuery('<img />');
jQuery(img).bind({
load: function() {
jQuery('#protocol_tls1').text('Yes');
jQuery('#protocol_tls1').css('color', 'black');
if ((highest_protocol == -1)||(highest_protocol < 0x0301)) {
highest_protocol = 0x0301;
}
},
error: function() {
jQuery('#protocol_tls1').text('No');
jQuery('#protocol_tls1').css('color', 'black');
}
});
jQuery('#hiddenImagesDiv').append(img);
jQuery(img).attr('src','https://'+mainsitehost+':10301/1x1-transparent.png?t=' + time);
// Test TLS 1.1
jQuery('#protocol_tls1_1').text('Testing...');
var img = jQuery('<img />');
jQuery(img).bind({
load: function() {
jQuery('#protocol_tls1_1').text('Yes');
jQuery('#protocol_tls1_1').css('color', 'black');
if ((highest_protocol == -1)||(highest_protocol < 0x0302)) {
highest_protocol = 0x0302;
}
},
error: function() {
jQuery('#protocol_tls1_1').text('No');
jQuery('#protocol_tls1_1').css('color', 'black');
}
});
jQuery('#hiddenImagesDiv').append(img);
jQuery(img).attr('src','https://'+mainsitehost+':10302/1x1-transparent.png?t=' + time);
// Test TLS 1.2
jQuery('#protocol_tls1_2').text('Testing...');
var img = jQuery('<img />');
jQuery(img).bind({
load: function() {
jQuery('#protocol_tls1_2').text('Yes');
jQuery('#protocol_tls1_2').css('color', 'green');
jQuery('#protocol_tls1_2_label').css('color', 'green');
if ((highest_protocol == -1)||(highest_protocol < 0x0303)) {
highest_protocol = 0x0303;
}
},
error: function() {
jQuery('#protocol_tls1_2').text('No');
jQuery('#protocol_tls1_2').css('color', 'black');
}
});
jQuery('#hiddenImagesDiv').append(img);
jQuery(img).attr('src','https://'+mainsitehost+':10303/1x1-transparent.png?t=' + time);
// Add a plaintext script to the page.
mixed_script_loaded = false;
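// Left as a global on purpose, it seems: plaintext/script.js presumably sets it to true if the browser lets it load.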
var script = jQuery('<script type="text/javascript" src="http://'+plaintextSiteHost+'/plaintext/script.js?t=' + time + '"></script>');
jQuery('head').append(script);
// Add a plaintext stylesheet to the page.
var css = jQuery('<link rel="styleSheet" type="text/css" href="http://'+plaintextSiteHost+'/plaintext/style1.css?t=' + time + '">');
jQuery('head').append(css);
// Mixed XHR
if (!find_xhr()) { // find_xhr() returns a boolean, so test it directly
jQuery('#mixedXhr').text('N/A');
} else {
jQuery.ajax({
type: 'GET',
url: 'http://'+plaintextSiteHost+'/plaintext/xhr.txt?t=' + time,
timeout: 5000,
success: function(response, status) {
jQuery('#mixedXhr').text('Yes');
jQuery('#mixedXhr').css('color', 'red');
},
error: function(jqXHR, status, errorMessage) {
if (status == 'timeout') {
jQuery('#mixedXhr').text('No (timeout)');
} else {
jQuery('#mixedXhr').text('No');
}
}
});
}
// Determine which of the plaintext resources loaded.
function check_test_success() {
// Mixed scripts
if (mixed_script_loaded) {
jQuery('#mixedScripts').text('Yes');
jQuery('#mixedScripts').css('color', 'red');
} else {
jQuery('#mixedScripts').text('No');
}
// Mixed CSS
var mixed_css_color1 = jQuery('#mixedCSSHiddenDiv1').css('color');
if (mixed_css_color1.indexOf('255') != -1) {
jQuery('#mixedCssLink').text('Yes');
jQuery('#mixedCssLink').css('color', 'red');
} else {
jQuery('#mixedCssLink').text('No');
}
// Mixed frames
if (location.href.indexOf('frame_loaded') != -1) {
jQuery('#mixedFrame').text('Yes');
jQuery('#mixedFrame').css('color', 'red');
} else {
jQuery('#mixedFrame').text('No');
}
if (jQuery('#mixedWebSockets').text() == 'Testing...') {
jQuery('#mixedWebSockets').text('Test failed');
}
if (highest_protocol == -1) {
jQuery('#protocol_ssl2').text('Firewall');
jQuery('#protocol_ssl2').css('color', 'red');
jQuery('#protocol_ssl3').text('Firewall');
jQuery('#protocol_ssl3').css('color', 'red');
jQuery('#protocol_tls1').text('Firewall');
jQuery('#protocol_tls1').css('color', 'red');
jQuery('#protocol_tls1_1').text('Firewall');
jQuery('#protocol_tls1_1').css('color', 'red');
jQuery('#protocol_tls1_2').text('Firewall');
jQuery('#protocol_tls1_2').css('color', 'red');
jQuery('#ssl3TestDiv').css('display', 'none');
//jQuery('#protocolTestDiv').css('display', 'block');
jQuery('#freakTestDiv').css('display', 'none');
jQuery('#jamTestDiv').css('display', 'none');
jQuery('#protocolTestHeading').text('Partial Test Failure');
jQuery('#protocolTestMsg').text('Failed, probably due to firewall restrictions');
jQuery('#protocolTestMsg').css('color', 'red');
jQuery('#protocolTestMsgNotes').text('We couldn\'t detect any secure protocols. Many of our tests run on non-standard protocols; it\'s possible that you are in an environment that limits outbound connections, thus breaking our tests. If possible, try this test in a different environment.');
} else {
if (highest_protocol == 0x0303) {
jQuery('#protocolTestMsg').text('Your user agent has good protocol support.');
jQuery('#protocolTestMsg').css('color', 'green');
jQuery('#protocolTestMsgNotes').text('Your user agent supports TLS 1.2, which is the best available protocol version at the moment.');
} else {
jQuery('#protocolTestMsg').text('Your user agent doesn\'t support TLS 1.2. You should upgrade.');
jQuery('#protocolTestMsg').css('color', 'red');
jQuery('#protocolTestMsgNotes').text('The protocols supported by your user agent are old and have known vulnerabilities. You should upgrade as soon as possible. The latest versions of Chrome, Firefox, and IE are all good choices. If you can\'t upgrade IE to version 11, we recommend that you try Chrome or Firefox on your platform.');
}
}
}
function check_mixed_content1() {
// Check the success of mixed tests only after this
// control image (which is expected to always work) loads.
var img2 = jQuery('<img />');
jQuery(img2).bind({
load: function() {
check_test_success();
},
error: function() {
// If we fail to load this image, then fail the pending tests.
jQuery('#mixedScripts').text('Test failed');
jQuery('#mixedCssLink').text('Test failed');
jQuery('#mixedFrame').text('Test failed');
if (jQuery('#mixedWebSockets').text() == 'Testing...') {
jQuery('#mixedWebSockets').text('Test failed');
}
}
});
jQuery('#hiddenImagesDiv').append(img2);
jQuery(img2).attr('src','https://'+mainsitehost+'/plaintext/1x1-transparent.png?t=' + time);
}
jQuery(document).ready(function() {
jQuery('#mixedDiv').css('display', 'block');
// The mixed content tests are in progress; delay the
// success check in order to give them time to finish.
setTimeout(function(){
check_mixed_content1();}, 1000);
});
// WebSockets Test
if (window.WebSocket == undefined) {
jQuery('#mixedWebSockets').text('N/A');
} else {
try {
var ws = new WebSocket('wss://'+mainsitehost+'/plaintext/ping');
ws.onopen = function(e) {
ws.send('ping');
}
ws.onmessage = function(e) {
//alert('secure: ' + e.data);
try {
var ws2 = new WebSocket('ws://'+plaintextSiteHost+'/plaintext/ping');
ws2.onopen = function(e) {
ws2.send('ping');
}
ws2.onmessage = function(e) {
jQuery('#mixedWebSockets').text('Yes');
jQuery('#mixedWebSockets').css('color', 'red');
ws2.close();
}
} catch(e2) {
// Exception thrown in Firefox: "This operation is insecure"
jQuery('#mixedWebSockets').text('No');
}
ws.close();
}
} catch(e) {
jQuery('#mixedWebSockets').text('N/A');
}
}
| webcoding/js_spider |
<|start_filename|>scripts/pay.js<|end_filename|>
'use strict'
const chalk = require('chalk')
const minimist = require('minimist')
const inquirer = require('inquirer')
const StellarSdk = require('stellar-sdk')
const StellarBase = require('stellar-base')
const config = require('./config.json')
const server = new StellarSdk.Server('https://horizon.stellar.org')
console.log(chalk.green('-----------------------------------------------'))
console.log(chalk.green('Stellar Wallet'), chalk.yellow('Make Payment'))
console.log(chalk.green('-----------------------------------------------'), '\n')
const argv = minimist(process.argv.slice(2))
const currencyType = StellarSdk.Asset.native()
const getBalance = (address) => {
return server.loadAccount(address).then((account) => {
let xlmBalance = 0
account.balances.forEach((balance) => {
if (balance.asset_type === 'native') xlmBalance += parseFloat(balance.balance) // balances come back as strings
})
return +xlmBalance
}).catch(fail)
}
const waitForBalancesUpdate = (sourceAddress, destinationAddress, origSourceBalance) => {
Promise.all([
getBalance(sourceAddress),
getBalance(destinationAddress)
]).then(([sourceBalance, destinationBalance]) => {
if (sourceBalance < origSourceBalance) {
console.log('New source balance:', chalk.green(sourceBalance, config.currency))
console.log('New destination balance:', chalk.green(destinationBalance, config.currency))
process.exit(0)
} else {
setTimeout(() => waitForBalancesUpdate(sourceAddress, destinationAddress, origSourceBalance), 1000)
}
})
}
const fail = (message) => {
console.error(chalk.red(message))
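// Horizon transaction errors carry operation-level result codes under extras.result_codes.operations.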
if (message.response && message.response.data && message.response.data.extras && message.response.data.extras.result_codes && message.response.data.extras.result_codes.operations) {
const codes = message.response.data.extras.result_codes.operations;
const reason = Array.isArray(codes) ? codes[0] : codes; // Horizon returns an array of operation result codes
switch(reason) {
case 'op_underfunded':
console.log(chalk.red('reason:', 'Sender account has insufficient funds'));
break;
default:
console.log(chalk.red('reason:', reason))
}
}
process.exit(1)
}
const questions = [
{
type: 'input',
name: 'amount',
default: argv.amount,
message: 'Enter ' + config.currency + ' amount to send:',
validate: (value) => isNaN(parseInt(value)) ? 'Please enter a number' : true
},
{
type: 'input',
name: 'destinationAddress',
default: argv.to,
message: 'Enter destination address:',
validate: (value) => StellarBase.StrKey.isValidEd25519PublicKey(value) ? true : 'Please enter a valid address'
},
{
type: 'input',
name: 'memo',
default: argv.memo,
message: 'Enter memo (optional):',
validate: (value) => value && value.length > 26 ? 'Please enter a valid memo' : true,
},
{
type: 'input',
name: 'sourceSecret',
message: 'Enter sender secret:',
validate: (value) => StellarBase.StrKey.isValidEd25519SecretSeed(value) ? true : 'Invalid secret'
}
]
inquirer.prompt(questions).then((answers) => {
const sourceKeypair = StellarSdk.Keypair.fromSecret(answers.sourceSecret)
const sourceAddress = sourceKeypair.publicKey()
if (sourceAddress === answers.destinationAddress) {
fail('Sender address should not be the same as the destination address')
}
console.log()
return Promise.all([
getBalance(sourceAddress),
getBalance(answers.destinationAddress)
]).then(([sourceBalance, destinationBalance]) => {
console.log('Current destination balance:', chalk.green(destinationBalance, config.currency))
if (!destinationBalance || destinationBalance + Number(answers.amount) < config.minimumAccountBalance) {
fail(`Send at least ${config.minimumAccountBalance} XLM to create the destination address`)
}
console.log('Current sender balance:', chalk.green(sourceBalance, config.currency))
if (!sourceBalance || sourceBalance - answers.amount < config.minimumAccountBalance) {
fail(`There should be at least ${config.minimumAccountBalance} XLM remaining at the sender address`)
}
inquirer.prompt([
{
type: 'confirm',
name: 'sure',
default: false,
message: 'Ready to send?'
}
]).then((confirm) => {
if (!confirm.sure) {
process.exit()
}
console.log('\nConnecting...')
server.loadAccount(sourceAddress)
.then((account) => {
console.log('Preparing payment transaction...')
let transaction = new StellarSdk.TransactionBuilder(account, { fee: StellarBase.BASE_FEE, networkPassphrase: StellarBase.Networks.PUBLIC })
.addOperation(StellarSdk.Operation.payment({
destination: answers.destinationAddress,
asset: currencyType,
amount: String(answers.amount)
})).setTimeout(1000)
// Add Memo?
if (answers.memo) {
if (String(answers.memo).match(/^[0-9]*$/)) {
transaction = transaction.addMemo(StellarSdk.Memo.id(answers.memo))
} else {
transaction = transaction.addMemo(StellarSdk.Memo.text(answers.memo))
}
}
// Finalize
transaction = transaction.build()
transaction.sign(sourceKeypair)
console.log('Submitting payment...')
server.submitTransaction(transaction)
.then((transactionResult) => {
console.log('\nSuccess! View the transaction at: ')
console.log(chalk.yellow(transactionResult._links.transaction.href), '\n')
console.log('Waiting for balance to update (use Ctrl-C to abort)')
waitForBalancesUpdate(sourceAddress, answers.destinationAddress, sourceBalance)
})
.catch(fail)
})
.catch(fail)
})
})
})
<|start_filename|>scripts/set-inflation-pool.js<|end_filename|>
'use strict'
const chalk = require('chalk')
const inquirer = require('inquirer')
const StellarSdk = require('stellar-sdk')
const StellarBase = require('stellar-base')
const server = new StellarSdk.Server('https://horizon.stellar.org')
console.log(chalk.green('-----------------------------------------------'))
console.log(chalk.green('Stellar Wallet'), chalk.yellow('Set Inflation Pool'))
console.log(chalk.green('-----------------------------------------------'), '\n')
const setInflationPool = (secret, pool) => {
const sourceKeypair = StellarSdk.Keypair.fromSecret(secret)
server.loadAccount(sourceKeypair.publicKey())
.then((account) => {
const tx = new StellarSdk.TransactionBuilder(account, { fee: StellarBase.BASE_FEE, networkPassphrase: StellarBase.Networks.PUBLIC })
.addOperation(StellarSdk.Operation.setOptions({
inflationDest: pool
})).build()
tx.sign(sourceKeypair)
console.log('Please wait...')
return server.submitTransaction(tx)
})
.then(() => {
console.log('OK')
process.exit(0)
})
.catch(fail)
}
const fail = (message) => {
console.error(chalk.red(message.name), '\n')
process.exit(1)
}
const questions = [
{
type: 'input',
name: 'pool',
message: 'Enter pool address:',
validate: (value) => StellarBase.StrKey.isValidEd25519PublicKey(value) ? true : 'Please enter a valid address'
},
{
type: 'input',
name: 'sourceSecret',
message: 'Enter wallet secret:',
validate: (value) => StellarBase.StrKey.isValidEd25519SecretSeed(value) ? true : 'Invalid secret'
}
]
inquirer.prompt(questions).then((answers) => {
console.log()
setInflationPool(answers.sourceSecret, answers.pool)
})
<|start_filename|>scripts/balance.js<|end_filename|>
'use strict'
const chalk = require('chalk')
const minimist = require('minimist')
const inquirer = require('inquirer')
const StellarSdk = require('stellar-sdk')
const StellarBase = require('stellar-base')
const config = require('./config.json')
const server = new StellarSdk.Server('https://horizon.stellar.org')
console.log(chalk.green('-----------------------------------------------'))
console.log(chalk.green('Stellar Wallet'), chalk.yellow('Balance Check'))
console.log(chalk.green('-----------------------------------------------'), '\n')
const argv = minimist(process.argv.slice(3))
const limitTransactions = argv.limit || 10
const getBalance = (address) => {
server.loadAccount(address)
.then((account) => {
// Show balances
console.log(chalk.yellow('Current Balance'))
account.balances.forEach((balance) => {
if (balance.balance > 0) {
console.log(balance.balance, balance.asset_code || config.currency)
}
})
console.log()
// Show inflation pool if set
if (account.inflation_destination) {
console.log(chalk.yellow('Inflation pool'))
console.log(account.inflation_destination, '\n')
}
// Show recent transactions
server.operations()
.forAccount(address)
.order('desc')
.limit(limitTransactions)
.call()
.then((results) => {
if (results.records && results.records.length) {
console.log(chalk.yellow(`Last ${limitTransactions} transactions`))
results.records.forEach(t => displayRecord(t, address))
console.log()
}
})
}).catch(fail)
}
const displayRecord = (record, address) => {
switch (record.type) {
case 'payment':
const amount = +parseFloat(record.amount).toFixed(5)
const plusMinus = record.from === address ? '-' : '+'
const directionArrow = record.from === address ? '→' : '←'
const currency = record.asset_type === 'native' ? config.currency : record.asset_type
console.log(`${record.created_at} ${record.type}\t${plusMinus}${amount} ${currency} ${directionArrow} ${record.to}`)
break
default:
console.log(`${record.created_at} ${record.type}`)
break
}
}
const fail = (message) => {
console.error(chalk.red(message.name), '\n')
process.exit(1)
}
if (process.argv[3] && StellarBase.StrKey.isValidEd25519PublicKey(process.argv[3])) {
getBalance(process.argv[3])
} else {
const questions = [
{
type: 'input',
name: 'wallet',
message: 'Enter wallet address:',
validate: (value) => StellarBase.StrKey.isValidEd25519PublicKey(value) ? true : 'Please enter a valid address'
}
]
inquirer.prompt(questions).then((answers) => {
console.log()
getBalance(answers.wallet)
})
}
<|start_filename|>scripts/generate.js<|end_filename|>
'use strict'
const chalk = require('chalk')
const StellarSdk = require('stellar-sdk')
const config = require('./config.json')
console.log(chalk.green('-----------------------------------------------'))
console.log(chalk.green('Stellar Wallet'), chalk.yellow('Generate Wallet'))
console.log(chalk.green('-----------------------------------------------'), '\n')
const account = StellarSdk.Keypair.random()
console.log(' Public address:', chalk.yellow(account.publicKey()))
console.log(' Wallet secret:', chalk.yellow(account.secret()), '\n')
console.log(chalk.red(' Print this wallet and make sure to store it somewhere safe!'), '\n')
console.log(` Note: You need to put at least ${config.minimumAccountBalance} ${config.currency} on this key for it to be an active account\n`)
<|start_filename|>package.json<|end_filename|>
{
"name": "stellar-wallet-cli",
"version": "1.2.1",
"description": "Simple Stellar Wallet client, made in node using js-stellar-sdk",
"author": "<NAME> <<EMAIL>>",
"license": "ISC",
"repository": {
"type": "git",
"url": "<EMAIL>:filidorwiese/stellar-wallet.git"
},
"bin": "./bin/cmd.js",
"scripts": {
"postversion": "git push --follow-tags"
},
"dependencies": {
"chalk": "2.4.1",
"inquirer": "5.2.0",
"lodash.get": "4.4.2",
"minimist": "^1.2.5",
"stellar-sdk": "^8.1.0"
}
}
| Techrocket9/stellar-wallet |
<|start_filename|>package.json<|end_filename|>
{
"name": "jquery-popup-overlay",
"version": "2.1.5",
"description": "Lightweight modal popup overlay for jquery",
"main": "index.js",
"scripts": {
"start": "http-server -c-1",
"start:ci": "http-server -c-1 --silent",
"dev": "npm start -- -o",
"test": "start-server-and-test start http://localhost:8080 test:cypress",
"test:cypress": "cypress run",
"test:watch": "npm start & cypress open",
"lint": "eslint . *.js"
},
"pre-commit": [
"lint",
"test"
],
"repository": {
"type": "git",
"url": "https://github.com/vast-engineering/jquery-popup-overlay.git"
},
"keywords": [
"jquery",
"popup",
"overlay",
"modal"
],
"author": "Vast",
"license": "MIT",
"devDependencies": {
"cypress": "^4.1.0",
"eslint": "^6.8.0",
"http-server": "^0.12.1",
"pre-commit": "^1.2.2",
"start-server-and-test": "^1.10.8"
}
}
<|start_filename|>cypress/index.html<|end_filename|>
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Test Page</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css" />
<script src="https://code.jquery.com/jquery-1.8.2.min.js"></script>
<script src="../jquery.popupoverlay.js"></script>
<style>
.well {
display: none;
}
.popup_content {
background: white;
padding: 10px;
border: 2px solid #498659;
}
:focus {
outline: 5px solid red;
}
.btn-success {
position: relative;
z-index: 55; /* setting some z-index for tests */
}
</style>
</head>
<body>
<div class="container" id="pagecontainer">
<section class="col-md-10 col-md-offset-1">
<h1>Tests</h1>
<p class="lineheight">
<input type="text" data-dummy-input id="dummy-input">
<a data-default-open class="initialism default_open btn btn-success" href="#default">Default</a>
<a data-tooltip-open class="initialism tooltip_open btn btn-success" href="#tooltip">Tooltip</a>
<a data-locked-open class="initialism locked_open btn btn-success" href="#locked">Locked</a>
<a data-custom-open class="initialism custom_open btn btn-success" href="#custom">Custom</a>
</p>
<p>TOP</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id. Lorem ipsum dolor sit amet, consectetur adipisicing elit. Minus possimus impedit porro accusamus sed repellat laborum eius a. Accusamus soluta expedita atque quisquam consequatur, dolorum sapiente dolore in! Deleniti, id.</p>
<p>BOTTOM</p>
</section>
</div>
<div id="default" class="well">
<h4>default example</h4>
<input type="text" id="inputz">
<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Quisquam, est modi quos placeat ex a? Expedita quidem, odio a vel ex. Quisquam voluptatibus, natus quas, iusto distinctio labore nihil tenetur!</p>
<button class="custom_open btn btn-default" data-custom-open-from-popup>Next example</button>
<button data-default-close class="default_close btn btn-default">Close</button>
</div>
<div id="tooltip" class="well">
<h4>tooltip example</h4>
<!-- <button class="tooltip_close btn btn-default" data-tooltip-close>Close</button> -->
</div>
<div id="locked" class="well">
<h4>locked example</h4>
<button class="locked_close btn btn-default" data-locked-close>Close</button>
</div>
<div id="custom" class="well">
<h4>custom example</h4>
<button class="custom_close btn btn-default" data-custom-close>Close</button>
<button class="locked_open btn btn-default" data-locked-open-from-popup>Open locked</button>
</div>
<!-- <div id="dynamic">
This is a test popup.
<button class="dynamic_close" data-dynamic-close>Button</button>
<input id="dynamic-dummy-input" data-dynamic-dummy-input />
</div>
<button class="dynamic_open" data-dynamic-open>dynamic</button> -->
</body>
</html>
<|start_filename|>cypress/integration/test.js<|end_filename|>
// Cypress end-to-end tests
// Debugging tips:
// - cy.pause() can be placed before and after the assertion.
// - cy.debug() can be useful... cy.get('.ls-btn').click({ force: true }).debug()
// - Also, you can click on Before/After state in Cypress UI.
// - If you want to use a console in Cypress UI dev tools, first click on Inspect Element to get the correct `window` scope.
// TODO:
// - add tests for rightedge and leftedge, for tooltips
// - test destroyAll with popups with fadeOut animation and scrolllock
// - add tests with mixed options
// - try to improve some tests if possible: test from user perspective instead of testing CSS properties
// - create tests for accessibility and wai-aria
// - add tests for default option to all non-boolean options?
// - try to make the same tests in Jest+Puppeteer to see if there will be less issues (although Puppeteer has no UI for debugging)
//-----------------------------------------------------------------------------
// Prepare random options
const randomOptions = {};
const allOptions = {
color: ['blue', 'red'],
opacity: [0, 0.5, 1],
background: [true, false]
}
// Get random value from array
const rand = function (arr) {
return arr[Math.floor(Math.random() * arr.length)];
}
// Randomize options
Object.keys(allOptions).forEach(function(key) {
const randomValue = rand(allOptions[key]);
randomOptions[key] = randomValue; // extend randomOptions object
});
//-----------------------------------------------------------------------------
// Tests
describe("jQuery Popup Overlay", () => {
context("Options", () => {
before(() => {
cy.visit("/cypress/index.html");
cy.window().then(win => {
// Extend plugin's defaults with randomOptions
// UNCOMMENT THIS LINE FOR TESTS WITH RANDOM PLUGIN OPTIONS:
// Object.assign(win.$.fn.popup.defaults, randomOptions);
// Log plugin defaults to console (for debugging)
cy.log('**Defaults:**', JSON.stringify(win.$.fn.popup.defaults));
win.console.log('**Defaults:**', JSON.stringify(win.$.fn.popup.defaults));
// jquery ':focus' selector fails when window is not in focus, replace it with our own version.
// Although, this still doesn't solve an issue when testing :focus styles and such tests will fail.
// If that happens, re-run the tests with window in focus, or without Cypress UI (i.e. headless).
// https://github.com/cypress-io/cypress/issues/2176
win.jQuery.find.selectors.filters.focus = function(elem) {
const doc = elem.ownerDocument;
return elem === doc.activeElement && !!(elem.type || elem.href);
};
});
});
beforeEach(() => {
cy.window().then(win => {
// Destroy all popups
win.$.fn.popup.destroyall();
// Add markup for temporary (dynamic) test popup
win
.$(
`<div id="dynamic">This is a test popup. <input id="dynamic-dummy-input" /></div>`
)
.appendTo("body");
});
// A bug in Cypress requires focus to be cleared with .blur(), else next .typeEsc() will fail.
cy
.get("#dummy-input")
.focus()
.blur();
});
it("autoopen true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ autoopen: true });
});
cy.get("#dynamic").should("be.visible");
});
it("autoopen false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ autoopen: false });
});
cy.get("#dynamic").should("not.be.visible");
cy.get("#dynamic_wrapper").should("exist");
});
it("absolute true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ absolute: true, autoopen: true });
});
cy.log("IMPORTANT: This test might fail in UI, just re-run the tests.");
cy.wait(10); // If we don't wait, rect.bottom will be 0 (another Cypress bug probably)
cy.scrollTo(0, 4000);
// Manually test for whether elem is out of viewport - https://github.com/cypress-io/cypress/issues/877
cy.get("#dynamic").then($el => {
const rect = $el[0].getBoundingClientRect();
// expect( rect.bottom ).to.be.lessThan( 0 );
expect(rect.bottom).to.be.lessThan(0);
});
});
it("absolute false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ absolute: false, autoopen: true });
});
cy.log("IMPORTANT: This test might fail in UI, just re-run the tests.");
cy.wait(10); // If we don't wait, rect.bottom will be 0 (another Cypress bug probably)
cy.scrollTo(0, 4000);
// Manually test for whether elem is out of viewport - https://github.com/cypress-io/cypress/issues/877
cy.get("#dynamic").then($el => {
const rect = $el[0].getBoundingClientRect();
expect(rect.bottom).to.be.greaterThan(0);
});
});
it("type tooltip", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ type: "tooltip", autoopen: true });
});
cy.get("#dynamic_wrapper").should("have.css", "position", "absolute");
cy.get("#dynamic_wrapper").should("have.css", "overflow", "visible");
cy.get("#dynamic_background").should("not.exist");
cy.get("body").should("have.css", "overflow", "visible");
});
it("type overlay", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ type: "overlay", autoopen: true });
});
cy.get("#dynamic_background").should("exist");
cy.get("#dynamic_wrapper").should("have.css", "overflow", "auto");
cy.get("#dynamic_wrapper").should("have.css", "text-align", "center");
});
it("closebutton", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ closebutton: true, autoopen: true });
});
cy.get(".dynamic_close").should("exist"); // button should exist
cy.get(".dynamic_close").click(); // button should be clickable
});
it("scrolllock true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ scrolllock: true, autoopen: true });
});
cy.get("body").should("have.css", "overflow", "hidden");
});
it("scrolllock false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ scrolllock: false, autoopen: true });
});
cy.get("body").should("have.css", "overflow", "visible");
});
it("background true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ background: true, autoopen: true });
});
cy.get("#dynamic_background").should("exist");
});
it("background false", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({ background: false, blur: false, autoopen: true });
});
cy.get("#dynamic_background").should("not.exist");
cy
.get("#dummy-input")
.click()
.focus();
cy.get("#dynamic_background").should("not.exist");
});
it("color", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ color: "rgb(255, 0, 0)", autoopen: true });
});
cy
.get("#dynamic_background")
.should("have.css", "background-color", "rgb(255, 0, 0)");
});
it("opacity", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ opacity: 0.1, autoopen: true });
});
cy.get("#dynamic_background").should("have.css", "opacity", "0.1");
});
it("horizontal", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ horizontal: "left", autoopen: true });
});
cy.get("#dynamic_wrapper").should("have.css", "text-align", "left");
});
it("vertical", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ vertical: "bottom", autoopen: true });
});
cy.get("#dynamic").should("have.css", "vertical-align", "bottom");
});
it("offsettop", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({
offsettop: 100,
vertical: "bottom",
type: "tooltip",
autoopen: true
});
});
cy.get("#dynamic_wrapper").should("have.css", "top", "100px");
});
it("offsetleft", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({
offsetleft: 100,
horizontal: "leftedge",
type: "tooltip",
autoopen: true
});
});
cy.get("#dynamic_wrapper").should("have.css", "left", "100px");
});
it("escape true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ escape: true, autoopen: true });
});
cy.typeEsc();
cy.get("#dynamic").should("be.hidden");
});
it("escape false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ escape: false, autoopen: true });
});
cy.typeEsc();
cy.get("#dynamic").should("be.visible");
});
it("blur true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ blur: true, autoopen: true });
});
// Clicking on the popup content should not hide it (this click is also required for a bug in Cypress with pointer-events:none)
cy.get("#dynamic").click(1, 1);
cy.get("#dynamic").should("be.visible");
// Clicking outside of the popup
cy.get("#dynamic_wrapper").click(1, 1, { force: true }); // Cypress doesn't respect pointer-events:none; so we have to force the click
cy.get("#dynamic").should("be.hidden");
});
it("blur false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ blur: false, autoopen: true });
});
// Clicking on the popup content should not hide it (this click is also required for a bug in Cypress with pointer-events:none)
cy.get("#dynamic").click(1, 1);
cy.get("#dynamic").should("be.visible");
// Clicking outside of the popup
cy.get("#dynamic_background").click(1, 1, { force: true }); // Cypress doesn't respect pointer-events:none; so we have to force the click
cy.get("#dynamic").should("be.visible");
});
it("blurignore", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ blurignore: 'h1', blur: true, autoopen: true });
});
// Clicking on the popup content should not hide it (this click is also required for a bug in Cypress with pointer-events:none)
cy.get("#dynamic").click(1, 1);
cy.get("#dynamic").should("be.visible");
// Clicking on h1 to test `blurignore`
cy.get("h1").click({ force: true });
cy.get("#dynamic").should("be.visible");
// Clicking outside of the popup
cy.get("#dynamic_wrapper").click(1, 1, { force: true }); // Cypress doesn't respect pointer-events:none; so we have to force the click
cy.get("#dynamic").should("be.hidden");
});
it("setzindex true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ setzindex: true, autoopen: true });
});
cy.get("#dynamic_background").should("have.css", "z-index", "100000");
cy.get("#dynamic_wrapper").should("have.css", "z-index", "100001");
});
it("setzindex false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ setzindex: false, autoopen: true });
});
cy.get("#dynamic_background").should("have.css", "z-index", "auto");
cy.get("#dynamic_wrapper").should("have.css", "z-index", "auto");
});
it("autozindex true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ autozindex: true, autoopen: true });
});
cy.get("#dynamic_background").should("have.css", "z-index", "56"); // z-index:55 is set in HTML file
cy.get("#dynamic_wrapper").should("have.css", "z-index", "57");
});
it("autozindex false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ autozindex: false, autoopen: true });
});
cy.get("#dynamic_background").should("have.css", "z-index", "100000");
cy.get("#dynamic_wrapper").should("have.css", "z-index", "100001");
});
it("keepfocus true", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({ keepfocus: true, closebutton: true, autoopen: true });
});
cy.focused().should("have.id", "dynamic"); // check if popup gets focus
cy.get(".dynamic_close").focus();
cy.typeTab();
cy.focused().should("have.id", "dynamic-dummy-input"); // check if focus stays in popup
});
it("keepfocus false", () => {
cy.window().then(win => {
win.$('<button class="dynamic_open">').appendTo("body"); // add dynamic open button
win.$("#dynamic").popup({ keepfocus: false });
cy
.get(".dynamic_open")
.focus()
.click();
cy.focused().should("not.have.id", "dynamic"); // check if popup gets focus
});
cy.window().then(win => {
win.$(".dynamic_open").remove(); // remove dynamic open button
});
});
it("focuselement", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({ focuselement: "#dynamic-dummy-input", autoopen: true });
});
cy.focused().should("have.attr", "id", "dynamic-dummy-input");
});
it("focusdelay", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({ focusdelay: 30, outline: true, autoopen: true });
});
cy.get("#dynamic").should("have.css", "outline-style", "none");
cy.wait(60);
cy.log(
"IMPORTANT: This test might fail if Cypress UI window is not in focus. Re-run tests with the window in focus, or without Cypress UI (i.e.headless)."
);
cy
.get("#dynamic", { timeout: 10 })
.should("have.css", "outline-style", "solid");
});
it("pagecontainer", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({ pagecontainer: "#pagecontainer", autoopen: true });
});
cy.get("#pagecontainer").should("have.attr", "aria-hidden", "true");
cy.get("#dynamic").should("have.attr", "aria-hidden", "false");
});
it("outline true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ outline: true, autoopen: true });
});
cy.log(
"IMPORTANT: This test might fail if Cypress UI window is not in focus. Re-run tests with the window in focus, or without Cypress UI (i.e.headless)."
);
cy.get("#dynamic").should("have.css", "outline-style", "solid");
});
it("outline false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ outline: false, autoopen: true });
});
cy.get("#dynamic").should("have.css", "outline-style", "none");
});
it("detach true", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ detach: true });
});
cy.get("#dynamic").should("not.exist");
});
it("detach false", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ detach: false });
});
cy.get("#dynamic").should("exist");
});
it("openelement", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ openelement: "[data-dummy-input]" });
});
cy.get("[data-dummy-input]").click();
cy.get("#dynamic").should("be.visible");
});
it("openelement default", () => {
cy.window().then(win => {
win.$("#dynamic").popup({});
});
cy.get("[data-dummy-input]").click();
cy.get("#dynamic").should("be.hidden");
});
it("closeelement", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({ closeelement: "#dynamic-dummy-input", autoopen: true });
});
cy.get("#dynamic-dummy-input").click();
cy.get("#dynamic").should("be.hidden");
});
it("closeelement default", () => {
cy.window().then(win => {
win.$("#dynamic").popup({ autoopen: true, closebutton: true });
});
cy.get("#dynamic-dummy-input").click();
cy.get("#dynamic").should("be.visible");
cy.get(".dynamic_close").click();
cy.get("#dynamic").should("be.hidden");
});
it("transition", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({ transition: "all 1s ease 0s", autoopen: true });
});
cy
.get("#dynamic_background")
.should("have.css", "transition", "all 1s ease 0s");
});
it("tooltipanchor", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({
tooltipanchor: "#dummy-input",
horizontal: "leftedge",
vertical: "top",
type: "tooltip",
autoopen: true
});
cy.get("#dynamic_wrapper").should(
"have.css",
"left",
win
.$("#dummy-input")
.offset()
.left.toFixed(3) + "px"
);
});
});
it("tooltipanchor default (button visible)", () => {
cy.window().then(win => {
win.$('<button class="dynamic_open">').appendTo("body"); // add dynamic open button
win
.$("#dynamic")
.popup({ type: "tooltip", horizontal: "leftedge", autoopen: true });
cy
.get("#dynamic_wrapper")
.should(
"have.css",
"left",
win.$(".dynamic_open").offset().left + "px"
);
win.$(".dynamic_open").remove(); // remove dynamic open button
});
});
it("tooltipanchor default (button invisible)", () => {
cy.window().then(win => {
win
.$("#dynamic")
.popup({
type: "tooltip",
horizontal: "leftedge",
vertical: "bottom",
autoopen: true
});
cy
.get("#dynamic_wrapper")
.should("have.css", "left", "0px")
.should("have.css", "top", "0px");
});
});
});
context("Methods", () => {
before(() => {
cy.visit("/cypress/index.html");
});
it(".show()", () => {
cy.window().then(win => {
win.$("#default").popup("show");
});
cy.get("#default").should("be.visible");
});
it(".hide()", () => {
cy.window().then(win => {
win.$("#default").popup("hide");
});
cy.get("#default").should("be.hidden");
});
it(".toggle()", () => {
cy.window().then(win => {
win.$("#default").popup("toggle");
});
cy.get("#default").should("be.visible");
});
it(".toggle() once more", () => {
cy.window().then(win => {
win.$("#default").popup("toggle");
});
cy.get("#default").should("be.hidden");
});
it("TODO: .addclosebutton()", () => {
cy.window().then(win => {
win.$("#tooltip").popup("show");
});
// cy.get(".tooltip_close").should("not.exist");
cy.window().then(win => {
win.$("#tooltip").popup("addclosebutton");
});
// cy.get(".tooltip_close").should("exist");
cy.get(".tooltip_close").click();
});
it.skip("TODO: .reposition()", () => {});
it.skip("TODO AND IMPLEMENT: .destroy()", () => {});
it.skip("TODO: $.fn.popup.destroyall()", () => {});
});
context("Multiple Instances", () => {
before(() => {
cy.visit("/cypress/index.html");
cy.window().then(win => {
win.$("#default").popup();
win.$("#tooltip").popup({
type: "tooltip"
});
win.$("#locked").popup({
scrolllock: true,
autozindex: true
});
win.$("#custom").popup({
background: false,
escape: false,
blur: false,
setzindex: false,
autozindex: true,
scrolllock: true,
closebutton: true,
outline: true,
detach: true
});
});
});
it("1. unlocked + locked", () => {
cy.get("[data-default-open]").click();
cy.get("[data-custom-open-from-popup]").click();
cy.get("body").should("have.css", "overflow", "hidden");
});
it("1a. unlocked - locked", () => {
cy.get("[data-custom-close]").click();
cy.get("body").should("have.css", "overflow", "visible");
cy.get("[data-default-close]").click();
});
it("2. locked + locked ", () => {
cy.get("[data-custom-open]").click();
cy.get("[data-locked-open-from-popup]").click();
cy.get("body").should("have.css", "overflow", "hidden");
});
it("2a. locked - locked ", () => {
cy.get("[data-locked-close]").click();
cy.get("body").should("have.css", "overflow", "hidden");
cy.get("[data-custom-close]").click();
});
it("3. locked + unlocked", () => {
cy.get("[data-custom-open]").click();
cy.get("body").should("have.css", "overflow", "hidden");
cy.window().then(win => {
win.$("#default").popup("show");
});
cy.get("body").should("have.css", "overflow", "hidden"); // if at least one overlay is locked, scrolling should be locked
});
it("3a. locked - unlocked", () => {
cy.window().then(win => {
win.$("#default").popup("hide");
});
cy.get("body").should("have.css", "overflow", "hidden");
});
it("all windows closed, body should be unlocked", () => {
cy.get("[data-custom-close]").click();
cy.get("body").should("have.css", "overflow", "visible");
});
});
context("Misc.", () => {
it("TODO: check tabindex and other features from README!!!", () => {});
});
});
| timgates42/jquery-popup-overlay |
<|start_filename|>routes/index.js<|end_filename|>
var express = require('express');
var router = express.Router();
/**
* @swagger
* tags:
* name: Index
* description: All about /
*/
/**
* @swagger
* /:
* get:
* summary: Index
* description: It will return 'index' results
* tags: [Index]
* produces:
* - application/json
* responses:
* 200:
* description: hello world
*/
router.get('/', function(req, res, next) {
res.json({results: 'index'});
});
module.exports = router;
<|start_filename|>routes/users.js<|end_filename|>
var debug = require('debug')('swagger-express-jsdoc');
var express = require('express');
var router = express.Router();
var users = [];
/**
* @swagger
* definitions:
* Users:
* required:
* - username
* properties:
* username:
* type: string
* path:
* type: string
*/
/**
* @swagger
* tags:
* name: Users
* description: All about /users
*/
/**
* @swagger
* /users:
* post:
* summary: Add User
* description: Add user by username
* tags: [Users]
* parameters:
* - name: username
* description: User's name
* in: formData
* type: string
* required: true
* produces:
* - application/json
* responses:
* 200:
* description: Success add user
*/
router.post('/', function(req, res, next) {
users.push(req.body.username);
res.json({results: users});
});
/**
* @swagger
* /users:
* get:
* summary: Get Users
* description: Get All Users
* tags: [Users]
* produces:
* - application/json
* responses:
* 200:
* description: Success get all users
*/
router.get('/', function(req, res, next) {
res.json({results: users});
});
/**
* @swagger
* /users/{username}:
* put:
* summary: Update User
* description: Update username by username
* tags: [Users]
* parameters:
* - name: username
* description: User's name
* in: path
* type: string
* required: true
* - name: name
* description: Name you want to edit
* in: formData
* type: string
* required: true
* produces:
* - application/json
* responses:
* 200:
* description: Success update user
* 400:
* description: Can't find user by username
*/
router.put('/:username', function(req, res, next) {
var user = users.indexOf(req.params.username);
if (user > -1) {
users.splice(user, 1, req.body.name);
res.json({results: users});
} else {
res.sendStatus(400);
}
});
/**
* @swagger
* /users/{username}:
* delete:
* summary: Delete User
* description: Delete user by username
* tags: [Users]
* parameters:
* - name: username
* description: User's name
* in: path
* type: string
* required: true
* produces:
* - application/json
* responses:
* 200:
* description: Success delete user
* 400:
* description: Can't find user by username
*/
router.delete('/:username', function(req, res, next) {
var user = users.indexOf(req.params.username);
if (user > -1) {
users.splice(user ,1);
res.json({results: users});
} else {
res.sendStatus(400);
}
});
module.exports = router;
| SangHakLee/swagger-express-jsdoc |
<|start_filename|>example/ios/Runner/Runner-Bridging-Header.h<|end_filename|>
#import "GeneratedPluginRegistrant.h"
| hmeranda/flutter_link_preview |
<|start_filename|>wk-starter/wk-starter-job/src/main/java/com/budwk/starter/job/JobService.java<|end_filename|>
package com.budwk.starter.job;
import com.budwk.starter.common.constant.RedisConstant;
import org.nutz.integration.jedis.RedisService;
import org.nutz.lang.random.R;
/**
* Decides whether a job may run here, preventing duplicate execution across multiple instances
*
* @author <EMAIL>
*/
public abstract class JobService {
protected RedisService redisService;
protected String instanceId;
public void init(RedisService redisService) {
this.redisService = redisService;
this.instanceId = R.UU32();
}
public boolean canExecute(String name, String jobId) {
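// Claim the job slot in Redis with a 1-second TTL; every instance then reads the stored value back,
// and only the instance whose id + jobId survived the write gets to run the job.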
String redisValue = redisService.get(RedisConstant.JOB_EXECUTE + name);
if (redisValue == null) {
redisService.setex(RedisConstant.JOB_EXECUTE + name, 1, instanceId + "_" + jobId);
}
redisValue = redisService.get(RedisConstant.JOB_EXECUTE + name);
if ((instanceId + "_" + jobId).equalsIgnoreCase(redisValue)) {
return true;
}
return false;
}
}
<|start_filename|>wk-platform/wk-platform-server/src/main/java/com/budwk/app/sys/commons/task/TaskServer.java<|end_filename|>
package com.budwk.app.sys.commons.task;
import com.budwk.starter.common.constant.RedisConstant;
import com.budwk.starter.job.JobInfo;
import lombok.extern.slf4j.Slf4j;
import org.nutz.integration.jedis.pubsub.PubSubService;
import org.nutz.integration.quartz.QuartzJob;
import org.nutz.integration.quartz.QuartzManager;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.json.Json;
import org.quartz.JobKey;
import org.quartz.TriggerUtils;
import org.quartz.impl.triggers.CronTriggerImpl;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
/**
 * Scheduled task service.
*
* @author <EMAIL>
*/
@IocBean(create = "init")
@Slf4j
public class TaskServer {
@Inject
private QuartzManager quartzManager;
// Inject the task history bean so that its message subscription gets registered
@Inject
private TaskHistory taskHistory;
@Inject
private PubSubService pubSubService;
public void init() {
quartzManager.clear();
}
/**
 * Fires a task immediately through the job publish channel.
 *
 * @param taskId  task ID
 * @param iocName IOC bean name
 * @param jobName job method name
 * @param params  parameters to pass
 */
public void doNow(String taskId, String iocName, String jobName, String params) {
JobInfo info = new JobInfo();
info.setTaskId(taskId);
info.setIocName(iocName);
info.setJobName(jobName);
info.setParams(params);
pubSubService.fire(RedisConstant.JOB_PUBLISH, Json.toJson(info));
}
/**
 * Checks whether a task is already registered.
 *
 * @param taskId task ID
 * @return true if the task exists in the scheduler
 */
public boolean isExist(String taskId) {
return quartzManager.exist(new JobKey(taskId, taskId));
}
/**
 * Adds a scheduled task backed by a Quartz cron trigger.
 *
 * @param taskId  task ID
 * @param iocName IOC bean name
 * @param jobName job method name
 * @param cron    cron expression
 * @param note    remark
 * @param params  parameters to pass
 * @throws Exception if the job cannot be scheduled
 */
public void add(String taskId, String iocName, String jobName, String cron, String note, String params) throws Exception {
QuartzJob qj = new QuartzJob();
qj.setJobName(taskId);
qj.setJobGroup(taskId);
qj.setClassName("com.budwk.app.sys.commons.task.TaskJob");
qj.setCron(cron);
qj.setComment(note);
JobInfo info = new JobInfo();
info.setTaskId(taskId);
info.setIocName(iocName);
info.setJobName(jobName);
info.setParams(params);
qj.setDataMap(Json.toJson(info));
quartzManager.add(qj);
}
/**
 * Deletes a task.
 *
 * @param taskId task ID
 * @return true if the task was removed
 */
public boolean delete(String taskId) {
QuartzJob qj = new QuartzJob();
qj.setJobName(taskId);
qj.setJobGroup(taskId);
return quartzManager.delete(qj);
}
/**
 * Clears all tasks.
 */
public void clear() {
quartzManager.clear();
}
/**
 * Computes the upcoming fire times of a cron expression.
 *
 * @param cronExpression cron expression
 * @return up to five upcoming fire times within the next month, formatted as yyyy-MM-dd HH:mm:ss
 */
public List<String> getCronExeTimes(String cronExpression) throws Exception {
List<String> list = new ArrayList<>();
CronTriggerImpl cronTriggerImpl = new CronTriggerImpl();
cronTriggerImpl.setCronExpression(cronExpression);
Calendar calendar = Calendar.getInstance();
Date now = calendar.getTime();
calendar.add(Calendar.MONTH, 1);
List<Date> dates = TriggerUtils.computeFireTimesBetween(cronTriggerImpl, null, now, calendar.getTime());
SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
// Keep at most the first five fire times.
for (int i = 0; i < dates.size() && i <= 4; ++i) {
list.add(dateFormat.format(dates.get(i)));
}
return list;
}
}
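/*
 * A minimal usage sketch, not part of the original source: scheduling a hypothetical
 * task every day at 02:00 and firing it once immediately. The task ID, bean name,
 * method name and parameters are illustrative assumptions.
 *
 * taskServer.add("task-1", "reportService", "buildDailyReport", "0 0 2 * * ?", "daily report", "{}");
 * taskServer.doNow("task-1", "reportService", "buildDailyReport", "{}");
 */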
<|start_filename|>wk-platform/wk-platform-server/src/main/java/com/budwk/app/sys/commons/task/TaskHistory.java<|end_filename|>
package com.budwk.app.sys.commons.task;
import com.budwk.app.sys.models.Sys_task_history;
import com.budwk.app.sys.services.SysTaskHistoryService;
import com.budwk.starter.common.constant.RedisConstant;
import com.budwk.starter.job.JobInfo;
import org.nutz.integration.jedis.RedisService;
import org.nutz.integration.jedis.pubsub.PubSub;
import org.nutz.integration.jedis.pubsub.PubSubService;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.json.Json;
import org.nutz.lang.Strings;
import org.nutz.lang.random.R;
/**
* @author <EMAIL>
*/
@IocBean(create = "init")
public class TaskHistory implements PubSub {
@Inject
private PubSubService pubSubService;
@Inject
private RedisService redisService;
@Inject
private SysTaskHistoryService sysTaskHistoryService;
public void init() {
pubSubService.reg(RedisConstant.JOB_SUBSCRIBE, this);
}
@Override
public void onMessage(String channel, String message) {
if (!channel.equalsIgnoreCase(RedisConstant.JOB_SUBSCRIBE)) {
return;
}
JobInfo jobInfo = Json.fromJson(JobInfo.class, message);
Sys_task_history history = new Sys_task_history();
history.setTaskId(jobInfo.getTaskId());
history.setMessage(Strings.sNull(jobInfo.getMessage()));
history.setSuccess(jobInfo.isSuccess());
history.setInstanceId(jobInfo.getInstanceId());
history.setJobId(jobInfo.getJobId());
history.setEndTime(jobInfo.getEndTime());
history.setTookTime(jobInfo.getTookTime());
String uuid = R.UU32();
String redisVal = Strings.sNull(redisService.get(RedisConstant.JOB_HISTORY));
if (Strings.isBlank(redisVal)) {
redisService.setex(RedisConstant.JOB_HISTORY, 1, uuid);
}
redisVal = Strings.sNull(redisService.get(RedisConstant.JOB_HISTORY));
if (redisVal.equalsIgnoreCase(uuid)) {
sysTaskHistoryService.insert(history);
}
}
}
<|start_filename|>wk-starter/wk-starter-job/src/main/java/com/budwk/starter/job/WkJobStarter.java<|end_filename|>
package com.budwk.starter.job;
import com.budwk.starter.common.constant.RedisConstant;
import com.budwk.starter.job.annotation.SJob;
import lombok.extern.slf4j.Slf4j;
import org.nutz.integration.jedis.pubsub.PubSub;
import org.nutz.integration.jedis.pubsub.PubSubService;
import org.nutz.ioc.Ioc;
import org.nutz.ioc.impl.PropertiesProxy;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.json.Json;
import java.lang.reflect.Method;
/**
* @author <EMAIL>
*/
@IocBean(create = "init")
@Slf4j
public class WkJobStarter implements PubSub {
@Inject
protected PubSubService pubSubService;
@Inject("refer:$ioc")
protected Ioc ioc;
@Inject
protected PropertiesProxy conf;
public void init() {
pubSubService.reg(RedisConstant.JOB_PUBLISH, this);
}
@Override
public void onMessage(String channel, String message) {
if (!channel.equalsIgnoreCase(RedisConstant.JOB_PUBLISH)) {
return;
}
JobInfo wkJobInfo = Json.fromJson(JobInfo.class, message);
try {
Class<?> iocClass = ioc.getType(wkJobInfo.getIocName());
// The IOC bean exists: invoke any declared method whose @SJob value matches the requested job name
if (iocClass != null) {
for (Method method : iocClass.getDeclaredMethods()) {
SJob sJob = method.getAnnotation(SJob.class);
if (sJob != null && sJob.value().equalsIgnoreCase(wkJobInfo.getJobName())) {
method.invoke(ioc.get(iocClass), wkJobInfo.getTaskId(), wkJobInfo.getParams());
}
}
}
} catch (Exception e) {
log.error(e.getMessage());
}
}
}
<|start_filename|>wk-platform/wk-platform-server/src/main/java/com/budwk/app/sys/commons/task/TaskJob.java<|end_filename|>
package com.budwk.app.sys.commons.task;
import com.budwk.starter.common.constant.RedisConstant;
import org.nutz.integration.jedis.pubsub.PubSubService;
import org.nutz.ioc.loader.annotation.Inject;
import org.nutz.ioc.loader.annotation.IocBean;
import org.nutz.json.Json;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
/**
* @author <EMAIL>
*/
@IocBean
public class TaskJob implements Job {
@Inject
private PubSubService pubSubService;
@Override
public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {
pubSubService.fire(RedisConstant.JOB_PUBLISH, Json.toJson(jobExecutionContext.getJobDetail().getJobDataMap()));
}
}
| budwk/budwk |
<|start_filename|>src/main/java/org/codelibs/elasticsearch/dynarank/filter/SearchActionFilter.java<|end_filename|>
package org.codelibs.elasticsearch.dynarank.filter;
import org.codelibs.elasticsearch.dynarank.ranker.DynamicRanker;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.support.ActionFilterChain;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.tasks.Task;
public class SearchActionFilter implements ActionFilter {
public static Setting<Integer> SETTING_DYNARANK_FILTER_ORDER = Setting.intSetting("dynarank.filter.order", 10, Property.NodeScope);
private final int order;
public SearchActionFilter(final Settings settings) {
order = SETTING_DYNARANK_FILTER_ORDER.get(settings);
}
@Override
public int order() {
return order;
}
@Override
public <Request extends ActionRequest, Response extends ActionResponse> void apply(final Task task, final String action,
final Request request, final ActionListener<Response> listener, final ActionFilterChain<Request, Response> chain) {
if (!SearchAction.INSTANCE.name().equals(action)) {
chain.proceed(task, action, request, listener);
return;
}
final SearchRequest searchRequest = (SearchRequest) request;
final ActionListener<Response> wrappedListener = DynamicRanker.getInstance().wrapActionListener(action, searchRequest, listener);
chain.proceed(task, action, request, wrappedListener == null ? listener : wrappedListener);
}
}
<|start_filename|>src/main/java/org/codelibs/elasticsearch/dynarank/painless/DynaRankWhitelistExtension.java<|end_filename|>
package org.codelibs.elasticsearch.dynarank.painless;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.codelibs.elasticsearch.dynarank.script.DynaRankScript;
import org.elasticsearch.painless.spi.PainlessExtension;
import org.elasticsearch.painless.spi.Whitelist;
import org.elasticsearch.painless.spi.WhitelistLoader;
import org.elasticsearch.script.ScriptContext;
public class DynaRankWhitelistExtension implements PainlessExtension {
private static final Whitelist WHITELIST =
WhitelistLoader.loadFromResourceFiles(DynaRankWhitelistExtension.class, "dynarank_whitelist.txt");
@Override
public Map<ScriptContext<?>, List<Whitelist>> getContextWhitelists() {
return Collections.singletonMap(DynaRankScript.CONTEXT, Collections.singletonList(WHITELIST));
}
}
<|start_filename|>src/main/java/org/codelibs/elasticsearch/dynarank/DynamicRankingPlugin.java<|end_filename|>
package org.codelibs.elasticsearch.dynarank;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.codelibs.elasticsearch.dynarank.filter.SearchActionFilter;
import org.codelibs.elasticsearch.dynarank.ranker.DynamicRanker;
import org.codelibs.elasticsearch.dynarank.script.DiversitySortScriptEngine;
import org.codelibs.elasticsearch.dynarank.script.DynaRankScript;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.rest.RestHeaderDefinition;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
public class DynamicRankingPlugin extends Plugin implements ActionPlugin, ScriptPlugin {
private Settings settings;
public DynamicRankingPlugin(final Settings settings) {
this.settings = settings;
}
@Override
public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
return new DiversitySortScriptEngine(settings);
}
@Override
public List<ActionFilter> getActionFilters() {
return Arrays.asList(new SearchActionFilter(settings));
}
@Override
public Collection<RestHeaderDefinition> getRestHeaders() {
return Arrays.asList(
new RestHeaderDefinition(DynamicRanker.DYNARANK_RERANK_ENABLE,
false),
new RestHeaderDefinition(DynamicRanker.DYNARANK_MIN_TOTAL_HITS,
false));
}
@Override
public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() {
final Collection<Class<? extends LifecycleComponent>> services = new ArrayList<>();
services.add(DynamicRanker.class);
return services;
}
@Override
public List<ScriptContext<?>> getContexts() {
return Arrays.asList(DynaRankScript.CONTEXT);
}
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(//
DynamicRanker.SETTING_INDEX_DYNARANK_SCRIPT, //
DynamicRanker.SETTING_INDEX_DYNARANK_LANG, //
DynamicRanker.SETTING_INDEX_DYNARANK_TYPE, //
DynamicRanker.SETTING_INDEX_DYNARANK_PARAMS, //
DynamicRanker.SETTING_INDEX_DYNARANK_REORDER_SIZE, //
DynamicRanker.SETTING_INDEX_DYNARANK_KEEP_TOPN, //
DynamicRanker.SETTING_DYNARANK_CACHE_CLEAN_INTERVAL, //
DynamicRanker.SETTING_DYNARANK_CACHE_EXPIRE //
);
}
}
<|start_filename|>src/main/java/org/codelibs/elasticsearch/dynarank/script/bucket/Buckets.java<|end_filename|>
package org.codelibs.elasticsearch.dynarank.script.bucket;
import org.elasticsearch.search.SearchHit;
public interface Buckets {
SearchHit[] getHits(final SearchHit[] searchHit);
}
| matsui-stb/elasticsearch-dynarank |
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/hookers/MenuAppender.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.hookers
import android.app.Activity
import android.content.Context
import android.view.ContextMenu
import android.view.View
import android.widget.AdapterView
import com.gh0u1l5.wechatmagician.spellbook.C
import com.gh0u1l5.wechatmagician.spellbook.WechatStatus
import com.gh0u1l5.wechatmagician.spellbook.base.EventCenter
import com.gh0u1l5.wechatmagician.spellbook.base.Hooker
import com.gh0u1l5.wechatmagician.spellbook.interfaces.IPopupMenuHook
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.ui.base.Classes.MMListPopupWindow
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.ui.contact.Classes.AddressUI
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.ui.contact.Classes.ContactLongClickListener
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.ui.conversation.Classes.ConversationCreateContextMenuListener
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.ui.conversation.Classes.ConversationLongClickListener
import de.robv.android.xposed.XC_MethodHook
import de.robv.android.xposed.XposedHelpers.*
object MenuAppender : EventCenter() {
override val interfaces: List<Class<*>>
get() = listOf(IPopupMenuHook::class.java)
data class PopupMenuItem (
val groupId: Int,
val itemId: Int,
val order: Int,
val title: String,
val onClickListener: (context: Context) -> Unit
)
@Volatile var currentUsername: String? = null
@Volatile var currentMenuItems: List<PopupMenuItem>? = null
override fun provideStaticHookers(): List<Hooker>? {
return listOf(onMMListPopupWindowShowHooker, onMMListPopupWindowDismissHooker)
}
override fun provideEventHooker(event: String): Hooker? {
return when (event) {
"onPopupMenuForContactsCreating" -> onPopupMenuForContactsCreateHooker
"onPopupMenuForConversationsCreating" -> onPopupMenuForConversationsCreateHooker
else -> throw IllegalArgumentException("Unknown event: $event")
}
}
private val onMMListPopupWindowShowHooker = Hooker {
findAndHookMethod(MMListPopupWindow, "show", object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val listenerField = findFirstFieldByExactType(MMListPopupWindow, C.AdapterView_OnItemClickListener)
val listener = listenerField.get(param.thisObject) as AdapterView.OnItemClickListener
listenerField.set(param.thisObject, AdapterView.OnItemClickListener { parent, view, position, id ->
val title = parent.adapter.getItem(position)
val context = getObjectField(param.thisObject, "mContext") as Context
currentMenuItems?.forEach {
if (title == it.title) {
it.onClickListener(context)
}
}
listener.onItemClick(parent, view, position, id)
})
}
})
}
private val onMMListPopupWindowDismissHooker = Hooker {
findAndHookMethod(MMListPopupWindow, "dismiss", object : XC_MethodHook() {
override fun afterHookedMethod(param: MethodHookParam?) {
currentUsername = null
currentMenuItems = null
}
})
}
private val onPopupMenuForContactsCreateHooker = Hooker {
findAndHookMethod(
ContactLongClickListener, "onItemLongClick",
C.AdapterView, C.View, C.Int, C.Long, object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val parent = param.args[0] as AdapterView<*>
val position = param.args[2] as Int
val item = parent.adapter?.getItem(position)
currentUsername = getObjectField(item, "field_username") as String?
}
})
findAndHookMethod(
AddressUI, "onCreateContextMenu",
C.ContextMenu, C.View, C.ContextMenuInfo, object : XC_MethodHook() {
override fun afterHookedMethod(param: MethodHookParam) {
val menu = param.args[0] as ContextMenu
val view = param.args[1] as View
currentMenuItems = notifyForResults("onPopupMenuForContactsCreating") { plugin ->
(plugin as IPopupMenuHook).onPopupMenuForContactsCreating(currentUsername ?: "")
}.flatten().sortedBy { it.itemId }
currentMenuItems?.forEach {
val item = menu.add(it.groupId, it.itemId, it.order, it.title)
item.setOnMenuItemClickListener { _ ->
it.onClickListener(view.context)
return@setOnMenuItemClickListener true
}
}
}
})
WechatStatus.toggle(WechatStatus.StatusFlag.STATUS_FLAG_CONTACT_POPUP)
}
private val onPopupMenuForConversationsCreateHooker = Hooker {
findAndHookMethod(
ConversationLongClickListener, "onItemLongClick",
C.AdapterView, C.View, C.Int, C.Long, object : XC_MethodHook() {
@Throws(Throwable::class)
override fun beforeHookedMethod(param: MethodHookParam) {
val parent = param.args[0] as AdapterView<*>
val position = param.args[2] as Int
val item = parent.adapter?.getItem(position)
currentUsername = getObjectField(item, "field_username") as String?
}
})
findAndHookMethod(
ConversationCreateContextMenuListener, "onCreateContextMenu",
C.ContextMenu, C.View, C.ContextMenuInfo, object : XC_MethodHook() {
@Throws(Throwable::class)
override fun afterHookedMethod(param: MethodHookParam) {
val menu = param.args[0] as ContextMenu
currentMenuItems = notifyForResults("onPopupMenuForConversationsCreating") { plugin ->
(plugin as IPopupMenuHook).onPopupMenuForConversationsCreating(currentUsername ?: "")
}.flatten().sortedBy { it.itemId }
currentMenuItems?.forEach {
val item = menu.add(it.groupId, it.itemId, it.order, it.title)
item.setOnMenuItemClickListener { _ ->
it.onClickListener(param.thisObject as Activity)
return@setOnMenuItemClickListener true
}
}
}
})
WechatStatus.toggle(WechatStatus.StatusFlag.STATUS_FLAG_CONVERSATION_POPUP)
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/interfaces/IActivityHook.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.interfaces
import android.app.Activity
import android.os.Bundle
import android.view.Menu
interface IActivityHook {
/**
* Called when a Wechat MMActivity has created a options menu.
*
* @param activity the activity shown in foreground.
* @param menu the options menu just created by the activity.
*/
fun onMMActivityOptionsMenuCreated(activity: Activity, menu: Menu) { }
/**
* Called when an Activity is going to invoke [Activity.onCreate] method.
*
* @param activity the activity object that is creating.
* @param savedInstanceState the saved instance state for restoring the state.
*/
fun onActivityCreating(activity: Activity, savedInstanceState: Bundle?) { }
/**
* Called when an activity is going to invoke [Activity.onStart] method.
*
* @param activity the activity object that is starting.
*/
fun onActivityStarting(activity: Activity) { }
/**
* Called when an activity is going to invoke [Activity.onResume] method.
*
* @param activity the activity object that is resuming.
*/
fun onActivityResuming(activity: Activity) { }
}
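/**
 * A minimal usage sketch, not part of the original source: a hypothetical plugin
 * that implements a single callback and logs which activity is resuming. The class
 * name and log tag are illustrative assumptions.
 */
class ExampleActivityLogger : IActivityHook {
override fun onActivityResuming(activity: Activity) {
android.util.Log.d("ExampleActivityLogger", "Resuming ${activity::class.java.name}")
}
}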
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/SpellBook.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook
import android.content.Context
import com.gh0u1l5.wechatmagician.spellbook.base.EventCenter
import com.gh0u1l5.wechatmagician.spellbook.base.HookerProvider
import com.gh0u1l5.wechatmagician.spellbook.base.Version
import com.gh0u1l5.wechatmagician.spellbook.hookers.*
import com.gh0u1l5.wechatmagician.spellbook.util.ParallelUtil.parallelForEach
import com.gh0u1l5.wechatmagician.spellbook.util.XposedUtil
import de.robv.android.xposed.XposedBridge.log
import de.robv.android.xposed.XposedHelpers.*
import de.robv.android.xposed.callbacks.XC_LoadPackage
import de.robv.android.xposed.IXposedHookLoadPackage
import java.io.File
/**
 * The core engine of Wechat Magician SpellBook.
*
* Refer: https://github.com/Gh0u1L5/WechatSpellbook/wiki
*/
object SpellBook {
/**
 * The list of currently supported [EventCenter]s.
*
* Refer: https://github.com/Gh0u1L5/WechatSpellbook/wiki/事件机制
*/
private val centers: List<EventCenter> = listOf(
Activities,
Adapters,
Database,
FileSystem,
MenuAppender,
Notifications,
SearchBar,
Storage,
UriRouter,
XLog,
XmlParser
)
/**
 * Determines whether the current process is an important Wechat process; currently only
 * the main process and the :tools process qualify.
 *
 * @param lpparam the [XC_LoadPackage.LoadPackageParam] obtained by overriding the
 * [IXposedHookLoadPackage.handleLoadPackage] method
 */
fun isImportantWechatProcess(lpparam: XC_LoadPackage.LoadPackageParam): Boolean {
// Check the process name
val processName = lpparam.processName
when {
!processName.contains(':') -> {
// Main process found; continue
}
processName.endsWith(":tools") -> {
// :tools process found; continue
}
else -> return false
}
// Check whether the JNI libraries Wechat depends on exist, to confirm that the current app really is Wechat
val features = listOf (
"libwechatcommon.so",
"libwechatmm.so",
"libwechatnetwork.so",
"libwechatsight.so",
"libwechatxlog.so"
)
return try {
val libraryDir = File(lpparam.appInfo.nativeLibraryDir)
features.filter { filename ->
File(libraryDir, filename).exists()
}.size >= 3
} catch (t: Throwable) { false }
}
/**
 * Obtains the current system Context via reflection
*/
fun getSystemContext(): Context {
val activityThreadClass = findClass("android.app.ActivityThread", null)
val activityThread = callStaticMethod(activityThreadClass, "currentActivityThread")
val context = callMethod(activityThread, "getSystemContext") as Context?
return context ?: throw Error("Failed to get system context.")
}
/**
 * Gets the APK path of the given application
*/
fun getApplicationApkPath(packageName: String): String {
val pm = getSystemContext().packageManager
val apkPath = pm.getApplicationInfo(packageName, 0)?.publicSourceDir
return apkPath ?: throw Error("Failed to get the APK path of $packageName")
}
/**
 * Gets the version of the given application
*/
fun getApplicationVersion(packageName: String): Version {
val pm = getSystemContext().packageManager
val versionName = pm.getPackageInfo(packageName, 0)?.versionName
return Version(versionName
?: throw Error("Failed to get the version of $packageName"))
}
/**
 * Starts the SpellBook framework and registers the given plugins.
 *
 * @param lpparam the [XC_LoadPackage.LoadPackageParam] obtained by overriding the
 * [IXposedHookLoadPackage.handleLoadPackage] method
 * @param plugins SpellBook plugins written by the developer; each plugin should implement
 * [HookerProvider.provideStaticHookers] or the standard interfaces provided in the
 * interfaces package
 *
 * Refer: https://github.com/Gh0u1L5/WechatSpellbook/wiki/事件机制
 */
fun startup(lpparam: XC_LoadPackage.LoadPackageParam, plugins: List<Any>?) {
log("Wechat SpellBook: ${plugins?.size ?: 0} plugins.")
WechatGlobal.init(lpparam)
registerPlugins(plugins)
registerHookers(plugins)
}
/**
 * Checks which standard interfaces each plugin implements and registers it with the matching [EventCenter]s
*/
private fun registerPlugins(plugins: List<Any>?) {
val observers = plugins?.filter { it !is HookerProvider } ?: listOf()
centers.parallelForEach { center ->
center.interfaces.forEach { `interface` ->
observers.forEach { plugin ->
val assignable = `interface`.isAssignableFrom(plugin::class.java)
if (assignable) {
center.register(`interface`, plugin)
}
}
}
}
}
/**
 * Checks whether the plugins provide custom hookers and registers them directly with the Xposed framework
*/
private fun registerHookers(plugins: List<Any>?) {
val providers = plugins?.filter { it is HookerProvider } ?: listOf()
(providers + listOf(ListViewHider, MenuAppender)).parallelForEach { provider ->
(provider as HookerProvider).provideStaticHookers()?.forEach { hooker ->
if (!hooker.hasHooked) {
XposedUtil.postHooker(hooker)
}
}
}
}
}
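/**
 * A minimal usage sketch, not part of the original source: a hypothetical Xposed
 * entry point that starts SpellBook for the important Wechat processes. The empty
 * plugin list is a placeholder; a real module passes objects implementing the
 * standard interfaces or [HookerProvider].
 */
class ExampleXposedEntry : IXposedHookLoadPackage {
override fun handleLoadPackage(lpparam: XC_LoadPackage.LoadPackageParam) {
if (SpellBook.isImportantWechatProcess(lpparam)) {
SpellBook.startup(lpparam, emptyList())
}
}
}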
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/hookers/Storage.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.hookers
import com.gh0u1l5.wechatmagician.spellbook.base.EventCenter
import com.gh0u1l5.wechatmagician.spellbook.base.Hooker
import com.gh0u1l5.wechatmagician.spellbook.interfaces.IImageStorageHook
import com.gh0u1l5.wechatmagician.spellbook.interfaces.IMessageStorageHook
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.Classes.ImgInfoStorage
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.Methods.ImgInfoStorage_load
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.storage.Classes.MsgInfoStorage
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.storage.Methods.MsgInfoStorage_insert
import de.robv.android.xposed.XC_MethodHook
import de.robv.android.xposed.XposedBridge.hookAllConstructors
import de.robv.android.xposed.XposedBridge.hookMethod
import de.robv.android.xposed.XposedHelpers.getLongField
object Storage : EventCenter() {
override val interfaces: List<Class<*>>
get() = listOf(IMessageStorageHook::class.java, IImageStorageHook::class.java)
override fun provideEventHooker(event: String): Hooker? {
return when (event) {
"onMessageStorageCreated" -> onMessageStorageCreateHooker
"onMessageStorageInserting", "onMessageStorageInserted" -> onMessageStorageInsertHooker
"onImageStorageCreated" -> onImageStorageCreateHooker
"onImageStorageLoading", "onImageStorageLoaded" -> onImageStorageLoadHooker
else -> throw IllegalArgumentException("Unknown event: $event")
}
}
private val onMessageStorageCreateHooker = Hooker {
hookAllConstructors(MsgInfoStorage, object : XC_MethodHook() {
override fun afterHookedMethod(param: MethodHookParam) {
notify("onMessageStorageCreated") { plugin ->
(plugin as IMessageStorageHook).onMessageStorageCreated(param.thisObject)
}
}
})
}
private val onMessageStorageInsertHooker = Hooker {
hookMethod(MsgInfoStorage_insert, object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val msgObject = param.args[0]
val msgId = getLongField(msgObject, "field_msgId")
notifyForBypassFlags("onMessageStorageInserting", param) { plugin ->
(plugin as IMessageStorageHook).onMessageStorageInserting(msgId, msgObject)
}
}
override fun afterHookedMethod(param: MethodHookParam) {
val msgObject = param.args[0]
val msgId = getLongField(msgObject, "field_msgId")
notify("onMessageStorageInserted") { plugin ->
(plugin as IMessageStorageHook).onMessageStorageInserted(msgId, msgObject)
}
}
})
}
private val onImageStorageCreateHooker = Hooker {
hookAllConstructors(ImgInfoStorage, object : XC_MethodHook() {
override fun afterHookedMethod(param: MethodHookParam) {
notify("onImageStorageCreated") { plugin ->
(plugin as IImageStorageHook).onImageStorageCreated(param.thisObject)
}
}
})
}
private val onImageStorageLoadHooker = Hooker {
hookMethod(ImgInfoStorage_load, object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val imageId = param.args[0] as String?
val prefix = param.args[1] as String?
val suffix = param.args[2] as String?
notifyForBypassFlags("onImageStorageLoading", param) { plugin ->
(plugin as IImageStorageHook).onImageStorageLoading(imageId, prefix, suffix)
}
}
override fun afterHookedMethod(param: MethodHookParam) {
val imageId = param.args[0] as String?
val prefix = param.args[1] as String?
val suffix = param.args[2] as String?
notify("onImageStorageLoaded") { plugin ->
(plugin as IImageStorageHook).onImageStorageLoaded(imageId, prefix, suffix)
}
}
})
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/hookers/XmlParser.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.hookers
import com.gh0u1l5.wechatmagician.spellbook.WechatStatus
import com.gh0u1l5.wechatmagician.spellbook.base.EventCenter
import com.gh0u1l5.wechatmagician.spellbook.base.Hooker
import com.gh0u1l5.wechatmagician.spellbook.interfaces.IXmlParserHook
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.sdk.platformtools.Methods.XmlParser_parse
import de.robv.android.xposed.XC_MethodHook
import de.robv.android.xposed.XposedBridge.hookMethod
object XmlParser : EventCenter() {
override val interfaces: List<Class<*>>
get() = listOf(IXmlParserHook::class.java)
override fun provideEventHooker(event: String): Hooker? {
return when (event) {
"onXmlParsing", "onXmlParsed" -> onXmlParseHooker
else -> throw IllegalArgumentException("Unknown event: $event")
}
}
private val onXmlParseHooker = Hooker {
hookMethod(XmlParser_parse, object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val xml = param.args[0] as String
val root = param.args[1] as String
notifyForOperations("onXmlParsing", param) { plugin ->
(plugin as IXmlParserHook).onXmlParsing(xml, root)
}
}
@Suppress("UNCHECKED_CAST")
override fun afterHookedMethod(param: MethodHookParam) {
val xml = param.args[0] as String
val root = param.args[1] as String
val result = param.result as MutableMap<String, String>? ?: return
notify("onXmlParsed") { plugin ->
(plugin as IXmlParserHook).onXmlParsed(xml, root, result)
}
}
})
WechatStatus.toggle(WechatStatus.StatusFlag.STATUS_FLAG_XML_PARSER)
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/mirror/com/tencent/mm/Methods.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm
import com.gh0u1l5.wechatmagician.spellbook.C
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLazy
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.Classes.ImgInfoStorage
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.Classes.LruCacheWithListener
import com.gh0u1l5.wechatmagician.spellbook.util.ReflectionUtil.findMethodsByExactParameters
import java.lang.reflect.Method
object Methods {
val ImgInfoStorage_load: Method by wxLazy("ImgInfoStorage_load") {
findMethodsByExactParameters(ImgInfoStorage, C.String, C.String, C.String, C.String, C.Boolean)
.firstOrNull()?.apply { isAccessible = true }
}
val LruCacheWithListener_put: Method by wxLazy("LruCacheWithListener_put") {
findMethodsByExactParameters(LruCacheWithListener, null, C.Object, C.Object)
.firstOrNull()?.apply { isAccessible = true }
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/mirror/com/tencent/mm/modelsfs/Classes.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.modelsfs
import com.gh0u1l5.wechatmagician.spellbook.C
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxClasses
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLazy
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLoader
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxPackageName
import com.gh0u1l5.wechatmagician.spellbook.util.ReflectionUtil.findClassesFromPackage
object Classes {
val EncEngine: Class<*> by wxLazy("EncEngine") {
findClassesFromPackage(wxLoader!!, wxClasses!!, "$wxPackageName.modelsfs")
.filterByMethod(null, "seek", C.Long)
.filterByMethod(null, "free")
.firstOrNull()
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/hookers/Database.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.hookers
import android.content.ContentValues
import com.gh0u1l5.wechatmagician.spellbook.C
import com.gh0u1l5.wechatmagician.spellbook.base.EventCenter
import com.gh0u1l5.wechatmagician.spellbook.base.Hooker
import com.gh0u1l5.wechatmagician.spellbook.interfaces.IDatabaseHook
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.wcdb.Classes.SQLiteErrorHandler
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.wcdb.database.Classes.SQLiteCursorFactory
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.wcdb.database.Classes.SQLiteDatabase
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.wcdb.support.Classes.SQLiteCancellationSignal
import de.robv.android.xposed.XC_MethodHook
import de.robv.android.xposed.XposedHelpers.findAndHookMethod
object Database : EventCenter() {
override val interfaces: List<Class<*>>
get() = listOf(IDatabaseHook::class.java)
override fun provideEventHooker(event: String): Hooker? {
return when (event) {
"onDatabaseOpening", "onDatabaseOpened" -> onOpenHooker
"onDatabaseQuerying", "onDatabaseQueried" -> onQueryHooker
"onDatabaseInserting", "onDatabaseInserted" -> onInsertHooker
"onDatabaseUpdating", "onDatabaseUpdated" -> onUpdateHooker
"onDatabaseDeleting", "onDatabaseDeleted" -> onDeleteHooker
"onDatabaseExecuting", "onDatabaseExecuted" -> onExecuteHooker
else -> throw IllegalArgumentException("Unknown event: $event")
}
}
private val onOpenHooker = Hooker {
findAndHookMethod(
SQLiteDatabase, "openDatabase",
C.String, SQLiteCursorFactory, C.Int, SQLiteErrorHandler, object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val path = param.args[0] as String
val factory = param.args[1]
val flags = param.args[2] as Int
val handler = param.args[3]
notifyForOperations("onDatabaseOpening", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseOpening(path, factory, flags, handler)
}
}
override fun afterHookedMethod(param: MethodHookParam) {
val path = param.args[0] as String
val factory = param.args[1]
val flags = param.args[2] as Int
val handler = param.args[3]
val result = param.result
notifyForOperations("onDatabaseOpened", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseOpened(path, factory, flags, handler, result)
}
}
})
}
private val onQueryHooker = Hooker {
findAndHookMethod(
SQLiteDatabase, "rawQueryWithFactory",
SQLiteCursorFactory, C.String, C.StringArray, C.String, SQLiteCancellationSignal, object : XC_MethodHook() {
@Suppress("UNCHECKED_CAST")
override fun beforeHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val factory = param.args[0]
val sql = param.args[1] as String
val selectionArgs = param.args[2] as Array<String>?
val editTable = param.args[3] as String?
val cancellation = param.args[4]
notifyForOperations("onDatabaseQuerying", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseQuerying(
thisObject, factory, sql, selectionArgs, editTable, cancellation)
}
}
@Suppress("UNCHECKED_CAST")
override fun afterHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val factory = param.args[0]
val sql = param.args[1] as String
val selectionArgs = param.args[2] as Array<String>?
val editTable = param.args[3] as String?
val cancellation = param.args[4]
val result = param.result
notifyForOperations("onDatabaseQueried", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseQueried(
thisObject, factory, sql, selectionArgs, editTable, cancellation, result)
}
}
})
}
private val onInsertHooker = Hooker {
findAndHookMethod(
SQLiteDatabase, "insertWithOnConflict",
C.String, C.String, C.ContentValues, C.Int, object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val table = param.args[0] as String
val nullColumnHack = param.args[1] as String?
val initialValues = param.args[2] as ContentValues?
val conflictAlgorithm = param.args[3] as Int
notifyForOperations("onDatabaseInserting", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseInserting(
thisObject, table, nullColumnHack, initialValues, conflictAlgorithm)
}
}
override fun afterHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val table = param.args[0] as String
val nullColumnHack = param.args[1] as String?
val initialValues = param.args[2] as ContentValues?
val conflictAlgorithm = param.args[3] as Int
val result = param.result as Long?
notifyForOperations("onDatabaseInserted", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseInserted(
thisObject, table, nullColumnHack, initialValues, conflictAlgorithm, result)
}
}
})
}
private val onUpdateHooker = Hooker {
findAndHookMethod(
SQLiteDatabase, "updateWithOnConflict",
C.String, C.ContentValues, C.String, C.StringArray, C.Int, object : XC_MethodHook() {
@Suppress("UNCHECKED_CAST")
override fun beforeHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val table = param.args[0] as String
val values = param.args[1] as ContentValues
val whereClause = param.args[2] as String?
val whereArgs = param.args[3] as Array<String>?
val conflictAlgorithm = param.args[4] as Int
notifyForOperations("onDatabaseUpdating", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseUpdating(
thisObject, table, values, whereClause, whereArgs, conflictAlgorithm)
}
}
@Suppress("UNCHECKED_CAST")
override fun afterHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val table = param.args[0] as String
val values = param.args[1] as ContentValues
val whereClause = param.args[2] as String?
val whereArgs = param.args[3] as Array<String>?
val conflictAlgorithm = param.args[4] as Int
val result = param.result as Int
notifyForOperations("onDatabaseUpdated", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseUpdated(
thisObject, table, values, whereClause, whereArgs, conflictAlgorithm, result)
}
}
})
}
private val onDeleteHooker = Hooker {
findAndHookMethod(
SQLiteDatabase, "delete",
C.String, C.String, C.StringArray, object : XC_MethodHook() {
@Suppress("UNCHECKED_CAST")
override fun beforeHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val table = param.args[0] as String
val whereClause = param.args[1] as String?
val whereArgs = param.args[2] as Array<String>?
notifyForOperations("onDatabaseDeleting", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseDeleting(thisObject, table, whereClause, whereArgs)
}
}
@Suppress("UNCHECKED_CAST")
override fun afterHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val table = param.args[0] as String
val whereClause = param.args[1] as String?
val whereArgs = param.args[2] as Array<String>?
val result = param.result as Int
notifyForOperations("onDatabaseDeleted", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseDeleted(thisObject, table, whereClause, whereArgs, result)
}
}
})
}
private val onExecuteHooker = Hooker {
findAndHookMethod(
SQLiteDatabase, "executeSql",
C.String, C.ObjectArray, SQLiteCancellationSignal, object : XC_MethodHook() {
@Suppress("UNCHECKED_CAST")
override fun beforeHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val sql = param.args[0] as String
val bindArgs = param.args[1] as Array<Any?>?
val cancellation = param.args[2]
notifyForBypassFlags("onDatabaseExecuting", param) { plugin ->
(plugin as IDatabaseHook).onDatabaseExecuting(thisObject, sql, bindArgs, cancellation)
}
}
@Suppress("UNCHECKED_CAST")
override fun afterHookedMethod(param: MethodHookParam) {
val thisObject = param.thisObject
val sql = param.args[0] as String
val bindArgs = param.args[1] as Array<Any?>?
val cancellation = param.args[2]
notify("onDatabaseExecuted") { plugin ->
(plugin as IDatabaseHook).onDatabaseExecuted(thisObject, sql, bindArgs, cancellation)
}
}
})
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/mirror/com/tencent/wcdb/database/Classes.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.wcdb.database
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLazy
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLoader
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.wcdb.Package.WECHAT_PACKAGE_SQLITE
import com.gh0u1l5.wechatmagician.spellbook.util.ReflectionUtil.findClassIfExists
object Classes {
val SQLiteDatabase: Class<*> by wxLazy("SQLiteDatabase") {
findClassIfExists("$WECHAT_PACKAGE_SQLITE.database.SQLiteDatabase", wxLoader!!)
}
val SQLiteCursorFactory: Class<*> by wxLazy("SQLiteCursorFactory") {
findClassIfExists("$WECHAT_PACKAGE_SQLITE.database.SQLiteDatabase\$CursorFactory", wxLoader!!)
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/mirror/com/tencent/mm/Fields.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLazy
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.Classes.ImgInfoStorage
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.Classes.LruCacheWithListener
import com.gh0u1l5.wechatmagician.spellbook.util.ReflectionUtil.findFieldsWithGenericType
import java.lang.reflect.Field
object Fields {
val ImgInfoStorage_mBitmapCache: Field by wxLazy("ImgInfoStorage_mBitmapCache") {
findFieldsWithGenericType(
ImgInfoStorage, "${LruCacheWithListener.canonicalName}<java.lang.String, android.graphics.Bitmap>")
.firstOrNull()?.apply { isAccessible = true }
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/hookers/Activities.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.hookers
import android.app.Activity
import android.os.Bundle
import android.view.Menu
import com.gh0u1l5.wechatmagician.spellbook.C
import com.gh0u1l5.wechatmagician.spellbook.base.EventCenter
import com.gh0u1l5.wechatmagician.spellbook.base.Hooker
import com.gh0u1l5.wechatmagician.spellbook.interfaces.IActivityHook
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.ui.Classes.MMActivity
import de.robv.android.xposed.XC_MethodHook
import de.robv.android.xposed.XposedHelpers.findAndHookMethod
object Activities : EventCenter() {
override val interfaces: List<Class<*>>
get() = listOf(IActivityHook::class.java)
override fun provideEventHooker(event: String) = when (event) {
"onMMActivityOptionsMenuCreated" -> onCreateOptionsMenuHooker
"onActivityCreating" -> onCreateHooker
"onActivityStarting" -> onStartHooker
"onActivityResuming" -> onResumeHooker
else -> throw IllegalArgumentException("Unknown event: $event")
}
private val onCreateOptionsMenuHooker = Hooker {
findAndHookMethod(MMActivity, "onCreateOptionsMenu", C.Menu, object : XC_MethodHook() {
override fun afterHookedMethod(param: MethodHookParam) {
val activity = param.thisObject as? Activity ?: return
val menu = param.args[0] as? Menu ?: return
notify("onMMActivityOptionsMenuCreated") { plugin ->
(plugin as IActivityHook).onMMActivityOptionsMenuCreated(activity, menu)
}
}
})
}
private val onCreateHooker = Hooker {
findAndHookMethod(C.Activity, "onCreate", C.Bundle, object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val activity = param.thisObject as? Activity ?: return
val savedInstanceState = param.args[0] as Bundle?
notify("onActivityCreating") { plugin ->
(plugin as IActivityHook).onActivityCreating(activity, savedInstanceState)
}
}
})
}
private val onStartHooker = Hooker {
findAndHookMethod(C.Activity, "onStart", object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val activity = param.thisObject as? Activity ?: return
notify("onActivityStarting") { plugin ->
(plugin as IActivityHook).onActivityStarting(activity)
}
}
})
}
private val onResumeHooker = Hooker {
findAndHookMethod(C.Activity, "onResume", object : XC_MethodHook() {
override fun beforeHookedMethod(param: MethodHookParam) {
val activity = param.thisObject as? Activity ?: return
notify("onActivityResuming") { plugin ->
(plugin as IActivityHook).onActivityResuming(activity)
}
}
})
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/mirror/com/tencent/mm/Classes.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm
import com.gh0u1l5.wechatmagician.spellbook.C
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxClasses
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLazy
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLoader
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxPackageName
import com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.sdk.platformtools.Classes.LruCache
import com.gh0u1l5.wechatmagician.spellbook.util.ReflectionUtil.findClassesFromPackage
object Classes {
val ImgInfoStorage: Class<*> by wxLazy("ImgInfoStorage") {
findClassesFromPackage(wxLoader!!, wxClasses!!, wxPackageName, 1)
.filterByMethod(C.String, C.String, C.String, C.String, C.Boolean)
.firstOrNull()
}
val LruCacheWithListener: Class<*> by wxLazy("LruCacheWithListener") {
findClassesFromPackage(wxLoader!!, wxClasses!!, wxPackageName, 1)
.filterBySuper(LruCache)
.firstOrNull()
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/mirror/com/tencent/mm/sdk/platformtools/Classes.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.mirror.com.tencent.mm.sdk.platformtools
import com.gh0u1l5.wechatmagician.spellbook.C
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxClasses
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLazy
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxLoader
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal.wxPackageName
import com.gh0u1l5.wechatmagician.spellbook.util.ReflectionUtil.findClassesFromPackage
object Classes {
val Logcat: Class<*> by wxLazy("Logcat") {
findClassesFromPackage(wxLoader!!, wxClasses!!, "$wxPackageName.sdk.platformtools")
.filterByEnclosingClass(null)
.filterByMethod(C.Int, "getLogLevel")
.firstOrNull()
}
val LruCache: Class<*> by wxLazy("LruCache") {
findClassesFromPackage(wxLoader!!, wxClasses!!, "$wxPackageName.sdk.platformtools")
.filterByMethod(null, "trimToSize", C.Int)
.firstOrNull()
}
val XmlParser: Class<*> by wxLazy("XmlParser") {
findClassesFromPackage(wxLoader!!, wxClasses!!, "$wxPackageName.sdk.platformtools")
.filterByMethod(C.Map, C.String, C.String)
.firstOrNull()
}
}
<|start_filename|>src/main/kotlin/com/gh0u1l5/wechatmagician/spellbook/util/ReflectionUtil.kt<|end_filename|>
package com.gh0u1l5.wechatmagician.spellbook.util
import com.gh0u1l5.wechatmagician.spellbook.WechatGlobal
import com.gh0u1l5.wechatmagician.spellbook.base.Classes
import com.gh0u1l5.wechatmagician.spellbook.parser.ApkFile
import com.gh0u1l5.wechatmagician.spellbook.parser.ClassTrie
import de.robv.android.xposed.XC_MethodHook
import de.robv.android.xposed.XposedBridge.hookMethod
import java.lang.reflect.Field
import java.lang.reflect.Method
import java.util.concurrent.ConcurrentHashMap
/**
 * A collection of reflection helpers used by [Classes] for automatic adaptation
*/
object ReflectionUtil {
/**
 * Performs a shallow copy of the given object via reflection
*/
@JvmStatic fun shadowCopy(obj: Any, copy: Any, clazz: Class<*>? = obj::class.java) {
if (clazz == null) {
return
}
shadowCopy(obj, copy, clazz.superclass)
clazz.declaredFields.forEach {
it.isAccessible = true
it.set(copy, it.get(obj))
}
}
/**
 * Caches the results of completed [findClassesFromPackage] searches
*/
private val classCache: MutableMap<String, Classes> = ConcurrentHashMap()
@JvmStatic fun clearClassCache() {
classCache.clear()
}
/**
 * Caches the results of completed [findMethodExact] searches
*/
private val methodCache: MutableMap<String, Method?> = ConcurrentHashMap()
@JvmStatic fun clearMethodCache() {
methodCache.clear()
}
/**
 * Finds an exact class, returning null if it does not exist
*/
@JvmStatic fun findClassIfExists(className: String, classLoader: ClassLoader): Class<*>? {
try {
return Class.forName(className, false, classLoader)
} catch (throwable: Throwable) {
if (WechatGlobal.wxUnitTestMode) {
throw throwable
}
}
return null
}
/**
 * Finds all classes at the given depth within the given package.
 *
 * For performance reasons, only classes at exactly the given depth are returned. For
 * example, with a depth of 0 only the classes owned by the package itself are returned,
 * excluding classes that belong to its sub-packages.
 *
 * @param loader the class loader used to load the [Class] objects
 * @param trie the package structure of the whole APK; since Java [ClassLoader] objects cannot
 * enumerate all class names, the APK structure has to be parsed by other means before the
 * classes of a package can be listed, see [ApkFile] and [WechatGlobal] for details
 * @param packageName the package name
 * @param depth the depth
 */
@JvmStatic fun findClassesFromPackage(loader: ClassLoader, trie: ClassTrie, packageName: String, depth: Int = 0): Classes {
val key = "$depth-$packageName"
val cached = classCache[key]
if (cached != null) {
return cached
}
val classes = Classes(trie.search(packageName, depth).mapNotNull { name ->
findClassIfExists(name, loader)
})
return classes.also { classCache[key] = classes }
}
/**
 * Finds an exact method, returning null if it does not exist
*/
@JvmStatic fun findMethodExactIfExists(clazz: Class<*>, methodName: String, vararg parameterTypes: Class<*>): Method? =
try { findMethodExact(clazz, methodName, *parameterTypes) } catch (_: Throwable) { null }
/**
 * Builds a parameter signature following the JVM Specification
*/
@JvmStatic private fun getParametersString(vararg clazzes: Class<*>): String =
"(" + clazzes.joinToString(","){ it.canonicalName ?: "" } + ")"
/**
 * Finds an exact method, throwing [NoSuchMethodError] if it does not exist.
 *
 * @param clazz the class that declares the method
 * @param methodName the name of the method
 * @param parameterTypes the parameter types of the method
 */
@JvmStatic fun findMethodExact(clazz: Class<*>, methodName: String, vararg parameterTypes: Class<*>): Method {
val fullMethodName = "${clazz.name}#$methodName${getParametersString(*parameterTypes)}#exact"
if (fullMethodName in methodCache) {
return methodCache[fullMethodName] ?: throw NoSuchMethodError(fullMethodName)
}
try {
val method = clazz.getDeclaredMethod(methodName, *parameterTypes).apply {
isAccessible = true
}
return method.also { methodCache[fullMethodName] = method }
} catch (e: NoSuchMethodException) {
methodCache[fullMethodName] = null
throw NoSuchMethodError(fullMethodName)
}
}
/**
 * Finds all methods matching the given signature.
 *
 * @param clazz the class that declares the methods
 * @param returnType the return type of the methods
 * @param parameterTypes the parameter types of the methods
 */
@JvmStatic fun findMethodsByExactParameters(clazz: Class<*>, returnType: Class<*>?, vararg parameterTypes: Class<*>): List<Method> {
return clazz.declaredMethods.filter { method ->
if (returnType != null && returnType != method.returnType) {
return@filter false
}
val methodParameterTypes = method.parameterTypes
if (parameterTypes.size != methodParameterTypes.size) {
return@filter false
}
for (i in parameterTypes.indices) {
if (parameterTypes[i] != methodParameterTypes[i]) {
return@filter false
}
}
method.isAccessible = true
return@filter true
}
}
/**
 * Finds an exact field, returning null if it does not exist
*/
@JvmStatic fun findFieldIfExists(clazz: Class<*>, fieldName: String): Field? =
try { clazz.getField(fieldName) } catch (_: Throwable) { null }
/**
 * Finds all fields of the given type in the given class
*/
@JvmStatic fun findFieldsWithType(clazz: Class<*>, typeName: String): List<Field> {
return clazz.declaredFields.filter {
it.type.name == typeName
}
}
/**
 * Finds all fields with the given generic type in the given class
*/
@JvmStatic fun findFieldsWithGenericType(clazz: Class<*>, genericTypeName: String): List<Field> {
return clazz.declaredFields.filter {
it.genericType.toString() == genericTypeName
}
}
/**
 * Hooks every method declared in a class; generally only used for testing
*/
@JvmStatic fun hookAllMethodsInClass(clazz: Class<*>, callback: XC_MethodHook) {
clazz.declaredMethods.forEach { method -> hookMethod(method, callback) }
}
}
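// A minimal usage sketch, not part of the original source: listing the classes owned
// directly (depth 0) by a package once WechatGlobal has been initialised. The package
// name is an illustrative assumption.
//
// val classes = ReflectionUtil.findClassesFromPackage(
//     WechatGlobal.wxLoader!!, WechatGlobal.wxClasses!!, "com.tencent.mm.sdk.platformtools")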
| fwb7014/WechatSpellbook |
<|start_filename|>lib/cryptoutils.dart<|end_filename|>
library cryptoutils;
export "utils.dart";
export "hash.dart";
export "encoding/base64.dart";
| paycoin-com/dart-cryptoutils |
<|start_filename|>src/util/compareAToZ.js<|end_filename|>
function compareAToZ(a, b) {
return a.localeCompare(b);
}
export { compareAToZ };
<|start_filename|>bin/options/helpers/parseUppercaseList.js<|end_filename|>
function parseUppercaseList(values) {
return values.split(',').map((entry) => entry.toUpperCase());
}
export { parseUppercaseList };
<|start_filename|>src/api/ftx/controllers/lending-rates/index.js<|end_filename|>
import { get } from './get.js';
const lendingRates = { get };
export { lendingRates };
<|start_filename|>src/api/ftx/endpoints/user-rewards/getUserRewards.js<|end_filename|>
import { request } from '../request.js';
async function getUserRewards(options) {
return request({
...options,
rawEndpoint: 'user_rewards',
method: 'get',
});
}
export { getUserRewards };
<|start_filename|>src/util/shorthandNumber.js<|end_filename|>
const ONE_TRILLION = 1_000_000_000_000;
const ONE_BILLION = 1_000_000_000;
const ONE_MILLION = 1_000_000;
const ONE_THOUSAND = 1000;
function formatNumber(value, divisor, shorthandCharacter) {
const multiplier = value / divisor;
return `${multiplier.toFixed(2)}${shorthandCharacter}`;
}
function shorthandNumber(value) {
if (value >= ONE_TRILLION) {
return formatNumber(value, ONE_TRILLION, 'T');
}
if (value >= ONE_BILLION) {
return formatNumber(value, ONE_BILLION, 'B');
}
if (value >= ONE_MILLION) {
return formatNumber(value, ONE_MILLION, 'M');
}
if (value >= ONE_THOUSAND) {
return formatNumber(value, ONE_THOUSAND, 'K');
}
return value.toFixed(2);
}
export { shorthandNumber };
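// A minimal usage sketch, not part of the original file:
// shorthandNumber(1_234_567) === '1.23M'
// shorthandNumber(987) === '987.00'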
<|start_filename|>src/util/secondsPerHour.js<|end_filename|>
const SECONDS_PER_HOUR = 3600;
export { SECONDS_PER_HOUR };
<|start_filename|>src/api/ftx/endpoints/spot-margin/getLendingInfo.js<|end_filename|>
import { request } from '../request.js';
async function getLendingInfo(options) {
return request({
...options,
rawEndpoint: 'spot_margin/lending_info',
method: 'get',
});
}
export { getLendingInfo };
<|start_filename|>src/mocks/ftx-api/orders/common/isValidSize.js<|end_filename|>
import BigNumber from 'bignumber.js';
function isValidSize(size) {
const safeSize = new BigNumber(size);
// Size should be a number greater than zero.
return !safeSize.isNaN() && safeSize.isGreaterThan(0);
}
export { isValidSize };
<|start_filename|>src/api/ftx/endpoints/orders/cancelOrders.js<|end_filename|>
import { request } from '../request.js';
async function cancelOrders(options) {
return request({
...options,
rawEndpoint: 'orders',
method: 'delete',
});
}
export { cancelOrders };
<|start_filename|>src/api/ftx/endpoints/futures/index.js<|end_filename|>
import { getFundingRates } from './getFundingRates.js';
import { getFutures } from './getFutures.js';
import { getFutureStats } from './getFutureStats.js';
const futures = {
getFundingRates,
getFutures,
getFutureStats,
};
export { futures };
<|start_filename|>src/api/ftx/endpoints/futures/getFundingRates.js<|end_filename|>
import { request } from '../request.js';
async function getFundingRates(options) {
return request({
...options,
rawEndpoint: 'funding_rates',
method: 'get',
});
}
export { getFundingRates };
<|start_filename|>src/util/max32BitInteger.js<|end_filename|>
const MAX_32_BIT_INTEGER = 2_147_483_647;
export { MAX_32_BIT_INTEGER };
<|start_filename|>.lintstagedrc.json<|end_filename|>
{
"**/*.{js,jsx,ts,tsx}": ["eslint --max-warnings=0", "prettier --check"],
"**/*.{html,json,md}": ["prettier --check"]
}
<|start_filename|>src/api/ftx/controllers/futures/index.js<|end_filename|>
import { getStats } from './getStats.js';
const futures = { getStats };
export { futures };
<|start_filename|>src/mocks/ftx-api/orders/common/isValidExternalReferralProgram.js<|end_filename|>
function isValidExternalReferralProgram(externalReferralProgram) {
return externalReferralProgram === 'FTX CLI';
}
export { isValidExternalReferralProgram };
<|start_filename|>bin/options/helpers/parseNumberRange.js<|end_filename|>
import { InvalidOptionArgumentError } from 'commander';
import { parseNumber } from './parseNumber.js';
function composeNumberRange(numbers, errorMessage, numberOptions) {
return {
from: parseNumber(numbers[0], errorMessage, numberOptions),
to: parseNumber(numbers[1], errorMessage, numberOptions),
};
}
function parseNumberRange(numberRange, errorMessage, numberOptions) {
const numbers = numberRange.split(':');
if (numbers.length !== 2) {
throw new InvalidOptionArgumentError(errorMessage);
}
return composeNumberRange(numbers, errorMessage, numberOptions);
}
export { parseNumberRange };
<|start_filename|>src/api/ftx/controllers/spot/index.js<|end_filename|>
import { get } from './get.js';
const spot = { get };
export { spot };
<|start_filename|>src/util/truncate.js<|end_filename|>
const MATCH_INTEGER = '-?\\d+';
function matchFloat(decimalPlaces) {
return `${MATCH_INTEGER}(?:\\.\\d{1,${decimalPlaces}})?`;
}
function composeRegEx(decimalPlaces) {
return new RegExp(
decimalPlaces == null ? MATCH_INTEGER : matchFloat(decimalPlaces)
);
}
function truncate(value, decimalPlaces) {
/**
* Use toFixed over toString to avoid scientific notation, with extra decimal
* places to prevent rounding.
*/
const stringValue = value.toFixed(decimalPlaces + 2);
const match = stringValue.match(composeRegEx(decimalPlaces));
if (match == null) {
return null;
}
return Number.parseFloat(match[0]);
}
export { truncate };
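// A minimal usage sketch, not part of the original file (values are truncated, not rounded):
// truncate(1.239, 2) === 1.23
// truncate(-0.5678, 3) === -0.567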
<|start_filename|>tests/bin/commands/trade/helpers/spawnTestChild.js<|end_filename|>
import { spawn } from 'child_process';
function spawnTestChild(command) {
return spawn(command, {
shell: true,
env: { ...process.env, NODE_ENV: 'test-child' },
});
}
export { spawnTestChild };
<|start_filename|>src/api/index.js<|end_filename|>
export { Ftx } from './ftx/index.js';
<|start_filename|>src/api/ftx/endpoints/spot-margin/createLendingOffer.js<|end_filename|>
import { request } from '../request.js';
async function createLendingOffer(options) {
return request({
...options,
rawEndpoint: 'spot_margin/offers',
method: 'post',
});
}
export { createLendingOffer };
<|start_filename|>src/mocks/ftx-api/orders/common/mockInvalidMarket.js<|end_filename|>
const MOCK_INVALID_MARKET = 'INVALID-MARKET';
export { MOCK_INVALID_MARKET };
<|start_filename|>src/util/compareHighToLow.js<|end_filename|>
function compareHighToLow(a, b) {
if (a === b) {
return 0;
}
if (a == null) {
return 1;
}
if (b == null) {
return -1;
}
return b - a;
}
export { compareHighToLow };
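// A minimal usage sketch, not part of the original file: sorting descending with
// nullish values pushed to the end.
// [3, null, 7, 1].sort(compareHighToLow) -> [7, 3, 1, null]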
<|start_filename|>src/api/ftx/endpoints/user-rewards/index.js<|end_filename|>
import { getUserRewards } from './getUserRewards.js';
const userRewards = {
getUserRewards,
};
export { userRewards };
<|start_filename|>src/config/package.js<|end_filename|>
import { createRequire } from 'module';
const require = createRequire(import.meta.url);
const PACKAGE = require('../../package.json');
export { PACKAGE };
<|start_filename|>src/commands/composeTableData.js<|end_filename|>
function composeTableData(data, composeEntry) {
return data.map((entry) => composeEntry(entry));
}
export { composeTableData };
<|start_filename|>src/mocks/ftx-api/orders/common/isValidReduceOnly.js<|end_filename|>
function isValidReduceOnly(reduceOnly) {
return typeof reduceOnly === 'boolean';
}
export { isValidReduceOnly };
<|start_filename|>src/api/ftx/endpoints/orders/index.js<|end_filename|>
import { cancelOrders } from './cancelOrders.js';
import { placeOrder } from './placeOrder.js';
import { placeTriggerOrder } from './placeTriggerOrder.js';
const orders = {
cancelOrders,
placeOrder,
placeTriggerOrder,
};
export { orders };
<|start_filename|>src/mocks/ftx-api/orders/common/isValidSide.js<|end_filename|>
function isValidSide(side) {
return ['buy', 'sell'].includes(side);
}
export { isValidSide };
<|start_filename|>src/mocks/ftx-api/orders/index.js<|end_filename|>
import { mockPlaceOrder } from './mockPlaceOrder.js';
import { mockPlaceTriggerOrder } from './mockPlaceTriggerOrder.js';
function mockOrders() {
mockPlaceOrder();
mockPlaceTriggerOrder();
}
export { mockOrders };
<|start_filename|>src/util/convertHourlyToYearly.js<|end_filename|>
const HOURS_PER_YEAR = 8760;
function convertHourlyToYearly(rate) {
return rate * HOURS_PER_YEAR;
}
export { convertHourlyToYearly };
<|start_filename|>src/api/ftx/controllers/futures/getPreviousFunding.js<|end_filename|>
import { getUnixTimestamp, SECONDS_PER_HOUR } from '../../../../util/index.js';
import { futures } from '../../endpoints/index.js';
function calculateStartTime() {
return getUnixTimestamp() - SECONDS_PER_HOUR;
}
function composeParameters() {
return { startTime: calculateStartTime() };
}
async function getPreviousFunding({ exchange }) {
const parameters = composeParameters();
return futures.getFundingRates({ exchange, parameters });
}
export { getPreviousFunding };
<|start_filename|>src/api/ftx/endpoints/orders/placeTriggerOrder.js<|end_filename|>
import { request } from '../request.js';
async function placeTriggerOrder(options) {
return request({
...options,
rawEndpoint: 'conditional_orders',
method: 'post',
enableExternalReferralProgram: true,
});
}
export { placeTriggerOrder };
<|start_filename|>src/mocks/index.js<|end_filename|>
export { MockFtxApi } from './ftx-api/index.js';
export { MockUserConfig } from './user-config/index.js';
<|start_filename|>src/api/ftx/controllers/allowValue.js<|end_filename|>
function allowValue(allowedValues, value) {
if (allowedValues == null) {
return true;
}
return allowedValues.includes(value);
}
export { allowValue };
<|start_filename|>src/api/ftx/endpoints/spot-margin/getLendingRates.js<|end_filename|>
import { request } from '../request.js';
async function getLendingRates(options) {
return request({
...options,
rawEndpoint: 'spot_margin/lending_rates',
method: 'get',
});
}
export { getLendingRates };
<|start_filename|>src/mocks/user-config/index.js<|end_filename|>
function create(defaults) {
// TODO: Clean up cloning, although it should be fine for this simple mock.
const config = JSON.parse(JSON.stringify(defaults));
return {
get(key) {
return config[key];
},
set(key, value) {
config[key] = value;
},
delete(key) {
delete config[key];
},
store: config,
};
}
const MockUserConfig = { create };
export { MockUserConfig };
<|start_filename|>bin/options/helpers/parseChoice.js<|end_filename|>
import { InvalidOptionArgumentError } from 'commander';
function findParsedChoice(choice, choices) {
const choiceEntry = choices.find((entry) => entry.options.includes(choice));
return choiceEntry?.parsed;
}
function parseChoice(choice, choices, errorMessage) {
const parsedChoice = findParsedChoice(choice, choices);
if (parsedChoice == null) {
throw new InvalidOptionArgumentError(errorMessage);
}
return parsedChoice;
}
export { parseChoice };
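// Illustrative usage sketch (not part of the original module); the choices array
// below is hypothetical but matches the shape findParsedChoice expects:
//   const choices = [{ options: ['h', 'hour'], parsed: 'hour' }];
//   parseChoice('h', choices, 'Invalid interval.') // -> 'hour'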
<|start_filename|>src/api/ftx/endpoints/orders/placeOrder.js<|end_filename|>
import { request } from '../request.js';
async function placeOrder(options) {
return request({
...options,
rawEndpoint: 'orders',
method: 'post',
enableExternalReferralProgram: true,
});
}
export { placeOrder };
<|start_filename|>bin/enableTestMode.js<|end_filename|>
import nock from 'nock';
import { MockFtxApi } from '../src/mocks/ftx-api/index.js';
function enableTestMode() {
nock.disableNetConnect();
MockFtxApi.create();
}
export { enableTestMode };
<|start_filename|>src/mocks/ftx-api/orders/common/index.js<|end_filename|>
export { MARKET_NOT_FOUND_ERROR } from './marketNotFoundError.js';
export { isValidExternalReferralProgram } from './isValidExternalReferralProgram.js';
export { isValidMarket } from './isValidMarket.js';
export { isValidReduceOnly } from './isValidReduceOnly.js';
export { isValidSide } from './isValidSide.js';
export { isValidSize } from './isValidSize.js';
<|start_filename|>src/api/ftx/controllers/lending-offers/index.js<|end_filename|>
import { create } from './create.js';
import { get } from './get.js';
import { stop } from './stop.js';
const lendingOffers = { create, get, stop };
export { lendingOffers };
<|start_filename|>src/api/ftx/endpoints/futures/getFutures.js<|end_filename|>
import { request } from '../request.js';
async function getFutures(options) {
return request({
...options,
rawEndpoint: 'futures',
method: 'get',
});
}
export { getFutures };
<|start_filename|>src/util/getUnixTimestamp.js<|end_filename|>
function getUnixTimestamp() {
return Math.floor(Date.now() / 1000);
}
export { getUnixTimestamp };
<|start_filename|>src/config/index.js<|end_filename|>
import { PACKAGE } from './package.js';
import { USER } from './user.js';
const CONFIG = {
PACKAGE,
USER,
EXTERNAL_REFERRAL_PROGRAM_NAME: 'FTX CLI',
};
export { CONFIG };
<|start_filename|>src/api/ftx/endpoints/futures/getFutureStats.js<|end_filename|>
import { request } from '../request.js';
async function getFutureStats(options) {
return request({
...options,
rawEndpoint: `futures/${options.pathParameters.name}/stats`,
method: 'get',
});
}
export { getFutureStats };
<|start_filename|>src/util/convertIsoToUnix.js<|end_filename|>
function convertIsoToUnix(timestamp) {
return Math.floor(new Date(timestamp).getTime() / 1000);
}
export { convertIsoToUnix };
<|start_filename|>src/util/convertDecimalToPercentage.js<|end_filename|>
function convertDecimalToPercentage(decimal) {
return decimal * 100;
}
export { convertDecimalToPercentage };
<|start_filename|>src/util/removeNullValues.js<|end_filename|>
function removeNullValues(object) {
return Object.fromEntries(
Object.entries(object).filter(([, value]) => value != null)
);
}
export { removeNullValues };
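// Illustrative usage sketch (not part of the original module): both null and
// undefined values are dropped by the `!= null` check.
//   removeNullValues({ a: 1, b: null, c: undefined }) // -> { a: 1 }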
<|start_filename|>bin/options/helpers/parseChoiceList.js<|end_filename|>
import { parseChoice } from './parseChoice.js';
function parseChoiceList(choiceList, choices, errorMessage) {
const choiceArray = choiceList.split(',');
const parsedChoices = [];
for (const choice of choiceArray) {
const parsedChoice = parseChoice(choice, choices, errorMessage);
if (!parsedChoices.includes(parsedChoice)) {
parsedChoices.push(parsedChoice);
}
}
return parsedChoices;
}
export { parseChoiceList };
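// Illustrative usage sketch (not part of the original module); choices shape as in
// parseChoice, and entries resolving to the same parsed value are de-duplicated:
//   parseChoiceList('h,hour,d', [
//     { options: ['h', 'hour'], parsed: 'hour' },
//     { options: ['d', 'day'], parsed: 'day' },
//   ], 'Invalid.') // -> ['hour', 'day']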
<|start_filename|>bin/options/index.js<|end_filename|>
import { COMMANDS } from './commands/index.js';
import { GLOBAL } from './global/index.js';
import { composeOption } from './composeOption.js';
const OPTIONS = {
COMMANDS,
GLOBAL,
};
export { OPTIONS, composeOption };
<|start_filename|>.markdownlint.json<|end_filename|>
{
"fenced-code-language": false,
"line-length": false,
"no-duplicate-heading": { "siblings_only": true },
"no-inline-html": { "allowed_elements": ["br", "sup"] }
}
<|start_filename|>src/api/ftx/index.js<|end_filename|>
import { Controllers } from './controllers/index.js';
const Ftx = { ...Controllers };
export { Ftx };
| dabaojian1992/ftx-cli |
<|start_filename|>public/css/style.css<|end_filename|>
body{font-family: 'Open Sans', sans-serif; font-size:14px; color:#777; background:#f2f4f8;}
/* Resets */
input[type='number'] {-moz-appearance:textfield;}
input[type=number]::-webkit-inner-spin-button,
input[type=number]::-webkit-outer-spin-button { -webkit-appearance: none; margin: 0;}
ul, ol {
margin: 0;
padding: 0;
list-style-position: inside;
list-style: none;
line-height: 2;
}
p{line-height:24px;}
/* General Classes and Utilities */
.clearBoth{ clear:both;}
a, a:hover, a:focus, a:active{color: #d23232; text-decoration: none; outline: none;}
.btn:focus, .btn.focus{background: none; outline: 0; }
strong{font-weight:700;}
.text-small{font-size:12px;}
.text-xs{font-size:10px;}
.asterix-red{color:#ff0000;}
.error{ border: 1px solid #f00!important;}
.popover{border-radius:0;}
.pointer{cursor:pointer;}
.padTopNone{ padding-top:0 !important;}
.form-control[readonly]{ background:transparent;}
.noborder{border:none !important;}
.noborderTop{border-top:none !important;}
.noborderBottom{border-bottom:none !important;}
.noborderRight{border-right:none !important;}
/* Margins */
.m5{ margin:5px;}
.mT5{margin-top:5px;}
.mTn5{margin-top:-5px;}
.mR20{margin-right:20px;}
/* Paddings */
.nopadding {padding: 0 !important;}
.nopaddingRight{padding-right:0 !important;}
/*Widths*/
.w33pcnt{width:33%;}
.w45pcnt{width:45%;}
.w50pcnt{width:50%;}
.w60pcnt{width:60%;}
.w70pcnt{width:70%;}
.w100pcnt{width:100%;}
/*Pre - Loader Styles*/
#loading{ background: #FFF;bottom: 0; left: 0; position: fixed; right: 0;
top: 0; z-index: 999999;}
.loading_img {background:url(../images/loading.gif) no-repeat center center;height: 200px; left: 50%; margin: -100px 0 0 -100px; position: absolute; top: 50%; width: 200px;}
/* back to top */
#back-top {
display:none;
position: fixed;
bottom: 10px;
right: 20px;
z-index: 100;
color: #FFF;
background: #d23232;
padding: 8px 11px;
font-size: 24px;
width: 46px;
height: 45px;
border-radius: 50%;
}
#back-top:hover{
opacity: 1;
-webkit-box-shadow: 0 10px 20px rgba(0,0,0,0.19),0 6px 6px rgba(0,0,0,0.23);
-moz-box-shadow: 0 10px 20px rgba(0,0,0,0.19),0 6px 6px rgba(0,0,0,0.23);
-o-box-shadow: 0 10px 20px rgba(0,0,0,0.19),0 6px 6px rgba(0,0,0,0.23);
box-shadow: 0 10px 20px rgba(0,0,0,0.19),0 6px 6px rgba(0,0,0,0.23);
-webkit-transition: 0.2s ease-in-out;
-moz-transition: 0.2s ease-in-out;
-ms-transition: 0.2s ease-in-out;
-o-transition: 0.2s ease-in-out;
transition: 0.2s ease-in-out;
}
.navbar{
background: #FFF;
box-shadow: 0px 0px 30px rgba(69, 101, 173, 0.1);
padding: 15px 0;
}
.nav > li {
margin: 0 15px;
}
.navbar-brand {padding: 8px;}
.navbar-brand i{font-size: 24px; color: #007025;}
.navbar-brand > img {
margin: -15px 0 0 0;
}
h2{
border-bottom: 1px solid rgba(0, 0, 0, 0.05);
padding-bottom: 20px;
margin-bottom: 50px;
position: relative;
z-index: 1;
color: #000000;
}
h2:after {
content: "";
background-color: #047bf8;
width: 42px;
height: 6px;
border-radius: 2px;
display: block;
position: absolute;
bottom: -3px;
left: 0px;
}
.card{
background: #FFF;
border-radius: 6px;
padding: 30px;
margin: 0 0 30px 0;
box-shadow: 0px 0px 30px rgba(69, 101, 173, 0.1);
border:1px solid transparent;
}
.card:hover{border:1px solid #047bf8; transition: all 0.3s;}
.card .line{margin: 0 0 15px 0; overflow: hidden; border-bottom: 1px solid #e0e0e0; padding: 0 0 15px 0;}
.card .line div{float:left; margin: 3px 0 0 0;}
.card .line span{float:right; width: 130px; text-align: right; white-space: nowrap;overflow: hidden; text-overflow: ellipsis; font-size: 18px; font-weight: 600;}
.card .stats{margin:0 0 30px 0;}
.card h3{text-align:center; margin: 0 0 15px 0; padding: 0 0 20px 0; font-size: 42px; border-bottom: 1px solid #e0e0e0; font-weight: 300;}
.btn-primary {
color: #fff;
background-color: #047bf8;
border-color: #047bf8;
border-radius: 42px;
padding: 8px 15px;
font-size: 18px;
text-transform: uppercase;
}
.btn-primary:hover, .btn-primary:active, .btn-primary:focus {
color: #fff;
background-color: #0362c6;
border-color: #035dbc;
}
.btn-success {
background-color: #90be2e;
border-color: #90be2e;
border-radius: 42px;
color:#FFFFFF !important;
min-width: 120px;
text-transform: uppercase;
padding: 10px 15px;
}
.btn-success:hover, .btn-success:active, .btn-success:focus {
background-color: #719524 !important;
border-color: #6b8d22 !important;
}
.btn-info {
background-color: #5bc0de;
border-color: #5bc0de;
border-radius: 42px;
color:#FFFFFF !important;
min-width: 120px;
text-transform: uppercase;
padding: 10px 15px;
}
.btn-info:hover, .btn-info:active, .btn-info:focus {
background-color: #31b0d5!important;
border-color: #2aabd2!important;
}
.form-control{height: 50px;}
.modal-header {color: #000; font-weight: 600;}
<|start_filename|>producer.js<|end_filename|>
/* @file producer.js
* @authors <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
*/
// Instances //
/* web3.js is a collection of libraries that allow you to interact with a
* local or remote ethereum node using HTTP, IPC or WebSocket.
*/
var Web3 = require('web3');
var web3 = new Web3();
var web3Admin = require('web3admin');
// Node.js web application framework
var express = require('express');
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var other_servers = require('socket.io-client'); // This is a client connecting to the SERVER 2 (MAIN SERVER)
var main_server = other_servers.connect('http://localhost:4000', {reconnect: true});
// Setup Serial Connection with Arduino Nano to control relay
// var arduino_serial_port = require("serialport");
// var serialport = new SerialPort('/dev/cu.usbmodem1421',{parser: SerialPort.parsers.Readline('\n')});
var SerialPort = require('serialport');
var Readline = SerialPort.parsers.Readline;
var arduino_serial_port = new SerialPort('/dev/cu.usbmodem1421');
var parser = new Readline('\n');
arduino_serial_port.pipe(parser);
// Serial Port Setup for Energy Meter Reading
var serialPort = require('serialport');
var meter_serial_port = new serialPort('/dev/cu.usbserial', {
baudRate: 115200,
parser: new serialPort.parsers.Readline('\n\r')
});
// Variables Declaration
var meter_reading_string = ""; var value_meter;
var producer_address; var consumer_address;
var ether_per_token = 0; var accepted_bid;
var accept_deal_flag = 0; var block_deal_flag = 1;
var energy_tokens;
var pending_tx_list = []
var energy_KWH = 0; var prev_energy_KWH = 0; var difference = 0;
var producer; var consumer;
// Smart Contract for generation of Virtual Energy Tokens and Automate transactions
var abi = [
{
"constant": false,
"inputs": [
{
"name": "_account",
"type": "address"
}
],
"name": "token_balance",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"name": "",
"type": "address"
}
],
"name": "balances",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"name": "_account",
"type": "address"
}
],
"name": "eth_balance",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "_account",
"type": "address"
},
{"name": "amount",
"type": "uint256"
}
],
"name": "send_eth",
"outputs": [],
"payable": true,
"stateMutability": "payable",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "_account",
"type": "address"
},
{"name": "amount",
"type": "uint256"
}
],
"name": "update_tokens",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
}
]
// Contract Address obtained after deploying contract from 'Remix Solidity Compiler'
// This is the token generation and ether sending contract
var contract_address = "0xbccc53572694ea920a4bf3070b4780a7892855a2" ;
// Contract Object Creation at Contract Address
var obj = web3.eth.contract(abi).at("0xbccc53572694ea920a4bf3070b4780a7892855a2");
// Web3 setup
web3.setProvider(new web3.providers.HttpProvider('http://localhost:8545'));
web3.eth.defaultAccount = web3.eth.accounts[0];
web3Admin.extend(web3);
// Energy Meter Serial Port
meter_serial_port.on('open', function()
{
console.log("Energy Meter Serial Port Open\n")
setInterval(function()
{
// Request to read energy consumption data from the meter
meter_serial_port.write(Buffer.from('SHOW=\r\n'), function(error)
{
// Search for 'KWH'(Power consumption reading) in the data returned by the meter
KWH_index = meter_reading_string.indexOf("KWH", 0);
// The required reading is the character at offset 8 from the start of the 'KWH' substring
value_meter = meter_reading_string.substring(KWH_index + 8, KWH_index + 9);
energy_KWH = 1 + Number(value_meter);
// Reset String
meter_reading_string = "";
// Check for writing error
if (error)
{
return console.log('Error on writing to Meter: ', error.message);
}
});
}, 5000)
// Read energy consumption data from the meter
meter_serial_port.on('data', function (data)
{
meter_reading_string = meter_reading_string + data.toString();
});
});
// Arduino Nano Serial Port
arduino_serial_port.on('open', function()
{
console.log("Arduino Serial Port Open\n")
main_server.on('connect', function(){ });
//Close Connection
main_server.on('close',function(data)
{
console.log("Closing Connection");
arduino_serial_port.write('2');
accept_deal_flag = 0;
block_deal_flag = 1;
});
// Accept Bid
// PoC - Active only for 1st Deal
main_server.on('accept_deal_0',function(data)
{
producer_address = data.producer_address;
consumer_address = data.consumer_address;
accepted_bid = data.bid;
// Toggle Relay Connection to stop producer from charging battery
if(consumer_address == web3.eth.accounts[0])
{
arduino_serial_port.write('1');
accept_deal_flag = 1 ;
}
});
//Accept request for Sharing Energy Tokens of Producers for Display on Marketplace
main_server.on('req_tokens_0', function(data)
{
if(data == web3.eth.accounts[4])
{
main_server.emit('display_tokens_0', energy_tokens);
}
});
main_server.on('req_tokens_1', function(data)
{
if(data == web3.eth.accounts[4])
{
main_server.emit('display_tokens_1', energy_tokens);
}
});
main_server.on('req_tokens_2', function(data)
{
if(data == web3.eth.accounts[4])
{
main_server.emit('display_tokens_2', energy_tokens);
}
});
main_server.on('req_tokens_3', function(data)
{
if(data == web3.eth.accounts[4])
{
main_server.emit('display_tokens_3', energy_tokens);
}
});
main_server.on('req_tokens_4', function(data)
{
if(data == web3.eth.accounts[4])
{
main_server.emit('display_tokens_4', energy_tokens);
}
});
main_server.on('req_tokens_5', function(data)
{
if(data == web3.eth.accounts[4])
{
main_server.emit('display_tokens_5', energy_tokens);
}
});
// Login to Personal Account using Passphrase
app.use(express.static('public'));
app.get('/', function(req, res)
{
res.sendfile('login_page.html');
io.once('connection', function (socket)
{
socket.on('check_passphrase', function (data)
{
// Unlock Ethereum Account [0] and send its boolean output
var unlock_result = web3.personal.unlockAccount(web3.eth.accounts[0], data, 100000);
socket.emit('unlock_ethereum_account_result', unlock_result);
});
});
});
//Wallet Functions
app.get('/enter_wallet', function(req, res)
{
res.sendfile('wallet.html');
io.on('connection', function (socket)
{
// Start Mining
socket.on('startmine', function(socket)
{
var Mine = web3.miner.start();
});
// Stop Mining
socket.on('stopmine', function(socket)
{
var StopMine = web3.miner.stop();
});
// Do a Basic Transaction
socket.on('basic_tx', function(data)
{
from_address = web3.eth.accounts[0];
to_address = data.add;
value = data.val;
var send = web3.eth.sendTransaction({from : from_address, to: to_address , value:value})
});
setInterval(function()
{
difference = energy_KWH - prev_energy_KWH;
// Get Account Balance
var balance = web3.eth.getBalance(web3.eth.accounts[4]);
socket.emit('pending_tx_list', {tx_1:pending_tx_list[0], tx_2:pending_tx_list[1], tx_3:pending_tx_list[2]});
socket.emit('energy_token_balance', {energy:energy_KWH, tok:energy_tokens, bal:balance});
// If increment in Energy supplied to battery, increase energy tokens
if(difference != 0)
{
obj.update_tokens(web3.eth.accounts[4], difference);
prev_energy_KWH = energy_KWH;
}
// Get list of pending transactions
pending_tx_list = web3.eth.getBlock("pending").transactions;
// Get Token balance from Smart Contract Object
energy_tokens = obj.token_balance.call(web3.eth.accounts[4]);
}, 1000);
});
});
// Check if a bid has been accepted
// If yes, then register the account addresses and initiate realtime
// transactions through the smart contract
setInterval(function()
{
if (accept_deal_flag == 1 && block_deal_flag == 1)
{
// Register account addresses and purchase value
producer = producer_address;
consumer = consumer_address;
ether_per_token = accepted_bid;
// Toggle flag
block_deal_flag = 2;
}
if (accept_deal_flag == 1)
{
obj.send_eth(producer, ether_per_token, {from:consumer, to:contract_address, value:ether_per_token});
}
}, 8000);
});
// HTTP SERVER
http.listen(3000, function()
{
console.log('listening on *:3000');
});
<|start_filename|>consumer.js<|end_filename|>
/* @file consumer.js
* @authors <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
*/
// Instances //
/* web3.js is a collection of libraries that allow you to interact with a
* local or remote ethereum node using HTTP, IPC or WebSocket.
*/
var Web3 = require('web3');
var web3 = new Web3();
var web3Admin = require('web3admin');
// Node.js web application framework
var express = require('express');
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var other_servers = require('socket.io-client'); // This is a client connecting to the SERVER 2 (MAIN SERVER)
// Replace the IP address with that of the machine running the main server
var main_server = other_servers.connect('http://192.168.43.50:4000', {reconnect: true});
// Setup Serial Connection with Arduino Nano to control relay
// var SerialPort = require("serialport");
// var serialport = new SerialPort("/dev/cu.usbmodem1411",{parser: SerialPort.parsers.readline('\n')});
var SerialPort = require('serialport');
var Readline = SerialPort.parsers.Readline;
var arduino_serial_port = new SerialPort('/dev/cu.wchusbserial1410');
var parser = new Readline('\n');
arduino_serial_port.pipe(parser);
// // Serial Port Setup for Energy Meter Reading
// var serialPort = require('serialport');
// var meter_serial_port = new serialPort('/dev/cu.usbserial', {
// baudRate: 115200,
// parser: new serialPort.parsers.Readline('\n\r')
// });
// Variables Declaration
var producer_address; var consumer_address;
var ether_per_token = 0; var accepted_bid;
var accept_deal_flag = 0; var block_deal_flag = 1;
var pending_tx_list = []
var producer; var consumer;
// Smart Contract for generation of Virtual Energy Tokens and Automate transactions
var abi = [
{
"constant": false,
"inputs": [
{
"name": "_account",
"type": "address"
}
],
"name": "token_balance",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"name": "",
"type": "address"
}
],
"name": "balances",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [
{
"name": "_account",
"type": "address"
}
],
"name": "eth_balance",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "_account",
"type": "address"
},
{"name": "amount",
"type": "uint256"
}
],
"name": "send_eth",
"outputs": [],
"payable": true,
"stateMutability": "payable",
"type": "function"
},
{
"constant": false,
"inputs": [
{
"name": "_account",
"type": "address"
},
{"name": "amount",
"type": "uint256"
}
],
"name": "update_tokens",
"outputs": [],
"payable": false,
"stateMutability": "nonpayable",
"type": "function"
}
]
// Contract Address obtained after deploying contract from 'Remix Solidity Compiler'
var contract_address = "0xbccc53572694ea920a4bf3070b4780a7892855a2";
// Contract Object Creation at Contract Address
var obj = web3.eth.contract(abi).at("0xbccc53572694ea920a4bf3070b4780a7892855a2");
// Web3 setup
web3.setProvider(new web3.providers.HttpProvider('http://localhost:8545'));
web3.eth.defaultAccount = web3.eth.accounts[0];
web3Admin.extend(web3);
// Arduino Nano Serial Port
arduino_serial_port.on('open', function()
{
console.log("Arduino Serial Port Open\n")
main_server.on('connect', function(){ });
//Close Connection
main_server.on('close', function(data)
{
console.log("Closing Connection");
arduino_serial_port.write('2');
accept_deal_flag = 0;
block_deal_flag = 1;
});
// Accept Bid
// PoC - Active only for 1st Deal
main_server.on('accept_deal_0', function(data)
{
producer_address = data.producer_address;
consumer_address = data.consumer_address;
accepted_bid = data.bid;
// Toggle Relay Connection to connect the consumer to the battery
if(consumer_address == web3.eth.accounts[0])
{
arduino_serial_port.write('1');
accept_deal_flag = 1 ;
}
});
// Login to Personal Account using Passphrase
app.use(express.static('public'));
app.get('/', function(req, res)
{
res.sendfile('login_page.html');
io.once('connection', function (socket)
{
socket.on('check_passphrase', function (data)
{
// Unlock Ethereum Account [0] and send its boolean output
var unlock_result = web3.personal.unlockAccount(web3.eth.accounts[0], data, 100000);
socket.emit('unlock_ethereum_account_result', unlock_result);
});
});
});
// Wallet Functions
app.get('/enter_wallet', function(req, res)
{
res.sendfile('wallet.html');
io.on('connection', function (socket)
{
// Start Mining
socket.on('startmine', function(socket)
{
var Mine = web3.miner.start();
});
// Stop Mining
socket.on('stopmine', function(socket)
{
var StopMine = web3.miner.stop();
});
// Do a Basic Transaction
socket.on('basic_tx', function(data)
{
from_address = web3.eth.accounts[0];
to_address = data.add;
value = data.val;
var send = web3.eth.sendTransaction({from : from_address, to: to_address , value:value})
});
setInterval(function()
{
// Get Account Balance
var balance = web3.eth.getBalance(web3.eth.accounts[4]);
socket.emit('pending_tx_list', {tx_1: pending_tx_list[0], tx_2: pending_tx_list[1], tx_3: pending_tx_list[2]});
socket.emit('energy_token_balance',{bal:balance});
// Get list of pending transactions
pending_tx_list = web3.eth.getBlock("pending").transactions;
}, 1000);
});
});
// Check if a bid has been accepted
// If yes, then register the account addresses and initiate realtime
// transactions through the smart contract
setInterval(function()
{
if (accept_deal_flag == 1 && block_deal_flag == 1)
{
// Register account addresses and purchase value
producer = producer_address;
consumer = consumer_address;
ether_per_token = accepted_bid;
// Toggle flag
block_deal_flag = 2;
}
if (accept_deal_flag == 1)
{
console.log(producer); // Print Producer account address
console.log(consumer); // Print Consumer account address
console.log(ether_per_token); // Print Bid Value
obj.send_eth(producer, ether_per_token, {from:consumer, to:contract_address, value:ether_per_token});
}
}, 8000);
});
// HTTP SERVER
http.listen(3000, function()
{
console.log('listening on *:3000');
});
<|start_filename|>public/js/script.js<|end_filename|>
jQuery.fn.exists = function(){return this.length>0;}
/////////////////////////////////////////////// Window load function //////////////////////////////////////////////////////
$(window).load(function(e) {
// will first fade out the loading animation
$(".loading_img").fadeOut();
// will fade out the whole DIV that covers the website.
$("#loading").delay(600).fadeOut("slow");
});
////////////////////////////////////////// Begin document ready function /////////////////////////////////////////
$(function(){
// end bracket for document ready function
});
//////////////////////////////////////////////////// End document ready function ///////////////////////////////////////////////////////
<|start_filename|>server.js<|end_filename|>
/* @file server.js
* @authors <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
* <NAME> [<EMAIL>]
*/
// Instances //
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var express = require('express');
//Variables
var consumer_address_list = ["", "", "", "", "", ""];
var producer_address_list = ["", "", "", "", "", ""];
var energy_token = ["", "", "", "", "", ""];
var asking_bid = [0, 0, 0, 0, 0, 0];
var seller_available_0 = 0, seller_available_1 = 0, seller_available_2 = 0;
var seller_available_3 = 0, seller_available_4 = 0, seller_available_5 = 0;
var buyer_available_0 = 0, buyer_available_1 = 0, buyer_available_2 = 0;
var buyer_available_3 = 0, buyer_available_4 = 0, buyer_available_5 = 0;
var Card_0; var Card_1; var Card_2; var Card_3; var Card_4; var Card_5;
app.use(express.static('public'));
//Energy Marketplace Functions
app.get('/', function(req, res)
{
res.sendfile('energy_marketplace.html');
io.once('connection', function (socket)
{
// Send request to producer server to get number of energy tokens in entered account address
// Adds corresponding producer address and asking_bid value to the list
// Sets the corresponding 'Seller Available' Flag to 1
socket.on('broadcast_seller_info', function (data)
{
if(buyer_available_0 == 0 && seller_available_0 == 0)
{
io.emit('req_tokens_0', data.address);
producer_address_list[0] = data.address;
asking_bid[0] = data.base;
seller_available_0 = 1;
}
else if(buyer_available_1 == 0 && seller_available_1 == 0)
{
io.emit('req_tokens_1', data.address);
producer_address_list[1] = data.address;
asking_bid[1] = data.base;
seller_available_1 = 1;
}
else if(buyer_available_2 == 0 && seller_available_2 == 0)
{
io.emit('req_tokens_2', data.address);
producer_address_list[2] = data.address;
asking_bid[2] = data.base;
seller_available_2 = 1;
}
else if(buyer_available_3 == 0 && seller_available_3 == 0)
{
io.emit('req_tokens_3', data.address);
producer_address_list[3] = data.address;
asking_bid[3] = data.base;
seller_available_3 = 1;
}
else if(buyer_available_4 == 0 && seller_available_4 == 0)
{
io.emit('req_tokens_4', data.address);
producer_address_list[4] = data.address;
asking_bid[4] = data.base;
seller_available_4 = 1;
}
else if(buyer_available_5 == 0 && seller_available_5 == 0)
{
io.emit('req_tokens_5', data.address);
producer_address_list[5] = data.address;
asking_bid[5] = data.base;
seller_available_5 = 1;
}
});
// Close Physical Connection with the Consumer through Arduino Serial Port
socket.on('close_connection', function (data)
{
io.emit('close', 2);
});
// Bidding Function for each seller
// Gets account data of each buyer
// Increments bid by 1 Ether on every call
// Sets the corresponding 'Buyer Available' Flag to 1
socket.on('bid_for_producer_0', function(data)
{
if(!(producer_address_list[0] == "" || data == null))
{
clearTimeout(Card_0);
asking_bid[0] = Number(asking_bid[0]) + 1;
consumer_address_list[0] = data;
buyer_available_0 = 1;
}
});
socket.on('bid_for_producer_1', function(data)
{
if(!(producer_address_list[1] == "" || data == null))
{
clearTimeout(Card_1);
asking_bid[1] = Number(asking_bid[1]) + 1;
consumer_address_list[1] = data;
buyer_available_1 = 1;
}
});
socket.on('bid_for_producer_2', function(data)
{
if(!(producer_address_list[2] == "" || data == null))
{
clearTimeout(Card_2);
asking_bid[2] = Number(asking_bid[2]) + 1;
consumer_address_list[2] = data;
buyer_available_2 = 1;
}
});
socket.on('bid_for_producer_3', function(data)
{
if(!(producer_address_list[3] == "" || data == null))
{
clearTimeout(Card_3);
asking_bid[3] = Number(asking_bid[3]) + 1;
consumer_address_list[3] = data;
buyer_available_3 = 1;
}
});
socket.on('bid_for_producer_4', function(data)
{
if(!(producer_address_list[4] == "" || data == null))
{
clearTimeout(Card_4);
asking_bid[4] = Number(asking_bid[4]) + 1;
consumer_address_list[4] = data;
buyer_available_4 = 1;
}
});
socket.on('bid_for_producer_5', function(data)
{
if(!(producer_address_list[5] == "" || data == null))
{
clearTimeout(Card_5);
asking_bid[5] = Number(asking_bid[5]) + 1;
consumer_address_list[5] = data;
buyer_available_5 = 1;
}
});
// Update Marketplace in a regular interval
setInterval(function()
{
io.emit('update_marketplace',
{
add0:producer_address_list[0], add1:producer_address_list[1], add2:producer_address_list[2], add3:producer_address_list[3], add4:producer_address_list[4], add5:producer_address_list[5],
bid0:asking_bid[0], bid1:asking_bid[1], bid2:asking_bid[2], bid3:asking_bid[3], bid4:asking_bid[4], bid5:asking_bid[5],
add6:consumer_address_list[0], add7:consumer_address_list[1], add8:consumer_address_list[2], add9:consumer_address_list[3], add10:consumer_address_list[4], add11:consumer_address_list[5],
tok0:energy_token[0], tok1:energy_token[1], tok2:energy_token[2], tok3:energy_token[3], tok4:energy_token[4], tok5:energy_token[5]
});
}, 1000);
});
// Accept a transaction after a timeout
// Reset all Flags to 0 and clear corresponding data from lists
// Emit accepted deal's transaction details through socket
setInterval(function()
{
if(buyer_available_0 == 1)
{
Card_0 = setTimeout(function()
{
io.emit('accept_deal_0', {producer_address:producer_address_list[0], bid:asking_bid[0], consumer_address:consumer_address_list[0]});
producer_address_list[0] = "";
consumer_address_list[0] = "";
asking_bid[0] = 0;
energy_token[0] = "";
buyer_available_0 = 0;
seller_available_0 = 0;
}, 5000);
}
if(buyer_available_1 == 1)
{
Card_1 = setTimeout(function()
{
io.emit('accept_deal_1', {producer_address:producer_address_list[1], bid:asking_bid[1], consumer_address:consumer_address_list[1]});
producer_address_list[1] = "";
consumer_address_list[1] = "";
asking_bid[1] = 0;
energy_token[1] = "";
buyer_available_1 = 0;
seller_available_1 = 0;
}, 5000);
}
if(buyer_available_2 == 1)
{
Card_2 = setTimeout(function()
{
io.emit('accept_deal_2', {producer_address:producer_address_list[2], bid:asking_bid[2], consumer_address:consumer_address_list[2]});
producer_address_list[2] = "";
consumer_address_list[2] = "";
asking_bid[2] = 0;
energy_token[2] = "";
buyer_available_2 = 0;
seller_available_2 = 0;
}, 5000);
}
if(buyer_available_3 == 1)
{
Card_3 = setTimeout(function()
{
io.emit('accept_deal_3', {producer_address:producer_address_list[3], bid:asking_bid[3], consumer_address:consumer_address_list[3]});
producer_address_list[3] = "";
consumer_address_list[3] = "";
asking_bid[3] = 0;
energy_token[3] = "";
buyer_available_3 = 0;
seller_available_3 = 0;
}, 5000);
}
if(buyer_available_4 == 1)
{
Card_4 = setTimeout(function()
{
io.emit('accept_deal_4', {producer_address:producer_address_list[4], bid:asking_bid[4], consumer_address:consumer_address_list[4]});
producer_address_list[4] = "";
consumer_address_list[4] = "";
asking_bid[4] = 0;
energy_token[4] = "";
buyer_available_4 = 0;
seller_available_4 = 0;
}, 5000);
}
if(buyer_available_5 == 1)
{
Card_5 = setTimeout(function()
{
io.emit('accept_deal_5', {producer_address:producer_address_list[5], bid:asking_bid[5], consumer_address:consumer_address_list[5]});
producer_address_list[5] = "";
consumer_address_list[5] = "";
asking_bid[5] = 0;
energy_token[5] = "";
buyer_available_5 = 0;
seller_available_5 = 0;
}, 5000);
}
}, 10000);
});
// Display Energy Tokens as per request from Producer Server
io.on('connection', function(socket)
{
socket.on('display_tokens_0', function(data)
{
energy_token[0] = data;
});
socket.on('display_tokens_1', function(data)
{
energy_token[1] = data;
});
socket.on('display_tokens_2', function(data)
{
energy_token[2] = data;
});
socket.on('display_tokens_3', function(data)
{
energy_token[3] = data;
});
socket.on('display_tokens_4', function(data)
{
energy_token[4] = data;
});
socket.on('display_tokens_5', function(data)
{
energy_token[5] = data;
});
});
http.listen(4000, function()
{
console.log('listening on *:4000');
});
| nordicenergy/energy-trading-blockchain |
<|start_filename|>src/utils.jl<|end_filename|>
"""
Like `filter()[1]`.
"""
function getfirst(f::Function, A)
for el in A
if f(el)
return el
end
end
end
"""
separate!(f::Function, B::T, A::T) where T
Like `separate`, but stores the result in the preallocated vector `B`.
"""
function separate!(f::Function, B::T, A::T) where T
nt, nf = 0, length(A)+1
@inbounds for a in A
if f(a)
B[nt+=1] = a
else
B[nf-=1] = a
end
end
fid = 1+nt
reverse!(@view B[fid:end])
return B, fid
end
"""
separate(f::Function, A::AbstractVector{T})
Returns an `array` and `index` where `array[1:index-1]` holds all the values of `A` for which `f` returns `true`, and `array[index:end]` holds those for which `f` returns `false`.
"""
separate(f::Function, A::AbstractVector) =
separate!(f, similar(A), A)
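# Illustrative usage sketch (not part of the original module):
#   separate(iseven, [1, 2, 3, 4]) == ([2, 4, 1, 3], 3)
# i.e. the values matching `f` occupy indices 1:2 and the remaining values start at index 3.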
<|start_filename|>benchmark/benchmarks.jl<|end_filename|>
using BenchmarkTools
const SUITE = BenchmarkGroup()
for file in sort(readdir(@__DIR__))
if startswith(file, "bench_") && endswith(file, ".jl")
SUITE[chop(file, head = length("bench_"), tail = length(".jl"))] =
include(file)
end
end
<|start_filename|>src/system.jl<|end_filename|>
update(::S, m::AbstractLedger) where {S<:System} = error("No update method implemented for $S")
requested_components(::System) = ()
const Stage = Pair{Symbol, Vector{System}}
Base.push!(s::Stage, sys) = push!(last(s), sys)
Base.insert!(s::Stage, i::Integer, sys) = insert!(last(s), i, sys)
function requested_components(stage::Stage)
comps = Type{<:ComponentData}[]
for s in last(stage)
for c in requested_components(s)
push!(comps, c)
end
end
return comps
end
function prepare(s::Stage, m::AbstractLedger)
for sys in last(s)
prepare(sys, m)
end
end
prepare(::System, ::AbstractLedger) = nothing
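# Illustrative usage sketch (the system types are hypothetical, mirroring the benchmark suite):
#   stage = Stage(:simulation, [Mover(), Rotator()])
#   push!(stage, Oscillator())
#   requested_components(stage)  # collects the components requested by each system in the stage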
<|start_filename|>benchmark/bench_component.jl<|end_filename|>
module BenchComponent
using BenchmarkTools
import Random
using Overseer
using Parameters
@component @with_kw struct Spatial
position::NTuple{3, Float64} = (1.0,1.0,1.0)
velocity::NTuple{3, Float64} = (1.0,1.0,1.0)
end
@component struct Spring
center::NTuple{3, Float64}
spring_constant::Float64
end
@component mutable struct Rotation
omega::Float64
center::NTuple{3, Float64}
axis::NTuple{3, Float64}
function Rotation()
new(1.0, (2.0,2.0,2.0), (2.0,2.0,2.0))
end
end
struct Oscillator <: System end
Overseer.requested_components(::Oscillator) = (Spatial, Spring)
function Overseer.update(::Oscillator, m::AbstractLedger)
spatial = m[Spatial]
spring = m[Spring]
# g = group(m, Spatial, Spring)
@inbounds for e in @entities_in(spatial && spring)
e_spat = spatial[e]
spr = spring[e]
v_prev = e_spat.velocity
new_v = v_prev .- (e_spat.position .- spr.center) .* spr.spring_constant
spatial[e] = Spatial(e_spat.position, new_v)
end
end
struct Rotator <: System end
Overseer.requested_components(::Rotator) = (Spatial, Rotation)
function Overseer.update(::Rotator, dio::AbstractLedger)
rotation = dio[Rotation]
spatial = dio[Spatial]
dt = 0.01
@inbounds for e in @entities_in(rotation && spatial)
e_rotation = rotation[e]
e_spatial = spatial[e]
n = e_rotation.axis
r = e_spatial.position .- e_rotation.center
theta = e_rotation.omega * dt
nnd = n .* sum(n .* r)
t = (r[2] * n[3] - r[3] * n[2], r[3]*n[1] - r[1] * n[3] , r[1] * n[2] - r[2] * n[1])
spatial[e] = Spatial(e_rotation.center .+ nnd .+ (r .- nnd) .* cos(theta) .+ t .* sin(theta), e_spatial.velocity)
end
end
struct Mover <: System end
Overseer.requested_components(::Mover) = (Spatial, )
function Overseer.update(::Mover, m::AbstractLedger)
dt = 0.01
spat = m[Spatial]
@inbounds for e in @entities_in(spat)
e_spat = spat[e]
spat[e] = Spatial(e_spat.position .+ e_spat.velocity.*dt, e_spat.velocity)
end
end
# Benchmark defs
const suite = BenchmarkGroup()
Random.seed!(1234)
function bench_insertion(c::Component, ids::Vector{Entity}, v)
for e in ids
c[e] = v
end
return c
end
function bench_access(c::Component, ids::Vector{Entity}, v)
for e in ids
v += c[e].position[1]
end
return v
end
function bench_access_inbounds(c::Component, ids::Vector{Entity}, v)
@inbounds for e in ids
v += c[e].position[1]
end
return v
end
function bench_delete(c::Component, ids::Vector{Entity})
delete!(c, ids)
return c
end
suite["basic"] = BenchmarkGroup()
const ids = Entity.(unique(rand(1:1000, 1000)))
suite["basic"]["insertion"] = @benchmarkable bench_insertion(c, $ids, $(Spatial((123.0, 1.2, 1.0), (0.4, 12.0, 234.9)))) setup=(c=Component{Spatial}()) evals=1
suite["basic"]["access"] = @benchmarkable bench_access(c, $ids, 0.0) setup=(c = Component{Spatial}(); bench_insertion(c, $ids, Spatial((123.0, 1.2, 1.0), (0.4, 12.0, 234.9))))
const c_full = Component{Spatial}()
bench_insertion(c_full, ids, Spatial((123.0, 1.2, 1.0), (0.4, 12.0, 234.9)))
suite["basic"]["access inbounds"] = @benchmarkable bench_access_inbounds(c, $ids, 0.0) setup=(c = deepcopy(c_full))
suite["basic"]["deletion"] = @benchmarkable bench_delete(c, $ids) setup=(c = deepcopy(c_full)) evals=1
suite["real life"] = BenchmarkGroup()
suite["real life"]["creation"] = BenchmarkGroup()
st = Stage(:simulation, [Oscillator(), Rotator(), Mover()])
suite["real life"]["creation"]["ledger"] = @benchmarkable m = Ledger($st)
function e_fill(m)
for i=1:1000
e1 = Entity(m,
Spatial((i, 1.0, 1.0), (0.0, 0.0, 0.0)),
Spring((1.0, 0.0, 0.0), 0.01))
e1 = Entity(m,
Spatial((i, 1.0, 1.0), (0.0, 0.0, 0.0)),
Spring((1.0, 0.0, 0.0), 0.01),
Rotation())
end
end
suite["real life"]["creation"]["filling entities"] = @benchmarkable e_fill(m) setup=(m = Ledger(st))
suite["real life"]["update"] = BenchmarkGroup()
suite["real life"]["update"]["old school empty"] = @benchmarkable update(m) setup=(m = Ledger(st))
suite["real life"]["update"]["old school full"] = @benchmarkable update(m) setup=(m = Ledger(st); e_fill(m))
function update_new(::Oscillator, m::AbstractLedger)
@inbounds for e in @entities_in(m, Spatial && Spring)
new_v = e.velocity .- (e.position .- e[Spring].center) .* e.spring_constant
e[Spatial] = Spatial(e.position, new_v)
end
end
function update_new(::Rotator, m::AbstractLedger)
dt = 0.01
@inbounds for e in @entities_in(m, Rotation && Spatial)
n = e.axis
r = e.position .- e.center
theta = e.omega * dt
nnd = n .* sum(n .* r)
t = (r[2] * n[3] - r[3] * n[2], r[3]*n[1] - r[1] * n[3] , r[1] * n[2] - r[2] * n[1])
e[Spatial] = Spatial(e.center .+ nnd .+ (r .- nnd) .* cos(theta) .+ t .* sin(theta), e.velocity)
end
end
function update_new(::Mover, m::AbstractLedger)
dt = 0.01
@inbounds for e in @entities_in(m, Spatial)
e[Spatial] = Spatial(e.position .+ e.velocity.*dt, e.velocity)
end
end
suite["real life"]["update"]["new school empty"] = @benchmarkable (update_new(Oscillator(), m);
update_new(Rotator(), m);
update_new(Mover(), m)) setup=(m = Ledger(st))
suite["real life"]["update"]["new school full"] = @benchmarkable (update_new(Oscillator(), m);
update_new(Rotator(), m);
update_new(Mover(), m)) setup=(m = Ledger(st); e_fill(m))
end
BenchComponent.suite
<|start_filename|>test/runtests.jl<|end_filename|>
using Test
using Overseer
@testset "Indices" begin include("test_indices.jl") end
@testset "Components" begin include("test_components.jl") end
@testset "Ledger" begin include("test_ledger.jl") end
<|start_filename|>benchmark/tune.json<|end_filename|>
[{"Julia":"1.6.2","BenchmarkTools":"0.7.0"},[["BenchmarkGroup",{"data":{"shared_component":["BenchmarkGroup",{"data":{"basic":["BenchmarkGroup",{"data":{"insertion":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":7,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"access inbounds":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":113,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"access":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":10,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"deletion":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":141,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}]},"tags":[]}],"real life":["BenchmarkGroup",{"data":{"update":["BenchmarkGroup",{"data":{"old school full":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"old school empty":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":950,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"new school full":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"new school empty":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":982,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}]},"tags":[]}],"creation":["BenchmarkGroup",{"data":{"filling entities":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"ledger":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":3,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}]},"tags":[]}]},"tags":[]}]},"tags":[]}],"component":["BenchmarkGroup",{"data":{"basic":["BenchmarkGroup",{"data":{"insertion":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":9,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"access inbounds":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":112,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"access":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":10,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"deletion":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":141,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}]},"tags":[]}],"real life":["BenchmarkGroup",{"data":{"update":["BenchmarkGroup",{"data":{"old school full":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"old school empty":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":937,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"new school full":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"new school empty":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":976,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}]},"tags":[]}],"creation":["BenchmarkGroup",{"data":{"filling 
entities":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":1,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}],"ledger":["Parameters",{"gctrial":true,"time_tolerance":0.05,"samples":10000,"evals":3,"gcsample":false,"seconds":5.0,"overhead":0.0,"memory_tolerance":0.01}]},"tags":[]}]},"tags":[]}]},"tags":[]}]},"tags":[]}]]] | louisponet/ECS.jl |
<|start_filename|>PreviewDot/ComInterop/RECT.cs<|end_filename|>
using System.Drawing;
using System.Runtime.InteropServices;
namespace PreviewDot.ComInterop
{
[StructLayout(LayoutKind.Sequential)]
internal struct RECT
{
public readonly int left;
public readonly int top;
public readonly int right;
public readonly int bottom;
public Rectangle ToRectangle() { return Rectangle.FromLTRB(left, top, right, bottom); }
}
}
<|start_filename|>PreviewDot/IPreviewGenerator.cs<|end_filename|>
using System.Drawing;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace PreviewDot
{
internal interface IPreviewGenerator
{
Task<GeneratePreviewResult> GeneratePreview(Stream drawingContent, FileDetail fileDetail, Size previewSize, CancellationToken token);
}
}
<|start_filename|>PreviewDot/copy_external.bat<|end_filename|>
@set working_dir=%~1
@set output_dir=%~2
@set package_dir=packages\Graphviz.2.38.0.2
@echo %working_dir%%package_dir% to %output_dir%external
@xcopy /S /C /I /H /R /Y "%working_dir%%package_dir%" "%output_dir%external"
@REM Remove some files we don't want to package.
@echo Removing superfluous files...
del "%output_dir%external\gd.zip"
del "%output_dir%external\Graphviz.2.38.0.2.nupkg"
del "%output_dir%external\New Text Document.txt"
del "%output_dir%external\Temp.rar"
@echo External files copied.
<|start_filename|>PreviewDot/PreviewControl.cs<|end_filename|>
using System;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Drawing.Imaging;
using System.IO;
using System.Windows.Forms;
using PreviewDot.ComInterop;
namespace PreviewDot
{
internal partial class PreviewControl : UserControl
{
private const double _maxZoom = 2d;
private const double _minZoom = 0.3d;
private readonly PreviewContext _context;
private readonly Image _originalPreview;
private double? _currentZoom;
private bool _ctrlPressed;
private bool _shouldPan;
private Point _panOrigin;
private Point _scrollOrigin;
public PreviewControl(Image preview, PreviewContext context)
{
if (preview == null)
throw new ArgumentNullException("preview");
if (context == null)
throw new ArgumentNullException("context");
_originalPreview = preview;
_context = context;
InitializeComponent();
picPreview.Image = _ResizePreviewImageToSize(preview, context.DrawingSize);
picPreview.Size = preview.Size;
itmZoomIn.Enabled = context.DrawingSize != null;
itmZoomOut.Enabled = context.DrawingSize != null;
_UpdateDrawingDetails();
}
private Image _ResizePreviewImageToZoom(Image preview, double zoom)
{
if (_context.DrawingSize == null)
return preview;
var drawingSize = _context.DrawingSize.Value;
var newSize = new Size((int)(drawingSize.Width * zoom), (int)(drawingSize.Height * zoom));
return _ResizePreviewImageToSize(preview, newSize);
}
private static Image _ResizePreviewImageToSize(Image preview, Size? drawingSize)
{
if (drawingSize == null)
return preview;
var newSize = drawingSize.Value;
var image = new Bitmap(newSize.Width, newSize.Height, PixelFormat.Format32bppArgb);
using (var graphics = Graphics.FromImage(image))
{
graphics.InterpolationMode = InterpolationMode.HighQualityBicubic;
graphics.SmoothingMode = SmoothingMode.HighQuality;
graphics.CompositingQuality = CompositingQuality.HighQuality;
graphics.DrawImage(preview, new Rectangle(Point.Empty, newSize));
}
return image;
}
// ReSharper disable InconsistentNaming
private void itmPrint_Click(object sender, EventArgs e)
// ReSharper restore InconsistentNaming
{
string tempFile = null;
try
{
tempFile = _CreateTempFile();
_originalPreview.Save(tempFile, ImageFormat.Png);
Wia.Print(this, tempFile);
}
catch (Exception exc)
{
if (ParentForm == null)
throw;
var parentForm = ParentForm; //we have to store a reference to the ParentForm as it will be removed when this control is removed from it (by Controls.Clear())
parentForm.Controls.Clear();
parentForm.Controls.Add(new ErrorControl(exc.ToString())
{
Dock = DockStyle.Fill
});
}
finally
{
if (!string.IsNullOrEmpty(tempFile))
_DeleteTempFile(tempFile);
}
}
private static string _CreateTempFile()
{
try
{
var path = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString() + ".png");
File.WriteAllBytes(path, new byte[0]);
return path;
}
catch (Exception exc)
{
throw new IOException("Could not create temporary file for printing - " + exc.Message);
}
}
private static void _DeleteTempFile(string filePath)
{
try
{
if (File.Exists(filePath))
File.Delete(filePath);
}
// ReSharper disable EmptyGeneralCatchClause
catch { }
// ReSharper restore EmptyGeneralCatchClause
}
// ReSharper disable InconsistentNaming
private void itmFitImage_Click(object sender, EventArgs e)
// ReSharper restore InconsistentNaming
{
if (_currentZoom != null)
picPreview.Image = _ResizePreviewImageToSize(_originalPreview, _context.DrawingSize);
itmFitImage.Checked = !itmFitImage.Checked;
pnlScroller.AutoScrollMinSize = itmFitImage.Checked
? Size.Empty
: picPreview.Image.Size;
pnlScroller.AutoScroll = !itmFitImage.Checked;
picPreview.SizeMode = itmFitImage.Checked
? PictureBoxSizeMode.Zoom
: PictureBoxSizeMode.CenterImage;
_currentZoom = null;
picPreview_MouseUp(null, null);
_UpdateDrawingDetails();
}
// ReSharper disable InconsistentNaming
private void itmZoomIn_Click(object sender, EventArgs e)
// ReSharper restore InconsistentNaming
{
_UpdateZoom(0.1d);
}
// ReSharper disable InconsistentNaming
private void itmZoomOut_Click(object sender, EventArgs e)
// ReSharper restore InconsistentNaming
{
_UpdateZoom(-0.1d);
}
private double _CalculateZoomStart()
{
var zoom = _CalculateZoom();
var roundedZoom = Math.Floor(zoom * 10) / 10;
return roundedZoom;
}
private void _UpdateZoom(double step)
{
if (_currentZoom.HasValue)
_currentZoom = _currentZoom.Value + step;
else
{
_currentZoom = _CalculateZoomStart();
if (_currentZoom.Value == _CalculateZoom())
_currentZoom = _currentZoom.Value + step;
}
_currentZoom = Math.Min(Math.Max(_currentZoom.Value, _minZoom), _maxZoom);
picPreview.Image = _ResizePreviewImageToZoom(_originalPreview, _currentZoom.Value);
itmFitImage.Checked = false;
pnlScroller.AutoScrollMinSize = picPreview.Image.Size;
pnlScroller.AutoScroll = true;
picPreview.SizeMode = PictureBoxSizeMode.CenterImage;
_UpdateDrawingDetails();
}
// ReSharper disable InconsistentNaming
private void PreviewControl_KeyDown(object sender, KeyEventArgs e)
// ReSharper restore InconsistentNaming
{
if (e.Control)
_ctrlPressed = true;
}
// ReSharper disable InconsistentNaming
private void PreviewControl_KeyUp(object sender, KeyEventArgs e)
// ReSharper restore InconsistentNaming
{
if (e.KeyCode == Keys.Add && _ctrlPressed)
_UpdateZoom(0.1d);
if (e.KeyCode == Keys.Subtract && _ctrlPressed)
_UpdateZoom(-0.1d);
if (e.Control)
_ctrlPressed = false;
}
// ReSharper disable InconsistentNaming
private void PreviewControl_Scroll(object sender, ScrollEventArgs e)
// ReSharper restore InconsistentNaming
{
if (!_ctrlPressed)
return;
double? zoomAdjustment = null;
switch (e.Type)
{
case ScrollEventType.LargeIncrement:
zoomAdjustment = 0.2d;
break;
case ScrollEventType.LargeDecrement:
zoomAdjustment = -0.2d;
break;
case ScrollEventType.SmallIncrement:
zoomAdjustment = 0.1d;
break;
case ScrollEventType.SmallDecrement:
zoomAdjustment = -0.1d;
break;
}
if (zoomAdjustment == null)
return;
_UpdateZoom(zoomAdjustment.Value);
}
private void _UpdateDrawingDetails()
{
var zoomPercentage = _currentZoom ?? _CalculateZoom();
var zoom = string.Format(" (x{0:n0}%)", zoomPercentage * 100);
var size = _context.DrawingSize ?? picPreview.Image.Size;
itmDrawingDetails.Text = string.Format(
"{0} x {1}{2}",
size.Width,
size.Height,
zoom);
}
private double _CalculateZoom()
{
// Guard against a missing drawing size (zooming is disabled in that case) to avoid a NullReferenceException.
if (_context.DrawingSize == null)
return 1d;
var originalWidth = (double)_context.DrawingSize.Value.Width;
if (originalWidth == 0)
return 0;
var displayWidth = (double)picPreview.Width;
return Math.Round(displayWidth / originalWidth, 2);
}
// ReSharper disable InconsistentNaming
private void picPreview_MouseDown(object sender, MouseEventArgs e)
// ReSharper restore InconsistentNaming
{
if (!pnlScroller.AutoScroll || e.Button != MouseButtons.Left)
return;
picPreview.Cursor = Cursors.SizeAll;
_shouldPan = true;
_panOrigin = new Point(
e.Location.X + pnlScroller.AutoScrollPosition.X,
e.Location.Y + pnlScroller.AutoScrollPosition.Y);
_scrollOrigin = pnlScroller.AutoScrollPosition;
}
// ReSharper disable InconsistentNaming
private void picPreview_MouseUp(object sender, MouseEventArgs e)
// ReSharper restore InconsistentNaming
{
picPreview.Cursor = Cursors.Default;
_shouldPan = false;
_panOrigin = Point.Empty;
_scrollOrigin = Point.Empty;
}
// ReSharper disable InconsistentNaming
private void picPreview_MouseMove(object sender, MouseEventArgs e)
// ReSharper restore InconsistentNaming
{
if (!_shouldPan)
return;
var currentLocation = new Point(
e.Location.X + pnlScroller.AutoScrollPosition.X,
e.Location.Y + pnlScroller.AutoScrollPosition.Y);
var movement = new Point(
currentLocation.X - _panOrigin.X,
currentLocation.Y - _panOrigin.Y);
var newScroll = new Point(
0 - _scrollOrigin.X - movement.X,
0 - _scrollOrigin.Y - movement.Y);
pnlScroller.AutoScrollPosition = newScroll;
}
// ReSharper disable InconsistentNaming
private void mnuContext_Opening(object sender, System.ComponentModel.CancelEventArgs e)
// ReSharper restore InconsistentNaming
{
_UpdateDrawingDetails();
var zoomStart = _CalculateZoom();
itmZoomIn.Enabled = zoomStart < _maxZoom;
itmZoomOut.Enabled = zoomStart > _minZoom;
}
private void itmCopy_Click(object sender, EventArgs e)
{
Clipboard.SetImage(_originalPreview);
}
}
}
<|start_filename|>PreviewDot/GeneratePreviewResult.cs<|end_filename|>
using System.IO;
namespace PreviewDot
{
public class GeneratePreviewResult
{
public GeneratePreviewResult(Stream imageData)
{
ImageData = imageData;
Success = true;
}
public GeneratePreviewResult(string errorMessage)
{
ErrorMessage = errorMessage;
Success = false;
}
public string ErrorMessage { get; }
public Stream ImageData { get; }
public bool Success { get; }
}
}
<|start_filename|>PreviewDot.UnitTests/IntegrationTests.cs<|end_filename|>
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using NUnit.Framework;
namespace PreviewDot.UnitTests
{
[TestFixture]
public class IntegrationTests
{
[TestCase("Sample file.gv")]
public async Task ShouldBeAbleToGenerateAPreview(string fileName)
{
var settings = new PreviewSettings
{
RenderingFormat = ImageFormat.Png,
UpScaleForPrint = 4
};
using (var fileStream = File.OpenRead(@"..\..\..\" + fileName))
{
var drawing = new Drawing(fileStream, new FileDetail("file", DateTime.MinValue));
var previewGeneratorFactory = new PreviewGeneratorFactory(settings);
var generator = previewGeneratorFactory.Create();
var preview = await drawing.GeneratePreview(generator, new Size(100, 100), CancellationToken.None);
Assert.That(preview.ImageData.Length, Is.GreaterThan(0));
Assert.That(
() => Image.FromStream(preview.ImageData),
Throws.Nothing);
}
}
}
}
<|start_filename|>PreviewDot/LoadingControl.cs<|end_filename|>
using System.Windows.Forms;
namespace PreviewDot
{
internal partial class LoadingControl : UserControl
{
public LoadingControl()
{
InitializeComponent();
}
}
}
<|start_filename|>PreviewDot/PreviewHandlerController.cs<|end_filename|>
using System;
using System.Diagnostics;
using System.IO;
using System.Runtime.InteropServices;
using System.Runtime.InteropServices.ComTypes;
using System.Windows.Forms;
using PreviewDot.ComInterop;
using STATSTG = System.Runtime.InteropServices.ComTypes.STATSTG;
namespace PreviewDot
{
[ProgId("PreviewDot.PreviewHandlerController")]
[Guid(Installer.ControllerId)]
[ClassInterface(ClassInterfaceType.None)]
[ComVisible(true)]
public class PreviewHandlerController : IPreviewHandler, IOleWindow, IObjectWithSite, IInitializeWithStream
{
private readonly PreviewContext _context;
private readonly PreviewHandlerForm _previewForm;
private IntPtr _previewWindowHandle;
private Stream _previewFileStream = Stream.Null;
private IPreviewHandlerFrame _frame;
private FileDetail _fileDetail;
public PreviewHandlerController()
{
try
{
Logging.InstallListeners();
_context = new PreviewContext();
var previewGeneratorFactory = new PreviewGeneratorFactory(_context.Settings);
var generator = previewGeneratorFactory.Create();
_previewForm = new PreviewHandlerForm(_context, generator);
_previewForm.Handle.GetHashCode(); //initialise the form (forces handle creation)
}
catch (Exception exc)
{
Trace.TraceError("PreviewHandlerController.ctor: {0}", exc);
}
}
void IInitializeWithStream.Initialize(IStream pstream, uint grfMode)
{
try
{
if (_previewForm == null)
return;
_previewForm.Reset();
_fileDetail = _GetPreviewFileDetail(pstream);
_previewFileStream = pstream.ToStream().ToMemoryStream();
Marshal.ReleaseComObject(pstream);
}
catch (Exception exc)
{
Trace.TraceError("PreviewHandlerController.Initialize: {0}", exc);
}
}
private static FileDetail _GetPreviewFileDetail(IStream pstream)
{
STATSTG stats;
pstream.Stat(out stats, 0);
return new FileDetail(
stats.pwcsName,
DateTime.FromFileTime(((long)stats.mtime.dwHighDateTime << 32) | (uint)stats.mtime.dwLowDateTime)); //combine both halves of the FILETIME
}
void IPreviewHandler.SetWindow(IntPtr hwnd, ref RECT rect)
{
try
{
if (_previewForm == null)
return;
_previewForm.Invoke(new MethodInvoker(() => _previewForm.Show()));
_previewWindowHandle = hwnd;
_context.OnViewPortChanged(rect.ToRectangle());
WinApi.SetParent(_previewForm.Handle, _previewWindowHandle);
}
catch (Exception exc)
{
Trace.TraceError("PreviewHandlerController.SetWindow: {0}", exc);
}
}
void IPreviewHandler.SetRect(ref RECT rect)
{
try
{
if (_previewForm == null)
return;
_previewForm.Invoke(new MethodInvoker(() => _previewForm.Show()));
_context.OnViewPortChanged(rect.ToRectangle());
WinApi.SetParent(_previewForm.Handle, _previewWindowHandle); //is this required? - if not then remove _previewWindowHandle?
}
catch (Exception exc)
{
Trace.TraceError("PreviewHandlerController.SetRect: {0}", exc);
}
}
public void DoPreview()
{
try
{
if (_previewForm == null)
return;
_previewForm.Invoke(new MethodInvoker(() => _previewForm.Show()));
if (_previewFileStream != Stream.Null)
{
_context.OnPreviewRequired(_previewFileStream, _fileDetail);
WinApi.SetParent(_previewForm.Handle, _previewWindowHandle); //is this required? - if not then remove _previewWindowHandle
}
else
{
Trace.TraceError("No File stream set");
}
}
catch (Exception exc)
{
Trace.TraceError("PreviewHandlerController.DoPreview: {0}", exc);
}
finally
{
if (_previewFileStream != Stream.Null)
{
_previewFileStream.Dispose();
}
}
}
public void Unload()
{
_previewForm?.Invoke(new MethodInvoker(() => _previewForm.Reset()));
}
public void SetFocus()
{
_previewForm?.Invoke(new MethodInvoker(() => _previewForm.Focus()));
}
public void QueryFocus(out IntPtr phwnd)
{
var focusResult = IntPtr.Zero;
_previewForm?.Invoke(new MethodInvoker(() => focusResult = WinApi.GetFocus()));
phwnd = focusResult;
}
uint IPreviewHandler.TranslateAccelerator(ref MSG pmsg)
{
if (_previewForm != null && _frame != null)
{
var msg = new Message
{
HWnd = pmsg.hwnd,
LParam = pmsg.lParam,
Msg = pmsg.message,
WParam = pmsg.wParam
};
if (_previewForm.PreProcessMessage(ref msg))
return _frame.TranslateAccelerator(ref pmsg);
}
return WinApi.S_FALSE;
}
public void GetWindow(out IntPtr phwnd)
{
phwnd = _previewForm?.Handle ?? IntPtr.Zero;
}
public void ContextSensitiveHelp(bool fEnterMode)
{
//not implemented
}
public void SetSite(object pUnkSite)
{
_frame = pUnkSite as IPreviewHandlerFrame;
}
public void GetSite(ref Guid riid, out object ppvSite)
{
ppvSite = _frame;
}
[ComRegisterFunction]
public static void Register(Type type)
{
if (type != typeof(PreviewHandlerController))
return;
Installer.RegisterPreviewHandler("DOT format diagram previewer", type);
}
[ComUnregisterFunction]
public static void Unregister(Type type)
{
if (type != typeof(PreviewHandlerController))
return;
Installer.UnregisterPreviewHandler(type);
}
}
}
<|start_filename|>PreviewDot/PreviewContext.cs<|end_filename|>
using System;
using System.Drawing;
using System.IO;
using System.Threading;
namespace PreviewDot
{
internal class PreviewContext
{
public event EventHandler ViewPortChanged;
public event EventHandler PreviewRequired;
public Rectangle ViewPort { get; private set; }
public bool DisplayPreview { get; private set; }
public Stream FileStream { get; private set; }
public PreviewSettings Settings { get; }
public CancellationTokenSource TokenSource { get; private set; }
public Size? DrawingSize { get; set; }
public FileDetail FileDetail { get; private set; }
public PreviewContext()
{
TokenSource = new CancellationTokenSource();
Settings = new PreviewSettings();
}
public void OnViewPortChanged(Rectangle newSize)
{
ViewPort = newSize;
ViewPortChanged?.Invoke(this, EventArgs.Empty);
}
public void OnPreviewRequired(Stream stream, FileDetail fileDetail)
{
if (stream == null)
throw new ArgumentNullException("stream");
if (fileDetail == null)
throw new ArgumentNullException("fileDetail");
if (!stream.CanRead)
throw new ArgumentException("Stream must be readable", "stream");
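//cancel any preview that is still being generated before starting a new one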
TokenSource.Cancel();
TokenSource = new CancellationTokenSource();
FileStream = stream;
FileDetail = fileDetail;
DisplayPreview = true;
PreviewRequired?.Invoke(this, EventArgs.Empty);
}
public Size GetPreviewSize()
{
return _IncreaseSizeForPrint(DrawingSize) ?? Size.Empty;
}
private Size? _IncreaseSizeForPrint(Size? drawingSize)
{
if (drawingSize == null)
return null;
var size = drawingSize.Value;
return new Size(size.Width * Settings.UpScaleForPrint, size.Height * Settings.UpScaleForPrint);
}
public void RecalculateDrawingSize(Size upscaledPreviewSize)
{
if (Settings.UpScaleForPrint <= 0)
throw new InvalidOperationException("Settings.UpScaleForPrint must be a positive number");
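//the preview is rendered at UpScaleForPrint times the drawing size (for print quality),
//so divide back down to recover the size of the drawing itself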
var actualSize = new Size(
upscaledPreviewSize.Width / Settings.UpScaleForPrint,
upscaledPreviewSize.Height / Settings.UpScaleForPrint);
var previousDrawingSize = DrawingSize;
DrawingSize = actualSize;
if (previousDrawingSize == null || previousDrawingSize.Value.Width == 0 || previousDrawingSize.Value.Height == 0)
return;
//work out the actual scale of the preview compared to the requested size
var scale = new SizeF(
((float)actualSize.Width / previousDrawingSize.Value.Width) * Settings.UpScaleForPrint,
((float)actualSize.Height / previousDrawingSize.Value.Height) * Settings.UpScaleForPrint);
var mostAppropriateScale = _GetMostAppropriateScale(scale);
if (mostAppropriateScale == 0)
mostAppropriateScale = 1;
//reset the drawing size to that of the preview
DrawingSize = new Size(
upscaledPreviewSize.Width / mostAppropriateScale,
upscaledPreviewSize.Height / mostAppropriateScale);
}
private int _GetMostAppropriateScale(SizeF scale)
{
var widthScale = Math.Abs(Settings.UpScaleForPrint - scale.Width);
var heightScale = Math.Abs(Settings.UpScaleForPrint - scale.Height);
if (widthScale < heightScale)
return (int)scale.Height;
return (int)scale.Width;
}
}
}
<|start_filename|>PreviewDot/Drawing.cs<|end_filename|>
using System;
using System.Drawing;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace PreviewDot
{
internal class Drawing
{
private readonly Stream _drawingContent;
private readonly FileDetail _fileDetail;
public Drawing(Stream drawingContent, FileDetail fileDetail)
{
if (drawingContent == null)
throw new ArgumentNullException("drawingContent");
if (fileDetail == null)
throw new ArgumentNullException("fileDetail");
_drawingContent = drawingContent;
_fileDetail = fileDetail;
}
public async Task<GeneratePreviewResult> GeneratePreview(IPreviewGenerator generator, Size previewSize, CancellationToken token)
{
if (generator == null)
throw new ArgumentNullException("generator");
if (previewSize.Width <= 0 || previewSize.Height <= 0)
throw new ArgumentException("Preview must have a size");
return await generator.GeneratePreview(_drawingContent, _fileDetail, previewSize, token);
}
}
}
<|start_filename|>PreviewDot/ComInterop/IOleWindow.cs<|end_filename|>
using System;
using System.Runtime.InteropServices;
namespace PreviewDot.ComInterop
{
[ComImport]
[Guid("00000114-0000-0000-C000-000000000046")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
public interface IOleWindow
{
void GetWindow(out IntPtr phwnd);
void ContextSensitiveHelp([MarshalAs(UnmanagedType.Bool)] bool fEnterMode);
}
}
<|start_filename|>PreviewDot/ErrorControl.cs<|end_filename|>
using System.Windows.Forms;
namespace PreviewDot
{
internal partial class ErrorControl : UserControl
{
public ErrorControl(string message)
{
InitializeComponent();
txtMessage.Text = message;
txtMessage.Text += "\r\n\r\nLog:\r\n" + Logging.ReadLog();
}
}
}
<|start_filename|>PreviewDot/LoadingControl.Designer.cs<|end_filename|>
namespace PreviewDot
{
partial class LoadingControl
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.picLoading = new System.Windows.Forms.PictureBox();
this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
this.lblPreviewIo = new System.Windows.Forms.Label();
((System.ComponentModel.ISupportInitialize)(this.picLoading)).BeginInit();
this.tableLayoutPanel1.SuspendLayout();
this.SuspendLayout();
//
// picLoading
//
this.picLoading.Dock = System.Windows.Forms.DockStyle.Bottom;
this.picLoading.Image = global::PreviewDot.Properties.Resources.ajax_loader;
this.picLoading.Location = new System.Drawing.Point(0, 41);
this.picLoading.Margin = new System.Windows.Forms.Padding(0);
this.picLoading.Name = "picLoading";
this.picLoading.Size = new System.Drawing.Size(150, 34);
this.picLoading.SizeMode = System.Windows.Forms.PictureBoxSizeMode.CenterImage;
this.picLoading.TabIndex = 1;
this.picLoading.TabStop = false;
//
// tableLayoutPanel1
//
this.tableLayoutPanel1.ColumnCount = 1;
this.tableLayoutPanel1.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 50F));
this.tableLayoutPanel1.Controls.Add(this.picLoading, 0, 0);
this.tableLayoutPanel1.Controls.Add(this.lblPreviewIo, 0, 1);
this.tableLayoutPanel1.Dock = System.Windows.Forms.DockStyle.Fill;
this.tableLayoutPanel1.Location = new System.Drawing.Point(0, 0);
this.tableLayoutPanel1.Margin = new System.Windows.Forms.Padding(0);
this.tableLayoutPanel1.Name = "tableLayoutPanel1";
this.tableLayoutPanel1.RowCount = 2;
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 50F));
this.tableLayoutPanel1.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 50F));
this.tableLayoutPanel1.Size = new System.Drawing.Size(150, 150);
this.tableLayoutPanel1.TabIndex = 2;
//
// lblPreviewIo
//
this.lblPreviewIo.Dock = System.Windows.Forms.DockStyle.Top;
this.lblPreviewIo.Location = new System.Drawing.Point(3, 75);
this.lblPreviewIo.Name = "lblPreviewIo";
this.lblPreviewIo.Size = new System.Drawing.Size(144, 23);
this.lblPreviewIo.TabIndex = 2;
this.lblPreviewIo.Text = "Preview.dot, Loading Preview...";
this.lblPreviewIo.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
//
// LoadingControl
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.Controls.Add(this.tableLayoutPanel1);
this.Name = "LoadingControl";
((System.ComponentModel.ISupportInitialize)(this.picLoading)).EndInit();
this.tableLayoutPanel1.ResumeLayout(false);
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.PictureBox picLoading;
private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
private System.Windows.Forms.Label lblPreviewIo;
}
}
<|start_filename|>PreviewDot/StreamHelper.cs<|end_filename|>
using System.IO;
namespace PreviewDot
{
internal class StreamHelper
{
public Stream ExcludeBomFromStream(Stream stream)
{
var memoryStream = new MemoryStream();
stream.Seek(0, SeekOrigin.Begin);
stream.CopyTo(memoryStream);
memoryStream.Seek(0, SeekOrigin.Begin);
var buffer = new byte[3];
var bytesRead = memoryStream.Read(buffer, 0, 3);
if (bytesRead != 3 || buffer[0] != 0xef || buffer[1] != 0xbb || buffer[2] != 0xbf)
{
memoryStream.Seek(0, SeekOrigin.Begin); //there is no BOM, reset to the start so the start data isn't excluded
return memoryStream;
}
var cleanStream = new MemoryStream();
memoryStream.CopyTo(cleanStream); //we've read the BOM so it'll not be copied
cleanStream.Seek(0, SeekOrigin.Begin); //reset the new stream back to the start, no BOM in this one.
return cleanStream;
}
}
}
<|start_filename|>PreviewDot/StreamExtensions.cs<|end_filename|>
using System;
using System.IO;
using System.Runtime.InteropServices.ComTypes;
namespace PreviewDot
{
internal static class StreamExtensions
{
public static Stream ToStream(this IStream stream)
{
if (stream == null)
throw new ArgumentNullException("stream");
return new _ReadOnlyStream(stream);
}
public static MemoryStream ToMemoryStream(this Stream stream)
{
if (stream == null)
throw new ArgumentNullException("stream");
if (!stream.CanRead)
throw new ArgumentException("Stream must be readable", "stream");
var buffer = new byte[stream.Length];
stream.Read(buffer, 0, buffer.Length);
return new MemoryStream(buffer, false);
}
public static string ReadAsString(this Stream stream)
{
if (stream == null)
throw new ArgumentNullException("stream");
if (!stream.CanRead)
throw new ArgumentException("Stream must be readable", "stream");
if (!stream.CanSeek)
throw new InvalidOperationException("This operation would corrupt the stream");
stream.Position = 0;
try
{
var reader = new StreamReader(stream);
return reader.ReadToEnd();
}
finally
{
stream.Position = 0;
}
}
private class _ReadOnlyStream : Stream
{
// ReSharper disable InconsistentNaming
private const int STATFLAG_NONAME = 1;
// ReSharper restore InconsistentNaming
private IStream _stream;
public _ReadOnlyStream(IStream stream)
{
_stream = stream;
}
#region stream write members
public override void Flush()
{
throw new NotSupportedException();
}
public override void Write(byte[] buffer, int offset, int count)
{
throw new NotSupportedException();
}
public override void SetLength(long value)
{
throw new NotSupportedException();
}
public override bool CanWrite { get; } = false;
#endregion
protected override void Dispose(bool disposing)
{
_stream = null;
}
public override unsafe long Seek(long offset, SeekOrigin origin)
{
if (_stream == null)
throw new ObjectDisposedException("Stream");
long pos = 0;
var posPtr = new IntPtr(&pos);
_stream.Seek(offset, (int)origin, posPtr);
return pos;
}
public unsafe override int Read(byte[] buffer, int offset, int count)
{
if (_stream == null)
throw new ObjectDisposedException("Stream");
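//IStream.Read reports the number of bytes read via an out pointer, so take the address of a local and marshal it as an IntPtr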
var bytesRead = 0;
if (count > 0)
{
var ptr = new IntPtr(&bytesRead);
if (offset == 0)
{
if (count > buffer.Length)
throw new ArgumentOutOfRangeException("count");
_stream.Read(buffer, count, ptr);
}
else
{
var tempBuffer = new byte[count];
_stream.Read(tempBuffer, count, ptr);
if (bytesRead > 0)
Array.Copy(tempBuffer, 0, buffer, offset, bytesRead);
}
}
return bytesRead;
}
public override bool CanRead => _stream != null;
public override bool CanSeek => _stream != null;
public override long Length
{
get
{
if (_stream == null)
throw new ObjectDisposedException("Stream");
STATSTG stats;
_stream.Stat(out stats, STATFLAG_NONAME);
return stats.cbSize;
}
}
public override long Position
{
get
{
return Seek(0, SeekOrigin.Current);
}
set
{
Seek(value, SeekOrigin.Begin);
}
}
}
}
}
<|start_filename|>PreviewDot/ComInterop/IPreviewHandler.cs<|end_filename|>
using System;
using System.Runtime.InteropServices;
namespace PreviewDot.ComInterop
{
[ComImport]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
[Guid(Installer.PreviewHandlerClassId)]
interface IPreviewHandler
{
void SetWindow(IntPtr hwnd, ref RECT rect);
void SetRect(ref RECT rect);
void DoPreview();
void Unload();
void SetFocus();
void QueryFocus(out IntPtr phwnd);
[PreserveSig]
uint TranslateAccelerator(ref MSG pmsg);
}
}
<|start_filename|>PreviewDot.UnitTests/InstallerTests.cs<|end_filename|>
using System;
using System.Diagnostics;
using NUnit.Framework;
namespace PreviewDot.UnitTests
{
[TestFixture, Explicit]
public class InstallerTests
{
private string _assemblyPath;
private string _registerApplicationPath;
[TestFixtureSetUp]
public void SetupOnce()
{
var codeBase = new Uri(typeof(Installer).Assembly.CodeBase);
_assemblyPath = codeBase.AbsolutePath.Replace("/", "\\").Replace(".UnitTests", "");
_registerApplicationPath = "C:\\Windows\\Microsoft.NET\\Framework64\\v4.0.30319\\regasm.exe";
}
[Test]
public void ShouldInstallSuccessfully()
{
var exitCode = _ExecuteRegisterApplication("\"" + _assemblyPath + "\" /codebase");
if (exitCode != 0)
Assert.Fail("Failed to install - " + exitCode);
}
[Test]
public void ShouldUninstallSuccessfully()
{
var exitCode = _ExecuteRegisterApplication("\"" + _assemblyPath + "\" /unregister");
if (exitCode != 0)
Assert.Fail("Failed to uninstall - " + exitCode);
}
private int _ExecuteRegisterApplication(string arguments)
{
var process = new Process
{
StartInfo = new ProcessStartInfo(_registerApplicationPath, arguments)
{
UseShellExecute = false,
RedirectStandardError = true,
RedirectStandardOutput = true
},
EnableRaisingEvents = true
};
process.ErrorDataReceived += (sender, args) => { if (args.Data != null) Trace.TraceError(args.Data.Trim()); };
process.OutputDataReceived += (sender, args) => { if (args.Data != null) Trace.TraceInformation(args.Data.Trim()); };
Trace.TraceInformation("Starting: " + process.StartInfo.FileName + " " + process.StartInfo.Arguments);
process.Start();
process.BeginErrorReadLine();
process.BeginOutputReadLine();
process.WaitForExit();
return process.ExitCode;
}
}
}
<|start_filename|>PreviewDot/PreviewGeneratorFactory.cs<|end_filename|>
using System;
namespace PreviewDot
{
internal class PreviewGeneratorFactory
{
private readonly PreviewSettings _settings;
public PreviewGeneratorFactory(
PreviewSettings settings)
{
if (settings == null)
throw new ArgumentNullException("settings");
_settings = settings;
}
public IPreviewGenerator Create()
{
return new PreviewGenerator(_settings);
}
}
}
<|start_filename|>PreviewDot/PreviewGenerator.cs<|end_filename|>
using System;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace PreviewDot
{
internal class PreviewGenerator : IPreviewGenerator
{
private readonly PreviewSettings _settings;
private readonly StreamHelper _streamHelper;
public PreviewGenerator(PreviewSettings settings, StreamHelper streamHelper = null)
{
if (settings == null)
throw new ArgumentNullException(nameof(settings));
_settings = settings;
_streamHelper = streamHelper ?? new StreamHelper();
}
public async Task<GeneratePreviewResult> GeneratePreview(Stream drawingContent, FileDetail fileDetail, Size previewSize, CancellationToken token)
{
if (drawingContent == null)
throw new ArgumentNullException(nameof(drawingContent));
if (fileDetail == null)
throw new ArgumentNullException(nameof(fileDetail));
if (!drawingContent.CanRead)
throw new ArgumentException("Stream must be readable", nameof(drawingContent));
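//render by piping the DOT source into the Graphviz executable configured in the settings;
//the rendered image is read back from stdout (in the configured format) and anything written to stderr is treated as a rendering failure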
var process = new Process
{
StartInfo =
{
FileName = Path.Combine(_settings.DotApplicationPath),
RedirectStandardInput = true,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true,
Arguments = "-T" + _settings.RenderingFormat.ToString().ToLower()
},
EnableRaisingEvents = true
};
process.Start();
var baseOutputStream = process.StandardOutput.BaseStream;
var baseErrorStream = process.StandardError.BaseStream;
var drawingStreamSansBom = _streamHelper.ExcludeBomFromStream(drawingContent);
await drawingStreamSansBom.CopyToAsync(process.StandardInput.BaseStream);
process.StandardInput.BaseStream.Close();
var outputStream = new MemoryStream();
baseOutputStream.CopyTo(outputStream);
var errorStream = new MemoryStream();
baseErrorStream.CopyTo(errorStream);
process.WaitForExit();
if (errorStream.Length > 0)
{
errorStream.Seek(0, SeekOrigin.Begin);
var message = new StreamReader(errorStream).ReadToEnd();
return new GeneratePreviewResult(message);
}
if (process.ExitCode != 0)
return new GeneratePreviewResult("Failed to render drawing: " + process.ExitCode);
outputStream.Seek(0, SeekOrigin.Begin);
return new GeneratePreviewResult(outputStream);
}
}
}
<|start_filename|>PreviewDot/FileDetail.cs<|end_filename|>
using System;
namespace PreviewDot
{
public class FileDetail
{
public string FileName { get; }
public DateTime LastModified { get; }
public FileDetail(string fileName, DateTime lastModified)
{
if (string.IsNullOrEmpty(fileName))
throw new ArgumentNullException("fileName");
FileName = fileName;
LastModified = lastModified;
}
public override int GetHashCode()
{
return FileName.GetHashCode() ^ LastModified.GetHashCode();
}
public override bool Equals(object obj)
{
var other = obj as FileDetail;
if (other == null)
return false;
return FileName == other.FileName
&& LastModified == other.LastModified;
}
}
}
<|start_filename|>PreviewDot/ComInterop/IPreviewHandlerFrame.cs<|end_filename|>
using System;
using System.Runtime.InteropServices;
namespace PreviewDot.ComInterop
{
[ComImport]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
[Guid("fec87aaf-35f9-447a-adb7-20234491401a")]
interface IPreviewHandlerFrame
{
void GetWindowContext(IntPtr pinfo);
[PreserveSig]
uint TranslateAccelerator(ref MSG pmsg);
};
} | laingsimon/preview-dot |
<|start_filename|>src/main.cpp<|end_filename|>
#define _CRT_SECURE_NO_WARNINGS
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <map>
#include "lodepng.h"
#include "Eigen/Sparse"
typedef Eigen::SparseMatrix<double> SpMat;
typedef Eigen::Triplet<double> Triplet;
typedef Eigen::VectorXd Vec;
// gamma correction constant.
constexpr float GAMMA = 2.2f;
class vec3 {
private:
float x, y, z;
public:
vec3(float x, float y, float z) { this->x = x; this->y = y; this->z = z; }
vec3(float v) { this->x = v; this->y = v; this->z = v; }
vec3() { this->x = this->y = this->z = 0; }
vec3& operator+=(const vec3& b) { (*this) = (*this) + b; return (*this); }
friend vec3 operator-(const vec3& a, const vec3& b) { return vec3(a.x - b.x, a.y - b.y, a.z - b.z); }
friend vec3 operator+(const vec3& a, const vec3& b) { return vec3(a.x + b.x, a.y + b.y, a.z + b.z); }
friend vec3 operator*(const float s, const vec3& a) { return vec3(s * a.x, s * a.y, s * a.z); }
friend vec3 operator*(const vec3& a, const float s) { return s * a; }
const float& operator[] (int index)const{return ((float*)(this))[index];}
float& operator[] (int index){return ((float*)(this))[index];}
};
float clamp(float x) {
if (x > 1.0f) {
return 1.0f;
}
else if (x < 0.0f) {
return 0.0f;
}
else {
return x;
}
}
struct ImageData {
std::vector<vec3> data;
unsigned int width;
unsigned int height;
};
ImageData maskImage;
ImageData sourceImage;
ImageData targetImage;
// load image, and perform gamma correction on it.
void loadImage(const char* file, ImageData& image) {
std::vector<unsigned char> buf;
unsigned error = lodepng::decode(buf, image.width, image.height, file);
if (error) {
printf("could not open input image %s: %s\n", file, lodepng_error_text(error));
exit(1);
}
for (unsigned int i = 0; i < buf.size(); i += 4) {
vec3 v = vec3(
pow(buf[i + 0] / 255.0f, 1.0f / GAMMA),
pow(buf[i + 1] / 255.0f, 1.0f / GAMMA),
pow(buf[i + 2] / 255.0f, 1.0f / GAMMA)
);
image.data.push_back(v);
}
}
// we represent the pixel coordinates in the target image as a single 1D number,
// by flattening the (x,y) into a single index value. Every (x,y) will have its own unique 1D coordinate.
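// For example, in an image of width W, pixel (x, y) maps to index y * W + x.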
int targetFlatten(unsigned int x, unsigned int y) {
return targetImage.width * y + x;
}
unsigned int maskFlatten(unsigned int x, unsigned int y) {
return maskImage.width * y + x;
}
// check if a pixel is part of the mask. Pixels with a red channel value of 1.0 are part of the mask (we allow a small margin, hence the > 0.99 test below).
bool isMaskPixel(unsigned int x, unsigned int y) {
return maskImage.data[maskFlatten(x, y)][0] > 0.99;
}
// compute image gradient.
float vpq(
float fpstar, float fqstar,
float gp, float gq) {
float fdiff = fpstar - fqstar;
float gdiff = gp - gq;
// equation (11) in the paper.
return gdiff;
// we can also mix gradients using equation (13) in the paper, as shown below.
// but I didn't find the results that compelling, so I didn't
// implement it in the final program
/*
if (fabs(fdiff) > fabs(gdiff)) {
return fdiff;
}
else {
return gdiff;
}
*/
}
const char* findToken(const char* param, int argc, char* argv[]) {
const char* token = nullptr;
for (int i = 0; i < argc; ++i) {
if (strcmp(argv[i], param) == 0) {
if (i + 1 < argc) {
token = argv[i + 1];
break;
}
}
}
if (token == nullptr) {
printf("Could not find command-line parameter %s\n", param);
return nullptr;
}
return token;
}
const char* parseStringParam(const char* param, int argc, char* argv[]) {
const char* token = findToken(param, argc, argv);
return token;
}
bool parseIntParam(const char* param, int argc, char* argv[], unsigned int& out) {
const char* token = findToken(param, argc, argv);
if (token == nullptr)
return false;
int r = sscanf(token, "%u,", &out);
if (r != 1 || r == EOF) {
return false;
}
else {
return true;
}
}
void printHelpExit() {
printf("Invalid command line arguments specified!\n\n");
printf("USAGE: poisson_blend [options]\n\n");
printf("NOTE: it is not allowed to blend an image to the exact borders of the image.\n i.e., you can't set something like mx=0, my=0\n\n");
printf("OPTIONS: \n");
printf(" -target\t\t\ttarget image\n");
printf(" -source\t\t\tsource image\n");
printf(" -output\t\t\toutput image\n");
printf(" -mask \t\t\tmask image\n");
printf(" -mx \t\t\tblending target x-position\n");
printf(" -my \t\t\tblending target y-position\n");
exit(1);
}
int main(int argc, char *argv[]) {
// this is the position into which the source image is pasted.
unsigned int mx;
unsigned int my;
//
// begin with some command line parsing.
//
const char* targetFile = parseStringParam("-target", argc, argv);
if (targetFile == nullptr) printHelpExit();
const char* sourceFile = parseStringParam("-source", argc, argv);
if (sourceFile == nullptr) printHelpExit();
const char* outputFile = parseStringParam("-output", argc, argv);
if (outputFile == nullptr) printHelpExit();
const char* maskFile = parseStringParam("-mask", argc, argv);
if (maskFile == nullptr) printHelpExit();
if (!parseIntParam("-mx", argc, argv, mx)) printHelpExit();
if (!parseIntParam("-my", argc, argv, my)) printHelpExit();
// load all three images.
loadImage(targetFile, targetImage);
loadImage(maskFile, maskImage);
loadImage(sourceFile, sourceImage);
// we sanity check mx and my.
{
unsigned int xmin = mx;
unsigned int ymin = my;
unsigned int xmax = mx + maskImage.width;
unsigned int ymax = my + maskImage.height;
if (xmin > 0 && ymin > 0 && xmax < targetImage.width-1 && ymax < targetImage.height-1) {
// sanity check passed!
}
else {
printf("The specified source image(min = (%d,%d), max = (%d, %d)) does not fit in target image(%d,%d), max = (%d, %d))\n",
xmin, ymin, xmax, ymax,
1, 1, targetImage.height-1, targetImage.height-1);
printHelpExit();
}
}
/*
Every pixel involved in the poisson blending process will have an unknown variable associated with it.
The first pixel encountered in the mask is variable number 0, the second one is variable number 1, and so on.
we use a std::map to associate pixel coordinates in the mask with variable numbers.
*/
std::map<unsigned int, unsigned int> varMap;
{
int i = 0;
for (unsigned int y = 0; y < maskImage.height; ++y) {
for (unsigned int x = 0; x < maskImage.width; ++x) {
if (isMaskPixel(x, y)) {
varMap[maskFlatten(x, y)] = i;
++i;
}
}
}
}
const unsigned int numUnknowns = (unsigned int)varMap.size();
/*
The poisson blending process involves solving a linear system Mx = b for the vector x.
Below, we construct the matrix M. It is very sparse, so we only store the non-zero entries.
*/
std::vector<Triplet> mt; // M triplets. sparse matrix entries of M matrix.
{
unsigned int irow = 0;
for (unsigned int y = my; y < my + maskImage.height; ++y) {
for (unsigned int x = mx; x < mx + maskImage.width; ++x) {
if (isMaskPixel(x - mx, y - my)) {
/*
Equation numbers are from the paper "Poisson Image Editing"
http://www.cs.virginia.edu/~connelly/class/2014/comp_photo/proj2/poisson.pdf
We use the left-hand-side of equation (7) for determining the values of M
We do not allow poisson blending on the edges of the image, so we always have a value
of 4 here.
*/
mt.push_back(Triplet(irow, varMap[maskFlatten(x - mx, y - my)], 4)); // |N_p| = 4.
/*
The neighbouring pixels determine the next entries
If a neighbouring pixel is not part of the mask, we must take the boundary condition into account,
and this means that f_q ends up on the right-hand-side(because if it is a boundary condition, we already know the value of f_q, so it cannot be an unknown).
So if a neighbour is outside the mask, no entry is pushed. Instead, it will be added to b(which we explain soon).
*/
if (isMaskPixel(x - mx, y - my - 1)) {
mt.push_back(Triplet(irow, varMap[maskFlatten(x - mx, y - 1 - my)], -1));
}
if (isMaskPixel(x - mx + 1, y - my)) {
mt.push_back(Triplet(irow, varMap[maskFlatten(x - mx + 1, y - my)], -1));
}
if (isMaskPixel(x - mx, y - my + 1)) {
mt.push_back(Triplet(irow, varMap[maskFlatten(x - mx, y - my + 1)], -1));
}
if (isMaskPixel(x - mx - 1, y - my)) {
mt.push_back(Triplet(irow, varMap[maskFlatten(x - mx - 1, y - my)], -1));
}
++irow; // jump to the next row in the matrix.
}
}
}
}
// we will use M to solve for x three times in a row.
// we found that a simple Cholesky decomposition gave us good and fast results.
Eigen::SimplicialCholesky<SpMat> solver;
{
SpMat mat(numUnknowns, numUnknowns);
mat.setFromTriplets(mt.begin(), mt.end());
solver.compute(mat);
}
Vec solutionChannels[3];
Vec b(numUnknowns);
for (unsigned int ic = 0; ic < 3; ++ic)
{
/*
For each of the three color channels RGB, there will be a different b vector.
So to perform poisson blending on the entire image, we must solve for x three times in a row, one time for each channel.
*/
unsigned int irow = 0;
for (unsigned int y = my; y < my + maskImage.height; ++y) {
for (unsigned int x = mx; x < mx + maskImage.width; ++x) {
if (isMaskPixel(x - mx, y - my)) {
// we only ended up using v in the end.
vec3 v = sourceImage.data[maskFlatten(x - mx, y - my)];
vec3 u = targetImage.data[targetFlatten(x, y)];
/*
The right-hand side of (7) determines the value of b.
below, we sum up all the values of v_pq(the gradient) for all neighbours.
*/
float grad =
vpq(
u[ic], targetImage.data[targetFlatten(x, y - 1)][ic], // unused
v[ic], sourceImage.data[maskFlatten(x - mx, y - 1 - my)][ic]) // used
+
vpq(
u[ic], targetImage.data[targetFlatten(x - 1, y)][ic], // unused
v[ic], sourceImage.data[maskFlatten(x - 1 - mx, y - my)][ic]) // used
+
vpq(
u[ic], targetImage.data[targetFlatten(x, y + 1)][ic], // unused
v[ic], sourceImage.data[maskFlatten(x - mx, y + 1 - my)][ic] // used
)
+
vpq(
u[ic], targetImage.data[targetFlatten(x + 1, y)][ic], // unused
v[ic], sourceImage.data[maskFlatten(x + 1 - mx, y - my)][ic]); // used
b[irow] = grad;
/*
Finally, due to the boundary condition, some values of f_q end up on the right-hand-side, because they are not unknown.
The ones outside the mask end up here.
*/
if (!isMaskPixel(x - mx, y - my - 1)) {
b[irow] += targetImage.data[targetFlatten(x, y - 1)][ic];
}
if (!isMaskPixel(x - mx + 1, y - my)) {
b[irow] += targetImage.data[targetFlatten(x + 1, y)][ic];
}
if (!isMaskPixel(x - mx, y - my + 1)) {
b[irow] += targetImage.data[targetFlatten(x, y + 1)][ic];
}
if (!isMaskPixel(x - mx - 1, y - my)) {
b[irow] += targetImage.data[targetFlatten(x - 1, y)][ic];
}
++irow;
}
}
}
// solve for channel number ic.
solutionChannels[ic] = solver.solve(b);
}
// finally, we output the poisson blended image.
{
std::vector<unsigned char> outImage;
// first, output the original image to outImage
for (unsigned int i = 0; i < targetImage.data.size(); ++i) {
vec3 v = targetImage.data[i];
outImage.push_back((unsigned char)(pow(v[0], GAMMA) * 255.0f));
outImage.push_back((unsigned char)(pow(v[1], GAMMA) * 255.0f));
outImage.push_back((unsigned char)(pow(v[2], GAMMA) * 255.0f));
outImage.push_back(255);
}
// now modify outImage, to include the poisson blended pixels.
for (unsigned int y = my; y < my + maskImage.height; ++y) {
for (unsigned int x = mx; x < mx + maskImage.width; ++x) {
if (isMaskPixel(x - mx, y - my)) {
unsigned int i = varMap[maskFlatten(x - mx, y - my)];
vec3 col = vec3((float)solutionChannels[0][i], (float)solutionChannels[1][i], (float)solutionChannels[2][i]);
col[0] = clamp(col[0]);
col[1] = clamp(col[1]);
col[2] = clamp(col[2]);
outImage[4 * targetFlatten(x, y) + 0] = (unsigned char)(pow(col[0], GAMMA) * 255.0f);
outImage[4 * targetFlatten(x, y) + 1] = (unsigned char)(pow(col[1], GAMMA) * 255.0f);
outImage[4 * targetFlatten(x, y) + 2] = (unsigned char)(pow(col[2], GAMMA) * 255.0f);
}
}
}
lodepng::encode(outputFile, outImage, targetImage.width, targetImage.height);
}
}
| yijiuzai/poisson_blend |
<|start_filename|>app/src/main/java/com/example/astrobin/ui/components/Example.kt<|end_filename|>
package com.example.astrobin.ui.components
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.runtime.Composable
import androidx.paging.compose.LazyPagingItems
import androidx.paging.compose.items
@Composable
fun Example(data: LazyPagingItems<Any>) {
LazyColumn {
items(data) { item ->
// Render each paged item here; item is null while a placeholder is still loading.
}
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/api/AstroImage.kt<|end_filename|>
package com.example.astrobin.api
import com.squareup.moshi.Json
data class AstroImage(
@field:Json(name="id") val id: Int,
@field:Json(name="hash") val hash: String?,
@field:Json(name="title") val title: String?,
@field:Json(name="user") val user: String, // username
// dates
@field:Json(name="published") val published: String,
@field:Json(name="updated") val updated: String,
@field:Json(name="uploaded") val uploaded: String,
// astrometry
@field:Json(name="is_solved") val is_solved: Boolean,
@field:Json(name="solution_status") val solution_status: String,
@field:Json(name="ra") val ra: String?, // float
@field:Json(name="dec") val dec: String?, // Float
@field:Json(name="pixscale") val pixscale: String?, // Float
@field:Json(name="radius") val radius: String?, // float
@field:Json(name="orientation") val orientation: String?, // Float
@field:Json(name="w") val w: Int,
@field:Json(name="h") val h: Int,
// images
@field:Json(name="url_advanced_solution") val url_advanced_solution: String?,
@field:Json(name="url_duckduckgo") val url_duckduckgo: String,
@field:Json(name="url_duckduckgo_small") val url_duckduckgo_small: String,
@field:Json(name="url_gallery") val url_gallery: String,
@field:Json(name="url_hd") val url_hd: String,
@field:Json(name="url_histogram") val url_histogram: String,
@field:Json(name="url_real") val url_real: String,
@field:Json(name="url_regular") val url_regular: String,
@field:Json(name="url_skyplot") val url_skyplot: String?,
@field:Json(name="url_solution") val url_solution: String?,
@field:Json(name="url_thumb") val url_thumb: String,
// statistics
@field:Json(name="comments") val comments: Int,
@field:Json(name="likes") val likes: Int,
@field:Json(name="views") val views: Int,
// technical card
@field:Json(name="imaging_cameras") val imaging_cameras: List<String>,
@field:Json(name="imaging_telescopes") val imaging_telescopes: List<String>,
@field:Json(name="data_source") val data_source: String,
@field:Json(name="locations") val locations: List<String>,
@field:Json(name="remote_source") val remote_source: String?,
@field:Json(name="subjects") val subjects: List<String>,
// other
@field:Json(name="animated") val animated: Boolean,
@field:Json(name="bookmarks") val bookmarks: Int,
@field:Json(name="is_final") val is_final: Boolean,
@field:Json(name="license") val license: Int,
@field:Json(name="license_name") val license_name: String,
@field:Json(name="link") val link: String?,
@field:Json(name="link_to_fits") val link_to_fits: String?,
@field:Json(name="resource_uri") val resource_uri: String,
@field:Json(name="revisions") val revisions: List<String>,
) {
val aspectRatio: Float get() = w.toFloat() / h.toFloat()
}
data class AstroImageV2(
@field:Json(name="pk") val pk: Int,
@field:Json(name="user") val user: Int,
@field:Json(name="hash") val hash: String,
@field:Json(name="title") val title: String,
@field:Json(name="imageFile") val imageFile: String?,
@field:Json(name="isWip") val isWip: Boolean,
@field:Json(name="skipNotifications") val skipNotifications: Boolean,
@field:Json(name="w") val w: Int,
@field:Json(name="h") val h: Int,
@field:Json(name="imagingTelescopes") val imagingTelescopes: List<AstroProduct>,
@field:Json(name="imagingCameras") val imagingCameras: List<AstroProduct>,
@field:Json(name="guidingTelescopes") val guidingTelescopes: List<AstroProduct>,
@field:Json(name="guidingCameras") val guidingCameras: List<AstroProduct>,
@field:Json(name="focalReducers") val focalReducers: List<AstroProduct>,
@field:Json(name="mounts") val mounts: List<AstroProduct>,
@field:Json(name="accessories") val accessories: List<AstroProduct>,
@field:Json(name="software") val software: List<AstroProduct>,
// @field:Json(name="imagingTelescopes2") val imagingTelescopes2: List<TelescopeInterface2>,
// @field:Json(name="imagingCameras2") val imagingCameras2: List<CameraInterface2>,
// @field:Json(name="guidingTelescopes2") val guidingTelescopes2: List<TelescopeInterface2>,
// @field:Json(name="guidingCameras2") val guidingCameras2: List<CameraInterface2>,
// @field:Json(name="mounts2") val mounts2: List<MountInterface2>,
// @field:Json(name="filters2") val filters2: List<FilterInterface2>,
// @field:Json(name="accessories2") val accessories2: List<AccessoryInterface2>,
// @field:Json(name="software2") val software2: List<SoftwareInterface2>,
@field:Json(name="published") val published: String,
@field:Json(name="license") val license: String,
@field:Json(name="description") val description: String?,
@field:Json(name="descriptionBbcode") val descriptionBbcode: String?,
@field:Json(name="link") val link: String?,
@field:Json(name="linkToFits") val linkToFits: String?,
@field:Json(name="acquisitionType") val acquisitionType: String,
@field:Json(name="subjectType") val subjectType: String,
@field:Json(name="solarSystemMainSubject") val solarSystemMainSubject: String?,
@field:Json(name="dataSource") val dataSource: String,
@field:Json(name="remoteSource") val remoteSource: String?,
@field:Json(name="partOfGroupSet") val partOfGroupSet: List<Int>,
@field:Json(name="mouseHoverImage") val mouseHoverImage: String,
@field:Json(name="allowComments") val allowComments: Boolean,
@field:Json(name="squareCropping") val squareCropping: String,
@field:Json(name="watermark") val watermark: Boolean,
@field:Json(name="watermarkText") val watermarkText: String?,
@field:Json(name="watermarkPosition") val watermarkPosition: String,
@field:Json(name="watermarkSize") val watermarkSize: String,
@field:Json(name="watermarkOpacity") val watermarkOpacity: Float,
@field:Json(name="sharpenThumbnails") val sharpenThumbnails: Boolean,
@field:Json(name="keyValueTags") val keyValueTags: String,
@field:Json(name="locations") val locations: List<Int>,
// TODO: we should make sure to honor this if it isn't enforced by backend
@field:Json(name="fullSizeDisplayLimitation") val fullSizeDisplayLimitation: String,
// TODO: we should make sure to honor this if it isn't enforced by backend
@field:Json(name="downloadLimitation") val downloadLimitation: String,
@field:Json(name="thumbnails") val thumbnails: List<AstroThumbnail>,
// Ephemeral form fields
// showGuidingEquipment?: boolean;
) {
private fun urlFor(alias: String) = thumbnails.single { it.alias == alias }.url
val url_story: String get() = urlFor("story")
val url_regular: String get() = urlFor("regular")
val url_hd: String get() = urlFor("hd")
val url_qhd: String get() = urlFor("qhd")
val url_histogram: String get() = imageUrl(hash,"histogram")
val aspectRatio: Float get() = w.toFloat() / h.toFloat()
val bookmarksCount: Int
get() = 6 // TODO: ask about including this in the API
val likesCount: Int
get() = 126 // TODO: ask about including this in the API
}
data class AstroImageRevision(
val id: Int,
)
data class TopPick(
@field:Json(name="date") val date: String,
@field:Json(name="image") val image: String,
@field:Json(name="resource_uri") val resource_uri: String
) {
val hash: String get() = image.substringAfterLast('/')
val url_regular: String get() = imageUrl(hash, "regular")
val url_thumb: String get() = imageUrl(hash, "thumb")
val url_real: String get() = imageUrl(hash, "real")
val url_gallery: String get() = imageUrl(hash, "gallery")
val url_hd: String get() = imageUrl(hash, "hd")
}
data class AstroProduct(
@field:Json(name="pk") val pk: Int,
@field:Json(name="make") val make: String?,
@field:Json(name="name") val name: String,
)
data class ThumbnailGroup(
@field:Json(name="image") val image: Int,
@field:Json(name="pk") val pk: Int,
@field:Json(name="revision") val revision: String,
@field:Json(name="real") val real: String,
@field:Json(name="hd") val hd: String,
@field:Json(name="regular") val regular: String,
@field:Json(name="gallery") val gallery: String,
@field:Json(name="thumb") val thumb: String,
)
data class AstroThumbnail(
@field:Json(name="id") val id: Int,
@field:Json(name="revision") val revision: String,
@field:Json(name="alias") val alias: String,
@field:Json(name="url") val url: String,
)
data class PlateSolve(
@field:Json(name="id") val id: Int,
@field:Json(name="status") val status: Int,
@field:Json(name="submission_id") val submission_id: Int,
@field:Json(name="object_id") val object_id: String,
@field:Json(name="image_file") val image_file: String,
@field:Json(name="skyplot_zoom1") val skyplot_zoom1: String,
@field:Json(name="objects_in_field") val objects_in_field: String,
@field:Json(name="ra") val ra: String,
@field:Json(name="dec") val dec: String,
@field:Json(name="pixscale") val pixscale: String,
@field:Json(name="orientation") val orientation: String,
@field:Json(name="radius") val radius: String,
// @field:Json(name="annotations") val annotations: String,
// @field:Json(name="pixinsight_serial_number") val pixinsight_serial_number: String?,
// @field:Json(name="pixinsight_svg_annotation_hd") val pixinsight_svg_annotation_hd: String?,
// @field:Json(name="pixinsight_svg_annotation_regular") val pixinsight_svg_annotation_regular: String,
// @field:Json(name="advanced_ra") val advanced_ra: String?,
// @field:Json(name="advanced_ra_top_left") val advanced_ra_top_left: String?,
// @field:Json(name="advanced_ra_top_right") val advanced_ra_top_right: String?,
// @field:Json(name="advanced_ra_bottom_left") val advanced_ra_bottom_left: String?,
// @field:Json(name="advanced_ra_bottom_right") val advanced_ra_bottom_right: String?,
// @field:Json(name="advanced_dec") val advanced_dec: String?,
// @field:Json(name="advanced_dec_top_left") val advanced_dec_top_left: String?,
// @field:Json(name="advanced_dec_top_right") val advanced_dec_top_right: String?,
// @field:Json(name="advanced_dec_bottom_left") val advanced_dec_bottom_left: String?,
// @field:Json(name="advanced_dec_bottom_right") val advanced_dec_bottom_right: String?,
// @field:Json(name="advanced_pixscale") val advanced_pixscale: String?,
// @field:Json(name="advanced_orientation") val advanced_orientation: String?,
// @field:Json(name="advanced_flipped") val advanced_flipped: Boolean?,
// @field:Json(name="advanced_wcs_transformation") val advanced_wcs_transformation: String?,
// @field:Json(name="advanced_matrix_rect") val advanced_matrix_rect: String?,
// @field:Json(name="advanced_matrix_delta") val advanced_matrix_delta: String?,
// @field:Json(name="advanced_ra_matrix") val advanced_ra_matrix: String?,
// @field:Json(name="advanced_dec_matrix") val advanced_dec_matrix: String?,
// @field:Json(name="advanced_annotations") val advanced_annotations: String?,
// @field:Json(name="advanced_annotations_regular") val advanced_annotations_regular: String?,
@field:Json(name="settings") val settings: Int,
@field:Json(name="content_type") val content_type: Int,
@field:Json(name="advanced_settings") val advanced_settings: Int?,
)
private fun imageUrl(hash: String, type: String) = "https://www.astrobin.com/$hash/0/rawthumb/$type/"
<|start_filename|>app/src/main/java/com/example/astrobin/ui/components/UserViews.kt<|end_filename|>
package com.example.astrobin.ui.components
import androidx.compose.foundation.Image
import androidx.compose.foundation.background
import androidx.compose.foundation.border
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.material.MaterialTheme
import androidx.compose.material.Text
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.PeopleAlt
import androidx.compose.material.icons.filled.ThumbUp
import androidx.compose.material.icons.outlined.Visibility
import androidx.compose.runtime.Composable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.unit.dp
import androidx.navigation.NavController
import coil.compose.rememberImagePainter
import com.example.astrobin.api.AstroUser
import com.example.astrobin.api.AstroUserProfile
import com.example.astrobin.api.avatarUrl
import com.example.astrobin.ui.screens.IconCount
import com.example.astrobin.ui.theme.Yellow
@Composable
fun UserRow(user: AstroUserProfile, nav: NavController) {
Row(
modifier = Modifier
.clickable {
nav.navigate("user/${user.id}")
},
verticalAlignment = Alignment.CenterVertically
) {
AstroAvatar(user.url_avatar)
Column(Modifier.padding(start=10.dp)) {
Text(user.display_name, style= MaterialTheme.typography.subtitle1)
Row {
IconCount(
user.followers_count,
Icons.Filled.PeopleAlt,
)
IconCount(
user.received_likes_count,
Icons.Filled.ThumbUp,
)
}
}
}
}
@Composable
fun SmallUserRow(
user: String,
likeCount: Int,
views: Int,
modifier: Modifier = Modifier,
) {
Row(modifier, verticalAlignment = Alignment.CenterVertically) {
AstroAvatar(avatarUrl(user))
Column(Modifier.padding(start=10.dp)) {
Text("@$user", style= MaterialTheme.typography.subtitle1)
Row {
IconCount(
views,
Icons.Outlined.Visibility,
)
IconCount(
likeCount,
Icons.Filled.ThumbUp,
)
}
}
}
}
@Composable fun AstroAvatar(
imageUrl: String
) {
Image(
painter = rememberImagePainter(imageUrl),
contentDescription = "",
modifier = Modifier
.border(2.dp, Yellow, CircleShape)
.padding(2.dp)
.clip(CircleShape)
.background(Color.Black)
.size(34.dp)
)
}
@Composable fun AstroAvatar(
user: AstroUserProfile
) {
AstroAvatar(imageUrl = user.url_avatar)
}
<|start_filename|>app/src/main/java/com/example/astrobin/AstrobinApp.kt<|end_filename|>
package com.example.astrobin
import android.app.Application
import android.graphics.Color
import android.view.Window
import androidx.core.view.WindowCompat
import dagger.hilt.android.HiltAndroidApp
@HiltAndroidApp
class AstrobinApp : Application()
fun Window.makeTransparentStatusBar() {
WindowCompat.setDecorFitsSystemWindows(this, false)
statusBarColor = Color.TRANSPARENT
navigationBarColor = Color.BLACK
}
fun Window.markAttributes(bits: Int, value: Boolean) {
val params = attributes
if (value) {
params.flags = params.flags or bits
} else {
params.flags = params.flags and bits.inv()
}
attributes = params
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/components/SearchBox.kt<|end_filename|>
package com.example.astrobin.ui.components
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.layout.size
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.foundation.text.BasicTextField
import androidx.compose.foundation.text.KeyboardActions
import androidx.compose.foundation.text.KeyboardOptions
import androidx.compose.material.Icon
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Search
import androidx.compose.runtime.Composable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.SolidColor
import androidx.compose.ui.text.TextStyle
import androidx.compose.ui.unit.dp
import androidx.compose.ui.unit.sp
import com.example.astrobin.ui.theme.DarkBlue
@Composable
fun SearchBox(
value: String,
onValueChange: (String) -> Unit,
modifier: Modifier = Modifier,
keyboardOptions: KeyboardOptions = KeyboardOptions.Default,
keyboardActions: KeyboardActions = KeyboardActions.Default,
) {
BasicTextField(
value = value,
onValueChange = onValueChange,
modifier = modifier,
keyboardOptions = keyboardOptions,
keyboardActions = keyboardActions,
maxLines = 1,
textStyle = TextStyle(color = Color.White, fontSize = 16.sp),
cursorBrush = SolidColor(Color.White),
decorationBox = { textField ->
Box(
Modifier
.background(DarkBlue, RoundedCornerShape(24.dp))
.padding(18.dp)
) {
Icon(
imageVector = Icons.Filled.Search,
contentDescription = "Search",
tint = Color.White,
modifier = Modifier
.align(Alignment.CenterStart)
.size(24.dp)
)
Box(
Modifier
.align(Alignment.CenterStart)
.padding(start = 36.dp)
.fillMaxWidth()
) {
textField()
}
}
}
)
}
<|start_filename|>app/src/main/java/com/example/astrobin/api/Api.kt<|end_filename|>
package com.example.astrobin.api
import androidx.compose.runtime.staticCompositionLocalOf
import retrofit2.http.*
val LocalAstrobinApi = staticCompositionLocalOf<Astrobin> {
error("No Astrobin API was provided, but it should have been")
}
class Astrobin(
private val auth: AuthenticationInterceptor,
private val api: AstrobinApi,
) {
class NotFoundException : Exception()
private fun NotFound(): Nothing { throw NotFoundException() }
fun isLoggedIn(): Boolean = auth.isLoggedIn()
fun logout(): Unit = auth.clear()
suspend fun login(username: String, password: String) = auth.setCredentials(username, password)
suspend fun register(
username: String,
email: String,
password: String
): Boolean {
// TODO: an api doesn't exist for this yet
return false
}
suspend fun currentUser(): AstroUserProfile? = if (isLoggedIn()) api.currentUser().singleOrNull() else null
suspend fun image(hash: String): AstroImageV2 {
val pages = api.image(hash)
if (pages.results.isEmpty()) NotFound()
return pages.results.single()
}
suspend fun image(id: Int): AstroImageV2 = api.image(id)
suspend fun imageRevision(image: Int): Paginated<AstroImageRevision> = api.imageRevision(image)
suspend fun thumbnailGroup(imageId: Int): ThumbnailGroup = api.thumbnailGroup(imageId)
suspend fun comments(
contentType: Int,
objectId: Int,
): List<AstroComment> {
return AstroComment
.collect(api.comments(contentType, objectId))
.deepFlattenInto(mutableListOf()) { it.children }
}
suspend fun createComment(
comment: AstroComment,
): AstroComment = api.createComment(comment)
suspend fun plateSolve(
contentType: Int,
objectId: Int
): PlateSolve? = api.plateSolve(contentType, objectId).singleOrNull()
suspend fun plateSolves(
contentType: Int,
objectIds: List<Int>,
): List<PlateSolve> = api.plateSolves(contentType, objectIds.joinToString(","))
suspend fun user(
id: Int
): AstroUser = api.user(id)
suspend fun userProfile(id: Int): AstroUserProfile = api.userProfile(id)
suspend fun userProfile(username: String): ListResponse<AstroUserProfile> = api.userProfile(username)
suspend fun imageOld(hash: String): AstroImage = api.imageOld(hash)
suspend fun imageSearch(
limit: Int,
offset: Int,
params: Map<String, String>
): ListResponse<AstroImage> = api.imageSearch(limit, offset, params)
suspend fun topPicks(
limit: Int,
offset: Int,
): ListResponse<TopPick> = api.topPicks(limit, offset)
suspend fun topPickNominations(
limit: Int,
offset: Int,
): ListResponse<TopPick> = api.topPickNominations(limit, offset)
suspend fun imageOfTheDay(
limit: Int,
offset: Int,
): ListResponse<TopPick> = api.imageOfTheDay(limit, offset)
}
interface AstrobinApi {
@GET("api/v2/common/userprofiles/current/")
suspend fun currentUser(): List<AstroUserProfile>
@GET("api/v2/images/image/")
suspend fun image(@Query("hash") hash: String): Paginated<AstroImageV2>
@GET("api/v2/images/image/{id}/")
suspend fun image(@Path("id") id: Int): AstroImageV2
@GET("api/v2/images/image-revision/")
suspend fun imageRevision(@Query("image") image: Int): Paginated<AstroImageRevision>
@GET("api/v2/images/thumbnail-group/")
suspend fun thumbnailGroup(@Query("image") imageId: Int): ThumbnailGroup
@GET("api/v2/nestedcomments/nestedcomments/")
suspend fun comments(
@Query("content_type") contentType: Int,
@Query("object_id") objectId: Int,
): List<AstroComment>
@POST("api/v2/nestedcomments/")
suspend fun createComment(
@Body comment: AstroComment,
): AstroComment
@GET("api/v2/platesolving/solutions/")
suspend fun plateSolve(
@Query("content_type") contentType: Int,
@Query("object_id") objectId: Int
): List<PlateSolve>
@GET("api/v2/platesolving/solutions/")
suspend fun plateSolves(
@Query("content_type") contentType: Int,
@Query("object_ids") objectIds: String
): List<PlateSolve>
@GET("api/v2/common/users/{id}/")
suspend fun user(
@Path("id") id: Int
): AstroUser
@GET("api/v1/userprofile/{id}/")
suspend fun userProfile(@Path("id") id: Int): AstroUserProfile
@GET("api/v1/userprofile/")
suspend fun userProfile(@Query("username") username: String): ListResponse<AstroUserProfile>
@GET("api/v1/image/{hash}/")
suspend fun imageOld(@Path("hash") hash: String): AstroImage
@GET("api/v1/image/")
suspend fun imageSearch(
@Query("limit") limit: Int,
@Query("offset") offset: Int,
@QueryMap params: Map<String, String>
): ListResponse<AstroImage>
@GET("api/v1/toppick/")
suspend fun topPicks(
@Query("limit") limit: Int,
@Query("offset") offset: Int,
): ListResponse<TopPick>
@GET("api/v1/toppicknominations/")
suspend fun topPickNominations(
@Query("limit") limit: Int,
@Query("offset") offset: Int,
): ListResponse<TopPick>
@GET("api/v1/imageoftheday/")
suspend fun imageOfTheDay(
@Query("limit") limit: Int,
@Query("offset") offset: Int,
): ListResponse<TopPick>
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/screens/UserScreen.kt<|end_filename|>
package com.example.astrobin.ui.screens
import androidx.compose.foundation.Image
import androidx.compose.foundation.border
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.material.Icon
import androidx.compose.material.Text
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.PeopleAlt
import androidx.compose.material.icons.filled.ThumbUp
import androidx.compose.runtime.Composable
import androidx.compose.runtime.produceState
import androidx.compose.runtime.remember
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.unit.dp
import androidx.compose.ui.unit.sp
import androidx.navigation.NavController
import androidx.paging.LoadState
import androidx.paging.Pager
import androidx.paging.PagingConfig
import androidx.paging.compose.collectAsLazyPagingItems
import androidx.paging.compose.items
import coil.compose.rememberImagePainter
import com.example.astrobin.api.AstroUserProfile
import com.example.astrobin.api.ImageSearchPagingSource
import com.example.astrobin.api.LocalAstrobinApi
import com.example.astrobin.ui.components.LoadingIndicator
import com.example.astrobin.ui.components.UserImageRow
import com.google.accompanist.insets.statusBarsPadding
@Composable
fun UserScreen(
id: Int,
padding: PaddingValues,
nav: NavController
) {
val api = LocalAstrobinApi.current
    // Keyed on id so the profile is re-fetched if this screen is reused for a different user.
    val user = produceState<AstroUserProfile?>(null, id) {
        value = api.userProfile(id)
    }.value
if (user == null) {
LoadingIndicator(Modifier.fillMaxWidth().statusBarsPadding())
} else {
val pager = remember(user.username) {
Pager(PagingConfig(pageSize = 20)) {
ImageSearchPagingSource(api, mapOf("user" to user.username))
}
}
val userImages = pager.flow.collectAsLazyPagingItems()
val loadState = userImages.loadState
LazyColumn(
modifier = Modifier.fillMaxSize(),
contentPadding = padding,
) {
item { Spacer(Modifier.statusBarsPadding()) }
item {
UserHeaderContent(user, nav)
Spacer(modifier = Modifier.height(16.dp))
}
items(userImages) {
UserImageRow(it!!, nav)
Spacer(modifier = Modifier.height(8.dp))
}
when {
loadState.refresh is LoadState.Loading -> {
item { LoadingIndicator(Modifier.fillMaxWidth()) }
}
loadState.append is LoadState.Loading -> {
item { LoadingIndicator(Modifier.fillMaxWidth()) }
}
loadState.refresh is LoadState.Error -> {
val e = loadState.refresh as LoadState.Error
item {
// TODO: retry?
Text(
text = e.error.localizedMessage!!,
modifier = Modifier.fillParentMaxSize(),
)
}
}
loadState.append is LoadState.Error -> {
val e = loadState.append as LoadState.Error
item {
// TODO: retry?
Text(e.error.localizedMessage!!)
}
}
}
}
}
}
@Composable
private fun UserHeaderContent(user: AstroUserProfile, nav: NavController) {
Column(
modifier = Modifier.fillMaxWidth()
) {
Spacer(modifier = Modifier.height(16.dp))
Image(
painter = rememberImagePainter(user.url_avatar),
contentDescription = "avatar",
contentScale = ContentScale.Crop,
modifier = Modifier
.align(Alignment.CenterHorizontally)
.size(128.dp)
.clip(CircleShape)
.border(2.dp, Color.Black, CircleShape)
)
Text(
text = user.display_name,
modifier = Modifier.align(Alignment.CenterHorizontally),
fontSize = 32.sp
)
Text(
text = "@${user.username}",
modifier = Modifier.align(Alignment.CenterHorizontally),
)
Row(
modifier = Modifier.align(Alignment.CenterHorizontally),
) {
            Icon(
                imageVector = Icons.Filled.ThumbUp,
                contentDescription = "like icon",
                modifier = Modifier
                    .size(24.dp)
                    .padding(2.dp)
            )
            Text("${user.received_likes_count}")
            Spacer(modifier = Modifier.width(8.dp))
            Icon(
                imageVector = Icons.Filled.PeopleAlt,
                contentDescription = "followers icon",
                modifier = Modifier
                    .size(24.dp)
                    .padding(2.dp)
            )
            Text("${user.followers_count}")
}
if (user.about != null) {
Text("${user.about}")
}
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/MainActivity.kt<|end_filename|>
package com.example.astrobin
import android.graphics.Color
import android.os.Bundle
import android.view.View
import android.view.Window
import android.view.WindowManager
import androidx.activity.ComponentActivity
import androidx.activity.compose.setContent
import androidx.core.view.WindowCompat
import coil.ImageLoader
import com.example.astrobin.api.Astrobin
import com.example.astrobin.api.AstrobinApi
import dagger.hilt.android.AndroidEntryPoint
import javax.inject.Inject
@AndroidEntryPoint
class MainActivity : ComponentActivity() {
@Inject
lateinit var api: Astrobin
@Inject
lateinit var imageLoader: ImageLoader
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
window.makeTransparentStatusBar()
setContent { Astrobin(api, imageLoader) }
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/theme/Color.kt<|end_filename|>
package com.example.astrobin.ui.theme
import androidx.compose.ui.graphics.Color
val Purple200 = Color(0xFFBB86FC)
val Purple500 = Color(0xFF6200EE)
val Purple700 = Color(0xFF3700B3)
val Teal200 = Color(0xFF03DAC5)
val Yellow = Color(0xFFF8BC04)
val DarkBlue = Color(0xFF051427)
val Maroon = Color(0xFF530F1E)
val Orange = Color(0xFFA44322)
<|start_filename|>app/src/main/java/com/example/astrobin/api/AstrobinComponent.kt<|end_filename|>
package com.example.astrobin.api
import android.app.Application
import android.content.Context
import com.squareup.moshi.Moshi
import com.squareup.moshi.kotlin.reflect.KotlinJsonAdapterFactory
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.components.SingletonComponent
import okhttp3.Cache
import okhttp3.OkHttpClient
import okhttp3.logging.HttpLoggingInterceptor
import retrofit2.Retrofit
import retrofit2.converter.moshi.MoshiConverterFactory
import java.io.File
@Module
@InstallIn(SingletonComponent::class)
object AstrobinComponent {
@Provides
fun authenticationInterceptor(
application: Application,
authApi: AstrobinAuthApi
): AuthenticationInterceptor = AuthenticationInterceptor(
authApi,
application.getSharedPreferences("com.example.astrobin", Context.MODE_PRIVATE)
)
@Provides
fun baseOkHttpClient(
application: Application,
authenticationInterceptor: AuthenticationInterceptor
) = OkHttpClient
.Builder()
.addInterceptor {
it.proceed(
it
.request()
.newBuilder()
.header("Accept","application/json")
.build()
)
}
.addInterceptor(authenticationInterceptor)
.addInterceptor(HttpLoggingInterceptor().apply { level = HttpLoggingInterceptor.Level.BODY })
.cache(
Cache(
directory = File(application.cacheDir, "http_cache"),
maxSize = 50L * 1024L * 1024L // 50 MiB
)
)
.build()
@Provides
fun retrofit(baseOkHttpClient: OkHttpClient) = Retrofit
.Builder()
.baseUrl("https://www.astrobin.com/")
.client(baseOkHttpClient)
.addConverterFactory(
MoshiConverterFactory.create(
Moshi.Builder()
.addLast(KotlinJsonAdapterFactory())
.build()
)
)
.build()
@Provides
fun astrobinApi(retrofit: Retrofit): AstrobinApi {
return retrofit
.create(AstrobinApi::class.java)
}
@Provides
fun authApi(): AstrobinAuthApi {
return Retrofit
.Builder()
.baseUrl("https://www.astrobin.com/")
.client(OkHttpClient
.Builder()
.addInterceptor(HttpLoggingInterceptor().apply { level = HttpLoggingInterceptor.Level.BODY })
.build()
)
.addConverterFactory(
MoshiConverterFactory.create(
Moshi.Builder()
.addLast(KotlinJsonAdapterFactory())
.build()
)
)
.build()
.create(AstrobinAuthApi::class.java)
}
@Provides
fun astrobin(
authenticationInterceptor: AuthenticationInterceptor,
astrobinApi: AstrobinApi,
) = Astrobin(authenticationInterceptor, astrobinApi)
}
<|start_filename|>app/src/main/java/com/example/astrobin/exp/LoadableFlow.kt<|end_filename|>
package com.example.astrobin.exp
import android.annotation.SuppressLint
import androidx.compose.runtime.*
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.channels.ProducerScope
import kotlinx.coroutines.currentCoroutineContext
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.withContext
import kotlin.coroutines.CoroutineContext
import kotlin.coroutines.EmptyCoroutineContext
@SuppressLint("StateFlowValueCalledInComposition")
@Composable
fun <R> StateFlow<R>.collectAsState(
context: CoroutineContext = EmptyCoroutineContext
): State<R> = produceState(this.value, this, context) {
if (context == EmptyCoroutineContext) {
collect { value = it }
} else withContext(context) {
collect { value = it }
}
}
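// Experimental: builds a Loadable<T> from a channelFlow of update lambdas; each sent
// lambda transforms the current value, and a sentinel send marks the builder's completion.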
@Composable
fun <T> loadFlow(
initial: T,
build: suspend ProducerScope<(T) -> T>.() -> Unit
): Loadable<T> {
val scope = rememberCoroutineScope()
val flow = remember {
val start = Loadable(initial, true)
val last: (T) -> T = { it }
channelFlow {
build()
send(last)
}
.runningFold(start) { acc, it ->
                acc.copy(value = it(acc.value), loading = it !== last)
}
.stateIn(scope, SharingStarted.Eagerly, start)
}
return flow.collectAsState().value
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/theme/Theme.kt<|end_filename|>
package com.example.astrobin.ui.theme
import androidx.compose.material.MaterialTheme
import androidx.compose.material.darkColors
import androidx.compose.runtime.Composable
import androidx.compose.ui.graphics.Color
private val DarkColorPalette = darkColors(
primary = DarkBlue,
primaryVariant = Color.Black,
onPrimary = Yellow,
secondary = Maroon,
secondaryVariant = Maroon,
onSecondary = Color.White,
background = Orange,
onBackground = Color.White,
surface = Color(0xFF121212),
onSurface = Color.White,
error = Color(0xFFCF6679),
onError = Color.Black
)
@Composable
fun AstrobinTheme(content: @Composable () -> Unit) {
MaterialTheme(
colors = DarkColorPalette,
typography = Typography,
shapes = Shapes,
content = content
)
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/components/Foreground.kt<|end_filename|>
package com.example.astrobin.ui.components
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.DrawModifier
import androidx.compose.ui.geometry.Size
import androidx.compose.ui.graphics.*
import androidx.compose.ui.graphics.drawscope.ContentDrawScope
import androidx.compose.ui.platform.InspectorInfo
import androidx.compose.ui.platform.InspectorValueInfo
import androidx.compose.ui.platform.debugInspectorInfo
import androidx.compose.ui.unit.LayoutDirection
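// Draws [brush] on top of the already-drawn content (the counterpart of
// Modifier.background), using [shape] for the drawn region; used here to
// overlay gradients on images.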
fun Modifier.foreground(
brush: Brush,
shape: Shape = RectangleShape,
/*@FloatRange(from = 0.0, to = 1.0)*/
alpha: Float = 1.0f
) = this.then(
Foreground(
brush = brush,
alpha = alpha,
shape = shape,
inspectorInfo = debugInspectorInfo {
name = "foreground"
properties["alpha"] = alpha
properties["brush"] = brush
properties["shape"] = shape
}
)
)
private class Foreground constructor(
private val color: Color? = null,
private val brush: Brush? = null,
private val alpha: Float = 1.0f,
private val shape: Shape,
inspectorInfo: InspectorInfo.() -> Unit
) : DrawModifier, InspectorValueInfo(inspectorInfo) {
// naive cache outline calculation if size is the same
private var lastSize: Size? = null
private var lastLayoutDirection: LayoutDirection? = null
private var lastOutline: Outline? = null
override fun ContentDrawScope.draw() {
drawContent()
if (shape === RectangleShape) {
// shortcut to avoid Outline calculation and allocation
drawRect()
} else {
drawOutline()
}
}
private fun ContentDrawScope.drawRect() {
color?.let { drawRect(color = it) }
brush?.let { drawRect(brush = it, alpha = alpha) }
}
    private fun ContentDrawScope.drawOutline() {
        val outline =
            if (size == lastSize && layoutDirection == lastLayoutDirection) {
                lastOutline!!
            } else {
                shape.createOutline(size, layoutDirection, this)
            }
        color?.let { drawOutline(outline, color = color) }
        brush?.let { drawOutline(outline, brush = brush, alpha = alpha) }
        // Update the cache so the outline is only recomputed when the size or
        // layout direction actually changes.
        lastOutline = outline
        lastSize = size
        lastLayoutDirection = layoutDirection
    }
override fun hashCode(): Int {
var result = color?.hashCode() ?: 0
result = 31 * result + (brush?.hashCode() ?: 0)
result = 31 * result + alpha.hashCode()
result = 31 * result + shape.hashCode()
return result
}
override fun equals(other: Any?): Boolean {
val otherModifier = other as? Foreground ?: return false
return color == otherModifier.color &&
brush == otherModifier.brush &&
alpha == otherModifier.alpha &&
shape == otherModifier.shape
}
    override fun toString(): String =
        "Foreground(color=$color, brush=$brush, alpha=$alpha, shape=$shape)"
}
<|start_filename|>app/src/main/java/com/example/astrobin/api/Pagination.kt<|end_filename|>
package com.example.astrobin.api
import com.squareup.moshi.Json
data class AstroResultsMeta(
@field:Json(name="limit") val limit: Int,
@field:Json(name="next") val next: String?,
@field:Json(name="offset") val offset: Int,
@field:Json(name="previous") val previous: String?,
@field:Json(name="total_count") val total_count: Int,
)
data class ListResponse<T>(
@field:Json(name="meta") val meta: AstroResultsMeta,
@field:Json(name="objects") val objects: List<T>,
)
data class Paginated<T>(
@field:Json(name="count") val count: Int,
@field:Json(name="next") val next: String?,
@field:Json(name="prev") val prev: String?,
@field:Json(name="results") val results: List<T>,
)
<|start_filename|>app/src/main/java/com/example/astrobin/ui/components/LoadingIndicator.kt<|end_filename|>
package com.example.astrobin.ui.components
import androidx.compose.foundation.layout.Box
import androidx.compose.material.CircularProgressIndicator
import androidx.compose.material.LinearProgressIndicator
import androidx.compose.runtime.Composable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import com.example.astrobin.ui.theme.Orange
@Composable
fun LoadingIndicator(modifier: Modifier) {
Box(
modifier = modifier
) {
CircularProgressIndicator(
color = Color.White,
modifier = Modifier.align(Alignment.Center)
)
}
}
@Composable
fun LoadingBar(modifier: Modifier) {
LinearProgressIndicator(
color = Orange,
modifier = modifier
)
}
<|start_filename|>app/src/main/java/com/example/astrobin/api/Paging.kt<|end_filename|>
package com.example.astrobin.api
import androidx.paging.PagingSource
import androidx.paging.PagingState
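// Bridges AstroBin's offset/limit ListResponse pagination to Paging 3;
// page keys are item offsets computed from params.loadSize.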
abstract class AstroPagingSource<T : Any> : PagingSource<Int, T>() {
abstract suspend fun load(limit: Int, offset: Int): ListResponse<T>
override suspend fun load(params: LoadParams<Int>): LoadResult<Int, T> {
return try {
val offset = params.key ?: 0
val response = load(limit = params.loadSize, offset = offset)
val total = response.meta.total_count
val prevOffset = offset - params.loadSize
val nextOffset = offset + params.loadSize
LoadResult.Page(
data = response.objects,
prevKey = if (prevOffset < 0) null else prevOffset,
nextKey = if (nextOffset > total) null else nextOffset
)
} catch (e: Exception) {
LoadResult.Error(e)
}
}
override fun getRefreshKey(state: PagingState<Int, T>): Int? {
return null
}
}
class TopPickPagingSource(
private val api: Astrobin
) : AstroPagingSource<TopPick>() {
override suspend fun load(limit: Int, offset: Int): ListResponse<TopPick> {
return api.topPicks(limit, offset)
}
}
class ImageOfTheDayPagingSource(
private val api: Astrobin
) : AstroPagingSource<TopPick>() {
override suspend fun load(limit: Int, offset: Int): ListResponse<TopPick> {
return api.imageOfTheDay(limit, offset)
}
}
class ImageSearchPagingSource(
private val api: Astrobin,
val params: Map<String, String>
) : AstroPagingSource<AstroImage>() {
override suspend fun load(limit: Int, offset: Int): ListResponse<AstroImage> {
return api.imageSearch(limit, offset, params)
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/api/Authentication.kt<|end_filename|>
package com.example.astrobin.api
import android.content.SharedPreferences
import android.util.Log
import androidx.core.content.edit
import com.example.astrobin.BuildConfig
import com.squareup.moshi.Json
import okhttp3.Interceptor
import okhttp3.Response
import retrofit2.http.Field
import retrofit2.http.FormUrlEncoded
import retrofit2.http.POST
private const val TOKEN_KEY: String = "astrobin.api.access_token"
interface AstrobinAuthApi {
@FormUrlEncoded
@POST("api/v2/api-auth-token/")
suspend fun authToken(
@Field("username") username: String,
@Field("password") password: String
): ApiToken
}
data class ApiToken(
@field:Json(name="token") val token: String
)
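// Adds credentials to outgoing requests: a stored token header for /api/v2/
// endpoints and api_key/api_secret query parameters for /api/v1/ endpoints.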
class AuthenticationInterceptor(
private val auth: AstrobinAuthApi,
private val sharedPrefs: SharedPreferences
) : Interceptor {
suspend fun setCredentials(username: String, password: String): Boolean {
clear()
try {
val token = auth.authToken(username, password)
sharedPrefs.edit {
putString(TOKEN_KEY, token.token)
}
} catch (e: Exception) {
return false
}
return true
}
fun clear() {
sharedPrefs.edit {
remove(TOKEN_KEY)
}
}
fun isLoggedIn(): Boolean = token() != null
private fun token(): String? {
return sharedPrefs.getString(TOKEN_KEY, null)
}
override fun intercept(chain: Interceptor.Chain): Response {
val request = chain.request()
val url = request.url
if (url.encodedPath.startsWith("/api/v2/")) {
Log.d("LELAND", "path started with apiv2!")
val token = token()
if (token != null) {
Log.d("LELAND", "token was non null!")
return chain.proceed(
request
.newBuilder()
.header("Authorization", "Token $token")
.build()
)
}
            // no stored token; the request proceeds without an Authorization header
return chain.proceed(request)
} else if (url.encodedPath.startsWith("/api/v1/")) {
// even if we are authenticated, we want to add the api key/secret to the url here
val requestBuilder = request.newBuilder()
val urlBuilder = url.newBuilder()
if (url.queryParameter("api_key") == null) {
urlBuilder.addQueryParameter("api_key", BuildConfig.ASTROBIN_API_KEY)
}
if (url.queryParameter("api_secret") == null) {
urlBuilder.addQueryParameter("api_secret", BuildConfig.ASTROBIN_API_SECRET)
}
return chain.proceed(
requestBuilder
.url(urlBuilder.build())
.build()
)
} else {
return chain.proceed(request)
}
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/theme/Type.kt<|end_filename|>
package com.example.astrobin.ui.theme
import androidx.compose.material.Typography
import androidx.compose.ui.text.TextStyle
import androidx.compose.ui.text.font.Font
import androidx.compose.ui.text.font.FontFamily
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.sp
import com.example.astrobin.R
val nunito = FontFamily(
Font(R.font.nunito_regular),
)
val lato = FontFamily(
Font(R.font.lato_regular),
)
// Set of Material typography styles to start with
val Typography = Typography(
defaultFontFamily = nunito,
h1 = TextStyle(
fontWeight = FontWeight.ExtraBold,
fontSize = 24.sp,
lineHeight = 34.sp,
letterSpacing = (-0.5).sp
),
h2 = TextStyle(
fontWeight = FontWeight.Bold,
fontSize = 36.sp,
letterSpacing = (-0.5).sp
),
h3 = TextStyle(
fontWeight = FontWeight.Bold,
fontSize = 24.sp,
letterSpacing = 0.sp
),
// h4 = TextStyle(
// fontWeight = FontWeight.Normal,
// fontSize = 34.sp,
// letterSpacing = 0.25.sp
// ),
// h5 = TextStyle(
// fontWeight = FontWeight.Normal,
// fontSize = 24.sp,
// letterSpacing = 0.sp
// ),
subtitle1 = TextStyle(
fontFamily = lato,
fontWeight = FontWeight.Bold,
fontSize = 12.sp,
lineHeight = 17.sp,
letterSpacing = 0.15.sp
),
subtitle2 = TextStyle(
fontFamily = lato,
fontWeight = FontWeight.Normal,
fontSize = 11.sp,
lineHeight = 16.sp,
letterSpacing = 0.15.sp
),
body1 = TextStyle(
fontWeight = FontWeight.Normal,
fontSize = 16.sp,
letterSpacing = 0.5.sp
),
caption = TextStyle(
fontWeight = FontWeight.ExtraBold,
fontSize = 12.sp,
letterSpacing = 0.5.sp
),
)
<|start_filename|>app/src/main/java/com/example/astrobin/api/ImageLoader.kt<|end_filename|>
package com.example.astrobin.api
import android.app.Application
import coil.ImageLoader
import coil.util.CoilUtils
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.components.SingletonComponent
import okhttp3.OkHttpClient
@Module
@InstallIn(SingletonComponent::class)
object ImageLoaderModule {
@Provides
fun imageLoader(
application: Application,
okHttpClient: OkHttpClient
) = ImageLoader.Builder(application)
.okHttpClient(
okHttpClient.newBuilder()
.cache(CoilUtils.createDefaultCache(application))
.build()
)
.build()
}
<|start_filename|>app/src/main/java/com/example/astrobin/exp/Loadable.kt<|end_filename|>
package com.example.astrobin.exp
import androidx.compose.runtime.*
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.coroutineScope
import kotlin.coroutines.CoroutineContext
data class Loadable<T>(
val value: T,
val loading: Boolean,
)
class LoadableScope<T>(
@PublishedApi
internal val state: MutableState<Loadable<T>>,
private val scope: CoroutineScope
) : CoroutineScope {
inline fun push(fn: (T) -> T) {
state.value = state.value.let { Loadable(fn(it.value), it.loading) }
}
override val coroutineContext: CoroutineContext
get() = scope.coroutineContext
}
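// Runs [build] once, applying each push to the state; loading flips to false
// once the builder and any coroutines it launched have completed.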
@Composable
fun <T> load(initial: T, build: suspend LoadableScope<T>.() -> Unit): Loadable<T> {
val result = remember { mutableStateOf(Loadable(initial, true)) }
LaunchedEffect(Unit) {
coroutineScope {
LoadableScope(result, this).build()
}
result.value = Loadable(result.value.value, false)
}
return result.value
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/Routes.kt<|end_filename|>
package com.example.astrobin.ui
object Routes {
val Top = "top"
val Latest = "latest"
val Search = "search"
val Profile = "profile"
fun User(id: Int) = "user/$id"
fun Image(hash: String) = "image/$hash"
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/screens/SearchScreen.kt<|end_filename|>
package com.example.astrobin.ui.screens
import androidx.compose.foundation.Image
import androidx.compose.foundation.border
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.text.KeyboardActions
import androidx.compose.foundation.text.KeyboardOptions
import androidx.compose.material.MaterialTheme
import androidx.compose.material.Text
import androidx.compose.material.TextField
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.ExperimentalComposeUiApi
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.platform.LocalSoftwareKeyboardController
import androidx.compose.ui.text.input.ImeAction
import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp
import androidx.navigation.NavBackStackEntry
import androidx.navigation.NavController
import androidx.paging.LoadState
import androidx.paging.Pager
import androidx.paging.PagingConfig
import androidx.paging.compose.collectAsLazyPagingItems
import androidx.paging.compose.items
import coil.compose.rememberImagePainter
import com.example.astrobin.api.AstroImage
import com.example.astrobin.api.ImageSearchPagingSource
import com.example.astrobin.api.LocalAstrobinApi
import com.example.astrobin.ui.components.ImageRow
import com.example.astrobin.ui.components.LoadingIndicator
import com.example.astrobin.ui.components.SearchBox
import com.google.accompanist.insets.statusBarsPadding
@OptIn(ExperimentalComposeUiApi::class)
@Composable
fun SearchScreen(
nav: NavController,
entry: NavBackStackEntry,
padding: PaddingValues,
) {
var searchQueryText by remember { mutableStateOf(entry.arguments?.getString("q") ?: "") }
val api = LocalAstrobinApi.current
var pagingSource by remember { mutableStateOf<ImageSearchPagingSource?>(null) }
val pager = remember {
Pager(PagingConfig(pageSize = 20)) {
ImageSearchPagingSource(
api = api,
params = if (searchQueryText.isBlank()) {
emptyMap()
} else {
// Really want description__icontains but its broken on the API
mapOf("title__icontains" to searchQueryText)
}
).also { pagingSource = it }
}
}
val keyboardController = LocalSoftwareKeyboardController.current
val results = pager.flow.collectAsLazyPagingItems()
val loadState = results.loadState
LazyColumn(
Modifier.fillMaxSize(),
contentPadding = padding
) {
item {
Spacer(Modifier.statusBarsPadding().height(48.dp))
SearchBox(
modifier = Modifier.padding(horizontal = 16.dp),
value = searchQueryText,
onValueChange = {
searchQueryText = it
},
keyboardOptions = KeyboardOptions(imeAction = ImeAction.Search),
keyboardActions = KeyboardActions {
pagingSource?.invalidate()
keyboardController?.hide()
}
)
Spacer(Modifier.height(16.dp))
}
items(results) {
ImageRow(it!!, nav)
Spacer(Modifier.height(8.dp))
}
when {
loadState.refresh is LoadState.Loading -> {
item { LoadingIndicator(Modifier.fillParentMaxSize()) }
}
loadState.append is LoadState.Loading -> {
item { LoadingIndicator(Modifier.fillMaxWidth()) }
}
loadState.refresh is LoadState.Error -> {
val e = loadState.refresh as LoadState.Error
item {
// TODO: retry?
Text(
text = e.error.localizedMessage!!,
modifier = Modifier.fillParentMaxSize(),
)
}
}
loadState.append is LoadState.Error -> {
val e = loadState.append as LoadState.Error
item {
// TODO: retry?
Text(e.error.localizedMessage!!)
}
}
}
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/api/AstroComment.kt<|end_filename|>
package com.example.astrobin.api
import com.squareup.moshi.Json
data class AstroComment(
@field:Json(name="id") val id: Int,
@field:Json(name="content_type") val content_type: Int,
@field:Json(name="object_id") val object_id: Int,
@field:Json(name="author") val author: Int,
@field:Json(name="author_avatar") val author_avatar: String,
@field:Json(name="text") val text: String,
@field:Json(name="html") val html: String,
@field:Json(name="created") val created: String,
@field:Json(name="updated") val updated: String,
@field:Json(name="parent") val parent: Int?,
@field:Json(name="deleted") val deleted: Boolean,
@field:Json(name="pending_moderation") val pending_moderation: Boolean?,
@field:Json(name="moderator") val moderator: Int?,
@field:Json(name="likes") val likes: List<Int>?,
@field:Json(name="depth") val depth: Int,
) {
private val mutableChildren = mutableListOf<AstroComment>()
val children: List<AstroComment> get() = mutableChildren
companion object {
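        // Rebuilds the reply tree from the flat API list by attaching each comment
        // to its parent; returns only the top-level comments, with children nested inside.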
fun collect(comments: List<AstroComment>): List<AstroComment> {
if (comments.isEmpty()) return comments
val commentById = comments.associateBy { it.id }
val result = mutableListOf<AstroComment>()
// !assumes comments are sorted by creation date
for (comment in comments) {
if (comment.parent == null) {
result.add(comment)
} else {
commentById[comment.parent]!!.mutableChildren.add(comment)
}
}
return result
}
}
}
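// Depth-first flattens a tree into [target]; [fn] supplies each node's children.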
fun <T> List<T>.deepFlattenInto(target: MutableList<T>, fn: (T) -> List<T>): List<T> {
forEach {
target.add(it)
fn(it).deepFlattenInto(target, fn)
}
return target
}
<|start_filename|>app/src/main/java/com/example/astrobin/api/AstroUser.kt<|end_filename|>
package com.example.astrobin.api
import com.squareup.moshi.Json
data class AstroUser(
@field:Json(name="id") val id: Int,
@field:Json(name="userprofile") val userprofile: Int?,
@field:Json(name="username") val username: String,
@field:Json(name="first_name") val first_name: String,
@field:Json(name="avatar") val avatar: String,
@field:Json(name="last_login") val last_login: String,
@field:Json(name="date_joined") val date_joined: String,
@field:Json(name="is_superuser") val is_superuser: Boolean,
@field:Json(name="is_staff") val is_staff: Boolean,
@field:Json(name="is_active") val is_active: Boolean,
// @field:Json(name="groups") val groups: AuthGroupInterface[],
// @field:Json(name="userPermissions") val userPermissions: PermissionInterface[],
) {
val displayName: String
get() = username // TODO: ask salvatore about adding full name or display name
}
data class AstroUserProfile(
@field:Json(name="id") val id: Int,
@field:Json(name="username") val username: String,
@field:Json(name="real_name") val real_name: String?,
// stats
@field:Json(name="followers_count") val followers_count: Int,
@field:Json(name="following_count") val following_count: Int,
@field:Json(name="post_count") val post_count: Int,
@field:Json(name="received_likes_count") val received_likes_count: Int,
@field:Json(name="image_count") val image_count: Int,
// bio
@field:Json(name="about") val about: String?,
@field:Json(name="hobbies") val hobbies: String?,
@field:Json(name="website") val website: String?,
@field:Json(name="job") val job: String?,
@field:Json(name="date_joined") val date_joined: String,
@field:Json(name="language") val language: String,
@field:Json(name="avatar") val avatar: String?,
@field:Json(name="resource_uri") val resource_uri: String,
) {
val display_name: String get() = real_name ?: "@$username"
val url_avatar: String get() {
        // Since we are faking some avatar data temporarily, I am special-casing my avatar for demo purposes.
if (id == 93620) return "https://cdn.astrobin.com/images/avatars/2/7/93669/resized/194/65616d9d63bbe81142157196d34396f4.png"
return avatar ?: avatarUrl(username)
}
}
fun avatarUrl(username: String): String {
return "https://i.pravatar.cc/300?u=$username"
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/screens/TopScreen.kt<|end_filename|>
package com.example.astrobin.ui.screens
import androidx.compose.foundation.Image
import androidx.compose.foundation.border
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.material.CircularProgressIndicator
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import androidx.navigation.NavController
import com.example.astrobin.api.LocalAstrobinApi
import com.example.astrobin.api.TopPick
import androidx.compose.material.MaterialTheme
import androidx.compose.runtime.remember
import androidx.compose.ui.Alignment
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.unit.dp
import androidx.paging.LoadState
import androidx.paging.Pager
import androidx.paging.PagingConfig
import coil.compose.rememberImagePainter
import com.google.accompanist.insets.statusBarsPadding
import com.example.astrobin.api.*
import androidx.paging.compose.collectAsLazyPagingItems
import androidx.paging.compose.items
import com.example.astrobin.ui.components.LoadingIndicator
import com.example.astrobin.ui.components.TopPickRow
@Composable
fun TopScreen(
padding: PaddingValues,
nav: NavController
) {
val api = LocalAstrobinApi.current
val pager = remember { Pager(PagingConfig(pageSize = 20)) { TopPickPagingSource(api) } }
val topPicks = pager.flow.collectAsLazyPagingItems()
val loadState = topPicks.loadState
LazyColumn(Modifier.fillMaxSize(), contentPadding = padding) {
item { Spacer(Modifier.statusBarsPadding()) }
item {
Text(
"Top Picks",
style = MaterialTheme.typography.h1,
modifier = Modifier.padding(start = 16.dp, bottom = 16.dp)
)
}
items(topPicks) {
TopPickRow(it!!, nav)
Spacer(Modifier.height(8.dp))
}
when {
loadState.refresh is LoadState.Loading -> {
item { LoadingIndicator(Modifier.fillParentMaxSize()) }
}
loadState.append is LoadState.Loading -> {
item { LoadingIndicator(Modifier.fillMaxWidth()) }
}
loadState.refresh is LoadState.Error -> {
val e = loadState.refresh as LoadState.Error
item {
// TODO: retry?
Text(
text = e.error.localizedMessage!!,
modifier = Modifier.fillParentMaxSize(),
)
}
}
loadState.append is LoadState.Error -> {
val e = loadState.append as LoadState.Error
item {
// TODO: retry?
Text(e.error.localizedMessage!!)
}
}
}
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/screens/FullScreen.kt<|end_filename|>
package com.example.astrobin.ui.screens
import androidx.compose.foundation.Image
import androidx.compose.foundation.background
import androidx.compose.foundation.gestures.awaitFirstDown
import androidx.compose.foundation.gestures.calculatePan
import androidx.compose.foundation.gestures.calculateZoom
import androidx.compose.foundation.gestures.forEachGesture
import androidx.compose.foundation.layout.*
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.outlined.Layers
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.graphicsLayer
import androidx.compose.ui.input.pointer.pointerInput
import androidx.compose.ui.unit.dp
import androidx.navigation.NavController
import coil.compose.rememberImagePainter
import com.example.astrobin.ui.components.*
@Composable
fun FullScreen(
hd: String,
solution: String?,
w: Int,
h: Int,
padding: PaddingValues,
nav: NavController
) {
var annotations by remember { mutableStateOf(false) }
var scale by remember { mutableStateOf(1f) }
var offsetX by remember { mutableStateOf(0f) }
var offsetY by remember { mutableStateOf(0f) }
val aspectRatio = w.toFloat() / h.toFloat()
val zoomModifier = Modifier.graphicsLayer {
scaleX = scale
scaleY = scale
translationX = offsetX
translationY = offsetY
}
Box(
Modifier
.fillMaxSize()
.pointerInput(Unit) {
forEachGesture {
awaitPointerEventScope {
awaitFirstDown()
do {
val event = awaitPointerEvent()
scale *= event.calculateZoom()
val offset = event.calculatePan()
offsetX += offset.x
offsetY += offset.y
} while (event.changes.any { it.pressed })
}
}
}
.background(Color.Black)
) {
val regularPainter = rememberImagePainter(hd)
Image(
modifier = Modifier
.fillMaxWidth()
.aspectRatio(aspectRatio)
.align(Alignment.Center)
.then(zoomModifier),
painter = regularPainter,
contentDescription = "Full Image",
)
if (solution != null) {
val annotatedPainter = rememberImagePainter(solution)
if (annotations) {
Image(
modifier = Modifier
.fillMaxWidth()
.aspectRatio(aspectRatio)
.align(Alignment.Center)
.then(zoomModifier),
painter = annotatedPainter,
contentDescription = "Full Image",
)
}
AstroButton(
icon = Icons.Outlined.Layers,
selected = annotations,
onClick = { annotations = !annotations },
modifier = Modifier
.align(Alignment.BottomStart)
.padding(24.dp)
,
)
}
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/api/ApiTest.kt<|end_filename|>
package com.example.astrobin.api
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.padding
import androidx.compose.material.Text
import androidx.compose.runtime.*
import androidx.compose.ui.Modifier
import androidx.compose.ui.unit.dp
import com.example.astrobin.exp.load
import com.example.astrobin.exp.loadFlow
import kotlinx.coroutines.*
import kotlinx.coroutines.channels.ProducerScope
import kotlinx.coroutines.flow.*
import kotlin.coroutines.CoroutineContext
@Composable
fun ApiTest() {
ImageDetail2(imageHash = "r259z7")
// val api = LocalAstrobinApi.current
// LaunchedEffect(Unit) {
// val userId = 24520 // Min Xie
// val userProfileId = 93620 // leland
// val username = "Lrichardson"
// val imageHash = "r259z7"
// val imageId = 681332
// val imageContentType = 19
//// api.login("Lrichardson", "<redacted>")
// val a = api.user(userId)
// val b = api.userProfile(username)
// val c = api.userProfile(userProfileId)
// val d = api.image(imageId)
// val e = api.image(imageHash)
// val f = api.imageOld(imageHash)
// val g = api.plateSolve(imageContentType, imageId)
// val h = api.comments(imageContentType, imageId)
// }
}
data class ImageModel(
val image: AstroImageV2? = null,
val author: AstroUser? = null,
val plateSolve: PlateSolve? = null,
val comments: List<AstroComment>? = null,
) {
companion object {
val Empty = ImageModel()
}
}
@Composable fun ImageDetail(imageHash: String) {
val api = LocalAstrobinApi.current
val (model, loading) = load(ImageModel.Empty) {
val image = api.image(imageHash)
push { it.copy(image = image) }
launch {
val author = api.user(image.user) // -> userprofile?
push { it.copy(author = author) }
}
launch {
val plateSolve = api.plateSolve(19, image.pk)
push { it.copy(plateSolve = plateSolve) }
}
launch {
val comments = api.comments(19, image.pk)
push { it.copy(comments = comments) }
}
}
ImageDetail(loading, model.image, model.author, model.plateSolve, model.comments)
}
@Composable fun ImageDetail2(imageHash: String) {
val api = LocalAstrobinApi.current
val (model, loading) = loadFlow(ImageModel.Empty) {
val image = api.image(imageHash)
send { it.copy(image = image) }
launch {
val author = api.user(image.user) // -> userprofile?
send { it.copy(author = author) }
}
launch {
val plateSolve = api.plateSolve(19, image.pk)
send { it.copy(plateSolve = plateSolve) }
}
launch {
val comments = api.comments(19, image.pk)
send { it.copy(comments = comments) }
}
}
ImageDetail(loading, model.image, model.author, model.plateSolve, model.comments)
}
@Composable fun ImageDetail(
loading: Boolean,
image: AstroImageV2?,
author: AstroUser?,
plateSolve: PlateSolve?,
comments: List<AstroComment>?,
) {
Column(Modifier.padding(top=50.dp)) {
Text("Loading?: $loading")
Text("Image?: ${image?.pk}")
Text("Author?: ${author?.username}")
Text("PlateSolve?: ${plateSolve?.dec}")
Text("Comments?: ${comments?.size}")
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/AstrobinUi.kt<|end_filename|>
package com.example.astrobin
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.*
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.*
import androidx.compose.runtime.Composable
import androidx.compose.runtime.CompositionLocalProvider
import androidx.compose.runtime.getValue
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.geometry.Offset
import androidx.compose.ui.graphics.Brush
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.vector.ImageVector
import androidx.compose.ui.layout.SubcomposeLayout
import androidx.compose.ui.unit.LayoutDirection
import androidx.compose.ui.unit.dp
import androidx.navigation.*
import androidx.navigation.NavDestination.Companion.hierarchy
import androidx.navigation.NavGraph.Companion.findStartDestination
import androidx.navigation.compose.NavHost
import androidx.navigation.compose.composable
import androidx.navigation.compose.currentBackStackEntryAsState
import androidx.navigation.compose.rememberNavController
import coil.ImageLoader
import coil.compose.LocalImageLoader
import com.example.astrobin.api.AstrobinApi
import com.example.astrobin.api.LocalAstrobinApi
import com.example.astrobin.api.ApiTest
import com.example.astrobin.api.Astrobin
import com.example.astrobin.ui.Routes
import com.example.astrobin.ui.screens.*
import com.example.astrobin.ui.theme.AstrobinTheme
import com.example.astrobin.ui.theme.DarkBlue
import com.example.astrobin.ui.theme.Yellow
import com.google.accompanist.insets.ProvideWindowInsets
import com.google.accompanist.insets.navigationBarsPadding
import com.google.accompanist.insets.statusBarsHeight
import com.google.accompanist.insets.statusBarsPadding
@Composable fun Astrobin(api: Astrobin, imageLoader: ImageLoader) {
CompositionLocalProvider(
LocalAstrobinApi provides api,
LocalImageLoader provides imageLoader
) {
val nav = rememberNavController()
val current by nav.currentBackStackEntryAsState()
AstrobinTheme {
AstroAppWindow(
Modifier
.fillMaxSize()
.navigationBarsPadding(),
top = {
if (current != null && nav.previousBackStackEntry != null) {
IconButton(
modifier = Modifier.statusBarsPadding().padding(8.dp),
onClick = { nav.popBackStack() }
) {
Icon(Icons.Filled.ArrowBack, contentDescription = "Back", tint = Color.White)
}
}
},
bottom = {
if (current?.destination?.route?.startsWith("fullscreen") != true) {
AstrobinBottomNav(nav)
}
},
) { padding ->
NavHost(nav, startDestination = "home") {
composable("home") {
// UserScreen(93620, padding, nav)
// ImageScreen("r259z7", padding, nav)
// ApiTest()
TopScreen(padding = padding, nav = nav)
// SearchScreen(nav = nav, entry = it, padding = padding)
}
composable("profile") {
// NOTE: hardcoded to leland's user id for demo purposes
UserScreen(93620, padding, nav)
}
composable(
"user/{id}",
listOf(navArgument("id") { type = NavType.IntType })
) {
UserScreen(it.arguments?.getInt("id")!!, padding, nav)
}
composable(
"image/{hash}",
listOf(navArgument("hash") { type = NavType.StringType })
) {
ImageScreen(it.arguments!!.getString("hash")!!, padding, nav)
}
composable(
"search?q={q}",
listOf(navArgument("q") { type = NavType.StringType })
) {
SearchScreen(nav = nav, entry = it, padding)
}
composable(
"fullscreen?hd={hd}&solution={solution}&w={w}&h={h}",
listOf(
navArgument("hd") { type = NavType.StringType },
navArgument("solution") { type = NavType.StringType; nullable = true },
navArgument("w") { type = NavType.IntType },
navArgument("h") { type = NavType.IntType },
)
) {
FullScreen(
it.arguments!!.getString("hd")!!,
it.arguments?.getString("solution"),
it.arguments!!.getInt("w"),
it.arguments!!.getInt("h"),
padding,
nav
)
}
composable(Routes.Top) {
TopScreen(padding, nav)
}
composable(Routes.Search) {
SearchScreen(nav, entry = it, padding)
}
}
}
}
}
}
@Composable fun RowScope.AstrobinBottomNavigationItem(
route: String,
name: String,
icon: ImageVector,
nav: NavController,
current: NavDestination?,
) {
BottomNavigationItem(
icon = { Icon(icon, contentDescription = null) },
label = { Text(name) },
selected = current?.hierarchy?.any { it.route == route } == true,
onClick = {
nav.navigate(route) {
popUpTo(nav.graph.findStartDestination().id) {
saveState = true
}
launchSingleTop = true
restoreState = true
}
},
selectedContentColor = Yellow,
unselectedContentColor = Color.White,
)
}
@OptIn(ExperimentalMaterialApi::class)
@Composable fun AstrobinBottomNav(nav: NavController) {
Box {
val navBackStackEntry by nav.currentBackStackEntryAsState()
val current = navBackStackEntry?.destination
BottomNavigation(
Modifier
.clip(RoundedCornerShape(topStart = 50.dp, topEnd = 50.dp))
.align(Alignment.BottomCenter),
backgroundColor = Color.Black,
) {
AstrobinBottomNavigationItem(
route = Routes.Top,
name = "TOP",
icon = Icons.Filled.Star,
nav = nav,
current = current
)
Spacer(Modifier.width(48.dp))
AstrobinBottomNavigationItem(
route = Routes.Profile,
name = "PROFILE",
icon = Icons.Filled.Person,
nav = nav,
current = current
)
}
Surface(
modifier = Modifier
.align(Alignment.BottomCenter),
elevation = 40.dp,
shape = RoundedCornerShape(topStart = 50.dp, topEnd = 50.dp),
color = DarkBlue,
contentColor = Color.White,
onClick = {
nav.navigate(Routes.Search) {
popUpTo(nav.graph.findStartDestination().id) {
saveState = true
}
launchSingleTop = true
restoreState = true
}
}
) {
Icon(
Icons.Filled.Search,
contentDescription = null,
tint = DarkBlue,
modifier = Modifier
.padding(
start = 20.dp,
end = 20.dp,
top = 16.dp,
bottom = 20.dp
)
.background(Yellow, CircleShape)
.padding(8.dp)
.size(30.dp)
)
}
}
}
private val mainWindowGradient = Brush.linearGradient(
listOf(Color.Black, DarkBlue),
start = Offset.Zero,
end = Offset(0f, Float.POSITIVE_INFINITY)
)
private enum class AstroScaffoldLayoutContent { TopBar, MainContent, BottomBar }
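// Minimal scaffold built with SubcomposeLayout: the top bar floats over the body
// (its height is not subtracted), and the bottom bar height is passed down as content padding.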
@Composable fun AstroAppWindow(
modifier: Modifier = Modifier,
top: @Composable () -> Unit,
bottom: @Composable () -> Unit,
content: @Composable (PaddingValues) -> Unit
) {
ProvideWindowInsets {
CompositionLocalProvider(
LocalContentColor provides Color.White,
) {
SubcomposeLayout(
modifier.background(mainWindowGradient)
) { constraints ->
val layoutWidth = constraints.maxWidth
val layoutHeight = constraints.maxHeight
val looseConstraints = constraints.copy(minWidth = 0, minHeight = 0)
layout(layoutWidth, layoutHeight) {
val topBarPlaceables = subcompose(AstroScaffoldLayoutContent.TopBar, top)
.map { it.measure(looseConstraints) }
// hard code to 0 as we want the top bar to "float" on top of things
val topBarHeight = 0 // topBarPlaceables.maxByOrNull { it.height }?.height ?: 0
val bottomBarPlaceables = subcompose(AstroScaffoldLayoutContent.BottomBar, bottom)
.map { it.measure(looseConstraints) }
val bottomBarHeight = bottomBarPlaceables.maxByOrNull { it.height }?.height ?: 0
val bodyContentHeight = layoutHeight - topBarHeight
val bodyContentPlaceables = subcompose(AstroScaffoldLayoutContent.MainContent) {
val innerPadding = PaddingValues(bottom = bottomBarHeight.toDp())
content(innerPadding)
}.map { it.measure(looseConstraints.copy(maxHeight = bodyContentHeight)) }
// Placing to control drawing order to match default elevation of each placeable
bodyContentPlaceables.forEach {
it.place(0, topBarHeight)
}
topBarPlaceables.forEach {
it.place(0, 0)
}
// The bottom bar is always at the bottom of the layout
bottomBarPlaceables.forEach {
it.place(0, layoutHeight - bottomBarHeight)
}
}
}
}
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/components/Buttons.kt<|end_filename|>
package com.example.astrobin.ui.components
import android.graphics.Paint
import androidx.compose.foundation.BorderStroke
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.PaddingValues
import androidx.compose.foundation.layout.Row
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.*
import androidx.compose.runtime.Composable
import androidx.compose.runtime.State
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.vector.ImageVector
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.dp
import com.example.astrobin.ui.theme.DarkBlue
import com.example.astrobin.ui.theme.Yellow
@OptIn(ExperimentalMaterialApi::class)
@Composable
fun AstroButton(
icon: ImageVector,
onClick: () -> Unit,
selected: Boolean = false,
modifier: Modifier = Modifier,
) {
val color = if (selected) Yellow else Color.Transparent
val contentColor = if (selected) DarkBlue else Color.White
val borderColor = if (selected) Yellow else Color.White
Surface(
modifier = modifier,
shape = RoundedCornerShape(6.dp),
border = BorderStroke(1.dp, borderColor),
color = color,
contentColor = contentColor,
onClick = onClick,
) {
Icon(
icon,
contentDescription = null,
modifier = Modifier.padding(8.dp)
)
}
}
@OptIn(ExperimentalMaterialApi::class)
@Composable
fun AstroButton2(
icon: ImageVector,
label: String,
onClick: () -> Unit,
selected: Boolean = false,
modifier: Modifier = Modifier,
) {
val color = if (selected) Yellow else Color.Transparent
val contentColor = if (selected) DarkBlue else Yellow
val borderColor = Yellow
Surface(
modifier = modifier,
shape = RoundedCornerShape(10.dp),
border = BorderStroke(1.dp, borderColor),
color = color,
contentColor = contentColor,
onClick = onClick,
) {
Row(verticalAlignment = Alignment.CenterVertically) {
Icon(
icon,
contentDescription = null,
modifier = Modifier.padding(8.dp)
)
Text(
label,
modifier = Modifier.padding(end = 8.dp),
fontWeight = FontWeight.ExtraBold,
)
}
}
}
@OptIn(ExperimentalMaterialApi::class)
@Composable
fun CountButton(
icon: ImageVector,
label: String,
onClick: () -> Unit,
selected: Boolean = false,
modifier: Modifier = Modifier,
) {
val color = if (selected) Yellow else Color.White
Row(
modifier = modifier
.clickable(onClick = onClick)
.padding(8.dp),
verticalAlignment = Alignment.CenterVertically,
) {
Icon(
icon,
contentDescription = null,
tint = color,
modifier = Modifier.padding(end = 4.dp)
)
Text(
label,
color = color,
)
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/components/AstroImageRow.kt<|end_filename|>
package com.example.astrobin.ui.components
import androidx.compose.foundation.Image
import androidx.compose.foundation.background
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.MaterialTheme
import androidx.compose.material.Text
import androidx.compose.runtime.Composable
import androidx.compose.runtime.produceState
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.geometry.Offset
import androidx.compose.ui.graphics.*
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp
import androidx.compose.ui.unit.sp
import androidx.navigation.NavController
import coil.compose.rememberImagePainter
import com.example.astrobin.api.AstroImage
import com.example.astrobin.api.LocalAstrobinApi
import com.example.astrobin.api.TopPick
private val imageForegroundGradient = Brush.linearGradient(
0.5f to Color.Transparent,
1.0f to Color.Black,
start = Offset.Zero,
end = Offset(0f, Float.POSITIVE_INFINITY)
)
private val imageRoundedCornerShape = RoundedCornerShape(8.dp)
private val imageModifier = Modifier
.fillMaxWidth()
.aspectRatio(16f / 9f)
.clip(imageRoundedCornerShape)
.background(Color.Black)
.foreground(imageForegroundGradient)
@Composable
fun AstroImage(
imageUrl: String,
onClick: () -> Unit,
modifier: Modifier = Modifier,
) {
Image(
painter = rememberImagePainter(imageUrl),
contentDescription = "",
contentScale = ContentScale.FillWidth,
// Bug here if I don't specify a size, I want fillWidth(). :(
modifier = modifier
.then(imageModifier)
.clickable(onClick = onClick)
)
}
@Composable
fun AstroImageWithContent(
imageUrl: String,
onClick: () -> Unit,
modifier: Modifier = Modifier,
content: @Composable BoxScope.() -> Unit,
) {
Box(modifier) {
AstroImage(
imageUrl,
onClick,
)
content()
}
}
@Composable
fun TopPickRow(image: TopPick, nav: NavController) {
val api = LocalAstrobinApi.current
    // Keyed on the hash so the row re-fetches if it is reused for a different pick.
    val fullImage = produceState<AstroImage?>(null, image.hash) {
        value = api.imageOld(image.hash)
    }.value
AstroImageWithContent(
modifier = Modifier.padding(horizontal = 16.dp),
imageUrl = image.url_regular,
onClick = { nav.navigate("image/${image.hash}")}
) {
if (fullImage != null) {
Text(
fullImage.title ?: "",
style = MaterialTheme.typography.h1,
maxLines = 1,
fontSize = 18.sp,
modifier = Modifier.align(Alignment.TopStart).padding(8.dp)
)
SmallUserRow(
user = fullImage.user,
likeCount = fullImage.likes,
views = fullImage.views,
modifier = Modifier.align(Alignment.BottomStart).padding(8.dp)
)
}
}
}
@Composable
fun ImageRow(image: AstroImage, nav: NavController) {
AstroImageWithContent(
modifier = Modifier.padding(horizontal = 16.dp),
imageUrl = image.url_regular,
onClick = {
if (image.hash != null) {
nav.navigate("image/${image.hash}")
}
}
) {
Text(
text = image.title ?: "",
style = MaterialTheme.typography.h1,
color = Color.White,
maxLines = 1,
overflow = TextOverflow.Ellipsis,
modifier = Modifier.align(Alignment.TopStart).padding(8.dp)
)
SmallUserRow(
user = image.user,
likeCount = image.likes,
views = image.views,
modifier = Modifier.align(Alignment.BottomStart).padding(8.dp)
)
}
}
@Composable
fun UserImageRow(image: AstroImage, nav: NavController) {
AstroImageWithContent(
modifier = Modifier.padding(horizontal = 16.dp),
imageUrl = image.url_regular,
onClick = {
if (image.hash != null) {
nav.navigate("image/${image.hash}")
}
}
) {
Text(
text = image.title ?: "",
style = MaterialTheme.typography.h1,
color = Color.White,
maxLines = 1,
overflow = TextOverflow.Ellipsis,
modifier = Modifier.align(Alignment.BottomStart).padding(8.dp)
)
}
}
<|start_filename|>app/src/main/java/com/example/astrobin/ui/screens/ImageScreen.kt<|end_filename|>
package com.example.astrobin.ui.screens
import androidx.compose.foundation.Image
import androidx.compose.foundation.background
import androidx.compose.foundation.border
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.items
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.CircularProgressIndicator
import androidx.compose.material.Icon
import androidx.compose.material.MaterialTheme
import androidx.compose.material.Text
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Fullscreen
import androidx.compose.material.icons.outlined.BookmarkBorder
import androidx.compose.material.icons.outlined.Layers
import androidx.compose.material.icons.outlined.PersonAdd
import androidx.compose.material.icons.outlined.ThumbUp
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.vector.ImageVector
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.dp
import androidx.navigation.NavController
import coil.compose.rememberImagePainter
import com.example.astrobin.api.*
import com.example.astrobin.exp.load
import com.example.astrobin.ui.components.*
import com.google.accompanist.flowlayout.FlowRow
import kotlinx.coroutines.flow.asFlow
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.flow.toList
import kotlinx.coroutines.launch
import java.net.URLEncoder
import java.time.OffsetDateTime
data class ImageModel(
val image: AstroImageV2? = null,
val author: AstroUserProfile? = null,
val plateSolve: PlateSolve? = null,
val comments: List<AstroComment>? = null,
val commentAuthors: Map<Int, AstroUser> = emptyMap(),
) {
companion object {
val Empty = ImageModel()
}
}
@Composable
fun ImageScreen(
hash: String,
padding: PaddingValues,
nav: NavController
) {
val api = LocalAstrobinApi.current
val (model, loading) = load(ImageModel.Empty) {
val image = api.image(hash)
push { it.copy(image = image) }
launch {
val author = api.user(image.user)
val profileId = author.userprofile ?: return@launch
val authorProfile = api.userProfile(profileId)
push { it.copy(author = authorProfile) }
}
launch {
val plateSolve = api.plateSolve(19, image.pk)
push { it.copy(plateSolve = plateSolve) }
}
launch {
val comments = api.comments(19, image.pk)
push { it.copy(comments = comments) }
val commentAuthors = comments.map { it.author }.distinct().asFlow().map {
api.user(it)
}.toList().associateBy { it.id }
push { it.copy(commentAuthors = commentAuthors) }
}
}
ImageScreen(
loading,
model.image,
model.author,
model.plateSolve,
model.comments,
model.commentAuthors,
padding,
nav
)
}
@Composable fun ImageScreen(
loading: Boolean,
image: AstroImageV2?,
author: AstroUserProfile?,
plateSolve: PlateSolve?,
comments: List<AstroComment>?,
commentAuthors: Map<Int, AstroUser>,
padding: PaddingValues,
nav: NavController,
) {
var annotations by remember { mutableStateOf(false) }
LazyColumn(Modifier.fillMaxSize(), contentPadding = padding) {
if (image != null) {
item {
val regularPainter = rememberImagePainter(image.url_regular)
val annotatedPainter = rememberImagePainter(plateSolve?.image_file)
Box {
Image(
modifier = Modifier
.fillMaxWidth()
.aspectRatio(image.aspectRatio),
painter = regularPainter,
contentDescription = "Full Image",
)
if (annotations) {
Image(
modifier = Modifier
.fillMaxWidth()
.aspectRatio(image.aspectRatio),
painter = annotatedPainter,
contentDescription = "Full Image",
)
}
}
}
item {
Row(
Modifier
.background(Color.Black)
.fillMaxWidth()
.padding(horizontal = 16.dp, vertical = 8.dp),
verticalAlignment = Alignment.CenterVertically,
) {
AstroButton(
icon = Icons.Filled.Fullscreen,
onClick = {
val hd = image.url_hd.urlEncode()
val solution = plateSolve?.image_file?.urlEncode() ?: ""
val w = image.w.toString()
val h = image.h.toString()
nav.navigate(
"fullscreen?hd=$hd&solution=$solution&w=$w&h=$h"
)
},
modifier = Modifier.padding(end = 8.dp),
)
AstroButton(
icon = Icons.Outlined.Layers,
selected = annotations,
onClick = { annotations = !annotations },
modifier = Modifier.padding(end = 8.dp),
)
Spacer(Modifier.weight(1f))
CountButton(
icon = Icons.Outlined.BookmarkBorder,
label = image.bookmarksCount.toString(),
selected = false,
onClick = {},
)
CountButton(
icon = Icons.Outlined.ThumbUp,
label = image.likesCount.toString(),
selected = false,
onClick = {},
)
}
}
item {
Column(Modifier.padding(horizontal = 10.dp)) {
Text(image.title ?: "", style = MaterialTheme.typography.h1)
if (author != null) {
Row(
Modifier.fillMaxWidth(),
verticalAlignment = Alignment.CenterVertically,
horizontalArrangement = Arrangement.SpaceBetween,
) {
UserRow(author, nav)
AstroButton2(
icon = Icons.Outlined.PersonAdd,
label = "Follow",
selected = false,
onClick = {},
modifier = Modifier
)
}
} else {
CircularProgressIndicator(
Modifier
.align(Alignment.CenterHorizontally)
)
}
}
}
if (plateSolve != null) {
item {
Section("What is this") {
FlowRow(mainAxisSpacing = 10.dp, crossAxisSpacing = 4.dp) {
for (subject in plateSolve.objects_in_field.split(","))
Chip(subject.trim(), onClick = { nav.navigate("search?q=${subject.trim().urlEncode()}")})
}
}
}
if (image.description != null) {
item {
Section("Description") {
Text(image.description)
}
}
}
item {
Section("Technical Card") {
TechCardItem("Declination", plateSolve.dec)
TechCardItem("Right Ascension", plateSolve.ra)
TechCardItem("Data Source", image.dataSource)
TechCardItem("Resolution", "${image.w}px x ${image.h}px")
TechCardItem("Pixel Scale", "${plateSolve.pixscale} arc-sec/px")
TechCardItem("Imaging Camera(s)", image
.imagingCameras
.joinToString(", ") { "${it.make} ${it.name}" }
)
TechCardItem("Imaging Telescope(s)", image
.imagingTelescopes
.joinToString(", ") { "${it.make} ${it.name}" }
)
}
}
item {
Section("Sky Plot") {
Image(
modifier = Modifier
.fillMaxWidth()
.aspectRatio(1f),
painter = rememberImagePainter(plateSolve.skyplot_zoom1),
contentScale = ContentScale.FillWidth,
contentDescription = "Sky Plot",
)
}
}
}
item {
Section("Histogram") {
Image(
modifier = Modifier
.fillMaxWidth()
.aspectRatio(274f / 120f),
painter = rememberImagePainter(image.url_histogram),
contentScale = ContentScale.FillWidth,
contentDescription = "Histogram",
)
}
}
if (comments != null) {
item {
Section("Comments") {}
}
items(comments, key = {it.id }) {
CommentRow(it, commentAuthors[it.author])
}
}
}
}
if (loading) {
LoadingBar(modifier = Modifier.fillMaxWidth())
}
}
@Composable fun CommentRow(comment: AstroComment, author: AstroUser?) {
Row(Modifier.padding(start=50.dp * (comment.depth-1))) {
AstroAvatar(imageUrl = comment.author_avatar)
Column(Modifier.padding(start=8.dp)) {
Row {
Text(author?.username ?: "...", fontWeight = FontWeight.Bold)
Text(timeAgo(comment.created), color=Color.Gray, modifier = Modifier.padding(start = 8.dp))
}
Text(comment.text, modifier=Modifier.padding(vertical=4.dp))
}
}
}
fun timeAgo(isoDate: String): String {
  // Coarse relative timestamp (assumes java.time is available); falls back to the raw string if parsing fails.
  val then = runCatching { java.time.OffsetDateTime.parse(isoDate) }.getOrNull() ?: return isoDate
  val days = java.time.temporal.ChronoUnit.DAYS.between(then, java.time.OffsetDateTime.now())
  return if (days <= 0L) "today" else if (days == 1L) "1 day ago" else "$days days ago"
}
@Composable fun IconCount(
count: Int,
icon: ImageVector,
contentDescription: String? = null,
) {
Row(Modifier.padding(end=12.dp)) {
Icon(
icon,
contentDescription = contentDescription,
modifier = Modifier
.padding(top = 0.dp, end = 4.dp)
.size(14.dp)
)
Text("$count", style = MaterialTheme.typography.subtitle2)
}
}
@Composable fun Section(
title: String,
fullWidth: Boolean = false,
content: @Composable () -> Unit
) {
Column(
Modifier
.padding(horizontal = if (fullWidth) 0.dp else 16.dp)
.padding(bottom = 16.dp)
) {
Text(title, style = MaterialTheme.typography.h1, modifier = Modifier.padding(bottom = 8.dp))
content()
}
}
@Composable fun Chip(
value: String,
color: Color = Color.White,
onClick: () -> Unit,
) {
Text(
value,
modifier = Modifier
.clickable(onClick = onClick)
.border(1.dp, color, RoundedCornerShape(6.dp))
.padding(4.dp, 4.dp),
style = MaterialTheme.typography.caption,
fontWeight = FontWeight.Bold,
color = color,
)
}
@Composable fun TechCardItem(
key: String,
value: String?,
) {
if (value != null) {
Row {
Text(key, fontWeight = FontWeight.Bold)
Text(": ")
Text(value)
}
}
}
fun String.urlEncode(): String = URLEncoder.encode(this, "utf-8") | lelandrichardson/astrobin-compose |
<|start_filename|>logrus_prefixed_formatter_suite_test.go<|end_filename|>
package prefixed_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"testing"
)
type LogOutput struct {
buffer string
}
func (o *LogOutput) Write(p []byte) (int, error) {
o.buffer += string(p)
return len(p), nil
}
func (o *LogOutput) GetValue() string {
return o.buffer
}
func TestLogrusPrefixedFormatter(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "LogrusPrefixedFormatter Suite")
}
| codeskipper/logrus-prefixed-formatter |
<|start_filename|>src/utils/db.js<|end_filename|>
import low from 'lowdb'
import LocalStorage from 'lowdb/adapters/LocalStorage'
import database from './database'
const adapter = new LocalStorage('bin-data')
const db = low(adapter)
db
.defaults(database)
.write()
// db.set('canvasMaps', database.canvasMaps).write()
export default db
| Chiuuuu/fastbi |
<|start_filename|>examples/hello.lua<|end_filename|>
#!/usr/bin/env lua
local fl = require( "fltk4lua" )
local window = fl.Window( 340, 180 )
local box = fl.Box( 20, 40, 300, 100, "Hello World!" )
box.box = "FL_UP_BOX"
box.labelfont = "FL_HELVETICA_BOLD_ITALIC"
box.labelsize = 36
box.labeltype = "FL_SHADOW_LABEL"
window:end_group()
window:show( arg )
print( window.xid )
print( fl.w(), fl.h() )
fl.run()
<|start_filename|>src/f4l_color_chooser.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_widget.hxx"
#include <FL/Fl_Color_Chooser.H>
#include <cstring>
namespace {
inline Fl_Color_Chooser* check_color_chooser( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_COLOR_CHOOSER_NAME );
return static_cast< Fl_Color_Chooser* >( p );
}
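/* Property lookup helper used by the __index metamethod: dispatch on the
 * length of the requested key first and only then memcmp() against the
 * known property names, so unknown keys fall through cheaply. The same
 * pattern is used by all the *_index_/*_newindex_ helpers in this binding. */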
int chooser_index_( lua_State* L, Fl_Color_Chooser* c,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 1:
switch( *key ) {
case 'r':
lua_pushnumber( L, c->r() );
return 1;
case 'g':
lua_pushnumber( L, c->g() );
return 1;
case 'b':
lua_pushnumber( L, c->b() );
return 1;
}
break;
case 3:
if( F4L_MEMCMP( key, "hue", 3 ) == 0 ) {
lua_pushnumber( L, c->hue() );
return 1;
}
break;
case 4:
if( F4L_MEMCMP( key, "mode", 4 ) == 0 ) {
lua_pushinteger( L, c->mode() );
return 1;
}
break;
case 5:
if( F4L_MEMCMP( key, "value", 5 ) == 0 ) {
lua_pushnumber( L, c->value() );
return 1;
}
break;
case 10:
if( F4L_MEMCMP( key, "saturation", 10 ) == 0 ) {
lua_pushnumber( L, c->saturation() );
return 1;
}
break;
}
return 0;
}
int chooser_newindex_( lua_State* L, Fl_Color_Chooser* c,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "mode", 4 ) == 0 ) {
c->mode( moon_checkint( L, 3, 0, 3 ) );
return 1;
}
break;
}
return 0;
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_Color_Chooser )
F4L_LUA_LLINKAGE_BEGIN
static int new_color_chooser( lua_State* L ) {
F4L_TRY( L ) {
f4l_new_widget< Fl_Color_Chooser >( L, F4L_COLOR_CHOOSER_NAME,
f4l_delete_Fl_Color_Chooser );
} F4L_CATCH( L );
return 1;
}
static int chooser_index( lua_State* L ) {
Fl_Color_Chooser* c = check_color_chooser( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !chooser_index_( L, c, key, n ) &&
!f4l_widget_index_( L, c, key, n ) &&
!f4l_bad_property( L, F4L_COLOR_CHOOSER_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int chooser_newindex( lua_State* L ) {
Fl_Color_Chooser* c = check_color_chooser( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(chooser_newindex_( L, c, key, n ) ||
f4l_widget_newindex_( L, c, key, n ) ||
f4l_bad_property( L, F4L_COLOR_CHOOSER_NAME, key ));
} F4L_CATCH( L );
return 0;
}
static int chooser_rgb( lua_State* L ) {
Fl_Color_Chooser* c = check_color_chooser( L, 1 );
double r = luaL_checknumber( L, 2 );
double g = luaL_checknumber( L, 3 );
double b = luaL_checknumber( L, 4 );
F4L_TRY( L ) {
lua_pushboolean( L, c->rgb( r, g, b ) );
} F4L_CATCH( L );
return 1;
}
static int chooser_hsv( lua_State* L ) {
Fl_Color_Chooser* c = check_color_chooser( L, 1 );
double h = luaL_checknumber( L, 2 );
double s = luaL_checknumber( L, 3 );
double v = luaL_checknumber( L, 4 );
F4L_TRY( L ) {
lua_pushboolean( L, c->hsv( h, s, v ) );
} F4L_CATCH( L );
return 1;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_Color_Chooser, Fl_Widget )
MOON_LOCAL void f4l_color_chooser_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
{ "rgb", chooser_rgb },
{ "hsv", chooser_hsv },
{ "__index", chooser_index },
{ "__newindex", chooser_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_COLOR_CHOOSER_NAME, 0, methods, 0 );
moon_defcast( L, F4L_COLOR_CHOOSER_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Color_Chooser_Fl_Widget );
f4l_new_class_table( L, "Color_Chooser", new_color_chooser );
}
<|start_filename|>src/f4l_clock.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_widget.hxx"
#include "f4l_enums.hxx"
#include <FL/Fl_Clock.H>
#include <cstring>
#include <climits>
#define TYPE_LIST( _ ) \
_( "FL_SQUARE_CLOCK", FL_SQUARE_CLOCK ) \
_( "FL_ROUND_CLOCK", FL_ROUND_CLOCK )
F4L_GEN_TYPE_ENUM( TYPE_LIST, clock )
namespace {
inline Fl_Clock_Output* check_clock( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_CLOCK_OUTPUT_NAME );
return static_cast< Fl_Clock_Output* >( p );
}
int clock_index_( lua_State* L, Fl_Clock_Output* c,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "hour", 4 ) == 0 ) {
lua_pushinteger( L, c->hour() );
return 1;
} else if( F4L_MEMCMP( key, "type", 4 ) == 0 ) {
f4l_push_type_clock( L, c->type() );
return 1;
}
break;
case 5:
if( F4L_MEMCMP( key, "value", 5 ) == 0 ) {
if( sizeof( lua_Integer ) * CHAR_BIT > 32 )
lua_pushinteger( L, c->value() );
else
lua_pushnumber( L, c->value() );
return 1;
}
break;
case 6:
if( F4L_MEMCMP( key, "minute", 6 ) == 0 ) {
lua_pushinteger( L, c->minute() );
return 1;
} else if( F4L_MEMCMP( key, "second", 6 ) == 0 ) {
lua_pushinteger( L, c->second() );
return 1;
}
break;
}
return 0;
}
int clock_newindex_( lua_State* L, Fl_Clock_Output* c,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "type", 4 ) == 0 ) {
c->type( f4l_check_type_clock( L, 3 ) );
return 1;
}
break;
case 5:
if( F4L_MEMCMP( key, "value", 5 ) == 0 ) {
if( sizeof( lua_Integer ) * CHAR_BIT > 32 )
c->value( moon_checkint( L, 3, 0, ULONG_MAX ) );
else
c->value( static_cast< ulong >( luaL_checknumber( L, 3 ) ) );
return 1;
}
break;
}
return 0;
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_Clock_Output )
F4L_DEF_DELETE( Fl_Clock )
F4L_LUA_LLINKAGE_BEGIN
static int new_clock_output( lua_State* L ) {
F4L_TRY( L ) {
f4l_new_widget< Fl_Clock_Output >( L, F4L_CLOCK_OUTPUT_NAME,
f4l_delete_Fl_Clock_Output );
} F4L_CATCH( L );
return 1;
}
static int new_clock( lua_State* L ) {
F4L_TRY( L ) {
f4l_new_widget< Fl_Clock >( L, F4L_CLOCK_NAME,
f4l_delete_Fl_Clock );
} F4L_CATCH( L );
return 1;
}
static int clock_index( lua_State* L ) {
Fl_Clock_Output* c = check_clock( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !clock_index_( L, c, key, n ) &&
!f4l_widget_index_( L, c, key, n ) &&
!f4l_bad_property( L, F4L_CLOCK_OUTPUT_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int clock_newindex( lua_State* L ) {
Fl_Clock_Output* c = check_clock( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(clock_newindex_( L, c, key, n ) ||
f4l_widget_newindex_( L, c, key, n ) ||
f4l_bad_property( L, F4L_CLOCK_OUTPUT_NAME, key ));
} F4L_CATCH( L );
return 0;
}
static int clock_setvalue( lua_State* L ) {
Fl_Clock_Output* c = check_clock( L, 1 );
int h = moon_checkint( L, 2, 0, 23 );
int m = moon_checkint( L, 3, 0, 59 );
int s = moon_checkint( L, 4, 0, 60 );
F4L_TRY( L ) {
c->value( h, m, s );
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_Clock_Output, Fl_Widget )
F4L_DEF_CAST( Fl_Clock, Fl_Clock_Output )
F4L_DEF_CAST( Fl_Clock, Fl_Widget )
MOON_LOCAL void f4l_clock_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
{ "setvalue", clock_setvalue },
{ "__index", clock_index },
{ "__newindex", clock_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_CLOCK_OUTPUT_NAME, 0, methods, 0 );
moon_defcast( L, F4L_CLOCK_OUTPUT_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Clock_Output_Fl_Widget );
moon_defobject( L, F4L_CLOCK_NAME, 0, methods, 0 );
moon_defcast( L, F4L_CLOCK_NAME, F4L_CLOCK_OUTPUT_NAME,
f4l_cast_Fl_Clock_Fl_Clock_Output );
moon_defcast( L, F4L_CLOCK_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Clock_Fl_Widget );
f4l_new_class_table( L, "Clock_Output", new_clock_output );
f4l_new_class_table( L, "Clock", new_clock );
}
<|start_filename|>src/fltk4lua.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_enums.hxx"
#include <FL/filename.H>
#include <climits>
namespace {
void get_fd_cache( lua_State * L ) {
static int xyz = 0;
lua_rawgetp( L, LUA_REGISTRYINDEX, static_cast< void* >( &xyz ) );
if( lua_isnil( L, -1 ) ) {
lua_pop( L, 1 );
lua_newtable( L );
lua_pushvalue( L, -1 );
lua_rawsetp( L, LUA_REGISTRYINDEX, static_cast< void* >( &xyz ) );
}
}
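/* The fd cache is a table in the Lua registry that maps each watched file
 * descriptor to a table of callbacks keyed by event (FL_READ, FL_WRITE,
 * FL_EXCEPT). f4l_fd_cb() below looks the Lua callback up there and invokes
 * it behind the f4l_backtrace message handler. */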
void f4l_fd_cb( FL_SOCKET fd, void* ud, int when ) {
f4l_active_L* th = static_cast< f4l_active_L* >( ud );
if( th != NULL && th->cb_L != NULL ) {
lua_State* L = th->cb_L;
luaL_checkstack( L, 4, "f4l_fd_cb" );
int top = lua_gettop( L );
lua_pushcfunction( L, f4l_backtrace ); // top+1: f4l_backtrace
get_fd_cache( L ); // top+2: cache
lua_rawgeti( L, -1, static_cast< int >( fd ) ); // top+3: fd callbacks table
lua_remove( L, -2 ); // remove cache, top+1: f4l_backtrace, fd callbacks table
lua_rawgeti( L, -1, when ); // top+3: lua callback
lua_remove( L, -2 ); // remove fd callbacks table, top+1: f4l_backtrace, lua callback
lua_pushinteger( L, static_cast< int >( fd ) ); // top+3: fd
lua_pushinteger( L, when ); // top+4: when XXX not very useful to the Lua function!
// top+1: f4l_backtrace, lua callback, fd, when
int status = lua_pcall( L, 2, 0, top + 1 );
// top+1: f4l_backtrace, err msg or nil
if( status != 0 ) {
lua_remove( L, -2 );
f4l_fix_backtrace( L );
lua_error( L );
}
lua_settop( L, top );
}
}
void f4l_fd_cb_read( FL_SOCKET fd, void* ud ) {
f4l_fd_cb( fd, ud, FL_READ );
}
void f4l_fd_cb_write( FL_SOCKET fd, void* ud ) {
f4l_fd_cb( fd, ud, FL_WRITE );
}
void f4l_fd_cb_except( FL_SOCKET fd, void* ud ) {
f4l_fd_cb( fd, ud, FL_EXCEPT );
}
} // anonymous namespace
F4L_LUA_LLINKAGE_BEGIN
static int f4l_screen_width( lua_State* L) {
F4L_TRY( L ) {
lua_pushinteger( L, Fl::w() );
} F4L_CATCH( L );
return 1;
}
static int f4l_screen_height( lua_State* L) {
F4L_TRY( L ) {
lua_pushinteger( L, Fl::h() );
} F4L_CATCH( L );
return 1;
}
static int f4l_run_( lua_State* L ) {
F4L_TRY( L ) {
lua_pushinteger( L, Fl::run() );
} F4L_CATCH( L );
return 1;
}
static int f4l_run( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_run_, 1 );
return 1;
}
static int f4l_wait_( lua_State* L ) {
lua_Number timeout = luaL_optnumber( L, 1, -1 );
luaL_argcheck( L, timeout >= -1, 1, "timeout must be -1 (no timeout) or non-negative" );
F4L_TRY( L ) {
lua_pushboolean( L, timeout < 0 ? Fl::wait() : Fl::wait( timeout ) );
} F4L_CATCH( L );
return 1;
}
static int f4l_wait( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_wait_, 1 );
return 1;
}
static int f4l_check_( lua_State* L ) {
F4L_TRY( L ) {
lua_pushboolean( L, Fl::check() );
} F4L_CATCH( L );
return 1;
}
static int f4l_check( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_check_, 1 );
return 1;
}
static int f4l_args( lua_State* L ) {
int argc = 0;
luaL_checktype( L, 1, LUA_TTABLE );
char** argv = f4l_push_argv( L, 1, &argc );
F4L_TRY( L ) {
Fl::args( argc, argv );
} F4L_CATCH( L );
return 0;
}
static int f4l_get_system_colors( lua_State* L ) {
F4L_TRY( L ) {
Fl::get_system_colors();
} F4L_CATCH( L );
return 0;
}
static int f4l_scheme( lua_State* L ) {
F4L_TRY( L ) {
if( lua_gettop( L ) > 0 ) {
size_t len = 0;
char const* s = luaL_optlstring( L, 1, NULL, &len );
luaL_argcheck( L, len < 1024, 1, "scheme name too long" );
Fl::scheme( s );
return 0;
} else {
char const* s = Fl::scheme();
if( s != NULL )
lua_pushstring( L, s );
else
lua_pushnil( L );
return 1;
}
} F4L_CATCH( L );
return 0;
}
static int f4l_redraw( lua_State* L ) {
F4L_TRY( L ) {
Fl::redraw();
} F4L_CATCH( L );
return 0;
}
static int f4l_option( lua_State* L ) {
Fl::Fl_Option o = f4l_check_option( L, 1 );
F4L_TRY( L ) {
if( lua_gettop( L ) > 1 ) {
Fl::option( o, lua_toboolean( L, 2 ) );
return 0;
} else {
lua_pushboolean( L, Fl::option( o ) );
return 1;
}
} F4L_CATCH( L );
return 0;
}
static int f4l_open_uri( lua_State* L ) {
char const* uri = luaL_checkstring( L, 1 );
F4L_TRY( L ) {
lua_pushboolean( L, fl_open_uri( uri, NULL, 0 ) );
} F4L_CATCH( L );
return 1;
}
static int f4l_add_fd( lua_State* L ) {
static int whens[] = { FL_READ, FL_WRITE, FL_EXCEPT };
static Fl_FD_Handler when_cbs[] = {
f4l_fd_cb_read, f4l_fd_cb_write, f4l_fd_cb_except
};
int fd = moon_checkint( L, 1, 0, INT_MAX );
luaL_checktype( L, 2, LUA_TFUNCTION );
int when = luaL_opt( L, f4l_check_fd_when, 3, FL_READ );
get_fd_cache( L );
if( lua_rawgeti( L, -1, fd ) == LUA_TNIL ) {
lua_pop( L, 1 );
lua_newtable( L );
lua_pushvalue( L, -1 );
lua_rawseti( L, -3, fd );
}
lua_replace( L, -2 ); // fd cache no longer needed
// Stack top contains callbacks table for this fd
void* fd_cb_user_data = static_cast< void* >( f4l_get_active_thread( L ) );
// Discard the userdata produced by f4l_get_active_thread
lua_pop( L, 1 );
F4L_TRY( L ) {
for( int i = 0; i < 3; i++ ) {
if ( when & whens[i] ) {
lua_pushvalue( L, 2 );
lua_rawseti( L, -2, whens[i] );
Fl::add_fd( fd, whens[i], when_cbs[i], fd_cb_user_data );
}
}
} F4L_CATCH( L );
return 0;
}
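/* Illustrative Lua usage (a sketch; the exact form of the optional `when`
 * argument depends on f4l_check_fd_when):
 *
 *   local fl = require( "fltk4lua" )
 *   fl.add_fd( sock_fd, function( fd, when )
 *     -- read from sock_fd; runs from within fl.run()/fl.wait()
 *   end )                   -- event defaults to FL_READ
 *   fl.remove_fd( sock_fd ) -- drops all callbacks registered for sock_fd
 */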
static int f4l_remove_fd( lua_State* L ) {
static int whens[] = { FL_READ, FL_WRITE, FL_EXCEPT };
int fd = moon_checkint( L, 1, 0, INT_MAX );
int when = luaL_opt( L, f4l_check_fd_when, 2, 0 );
// The code here is pretty protective since Fl::remove_fd() crashes when
// removing callbacks for events that were not set. Fl::add_fd() doesn’t
// seem picky at all, though.
F4L_TRY( L ) {
get_fd_cache( L );
if( lua_rawgeti( L, -1, fd ) == LUA_TNIL ) {
// No event set for this FD at all, do nothing.
} else if( when == 0 ) {
// Remove all events
lua_pop( L, 1 );
lua_pushnil( L );
lua_rawseti( L, -2, fd );
Fl::remove_fd( fd );
} else {
bool empty = true;
for( int i = 0; i < 3; i++ ) {
// Is there a cb for this event?
if( lua_rawgeti( L, -1, whens[i] ) != LUA_TNIL ) {
// Are we removing this cb?
if( when & whens[i] ) {
lua_pushnil( L );
lua_rawseti( L, -3, whens[i] );
Fl::remove_fd( fd, whens[i] );
} else {
// This cb stays, so there’s at least one cb left for this fd
empty = false;
}
}
lua_pop( L, 1 ); // remove callback (or nil)
}
lua_pop( L, 1 ); // remove table of callbacks for this fd
if( empty ) {
// No more cb for this fd, remove the table of this fd from cache
lua_pushnil( L );
lua_rawseti( L, -2, fd );
}
}
} F4L_CATCH( L );
return 0;
}
/* registered via moon_atexit to make sure that all widgets
* are deleted, even those collected during a callback */
static int delete_remaining_widgets( lua_State* L ) {
int* v = static_cast< int* >( lua_touserdata( L, 1 ) );
if( *v > 0 ) {
F4L_TRY( L ) {
Fl::check();
} F4L_CATCH( L );
}
return 0;
}
F4L_LUA_LLINKAGE_END
MOON_LOCAL void f4l_delete_widget( Fl_Widget* w ) {
f4l_active_L* ud = static_cast< f4l_active_L* >( w->user_data() );
if( ud != NULL && ud->cb_L != NULL )
Fl::delete_widget( w );
else
delete w;
}
MOON_LOCAL void f4l_delete_widget( Fl_Group* g ) {
for( int i = g->children(); i > 0; --i )
g->remove( i-1 );
f4l_delete_widget( static_cast< Fl_Widget* >( g ) );
}
MOON_LOCAL char f4l_check_char( lua_State* L, int idx ) {
size_t len = 0;
char const* s = luaL_checklstring( L, idx, &len );
luaL_argcheck( L, len == 1, idx, "single character expected" );
return *s;
}
MOON_LOCAL int f4l_backtrace( lua_State* L ) {
char const* msg = lua_tostring( L, 1 );
if( msg != NULL )
luaL_traceback( L, L, msg, 1 );
return 1;
}
MOON_LOCAL void f4l_fix_backtrace( lua_State* L ) {
size_t n = 0;
char const* msg = lua_tolstring( L, -1, &n );
if( msg != NULL && n > 0 ) {
/* figure out our current stack level */
int lvl = 0;
lua_Debug d;
while( lua_getstack( L, lvl, &d ) )
++lvl;
/* remove the last lines of the stack trace that belongs to the
* levels below us */
char const* p = msg + n - 1;
int cnt = 2;
while( p != msg && (*p != '\n' || cnt < lvl) ) {
if( *p == '\n' )
++cnt;
--p;
}
if( p != msg ) {
lua_pushlstring( L, msg, p-msg );
lua_replace( L, -2 );
}
}
}
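/* Returns the singleton f4l_active_L userdata kept in the Lua registry,
 * creating it (together with a registry reference slot) on first use.
 * f4l_set_active_thread() below stores the currently running thread in that
 * slot, presumably to anchor it against collection while FLTK callbacks can
 * still run in it. */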
MOON_LOCAL f4l_active_L* f4l_get_active_thread( lua_State* L ) {
static char xyz = 0; // used as a unique key
luaL_checkstack( L, 3, "f4l_get_active_thread" );
lua_rawgetp( L, LUA_REGISTRYINDEX, static_cast< void* >( &xyz ) );
if( lua_type( L, -1 ) != LUA_TUSERDATA ) {
lua_pop( L, 1 );
void* p = lua_newuserdata( L, sizeof( f4l_active_L ) );
f4l_active_L* sp = static_cast< f4l_active_L* >( p );
sp->L = NULL;
sp->cb_L = NULL;
lua_pushvalue( L, -1 );
lua_rawsetp( L, LUA_REGISTRYINDEX, static_cast< void* >( &xyz ) );
lua_pushvalue( L, -1 ); // used as a place holder, since L is NULL
sp->thread_ref = luaL_ref( L, LUA_REGISTRYINDEX );
return sp;
} else
return static_cast< f4l_active_L* >( lua_touserdata( L, -1 ) );
}
MOON_LOCAL void f4l_set_active_thread( lua_State* L ) {
f4l_active_L* th = f4l_get_active_thread( L );
lua_pushthread( L );
lua_rawseti( L, LUA_REGISTRYINDEX, th->thread_ref );
lua_pop( L, 1 );
}
/* the following function exploits implementation details in FLTK
* (that Fl_Window::show( int, char** ) does not modify the arguments)
* and in Lua (that strings stored in a table cannot move in memory)!
*/
MOON_LOCAL char** f4l_push_argv( lua_State* L, int idx, int* argc ) {
int n = luaL_len( L, idx );
char** argv = (char**)lua_newuserdata( L, sizeof( char* ) * (n+2) );
if( lua_rawgeti( L, idx, 0 ) == LUA_TSTRING ) {
argv[ 0 ] = const_cast< char* >( lua_tostring( L, -1 ) );
} else
argv[ 0 ] = const_cast< char* >( "fltk4lua" );
lua_pop( L, 1 );
argv[ n+1 ] = NULL;
int i = 1;
for( ; i <= n; ++i ) {
if( lua_rawgeti( L, idx, i ) == LUA_TSTRING ) {
argv[ i ] = const_cast< char* >( lua_tostring( L, -1 ) );
lua_pop( L, 1 );
} else {
argv[ i ] = NULL;
lua_pop( L, 1 );
break;
}
}
*argc = i;
return argv;
}
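/* Builds a class table (optionally populated with static methods) whose
 * metatable's __call invokes the given constructor, and assigns it as
 * field `name` of the table below it on the stack (the module table). */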
MOON_LOCAL void f4l_new_class_table( lua_State* L, char const* name,
lua_CFunction constructor,
luaL_Reg const* smethods ) {
luaL_checkstack( L, 3, "f4l_new_class_table" );
lua_newtable( L );
if( smethods != NULL )
luaL_setfuncs( L, smethods, 0 );
if( constructor != 0 ) {
lua_createtable( L, 0, 1 );
lua_pushcfunction( L, constructor );
lua_setfield( L, -2, "__call" );
lua_setmetatable( L, -2 );
}
lua_setfield( L, -2, name );
}
MOON_LOCAL void f4l_add_properties( lua_State* L, int udidx, int tidx ) {
udidx = lua_absindex( L, udidx );
tidx = lua_absindex( L, tidx );
lua_pushnil( L );
while( lua_next( L, tidx ) != 0 ) {
if( lua_type( L, -2 ) != LUA_TSTRING )
lua_pop( L, 1 ); // ignore non-string keys
else {
lua_pushvalue( L, -2 );
lua_insert( L, -2 );
lua_settable( L, udidx );
}
}
}
MOON_LOCAL int (f4l_bad_property)( lua_State* L, char const* tname,
char const* pname ) {
/* raise an error when unknown properties are encountered */
return luaL_error( L, "bad property '%s' for '%s' object",
pname, tname );
}
/* setup functions defined in the other source files */
MOON_LOCAL void f4l_enums_setup( lua_State* L );
MOON_LOCAL void f4l_ask_setup( lua_State* L );
MOON_LOCAL void f4l_shared_image_setup( lua_State* L );
MOON_LOCAL void f4l_widget_setup( lua_State* L );
MOON_LOCAL void f4l_box_setup( lua_State* L );
MOON_LOCAL void f4l_button_setup( lua_State* L );
MOON_LOCAL void f4l_chart_setup( lua_State* L );
MOON_LOCAL void f4l_clock_setup( lua_State* L );
MOON_LOCAL void f4l_group_setup( lua_State* L );
MOON_LOCAL void f4l_browser_setup( lua_State* L );
MOON_LOCAL void f4l_file_browser_setup( lua_State* L );
MOON_LOCAL void f4l_check_browser_setup( lua_State* L );
MOON_LOCAL void f4l_color_chooser_setup( lua_State* L );
MOON_LOCAL void f4l_input_choice_setup( lua_State* L );
MOON_LOCAL void f4l_pack_setup( lua_State* L );
MOON_LOCAL void f4l_scroll_setup( lua_State* L );
MOON_LOCAL void f4l_spinner_setup( lua_State* L );
MOON_LOCAL void f4l_tabs_setup( lua_State* L );
MOON_LOCAL void f4l_tile_setup( lua_State* L );
MOON_LOCAL void f4l_window_setup( lua_State* L );
MOON_LOCAL void f4l_wizard_setup( lua_State* L );
MOON_LOCAL void f4l_input_setup( lua_State* L );
MOON_LOCAL void f4l_menu_setup( lua_State* L );
MOON_LOCAL void f4l_choice_setup( lua_State* L );
MOON_LOCAL void f4l_menu_bar_setup( lua_State* L );
MOON_LOCAL void f4l_menu_button_setup( lua_State* L );
MOON_LOCAL void f4l_progress_setup( lua_State* L );
MOON_LOCAL void f4l_adjuster_setup( lua_State* L );
MOON_LOCAL void f4l_counter_setup( lua_State* L );
MOON_LOCAL void f4l_dial_setup( lua_State* L );
MOON_LOCAL void f4l_roller_setup( lua_State* L );
MOON_LOCAL void f4l_slider_setup( lua_State* L );
MOON_LOCAL void f4l_value_input_setup( lua_State* L );
MOON_LOCAL void f4l_value_output_setup( lua_State* L );
F4L_API int luaopen_fltk4lua( lua_State* L ) {
luaL_Reg const functions[] = {
{ "w", f4l_screen_width },
{ "h", f4l_screen_height },
{ "run", f4l_run },
{ "wait", f4l_wait },
{ "check", f4l_check },
{ "args", f4l_args },
{ "get_system_colors", f4l_get_system_colors },
{ "scheme", f4l_scheme },
{ "redraw", f4l_redraw },
{ "option", f4l_option },
{ "open_uri", f4l_open_uri },
{ "add_fd", f4l_add_fd },
{ "remove_fd", f4l_remove_fd },
{ NULL, NULL }
};
luaL_newlib( L, functions );
lua_pushfstring( L, "fltk4lua v%d.%d using FLTK %d.%d.%d",
(int)F4L_VERSION_MAJOR, (int)F4L_VERSION_MINOR,
(int)FL_MAJOR_VERSION, (int)FL_MINOR_VERSION,
(int)FL_PATCH_VERSION );
lua_setfield( L, -2, "_VERSION" );
lua_pushstring( L, Fl::help );
lua_setfield( L, -2, "help" );
*moon_atexit( L, delete_remaining_widgets ) = 1;
lua_pop( L, 1 ); // remove atexit userdata from stack
f4l_enums_setup( L );
f4l_ask_setup( L );
f4l_shared_image_setup( L );
f4l_widget_setup( L );
f4l_box_setup( L );
f4l_button_setup( L );
f4l_chart_setup( L );
f4l_clock_setup( L );
f4l_group_setup( L );
f4l_browser_setup( L );
f4l_file_browser_setup( L );
f4l_check_browser_setup( L );
f4l_color_chooser_setup( L );
f4l_input_choice_setup( L );
f4l_pack_setup( L );
f4l_scroll_setup( L );
f4l_spinner_setup( L );
f4l_tabs_setup( L );
f4l_tile_setup( L );
f4l_window_setup( L );
f4l_wizard_setup( L );
f4l_input_setup( L );
f4l_menu_setup( L );
f4l_choice_setup( L );
f4l_menu_bar_setup( L );
f4l_menu_button_setup( L );
f4l_progress_setup( L );
f4l_adjuster_setup( L );
f4l_counter_setup( L );
f4l_dial_setup( L );
f4l_roller_setup( L );
f4l_slider_setup( L );
f4l_value_input_setup( L );
f4l_value_output_setup( L );
return 1;
}
<|start_filename|>examples/button.lua<|end_filename|>
#!/usr/bin/env lua
local fl = require( "fltk4lua" )
local window = fl.Window( 320, 65 )
local b1 = fl.Button( 20, 20, 80, 25, "&Beep" )
function b1:callback()
fl.beep()
end
fl.Button( 120, 20, 80, 25, "&no op" )
local b3 = fl.Button( 220, 20, 80, 25, "E&xit" )
function b3:callback()
--os.exit( 0 ) -- bad style
window:hide()
end
window:end_group()
window:show( arg )
fl.run()
<|start_filename|>examples/clock.lua<|end_filename|>
#!/usr/bin/env lua
local fl = require( "fltk4lua" )
local window1 = fl.Window{ 220, 220, "Fl_Clock", xclass = "Fl_Clock" }
local c1 = fl.Clock{ 0, 0, 220, 220 }
--window1.resizable = c1 -- looks bad on a tiling window manager
window1:end_group()
local window2 = fl.Window{ 220, 220, "Fl_Round_Clock", xclass = "Fl_Clock" }
local c2 = fl.Clock{ 0, 0, 220, 220, type = "FL_ROUND_CLOCK" }
--window2.resizable = c2 -- looks bad on a tiling window manager
window2:end_group()
window1:show( arg )
window2:show()
fl.run()
<|start_filename|>src/f4l_input_choice.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_menu.hxx"
#include "f4l_widget.hxx"
#include "f4l_enums.hxx"
#include <FL/Fl_Input_Choice.H>
#include <cstring>
#include <climits>
namespace {
inline Fl_Input_Choice* check_input_choice( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_INPUT_CHOICE_NAME );
return static_cast< Fl_Input_Choice* >( p );
}
int input_choice_index_( lua_State* L, Fl_Input_Choice* ic,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 5:
if( F4L_MEMCMP( key, "input", 5 ) == 0 ) {
if( moon_getuvfield( L, 1, "input" ) == LUA_TNIL ) {
f4l_new_member< Fl_Input >( L, F4L_INPUT_NAME,
ic->input(), 1 );
lua_pushvalue( L, -1 );
moon_setuvfield( L, 1, "input" );
}
return 1;
} else if( F4L_MEMCMP( key, "value", 5 ) == 0 ) {
char const* s = ic->value();
if( s != NULL )
lua_pushstring( L, ic->value() );
else
lua_pushnil( L );
return 1;
}
break;
case 7:
if( F4L_MEMCMP( key, "changed", 7 ) == 0 ) {
lua_pushboolean( L, ic->changed() );
return 1;
}
break;
case 8:
if( F4L_MEMCMP( key, "down_box", 8 ) == 0 ) {
f4l_push_boxtype( L, ic->down_box() );
return 1;
} else if( F4L_MEMCMP( key, "textfont", 8 ) == 0 ) {
f4l_push_font( L, ic->textfont() );
return 1;
} else if( F4L_MEMCMP( key, "textsize", 8 ) == 0 ) {
lua_pushinteger( L, ic->textsize() );
return 1;
}
break;
case 9:
if( F4L_MEMCMP( key, "textcolor", 9 ) == 0 ) {
f4l_push_color( L, ic->textcolor() );
return 1;
}
break;
case 10:
if( F4L_MEMCMP( key, "menubutton", 10 ) == 0 ) {
if( moon_getuvfield( L, 1, "menubutton" ) == LUA_TNIL ) {
f4l_new_member< Fl_Menu_Button >( L, F4L_MENU_BUTTON_NAME,
ic->menubutton(), 1 );
lua_pushvalue( L, -1 );
moon_setuvfield( L, 1, "menubutton" );
}
return 1;
}
break;
}
return 0;
}
int input_choice_newindex_( lua_State* L, Fl_Input_Choice* ic,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 5:
if( F4L_MEMCMP( key, "value", 5 ) == 0 ) {
if( lua_type( L, 3 ) == LUA_TNUMBER ) {
int idx = moon_checkint( L, 3, 0, INT_MAX );
int sz = 0;
Fl_Menu_Item const* mi = ic->menu();
if( mi != NULL )
sz = mi->size();
luaL_argcheck( L, idx < sz, 3, "index too large" );
ic->value( idx );
} else {
char const* s = luaL_optstring( L, 3, NULL );
ic->value( s );
}
return 1;
}
break;
case 8:
if( F4L_MEMCMP( key, "down_box", 8 ) == 0 ) {
ic->down_box( f4l_check_boxtype( L, 3 ) );
return 1;
} else if( F4L_MEMCMP( key, "textfont", 8 ) == 0 ) {
ic->textfont( f4l_check_font( L, 3 ) );
return 1;
} else if( F4L_MEMCMP( key, "textsize", 8 ) == 0 ) {
ic->textsize( moon_checkint( L, 3, 0, INT_MAX ) );
return 1;
}
break;
case 9:
if( F4L_MEMCMP( key, "textcolor", 9 ) == 0 ) {
ic->textcolor( f4l_check_color( L, 3 ) );
return 1;
}
break;
}
return 0;
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_Input_Choice )
F4L_LUA_LLINKAGE_BEGIN
static int new_input_choice( lua_State* L ) {
F4L_TRY( L ) {
Fl_Input_Choice* ic = NULL;
ic = f4l_new_widget< Fl_Input_Choice >( L, F4L_INPUT_CHOICE_NAME,
f4l_delete_Fl_Input_Choice );
/* The input userdata can be created on demand in the __index
* metamethod, but we need the menubutton registered as a widget
* in the implementation of add() and clear(). */
f4l_new_member< Fl_Menu_Button >( L, F4L_MENU_BUTTON_NAME,
ic->menubutton(), -1 );
moon_setuvfield( L, -2, "menubutton" );
} F4L_CATCH( L );
return 1;
}
static int input_choice_index( lua_State* L ) {
Fl_Input_Choice* ic = check_input_choice( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !input_choice_index_( L, ic, key, n ) &&
!f4l_widget_index_( L, ic, key, n ) &&
!f4l_bad_property( L, F4L_INPUT_CHOICE_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int input_choice_newindex( lua_State* L ) {
Fl_Input_Choice* ic = check_input_choice( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(input_choice_newindex_( L, ic, key, n ) ||
f4l_widget_newindex_( L, ic, key, n ) ||
f4l_bad_property( L, F4L_INPUT_CHOICE_NAME, key ));
} F4L_CATCH( L );
return 0;
}
static int input_choice_add( lua_State* L ) {
Fl_Input_Choice* ic = check_input_choice( L, 1 );
luaL_checkstring( L, 2 );
lua_settop( L, 2 );
f4l_push_widget( L, ic->menubutton() );
lua_replace( L, 1 );
/* Forward to f4l_menu_add(). A similar thing happens in
* Fl_Input_Choice::add() anyway, but this way we don't have
* to duplicate the bookkeeping code. */
f4l_menu_add( L );
return 0;
}
static int input_choice_clear( lua_State* L ) {
Fl_Input_Choice* ic = check_input_choice( L, 1 );
lua_settop( L, 1 );
f4l_push_widget( L, ic->menubutton() );
lua_replace( L, 1 );
/* Forward to f4l_menu_clear(). A similar thing happens in
* Fl_Input_Choice::clear() anyway, but this way we don't have
* to duplicate the bookkeeping code. */
return f4l_menu_clear( L );
}
static int input_choice_clear_changed( lua_State* L ) {
Fl_Input_Choice* ic = check_input_choice( L, 1 );
F4L_TRY( L ) {
ic->clear_changed();
} F4L_CATCH( L );
return 0;
}
static int input_choice_set_changed( lua_State* L ) {
Fl_Input_Choice* ic = check_input_choice( L, 1 );
F4L_TRY( L ) {
ic->set_changed();
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_Input_Choice, Fl_Widget )
MOON_LOCAL void f4l_input_choice_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
{ "add", input_choice_add },
{ "clear", input_choice_clear },
{ "clear_changed", input_choice_clear_changed },
{ "set_changed", input_choice_set_changed },
{ "__index", input_choice_index },
{ "__newindex", input_choice_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_INPUT_CHOICE_NAME, 0, methods, 0 );
moon_defcast( L, F4L_INPUT_CHOICE_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Input_Choice_Fl_Widget );
f4l_new_class_table( L, "Input_Choice", new_input_choice );
}
<|start_filename|>src/f4l_menu_bar.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_menu.hxx"
#include "f4l_widget.hxx"
#include <FL/Fl_Menu_Bar.H>
namespace {
inline Fl_Menu_Bar* check_menu_bar( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_MENU_BAR_NAME );
return static_cast< Fl_Menu_Bar* >( p );
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_Menu_Bar )
F4L_LUA_LLINKAGE_BEGIN
static int new_menu_bar( lua_State* L ) {
F4L_TRY( L ) {
f4l_new_widget< Fl_Menu_Bar >( L, F4L_MENU_BAR_NAME,
f4l_delete_Fl_Menu_Bar );
} F4L_CATCH( L );
return 1;
}
static int menu_bar_index( lua_State* L ) {
Fl_Menu_Bar* mb = check_menu_bar( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !f4l_menu_index_( L, mb, key, n ) &&
!f4l_widget_index_( L, mb, key, n ) &&
!f4l_bad_property( L, F4L_MENU_BAR_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int menu_bar_newindex( lua_State* L ) {
Fl_Menu_Bar* mb = check_menu_bar( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(f4l_menu_newindex_( L, mb, key, n ) ||
f4l_widget_newindex_( L, mb, key, n ) ||
f4l_bad_property( L, F4L_MENU_BAR_NAME, key ));
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_Menu_Bar, Fl_Menu_ )
F4L_DEF_CAST( Fl_Menu_Bar, Fl_Widget )
MOON_LOCAL void f4l_menu_bar_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
F4L_MENU_METHODS,
{ "__index", menu_bar_index },
{ "__newindex", menu_bar_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_MENU_BAR_NAME, 0, methods, 0 );
moon_defcast( L, F4L_MENU_BAR_NAME, F4L_MENU_NAME,
f4l_cast_Fl_Menu_Bar_Fl_Menu_ );
moon_defcast( L, F4L_MENU_BAR_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Menu_Bar_Fl_Widget );
f4l_new_class_table( L, "Menu_Bar", new_menu_bar );
}
<|start_filename|>src/f4l_menu.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_menu.hxx"
#include "f4l_widget.hxx"
#include "f4l_enums.hxx"
#include <cstring>
#include <climits>
#include <cstdio>
#define MENU_FLAG_LIST( _ ) \
_( "MENU_REGULAR", 0 ) \
_( "MENU_INACTIVE", FL_MENU_INACTIVE ) \
_( "MENU_TOGGLE", FL_MENU_TOGGLE ) \
_( "MENU_VALUE", FL_MENU_VALUE ) \
_( "MENU_RADIO", FL_MENU_RADIO ) \
_( "MENU_INVISIBLE", FL_MENU_INVISIBLE ) \
_( "SUBMENU", FL_SUBMENU ) \
_( "MENU_DIVIDER", FL_MENU_DIVIDER )
#define MOON_FLAG_NAME "fltk4lua.Menu_Flag"
#define MOON_FLAG_TYPE int
#define MOON_FLAG_SUFFIX menu
#define MOON_FLAG_USECACHE
#define MOON_FLAG_NORELOPS
#include "moon_flag.h"
namespace {
inline Fl_Menu_* check_menu( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_MENU_NAME );
return static_cast< Fl_Menu_* >( p );
}
/* during a menu callback the menu must not be modified! */
Fl_Menu_* check_rw_menu( lua_State* L, int idx ) {
Fl_Menu_* m = check_menu( L, idx );
void* p = lua_touserdata( L, idx );
moon_object_header* h = NULL;
h = static_cast< moon_object_header* >( p );
if( h->flags & F4L_CALLBACK_ACTIVE )
luaL_error( L, "menu is temporarily readonly" );
return m;
}
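/* Single C callback shared by all menu items created from Lua: it looks up
 * the selected item's entry in the "menu" mirror table kept in the widget's
 * uservalue, and calls the `callback` stored there with the widget and the
 * item's `user_data`. The menu is treated as read-only for the duration of
 * the call (see check_rw_menu() above). */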
void f4l_menu_callback( Fl_Widget* w, void* ) {
Fl_Menu_* menu = static_cast< Fl_Menu_* >( w );
int v = menu->value();
// we use the Menu_'s user_data instead of the Menu_Item's:
f4l_active_L* th = static_cast< f4l_active_L* >( w->user_data() );
if( th != NULL && th->cb_L != NULL && v >= 0 ) {
lua_State* L = th->cb_L;
luaL_checkstack( L, 6, "f4l_menu_callback" );
int top = lua_gettop( L );
lua_pushcfunction( L, f4l_backtrace );
f4l_push_widget( L, w );
lua_pushvalue( L, -1 ); // widget, widget
moon_object_header* h = NULL;
h = static_cast< moon_object_header* >( lua_touserdata( L, -2 ) );
if( moon_getuvfield( L, -1, "menu" ) == LUA_TTABLE &&
lua_rawgeti( L, -1, v+1 ) == LUA_TTABLE ) {
lua_replace( L, -2 ); // widget, widget, menu-entry
if( lua_getfield( L, -1, "callback" ) == LUA_TFUNCTION ) {
lua_insert( L, -3 ); // widget, callback, widget, menu-entry
lua_getfield( L, -1, "user_data" );
lua_replace( L, -2 ); // widget, callback, widget, user_data
int oldf = h->flags & F4L_CALLBACK_ACTIVE;
h->flags |= F4L_CALLBACK_ACTIVE;
int status = lua_pcall( L, 2, 0, top+1 );
h->flags = (h->flags & ~F4L_CALLBACK_ACTIVE) | oldf;
if( status != 0 ) {
lua_replace( L, -3 );
lua_pop( L, 1 );
f4l_fix_backtrace( L );
lua_error( L );
}
}
}
lua_settop( L, top );
}
}
// helper function for table_rotate
void table_reverse( lua_State* L, int idx, int a, int b ) {
idx = lua_absindex( L, idx );
for( ; a < b; ++a, --b ) {
lua_rawgeti( L, idx, a );
lua_rawgeti( L, idx, b );
lua_rawseti( L, idx, a );
lua_rawseti( L, idx, b );
}
}
// similar to lua_rotate, but on table contents instead of stack!
void table_rotate( lua_State* L, int idx, int i, int n ) {
int n_elems = luaL_len( L, idx )-i+1;
if( n < 0 )
n += n_elems;
if( n > 0 && n < n_elems ) {
n = n_elems - n;
table_reverse( L, idx, i, i+n-1 );
table_reverse( L, idx, i+n, i+n_elems-1 );
table_reverse( L, idx, i, i+n_elems-1 );
}
}
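// remove n entries starting at index i by shifting the following
// entries down (the vacated tail slots become nil)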
void table_remove( lua_State* L, int idx, int i, int n ) {
idx = lua_absindex( L, idx );
int oldn = luaL_len( L, idx );
while( i <= oldn ) {
lua_rawgeti( L, idx, i+n );
lua_rawseti( L, idx, i );
++i;
}
}
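// truncate the array part of the table at idx to newn elements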
void table_shrink( lua_State* L, int idx, int newn ) {
idx = lua_absindex( L, idx );
for( int oldn = luaL_len( L, idx ); oldn > newn; --oldn ) {
lua_pushnil( L );
lua_rawseti( L, idx, oldn );
}
}
#if 0
// for debugging only; otherwise unused function
void menu_array_dump( lua_State* L, int idx ) {
using namespace std;
idx = lua_absindex( L, idx );
moon_checkobject( L, idx, F4L_MENU_NAME );
int t = moon_getuvfield( L, idx, "menu" );
if( t == LUA_TNIL ) {
fprintf( stderr, "no menu item mirror table\n" );
} else if( t != LUA_TTABLE ) {
fprintf( stderr, "menu uservalue field has wrong type: %s\n",
lua_typename( L, t ) );
lua_pop( L, 1 );
} else {
luaL_checkstack( L, 5, "menu_array_dump" );
int n = luaL_len( L, -1 );
for( int i = 1; i <= n; ++i ) {
int it = lua_rawgeti( L, -1, i );
fprintf( stderr, "item %d, lua type: %s\n", i-1,
lua_typename( L, it ) );
if( it == LUA_TTABLE ) {
lua_getfield( L, -1, "callback" );
lua_getfield( L, -2, "user_data" );
char const* cb = luaL_tolstring( L, -2, NULL );
char const* ud = luaL_tolstring( L, -2, NULL );
fprintf( stderr, " callback: %s, user_data: %s\n", cb, ud );
lua_pop( L, 4 );
}
lua_pop( L, 1 );
}
lua_pop( L, 1 );
}
}
#endif
#undef MENU_SYNC_WARN
#define MENU_SYNC_ERROR
// setup the menu item mirror table in the uservalue table
void menu_array_sync( lua_State* L, int idx, Fl_Menu_* m, int msize ) {
using namespace std;
idx = lua_absindex( L, idx );
lua_getuservalue( L, idx );
if( lua_getfield( L, -1, "menu" ) != LUA_TTABLE ) {
lua_pop( L, 1 );
lua_createtable( L, msize, 0 );
lua_pushvalue( L, -1 );
lua_setfield( L, -3, "menu" );
}
lua_replace( L, -2 ); // remove uservalue table, keep mirror table
Fl_Menu_Item const* p = m->menu();
for( int i = 0; i < msize; ++i ) {
char const* t = p[ i ].label();
if( t == NULL ) { // sub-menu terminator
if( lua_rawgeti( L, -1, i+1 ) != LUA_TBOOLEAN ) {
#ifdef MENU_SYNC_WARN
fprintf( stderr, "menu out of sync (item: %d, Lua type: %s)!\n",
i, luaL_typename( L, -1 ) );
#endif
#ifdef MENU_SYNC_ERROR
luaL_error( L, "menu out of sync (item %d)", i );
#endif
lua_pushboolean( L, 0 ); // terminators are falses
lua_rawseti( L, -3, i+1 );
}
} else { // normal menu element
if( lua_rawgeti( L, -1, i+1 ) != LUA_TTABLE ) {
#ifdef MENU_SYNC_WARN
fprintf( stderr, "menu out of sync (item: %d, Lua type: %s)!\n",
i, luaL_typename( L, -1 ) );
#endif
#ifdef MENU_SYNC_ERROR
luaL_error( L, "menu out of sync (item %d)", i );
#endif
lua_newtable( L );
lua_rawseti( L, -3, i+1 );
}
}
lua_pop( L, 1 );
}
table_shrink( L, -1, msize );
}
} // anonymous namespace
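/* f4l_prepare_menu_insert()/f4l_commit_menu_insert() bracket every call to
 * Fl_Menu_::add()/insert(): prepare() preallocates mirror-table entries for
 * any sub-menus FLTK may create implicitly from '/'-separated labels, and
 * commit() rotates and shrinks the mirror table so it matches the menu's
 * final layout, leaving the new item's table on the stack. */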
MOON_LOCAL void f4l_prepare_menu_insert( lua_State* L, int midx,
Fl_Menu_* m, int msize,
char const* nlabel ) {
menu_array_sync( L, midx, m, msize ); // pushes mirror table
/* the number of slashes in the label gives an upper limit to the
* number of submenus that will be added automatically */
int n_slashes = 0;
for( ; *nlabel != '\0'; ++nlabel )
if( *nlabel == '/' )
++n_slashes;
// preallocate memory for sub-menus + element to be inserted
for( int i = 0; i < n_slashes+1; ++i ) {
lua_newtable( L );
lua_rawseti( L, -2, msize+i+1 );
}
// preallocate sub-menu terminators
for( int i = n_slashes+1; i < 2*n_slashes+2; ++i ) {
lua_pushboolean( L, 0 );
lua_rawseti( L, -2, msize+i+1 );
}
}
MOON_LOCAL void f4l_commit_menu_insert( lua_State* L, int midx,
Fl_Menu_* m, int osize,
int pos ) {
(void)midx; // don't need it
int newsize = m->size();
/* update the mirror table by shifting preallocated items to the
* correct positions. */
if( newsize > osize ) {
int tsize = luaL_len( L, -1 );
int added = newsize-osize;
int n_submenus = (added-1)/2, n_terminators = added/2;
int n_pasubmenus = (tsize - osize)/2 - 1;
// remove preallocated terminators we don't need
table_shrink( L, -1, osize+n_pasubmenus+1+n_terminators );
// move preallocated stuff to the right position
table_rotate( L, -1, pos+1-n_submenus, n_submenus+1+n_terminators );
}
// remove preallocated leftovers
table_shrink( L, -1, newsize );
lua_rawgeti( L, -1, pos+1 );
lua_replace( L, -2 ); // remove mirror, put item table on stack top
}
MOON_LOCAL int f4l_menu_index_( lua_State* L, Fl_Menu_* m,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 5:
if( F4L_MEMCMP( key, "value", 5 ) == 0 ) {
lua_pushinteger( L, m->value() );
return 1;
}
break;
case 8:
if( F4L_MEMCMP( key, "down_box", 8 ) == 0 ) {
f4l_push_boxtype( L, m->down_box() );
return 1;
} else if( F4L_MEMCMP( key, "textfont", 8 ) == 0 ) {
f4l_push_font( L, m->textfont() );
return 1;
} else if( F4L_MEMCMP( key, "textsize", 8 ) == 0 ) {
lua_pushinteger( L, m->textsize() );
return 1;
}
break;
case 9:
if( F4L_MEMCMP( key, "menu_size", 9 ) == 0 ) {
lua_pushinteger( L, m->size() );
return 1;
} else if( F4L_MEMCMP( key, "textcolor", 9 ) == 0 ) {
f4l_push_color( L, m->textcolor() );
return 1;
}
break;
}
return 0;
}
MOON_LOCAL int f4l_menu_newindex_( lua_State* L, Fl_Menu_* m,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 5:
if( F4L_MEMCMP( key, "value", 5 ) == 0 ) {
int i = moon_checkint( L, 3, 0, INT_MAX );
luaL_argcheck( L, 0 <= i && i < m->size(), 3,
"index out of range" );
m->value( i );
return 1;
}
break;
case 8:
if( F4L_MEMCMP( key, "down_box", 8 ) == 0 ) {
m->down_box( f4l_check_boxtype( L, 3 ) );
return 1;
} else if( F4L_MEMCMP( key, "textfont", 8 ) == 0 ) {
m->textfont( f4l_check_font( L, 3 ) );
return 1;
} else if( F4L_MEMCMP( key, "textsize", 8 ) == 0 ) {
m->textsize( moon_checkint( L, 3, 0, INT_MAX ) );
return 1;
}
break;
case 9:
if( F4L_MEMCMP( key, "textcolor", 9 ) == 0 ) {
m->textcolor( f4l_check_color( L, 3 ) );
return 1;
}
break;
}
return 0;
}
F4L_LUA_LLINKAGE_BEGIN
MOON_LOCAL int f4l_menu_add( lua_State* L ) {
F4L_TRY( L ) {
Fl_Menu_* m = check_rw_menu( L, 1 );
size_t n = 0;
char const* label = luaL_checklstring( L, 2, &n );
luaL_argcheck( L, n < 1024, 2,
"menu label too long (FLTK limitation)" );
lua_settop( L, 6 );
Fl_Shortcut sc = luaL_opt( L, f4l_check_shortcut, 3, 0 );
int have_cb = 0;
if( !lua_isnil( L, 4 ) ) {
luaL_checktype( L, 4, LUA_TFUNCTION );
have_cb = 1;
}
int mflags = luaL_opt( L, moon_flag_get_menu, 6, 0 );
luaL_argcheck( L, !(mflags & FL_SUBMENU), 6,
"FL_SUBMENU not allowed (FLTK bug)" );
int msize = m->size();
f4l_prepare_menu_insert( L, 1, m, msize, label );
int pos = m->add( label, sc, have_cb ? f4l_menu_callback : 0,
NULL, mflags );
f4l_commit_menu_insert( L, 1, m, msize, pos );
// set callback and user_data fields
lua_pushvalue( L, 4 );
lua_setfield( L, -2, "callback" );
lua_pushvalue( L, 5 );
lua_setfield( L, -2, "user_data" );
lua_pushinteger( L, pos );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_clear( lua_State* L ) {
Fl_Menu_* m = check_rw_menu( L, 1 );
F4L_TRY( L ) {
m->clear();
} F4L_CATCH( L );
lua_pushnil( L );
moon_setuvfield( L, 1, "menu" );
return 0;
}
MOON_LOCAL int f4l_menu_clear_submenu( lua_State* L ) {
Fl_Menu_* m = check_rw_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int oldsize = m->size();
luaL_argcheck( L, idx < oldsize, 2, "index too large" );
menu_array_sync( L, 1, m, oldsize ); // pushes mirror table
if( m->clear_submenu( idx ) == 0 ) {
int newsize = m->size();
table_remove( L, -1, idx+1, oldsize-newsize );
lua_pushboolean( L, 1 );
} else
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_find_index( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
char const* name = luaL_checkstring( L, 2 );
F4L_TRY( L ) {
int v = m->find_index( name );
if( v >= 0 )
lua_pushinteger( L, v );
else
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_insert( lua_State* L ) {
F4L_TRY( L ) {
Fl_Menu_* m = check_rw_menu( L, 1 );
int msize = m->size();
int idx = moon_checkint( L, 2, -1, INT_MAX );
luaL_argcheck( L, idx < msize, 2, "index too large" );
size_t n = 0;
char const* label = luaL_checklstring( L, 3, &n );
luaL_argcheck( L, n < 1024, 3,
"menu label too long (FLTK limitation)" );
lua_settop( L, 7 );
Fl_Shortcut sc = luaL_opt( L, f4l_check_shortcut, 4, 0 );
int have_cb = 0;
if( !lua_isnil( L, 5 ) ) {
luaL_checktype( L, 5, LUA_TFUNCTION );
have_cb = 1;
}
int mflags = luaL_opt( L, moon_flag_get_menu, 7, 0 );
luaL_argcheck( L, !(mflags & FL_SUBMENU), 7,
"FL_SUBMENU not allowed (FLTK bug)" );
f4l_prepare_menu_insert( L, 1, m, msize, label );
int pos = m->insert( idx, label, sc, have_cb ? f4l_menu_callback : 0,
NULL, mflags );
f4l_commit_menu_insert( L, 1, m, msize, pos );
// set callback and user_data fields
lua_pushvalue( L, 5 );
lua_setfield( L, -2, "callback" );
lua_pushvalue( L, 6 );
lua_setfield( L, -2, "user_data" );
lua_pushinteger( L, pos );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_remove( lua_State* L ) {
Fl_Menu_* m = check_rw_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int oldsize = m->size();
luaL_argcheck( L, idx < oldsize, 2, "index too large" );
if( m->text( idx ) != NULL ) { // don't remove terminators
menu_array_sync( L, 1, m, oldsize ); // pushes mirror table
m->remove( idx );
int newsize = m->size();
table_remove( L, -1, idx+1, oldsize-newsize );
}
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_size( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int w = moon_checkint( L, 2, 0, INT_MAX );
int h = moon_checkint( L, 3, 0, INT_MAX );
F4L_TRY( L ) {
m->size( w, h );
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_getp( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
size_t n = 0;
char const* key = luaL_checklstring( L, 3, &n );
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "text", 4 ) == 0 ) {
char const* v = m->text( idx );
if( v != NULL )
lua_pushstring( L, v );
else
lua_pushnil( L );
return 1;
}
break;
case 5:
if( F4L_MEMCMP( key, "flags", 5 ) == 0 ) {
moon_flag_new_menu( L, m->mode( idx ) );
return 1;
} else if( F4L_MEMCMP( key, "label", 5 ) == 0 ) {
char const* v = m->text( idx );
if( v != NULL )
lua_pushstring( L, v );
else
lua_pushnil( L );
return 1;
}
break;
case 8:
if( F4L_MEMCMP( key, "callback", 8 ) == 0 ) {
if( moon_getuvfield( L, 1, "menu" ) == LUA_TTABLE &&
lua_rawgeti( L, -1, idx+1 ) == LUA_TTABLE )
lua_getfield( L, -1, "callback" );
else
lua_pushnil( L );
return 1;
} else if( F4L_MEMCMP( key, "shortcut", 8 ) == 0 ) {
f4l_push_shortcut( L, m->menu()[ idx ].shortcut() );
return 1;
}
break;
case 9:
if( F4L_MEMCMP( key, "labelfont", 9 ) == 0 ) {
f4l_push_font( L, m->menu()[ idx ].labelfont() );
return 1;
} else if( F4L_MEMCMP( key, "labelsize", 9 ) == 0 ) {
lua_pushinteger( L, m->menu()[ idx ].labelsize() );
return 1;
} else if( F4L_MEMCMP( key, "labeltype", 9 ) == 0 ) {
f4l_push_labeltype( L, m->menu()[ idx ].labeltype() );
return 1;
} else if( F4L_MEMCMP( key, "user_data", 9 ) == 0 ) {
if( moon_getuvfield( L, 1, "menu" ) == LUA_TTABLE &&
lua_rawgeti( L, -1, idx+1 ) == LUA_TTABLE )
lua_getfield( L, -1, "user_data" );
else
lua_pushnil( L );
return 1;
}
break;
case 10:
if( F4L_MEMCMP( key, "labelcolor", 10 ) == 0 ) {
f4l_push_color( L, m->menu()[ idx ].labelcolor() );
return 1;
}
break;
}
if( f4l_bad_property( L, "fltk4lua.Menu_Item", key ) )
return 1;
} F4L_CATCH( L );
lua_pushnil( L );
return 1;
}
MOON_LOCAL int f4l_menu_menuitem_setp( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
luaL_argcheck( L, m->menu()[ idx ].label() != NULL, 2,
"can't modify terminator Menu_Item" );
size_t n = 0;
char const* key = luaL_checklstring( L, 3, &n );
lua_settop( L, 4 );
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "text", 4 ) == 0 ) {
char const* v = luaL_checkstring( L, 4 );
m->replace( idx, v );
return 0;
}
break;
case 5:
if( F4L_MEMCMP( key, "flags", 5 ) == 0 ) {
int oldf = m->mode( idx );
int newf = moon_flag_get_menu( L, 4 );
luaL_argcheck( L, (oldf & FL_SUBMENU) == (newf & FL_SUBMENU),
4, "cannot change FL_SUBMENU flag" );
m->mode( idx, newf );
return 0;
} else if( F4L_MEMCMP( key, "label", 5 ) == 0 ) {
char const* v = luaL_checkstring( L, 4 );
m->replace( idx, v );
return 0;
}
break;
case 8:
if( F4L_MEMCMP( key, "callback", 8 ) == 0 ) {
int has_cb = 0;
luaL_argcheck( L, f4l_our_widget( L, m ), 1,
"internal FLTK widget" );
if( !lua_isnoneornil( L, 4 ) ) {
luaL_checktype( L, 4, LUA_TFUNCTION );
has_cb = 1;
}
if( moon_getuvfield( L, 1, "menu" ) == LUA_TTABLE &&
lua_rawgeti( L, -1, idx+1 ) == LUA_TTABLE ) {
lua_pushvalue( L, 4 );
lua_setfield( L, -2, "callback" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.callback( has_cb ? f4l_menu_callback : 0 );
} else
luaL_error( L, "menu item %d doesn't have a uservalue table",
idx );
return 0;
} else if( F4L_MEMCMP( key, "shortcut", 8 ) == 0 ) {
m->shortcut( idx, f4l_check_shortcut( L, 4 ) );
return 0;
}
break;
case 9:
if( F4L_MEMCMP( key, "labelfont", 9 ) == 0 ) {
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.labelfont( f4l_check_font( L, 4 ) );
return 0;
} else if( F4L_MEMCMP( key, "labelsize", 9 ) == 0 ) {
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.labelsize( moon_checkint( L, 4, 0, INT_MAX ) );
return 0;
} else if( F4L_MEMCMP( key, "labeltype", 9 ) == 0 ) {
Fl_Labeltype t = f4l_check_labeltype( L, 4 );
luaL_argcheck( L, t != _FL_IMAGE_LABEL, 4,
"this labeltype is invalid here" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.labeltype( t );
return 0;
} else if( F4L_MEMCMP( key, "user_data", 9 ) == 0 ) {
if( moon_getuvfield( L, 1, "menu" ) == LUA_TTABLE &&
lua_rawgeti( L, -1, idx+1 ) == LUA_TTABLE ) {
lua_pushvalue( L, 4 );
lua_setfield( L, -2, "user_data" );
} else
luaL_error( L, "menu item %d doesn't have a uservalue table",
idx );
return 0;
}
// note: _FL_IMAGE_LABEL is rejected above, because image label types
// can cause memory corruption when used here!
break;
case 10:
if( F4L_MEMCMP( key, "labelcolor", 10 ) == 0 ) {
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.labelcolor( f4l_check_color( L, 4 ) );
return 0;
}
break;
}
f4l_bad_property( L, "fltk4lua.Menu_Item", key );
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_activate( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.activate();
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_active( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
int v = const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.active();
lua_pushboolean( L, v );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_menuitem_activevisible( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
int v = const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.activevisible();
lua_pushboolean( L, v );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_menuitem_checkbox( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
int v = const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.checkbox();
lua_pushboolean( L, v );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_menuitem_clear( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.clear();
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_deactivate( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.deactivate();
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_hide( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.hide();
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_radio( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
int v = const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.radio();
lua_pushboolean( L, v );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_menuitem_set( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.set();
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_setonly( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.setonly();
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_show( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.show();
} F4L_CATCH( L );
return 0;
}
MOON_LOCAL int f4l_menu_menuitem_submenu( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
int v = const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.submenu();
lua_pushboolean( L, v );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_menuitem_value( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
int v = const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.value();
lua_pushboolean( L, v );
} F4L_CATCH( L );
return 1;
}
MOON_LOCAL int f4l_menu_menuitem_visible( lua_State* L ) {
Fl_Menu_* m = check_menu( L, 1 );
int idx = moon_checkint( L, 2, 0, INT_MAX );
F4L_TRY( L ) {
int size = m->size();
luaL_argcheck( L, idx < size, 2, "index too large" );
int v = const_cast< Fl_Menu_Item* >( m->menu() )[ idx ]
.visible();
lua_pushboolean( L, v );
} F4L_CATCH( L );
return 1;
}
F4L_LUA_LLINKAGE_END
MOON_LOCAL void f4l_menu_setup( lua_State* L ) {
moon_flag_def_menu( L );
#define GEN_UDATA( _a, _b ) \
(moon_flag_new_menu( L, _b ), lua_setfield( L, -2, _a ));
MENU_FLAG_LIST( GEN_UDATA )
#undef GEN_UDATA
}
<|start_filename|>src/f4l_dial.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_valuator.hxx"
#include "f4l_widget.hxx"
#include "f4l_enums.hxx"
#include <FL/Fl_Dial.H>
#include <cstring>
#include <climits>
#define TYPE_LIST( _ ) \
_( "FL_NORMAL_DIAL", FL_NORMAL_DIAL ) \
_( "FL_FILL_DIAL", FL_FILL_DIAL ) \
_( "FL_LINE_DIAL", FL_LINE_DIAL )
F4L_GEN_TYPE_ENUM( TYPE_LIST, dial )
namespace {
inline Fl_Dial* check_dial( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_DIAL_NAME );
return static_cast< Fl_Dial* >( p );
}
int dial_index_( lua_State* L, Fl_Dial* d,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "type", 4 ) == 0 ){
f4l_push_type_dial( L, d->type() );
return 1;
}
break;
case 6:
if( F4L_MEMCMP( key, "angle1", 6 ) == 0 ) {
lua_pushinteger( L, d->angle1() );
return 1;
} else if( F4L_MEMCMP( key, "angle2", 6 ) == 0 ) {
lua_pushinteger( L, d->angle2() );
return 1;
}
break;
}
return 0;
}
int dial_newindex_( lua_State* L, Fl_Dial* d,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "type", 4 ) == 0 ){
d->type( f4l_check_type_dial( L, 3 ) );
return 1;
}
break;
case 6:
if( F4L_MEMCMP( key, "angle1", 6 ) == 0 ) {
d->angle1( moon_checkint( L, 3, SHRT_MIN, SHRT_MAX ) );
return 1;
} else if( F4L_MEMCMP( key, "angle2", 6 ) == 0 ) {
d->angle2( moon_checkint( L, 3, SHRT_MIN, SHRT_MAX ) );
return 1;
}
break;
}
return 0;
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_Dial )
F4L_LUA_LLINKAGE_BEGIN
static int new_dial( lua_State* L ) {
F4L_TRY( L ) {
f4l_new_widget< Fl_Dial>( L, F4L_DIAL_NAME,
f4l_delete_Fl_Dial );
} F4L_CATCH( L );
return 1;
}
static int dial_index( lua_State* L ) {
Fl_Dial* d = check_dial( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !dial_index_( L, d, key, n ) &&
!f4l_valuator_index_( L, d, key, n ) &&
!f4l_widget_index_( L, d, key, n ) &&
!f4l_bad_property( L, F4L_DIAL_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int dial_newindex( lua_State* L ) {
Fl_Dial* d = check_dial( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(dial_newindex_( L, d, key, n ) ||
f4l_valuator_newindex_( L, d, key, n ) ||
f4l_widget_newindex_( L, d, key, n ) ||
f4l_bad_property( L, F4L_DIAL_NAME, key ));
} F4L_CATCH( L );
return 0;
}
static int dial_angles( lua_State* L ) {
Fl_Dial* d = check_dial( L, 1 );
short a = moon_checkint( L, 2, SHRT_MIN, SHRT_MAX );
short b = moon_checkint( L, 3, SHRT_MIN, SHRT_MAX );
F4L_TRY( L ) {
d->angles( a, b );
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_Dial, Fl_Valuator )
F4L_DEF_CAST( Fl_Dial, Fl_Widget )
MOON_LOCAL void f4l_dial_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
F4L_VALUATOR_METHODS,
{ "angles", dial_angles },
{ "__index", dial_index },
{ "__newindex", dial_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_DIAL_NAME, 0, methods, 0 );
moon_defcast( L, F4L_DIAL_NAME, F4L_VALUATOR_NAME,
f4l_cast_Fl_Dial_Fl_Valuator );
moon_defcast( L, F4L_DIAL_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Dial_Fl_Widget );
f4l_new_class_table( L, "Dial", new_dial );
}
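-- Hypothetical Lua usage sketch (editor-added, not part of the repository),
-- based on the Dial bindings above: the fl.Dial constructor comes from
-- f4l_new_class_table( L, "Dial", new_dial ), the "type"/"angle1"/"angle2"
-- properties from dial_index_/dial_newindex_, and angles() from the method
-- table. Window handling follows examples/adjuster.lua.
local fl = require( "fltk4lua" )
local window = fl.Window( 200, 200, "dial" )
local dial = fl.Dial( 50, 50, 100, 100, "volume" )
dial.type = "FL_LINE_DIAL"    -- one of FL_NORMAL_DIAL, FL_FILL_DIAL, FL_LINE_DIAL
dial:angles( 45, 315 )        -- equivalent to setting dial.angle1 and dial.angle2
window:end_group()
window:show( arg )
fl.run()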
<|start_filename|>src/f4l_tile.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_group.hxx"
#include "f4l_widget.hxx"
#include <FL/Fl_Tile.H>
namespace {
inline Fl_Tile* check_tile( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_TILE_NAME );
return static_cast< Fl_Tile* >( p );
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_Tile )
F4L_LUA_LLINKAGE_BEGIN
static int new_tile( lua_State* L ) {
F4L_TRY( L ) {
f4l_new_widget< Fl_Tile >( L, F4L_TILE_NAME,
f4l_delete_Fl_Tile );
} F4L_CATCH( L );
return 1;
}
static int tile_index( lua_State* L ) {
Fl_Tile* t = check_tile( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !f4l_group_index_( L, t, key, n ) &&
!f4l_widget_index_( L, t, key, n ) &&
!f4l_bad_property( L, F4L_TILE_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int tile_newindex( lua_State* L ) {
Fl_Tile* t = check_tile( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(f4l_group_newindex_( L, t, key, n ) ||
f4l_widget_newindex_( L, t, key, n ) ||
f4l_bad_property( L, F4L_TILE_NAME, key ));
} F4L_CATCH( L );
return 0;
}
static int tile_position( lua_State* L ) {
Fl_Tile* t = check_tile( L, 1 );
int oix = moon_checkint( L, 2, 0, INT_MAX );
int oiy = moon_checkint( L, 3, 0, INT_MAX );
int newx = moon_checkint( L, 4, 0, INT_MAX );
int newy = moon_checkint( L, 5, 0, INT_MAX );
F4L_TRY( L ) {
t->position( oix, oiy, newx, newy );
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_Tile, Fl_Group )
F4L_DEF_CAST( Fl_Tile, Fl_Widget )
MOON_LOCAL void f4l_tile_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
F4L_GROUP_METHODS,
{ "position", tile_position },
{ "__index", tile_index },
{ "__newindex", tile_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_TILE_NAME, 0, methods, 0 );
moon_defcast( L, F4L_TILE_NAME, F4L_GROUP_NAME,
f4l_cast_Fl_Tile_Fl_Group );
moon_defcast( L, F4L_TILE_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Tile_Fl_Widget );
f4l_new_class_table( L, "Tile", new_tile );
}
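-- Hypothetical Lua usage sketch (editor-added, not part of the repository): a
-- Tile group built from the bindings above. position() maps to
-- Fl_Tile::position(), which moves the shared edge located at (oldx, oldy) to
-- (newx, newy); the two boxes are only illustrative content.
local fl = require( "fltk4lua" )
local window = fl.Window( 300, 200, "tile" )
local tile = fl.Tile( 0, 0, 300, 200 )
local left = fl.Box( 0, 0, 150, 200, "left" )
left.box = "FL_DOWN_BOX"
local right = fl.Box( 150, 0, 150, 200, "right" )
right.box = "FL_DOWN_BOX"
tile:end_group()
window:end_group()
window:show( arg )
tile:position( 150, 0, 100, 0 )   -- drag the vertical divider 50 pixels left
fl.run()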
<|start_filename|>src/f4l_group.hxx<|end_filename|>
#ifndef F4L_GROUP_HXX_
#define F4L_GROUP_HXX_
#include "fltk4lua.hxx"
#include <FL/Fl_Group.H>
/* (Meta-)methods for Fl_Groups: */
MOON_LOCAL int f4l_group_index_( lua_State* L, Fl_Group* g,
char const* key, size_t n );
MOON_LOCAL int f4l_group_newindex_( lua_State* L, Fl_Group* g,
char const* key, size_t n );
F4L_LUA_LLINKAGE_BEGIN
MOON_LOCAL int f4l_group_add( lua_State* L );
MOON_LOCAL int f4l_group_add_resizable( lua_State* L );
MOON_LOCAL int f4l_group_begin( lua_State* L );
MOON_LOCAL int f4l_group_child( lua_State* L );
MOON_LOCAL int f4l_group_clear( lua_State* L );
MOON_LOCAL int f4l_group_end( lua_State* L );
MOON_LOCAL int f4l_group_find( lua_State* L );
MOON_LOCAL int f4l_group_insert( lua_State* L );
MOON_LOCAL int f4l_group_remove( lua_State* L );
F4L_LUA_LLINKAGE_END
#define F4L_GROUP_METHODS \
{ "add", f4l_group_add }, \
{ "add_resizable", f4l_group_add_resizable }, \
{ "begin", f4l_group_begin }, \
{ "begin_group", f4l_group_begin }, \
{ "child", f4l_group_child }, \
{ "clear", f4l_group_clear }, \
{ "end_group", f4l_group_end }, \
{ "find", f4l_group_find }, \
{ "insert", f4l_group_insert }, \
{ "remove", f4l_group_remove }
#endif /* F4L_GROUP_HXX_ */
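-- Hypothetical Lua usage sketch (editor-added, not part of the repository) for
-- the group methods declared above. Note that "begin" and "begin_group" are
-- registered to the same function, so the keyword-free begin_group()/end_group()
-- pair can be used from Lua. This fragment only demonstrates the API and does
-- not enter the FLTK event loop.
local fl = require( "fltk4lua" )
local window = fl.Window( 200, 100, "group" )
window:end_group()                -- stop auto-adding new widgets to the window
local box = fl.Box( 10, 10, 80, 25, "hello" )
window:add( box )                 -- explicit add instead of begin/end nesting
print( window:find( box ) )       -- position of the child within the group
window:remove( box )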
<|start_filename|>src/f4l_ask.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_enums.hxx"
#include <FL/fl_ask.H>
#include <FL/Fl_Color_Chooser.H>
#include <FL/Fl_File_Chooser.H>
namespace {
#define BEEP_LIST( _ ) \
_( FL_BEEP_DEFAULT ) \
_( FL_BEEP_MESSAGE ) \
_( FL_BEEP_ERROR ) \
_( FL_BEEP_QUESTION ) \
_( FL_BEEP_PASSWORD ) \
_( FL_BEEP_NOTIFICATION )
int check_beep( lua_State* L, int idx ) {
static char const* const names[] = {
#define GEN_NAME( _a ) #_a,
BEEP_LIST( GEN_NAME )
#undef GEN_NAME
NULL
};
static int const values[] = {
#define GEN_VALUE( _a ) _a,
BEEP_LIST( GEN_VALUE )
#undef GEN_VALUE
0 // dummy value
};
return values[ luaL_checkoption( L, idx, NULL, names ) ];
}
} // anonymous namespace
F4L_LUA_LLINKAGE_BEGIN
static int f4l_alert_( lua_State* L ) {
char const* s = luaL_checkstring( L, 1);
F4L_TRY( L ) {
fl_alert( "%s", s );
} F4L_CATCH( L );
return 0;
}
static int f4l_alert( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_alert_, 0 );
return 0;
}
static int f4l_beep( lua_State* L ) {
int b = luaL_opt( L, check_beep, 1, FL_BEEP_DEFAULT );
F4L_TRY( L ) {
fl_beep( b );
} F4L_CATCH( L );
return 0;
}
static int f4l_choice_( lua_State* L ) {
char const* s = luaL_checkstring( L, 1 );
char const* b0 = luaL_optstring( L, 2, NULL );
char const* b1 = luaL_optstring( L, 3, NULL );
char const* b2 = luaL_optstring( L, 4, NULL );
lua_settop( L, 4 );
F4L_TRY( L ) {
lua_pushvalue( L, 2+fl_choice( "%s", b0, b1, b2, s ) );
} F4L_CATCH( L );
return 1;
}
static int f4l_choice( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_choice_, 1 );
return 1;
}
static int f4l_color_chooser_( lua_State* L ) {
char const* s = luaL_optstring( L, 1, NULL );
uchar r = moon_optint( L, 2, 0, 255, 0 );
uchar g = moon_optint( L, 3, 0, 255, 0 );
uchar b = moon_optint( L, 4, 0, 255, 0 );
int mode = moon_optint( L, 5, -1, 3, -1 );
F4L_TRY( L ) {
if( fl_color_chooser( s, r, g, b, mode ) )
f4l_push_color( L, fl_rgb_color( r, g, b ) );
else
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int f4l_color_chooser( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_color_chooser_, 1 );
return 1;
}
static int f4l_dir_chooser_( lua_State* L ) {
char const* msg = luaL_checkstring( L, 1 );
char const* fname = luaL_checkstring( L, 2 );
int relative = lua_toboolean( L, 3 );
F4L_TRY( L ) {
char const* p = fl_dir_chooser( msg, fname, relative );
if( p != NULL )
lua_pushstring( L, p );
else
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int f4l_dir_chooser( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_dir_chooser_, 1 );
return 1;
}
static int f4l_file_chooser_( lua_State* L ) {
char const* msg = luaL_checkstring( L, 1 );
char const* pat = luaL_checkstring( L, 2 );
char const* fname = luaL_checkstring( L, 3 );
int relative = lua_toboolean( L, 4 );
F4L_TRY( L ) {
char const* p = fl_file_chooser( msg, pat, fname, relative );
if( p != NULL )
lua_pushstring( L, p );
else
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int f4l_file_chooser( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_file_chooser_, 1 );
return 1;
}
static int f4l_input_( lua_State* L ) {
char const* msg = luaL_checkstring( L, 1 );
char const* def = luaL_optstring( L, 2, NULL );
F4L_TRY( L ) {
char const* r = fl_input( "%s", def, msg );
if( r == NULL )
lua_pushnil( L );
else if( r == def )
lua_settop( L, 2 );
else
lua_pushstring( L, r );
} F4L_CATCH( L );
return 1;
}
static int f4l_input( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_input_, 1 );
return 1;
}
static int f4l_message_( lua_State* L ) {
char const* msg = luaL_checkstring( L, 1 );
F4L_TRY( L ) {
fl_message( "%s", msg );
} F4L_CATCH( L );
return 0;
}
static int f4l_message( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_message_, 0 );
return 0;
}
static int f4l_password_( lua_State* L ) {
char const* msg = luaL_checkstring( L, 1 );
char const* def = luaL_optstring( L, 2, NULL );
F4L_TRY( L ) {
char const* r = fl_password( "%s", def, msg );
if( r == NULL )
lua_pushnil( L );
else if( r == def )
lua_settop( L, 2 );
else
lua_pushstring( L, r );
} F4L_CATCH( L );
return 1;
}
static int f4l_password( lua_State* L ) {
F4L_CALL_PROTECTED( L, f4l_password_, 1 );
return 1;
}
static int f4l_message_hotspot( lua_State* L ) {
F4L_TRY( L ) {
if( lua_gettop( L ) > 0 ) {
fl_message_hotspot( lua_toboolean( L, 1 ) );
return 0;
} else {
lua_pushboolean( L, fl_message_hotspot() );
return 1;
}
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
MOON_LOCAL void f4l_ask_setup( lua_State* L ) {
luaL_Reg const functions[] = {
{ "alert", f4l_alert },
{ "beep", f4l_beep },
{ "choice", f4l_choice },
{ "color_chooser", f4l_color_chooser },
{ "dir_chooser", f4l_dir_chooser },
{ "file_chooser", f4l_file_chooser },
{ "input", f4l_input },
{ "message", f4l_message },
{ "password", <PASSWORD> },
{ "message_hotspot", f4l_message_hotspot },
{ NULL, NULL }
};
luaL_setfuncs( L, functions, 0 );
}
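-- Hypothetical Lua usage sketch (editor-added, not part of the repository). It
-- assumes f4l_ask_setup() registers the functions above on the fltk4lua module
-- table, so they are reachable as fl.message, fl.choice, etc.; that placement
-- is an assumption, while the function names come from the table above.
local fl = require( "fltk4lua" )
fl.message( "Hello from fltk4lua" )
local answer = fl.choice( "Save changes?", "Cancel", "Save", "Discard" )
print( "you picked:", answer )          -- returns the label of the pressed button
local name = fl.input( "Your name?", "anonymous" )
local color = fl.color_chooser( "Pick a color", 255, 0, 0 )  -- color or nil
fl.beep( "FL_BEEP_MESSAGE" )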
<|start_filename|>examples/adjuster.lua<|end_filename|>
#!/usr/bin/env lua
local fl = require( "fltk4lua" )
local window = fl.Window( 320, 100, arg[ 0 ] )
local b1 = fl.Box( 20, 35, 80, 25 )
b1.box = "FL_DOWN_BOX"
b1.color = fl.WHITE
local a1 = fl.Adjuster( 20+80, 35, 3*25, 25 )
a1.user_data = b1
function a1:callback( v )
v.label = self:format()
v:redraw()
end
a1:callback( b1 )
local b2 = fl.Box( 20+80+4*25, 35, 80, 25 )
b2.box = "FL_DOWN_BOX"
b2.color = fl.WHITE
local a2 = fl.Adjuster( b2.x+b2.w, 10, 25, 3*25 )
a2.user_data = b2
a2.callback = a1.callback
a2:callback( b2 )
--window.resizable = window -- looks bad in a tiling window manager
window:end_group()
window:show( arg )
fl.run()
<|start_filename|>src/f4l_file_browser.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_browser.hxx"
#include "f4l_browserx.hxx"
#include "f4l_widget.hxx"
#include "f4l_enums.hxx"
#include <FL/Fl_File_Browser.H>
#include <cstring>
#include <climits>
#define TYPE_LIST( _ ) \
_( "FILES", Fl_File_Browser::FILES ) \
_( "DIRECTORIES", Fl_File_Browser::DIRECTORIES )
F4L_GEN_TYPE_ENUM( TYPE_LIST, fbrowser )
namespace {
inline Fl_File_Browser* check_fbrowser( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_FILE_BROWSER_NAME );
return static_cast< Fl_File_Browser* >( p );
}
int fbrowser_index_( lua_State* L, Fl_File_Browser* b,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 6:
if( F4L_MEMCMP( key, "filter", 6 ) == 0 ) {
lua_pushstring( L, b->filter() );
return 1;
}
break;
case 8:
if( F4L_MEMCMP( key, "filetype", 8 ) == 0 ) {
f4l_push_type_fbrowser( L, b->filetype() );
return 1;
} else if( F4L_MEMCMP( key, "iconsize", 8 ) == 0 ) {
lua_pushinteger( L, b->iconsize() );
return 1;
} else if( F4L_MEMCMP( key, "textsize", 8 ) == 0 ) {
lua_pushinteger( L, b->textsize() );
return 1;
}
break;
}
return 0;
}
int fbrowser_newindex_( lua_State* L, Fl_File_Browser* b,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 6:
if( F4L_MEMCMP( key, "filter", 6 ) == 0 ) {
b->filter( luaL_checkstring( L, 3 ) );
return 1;
}
break;
case 8:
if( F4L_MEMCMP( key, "filetype", 8 ) == 0 ) {
b->filetype( f4l_check_type_fbrowser( L, 3 ) );
return 1;
} else if( F4L_MEMCMP( key, "iconsize", 8 ) == 0 ) {
b->iconsize( moon_checkint( L, 3, 0, UCHAR_MAX ) );
return 1;
} else if( F4L_MEMCMP( key, "textsize", 8 ) == 0 ) {
b->textsize( moon_checkint( L, 3, 0, INT_MAX ) );
return 1;
}
break;
}
return 0;
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_File_Browser )
F4L_LUA_LLINKAGE_BEGIN
static int new_file_browser( lua_State* L ) {
F4L_TRY( L ) {
f4l_new_widget< Fl_File_Browser >( L, F4L_FILE_BROWSER_NAME,
f4l_delete_Fl_File_Browser );
} F4L_CATCH( L );
return 1;
}
static int fbrowser_index( lua_State* L ) {
Fl_File_Browser* b = check_fbrowser( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !fbrowser_index_( L, b, key, n ) &&
!f4l_browser_index_( L, b, key, n ) &&
!f4l_browserx_index_( L, b, key, n ) &&
!f4l_widget_index_( L, b, key, n ) &&
!f4l_bad_property( L, F4L_FILE_BROWSER_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int fbrowser_newindex( lua_State* L ) {
Fl_File_Browser* b = check_fbrowser( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(fbrowser_newindex_( L, b, key, n ) ||
f4l_browser_newindex_( L, b, key, n ) ||
f4l_browserx_newindex_( L, b, key, n ) ||
f4l_widget_newindex_( L, b, key, n ) ||
f4l_bad_property( L, F4L_FILE_BROWSER_NAME, key ));
} F4L_CATCH( L );
return 0;
}
static int fbrowser_load( lua_State* L ) {
static char const* names[] = {
"fl_numericsort", "fl_casenumericsort",
"fl_alphasort", "fl_casealphasort", NULL
};
static Fl_File_Sort_F* const values[] = {
fl_numericsort,
fl_casenumericsort,
fl_alphasort,
fl_casealphasort
};
Fl_File_Browser* b = check_fbrowser( L, 1 );
char const* dname = luaL_checkstring( L, 2 );
Fl_File_Sort_F* f = values[ luaL_checkoption( L, 3, "fl_numericsort", names ) ];
F4L_TRY( L ) {
return luaL_fileresult( L, b->load( dname, f ), dname );
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_File_Browser, Fl_Browser )
F4L_DEF_CAST( Fl_File_Browser, Fl_Browser_ )
F4L_DEF_CAST( Fl_File_Browser, Fl_Widget )
MOON_LOCAL void f4l_file_browser_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
F4L_BROWSERX_METHODS,
F4L_BROWSER_METHODS,
{ "load", fbrowser_load },
{ "__index", fbrowser_index },
{ "__newindex", fbrowser_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_FILE_BROWSER_NAME, 0, methods, 0 );
moon_defcast( L, F4L_FILE_BROWSER_NAME, F4L_BROWSER_NAME,
f4l_cast_Fl_File_Browser_Fl_Browser );
moon_defcast( L, F4L_FILE_BROWSER_NAME, F4L_BROWSERX_NAME,
f4l_cast_Fl_File_Browser_Fl_Browser_ );
moon_defcast( L, F4L_FILE_BROWSER_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_File_Browser_Fl_Widget );
f4l_new_class_table( L, "File_Browser", new_file_browser );
}
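-- Hypothetical Lua usage sketch (editor-added, not part of the repository),
-- based on the File_Browser bindings above: "filter", "filetype", "iconsize"
-- and "textsize" are properties, while load() takes a directory name plus an
-- optional sort-function name (defaulting to "fl_numericsort").
local fl = require( "fltk4lua" )
local window = fl.Window( 300, 400, "files" )
local browser = fl.File_Browser( 10, 10, 280, 380 )
browser.filter = "*.lua"
browser.filetype = "FILES"            -- or "DIRECTORIES"
browser.iconsize = 20
browser:load( ".", "fl_alphasort" )   -- returns luaL_fileresult-style values
window:end_group()
window:show( arg )
fl.run()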
<|start_filename|>src/f4l_widget.hxx<|end_filename|>
#ifndef F4L_WIDGET_HXX_
#define F4L_WIDGET_HXX_
#include "fltk4lua.hxx"
#include <FL/Fl_Widget.H>
#include <climits>
/* Custom flag to use in moon_object_headers to indicate that the
* widget is running a callback (this is used by the menu code,
* which prevents you from modifying the menu items during the
* callback). */
#define F4L_CALLBACK_ACTIVE (0x80u)
/* Inside a FLTK callback the Fl_Widget pointer must be mapped to
* a real userdata. This is done using a weak table in the registry.
*/
MOON_LOCAL void f4l_register_widget( lua_State* L, Fl_Widget* w,
int setud = 1);
MOON_LOCAL void f4l_push_widget( lua_State* L, Fl_Widget* w );
/* Detect whether we created a given widget */
MOON_LOCAL int f4l_our_widget( lua_State* L, Fl_Widget* w );
/* Most widgets take the same arguments for their constructors, so
* it makes sense to refactor creation of the widget userdata into
* a helper function: */
template< typename T >
MOON_LOCAL T* f4l_new_widget( lua_State* L, char const* tname,
moon_object_destructor destructor ) {
int has_properties = 0;
if( lua_gettop( L ) == 2 && lua_istable( L, 2 ) ) {
has_properties = 1;
lua_replace( L, 1 );
for( int i = 1; i <= 5; ++i )
lua_rawgeti( L, 1, i );
}
lua_settop( L, 6 );
lua_rotate( L, 1, -1 );
int x = moon_checkint( L, 1, 0, INT_MAX );
int y = moon_checkint( L, 2, 0, INT_MAX );
int w = moon_checkint( L, 3, 0, INT_MAX );
int h = moon_checkint( L, 4, 0, INT_MAX );
char const* label = luaL_optstring( L, 5, NULL );
void** p = moon_newpointer( L, tname, destructor );
/* widgets need a uservalue table to store the callback function
* and for keeping references to child widgets */
lua_newtable( L );
if( label != NULL ) {
lua_pushvalue( L, 5 );
lua_setfield( L, -2, "label" );
}
lua_setuservalue( L, -2 );
/* all widgets are allocated via `new` because FLTK uses `delete`
* internally */
T* widget = new T( x, y, w, h, label );
*p = static_cast< void* >( widget );
/* add the new widget to the lightuserdata -> full userdata mapping
* in the registry, set the Lua thread pointer as (FLTK) user data
* for the callbacks, and put a reference to this widget into the
* (Lua) uservalue table of the parent group widget (if any) */
f4l_register_widget( L, widget );
/* in case a table was used to pass constructor arguments, there
* might be more properties to set on the userdata */
if( has_properties )
f4l_add_properties( L, 7, 6 );
return widget;
}
/* Sometimes we want access to widgets that are members of another
* object: */
template< typename T >
MOON_LOCAL void f4l_new_member( lua_State* L, char const* tname, T* w,
int pindex, int (*checkfn)( void* ) = 0,
void* ptr = NULL ) {
void** p = moon_newfield( L, tname, pindex, checkfn, ptr );
if( pindex == 0 ) {
lua_newtable( L );
lua_setuservalue( L, -2 );
}
*p = static_cast< void* >( w );
f4l_register_widget( L, w, 0 );
}
/* (Meta-)methods for Fl_Widgets: */
MOON_LOCAL int f4l_widget_index_( lua_State* L, Fl_Widget* w,
char const* key, size_t n );
MOON_LOCAL int f4l_widget_newindex_( lua_State* L, Fl_Widget* w,
char const* key, size_t n );
F4L_LUA_LLINKAGE_BEGIN
MOON_LOCAL int f4l_widget_activate( lua_State* L );
MOON_LOCAL int f4l_widget_clear_changed( lua_State* L );
MOON_LOCAL int f4l_widget_clear_damage( lua_State* L );
MOON_LOCAL int f4l_widget_clear_output( lua_State* L );
MOON_LOCAL int f4l_widget_clear_visible( lua_State* L );
MOON_LOCAL int f4l_widget_clear_visible_focus( lua_State* L );
MOON_LOCAL int f4l_widget_contains( lua_State* L );
MOON_LOCAL int f4l_widget_deactivate( lua_State* L );
MOON_LOCAL int f4l_widget_hide( lua_State* L );
MOON_LOCAL int f4l_widget_inside( lua_State* L );
MOON_LOCAL int f4l_widget_measure_label( lua_State* L );
MOON_LOCAL int f4l_widget_position( lua_State* L );
MOON_LOCAL int f4l_widget_redraw( lua_State* L );
MOON_LOCAL int f4l_widget_redraw_label( lua_State* L );
MOON_LOCAL int f4l_widget_resize( lua_State* L );
MOON_LOCAL int f4l_widget_set_changed( lua_State* L );
MOON_LOCAL int f4l_widget_set_output( lua_State* L );
MOON_LOCAL int f4l_widget_set_visible( lua_State* L );
MOON_LOCAL int f4l_widget_set_visible_focus( lua_State* L );
MOON_LOCAL int f4l_widget_show( lua_State* L );
MOON_LOCAL int f4l_widget_size( lua_State* L );
MOON_LOCAL int f4l_widget_take_focus( lua_State* L );
F4L_LUA_LLINKAGE_END
#define F4L_WIDGET_METHODS \
{ "activate", f4l_widget_activate }, \
{ "clear_changed", f4l_widget_clear_changed }, \
{ "clear_damage", f4l_widget_clear_damage }, \
{ "clear_output", f4l_widget_clear_output }, \
{ "clear_visible", f4l_widget_clear_visible }, \
{ "clear_visible_focus", f4l_widget_clear_visible_focus }, \
{ "contains", f4l_widget_contains }, \
{ "deactivate", f4l_widget_deactivate }, \
{ "hide", f4l_widget_hide }, \
{ "inside", f4l_widget_inside }, \
{ "measure_label", f4l_widget_measure_label }, \
{ "position", f4l_widget_position }, \
{ "redraw", f4l_widget_redraw }, \
{ "redraw_label", f4l_widget_redraw_label }, \
{ "resize", f4l_widget_resize }, \
{ "set_changed", f4l_widget_set_changed }, \
{ "set_output", f4l_widget_set_output }, \
{ "set_visible", f4l_widget_set_visible }, \
{ "set_visible_focus", f4l_widget_set_visible_focus }, \
{ "show", f4l_widget_show }, \
{ "size", f4l_widget_size }, \
{ "take_focus", f4l_widget_take_focus }
#endif /* F4L_WIDGET_HXX_ */
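-- Hypothetical Lua usage sketch (editor-added, not part of the repository)
-- touching a few of the generic widget methods declared above; every widget
-- class shares them through the F4L_WIDGET_METHODS macro. The fragment only
-- demonstrates the calls and does not enter the event loop.
local fl = require( "fltk4lua" )
local window = fl.Window( 200, 100, "widget" )
local box = fl.Box( 10, 10, 80, 25, "hello" )
window:end_group()
box:deactivate()                  -- grey the widget out
box:activate()
box:resize( 10, 10, 120, 25 )
box:redraw()
print( window:contains( box ) )   -- is box part of the window's hierarchy?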
<|start_filename|>src/f4l_scroll.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_group.hxx"
#include "f4l_widget.hxx"
#include "f4l_enums.hxx"
#include <FL/Fl_Scroll.H>
#define TYPE_LIST( _ ) \
_( "NO_SCROLLBARS", 0 ) \
_( "HORIZONTAL", Fl_Scroll::HORIZONTAL ) \
_( "VERTICAL", Fl_Scroll::VERTICAL ) \
_( "BOTH", Fl_Scroll::BOTH ) \
_( "HORIZONTAL_ALWAYS", Fl_Scroll::HORIZONTAL_ALWAYS ) \
_( "VERTICAL_ALWAYS", Fl_Scroll::VERTICAL_ALWAYS ) \
_( "BOTH_ALWAYS", Fl_Scroll::BOTH_ALWAYS )
F4L_GEN_TYPE_ENUM( TYPE_LIST, scroll )
namespace {
inline Fl_Scroll* check_scroll( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_SCROLL_NAME );
return static_cast< Fl_Scroll* >( p );
}
int scroll_index_( lua_State* L, Fl_Scroll* s,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "type", 4 ) == 0 ) {
f4l_push_type_scroll( L, s->type() );
return 1;
}
break;
case 9:
if( F4L_MEMCMP( key, "scrollbar", 9 ) == 0 ) {
if( moon_getuvfield( L, 1, "scrollbar" ) == LUA_TNIL ) {
f4l_new_member< Fl_Scrollbar >( L, F4L_SCROLLBAR_NAME,
&s->scrollbar, 1 );
lua_pushvalue( L, -1 );
moon_setuvfield( L, 1, "scrollbar" );
}
return 1;
} else if( F4L_MEMCMP( key, "xposition", 9 ) == 0 ) {
lua_pushinteger( L, s->xposition() );
return 1;
} else if( F4L_MEMCMP( key, "yposition", 9 ) == 0 ) {
lua_pushinteger( L, s->yposition() );
return 1;
}
break;
case 10:
if( F4L_MEMCMP( key, "hscrollbar", 10 ) == 0 ) {
if( moon_getuvfield( L, 1, "hscrollbar" ) == LUA_TNIL ) {
f4l_new_member< Fl_Scrollbar >( L, F4L_SCROLLBAR_NAME,
&s->hscrollbar, 1 );
lua_pushvalue( L, -1 );
moon_setuvfield( L, 1, "hscrollbar" );
}
return 1;
}
break;
case 14:
if( F4L_MEMCMP( key, "scrollbar_size", 14 ) == 0 ) {
lua_pushinteger( L, s->scrollbar_size() );
return 1;
}
break;
}
return 0;
}
int scroll_newindex_( lua_State* L, Fl_Scroll* s,
char const* key, size_t n ) {
using namespace std;
switch( n ) {
case 4:
if( F4L_MEMCMP( key, "type", 4 ) == 0 ) {
s->type( f4l_check_type_scroll( L, 3 ) );
return 1;
}
break;
case 14:
if( F4L_MEMCMP( key, "scrollbar_size", 14 ) == 0 ) {
s->scrollbar_size( moon_checkint( L, 3, 0, INT_MAX ) );
return 1;
}
break;
}
return 0;
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_Scroll )
F4L_LUA_LLINKAGE_BEGIN
static int new_scroll( lua_State* L ) {
F4L_TRY( L ) {
Fl_Scroll* s = f4l_new_widget< Fl_Scroll >( L, F4L_SCROLL_NAME,
f4l_delete_Fl_Scroll );
/* The __index handler would create those objects on demand,
* but the scrollbars are stored in the group's children, so an
* error could be thrown if they are encountered there before
* having a corresponding userdata registered! */
f4l_new_member< Fl_Scrollbar >( L, F4L_SCROLLBAR_NAME,
&s->hscrollbar, -1 );
moon_setuvfield( L, -2, "hscrollbar" );
f4l_new_member< Fl_Scrollbar >( L, F4L_SCROLLBAR_NAME,
&s->scrollbar, -1 );
moon_setuvfield( L, -2, "scrollbar" );
} F4L_CATCH( L );
return 1;
}
static int scroll_index( lua_State* L ) {
Fl_Scroll* s = check_scroll( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !scroll_index_( L, s, key, n ) &&
!f4l_group_index_( L, s, key, n ) &&
!f4l_widget_index_( L, s, key, n ) &&
!f4l_bad_property( L, F4L_SCROLL_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int scroll_newindex( lua_State* L ) {
Fl_Scroll* s = check_scroll( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(scroll_newindex_( L, s, key, n ) ||
f4l_group_newindex_( L, s, key, n ) ||
f4l_widget_newindex_( L, s, key, n ) ||
f4l_bad_property( L, F4L_SCROLL_NAME, key ));
} F4L_CATCH( L );
return 0;
}
static int scroll_clear( lua_State* L ) {
Fl_Scroll* s = check_scroll( L, 1 );
lua_getuservalue( L, 1 );
F4L_TRY( L ) {
int n = s->children();
for( int i = n; i > 0; --i ) {
Fl_Widget* w = s->child( i-1 );
s->remove( i-1 );
lua_pushnil( L );
lua_rawsetp( L, -2, static_cast< void* >( w ) );
}
s->add( s->hscrollbar );
s->add( s->scrollbar );
} F4L_CATCH( L );
lua_pop( L, 1 );
return 0;
}
static int scroll_scroll_to( lua_State* L ) {
Fl_Scroll* s = check_scroll( L, 1 );
int w = moon_checkint( L, 2, 0, INT_MAX );
int h = moon_checkint( L, 3, 0, INT_MAX );
F4L_TRY( L ) {
s->scroll_to( w, h );
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_Scroll, Fl_Group )
F4L_DEF_CAST( Fl_Scroll, Fl_Widget )
MOON_LOCAL void f4l_scroll_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
F4L_GROUP_METHODS,
{ "clear", scroll_clear },
{ "scroll_to", scroll_scroll_to },
{ "__index", scroll_index },
{ "__newindex", scroll_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_SCROLL_NAME, 0, methods, 0 );
moon_defcast( L, F4L_SCROLL_NAME, F4L_GROUP_NAME,
f4l_cast_Fl_Scroll_Fl_Group );
moon_defcast( L, F4L_SCROLL_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Scroll_Fl_Widget );
f4l_new_class_table( L, "Scroll", new_scroll );
}
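-- Hypothetical Lua usage sketch (editor-added, not part of the repository),
-- based on the Scroll bindings above: "type", "scrollbar_size", "xposition",
-- "yposition" and the scrollbar/hscrollbar members are exposed as properties,
-- scroll_to() as a method.
local fl = require( "fltk4lua" )
local window = fl.Window( 300, 200, "scroll" )
local scroll = fl.Scroll( 0, 0, 300, 200 )
scroll.type = "BOTH_ALWAYS"
scroll.scrollbar_size = 20
local big = fl.Box( 0, 0, 600, 600, "big content" )
big.box = "FL_DOWN_BOX"
scroll:end_group()
window:end_group()
window:show( arg )
scroll:scroll_to( 100, 100 )
print( scroll.xposition, scroll.yposition )
fl.run()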
<|start_filename|>src/f4l_menu_button.cxx<|end_filename|>
#include "fltk4lua.hxx"
#include "f4l_menu.hxx"
#include "f4l_widget.hxx"
#include "f4l_enums.hxx"
#include <FL/Fl_Menu_Button.H>
#define TYPE_LIST( _ ) \
_( "NOPOPUP", 0 ) \
_( "POPUP1", Fl_Menu_Button::POPUP1 ) \
_( "POPUP2", Fl_Menu_Button::POPUP2 ) \
_( "POPUP12", Fl_Menu_Button::POPUP12 ) \
_( "POPUP3", Fl_Menu_Button::POPUP3 ) \
_( "POPUP13", Fl_Menu_Button::POPUP13 ) \
_( "POPUP23", Fl_Menu_Button::POPUP23 ) \
_( "POPUP123", Fl_Menu_Button::POPUP123 )
F4L_GEN_TYPE_ENUM( TYPE_LIST, popup )
namespace {
inline Fl_Menu_Button* check_menu_button( lua_State* L, int idx ) {
void* p = moon_checkobject( L, idx, F4L_MENU_BUTTON_NAME );
return static_cast< Fl_Menu_Button* >( p );
}
int menu_button_index_( lua_State* L, Fl_Menu_Button* mb,
char const* key, size_t n ) {
using namespace std;
if( n == 4 && F4L_MEMCMP( key, "type", 4 ) == 0 ) {
f4l_push_type_popup( L, mb->type() );
return 1;
}
return 0;
}
int menu_button_newindex_( lua_State* L, Fl_Menu_Button* mb,
char const* key, size_t n ) {
using namespace std;
if( n == 4 && F4L_MEMCMP( key, "type", 4 ) == 0 ) {
mb->type( f4l_check_type_popup( L, 3 ) );
return 1;
}
return 0;
}
} // anonymous namespace
F4L_DEF_DELETE( Fl_Menu_Button )
F4L_LUA_LLINKAGE_BEGIN
static int new_menu_button( lua_State* L ) {
F4L_TRY( L ) {
f4l_new_widget< Fl_Menu_Button >( L, F4L_MENU_BUTTON_NAME,
f4l_delete_Fl_Menu_Button );
} F4L_CATCH( L );
return 1;
}
static int menu_button_index( lua_State* L ) {
Fl_Menu_Button* mb = check_menu_button( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
if( !menu_button_index_( L, mb, key, n ) &&
!f4l_menu_index_( L, mb, key, n ) &&
!f4l_widget_index_( L, mb, key, n ) &&
!f4l_bad_property( L, F4L_MENU_BUTTON_NAME, key ) )
lua_pushnil( L );
} F4L_CATCH( L );
return 1;
}
static int menu_button_newindex( lua_State* L ) {
Fl_Menu_Button* mb = check_menu_button( L, 1 );
size_t n = 0;
char const* key = luaL_checklstring( L, 2, &n );
F4L_TRY( L ) {
(void)(menu_button_newindex_( L, mb, key, n ) ||
f4l_menu_newindex_( L, mb, key, n ) ||
f4l_widget_newindex_( L, mb, key, n ) ||
f4l_bad_property( L, F4L_MENU_BUTTON_NAME, key ));
} F4L_CATCH( L );
return 0;
}
F4L_LUA_LLINKAGE_END
F4L_DEF_CAST( Fl_Menu_Button, Fl_Menu_ )
F4L_DEF_CAST( Fl_Menu_Button, Fl_Widget )
MOON_LOCAL void f4l_menu_button_setup( lua_State* L ) {
luaL_Reg const methods[] = {
F4L_WIDGET_METHODS,
F4L_MENU_METHODS,
{ "__index", menu_button_index },
{ "__newindex", menu_button_newindex },
{ NULL, NULL }
};
moon_defobject( L, F4L_MENU_BUTTON_NAME, 0, methods, 0 );
moon_defcast( L, F4L_MENU_BUTTON_NAME, F4L_MENU_NAME,
f4l_cast_Fl_Menu_Button_Fl_Menu_ );
moon_defcast( L, F4L_MENU_BUTTON_NAME, F4L_WIDGET_NAME,
f4l_cast_Fl_Menu_Button_Fl_Widget );
f4l_new_class_table( L, "Menu_Button", new_menu_button );
}
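-- Hypothetical Lua usage sketch (editor-added, not part of the repository):
-- the popup "type" property is the only binding added by this file on top of
-- the shared menu/widget methods. The add() calls used to populate the menu
-- are assumed to come from the shared menu bindings, which are not part of
-- this file.
local fl = require( "fltk4lua" )
local window = fl.Window( 300, 200, "popup" )
local menu = fl.Menu_Button( 0, 0, 300, 200 )
menu.type = "POPUP3"              -- open on right mouse button only
menu:add( "Copy" )                -- assumed shared menu method
menu:add( "Paste" )
window:end_group()
window:show( arg )
fl.run()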
| caoliver/lua-fltk4lua |
<|start_filename|>DXMPP/SASL/CryptoPP_byte.hpp<|end_filename|>
//
// CryptoPP_byte.hpp
// DXMPP
//
// Created by <NAME> 2020
// Copyright (c) 2014 <NAME>. All rights reserved.
//
// Work around Crypto++ API incompatibility between 5.x and later versions
#include <cryptopp/config.h>
#if (CRYPTOPP_VERSION < 600)
namespace CryptoPP {
typedef unsigned char byte;
}
#endif
<|start_filename|>DXMPP/SASL/SASLMechanism_EXTERNAL.hpp<|end_filename|>
//
// SASLMechanism_EXTERNAL.hpp
// DXMPP
//
// Created by <NAME> 2019
// Copyright (c) 2019 <NAME>. All rights reserved.
//
#ifndef DXMPP_SASLMechanism_EXTERNAL_hpp
#define DXMPP_SASLMechanism_EXTERNAL_hpp
#include "SASLMechanism.hpp"
namespace DXMPP
{
namespace SASL
{
class SASL_Mechanism_EXTERNAL: public SASLMechanism
{
public:
void Begin();
SASL_Mechanism_EXTERNAL(boost::shared_ptr<DXMPP::Network::AsyncTCPXMLClient> Uplink)
:SASLMechanism(Uplink, JID(""), std::string(""))
{
}
void Challenge(const pugi::xpath_node &challenge);
bool Verify(const pugi::xpath_node &SuccessTag);
};
}
}
#endif // DXMPP_SASLMechanism_EXTERNAL_hpp
<|start_filename|>DXMPP/Network/AsyncTCPXMLClient.hpp<|end_filename|>
//
// AsyncTCPXMLClient.hpp
// DXMPP
//
// Created by <NAME> 2014
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#ifndef DXMPP_AsyncTCPClient_hpp
#define DXMPP_AsyncTCPClient_hpp
#ifdef __APPLE__
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#elif defined(__GNUC__) || defined(__GNUG__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#endif // __clang__
#endif
#include <boost/asio.hpp>
#include <boost/asio/ssl.hpp>
#include <boost/thread.hpp>
#include <boost/bind.hpp>
#include <boost/enable_shared_from_this.hpp>
#include <boost/smart_ptr.hpp>
#include <memory>
#include <sstream>
#include <queue>
#include <pugixml/pugixml.hpp>
#include <DXMPP/Debug/DebugOutputTreshold.hpp>
#include <DXMPP/TLSVerification.hpp>
namespace DXMPP
{
namespace Network
{
class AsyncTCPXMLClient
{
TLSVerification *TLSConfig;
DebugOutputTreshold DebugTreshold;
boost::asio::const_buffer Certificate;
boost::asio::const_buffer Privatekey;
static const int ReadDataBufferSize = 1024;
char ReadDataBufferNonSSL[ReadDataBufferSize];
std::stringstream ReadDataStreamNonSSL;
char ReadDataBufferSSL[ReadDataBufferSize];
std::stringstream ReadDataStreamSSL;
boost::asio::mutable_buffers_1 SSLBuffer;
boost::asio::mutable_buffers_1 NonSSLBuffer;
void SendKeepAliveWhitespace();
std::unique_ptr<boost::asio::deadline_timer> SendKeepAliveWhitespaceTimer;
std::string SendKeepAliveWhiteSpaceDataToSend;
int SendKeepAliveWhiteSpaceTimeeoutSeconds;
boost::asio::io_context::strand SynchronizationStrand;
//boost::shared_mutex ReadMutex;
//boost::shared_mutex WriteMutex;
boost::posix_time::ptime LastWrite;
boost::shared_mutex IncomingDocumentsMutex;
std::queue<pugi::xml_document*> IncomingDocuments;
std::queue<std::shared_ptr<std::string>> OutgoingData;
boost::shared_mutex OutgoingDataMutex;
bool Flushing;
void FlushOutgoingDataUnsafe();
public:
void FlushOutgoingData();
void SignalError();
std::stringstream *ReadDataStream;
char * ReadDataBuffer;
enum class ConnectionState
{
Connected,
Upgrading,
Disconnected,
Error
};
std::string Hostname;
int Portnumber;
bool SSLConnection;
volatile ConnectionState CurrentConnectionState;
boost::shared_ptr<boost::asio::io_service> io_service;
boost::scoped_ptr<boost::asio::ssl::context> ssl_context;
boost::scoped_ptr<boost::asio::ip::tcp::socket> tcp_socket;
boost::scoped_ptr<boost::asio::ssl::stream<boost::asio::ip::tcp::socket&>> ssl_socket;
void HandleWrite(boost::asio::ip::tcp::socket *active_socket,
std::shared_ptr<std::string> Data,
const boost::system::error_code &error);
void SetKeepAliveByWhiteSpace(const std::string &DataToSend,
int TimeoutSeconds = 5);
bool EnsureTCPKeepAlive();
bool ConnectTLSSocket();
bool ConnectSocket();
void AsyncRead();
bool VerifyCertificate(bool preverified,
boost::asio::ssl::verify_context& ctx);
void WriteXMLToSocket(pugi::xml_document *Doc);
void WriteTextToSocket(const std::string &Data);
void HandleRead(
boost::asio::ip::tcp::socket *active_socket,
char *ActiveDataBuffer,
const boost::system::error_code &error,
std::size_t bytes_transferred);
bool InnerLoadXML();
bool LoadXML(int Iteration );
void LoadXML();
std::unique_ptr<pugi::xml_document> FetchDocument();
void ClearReadDataStream();
void Reset();
~AsyncTCPXMLClient()
{
//std::cout << "~AsyncTCPXMLClient" << std::endl;
}
typedef boost::function<void (void)> ErrorCallbackFunction;
typedef boost::function<void (void)> GotDataCallbackFunction;
ErrorCallbackFunction ErrorCallback;
GotDataCallbackFunction GotDataCallback;
AsyncTCPXMLClient(
boost::shared_ptr<boost::asio::io_service> IOService,
boost::asio::const_buffer Certificate,
boost::asio::const_buffer Privatekey,
TLSVerification *TLSConfig,
const std::string &Hostname,
int Portnumber,
const ErrorCallbackFunction &ErrorCallback,
const GotDataCallbackFunction &GotDataCallback,
DebugOutputTreshold DebugTreshold = DebugOutputTreshold::Error)
:
TLSConfig(TLSConfig),
Certificate(Certificate),
Privatekey(Privatekey),
DebugTreshold(DebugTreshold),
SSLBuffer( boost::asio::buffer(ReadDataBufferSSL, ReadDataBufferSize) ),
NonSSLBuffer( boost::asio::buffer(ReadDataBufferNonSSL, ReadDataBufferSize) ),
SynchronizationStrand(*IOService),
ErrorCallback(ErrorCallback),
GotDataCallback(GotDataCallback)
{
this->io_service = IOService;
this->Hostname = Hostname;
this->Portnumber = Portnumber;
Flushing = false;
}
};
}
}
#ifdef __APPLE__
#if defined(__clang__)
#pragma clang diagnostic pop
#elif defined(__GNUC__) || defined(__GNUG__)
#pragma GCC diagnostic pop
#endif // __clang__
#endif
#endif // DXMPP_AsyncTCPClient_hpp
<|start_filename|>Examples/EchoBot.cpp<|end_filename|>
//
// EchoBot.cpp
// DXMPP
//
// Created by <NAME> 2014
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#include <DXMPP/Connection.hpp>
#include <boost/thread.hpp>
using namespace std;
using namespace DXMPP;
using namespace pugi;
class EchoBot :
public IEventHandler,
public StanzaCallback,
public ConnectionCallback
{
public:
volatile bool Quit;
EchoBot()
:
Quit(false)
{
}
void StanzaReceived(SharedStanza Stanza,
SharedConnection Sender)
{
xml_node Body = Stanza->Payload.select_node("//body").node();
if(!Body)
return;
if( string(Body.text().as_string()) == "quit")
{
cout << "Received quit. Quiting" << endl;
Quit = true;
return;
}
cout << "Echoing message '" << Body.text().as_string() << "' from "
<< Stanza->From.GetFullJID() << endl;
SharedStanza ResponseStanza = Sender->CreateStanza(Stanza->From, StanzaType::Message);
ResponseStanza->Payload.append_copy(Body);
Sender->SendStanza(ResponseStanza);
}
void ConnectionStateChanged(ConnectionState NewState,
SharedConnection /*Sender*/)
{
switch(NewState)
{
case ConnectionState::Connected:
cout << "\\o/ Lets annoy!" << endl;
break;
case ConnectionState::Connecting:
cout << "-o/ Lets go allready.. Connect connect connect!!" << endl;
break;
case ConnectionState::ErrorAuthenticating:
case ConnectionState::ErrorConnecting:
case ConnectionState::ErrorUnknown:
cerr << "-o/~ Mercy mercy i am bugged!!" << endl;
Quit = true;
break;
case ConnectionState::NotConnected:
cout << "-o- WHY NOT CONNECTED?!" << endl;
break;
}
}
};
int main(int, const char **)
{
EchoBot Handler;
// Please note: we are using self-signed certificates on the dev server, so we need
// to pass TLSVerificationMode::None. This should not be done in production.
SharedConnection Uplink = Connection::Create( string("deusexmachinae.se") /* Host */,
5222 /* Port number */,
DXMPP::JID( "dxmpp<EMAIL>" ) /* Requested JID */,
string("dxmpp") /* Password */,
&Handler,
TLSVerificationMode::None);
cout << "Entering fg loop." << endl;
while(!Handler.Quit)
{
boost::this_thread::sleep(boost::posix_time::milliseconds(10));
}
return 0;
}
<|start_filename|>DXMPP/Connection.cpp<|end_filename|>
//
// Connection.cpp
// DXMPP
//
// Created by <NAME> on 31/05/14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#include "Connection.hpp"
#include <boost/thread.hpp>
#include <iostream>
#include <DXMPP/SASL/SASLMechanism.hpp>
#include <DXMPP/SASL/SASLMechanism_DIGEST_MD5.hpp>
#include <DXMPP/SASL/SASLMechanism_SCRAM_SHA1.hpp>
#include <DXMPP/SASL/SASLMechanism_PLAIN.hpp>
#include <DXMPP/SASL/SASLMechanism_EXTERNAL.hpp>
namespace DXMPP
{
#define DebugOut(DebugLevel) \
if (DebugLevel > DebugTreshold) {} \
else std::cout
using namespace std;
using namespace pugi;
std::atomic<int> Connection::ReconnectionCount(-1);
void Connection::BrodcastConnectionState(ConnectionCallback::ConnectionState NewState)
{
if(PreviouslyBroadcastedState == NewState)
return;
PreviouslyBroadcastedState = NewState;
boost::shared_lock<boost::shared_mutex> ReadLock(ConnectionHandlerMutex);
if(!ConnectionHandler)
return;
ConnectionHandler->ConnectionStateChanged(NewState, shared_from_this());
}
void Connection::OpenXMPPStream()
{
CurrentConnectionState = ConnectionState::WaitingForFeatures;
stringstream Stream;
Stream << "<?xml version='1.0' encoding='utf-8'?>";
Stream << "<stream:stream";
Stream << " from = '" << MyJID.GetBareJID() << "'";
Stream << " to = '" << MyJID.GetDomain() << "'";
Stream << " version='1.0'";
Stream << " xml:lang='en'";
Stream << " xmlns='jabber:client'";
Stream << " xmlns:stream='http://etherx.jabber.org/streams'>";
DebugOut(DebugOutputTreshold::Debug)
<< "DXMPP: Opening stream" << std::endl;// << Stream.str();
Client->WriteTextToSocket(Stream.str());
}
void Connection::CheckStreamForFeatures()
{
string str = Client->ReadDataStream->str();
size_t streamfeatures = str.find("</stream:features>");
if(streamfeatures == string::npos)
return;
if(CurrentAuthenticationState == AuthenticationState::SASL)
{
Client->ClearReadDataStream();
CurrentConnectionState = ConnectionState::Authenticating;
return;
}
if(CurrentAuthenticationState == AuthenticationState::Bind)
{
Client->ClearReadDataStream();
CurrentConnectionState = ConnectionState::Authenticating;
BindResource();
return;
}
// Note to self: can't use LoadXML() here because this is not valid XML --
// the <stream:stream> wrapping <stream:features> is still unclosed.
xml_document xdoc;
xdoc.load(*Client->ReadDataStream, parse_full&~parse_eol, encoding_auto);
Client->ClearReadDataStream();
ostringstream o;
xdoc.save(o, "\t", format_no_declaration);
pugi::xpath_node starttls = xdoc.select_node("//starttls");
if(starttls)
{
DebugOut(DebugOutputTreshold::Debug)
<< std::endl << "START TLS SUPPORTED" << std::endl;
FeaturesStartTLS = true;
}
//Move to sasl class
pugi::xpath_node_set mechanisms = xdoc.select_nodes("//mechanism");
for (auto it = mechanisms.begin(); it != mechanisms.end(); it++)
{
xml_node node = it->node();
string mechanism = string(node.child_value());
DebugOut(DebugOutputTreshold::Debug)
<< "Mechanism supported: " << mechanism << std::endl;
if(mechanism == "EXTERNAL")
FeaturesSASL_External = true;
if(mechanism == "DIGEST-MD5")
FeaturesSASL_DigestMD5 = true;
if(mechanism == "CRAM-MD5")
FeaturesSASL_CramMD5 = true;
if(mechanism == "SCRAM-SHA-1")
FeaturesSASL_ScramSHA1 = true;
if(mechanism == "PLAIN")
FeaturesSASL_Plain = true;
}
CurrentConnectionState = ConnectionState::Authenticating;
// If STARTTLS is supported: initiate it and restart the stream
if(CurrentAuthenticationState != AuthenticationState::StartTLS)
{
if(FeaturesStartTLS)
{
CurrentAuthenticationState = AuthenticationState::StartTLS;
DebugOut(DebugOutputTreshold::Debug) << "Initializing TLS" << std::endl;
stringstream Stream;
Stream << "<starttls xmlns='urn:ietf:params:xml:ns:xmpp-tls'/>";
Client->WriteTextToSocket(Stream.str());
return;
}
}
CurrentAuthenticationState = AuthenticationState::SASL;
DebugOut(DebugOutputTreshold::Debug) << "SASL MANDLEBRASL" << std::endl;
// I shall has picked an algorithm!
if(Certificate.size() >0 && FeaturesSASL_External)
{
Authentication.reset(new SASL::SASL_Mechanism_EXTERNAL( Client ));
Authentication->Begin();
return;
}
if(FeaturesSASL_ScramSHA1)
{
Authentication.reset(new SASL::SASL_Mechanism_SCRAM_SHA1( Client, MyJID, Password ));
Authentication->Begin();
return;
}
if(FeaturesSASL_DigestMD5)
{
Authentication.reset(new SASL::Weak::SASL_Mechanism_DigestMD5( Client, MyJID, Password ));
Authentication->Begin();
return;
}
if(FeaturesSASL_Plain)
{
Authentication.reset(new SASL::Weak::SASL_Mechanism_PLAIN( Client, MyJID, Password ));
Authentication->Begin();
return;
}
}
void Connection::InitTLS()
{
DebugOut(DebugOutputTreshold::Debug)
<< "Server accepted to start TLS handshake" << std::endl;
bool Success = Client->ConnectTLSSocket();
if(Success)
{
DebugOut(DebugOutputTreshold::Debug)
<< "TLS Connection successfull. Reopening stream." << std::endl;
OpenXMPPStream();
}
else
{
std::cerr << "TLS Connection failed" << std::endl;
CurrentConnectionState = ConnectionState::ErrorUnknown;
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorUnknown);
}
}
// Explicit string hax
void Connection::CheckForStreamEnd()
{
string str = Client->ReadDataStream->str();
size_t streamend = str.find("</stream:stream>");
if(streamend == string::npos)
streamend = str.find("</stream>");
if(streamend == string::npos)
return;
std::cerr << "Got end of stream from xmppserver" << std::endl;
Client->ClearReadDataStream();
CurrentConnectionState = ConnectionState::ErrorUnknown;
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorUnknown);
DebugOut(DebugOutputTreshold::Debug) << "Got stream end" << std::endl;
}
void Connection::CheckForTLSProceed(pugi::xml_document* Doc)
{
if(!Doc->select_node("//proceed").node())
{
std::cerr << "No proceed tag; B0rked SSL?!";
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorUnknown);
CurrentConnectionState = ConnectionState::ErrorUnknown;
return;
}
if(CurrentAuthenticationState == AuthenticationState::StartTLS)
InitTLS();
}
void Connection::CheckForWaitingForSession(pugi::xml_document* Doc)
{
xml_node iqnode = Doc->select_node("//iq").node();
if(!iqnode)
{
std::cerr << "No iqnode?!";
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorUnknown);
CurrentConnectionState = ConnectionState::ErrorUnknown;
return;
}
// TODO: Verify iq response..
string Presence = "<presence/>";
Client->WriteTextToSocket(Presence);
CurrentConnectionState = ConnectionState::Connected;
Client->SetKeepAliveByWhiteSpace(string(" "), 5);
DebugOut(DebugOutputTreshold::Debug) << std::endl << "ONLINE" << std::endl;
}
void Connection::CheckForBindSuccess(pugi::xml_document* Doc)
{
xml_node iqnode = Doc->select_node("//iq").node();
if(!iqnode)
{
std::cerr << "No iqnode?!";
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorUnknown);
CurrentConnectionState = ConnectionState::ErrorUnknown;
return;
}
xml_node bind = iqnode.child("bind");
xml_node jid = bind.child("jid");
string sjid = jid.text().as_string();
this->MyJID = JID(sjid);
DebugOut(DebugOutputTreshold::Debug)
<< std::endl
<< "AUTHENTICATED"
<< std::endl; // todo: verify xml ;)
string StartSession = "<iq type='set' id='1'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>";
Client->WriteTextToSocket(StartSession);
CurrentConnectionState = ConnectionState::WaitingForSession;
CurrentAuthenticationState = AuthenticationState::Authenticated;
}
void Connection::BindResource()
{
// TODO: Make Proper XML ?
//bind resource..
stringstream TStream;
TStream << "<iq type='set' id='bindresource'>";
TStream << "<bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'>";
TStream << "<resource>" << MyJID.GetResource() << "</resource>";
TStream << "</bind>";
TStream << "</iq>";
Client->WriteTextToSocket(TStream.str());
}
void Connection::StartBind()
{
CurrentAuthenticationState = AuthenticationState::Bind;
OpenXMPPStream();
}
void Connection::CheckForSASLData(pugi::xml_document* Doc)
{
xml_node challenge = Doc->select_node("//challenge").node();
xml_node success = Doc->select_node("//success").node();
if(!challenge && !success)
{
std::cerr << "Bad authentication." << std::endl;
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorAuthenticating);
CurrentConnectionState = ConnectionState::ErrorAuthenticating;
return;
}
if(challenge)
{
Authentication->Challenge(challenge);
return;
}
if(success)
{
if( !Authentication->Verify(success) )
{
std::cerr << "Bad success verification from server" << std::endl;
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorAuthenticating);
CurrentConnectionState = ConnectionState::ErrorAuthenticating;
return;
}
DebugOut(DebugOutputTreshold::Debug)
<<
std::endl
<< "Authentication succesfull."
<< std::endl;
StartBind();
}
}
void Connection::CheckStreamForAuthenticationData(pugi::xml_document* Doc)
{
switch(CurrentAuthenticationState)
{
case AuthenticationState::StartTLS:
CheckForTLSProceed(Doc);
break;
case AuthenticationState::SASL:
CheckForSASLData(Doc);
break;
case AuthenticationState::Bind:
CheckForBindSuccess(Doc);
break;
case AuthenticationState::Authenticated:
break;
default:
break;
}
}
bool Connection::CheckStreamForStanza(pugi::xml_document* Doc)
{
xml_node message = Doc->select_node("//message").node();
xml_node iq= Doc->select_node("//iq").node();
if(!message && !iq)
return false;
return true;
}
void Connection::DispatchStanza(std::unique_ptr<pugi::xml_document> Doc)
{
xml_node message = Doc->select_node("//message").node();
xml_node iq = Doc->select_node("//iq").node();
boost::shared_lock<boost::shared_mutex> ReadLock(StanzaHandlerMutex);
if(!StanzaHandler)
return;
SharedStanza StanzaToDispatch;
if(message)
{
StanzaToDispatch = SharedStanzaMessage(
new StanzaMessage( std::move(Doc),
message));
}
if(iq)
{
StanzaToDispatch = SharedStanzaIQ(
new StanzaIQ( std::move(Doc),
iq));
}
StanzaHandler->StanzaReceived(StanzaToDispatch ,shared_from_this());
}
void Connection::CheckForPresence(pugi::xml_document* Doc)
{
xml_node presence = Doc->select_node("//presence").node();
if(!presence)
return;
Roster->OnPresence(presence);
}
SharedStanza Connection::CreateStanza(const JID &TargetJID, StanzaType Type)
{
SharedStanza ReturnValue;
switch(Type)
{
case StanzaType::IQ:
ReturnValue = boost::make_shared<StanzaIQ>();
break;
case StanzaType::Message:
ReturnValue = boost::make_shared<StanzaMessage>();
break;
case StanzaType::Presence:
return nullptr;
}
ReturnValue->To = TargetJID;
ReturnValue->From = MyJID;
return ReturnValue;
}
void Connection::SendStanza(SharedStanza Stanza)
{
if(this->CurrentConnectionState != ConnectionState::Connected)
{
throw std::runtime_error("Trying to send Stanza with disconnected connection.");
}
Stanza->ProvisionOutgoingStanza();
Stanza->Payload.attribute("from").set_value( MyJID.GetFullJID().c_str() );
Stanza->Payload.attribute("to").set_value( Stanza->To.GetFullJID().c_str() );
Stanza->Payload.attribute("id").set_value( Stanza->ID.c_str() );
Client->WriteXMLToSocket(Stanza->Document.get());
}
void Connection::CheckStreamForValidXML()
{
if(CurrentConnectionState == ConnectionState::WaitingForFeatures)
{
BrodcastConnectionState(ConnectionCallback::ConnectionState::Connecting);
CheckStreamForFeatures();
return;
}
std::unique_ptr<pugi::xml_document> Doc;
int NrFetched = 0;
int Iteration = 0;
bool LoadMore = true;
do//while(Client->LoadXML(Iteration++))
{
if(LoadMore)
LoadMore = Client->LoadXML(Iteration++);
Doc = Client->FetchDocument();
if(Doc == nullptr)
break;
NrFetched++;
switch(CurrentConnectionState)
{
case ConnectionState::WaitingForSession:
BrodcastConnectionState(ConnectionCallback::ConnectionState::Connecting);
CheckForWaitingForSession(Doc.get());
break;
case ConnectionState::WaitingForFeatures:
break;
case ConnectionState::Authenticating:
BrodcastConnectionState(ConnectionCallback::ConnectionState::Connecting);
CheckStreamForAuthenticationData(Doc.get());
break;
case ConnectionState::Connected:
BrodcastConnectionState(ConnectionCallback::ConnectionState::Connected);
CheckForPresence(Doc.get());
if(CheckStreamForStanza(Doc.get()))
{
DispatchStanza(std::move(Doc));
}
break;
default:
break;
}
}while(true);
CheckForStreamEnd();
}
void Connection::Reset()
{
FeaturesSASL_CramMD5 = false;
FeaturesSASL_DigestMD5 = false;
FeaturesSASL_Plain = false;
FeaturesSASL_ScramSHA1 = false;
FeaturesStartTLS = false;
CurrentAuthenticationState = AuthenticationState::None;
if(Authentication != nullptr)
{
Authentication.reset();
}
}
void Connection::Reconnect()
{
ReconnectionCount++;
Reset();
Connect();
}
Connection::Connection(const std::string &Hostname,
int Portnumber,
const std::string &Domain,
boost::asio::const_buffer Certificate,
boost::asio::const_buffer Privatekey,
ConnectionCallback *ConnectionHandler,
StanzaCallback *StanzaHandler,
PresenceCallback *PresenceHandler,
SubscribeCallback *SubscribeHandler,
SubscribedCallback *SubscribedHandler,
UnsubscribedCallback *UnsubscribedHandler,
TLSVerification *Verification,
TLSVerificationMode VerificationMode,
DebugOutputTreshold DebugTreshold)
:
Disposing(false),
SelfHostedVerifier(new TLSVerification(VerificationMode)),
ConnectionHandler(ConnectionHandler),
StanzaHandler(StanzaHandler),
DebugTreshold(DebugTreshold),
CurrentAuthenticationState(AuthenticationState::None),
Hostname(Hostname),
Certificate(Certificate),
Privatekey(Privatekey),
Portnumber(Portnumber),
MyJID(Domain),
Verification(Verification),
VerificationMode(VerificationMode),
Authentication(nullptr)
{
Roster = new RosterMaintaner (nullptr,
PresenceHandler,
SubscribeHandler,
SubscribedHandler,
UnsubscribedHandler);
}
Connection::Connection(const std::string &Hostname,
int Portnumber,
const JID &RequestedJID,
const std::string &Password,
ConnectionCallback *ConnectionHandler,
StanzaCallback *StanzaHandler,
PresenceCallback *PresenceHandler,
SubscribeCallback *SubscribeHandler,
SubscribedCallback *SubscribedHandler,
UnsubscribedCallback *UnsubscribedHandler,
TLSVerification *Verification,
TLSVerificationMode VerificationMode,
DebugOutputTreshold DebugTreshold)
:
Disposing(false),
SelfHostedVerifier(new TLSVerification(VerificationMode)),
ConnectionHandler(ConnectionHandler),
StanzaHandler(StanzaHandler),
DebugTreshold(DebugTreshold),
CurrentAuthenticationState(AuthenticationState::None),
Hostname(Hostname),
Password(Password),
Portnumber(Portnumber),
MyJID(RequestedJID),
Verification(Verification),
VerificationMode(VerificationMode),
Authentication(nullptr)
{
Roster = new RosterMaintaner (nullptr,
PresenceHandler,
SubscribeHandler,
SubscribedHandler,
UnsubscribedHandler);
}
void Connection::Connect()
{
DebugOut(DebugOutputTreshold::Debug)
<< "Starting io_service run in background thread"
<< std::endl;
if( io_service != nullptr )
{
io_service->stop();
}
if( IOThread != nullptr )
{
IOThread->join();
}
if (Client != nullptr)
{
this->Roster->ResetClient(nullptr);
if (this->Authentication != nullptr)
{
this->Authentication.reset();
}
}
io_service.reset( new boost::asio::io_service() );
Client.reset(
new Network::AsyncTCPXMLClient (
io_service,
Certificate,
Privatekey,
((Verification != nullptr) ? Verification : SelfHostedVerifier.get()),
Hostname,
Portnumber,
boost::bind(&Connection::ClientDisconnected, this),
boost::bind(&Connection::ClientGotData, this),
DebugTreshold ) );
PreviouslyBroadcastedState = ConnectionCallback::ConnectionState::Connecting;
//Client->Reset();
if( !Client->ConnectSocket() )
{
CurrentConnectionState = ConnectionState::ErrorConnecting;
std::cerr << "DXMPP: Failed to connect" << std::endl;
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorConnecting);
return;
}
OpenXMPPStream();
Client->AsyncRead();
Roster->ResetClient(Client);
// Fork io
IOThread.reset(
new boost::thread(boost::bind(
&Connection::Run,
this)));
}
void Connection::Run()
{
DebugOut(DebugOutputTreshold::Debug)
<< "DXMPP: Starting io run" << std::endl;
try
{
io_service->run();
}
catch(const std::exception &ex)
{
DebugOut(DebugOutputTreshold::Error)
<< "DXMPP IO Exception: " << ex.what() << std::endl;
}
catch(...)
{
DebugOut(DebugOutputTreshold::Error)
<< "DXMPP IO Exception: Unknown" << std::endl;
}
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorUnknown);
}
Connection::~Connection()
{
Disposing=true;
//std::cout << "~Connection" << std::endl;
if( Authentication != nullptr )
{
//std::cout << "Delete authentication" << std::endl;
Authentication.reset();
}
//std::cout << "Delete roster" << std::endl;
delete Roster;
if( io_service != nullptr )
{
//std::cout << "Stop IO Service" << std::endl;
io_service->stop();
}
io_service.reset();
//std::cout << "Client = nullptr" << std::endl;
Client.reset();
if( IOThread != nullptr )
{
//std::cout << "Join IO Thread" << std::endl;
IOThread->join();
}
//std::cout << "~Connection done" << std::endl;
}
SharedConnection Connection::Create( const std::string &Hostname,
int Portnumber,
const JID &RequestedJID,
const std::string &Password,
IEventHandler* Handler,
TLSVerification *Verification,
DebugOutputTreshold DebugTreshold)
{
ConnectionCallback *ConnectionHandler = dynamic_cast<ConnectionCallback*> (Handler);
StanzaCallback *StanzaHandler = dynamic_cast<StanzaCallback*> (Handler);
PresenceCallback *PresenceHandler = dynamic_cast<PresenceCallback*>(Handler);
SubscribeCallback *SubscribeHandler = dynamic_cast<SubscribeCallback*>(Handler);
SubscribedCallback *SubscribedHandler = dynamic_cast<SubscribedCallback*>(Handler);
UnsubscribedCallback *UnsubscribedHandler = dynamic_cast<UnsubscribedCallback*>(Handler);
SharedConnection RVal = boost::shared_ptr<Connection>(
new Connection(Hostname,
Portnumber,
RequestedJID,
Password,
ConnectionHandler,
StanzaHandler,
PresenceHandler,
SubscribeHandler,
SubscribedHandler,
UnsubscribedHandler,
Verification,
Verification->Mode,
DebugTreshold) );
RVal->Reconnect();
return RVal;
}
SharedConnection Connection::Create(const std::string &Hostname,
int Portnumber,
const JID &RequestedJID,
const std::string &Password,
IEventHandler* Handler,
TLSVerificationMode VerificationMode,
DebugOutputTreshold DebugTreshold)
{
ConnectionCallback *ConnectionHandler = dynamic_cast<ConnectionCallback*> (Handler);
StanzaCallback *StanzaHandler = dynamic_cast<StanzaCallback*> (Handler);
PresenceCallback *PresenceHandler = dynamic_cast<PresenceCallback*>(Handler);
SubscribeCallback *SubscribeHandler = dynamic_cast<SubscribeCallback*>(Handler);
SubscribedCallback *SubscribedHandler = dynamic_cast<SubscribedCallback*>(Handler);
UnsubscribedCallback *UnsubscribedHandler = dynamic_cast<UnsubscribedCallback*>(Handler);
if( ConnectionHandler == nullptr)
std::cerr << "ConnectionHandler is null" << std::endl;
SharedConnection RVal = boost::shared_ptr<Connection>(
new Connection(Hostname,
Portnumber,
RequestedJID,
Password,
ConnectionHandler,
StanzaHandler,
PresenceHandler,
SubscribeHandler,
SubscribedHandler,
UnsubscribedHandler,
nullptr,
VerificationMode,
DebugTreshold) );
RVal->Reconnect();
return RVal;
}
SharedConnection Connection::Create( const std::string &Hostname,
int Portnumber,
const std::string &Domain,
boost::asio::const_buffer Certificate,
boost::asio::const_buffer Privatekey,
IEventHandler* Handler,
TLSVerification *Verification,
DebugOutputTreshold DebugTreshold)
{
ConnectionCallback *ConnectionHandler = dynamic_cast<ConnectionCallback*> (Handler);
StanzaCallback *StanzaHandler = dynamic_cast<StanzaCallback*> (Handler);
PresenceCallback *PresenceHandler = dynamic_cast<PresenceCallback*>(Handler);
SubscribeCallback *SubscribeHandler = dynamic_cast<SubscribeCallback*>(Handler);
SubscribedCallback *SubscribedHandler = dynamic_cast<SubscribedCallback*>(Handler);
UnsubscribedCallback *UnsubscribedHandler = dynamic_cast<UnsubscribedCallback*>(Handler);
SharedConnection RVal = boost::shared_ptr<Connection>(
new Connection(Hostname,
Portnumber,
Domain,
Certificate,
Privatekey,
ConnectionHandler,
StanzaHandler,
PresenceHandler,
SubscribeHandler,
SubscribedHandler,
UnsubscribedHandler,
Verification,
Verification->Mode,
DebugTreshold) );
RVal->Reconnect();
return RVal;
}
SharedConnection Connection::Create( const std::string &Hostname,
int Portnumber,
const std::string &Domain,
boost::asio::const_buffer Certificate,
boost::asio::const_buffer Privatekey,
IEventHandler* Handler,
TLSVerificationMode VerificationMode,
DebugOutputTreshold DebugTreshold)
{
ConnectionCallback *ConnectionHandler = dynamic_cast<ConnectionCallback*> (Handler);
StanzaCallback *StanzaHandler = dynamic_cast<StanzaCallback*> (Handler);
PresenceCallback *PresenceHandler = dynamic_cast<PresenceCallback*>(Handler);
SubscribeCallback *SubscribeHandler = dynamic_cast<SubscribeCallback*>(Handler);
SubscribedCallback *SubscribedHandler = dynamic_cast<SubscribedCallback*>(Handler);
UnsubscribedCallback *UnsubscribedHandler = dynamic_cast<UnsubscribedCallback*>(Handler);
SharedConnection RVal = boost::shared_ptr<Connection>(
new Connection(Hostname,
Portnumber,
Domain,
Certificate,
Privatekey,
ConnectionHandler,
StanzaHandler,
PresenceHandler,
SubscribeHandler,
SubscribedHandler,
UnsubscribedHandler,
nullptr,
VerificationMode,
DebugTreshold) );
RVal->Reconnect();
return RVal;
}
void Connection::ClientDisconnected()
{
//std::cerr << "Client disconnected." << std::endl;
CurrentConnectionState = ConnectionState::ErrorUnknown;
BrodcastConnectionState(ConnectionCallback::ConnectionState::ErrorUnknown);
}
void Connection::ClientGotData()
{
CheckStreamForValidXML();
}
}
<|start_filename|>DXMPP/Connection.hpp<|end_filename|>
//
// Connection.hpp
// DXMPP
//
// Created by <NAME> on 31/05/14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#ifndef DXMPP_Connection_hpp
#define DXMPP_Connection_hpp
#include <boost/asio.hpp>
#include <boost/asio/ssl.hpp>
#include <boost/thread.hpp>
#include <boost/bind.hpp>
#include <boost/enable_shared_from_this.hpp>
#include <boost/any.hpp>
#include <sstream>
#include <pugixml/pugixml.hpp>
#include <DXMPP/JID.hpp>
#include <DXMPP/SASL/SASLMechanism.hpp>
#include <DXMPP/Stanza.hpp>
#include <DXMPP/StanzaCallback.hpp>
#include <DXMPP/ConnectionCallback.hpp>
#include <DXMPP/Network/AsyncTCPXMLClient.hpp>
#include <DXMPP/Debug/DebugOutputTreshold.hpp>
#include <DXMPP/Roster.hpp>
#include <DXMPP/IEventHandler.hpp>
#include <atomic>
namespace DXMPP
{
typedef boost::shared_ptr<pugi::xml_node> SharedXMLNode;
class Connection
:
public boost::enable_shared_from_this<Connection>
{
enum class ConnectionState
{
NotConnected = 0,
Connecting,
WaitingForFeatures,
Authenticating,
WaitingForSession,
Connected,
ErrorConnecting,
ErrorAuthenticating,
ErrorUnknown
};
std::atomic<bool> Disposing;
static std::atomic<int> ReconnectionCount;
enum class AuthenticationState
{
None,
StartTLS,
SASL,
Bind,
Authenticated
};
boost::shared_ptr<boost::asio::io_service> io_service;
boost::scoped_ptr<boost::thread> IOThread;
boost::scoped_ptr<TLSVerification> SelfHostedVerifier;
ConnectionCallback * ConnectionHandler;
boost::shared_mutex ConnectionHandlerMutex;
StanzaCallback * StanzaHandler;
boost::shared_mutex StanzaHandlerMutex;
ConnectionCallback::ConnectionState PreviouslyBroadcastedState;
boost::shared_ptr<DXMPP::Network::AsyncTCPXMLClient> Client;
DebugOutputTreshold DebugTreshold;
bool FeaturesSASL_External;
bool FeaturesSASL_DigestMD5;
bool FeaturesSASL_CramMD5;
bool FeaturesSASL_ScramSHA1;
bool FeaturesSASL_Plain;
bool FeaturesStartTLS;
ConnectionState CurrentConnectionState;
AuthenticationState CurrentAuthenticationState;
std::string Hostname;
std::string Password;
boost::asio::const_buffer Certificate;
boost::asio::const_buffer Privatekey;
int Portnumber;
JID MyJID;
TLSVerification *Verification;
TLSVerificationMode VerificationMode;
boost::scoped_ptr<SASL::SASLMechanism> Authentication;
void Reset();
void InitTLS();
void Connect();
void OpenXMPPStream();
void CheckStreamForFeatures();
void ClearReadDataStream();
// Parsers for incoming negotiation XML: TLS proceed, SASL data, resource bind, session and stanzas
void CheckForTLSProceed(pugi::xml_document *Doc);
void CheckForWaitingForSession(pugi::xml_document *Doc);
void CheckForBindSuccess(pugi::xml_document *Doc);
void CheckForSASLData(pugi::xml_document *Doc);
void CheckStreamForAuthenticationData(pugi::xml_document *Doc);
bool CheckStreamForStanza(pugi::xml_document *Doc);
void DispatchStanza( std::unique_ptr< pugi::xml_document> Doc);
void CheckForPresence(pugi::xml_document *Doc);
// this is ok (invalid xml)
void CheckForStreamEnd();
// this is ok
void CheckStreamForValidXML();
void BindResource();
void StartBind();
void ClientDisconnected();
void ClientGotData();
void BrodcastConnectionState(ConnectionCallback::ConnectionState NewState);
Connection(const std::string &Hostname,
int Portnumber,
const JID &RequestedJID,
const std::string &Password,
ConnectionCallback *ConnectionHandler = nullptr,
StanzaCallback *StanzaHandler = nullptr,
PresenceCallback *PresenceHandler = nullptr,
SubscribeCallback *SubscribeHandler = nullptr,
SubscribedCallback *SubscribedHandler = nullptr,
UnsubscribedCallback *UnsubscribedHandler = nullptr,
TLSVerification *Verification = nullptr,
TLSVerificationMode VerificationMode = TLSVerificationMode::RFC2818_Hostname,
DebugOutputTreshold DebugTreshold = DebugOutputTreshold::Error);
Connection(const std::string &Hostname,
int Portnumber,
const std::string &Domain,
boost::asio::const_buffer Certificate,
boost::asio::const_buffer Privatekey,
ConnectionCallback *ConnectionHandler = nullptr,
StanzaCallback *StanzaHandler = nullptr,
PresenceCallback *PresenceHandler = nullptr,
SubscribeCallback *SubscribeHandler = nullptr,
SubscribedCallback *SubscribedHandler = nullptr,
UnsubscribedCallback *UnsubscribedHandler = nullptr,
TLSVerification *Verification = nullptr,
TLSVerificationMode VerificationMode = TLSVerificationMode::RFC2818_Hostname,
DebugOutputTreshold DebugTreshold = DebugOutputTreshold::Error);
void Run();
public:
static int GetReconnectionCount()
{
return ReconnectionCount;
}
void Reconnect();
RosterMaintaner *Roster;
SharedStanza CreateStanza(const JID &TargetJID, StanzaType Type);
template <typename T>
T CreateStanza(const JID &TargetJID, StanzaType Type)
{
return boost::dynamic_pointer_cast<typename T::element_type> ( CreateStanza(TargetJID, Type) );
}
void SendStanza(SharedStanza Stanza);
static SharedConnection Create(const std::string &Hostname,
int Portnumber,
const JID &RequestedJID,
const std::string &Password,
IEventHandler* Handler,
TLSVerification *Verification ,
DebugOutputTreshold DebugTreshold = DebugOutputTreshold::Error);
static SharedConnection Create(const std::string &Hostname,
int Portnumber,
const JID &RequestedJID,
const std::string &Password,
IEventHandler* Handler,
TLSVerificationMode VerificationMode = TLSVerificationMode::RFC2818_Hostname,
DebugOutputTreshold DebugTreshold = DebugOutputTreshold::Error);
static SharedConnection Create(const std::string &Hostname,
int Portnumber,
const std::string &Domain,
boost::asio::const_buffer Certificate,
boost::asio::const_buffer Privatekey,
IEventHandler* Handler,
TLSVerification *Verification,
DebugOutputTreshold DebugTreshold = DebugOutputTreshold::Error);
static SharedConnection Create(const std::string &Hostname,
int Portnumber,
const std::string &Domain,
boost::asio::const_buffer Certificate,
boost::asio::const_buffer Privatekey,
IEventHandler* Handler,
TLSVerificationMode VerificationMode = TLSVerificationMode::RFC2818_Hostname,
DebugOutputTreshold DebugTreshold = DebugOutputTreshold::Error);
void DeRegisterHandlers()
{
{
boost::unique_lock<boost::shared_mutex> WriteLock(ConnectionHandlerMutex);
ConnectionHandler = nullptr;
}
{
boost::unique_lock<boost::shared_mutex> WriteLock(StanzaHandlerMutex);
StanzaHandler = nullptr;
}
}
~Connection();
};
}
#endif
<|start_filename|>DXMPP/SASL/SASLMechanism.hpp<|end_filename|>
//
//  SASLMechanism.hpp
// DXMPP
//
// Created by <NAME> 2014
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#ifndef DXMPP_SASL_hpp
#define DXMPP_SASL_hpp
#include <pugixml/pugixml.hpp>
#include <DXMPP/JID.hpp>
#include <DXMPP/SASL/SaslChallengeParser.hpp>
#include <DXMPP/Network/AsyncTCPXMLClient.hpp>
namespace DXMPP
{
namespace SASL
{
using namespace pugi;
class SASLMechanism
{
protected:
std::string DecodeBase64(std::string Input);
std::string EncodeBase64(std::string Input);
public:
enum class SASLMechanisms
{
None = 0,
PLAIN = 1,
DIGEST_MD5 = 2,
CRAM_MD5 = 3, // Not implemented
SCRAM_SHA1 = 4
};
JID MyJID;
std::string Password;
boost::shared_ptr<DXMPP::Network::AsyncTCPXMLClient> Uplink;
SASLMechanism(boost::shared_ptr<DXMPP::Network::AsyncTCPXMLClient> Uplink,
const JID &MyJID,
const std::string &Password)
:
MyJID(MyJID), Password(Password), Uplink (Uplink)
{
}
std::string SelectedNounce;
virtual void Begin() = 0;
virtual void Challenge(const pugi::xpath_node &ChallengeTag) = 0;
virtual bool Verify(const pugi::xpath_node &SuccessTag) = 0;
virtual ~SASLMechanism()
{
}
};
}
}
#endif
<|start_filename|>DXMPP/Roster.hpp<|end_filename|>
//
// Roster.hpp
// DXMPP
//
// Created by <NAME> 2014
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#ifndef DXMPP_Roster_hpp
#define DXMPP_Roster_hpp
#include <pugixml/pugixml.hpp>
#include <DXMPP/Network/AsyncTCPXMLClient.hpp>
#include <DXMPP/JID.hpp>
namespace DXMPP
{
class PresenceCallback
{
public:
virtual void OnPresence(JID From,
bool Available,
int Priority,
std::string Status,
std::string Message) = 0;
};
class SubscribeCallback
{
public:
enum class Response
{
Allow,
AllowAndSubscribe,
Reject
};
virtual Response OnSubscribe(JID From) = 0;
};
class SubscribedCallback
{
public:
virtual void OnSubscribed(JID To) = 0;
};
class UnsubscribedCallback
{
public:
virtual void OnUnsubscribed(JID From) = 0;
};
class RosterMaintaner
{
friend class Connection;
boost::shared_ptr<DXMPP::Network::AsyncTCPXMLClient> Uplink;
PresenceCallback *PresenceHandler;
SubscribeCallback *SubscribeHandler;
SubscribedCallback *SubscribedHandler;
UnsubscribedCallback *UnsubscribedHandler;
void HandleSubscribe(pugi::xml_node Node);
void HandleSubscribed(pugi::xml_node Node);
void HandleError(pugi::xml_node Node);
void HandleUnsubscribe(pugi::xml_node Node);
void HandleUnsubscribed(pugi::xml_node Node);
// Invoked from Connection
void OnPresence(pugi::xml_node Node);
public:
// User functions
void Subscribe(JID To);
void Unsubscribe(JID To);
void ResetClient(boost::shared_ptr<DXMPP::Network::AsyncTCPXMLClient> Uplink)
{
this->Uplink = Uplink;
}
RosterMaintaner(boost::shared_ptr<DXMPP::Network::AsyncTCPXMLClient> Uplink,
PresenceCallback *PresenceHandler,
SubscribeCallback *SubscribeHandler,
SubscribedCallback *SubscribedHandler,
UnsubscribedCallback *UnsubscribedHandler)
:
Uplink(Uplink),
PresenceHandler(PresenceHandler),
SubscribeHandler(SubscribeHandler),
SubscribedHandler(SubscribedHandler),
UnsubscribedHandler(UnsubscribedHandler)
{
}
};
}
#endif // DXMPP_Roster_hpp
<|start_filename|>DXMPP/JID.hpp<|end_filename|>
//
// JID.hpp
// DXMPP
//
// Created by <NAME> on 31/05/14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#ifndef DXMPP_JID_hpp
#define DXMPP_JID_hpp
#include <string>
namespace DXMPP {
class JID
{
std::string Username;
std::string Domain;
std::string Resource;
void LoadJID(const std::string &JID)
{
size_t IndexOfSlash = JID.find('/');
if(IndexOfSlash != std::string::npos)
{
Resource = JID.substr(IndexOfSlash+1);
}
size_t IndexOfAt = JID.find('@');
if(IndexOfAt != std::string::npos && IndexOfAt < IndexOfSlash)
{
Username = JID.substr(0, IndexOfAt);
IndexOfAt++;
}
else
IndexOfAt = 0;
Domain = JID.substr(IndexOfAt, IndexOfSlash-IndexOfAt);
}
public:
~JID()
{
}
JID()
{
}
JID(std::string Username,
std::string Domain,
std::string &Resource)
:
Username(Username),
Domain(Domain),
Resource(Resource)
{
}
JID(const std::string &FullJID)
{
LoadJID(FullJID);
}
JID(const std::string &Bare, std::string Resource)
{
LoadJID(Bare);
this->Resource = Resource;
}
JID(const JID &B)
{
this->Username = B.Username;
this->Domain = B.Domain;
this->Resource = B.Resource;
}
void SetResource(std::string Resource)
{
this->Resource = Resource;
}
std::string GetFullJID() const
{
if (Resource.empty() && Username.empty())
return Domain;
if (Resource.empty())
return Username + "@" + Domain;
if (Username.empty())
return Domain + "/" + Resource;
return Username + "@" + Domain + "/" + Resource;
}
std::string GetBareJID() const
{
if (Username.empty())
return Domain;
return Username + "@" + Domain;
}
std::string GetUsername() const
{
return Username;
}
std::string GetDomain() const
{
return Domain;
}
std::string GetResource() const
{
return Resource;
}
};
}
#endif
<|start_filename|>DXMPP/ConnectionCallback.hpp<|end_filename|>
//
// ConnectionCallback.hpp
// DXMPP
//
// Created by <NAME> 2014
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#ifndef DXMPP_ConnectionCallback_hpp
#define DXMPP_ConnectionCallback_hpp
#include "pugixml/pugixml.hpp"
#include "JID.hpp"
#include <string>
#include <boost/shared_ptr.hpp>
#include "Stanza.hpp"
#include <memory>
namespace DXMPP
{
class Connection;
typedef boost::shared_ptr<Connection> SharedConnection;
class ConnectionCallback
{
public:
enum class ConnectionState
{
NotConnected = 0,
Connecting,
Connected,
ErrorConnecting,
ErrorAuthenticating,
ErrorUnknown
};
virtual void ConnectionStateChanged(ConnectionState NewState,
SharedConnection Sender) = 0;
virtual ~ConnectionCallback()
{
}
};
}
#endif // DXMPP_ConnectionCallback_hpp
<|start_filename|>DXMPP/Stanza.hpp<|end_filename|>
//
// Stanza.hpp
// DXMPP
//
// Created by <NAME> 2014
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#ifndef DXMPP_Stanza_hpp
#define DXMPP_Stanza_hpp
#include "pugixml/pugixml.hpp"
#include "JID.hpp"
#include <boost/shared_ptr.hpp>
#include <boost/uuid/uuid.hpp>
#include <boost/uuid/uuid_generators.hpp>
#include <boost/uuid/uuid_io.hpp>
#include <boost/lexical_cast.hpp>
#include <memory>
namespace DXMPP {
enum class StanzaType
{
IQ,
Message,
Presence
};
enum class StanzaMessageType
{
Chat,
Error,
Groupchat,
Headline,
Normal
};
enum class StanzaIQType
{
Get,
Set,
Result,
Error
};
class Stanza
{
public:
std::unique_ptr<pugi::xml_document> Document;
pugi::xml_node Payload;
JID To;
JID From;
StanzaType Type;
std::string ID;
Stanza(StanzaType Type, std::unique_ptr<pugi::xml_document> Document, pugi::xml_node Payload)
: Document( std::move(Document) ), Payload(Payload), Type(Type)
{
ID = std::string(Payload.attribute("id").value());
To = JID(Payload.attribute("to").value());
From = JID(Payload.attribute("from").value());
}
Stanza(StanzaType Type)
:Type(Type)
{
}
virtual void ProvisionOutgoingStanza() = 0;
virtual ~Stanza()
{
}
};
class StanzaMessage : public Stanza
{
public:
StanzaMessageType MessageType;
void ProvisionOutgoingStanza()
{
switch(MessageType)
{
case StanzaMessageType::Chat:
Payload.attribute("type").set_value( "chat" );
break;
case StanzaMessageType::Normal:
Payload.attribute("type").set_value( "normal" );
break;
case StanzaMessageType::Groupchat:
Payload.attribute("type").set_value( "groupchat" );
break;
case StanzaMessageType::Headline:
Payload.attribute("type").set_value( "headline" );
break;
case StanzaMessageType::Error:
Payload.attribute("type").set_value( "error" );
break;
}
}
StanzaMessage(std::unique_ptr<pugi::xml_document> Document, pugi::xml_node Payload)
: Stanza( StanzaType::Message, std::move(Document), Payload)
{
MessageType = StanzaMessageType::Normal;
if( std::string(Payload.attribute("type").value()) == "normal")
MessageType = StanzaMessageType::Normal;
if( std::string(Payload.attribute("type").value()) == "headline")
MessageType = StanzaMessageType::Headline;
if( std::string(Payload.attribute("type").value()) == "groupchat")
MessageType = StanzaMessageType::Groupchat;
if( std::string(Payload.attribute("type").value()) == "error")
MessageType = StanzaMessageType::Error;
if( std::string(Payload.attribute("type").value()) == "chat")
MessageType = StanzaMessageType::Chat;
}
StanzaMessage()
: Stanza(StanzaType::Message), MessageType(StanzaMessageType::Chat)
{
Document.reset(new pugi::xml_document());
Payload = Document->append_child("message");
Payload.append_attribute("from");
Payload.append_attribute("to");
Payload.append_attribute("type");
Payload.append_attribute("id");
if(ID.length() == 0)
{
ID = boost::lexical_cast<std::string>( boost::uuids::random_generator()() );
}
Payload.attribute("id").set_value( ID.c_str() );
}
~StanzaMessage()
{
}
};
class StanzaIQ: public Stanza
{
public:
StanzaIQType IQType;
void ProvisionOutgoingStanza()
{
switch(IQType)
{
case StanzaIQType::Get:
Payload.attribute("type").set_value( "get" );
break;
case StanzaIQType::Set:
Payload.attribute("type").set_value( "set" );
break;
case StanzaIQType::Result:
Payload.attribute("type").set_value( "result" );
break;
case StanzaIQType::Error:
Payload.attribute("type").set_value( "error" );
break;
}
}
StanzaIQ(std::unique_ptr<pugi::xml_document> Document, pugi::xml_node Payload)
: Stanza(StanzaType::IQ,std::move(Document), Payload)
{
std::string strtype = std::string(Payload.attribute("type").value());
if( strtype == "error")
IQType = StanzaIQType::Error;
if( strtype == "get")
IQType = StanzaIQType::Get;
if( strtype == "set")
IQType = StanzaIQType::Set;
if( strtype == "result")
IQType = StanzaIQType::Result;
}
StanzaIQ()
: Stanza(StanzaType::IQ), IQType(StanzaIQType::Get)
{
Document.reset(new pugi::xml_document());
Payload = Document->append_child("iq");
Payload.append_attribute("from");
Payload.append_attribute("to");
Payload.append_attribute("type");
Payload.append_attribute("id");
if(ID.length() == 0)
{
ID = boost::lexical_cast<std::string>( boost::uuids::random_generator()() );
}
Payload.attribute("id").set_value( ID.c_str() );
}
~StanzaIQ()
{
}
};
typedef boost::shared_ptr<Stanza> SharedStanza;
typedef boost::shared_ptr<StanzaMessage> SharedStanzaMessage;
typedef boost::shared_ptr<StanzaIQ> SharedStanzaIQ;
}
#endif
<|start_filename|>DXMPP/SASL/SASLMechanism_DIGEST_MD5.cpp<|end_filename|>
//
// SASLMechanism_DIGEST_MD5.cpp
// DXMPP
//
// Created by <NAME> 2014
// Copyright (c) 2014 <NAME>. All rights reserved.
//
#include <DXMPP/SASL/SASLMechanism_DIGEST_MD5.hpp>
#include <pugixml/pugixml.hpp>
#include <DXMPP/JID.hpp>
#include <boost/function.hpp>
#include <boost/asio.hpp>
#include <boost/asio/ssl.hpp>
#include <boost/thread.hpp>
#include <boost/lexical_cast.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/array.hpp>
#include <boost/algorithm/string.hpp>
#include <boost/uuid/uuid.hpp>
#include <boost/uuid/uuid_generators.hpp>
#include <boost/uuid/uuid_io.hpp>
#define CRYPTOPP_ENABLE_NAMESPACE_WEAK 1
#include <cryptopp/cryptlib.h>
#include <cryptopp/hex.h>
#include <cryptopp/md5.h>
#include <cryptopp/hmac.h>
#include <cryptopp/sha.h>
#include <cryptopp/base64.h>
#include <cryptopp/queue.h>
#include <sstream>
#include <ostream>
#include <iostream>
#include "SaslChallengeParser.hpp"
#include "CryptoPP_byte.hpp"
namespace DXMPP
{
namespace SASL
{
using namespace std;
using namespace pugi;
namespace Weak
{
void SASL_Mechanism_DigestMD5::Begin()
{
//ChosenSASLMechanism = SASLMechanisms::DIGEST_MD5;
string AuthXML ="<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='DIGEST-MD5'/>";
Uplink->WriteTextToSocket(AuthXML);
}
string SASL_Mechanism_DigestMD5::GetMD5Hex(string Input)
{
CryptoPP::Weak::MD5 hash;
CryptoPP::byte digest[ CryptoPP::Weak::MD5::DIGESTSIZE ];
int length =CryptoPP::Weak::MD5::DIGESTSIZE;
CryptoPP::HexEncoder Hexit;
std::string TOutput;
Hexit.Attach( new CryptoPP::StringSink( TOutput ) );
hash.CalculateDigest( digest,
reinterpret_cast<const unsigned char *>( Input.c_str() ),
Input.length() );
Hexit.Put( digest, length );
Hexit.MessageEnd();
boost::algorithm::to_lower(TOutput);
return TOutput;
}
string SASL_Mechanism_DigestMD5::GetHA1(string X, string nonce, string cnonce)
{
CryptoPP::Weak::MD5 hash;
CryptoPP::byte digest[ CryptoPP::Weak::MD5::DIGESTSIZE ];
int digestlength =CryptoPP::Weak::MD5::DIGESTSIZE;
// Calculate Y
hash.CalculateDigest( digest,
reinterpret_cast<const unsigned char *>( X.c_str() ),
X.length() );
// X is now in digest
stringstream TStream;
TStream << ":" << nonce << ":" << cnonce;
string AuthentiationDetails = TStream.str();
int TLen = (int)digestlength + (int)AuthentiationDetails.length();
CryptoPP::byte *T = new CryptoPP::byte[TLen];
memcpy(T, digest, digestlength);
memcpy(T+digestlength, AuthentiationDetails.c_str(), AuthentiationDetails.length());
hash.CalculateDigest( digest, reinterpret_cast<const unsigned char *>( T ), TLen );
delete[] T; // release the temporary buffer so it is not leaked on every challenge
CryptoPP::HexEncoder Hexit;
std::string TOutput;
Hexit.Attach( new CryptoPP::StringSink( TOutput ) );
Hexit.Put( digest, digestlength );
Hexit.MessageEnd();
boost::algorithm::to_lower(TOutput);
return TOutput;
}
void SASL_Mechanism_DigestMD5::Challenge(const pugi::xpath_node &challenge)
{
string ChallengeBase64 = challenge.node().child_value();
string DecodedChallenge = DecodeBase64(ChallengeBase64);
//std::cout << "GOT CHALLENGE " << ChallengeBase64 << " decoded as " << DecodedChallenge << std::endl;
std::map<std::string,std::string> ChallengeMap; // key/value pairs parsed from the SASL challenge
if(!ParseSASLChallenge(DecodedChallenge, ChallengeMap))
throw runtime_error("DXMPP: Failed to parse SASL Challenge.");
string nonce = ChallengeMap["nonce"];
string qop = ChallengeMap["qop"];
string cnonce = boost::lexical_cast<string>( boost::uuids::random_generator()() );
string rspauth = "";
if(ChallengeMap.find("rspauth") != ChallengeMap.end())
rspauth = ChallengeMap["rspauth"];
if(ChallengeMap.find("cnonce") != ChallengeMap.end())
cnonce = ChallengeMap["cnonce"];
string realm = "";// MyJID.GetDomain();
if(ChallengeMap.find("realm") != ChallengeMap.end())
realm = ChallengeMap["realm"];
std::string authid = MyJID.GetUsername();
std::string authzid = MyJID.GetFullJID();
string nc = "00000001";
if(!rspauth.empty())
{
Uplink->WriteTextToSocket("<response xmlns='urn:ietf:params:xml:ns:xmpp-sasl'/>");
return;
}
//cnonce= "lWThUe35I/Mykro89Cg44aks8HUCi2w0mfdrjdiz"; // hack
//std::cout << "SASL Mechanism = DIGEST_MD5" << std::endl;
stringstream TStream;
std::string TOutput;
// X
TStream << MyJID.GetUsername() << ':' << realm << ':' << Password;
string X = TStream.str();
TStream.str("");
TStream << "AUTHENTICATE:xmpp/" << MyJID.GetDomain();
string A2 = TStream.str();
string HA1 = GetHA1(X, nonce, cnonce);
string HA2 = GetMD5Hex(A2);
TStream.str("");
TStream << HA1
<< ':'
<< nonce
<< ':'
<< nc
<< ':'
<< cnonce
<< ':'
<< qop
<< ':'
<< HA2;
string KD = TStream.str();
string Z = GetMD5Hex(KD);
TStream.str("");
TStream << "username=\""
<< MyJID.GetUsername() << "\""
//<< ",realm=\"" << realm << "\""
<< ",nonce=\"" << nonce << "\""
<< ",cnonce=\"" << cnonce << "\""
<< ",nc=" << nc
<< ",qop=auth,digest-uri=\"xmpp/"
<< MyJID.GetDomain()<< "\""
<< ",response="
<< Z
<< ",charset=utf-8";//,authzid=\"" << MyJID.GetFullJID() << "\"";
string Response = TStream.str();
Response = EncodeBase64(Response);
TStream.str("");
TStream << "<response xmlns='urn:ietf:params:xml:ns:xmpp-sasl'>"
<< Response
<< "</response>";
string ResponseXML = TStream.str();
Uplink->WriteTextToSocket(ResponseXML);
}
bool SASL_Mechanism_DigestMD5::Verify(const pugi::xpath_node &SuccessTag)
{
return true;
}
}
}
}
| mstfn/dxmpp |
<|start_filename|>Gruntfile.coffee<|end_filename|>
module.exports = (grunt) ->
banner =
"/*! JpegCamera <%= pkg.version %> | " +
"<%= grunt.template.today('yyyy-mm-dd') %>\n" +
" (c) 2013 <NAME>\n" +
" <%= pkg.homepage %> */\n"
grunt.initConfig
pkg: grunt.file.readJSON("package.json")
coffee:
compile:
options:
join: true
files:
"dist/jpeg_camera.js": [
"src/jpeg_camera.coffee",
"src/jpeg_camera_html5.coffee",
"src/jpeg_camera_flash.coffee",
"src/snapshot.coffee",
"src/stats.coffee"
]
"dist/jpeg_camera_no_flash.js": [
"src/jpeg_camera.coffee",
"src/jpeg_camera_html5.coffee",
"src/snapshot.coffee",
"src/stats.coffee"
]
uglify:
minify:
options:
banner: banner
files:
"dist/jpeg_camera.min.js": ["dist/jpeg_camera.js"]
"dist/jpeg_camera_no_flash.min.js": ["dist/jpeg_camera_no_flash.js"]
concat:
add_banner:
options:
banner: banner
files:
"dist/jpeg_camera.js": ["dist/jpeg_camera.js"]
"dist/jpeg_camera_no_flash.js": ["dist/jpeg_camera_no_flash.js"]
with_dependencies:
options:
banner: "/*! SWFObject + Canvas-to-Blob + JpegCamera */\n\n"
files:
"dist/jpeg_camera_with_dependencies.min.js": [
"dist/swfobject.min.js",
"dist/canvas-to-blob.min.js",
"dist/jpeg_camera.min.js"
]
exec:
swf:
cmd: "mxmlc -static-link-runtime-shared-libraries " +
"-output dist/jpeg_camera.swf src/as3/JpegCamera.as"
rm_doc:
cmd: "rm -fr doc"
doc:
cmd: "PATH=\"$(npm bin):$PATH\" codo; " +
"cp index.html doc/index.html"
grunt.loadNpmTasks "grunt-contrib-coffee"
grunt.loadNpmTasks "grunt-contrib-uglify"
grunt.loadNpmTasks "grunt-contrib-concat"
grunt.loadNpmTasks "grunt-exec"
grunt.registerTask "rubygem", "Prepare gem for Ruby on Rails apps", ->
js_files = [
"jpeg_camera.js",
"jpeg_camera.min.js",
"jpeg_camera_no_flash.js",
"jpeg_camera_no_flash.min.js",
"canvas-to-blob.js",
"canvas-to-blob.min.js",
"swfobject.js",
"swfobject.min.js"
]
for file in js_files
grunt.file.copy "dist/#{file}",
"vendor/assets/javascripts/jpeg_camera/#{file}"
grunt.file.copy "dist/jpeg_camera.swf",
"app/assets/images/jpeg_camera/jpeg_camera.swf"
grunt.file.copy "dist/shutter.mp3",
"app/assets/audios/jpeg_camera/shutter.mp3"
grunt.file.copy "dist/shutter.ogg",
"app/assets/audios/jpeg_camera/shutter.ogg"
grunt.registerTask "js",
["coffee", "uglify", "concat:add_banner", "concat:with_dependencies"]
grunt.registerTask "swf", ["exec:swf"]
grunt.registerTask "doc", ["exec:rm_doc", "exec:doc"]
grunt.registerTask "dist", ["js", "swf", "rubygem"]
grunt.registerTask "all", ["dist", "doc"]
grunt.registerTask "default", ["all"]
<|start_filename|>doc/extra/README.md.html<|end_filename|>
<!DOCTYPE html>
<html>
<head>
<meta charset='UTF-8'>
<title>JpegCamera Documentation</title>
<script src='../javascript/application.js'></script>
<script src='../javascript/search.js'></script>
<link rel='stylesheet' href='../stylesheets/application.css' type='text/css'>
</head>
<body>
<div id='base' data-path='../'></div>
<div id='header'>
<div id='menu'>
<a href='../extra/README.md.html' title='JpegCamera'>
JpegCamera
</a>
»
<a href='../alphabetical_index.html' title='Index'>
Index
</a>
»
<span class='title'>README.md</span>
</div>
</div>
<div id='content'>
<nav class='toc'>
<p class='title'>
<a class='hide_toc' href='#'>
<strong>Table of Contents</strong>
</a>
<small>
(<a class='float_toc' href='#'>left</a>)
</small>
</p>
</nav>
<div id='filecontents'>
<h2 id="about">About</h2><p>JpegCamera is a JavaScript library that allows you to display a camera stream on
a web page and then capture, show and upload JPEG snapshots to the server. It
uses HTML5 in Chrome, Firefox and Opera and falls back to Flash in less capable
browsers. The video stream is placed without any UI in a container of your
choice and you control it through a JavaScript API and your own UI elements.</p><p>The idea is based on a similar
<a href="https://github.com/mattclements/jpegcam">JpegCam</a> library which was Flash only.
Besides working without Flash and offering a cleaner, more modern API, JpegCamera
has some nice, new features.</p><h2 id="features">Features</h2>
<ul>
<li>Works natively in Chrome, Firefox, Opera and with a Flash plugin in all other
browsers.</li>
<li>Manage and upload multiple snapshots at once. You don't have to wait for the
first upload to finish before capturing the next image. This means you can
take a few shots in a short interval.</li>
<li>You can get snapshots for display outside the camera container in browsers
that support the <code>canvas</code> element - even when using the Flash fallback.</li>
<li>Allows you to retry failed uploads.</li>
<li>Easily read server response text and code after upload.</li>
<li>Send CSRF tokens to secure your user's session from <a href="http://en.wikipedia.org/wiki/Cross-site_request_forgery#Prevention">Cross-site request
forgery</a></li>
<li>Prevents users from messing with HTML5 VIDEO or Flash object elements
by overlaying a transparent DIV over them after initialization.</li>
<li>Makes sure the camera is really ready by checking the stream's color standard
deviation. A safeguard against weird all-black or all-white snapshots.</li>
</ul>
<h2 id="demo">Demo</h2><p>Check out the <a href="https://amw.github.io/jpeg_camera/demo/">demo page</a>.</p><h2 id="dependencies">Dependencies</h2>
<ul>
<li><a href="https://github.com/blueimp/JavaScript-Canvas-to-Blob">Canvas-to-Blob</a>
polyfill for the standard JavaScript <code>canvas.toBlob</code> method.</li>
<li><a href="http://code.google.com/p/swfobject/">SWFObject</a> for embedding the
Flash-based fallback.</li>
</ul><p>For convenience these scripts are packaged with JpegCamera.</p><h2 id="installation">Installation</h2><p>You can load JpegCamera directly on any web page, but if you're writing a Rails
3.1 application, consider using a gem. In either case you have the option
of loading the full library, which includes the HTML5 implementation with a Flash fallback,
or the HTML5-only version.</p><h3 id="standalone-app">Standalone app</h3><p>Copy all the files from <code>dist</code> into a <code>jpeg_camera</code> directory under your server's
root.</p><p>Load JpegCamera and its dependencies in the <code>HEAD</code> section of your page.</p><pre><code><script src="/jpeg_camera/swfobject.min.js" type="text/javascript"></script>
<script src="/jpeg_camera/canvas-to-blob.min.js" type="text/javascript"></script>
<script src="/jpeg_camera/jpeg_camera.min.js" type="text/javascript"></script>
</code></pre><p>SWFObject and Canvas-to-Blob are stored in separate files so that you don't have
to load them again if you already use them in your project. If you want to cut
down on HTTP requests then there is a concatenated version you can use.</p><pre><code><script src="/jpeg_camera/jpeg_camera_with_dependencies.min.js" type="text/javascript"></script>
</code></pre><p>If you want to use the HTML5-only version you can load
<code>jpeg_camera_no_flash.min.js</code>. There is no "with dependencies" version of this
file, so you have to remember to also load Canvas-to-Blob. You don't need
SWFObject for HTML5.</p><h3 id="ruby-on-rails-apps">Ruby on Rails apps</h3><p>If you use Ruby on Rails version 3.1 (or higher) then you can use a gem and
take advantage of the assets pipeline.</p><pre><code>gem "jpeg_camera", "~> 1.3.2"
</code></pre><p>Create a file <code>jpeg_camera.js.coffee.erb</code> somewhere in the
<code>app/assets/javascripts</code> tree.</p><pre><code>#= require jpeg_camera/swfobject
#= require jpeg_camera/canvas-to-blob
#= require jpeg_camera/jpeg_camera
$ ->
if window.JpegCamera
JpegCamera.DefaultOptions.swf_url =
"<%= asset_path "jpeg_camera/jpeg_camera.swf" %>"
JpegCamera.DefaultOptions.shutter_mp3_url =
"<%= asset_path "jpeg_camera/shutter.mp3" %>"
JpegCamera.DefaultOptions.shutter_ogg_url =
"<%= asset_path "jpeg_camera/shutter.ogg" %>"
JpegCamera.DefaultOptions.csrf_token =
$("meta[name=\"csrf-token\"]").attr("content")
</code></pre><p>SWFObject and Canvas-to-Blob are stored in separate files so that you don't have
to load them again if you already use them in your project. The assets pipeline
will take care of minifying and concatenating everything into one script.</p><p>If you want to use the HTML5-only version then change the <code>jpeg_camera</code> require
directive into this one:</p><pre><code>#= require jpeg_camera/jpeg_camera_no_flash
</code></pre><h2 id="usage">Usage</h2>
<pre><code>var camera = new JpegCamera("#camera");
var snapshot = camera.capture();
snapshot.show(); // Display the snapshot
snapshot.upload({api_url: "/upload_image"}).done(function(response) {
response_container.innerHTML = response;
this.discard(); // discard snapshot and show video stream again
}).fail(function(status_code, error_message, response) {
alert("Upload failed with status " + status_code);
});
</code></pre><p>A detailed documentation using in-code comments is maintained for
<a href="https://amw.github.io/jpeg_camera/doc/class/JpegCamera.html">JpegCamera</a> and
<a href="https://amw.github.io/jpeg_camera/doc/class/Snapshot.html">Snapshot</a>
classes.</p><h2 id="user-privacy">User privacy</h2><p>Respect your users privacy. Make sure they understand why you want to capture
their webcam image and what you're going to do with it. A useful information
is whether you're only going to use the image on the client side or if
you're going to upload it to some server.</p><p>To protect their identity and their data, host your app on HTTPS servers.
JpegCamera does not enforce this, but some browsers promise to do so in the
future. Google Chrome already forbids HTTP websites from accessing camera
through <code>getUserMedia</code> in their Canary release channel.
<a href="https://sites.google.com/a/chromium.org/dev/Home/chromium-security/deprecating-powerful-features-on-insecure-origins">Read more</a>.</p><h2 id="caveats">Caveats</h2><p>To use Flash fallback your camera container must be at least 215 pixels wide and
138 pixels tall. This is the minimum to display privacy settings dialog.</p><p>With Flash in some browsers it's impossible to read response body for requests
that finish with status codes from outside the 2XX range (like 404 Not Found or
422 Unprocessable Entity). If you're using version of JpegCamera with Flash
fallback your application should not rely on reading body of these responses.
The status code number is always available.</p><p>Current stable versions of Firefox and Opera support getUserMedia, but do not
support Web Audio API. I have decided against loading a Flash object in
these browsers so JpegCamera will be silent.</p><h2 id="contributing">Contributing</h2><p>The source code is available on <a href="https://github.com/amw/jpeg_camera">Github</a>.
Please send pull requests on topic branches.</p><p>To build dist files from source you need <code>npm</code> — Node Package Manager.</p><pre><code>npm install # install required dependencies
npm install -g grunt-cli # install grunt command
grunt dist # build js & swf files
grunt js # only builds js files
grunt swf # only builds swf file
grunt doc # update documentation
grunt # build dist files and update documentation
</code></pre><p>To build swf file you need to have <code>mxmlc</code> available in your <code>$PATH</code>. It comes
in the <a href="http://www.adobe.com/devnet/flex/flex-sdk-download.html">Flex SDK</a>.</p><h2 id="acknowledgements">Acknowledgements</h2><p>Thanks to <NAME> for creating and <NAME> for maintaining
Flash-based <a href="http://code.google.com/p/jpegcam/">JpegCam library</a> which I have
been using until HTML5 became a viable solution. If you're interested here's
<a href="https://github.com/mattclements/jpegcam">Matt's repo</a> and here's
<a href="https://github.com/amw/jpegcam">mine</a>. Thanks to everyone else contributing to
that project.</p><p>Copyright <a href="http://adamwrobel.com"><NAME></a>, released under the MIT License.</p>
</div>
</div>
<div id='footer'>
June 06, 17 22:09:31 by
<a href='https://github.com/coffeedoc/codo' title='CoffeeScript API documentation generator'>
Codo
</a>
2.0.11
✲
Press H to see the keyboard shortcuts
✲
<a href='http://twitter.com/netzpirat' target='_parent'>@netzpirat</a>
✲
<a href='http://twitter.com/_inossidabile' target='_parent'>@_inossidabile</a>
</div>
<iframe id='search_frame'></iframe>
<div id='fuzzySearch'>
<input type='text'>
<ol></ol>
</div>
<div id='help'>
<p>
Quickly fuzzy find classes, mixins, methods, file:
</p>
<ul>
<li>
<span>T</span>
Open fuzzy finder dialog
</li>
</ul>
<p>
Control the navigation frame:
</p>
<ul>
<li>
<span>L</span>
Toggle list view
</li>
<li>
<span>C</span>
Show class list
</li>
<li>
<span>I</span>
Show mixin list
</li>
<li>
<span>F</span>
Show file list
</li>
<li>
<span>M</span>
Show method list
</li>
<li>
<span>E</span>
Show extras list
</li>
</ul>
<p>
You can focus and blur the search input:
</p>
<ul>
<li>
<span>S</span>
Focus search input
</li>
<li>
<span>Esc</span>
Blur search input
</li>
</ul>
</div>
</body>
</html>
<|start_filename|>doc/class/Snapshot.html<|end_filename|>
<!DOCTYPE html>
<html>
<head>
<meta charset='UTF-8'>
<title>JpegCamera Documentation</title>
<script src='../javascript/application.js'></script>
<script src='../javascript/search.js'></script>
<link rel='stylesheet' href='../stylesheets/application.css' type='text/css'>
</head>
<body>
<div id='base' data-path='../'></div>
<div id='header'>
<div id='menu'>
<a href='../extra/README.md.html' title='JpegCamera'>
JpegCamera
</a>
»
<a href='../alphabetical_index.html' title='Index'>
Index
</a>
»
<span class='title'>Snapshot</span>
</div>
</div>
<div id='content'>
<h1>
Class:
Snapshot
</h1>
<table class='box'>
<tr>
<td>Defined in:</td>
<td>src/snapshot.coffee</td>
</tr>
</table>
<h2>Overview</h2>
<div class='docstring'>
<p>Snapshot taken using <a href='../class/JpegCamera.html'>JpegCamera</a>.</p>
</div>
<div class='tags'>
</div>
<h2>Instance Method Summary</h2>
<ul class='summary'>
<li>
<span class='signature'>
<a href='#show-dynamic'>
#
(Snapshot)
<b>show</b><span>()</span>
</a>
</span>
<span class='desc'>
Display the snapshot with the camera element it was taken with.
</span>
</li>
<li>
<span class='signature'>
<a href='#hide-dynamic'>
#
(Snapshot)
<b>hide</b><span>()</span>
</a>
</span>
<span class='desc'>
Stop displaying the snapshot and return to showing live camera stream.
</span>
</li>
<li>
<span class='signature'>
<a href='#get_stats-dynamic'>
#
(void)
<b>get_stats</b><span>(callback)</span>
</a>
</span>
<span class='desc'>
Calculate snapshot pixel statistics (mean gray value, std).
</span>
</li>
<li>
<span class='signature'>
<a href='#get_canvas-dynamic'>
#
(Boolean)
<b>get_canvas</b><span>(callback)</span>
</a>
</span>
<span class='desc'>
Get canvas element showing the snapshot.
</span>
</li>
<li>
<span class='signature'>
<a href='#get_blob-dynamic'>
#
(Boolean)
<b>get_blob</b><span>(callback, mime_type = "image/jpeg")</span>
</a>
</span>
<span class='desc'>
Get the file that would be uploaded to the server as a Blob object.
</span>
</li>
<li>
<span class='signature'>
<a href='#get_image_data-dynamic'>
#
(void)
<b>get_image_data</b><span>(callback)</span>
</a>
</span>
<span class='desc'>
Get ImageData object containing color values for each pixel of the snapshot.
</span>
</li>
<li>
<span class='signature'>
<a href='#upload-dynamic'>
#
(Snapshot)
<b>upload</b><span>(options = {})</span>
</a>
</span>
<span class='desc'>
Upload the snapshot to the server.
</span>
</li>
<li>
<span class='signature'>
<a href='#done-dynamic'>
#
(Snapshot)
<b>done</b><span>(callback)</span>
</a>
</span>
<span class='desc'>
Bind callback for upload complete event.
</span>
</li>
<li>
<span class='signature'>
<a href='#fail-dynamic'>
#
(Snapshot)
<b>fail</b><span>(callback)</span>
</a>
</span>
<span class='desc'>
Bind callback for upload error event.
</span>
</li>
<li>
<span class='signature'>
<a href='#discard-dynamic'>
#
(void)
<b>discard</b><span>()</span>
</a>
</span>
<span class='desc'>
Hide and discard this snapshot.
</span>
</li>
</ul>
<h2>Instance Method Details</h2>
<div class='methods'>
<div class='method_details'>
<p class='signature' id='show-dynamic'>
#
(Snapshot)
<b>show</b><span>()</span>
<br>
</p>
<div class='docstring'>
<p>Display the snapshot with the camera element it was taken with.</p>
</div>
<div class='tags'>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
(
<tt><a href='../class/Snapshot.html'>Snapshot</a></tt>
)
—
<span class='desc'>Self for chaining. </span>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='hide-dynamic'>
#
(Snapshot)
<b>hide</b><span>()</span>
<br>
</p>
<div class='docstring'>
<p>Stop displaying the snapshot and return to showing live camera stream.</p><p>Ignored if camera is displaying different snapshot.</p>
</div>
<div class='tags'>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
(
<tt><a href='../class/Snapshot.html'>Snapshot</a></tt>
)
—
<span class='desc'>Self for chaining. </span>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='get_stats-dynamic'>
#
(void)
<b>get_stats</b><span>(callback)</span>
<br>
</p>
<div class='docstring'>
<p>Calculate snapshot pixel statistics (mean gray value, std).</p><p>Because reading image data can take a while when Flash fallback is being
used this method does not return the data immediately. Instead it accepts
a callback that later will be called with a <a href='../class/Stats.html'>Stats</a> object as an argument.
Snapshot will be available as <code>this</code>.</p>
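<p>An illustrative sketch (not part of the library's own docs; the <code>camera</code> variable and the <code>20.0</code> threshold are assumptions): it uses the documented <code>mean</code> and <code>std</code> properties of <a href='../class/Stats.html'>Stats</a> to discard frames that look uniformly dark or bright.</p><pre><code>var snapshot = camera.capture();
snapshot.get_stats(function(stats) {
  // "this" is the snapshot; stats.mean and stats.std are gray-value statistics
  if (stats.std < 20.0) {
    this.discard(); // assumed threshold: almost no variation, likely a bad frame
  } else {
    this.show();
  }
});
</code></pre>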
</div>
<div class='tags'>
<h3>Parameters:</h3>
<ul class='param'>
<li>
<span class='name'>callback</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to call when data is available. Snapshot object will be available as <code>this</code>, the <a href='../class/Stats.html'>Stats</a> instance will be passed as the first argument. </span>
</li>
</ul>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
<tt>void</tt>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='get_canvas-dynamic'>
#
(Boolean)
<b>get_canvas</b><span>(callback)</span>
<br>
</p>
<div class='docstring'>
<p>Get canvas element showing the snapshot.</p><p>This can be used to display the snapshot outside the camera's container.
You can show multiple snapshots at a time and allow the user to pick one
he likes best.</p><p>Canvas produced by this method has a resolution of the snapshot (which
depends on the camera's native resolution), not that of the camera's
container. Use CSS to display this canvas in different sizes.</p><p>Because reading image data can take a while when Flash fallback is being
used this method does not return the <code>canvas</code> element immediately. Instead
it accepts a callback that later will be called with the <code>canvas</code> element as
an argument. Snapshot will be available as <code>this</code>.</p><p>Multiple calls to this method will yield the same canvas element.</p><p>One caveat is that the underlying data of this canvas is not mirrored like
the stream shown in the camera container. A special CSS transform directive
is applied on it so that it looks like the picture in the camera when
displayed. This only matters when manipulating the canvas or reading its
data. You can read more about mirroring in <a href='../class/JpegCamera.html#capture-dynamic'>JpegCamera#capture</a>.</p><p>This method doesn't work in Internet Explorer 8 or earlier, because it does
not support the <code>canvas</code> element. Call <a href='../class/JpegCamera.html#canvas_supported-static'>JpegCamera.canvas_supported</a> to learn
whether you can use this method.</p>
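<p>An illustrative sketch (not part of the library's own docs; the <code>snapshot</code> variable, the thumbnail size and the <code>thumbnails</code> container id are assumptions): it appends the returned canvas to another element on the page and scales it down with CSS.</p><pre><code>var supported = snapshot.get_canvas(function(canvas) {
  // "this" is the snapshot; CSS controls the displayed size of the canvas
  canvas.style.width = "160px"; // assumed thumbnail width
  document.getElementById("thumbnails").appendChild(canvas); // assumed container
});
if (!supported) {
  // canvas is not available in this browser (e.g. Internet Explorer 8 or earlier)
}
</code></pre>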
</div>
<div class='tags'>
<h3>Parameters:</h3>
<ul class='param'>
<li>
<span class='name'>callback</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to call when <code>canvas</code> element is available. Snapshot object will be available as <code>this</code>, the <code>canvas</code> element will be passed as the first argument. </span>
</li>
</ul>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
(
<tt>Boolean</tt>
)
—
<span class='desc'>Whether canvas is supported in this browser. </span>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='get_blob-dynamic'>
#
(Boolean)
<b>get_blob</b><span>(callback, mime_type = "image/jpeg")</span>
<br>
</p>
<div class='docstring'>
<p>Get the file that would be uploaded to the server as a Blob object.</p><p>This can be useful if you want to stream the data via a websocket. Note that
using <code>upload</code> is more efficient if all you want to do is upload this file
to a server via POST call.</p><p>This method doesn't work in Internet Explorer 8 or earlier, because it does
not support the <code>canvas</code> element. Call <a href='../class/JpegCamera.html#canvas_supported-static'>JpegCamera.canvas_supported</a> to learn
whether you can use this method.</p><p>Because preparing image blob can take a while this method does not return
the data immediately. Instead it accepts a callback that later will be
called with the data object as an argument. Snapshot will be available as
<code>this</code>.</p><p>Multiple calls to this method will yield the same data object.</p>
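<p>An illustrative sketch (not part of the library's own docs; the <code>snapshot</code> variable and the already-open WebSocket in <code>socket</code> are assumptions): it streams the JPEG blob over the socket instead of POSTing it.</p><pre><code>snapshot.get_blob(function(blob) {
  // "this" is the snapshot; "socket" is an assumed, already-open WebSocket
  socket.send(blob);
}, "image/jpeg");
</code></pre>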
</div>
<div class='tags'>
<h3>Parameters:</h3>
<ul class='param'>
<li>
<span class='name'>callback</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to call when data is available. Snapshot object will be available as <code>this</code>, the blob object will be passed as the first argument. </span>
</li>
<li>
<span class='name'>mime_type</span>
<span class='type'>
(
<tt>String</tt>
)
</span>
—
<span class='desc'>Mime type of the requested blob. "image/jpeg" by default. </span>
</li>
</ul>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
(
<tt>Boolean</tt>
)
—
<span class='desc'>Whether canvas is supported in this browser. </span>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='get_image_data-dynamic'>
#
(void)
<b>get_image_data</b><span>(callback)</span>
<br>
</p>
<div class='docstring'>
<p>Get ImageData object containing color values for each pixel of the snapshot.</p><p>Data produced by this method has a resolution of the snapshot (which depends
on the camera's native resolution), not that of the camera's container.</p><p>Read more about ImageData object on <a href="https://developer.mozilla.org/en-US/docs/Web/API/ImageData">Mozilla's website
</a>.</p><p>Because reading image data can take a while when Flash fallback is being
used this method does not return the data immediately. Instead it accepts
a callback that later will be called with the data object as an argument.
Snapshot will be available as <code>this</code>.</p><p>Multiple calls to this method will yield the same data object.</p><p>One caveat is that the returned data is not mirrored like the stream shown
in the camera container. This only matters when manipulating the canvas or
reading its data. You can read more about mirroring in
<a href='../class/JpegCamera.html#capture-dynamic'>JpegCamera#capture</a>.</p><p>This method returns native <a href="https://developer.mozilla.org/en-US/docs/Web/API/ImageData">ImageData
</a> object in all
browsers except Internet Explorer 8 or earlier which does not support
the <code>canvas</code> element. In that browser a generic JavaScript object will be
returned that mimics the native format. Call <a href='../class/JpegCamera.html#canvas_supported-static'>JpegCamera.canvas_supported</a>
to learn whether <code>canvas</code> is supported by the browser.</p>
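<p>An illustrative sketch (not part of the library's own docs; the <code>snapshot</code> variable is an assumption): it averages the RGBA values of the returned ImageData to compute a mean gray value, similar to what <a href='../class/Snapshot.html#get_stats-dynamic'>get_stats</a> reports.</p><pre><code>snapshot.get_image_data(function(data) {
  // data.data holds RGBA values; average the three color channels per pixel
  var sum = 0;
  for (var i = 0; i < data.data.length; i += 4) {
    sum += (data.data[i] + data.data[i + 1] + data.data[i + 2]) / 3;
  }
  var mean = sum / (data.width * data.height);
  console.log("mean gray value", mean);
});
</code></pre>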
</div>
<div class='tags'>
<h3>Parameters:</h3>
<ul class='param'>
<li>
<span class='name'>callback</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to call when data is available. Snapshot object will be available as <code>this</code>, the data will be passed as the first argument. </span>
</li>
</ul>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
<tt>void</tt>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='upload-dynamic'>
#
(Snapshot)
<b>upload</b><span>(options = {})</span>
<br>
</p>
<div class='docstring'>
<p>Upload the snapshot to the server.</p><p>The snapshot is uploaded using a POST request with the JPEG file sent as RAW
data. This is not like a multipart form upload using a file element where the
file is given a name and is encoded along with other form keys. To read the
file contents on the server side use <code>request.raw_post</code> in Ruby on Rails or
<code>$HTTP_RAW_POST_DATA</code> in PHP.</p><p>Upload completes successfully only if the server responds with status code
200. Any other code will be handled via the on_upload_fail callback. Your
application is free to inspect the status code and response text in that
handler to decide whether that response is acceptable or not.</p><p>You cannot have multiple uploads for one snapshot running at the same time,
but you are free to start another upload after one succeeds or fails.</p><p>All of the options can have their defaults set when constructing the camera
object or calling <a href='../class/JpegCamera.html#capture-dynamic'>JpegCamera#capture</a>.</p>
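<p>An illustrative sketch (not part of the library's own docs; the endpoint URL, the <code>my_csrf_token</code> variable and the retry limits are assumptions): it combines the documented <code>api_url</code>, <code>csrf_token</code> and <code>retry_if</code> options with the chained <a href='../class/Snapshot.html#done-dynamic'>done</a> and <a href='../class/Snapshot.html#fail-dynamic'>fail</a> callbacks.</p><pre><code>snapshot.upload({
  api_url: "/upload_image",  // assumed endpoint
  csrf_token: my_csrf_token, // assumed variable holding the session's CSRF token
  retry_if: function(status_code, error_message, response, retry) {
    // retry server errors up to 3 times, waiting one second between attempts
    if (status_code >= 500 && retry <= 3) {
      return 1000;
    }
    return false;
  }
}).done(function(response) {
  this.discard(); // discard the snapshot and show the video stream again
}).fail(function(status_code, error_message, response) {
  alert("Upload failed with status " + status_code);
});
</code></pre>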
</div>
<div class='tags'>
<h3>
Options Hash:
(options):
</h3>
<ul class='options'>
<li>
<span class='name'>api_url</span>
<span class='type'>
(
<tt>String</tt>
)
</span>
—
<span class='desc'>URL where the snapshots will be uploaded. </span>
</li>
<li>
<span class='name'>csrf_token</span>
<span class='type'>
(
<tt>String</tt>
)
</span>
—
<span class='desc'>CSRF token to be sent in the <strong>X-CSRF-Token</strong> header during upload. </span>
</li>
<li>
<span class='name'>timeout</span>
<span class='type'>
(
<tt>Integer</tt>
)
</span>
—
<span class='desc'><strong>IGNORED</strong> (<strong>NOT</strong> <strong>IMPLEMENTED</strong>) The number of milliseconds a request can take before automatically being terminated. Default of 0 means there is no timeout. </span>
</li>
<li>
<span class='name'>on_upload_done</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to call when upload completes. Snapshot object will be available as <code>this</code>, response body will be passed as the first argument. Calling <a href='../class/Snapshot.html#done-dynamic'>done</a> before the upload exits will change the handler for this upload. </span>
</li>
<li>
<span class='name'>on_upload_fail</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to call when upload fails. Snapshot object will be available as <code>this</code>, response code will be passed as the first argument followed by error message and response body. Calling <a href='../class/Snapshot.html#fail-dynamic'>fail</a> before the upload exits will change the handler for this upload. </span>
</li>
<li>
<span class='name'>retry_if</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to be called before any upload done/fail callbacks to decide if the upload should be retried. By default it's null and uploads are never retried. Inside the function snapshot object will be available as <code>this</code> and the arguments will be: <code>status_code</code>, <code>error_message</code>, <code>response</code>, <code>retry</code>. <code>retry</code> is a number incremented for each retry and starting with 1 when the upload finishes for the first time. If the function returns <code>true</code> or <code>0</code> then upload will be retried immediately. Number greater than <code>0</code> will delay the retry by that many milliseconds. Any other value will be treated as a decision not to retry the upload and one of the <code>on_upload_done</code> or <code>on_upload_fail</code> callbacks will be fired instead. </span>
</li>
<li>
<span class='name'>retry_success</span>
<span class='type'>
(
<tt>Boolean</tt>
)
</span>
—
<span class='desc'>By default <code>retry_if</code> is not called for uploads that finish with a status code from the 2XX range. Set this to <code>true</code> if you want to retry some of these responses. This can be useful if you're experiencing some network oddities. </span>
</li>
</ul>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
(
<tt><a href='../class/Snapshot.html'>Snapshot</a></tt>
)
—
<span class='desc'>Self for chaining. </span>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='done-dynamic'>
#
(Snapshot)
<b>done</b><span>(callback)</span>
<br>
</p>
<div class='docstring'>
<p>Bind callback for upload complete event.</p><p>The callback to fire when the previously requested <a href='../class/Snapshot.html#upload-dynamic'>upload</a>
operation succeeds. This is just syntactic sugar that allows one to write:
<code>snapshot.upload().done(done_callback)</code> instead of
<code>snapshot.upload(on_upload_done: done_callback)</code>. This callback will be
forgotten after the next call to <a href='../class/Snapshot.html#upload-dynamic'>upload</a>.</p><p>If the event has already happened the argument will be called immediately.</p>
</div>
<div class='tags'>
<h3>Parameters:</h3>
<ul class='param'>
<li>
<span class='name'>callback</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to call when upload completes. Snapshot object will be available as <code>this</code>, response body will be passed as the first argument. </span>
</li>
</ul>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
(
<tt><a href='../class/Snapshot.html'>Snapshot</a></tt>
)
—
<span class='desc'>Self for chaining. </span>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='fail-dynamic'>
#
(Snapshot)
<b>fail</b><span>(callback)</span>
<br>
</p>
<div class='docstring'>
<p>Bind callback for upload error event.</p><p>The callback to fire when the previously requested <a href='../class/Snapshot.html#upload-dynamic'>upload</a>
operation fails. This is just syntactic sugar that allows one to write:
<code>snapshot.upload().fail(fail_callback)</code> instead of
<code>snapshot.upload(on_upload_fail: fail_callback)</code>. This callback will be
forgotten after the next call to <a href='../class/Snapshot.html#upload-dynamic'>upload</a>.</p><p>If the event has already happened the argument will be called immediately.</p>
</div>
<div class='tags'>
<h3>Parameters:</h3>
<ul class='param'>
<li>
<span class='name'>callback</span>
<span class='type'>
(
<tt>Function</tt>
)
</span>
—
<span class='desc'>Function to call when upload fails. Snapshot object will be available as <code>this</code>, response code will be passed as the first argument with response body or error message as the second argument if available. </span>
</li>
</ul>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
(
<tt><a href='../class/Snapshot.html'>Snapshot</a></tt>
)
—
<span class='desc'>Self for chaining. </span>
</li>
</ul>
</div>
</div>
<div class='method_details'>
<p class='signature' id='discard-dynamic'>
#
(void)
<b>discard</b><span>()</span>
<br>
</p>
<div class='docstring'>
<p>Hide and discard this snapshot.</p><p>After discarding a snapshot an attempt to show or upload it will raise
an error.</p>
</div>
<div class='tags'>
<h3>Returns:</h3>
<ul class='return'>
<li>
<span class='type'></span>
<tt>void</tt>
</li>
</ul>
</div>
</div>
</div>
</div>
<div id='footer'>
June 06, 17 22:09:31 by
<a href='https://github.com/coffeedoc/codo' title='CoffeeScript API documentation generator'>
Codo
</a>
2.0.11
✲
Press H to see the keyboard shortcuts
✲
<a href='http://twitter.com/netzpirat' target='_parent'>@netzpirat</a>
✲
<a href='http://twitter.com/_inossidabile' target='_parent'>@_inossidabile</a>
</div>
<iframe id='search_frame'></iframe>
<div id='fuzzySearch'>
<input type='text'>
<ol></ol>
</div>
<div id='help'>
<p>
Quickly fuzzy find classes, mixins, methods, file:
</p>
<ul>
<li>
<span>T</span>
Open fuzzy finder dialog
</li>
</ul>
<p>
Control the navigation frame:
</p>
<ul>
<li>
<span>L</span>
Toggle list view
</li>
<li>
<span>C</span>
Show class list
</li>
<li>
<span>I</span>
Show mixin list
</li>
<li>
<span>F</span>
Show file list
</li>
<li>
<span>M</span>
Show method list
</li>
<li>
<span>E</span>
Show extras list
</li>
</ul>
<p>
You can focus and blur the search input:
</p>
<ul>
<li>
<span>S</span>
Focus search input
</li>
<li>
<span>Esc</span>
Blur search input
</li>
</ul>
</div>
</body>
</html>
<|start_filename|>doc/method_list.html<|end_filename|>
<!DOCTYPE html>
<html>
<head>
<meta charset='UTF-8'>
<title>JpegCamera Documentation</title>
<script src='javascript/application.js'></script>
<script src='javascript/search.js'></script>
<link rel='stylesheet' href='stylesheets/application.css' type='text/css'>
</head>
<body class='list'>
<div class='list' id='content'>
<h1 class='full_list_header'>Method List</h1>
<nav>
<a target='_self' href='class_list.html'>
Classes
</a>
<a target='_self' href='file_list.html'>
Files
</a>
<a target='_self' href='method_list.html'>
Methods
</a>
<a target='_self' href='extra_list.html'>
Extras
</a>
</nav>
<div id='search'>
Search:
<input type='text'>
</div>
<ul>
<li>
<a href='class/JpegCamera.html#canvas_supported-static' target='main' title='canvas_supported'>
.canvas_supported
</a>
<small>
(JpegCamera)
</small>
</li>
<li>
<a href='class/JpegCamera.html#capture-dynamic' target='main' title='capture'>
#capture
</a>
<small>
(JpegCamera)
</small>
</li>
<li>
<a href='class/JpegCamera.html#constructor-dynamic' target='main' title='constructor'>
#constructor
</a>
<small>
(JpegCamera)
</small>
</li>
<li>
<a href='class/Snapshot.html#discard-dynamic' target='main' title='discard'>
#discard
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/JpegCamera.html#discard_all-dynamic' target='main' title='discard_all'>
#discard_all
</a>
<small>
(JpegCamera)
</small>
</li>
<li>
<a href='class/Snapshot.html#done-dynamic' target='main' title='done'>
#done
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/JpegCamera.html#error-dynamic' target='main' title='error'>
#error
</a>
<small>
(JpegCamera)
</small>
</li>
<li>
<a href='class/Snapshot.html#fail-dynamic' target='main' title='fail'>
#fail
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/Snapshot.html#get_blob-dynamic' target='main' title='get_blob'>
#get_blob
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/Snapshot.html#get_canvas-dynamic' target='main' title='get_canvas'>
#get_canvas
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/Snapshot.html#get_image_data-dynamic' target='main' title='get_image_data'>
#get_image_data
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/JpegCamera.html#get_stats-dynamic' target='main' title='get_stats'>
#get_stats
</a>
<small>
(JpegCamera)
</small>
</li>
<li>
<a href='class/Snapshot.html#get_stats-dynamic' target='main' title='get_stats'>
#get_stats
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/Snapshot.html#hide-dynamic' target='main' title='hide'>
#hide
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/JpegCamera.html#ready-dynamic' target='main' title='ready'>
#ready
</a>
<small>
(JpegCamera)
</small>
</li>
<li>
<a href='class/Snapshot.html#show-dynamic' target='main' title='show'>
#show
</a>
<small>
(Snapshot)
</small>
</li>
<li>
<a href='class/JpegCamera.html#show_stream-dynamic' target='main' title='show_stream'>
#show_stream
</a>
<small>
(JpegCamera)
</small>
</li>
<li>
<a href='class/Snapshot.html#upload-dynamic' target='main' title='upload'>
#upload
</a>
<small>
(Snapshot)
</small>
</li>
</ul>
</div>
</body>
</html>
<|start_filename|>src/stats.coffee<|end_filename|>
# Contains some pixel statistics of {Snapshot} or camera stream.
#
# Can be retrieved using {JpegCamera#get_stats} or {Snapshot#get_stats} methods.
class Stats
# @property [Float] mean gray value of pixels (0-255)
mean: null
# @property [Float] standard deviation of gray values
std: null
<|start_filename|>doc/index.html<|end_filename|>
<html>
<head>
<title>JpegCamera Documentation</title>
<meta http-equiv="refresh"
content="0;URL=https://amw.github.io/jpeg_camera/doc/extra/README.md.html">
</head>
<body>
Please visit
<a href="https://amw.github.io/jpeg_camera/doc/extra/README.md.html">
https://amw.github.io/jpeg_camera/doc/extra/README.md.html
</a> for JpegCamera documentation.
</body>
</html>
<|start_filename|>src/jpeg_camera_html5.coffee<|end_filename|>
navigator.getUserMedia ||=
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia
window.AudioContext ||=
window.webkitAudioContext
# @private
check_canvas_to_blob = ->
canvas = document.createElement "canvas"
if canvas.getContext && !canvas.toBlob
throw "JpegCamera: Canvas-to-Blob is not loaded"
if navigator.getUserMedia
check_canvas_to_blob()
vorbis_audio = "audio/ogg; codecs=vorbis"
mpeg_audio = "audio/mpeg; "
# @private
can_play = (type) ->
elem = document.createElement "video"
!!(elem.canPlayType && elem.canPlayType(type).replace(/no/, ''))
# JpegCamera implementation that uses _getUserMedia_ to capture snapshots,
# _canvas_element_ to display them, _XHR_ to upload them to the server and
# optionally _Web_Audio_API_ to play shutter sound.
#
# @private
class JpegCameraHtml5 extends JpegCamera
_engine_init: ->
@_debug "Using HTML5 engine"
vertical_padding = Math.floor @view_height * 0.2
horizontal_padding = Math.floor @view_width * 0.2
@message = document.createElement "div"
@message.className = "message"
@message.style.width = "100%"
@message.style.height = "100%"
JpegCamera._add_prefixed_style @message, "boxSizing", "border-box"
@message.style.overflow = "hidden"
@message.style.textAlign = "center"
@message.style.paddingTop = "#{vertical_padding}px"
@message.style.paddingBottom = "#{vertical_padding}px"
@message.style.paddingLeft = "#{horizontal_padding}px"
@message.style.paddingRight = "#{horizontal_padding}px"
@message.style.position = "absolute"
@message.style.zIndex = 3
@message.innerHTML =
"Please allow camera access when prompted by the browser.<br><br>" +
"Look for camera icon around your address bar."
@container.appendChild @message
@video_container = document.createElement "div"
@video_container.style.width = "#{@view_width}px"
@video_container.style.height = "#{@view_height}px"
@video_container.style.overflow = "hidden"
@video_container.style.position = "absolute"
@video_container.style.zIndex = 1
@container.appendChild @video_container
@video = document.createElement 'video'
@video.autoplay = true
JpegCamera._add_prefixed_style @video, "transform", "scalex(-1.0)"
if window.AudioContext
if can_play vorbis_audio
@_load_shutter_sound @options.shutter_ogg_url
else if can_play mpeg_audio
@_load_shutter_sound @options.shutter_mp3_url
get_user_media_options =
video:
optional: [
{minWidth: 1280},
{minWidth: 640},
{minWidth: 480},
{minWidth: 360}
]
that = this
success =
(stream) ->
that._remove_message()
if window.URL
that.video.src = URL.createObjectURL stream
else
that.video.src = stream
that._block_element_access()
that._wait_for_video_ready()
failure =
# XXX Receives NavigatorUserMediaError object and searches for
# constant name matching error.code. With the current specification
# version this will always evaluate to
# `that._got_error("PERMISSION_DENIED")`.
(error) ->
that.message.innerHTML =
"<span style=\"color: red;\">" +
"You have denied camera access." +
"</span><br><br>" +
"Look for camera icon around your address bar to change your " +
"decision."
code = error.code
for key, value of error
continue if key == "code"
that._got_error key
return
that._got_error "UNKNOWN ERROR"
# XXX In an older spec first parameter was a string
try
navigator.getUserMedia get_user_media_options, success, failure
catch error
navigator.getUserMedia "video", success, failure
_engine_play_shutter_sound: ->
return unless @shutter_buffer
source = @audio_context.createBufferSource()
source.buffer = @shutter_buffer
source.connect @audio_context.destination
source.start 0
_engine_capture: (snapshot, mirror, quality, scale) ->
crop = @_get_capture_crop()
canvas = document.createElement "canvas"
canvas.width = Math.round crop.width * scale
canvas.height = Math.round crop.height * scale
context = canvas.getContext "2d"
context.drawImage @video,
crop.x_offset, crop.y_offset,
crop.width, crop.height,
0, 0,
Math.round(crop.width * scale), Math.round(crop.height * scale)
snapshot._canvas = canvas
snapshot._mirror = mirror
snapshot._quality = quality
_engine_display: (snapshot) ->
if @displayed_canvas
@container.removeChild @displayed_canvas
@displayed_canvas = snapshot._canvas
@displayed_canvas.style.width = "#{@view_width}px"
@displayed_canvas.style.height = "#{@view_height}px"
@displayed_canvas.style.top = 0
@displayed_canvas.style.left = 0
@displayed_canvas.style.position = "absolute"
@displayed_canvas.style.zIndex = 2
JpegCamera._add_prefixed_style @displayed_canvas,
"transform", "scalex(-1.0)"
@container.appendChild @displayed_canvas
_engine_get_canvas: (snapshot) ->
canvas = document.createElement "canvas"
canvas.width = snapshot._canvas.width
canvas.height = snapshot._canvas.height
context = canvas.getContext "2d"
context.drawImage snapshot._canvas, 0, 0
canvas
_engine_get_image_data: (snapshot) ->
canvas = snapshot._canvas
context = canvas.getContext "2d"
context.getImageData 0, 0, canvas.width, canvas.height
_engine_get_blob: (snapshot, mime, mirror, quality, callback) ->
if mirror
canvas = document.createElement "canvas"
canvas.width = snapshot._canvas.width
canvas.height = snapshot._canvas.height
context = canvas.getContext "2d"
context.setTransform 1, 0, 0, 1, 0, 0 # reset transformation matrix
context.translate canvas.width, 0
context.scale -1, 1
context.drawImage snapshot._canvas, 0, 0
else
canvas = snapshot._canvas
canvas.toBlob ((blob) -> callback blob), mime, quality
_engine_discard: (snapshot) ->
if snapshot._xhr
snapshot._xhr.abort()
delete snapshot._xhr
delete snapshot._canvas
_engine_show_stream: ->
if @displayed_canvas
@container.removeChild @displayed_canvas
@displayed_canvas = null
@video_container.style.display = "block"
_engine_upload: (snapshot, api_url, csrf_token, timeout) ->
@_debug "Uploading the file"
snapshot.get_blob (blob) ->
handler = (event) ->
delete snapshot._xhr
snapshot._status = event.target.status
snapshot._response = event.target.responseText
if snapshot._status >= 200 && snapshot._status < 300
snapshot._upload_done()
else
snapshot._error_message =
event.target.statusText || "Unknown error"
snapshot._upload_fail()
xhr = new XMLHttpRequest()
xhr.open 'POST', api_url
xhr.timeout = timeout
xhr.setRequestHeader "X-CSRF-Token", csrf_token if csrf_token
xhr.onload = handler
xhr.onerror = handler
xhr.onabort = handler
xhr.send blob
snapshot._xhr = xhr
, "image/jpeg"
_remove_message: ->
@message.style.display = "none"
_load_shutter_sound: (url) ->
return if @audio_context
@audio_context = new AudioContext()
request = new XMLHttpRequest()
request.open 'GET', url, true
request.responseType = 'arraybuffer'
that = this
request.onload = ->
that.audio_context.decodeAudioData request.response, (buffer) ->
that.shutter_buffer = buffer
request.send()
_wait_for_video_ready: ->
video_width = parseInt @video.videoWidth
video_height = parseInt @video.videoHeight
if video_width > 0 && video_height > 0
@video_container.appendChild @video
@video_width = video_width
@video_height = video_height
crop = @_get_video_crop()
@video.style.position = "relative"
@video.style.width = "#{crop.width}px"
@video.style.height = "#{crop.height}px"
@video.style.left = "#{crop.x_offset}px"
@video.style.top = "#{crop.y_offset}px"
@_prepared(@video_width, @video_height)
else if @_status_checks_count > 100
@_got_error "Camera failed to initialize in 10 seconds"
else
@_status_checks_count++
that = this
setTimeout (-> that._wait_for_video_ready()), 100
_status_checks_count: 0
_get_video_crop: ->
video_ratio = @video_width / @video_height
view_ratio = @view_width / @view_height
if video_ratio >= view_ratio
# fill height, crop width
@_debug "Filling height"
video_scale = @view_height / @video_height
scaled_video_width = Math.round @video_width * video_scale
width: scaled_video_width
height: @view_height
x_offset: -Math.floor((scaled_video_width - @view_width) / 2.0)
y_offset: 0
else
# fill width, crop height
@_debug "Filling width"
video_scale = @view_width / @video_width
scaled_video_height = Math.round @video_height * video_scale
width: @view_width
height: scaled_video_height
x_offset: 0
y_offset: -Math.floor((scaled_video_height - @view_height) / 2.0)
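# Illustrative note (not from the original source, numbers are hypothetical):
# for a 1280x720 stream shown in a 640x480 view, video_ratio (~1.78) exceeds
# view_ratio (~1.33), so the height is filled: video_scale = 480 / 720,
# scaled_video_width = Math.round(1280 * 480 / 720) = 853 and
# x_offset = -Math.floor((853 - 640) / 2) = -106, cropping the width evenly.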
_get_capture_crop: ->
video_ratio = @video_width / @video_height
view_ratio = @view_width / @view_height
if video_ratio >= view_ratio
# take full height, crop width
snapshot_width = Math.round @video_height * view_ratio
width: snapshot_width
height: @video_height
x_offset: Math.floor((@video_width - snapshot_width) / 2.0)
y_offset: 0
else
# take full width, crop height
snapshot_height = Math.round @video_width / view_ratio
width: @video_width
height: snapshot_height
x_offset: 0
y_offset: Math.floor((@video_height - snapshot_height) / 2.0)
video_width: null
video_height: null
window.JpegCamera = JpegCameraHtml5
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/actions/MSActionControl.java<|end_filename|>
package com.pesegato.MonkeySheet.actions;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
import com.pesegato.MonkeySheet.MSAnimationManager;
import com.pesegato.MonkeySheet.MTween;
import com.pesegato.MonkeySheet.MonkeySheetAppState;
import com.pesegato.timing.Timeable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class MSActionControl extends AbstractControl {
static Logger log = LoggerFactory.getLogger(MSActionControl.class);
public MTween anim;
public MTween nextAnim;
public String animation;
public int position;
boolean runOnce = false;
public MSAction msAction;
public MSActionControl(String anim, Timeable timeable) {
playForever(anim);
MonkeySheetAppState.timeable = timeable;
}
final public void playForever(String ani) {
log.debug("now playing FOREVER animation {}", ani);
anim = MonkeySheetAppState.getAnim(ani);
position = 0;
runOnce = false;
if (anim == null) {
log.warn("Running UNINITIALIZED animation {}, GOING TO CRASH VERY SOON!!!", ani);
}
}
public void playOnce(String ani) {
log.debug("now playing ONCE animation {}", ani);
anim = MonkeySheetAppState.getAnim(ani);
position = 0;
//nextAnim = MonkeySheetAppState.getAnim(ani);
runOnce = true;
if (anim == null) {
log.warn("Running UNINITIALIZED animation {}, GOING TO CRASH VERY SOON!!!" + ani);
}
}
/**
* Finite state machine for MSAction
*
* @return the msAction for the current state
*/
abstract MSAction updateActionState();
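// Illustrative sketch (not part of the original code): a concrete control
// would typically map its own state to one of its MSActions here, e.g.
//   MSAction updateActionState() {
//       msAction = isWalking ? walkAction : idleAction;
//       return msAction;
//   }
// where walkAction, idleAction and isWalking are hypothetical fields of the
// subclass.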
@Override
protected void controlUpdate(float tpf) {
if (MonkeySheetAppState.tTPF == 0) {
log.trace("msac position: {} - {}", position, anim.anim[position].getPosition());
position++;
if (position >= anim.anim.length - 1) {
if (runOnce) {
/*
if (nextAnim != null) {
anim = nextAnim;
position = 0;
nextAnim = null;
}
*/
if (msAction != null) {
log.trace("end of MSAction {}", msAction);
msAction.terminatedAnim();
updateActionState();
}
} else {
position = 0;
}
}
}
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/java/com/pesegato/collision/D4JSpaceDebugAppState.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.app.Application;
import com.jme3.app.state.BaseAppState;
import com.jme3.math.ColorRGBA;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
import java.util.*;
public class D4JSpaceDebugAppState extends BaseAppState {
protected ViewPort viewPort;
protected RenderManager rm;
Node stateGuiNode = new Node("D4J Debug Node");
protected HashMap<DebuggableBody, Node> labels = new HashMap<>();
ArrayList<DebuggableBody> bodies;
@Override
protected void initialize(Application app) {
this.rm = app.getRenderManager();
stateGuiNode.setCullHint(Spatial.CullHint.Never);
viewPort = rm.createMainView("D4J Debug Overlay", app.getCamera());
viewPort.setClearFlags(false, true, false);
viewPort.attachScene(stateGuiNode);
bodies = getState(D4JSpace2.class).bodies;
}
@Override
public void update(float tpf) {
super.update(tpf);
updateItems();
stateGuiNode.updateLogicalState(tpf);
stateGuiNode.updateGeometricState();
}
private void updateItems() {
HashMap<DebuggableBody, Node> oldObjects = labels;
labels = new HashMap<>();
Collection<DebuggableBody> current = bodies;
//create new map
for (Iterator<DebuggableBody> it = current.iterator(); it.hasNext(); ) {
DebuggableBody physicsObject = it.next();
//copy existing spatials
if (oldObjects.containsKey(physicsObject)) {
Node spat = oldObjects.get(physicsObject);
spat.setLocalTranslation((float) physicsObject.getTransform().getTranslation().x, (float) physicsObject.getTransform().getTranslation().y, 0);
labels.put(physicsObject, spat);
oldObjects.remove(physicsObject);
} else {
//if (filter == null || filter.displayObject(physicsObject))
{
//logger.log(Level.FINE, "Create new debug RigidBody");
//create new spatial
Geometry hudText = physicsObject.makeHitboxMarker(getApplication().getAssetManager(), stateGuiNode, physicsObject.color);
Node n = new Node();
//BitmapText hudText = new BitmapText(font, false);
//hudText.scale(0.01f);
//hudText.setSize(guiFont.getCharSet().getRenderedSize()); // font size
//hudText.setColor(ColorRGBA.Blue); // font color
//hudText.setText(physicsObject.toString()); // the text
n.attachChild(hudText);
n.setLocalTranslation((float) physicsObject.getTransform().getTranslation().x, (float) physicsObject.getTransform().getTranslation().y, 0); // position
//hudText.addControl(new BillboardControl());
labels.put(physicsObject, n);
stateGuiNode.attachChild(n);
}
}
}
//remove leftover spatials
for (Map.Entry<DebuggableBody, Node> entry : oldObjects.entrySet()) {
DebuggableBody object = entry.getKey();
Node spatial = entry.getValue();
spatial.removeFromParent();
}
}
public void setGuiNode(Node guiNode) {
guiNode.attachChild(stateGuiNode);
}
@Override
public void render(RenderManager rm) {
super.render(rm);
if (viewPort != null) {
rm.renderScene(stateGuiNode, viewPort);
}
}
@Override
protected void cleanup(Application app) {
}
@Override
protected void onEnable() {
}
@Override
protected void onDisable() {
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSMaterialControl.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.jme3.asset.AssetManager;
import com.jme3.material.Material;
import com.jme3.material.RenderState;
import com.jme3.math.ColorRGBA;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Geometry;
import com.jme3.scene.control.AbstractControl;
import com.jme3.texture.Texture;
import static com.pesegato.MonkeySheet.MonkeySheetAppState.log;
/**
* @author Pesegato
*/
public class MSMaterialControl extends AbstractControl {
Material material;
public String animation;
public int position;
MSSpriteControl msc;
private float alphaValue = 1;
private boolean flipped = false;
private boolean grayScale = false;
private float hueShift = 0;
private ColorRGBA fogColor = ColorRGBA.Pink;
private float fogIntensity = 0;
public MSMaterialControl(AssetManager assetManager, Geometry geo, MSContainer msCont, MSControl msc) {
material = new Material(assetManager, "MonkeySheet/MatDefs/Anim.j3md");
Texture[] sheetsX = new Texture[msCont.sheets.length];
for (int i = 0; i < msCont.sheets.length; i++) {
long start = System.currentTimeMillis();
long end;
log.trace("MonkeySheet: Now loading {}", msCont.sheets[i]);
sheetsX[i] = assetManager.loadTexture(msCont.sheets[i]);
end = System.currentTimeMillis();
log.trace("loaded {}", (end - start));
}
material.setFloat("SizeX", msCont.numTiles);
material.setFloat("SizeY", msCont.numTiles);
material.getAdditionalRenderState().setBlendMode(RenderState.BlendMode.Alpha);
for (MTween mt : MonkeySheetAppState.anis.values()) {
if (mt.msCont == msCont)
mt.setTextures(sheetsX);
}
geo.setMaterial(material);
geo.addControl(this);
this.msc = msc;
material.setFloat("Position", msc.anim.anim[msc.position].position);
material.setTexture("ColorMap", msc.anim.anim[msc.position].sheetX);
material.setFloat("FlipHorizontal", 0.0f);
material.setFloat("AlphaValue", 1.0f);
material.setColor("FogColor", fogColor);
material.setFloat("FogIntensity", 0.0f);
material.setFloat("HueShift", hueShift);
}
public MSMaterialControl(AssetManager assetManager, Geometry geo, MSContainer msCont) {
material = new Material(assetManager, "MonkeySheet/MatDefs/Anim.j3md");
Texture[] sheetsX = new Texture[msCont.sheets.length];
for (int i = 0; i < msCont.sheets.length; i++) {
long start = System.currentTimeMillis();
long end;
log.trace("MonkeySheet: Now loading {}", msCont.sheets[i]);
sheetsX[i] = assetManager.loadTexture(msCont.sheets[i]);
end = System.currentTimeMillis();
log.trace("loaded {}", (end - start));
}
material.setFloat("SizeX", msCont.numTiles);
material.setFloat("SizeY", msCont.numTiles);
material.getAdditionalRenderState().setBlendMode(RenderState.BlendMode.Alpha);
for (MTween mt : MonkeySheetAppState.anis.values()) {
if (mt.msCont == msCont)
mt.setTextures(sheetsX);
}
geo.setMaterial(material);
geo.addControl(this);
material.setFloat("FlipHorizontal", 0.0f);
material.setFloat("AlphaValue", 1.0f);
material.setColor("FogColor", fogColor);
material.setFloat("FogIntensity", 0.0f);
material.setFloat("HueShift", hueShift);
}
public MSMaterialControl setSprite(String sprite) {
this.msc = new MSSpriteControl(sprite);
material.setFloat("Position", msc.anim.anim[msc.position].position);
material.setTexture("ColorMap", msc.anim.anim[msc.position].sheetX);
return this;
}
public MSMaterialControl(AssetManager assetManager, MSContainer msCont, MSControl msc) {
material = new Material(assetManager, "MonkeySheet/MatDefs/Anim.j3md");
Texture[] sheetsX = new Texture[msCont.sheets.length];
for (int i = 0; i < msCont.sheets.length; i++) {
long start = System.currentTimeMillis();
long end;
log.trace("MonkeySheet: Now loading {}", msCont.sheets[i]);
sheetsX[i] = assetManager.loadTexture(msCont.sheets[i]);
end = System.currentTimeMillis();
log.trace("loaded {}", (end - start));
}
material.setFloat("SizeX", msCont.numTiles);
material.setFloat("SizeY", msCont.numTiles);
material.getAdditionalRenderState().setBlendMode(RenderState.BlendMode.Alpha);
for (MTween mt : MonkeySheetAppState.anis.values()) {
if (mt.msCont == msCont)
mt.setTextures(sheetsX);
}
this.msc = msc;
material.setFloat("Position", msc.anim.anim[msc.position].position);
material.setTexture("ColorMap", msc.anim.anim[msc.position].sheetX);
material.setFloat("FlipHorizontal", 0.0f);
material.setFloat("AlphaValue", 1.0f);
material.setColor("FogColor", fogColor);
material.setFloat("FogIntensity", 0.0f);
material.setFloat("HueShift", hueShift);
}
public Material getMaterial() {
return material;
}
public void initMaterial(Geometry geo) {
geo.setMaterial(material);
geo.addControl(this);
}
public void setVertexSheetPos(boolean b) {
material.setBoolean("VertexSheetPos", b);
}
@Override
protected void controlUpdate(float tpf) {
if (MonkeySheetAppState.tTPF == 0) {
if (msc.position >= msc.anim.anim.length) {
MSControl actionMsc = (MSControl) msc;
log.error("Error in animation, doing {} at position {}", actionMsc.msAction, msc.position);
}
material.setFloat("Position", msc.anim.anim[msc.position].position);
material.setTexture("ColorMap", msc.anim.anim[msc.position].sheetX);
}
}
public void setFlipped(boolean flipped) {
this.flipped = flipped;
material.setFloat("FlipHorizontal", flipped ? 1.0f : 0.0f);
}
public void setHueShift(float hueShift) {
this.hueShift = hueShift;
material.setFloat("HueShift", hueShift);
}
public void setAlpha(float alphaValue) {
this.alphaValue = alphaValue;
material.setFloat("AlphaValue", alphaValue);
}
public void setFogColor(ColorRGBA fogColor) {
this.fogColor = fogColor;
material.setColor("FogColor", fogColor);
}
public void setFogIntensity(float fogIntensity) {
this.fogIntensity = fogIntensity;
material.setFloat("FogIntensity", fogIntensity);
}
public void setGrayScale(boolean grayScale) {
this.grayScale = grayScale;
material.setBoolean("GrayScale", grayScale);
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/java/com/pesegato/collision/SampleD4J2.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.app.Application;
import com.jme3.app.SimpleApplication;
import com.jme3.app.state.BaseAppState;
import com.jme3.math.ColorRGBA;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Node;
import com.jme3.scene.control.AbstractControl;
import com.pesegato.collision.hitbox.HBCircle;
import com.pesegato.collision.hitbox.HBRect;
import org.dyn4j.dynamics.Body;
import org.dyn4j.geometry.MassType;
import org.dyn4j.geometry.Vector2;
public class SampleD4J2 extends SimpleApplication {
public static void main(String[] args) {
SampleD4J2 app = new SampleD4J2();
//app.setShowSettings(false);
app.start(); // start the game
}
D4JSpace2 d4j;
@Override
public void simpleInitApp() {
d4j = new D4JSpace2();
stateManager.attachAll(d4j,
new D4JSpaceDebugAppState(),
new MainAppState());
}
HBRect hbRect;
class MainAppState extends BaseAppState {
@Override
protected void initialize(Application app) {
getState(D4JSpaceDebugAppState.class).setGuiNode(guiNode);
float boxSize = .5f;
hbRect = new HBRect(1, boxSize, .5f);
hbRect.translate(200, 300);
hbRect.setColor(ColorRGBA.Blue);
d4j.add(hbRect, MassType.INFINITE, 2);
HBCircle hbRect2 = new HBCircle(3, 15);
hbRect2.translate(200, 200);
hbRect2.setColor(ColorRGBA.Red);
d4j.add(hbRect2, MassType.INFINITE, 4);
d4j.addListener(new MyCollisionListener());
}
public void update(float tpf) {
hbRect.translate(new Vector2(0, -5 * tpf));
}
@Override
protected void cleanup(Application app) {
}
@Override
protected void onEnable() {
}
@Override
protected void onDisable() {
}
}
class MyCollisionListener implements CollisionListener {
@Override
public void listen(long collider, long collided) {
System.out.println(collider + " collided with " + collided);
d4j.remove(hbRect);
}
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/extra/MSSharedQuartz.java<|end_filename|>
package com.pesegato.MonkeySheet.extra;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
import java.util.ArrayList;
public class MSSharedQuartz extends AbstractControl {
public float tTPF = 0;
float maxValue;
ArrayList<Tickable> tickables = new ArrayList<>();
ArrayList<Float> delays = new ArrayList<>();
int pointer = 0;
public MSSharedQuartz(float tTPF) {
this.tTPF = tTPF;
this.maxValue = tTPF;
}
public void addTickable(float delay, Tickable tickable) {
pointer = delays.size();
tickables.add(tickable);
delays.add(delay);
}
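// Note (observation, not part of the original code): controlUpdate below walks
// the tickables backwards from the most recently added one as tTPF counts down,
// so tickables appear to be expected in ascending delay order: the last one
// added (largest delay) fires first in each cycle of length maxValue, and the
// countdown restarts once the pointer wraps around.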
@Override
protected void controlUpdate(float tpf) {
tTPF -= tpf;
if (tTPF < delays.get(pointer)) {
tickables.get(pointer).tick();
pointer--;
if (pointer < 0) {
pointer = tickables.size() - 1;
tTPF = maxValue;
}
}
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/java/com/pesegato/goldmonkey/Animation.java<|end_filename|>
package com.pesegato.goldmonkey;
public class Animation {
public String id;
public int[] frames;
public int centerX;
public int centerY;
public Animation(String id, int[] frames, int centerX, int centerY){
this.id=id;
this.frames=frames;
this.centerX=centerX;
this.centerY=centerY;
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSFrame.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.jme3.texture.Texture;
/**
* @author Pesegato
*/
public class MSFrame {
int position;
int sheet;
Texture sheetX;
public MSFrame(int position, int sheet) {
this.position = position;
this.sheet = sheet;
}
public int getPosition() {
return position;
}
public void setTexture(Texture sheetX) {
this.sheetX = sheetX;
}
}
<|start_filename|>src/main/java/com/pesegato/collision/SampleD4J.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.app.Application;
import com.jme3.app.SimpleApplication;
import com.jme3.app.state.BaseAppState;
import com.jme3.math.ColorRGBA;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Node;
import com.jme3.scene.control.AbstractControl;
import com.pesegato.collision.hitbox.HBRect;
import org.dyn4j.dynamics.Body;
import org.dyn4j.geometry.Vector2;
public class SampleD4J extends SimpleApplication {
public static void main(String[] args){
SampleD4J app = new SampleD4J();
//app.setShowSettings(false);
app.start(); // start the game
}
Dyn4jMEAppState das;
@Override
public void simpleInitApp() {
das = new Dyn4jMEAppState();
stateManager.attachAll(das,new MainAppState());
}
class MainAppState extends BaseAppState{
@Override
protected void initialize(Application app) {
float boxSize = .5f;
HBRect hbRect=new HBRect(1,boxSize,.5f);
Node boxBlue= hbRect.getNode(assetManager, ColorRGBA.Blue);
boxBlue.setLocalTranslation(200, 300, 0);
guiNode.attachChild(boxBlue);
Dyn4JShapeControl physics = hbRect.getControl();
boxBlue.addControl(physics);
boxBlue.addControl(new Down(physics.getBody()));
das.getPhysicsSpace(0).add(boxBlue);
HBRect hbRect2=new HBRect(2,boxSize,.1f);
Node boxRed = hbRect2.getNode(assetManager, ColorRGBA.Red);
boxRed.setLocalTranslation(200, 200, 0);
guiNode.attachChild(boxRed);
Dyn4JShapeControl physics2 = hbRect2.getControl();
boxRed.addControl(physics2);
das.getPhysicsSpace(0).add(boxRed);
das.getPhysicsSpace(0).addListener(new MyCollisionListener(physics));
}
@Override
protected void cleanup(Application app) {
}
@Override
protected void onEnable() {
}
@Override
protected void onDisable() {
}
}
class MyCollisionListener implements CollisionListener{
Dyn4JShapeControl phy;
MyCollisionListener(Dyn4JShapeControl phy){
this.phy=phy;
}
@Override
public void listen(long collider, long collided) {
System.out.println(collider+" collided with "+collided);
D4JSpace space=das.getPhysicsSpace(0);
//phy.getSpatial().removeFromParent();
phy.removeFromWorld();
}
}
class Down extends AbstractControl{
Body body;
Down(Body body){
this.body=body;
}
@Override
protected void controlUpdate(float tpf) {
body.translate(new Vector2(0,-5*tpf));
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/batch/BGeometryBodyControl.java<|end_filename|>
package com.pesegato.MonkeySheet.batch;
import org.dyn4j.dynamics.Body;
@Deprecated
public abstract class BGeometryBodyControl extends BGeometryControl {
public float offsetX;
public float offsetY;
public float offsetAngle;
protected Body body;
protected BGeometryBodyControl(Body body, BGeometry bgeo) {
super(bgeo);
this.body = body;
}
@Override
protected void controlUpdate(float tpf) {
if (mustInit) {
binit();
mustInit = false;
}
bupdate(tpf);
bgeo.getTransform().setPosition(
(float) body.getTransform().getTranslationX() + offsetX,
(float) body.getTransform().getTranslationY() + offsetY);
bgeo.getTransform().setLocalRotation((float) body.getTransform().getRotationAngle() + offsetAngle);
bgeo.applyTransform();
//duration -= tpf;
//if (duration < 0) {
// setEnabled(false);
//}
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/actions/MSFiniteStateMachine.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet.actions;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
import com.pesegato.MonkeySheet.MonkeySheetAppState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Pesegato
*/
public abstract class MSFiniteStateMachine extends AbstractControl {
static Logger log = LoggerFactory.getLogger(MSFiniteStateMachine.class);
MSAction[] actions;
MSAction currentAction;
MSTransitionAction transitionAction;
boolean runInit = true;
public MSFiniteStateMachine(MSAction... actions) {
initActions(actions);
}
public void initActions(MSAction... actions) {
this.actions = actions;
}
@Override
protected void controlUpdate(float tpf) {
tpf *= MonkeySheetAppState.timeable.getClockspeed();
if (runInit) {
init();
runInit = false;
}
if (currentAction == null) {
{
msUpdate(tpf);
return;
}
}
if (MonkeySheetAppState.tTPF == 0) {
if (transitionAction != null) {
transitionAction.maybeEnd();
if (transitionAction.hasEnded)
transitionAction = null;
return;
}
if (currentAction.maybeEnd()) {
currentAction = null;
return;
}
}
msUpdate(tpf);
currentAction.controlUpdate(tpf);
}
protected <T extends MSAction> T startAction(Class<T> msActionClass) {
for (MSAction act : actions) {
if (msActionClass.isAssignableFrom(act.getClass())) {
startAction(act);
return (T) act;
}
}
return null;
}
private void startAction(MSAction action) {
log.trace("start action {}", action);
if (currentAction == action)
return;
if (transitionAction != null)
return;
if (currentAction != null) {
currentAction.interrupted();
transitionAction = currentAction.onInterruptAttempt();
if (transitionAction != null)
return;
}
currentAction = action;
currentAction.init(spatial);
}
abstract protected void init();
abstract protected void msUpdate(float tpf);
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/MyUnshaded.vert<|end_filename|>
#import "Common/ShaderLib/GLSL150Compat.glsllib"
#import "Common/ShaderLib/Skinning.glsllib"
#import "Common/ShaderLib/Instancing.glsllib"
attribute vec3 inPosition;
#if defined(HAS_COLORMAP) || (defined(HAS_LIGHTMAP) && !defined(SEPARATE_TEXCOORD))
#define NEED_TEXCOORD1
#endif
attribute vec2 inTexCoord;
attribute vec2 inTexCoord2;
attribute vec4 inColor;
varying vec2 texCoord1;
varying vec2 texCoord2;
varying vec4 vertColor;
void main(){
#ifdef NEED_TEXCOORD1
texCoord1 = inTexCoord;
#endif
#ifdef SEPARATE_TEXCOORD
texCoord2 = inTexCoord2;
#endif
#ifdef HAS_VERTEXCOLOR
vertColor = inColor;
#endif
vec4 modelSpacePos = vec4(inPosition, 1.0);
#ifdef NUM_BONES
Skinning_Compute(modelSpacePos);
#endif
gl_Position = TransformWorldViewProjection(modelSpacePos);
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/Anim.vert<|end_filename|>
uniform mat4 g_WorldViewProjectionMatrix;
uniform float m_SizeX;
uniform float m_SizeY;
uniform float m_Position;
uniform float m_FlipHorizontal;
attribute vec3 inPosition;
attribute vec2 inTexCoord;
attribute float inTexCoord2;
attribute float inTexCoord3;
varying float vAlpha;
varying vec2 texCoord;
void main(){
float t = m_Position;
#ifdef HAS_VERTEXSHEETPOS
t = inTexCoord2;
vAlpha = inTexCoord3;
#endif
float tPointerY = 1.0 - ((floor(t / m_SizeX)) / m_SizeY) - 1.0 / m_SizeY;
float tPointerYOffset = (floor(t / m_SizeX)) / m_SizeY;
float tPointerX = (t - (tPointerYOffset * m_SizeX * m_SizeY)) / m_SizeX;
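// Worked example (illustrative, assuming a hypothetical 4x4 sheet, i.e.
// m_SizeX = m_SizeY = 4): for frame t = 5, tPointerYOffset = floor(5/4)/4 = 0.25,
// tPointerX = (5 - 0.25*16)/4 = 0.25 and tPointerY = 1.0 - 0.25 - 0.25 = 0.5,
// so the quad samples the tile spanning x in [0.25, 0.5] and y in [0.5, 0.75]
// (second row from the top, second column).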
if (m_FlipHorizontal == 1.0 ) {
texCoord.x = ( 1.0 - inTexCoord.x ) / m_SizeX + tPointerX;
}
else {
texCoord.x = inTexCoord.x / m_SizeX + tPointerX;
}
texCoord.y = inTexCoord.y / m_SizeY + tPointerY;
gl_Position = g_WorldViewProjectionMatrix * vec4(inPosition, 1.0);
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MTween.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.jme3.texture.Texture;
/**
*
* @author Pesegato
*/
public class MTween {
public String name;
public MSFrame[] anim;
MSContainer msCont;
int[] hitbox;
int centerX, centerY;
public MTween(MSContainer msCont, String name, int[] pos, int[] hitbox, int size, int centerX, int centerY){
this.msCont=msCont;
anim=new MSFrame[pos.length];
int lastPos=0;
for (int i=0;i<pos.length;i++){
lastPos+=pos[i];
anim[i]=new MSFrame(lastPos%(size*size), lastPos/(size*size));
}
this.hitbox = hitbox;
this.centerX = centerX;
this.centerY = centerY;
}
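// Illustrative note (not part of the original code): pos holds frame-index
// deltas that are accumulated into lastPos; each absolute index is then split
// into an in-sheet position and a sheet number. For a hypothetical container
// of size 4 (16 tiles per sheet) and pos = {2, 1, 1, 13}, lastPos runs
// 2, 3, 4, 17, producing frames (2, sheet 0), (3, sheet 0), (4, sheet 0)
// and (1, sheet 1).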
public void setTextures(Texture[] sheetsX) {
int sheet = -1;
try {
for (MSFrame frame : anim) {
sheet = frame.sheet;
frame.sheetX = sheetsX[sheet];
}
} catch (java.lang.ArrayIndexOutOfBoundsException e) {
System.out.println("Missing sheet " + sheet + "!");
System.out.println("Unless you are using multisheet, you probably referenced frames outside of the sheet 0.");
System.out.println("For example, when using a Container with size 3, frames must be between 0 and 8!");
e.printStackTrace();
System.exit(-1);
}
}
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/Threshold.frag<|end_filename|>
uniform sampler2D m_ColorMap;
uniform sampler2D m_Threshold;
float threshold;
uniform float m_Level;
uniform bool m_ShowThreshold;
varying vec2 texCoord;
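// Effect summary (descriptive comment, not part of the original source):
// fragments whose value in the m_Threshold map is below m_Level are cut away,
// producing a dissolve/wipe driven by the threshold texture; when
// m_ShowThreshold is true, only a thin band just below the level (within 0.01)
// survives and is tinted, visualising the current cut edge.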
void main(){
threshold = texture2D(m_Threshold, texCoord).r;
if (threshold < m_Level)
if (m_ShowThreshold == false)
discard;
else
if (m_Level - threshold <0.01)
gl_FragColor = vec4(1.0, (m_Level - threshold)*100.0, 1.0, texture2D(m_ColorMap, texCoord).a);
else
discard;
else
gl_FragColor = texture2D(m_ColorMap, texCoord);
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/batch/BNode.java<|end_filename|>
package com.pesegato.MonkeySheet.batch;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.scene.VertexBuffer;
import com.jme3.util.BufferUtils;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import static com.jme3.scene.VertexBuffer.Type.*;
public class BNode {
Mesh mesh;
FloatBuffer posData, texData, msPosData, alphaData;
VertexBuffer posBuffer, texBuffer, msPosBuffer, alphaBuffer, idxBuffer;
IntBuffer idxData;
BGeometry[] quads;
int[] indexes;
ArrayList<Integer> slotBusy;
public BNode(int size) {
mesh = new Mesh();
quads = new BGeometry[size];
slotBusy = new ArrayList<>();
for (int i = 0; i < size; i++) {
slotBusy.add(i);
}
indexes = new int[6 * size];
posData = BufferUtils.createFloatBuffer(new Vector3f[4 * size]);
msPosData = BufferUtils.createFloatBuffer(new float[4 * size]);
alphaData = BufferUtils.createFloatBuffer(new float[4 * size]);
texData = BufferUtils.createFloatBuffer(new Vector2f[4 * size]);
idxData = BufferUtils.createIntBuffer(indexes);
mesh.setBuffer(Position, 3, posData);
mesh.setBuffer(TexCoord, 2, texData);
mesh.setBuffer(TexCoord2, 1, msPosData);
mesh.setBuffer(TexCoord3, 1, alphaData);
mesh.setBuffer(Index, 3, idxData);
posBuffer = mesh.getBuffer(Position);
texBuffer = mesh.getBuffer(TexCoord);
msPosBuffer = mesh.getBuffer(TexCoord2);
alphaBuffer = mesh.getBuffer(TexCoord3);
idxBuffer = mesh.getBuffer(Index);
}
public void remove(int idx) {
idxData.position(idx * 6);
idxData.put(0);
idxData.put(0);
idxData.put(0);
idxData.put(0);
idxData.put(0);
idxData.put(0);
slotBusy.add(idx);
}
public void removeAll() {
idxData.position(0);
for (int i = 0; i < quads.length * 6; i++) {
idxData.put(0);
}
slotBusy.clear();
for (int i = 0; i < quads.length; i++)
slotBusy.add(i);
updateData();
}
public int addQuad(float x, float y) {
int idx = getNextAvailableSlot();
if (idx == -1) {
System.err.println("No more free slot available for BGeometries on " + this + "!");
System.exit(-1);
}
return addReusableQuad(idx, x, y);
}
/**
* @return the index of an empty slot, or -1 if no slots are free
*/
public int getNextAvailableSlot() {
//int p=slotBusy.remove(0);
//System.out.println("using quad "+p);
return slotBusy.remove(0);
/*
for (int i = 0; i < slotBusy.length; i++) {
if (!slotBusy[i]) {
return i;
}
}
return -1;
*/
}
public int getDebugFreeSlot() {
return slotBusy.size();
/*
int count = 0;
for (int i = 0; i < slotBusy.length; i++) {
if (!slotBusy[i]) {
count++;
}
}
return count;
*/
}
/*
Allocates the quad at the provided index. Does NOT check the availability!
*/
public int addReusableQuad(int slotFreeIdx, float x, float y) {
//slotBusy[slotFreeIdx] = true;
quads[slotFreeIdx] = new BGeometry(this, slotFreeIdx, posData, texData, idxData, msPosData, alphaData);
quads[slotFreeIdx].getTransform().setPosition(x, y);
quads[slotFreeIdx].applyTransform();
texBuffer.updateData(texData);
posBuffer.updateData(posData);
alphaBuffer.updateData(alphaData);
idxBuffer.updateData(idxData);
return slotFreeIdx;
}
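// Usage sketch (illustrative, not part of the original code): callers that
// manage slots explicitly can pair getNextAvailableSlot with addReusableQuad:
//   int slot = bNode.getNextAvailableSlot();
//   bNode.addReusableQuad(slot, x, y);
//   ...
//   bNode.remove(slot); // returns the slot to the free list
// addQuad(float, float) above performs the same allocation in a single call.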
public void addQuad(int i, int x, int y) {
quads[i] = new BGeometry(this, i, posData, texData, idxData, msPosData, alphaData);
quads[i].getTransform().setPosition(x, y);
quads[i].applyTransform();
texBuffer.updateData(texData);
posBuffer.updateData(posData);
alphaBuffer.updateData(alphaData);
idxBuffer.updateData(idxData);
}
public void updateData() {
texBuffer.updateData(texData);
posBuffer.updateData(posData);
alphaBuffer.updateData(alphaData);
idxBuffer.updateData(idxData);
msPosBuffer.updateData(msPosData);
}
Geometry makeGeo() {
idxBuffer.updateData(idxData);
mesh.updateBound();
return new Geometry("batchedSpatial", mesh);
}
public BGeometry[] getQuads() {
return quads;
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSHitboxControl.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.control.AbstractControl;
/**
*
* @author Pesegato
*/
public class MSHitboxControl extends AbstractControl {
MSControl msc;
int currentHB = -1;
Geometry[] hitboxes;
public MSHitboxControl(MSControl msc, Geometry[] hitboxes) {
this.msc = msc;
this.hitboxes = hitboxes;
}
@Override
protected void controlUpdate(float tpf) {
if (currentHB != msc.getCurrentHitbox()) {
currentHB = msc.getCurrentHitbox();
Node n=(Node)spatial;
n.detachChildNamed("hitbox");
n.attachChild(hitboxes[currentHB]);
}
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/Threshold.vert<|end_filename|>
uniform mat4 g_WorldViewProjectionMatrix;
uniform float m_SizeX;
uniform float m_SizeY;
uniform float m_Position;
attribute vec3 inPosition;
attribute vec2 inTexCoord;
varying vec2 texCoord;
void main(){
/*
float t = m_Position;
float tPointerY = 1.0 - ((floor(m_Position / m_SizeX)) / m_SizeY) - 1.0 / m_SizeY;
float tPointerYOffset = (floor(t / m_SizeX)) / m_SizeY;
float tPointerX = (t - (tPointerYOffset * m_SizeX * m_SizeY)) / m_SizeX;
texCoord.x = inTexCoord.x / m_SizeX + tPointerX;
texCoord.y = inTexCoord.y / m_SizeY + tPointerY;
*/
texCoord.x = inTexCoord.x;
texCoord.y = inTexCoord.y;
/*
Nehon code
float t = fract(g_Time) * m_Speed;
texCoord.x = inTexCoord.x * m_InvSizeX + floor(t / m_InvSizeX) * m_InvSizeX;
float adjust = step(1.0,inTexCoord.x) * step(texCoord.x,floor(texCoord.x ));
texCoord.y = (1.0 - inTexCoord.y) * m_InvSizeY + floor(texCoord.x - adjust) * m_InvSizeY ;
*/
/* if(texCoord.y>1.0 && fract(texCoord.y)<0.5){
texCoord.y += 2* m_InvSizeY;
}*/
//texCoord = inTexCoord;
gl_Position = g_WorldViewProjectionMatrix * vec4(inPosition, 1.0);
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/batch/BNodeControl.java<|end_filename|>
package com.pesegato.MonkeySheet.batch;
import com.jme3.asset.AssetManager;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.control.AbstractControl;
import com.pesegato.MonkeySheet.MSContainer;
import com.pesegato.MonkeySheet.MSControl;
import com.pesegato.MonkeySheet.MSMaterialControl;
import com.pesegato.timing.Timeable;
public class BNodeControl extends AbstractControl {
BNode bNode;
BGeometry bGeometry[];
Geometry geometry;
MSMaterialControl msmc;
public BNodeControl(Node parent, int bufSize, String sheetName, Timeable timeable, MSContainer msContainer, AssetManager assetM) {
bNode = new BNode(bufSize);
bGeometry = bNode.getQuads();
geometry = bNode.makeGeo();
MSControl mscsb = new MSControl(sheetName, timeable);
geometry.addControl(mscsb);
msmc = new MSMaterialControl(assetM, geometry, msContainer, mscsb);
msmc.setVertexSheetPos(true);
parent.attachChild(geometry);
geometry.addControl(this);
}
public BGeometry getReusableQuad(float x, float y) {
int idx = bNode.getNextAvailableSlot();
return bNode.quads[bNode.addReusableQuad(idx, x, y)];
}
public void addControl(BGeometryControl control) {
geometry.addControl(control);
}
public void setZ(float z) {
geometry.setLocalTranslation(0, 0, z);
}
public Geometry getGeometry(){
return geometry;
}
public MSMaterialControl getMaterial(){
return msmc;
}
public int getBufSize() {
return bNode.quads.length;
}
public int getDebugFreeSlot() {
return bNode.getDebugFreeSlot();
}
public void destroy() {
geometry.removeFromParent();
}
@Override
protected void controlUpdate(float tpf) {
bNode.updateData();
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/actions/MSTransitionAction.java<|end_filename|>
package com.pesegato.MonkeySheet.actions;
public class MSTransitionAction extends MSAction {
@Override
protected void msUpdate(float tpf) {
}
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/Anim_1.frag<|end_filename|>
uniform sampler2D m_ColorMap;
uniform float g_Time;
uniform float m_HitTime;
uniform float m_AlphaValue;
uniform vec4 m_GlowColor;
uniform vec4 m_FogColor;
uniform float m_FogIntensity;
uniform float m_HueShift;
uniform bool m_GrayScale;
vec4 color;
varying vec2 texCoord;
varying float vAlpha;
vec3 rgb2hsv(vec3 c)
{
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hsv2rgb(vec3 c)
{
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
void main(){
color = texture2D(m_ColorMap, texCoord);
vec3 fragRGB = color.rgb;
vec3 fragHSV = rgb2hsv(fragRGB).xyz;
fragHSV.x += m_HueShift;
//fragHSV.x += vHSV.x / 360.0;
//fragHSV.yz *= vHSV.yz;
//fragHSV.xyz = mod(fragHSV.xyz, 1.0);
fragRGB = hsv2rgb(fragHSV);
/* color.a = color.r * 0.7f;
vec4 overlay = vec4(0.5,0.8,1.0,1.0);
//gl_FragColor = max((1.0 - ((1.0 - color) / overlay)), 0.0);
gl_FragColor = color * overlay;//
*/
//vec4 overlay = vec4(1.0,1.0,1.0,1.0);
//gl_FragColor = mix(color, overlay, m_HitTime);
color.rgb=fragRGB;
gl_FragColor = mix(color, m_FogColor, m_FogIntensity);
//gl_FragColor = color;
//gl_FragColor.r = color.r*m_Pulse;
//gl_FragColor.g = m_Pulse;
//gl_FragColor.g = m_Pulse;
//gl_FragColor = m_GlowColor;
if (m_GrayScale) {
float gray = dot(gl_FragColor.rgb, vec3(0.299, 0.587, 0.114));
gl_FragColor = vec4(vec3(gray), 1.0);
}
float t = m_AlphaValue;
#ifdef HAS_VERTEXSHEETPOS
t = vAlpha;
#endif
gl_FragColor.a = color.a*t;
}
<|start_filename|>src/main/java/com/pesegato/collision/IDyn4JControl.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.scene.control.AbstractControl;
import org.dyn4j.collision.broadphase.BroadphaseDetector;
@Deprecated
public abstract class IDyn4JControl extends AbstractControl{
void updatePhysics(BroadphaseDetector bp, float tpf){}
void updateDraw(float tpf){}
//void addToWorld(World world){}
void removeFromWorld(){}
void addToWorld(BroadphaseDetector broadphase){}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/extra/Tickable.java<|end_filename|>
package com.pesegato.MonkeySheet.extra;
public interface Tickable {
void tick();
}
<|start_filename|>src/main/java/com/pesegato/collision/D4JSpace2.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.app.Application;
import com.jme3.app.state.BaseAppState;
import com.jme3.scene.Spatial;
import com.jme3.scene.control.AbstractControl;
import org.dyn4j.collision.CollisionItem;
import org.dyn4j.collision.CollisionPair;
import org.dyn4j.collision.broadphase.*;
import org.dyn4j.collision.manifold.ClippingManifoldSolver;
import org.dyn4j.collision.manifold.ManifoldSolver;
import org.dyn4j.collision.narrowphase.Gjk;
import org.dyn4j.collision.narrowphase.NarrowphaseDetector;
import org.dyn4j.collision.narrowphase.Penetration;
import org.dyn4j.dynamics.Body;
import org.dyn4j.dynamics.BodyFixture;
import org.dyn4j.geometry.Convex;
import org.dyn4j.geometry.MassType;
import org.dyn4j.geometry.Transform;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* TODO Evaluate the new 4.1 API https://github.com/dyn4j/dyn4j/issues/144
*/
public class D4JSpace2 extends BaseAppState {
//private World world;
int initialBodyCapacity=1024;
final BroadphaseFilter<CollisionItem<Body, BodyFixture>> broadphaseFilter = new CollisionItemBroadphaseFilter<Body, BodyFixture>();
final AABBProducer<CollisionItem<Body, BodyFixture>> aabbProducer = new CollisionItemAABBProducer<Body, BodyFixture>();
final AABBExpansionMethod<CollisionItem<Body, BodyFixture>> expansionMethod = new StaticValueAABBExpansionMethod<CollisionItem<Body, BodyFixture>>(0.2);
final BroadphaseDetector<CollisionItem<Body, BodyFixture>> broadphase = new DynamicAABBTree<CollisionItem<Body, BodyFixture>>(
broadphaseFilter,
aabbProducer,
expansionMethod,
initialBodyCapacity);
final CollisionItemBroadphaseDetector<Body, BodyFixture> broadphaseDetector = new CollisionItemBroadphaseDetectorAdapter<Body, BodyFixture>(broadphase);
NarrowphaseDetector np;
ManifoldSolver ms;
ArrayList<DebuggableBody> bodies = new ArrayList<>();
String name = "unnamed space";
public void setName(String name) {
this.name = name;
}
@Override
protected void initialize(Application app) {
// world=new World();
// collision detection process:
// Broadphase -> Narrowphase -> Manifold generation
// create detection chain
np = new Gjk();
//NarrowphasePostProcessor npp = LinkPostProcessor(); // Only required if you use the Link shape
ms = new ClippingManifoldSolver();
broadphaseDetector.setUpdateTrackingEnabled(true);
}
@Override
protected void cleanup(Application app) {
}
@Override
protected void onEnable() {
}
@Override
protected void onDisable() {
}
public void add(DebuggableBody body, MassType massType, long id) {
for (BodyFixture bf : body.getFixtures())
bf.setUserData(id);
body.setMass(massType);
body.setAtRestDetectionEnabled(false);
broadphaseDetector.add(body);
bodies.add(body);
}
public void remove(DebuggableBody body) {
broadphaseDetector.remove(body);
bodies.remove(body);
}
float tTPF = 0;
public void update(float tpf) {
tTPF += tpf;
if (tTPF > 1 / 60f) {
tTPF = 0;
broadphase.update();
//System.out.println("Collisions for "+name);
// when ready to detect
List<CollisionPair<CollisionItem<Body, BodyFixture>>> pairs = broadphaseDetector.detect();
for (CollisionPair<CollisionItem<Body, BodyFixture>> pair : pairs) {
CollisionItem<Body, BodyFixture> first = pair.getFirst();
CollisionItem<Body, BodyFixture> second = pair.getSecond();
BodyFixture fixture1 = first.getFixture();
BodyFixture fixture2 = second.getFixture();
Transform transform1 = first.getBody().getTransform();
Transform transform2 = second.getBody().getTransform();
Convex convex2 = fixture2.getShape();
Convex convex1 = fixture1.getShape();
Penetration p = new Penetration();
if (np.detect(convex1, transform1, convex2, transform2, p)) {
//System.out.println("Collision " + fixture1.getUserData() + " " + fixture2.getUserData());
for (CollisionListener listener : listeners) {
listener.listen((Long) fixture1.getUserData(), (Long) fixture2.getUserData());
}
}
}
}
}
public boolean checkCollisionNP(Body a, Body b) {
for (BodyFixture bf1 : a.getFixtures()) {
for (BodyFixture bf2 : b.getFixtures()) {
if (np.detect(bf1.getShape(), a.getTransform(), bf2.getShape(), b.getTransform())) {
return true;
}
}
}
return false;
}
/*
Alternative solution, but this require the use of World class
public boolean checkCollisionAll(Body a, Body b){
return a.isInContact(b);
}
*/
/*
Another alternative
*/
public boolean checkCollisionAll(Body a, Body b) {
List<CollisionPair<CollisionItem<Body, BodyFixture>>> pairs = broadphaseDetector.detect();
for (CollisionPair<CollisionItem<Body, BodyFixture>> pair : pairs) {
if ((pair.getFirst().getBody()==a)&&(pair.getSecond().getBody()==b)||
(pair.getFirst().getBody()==b)&&(pair.getSecond().getBody()==a)) {
CollisionItem<Body, BodyFixture> first = pair.getFirst();
CollisionItem<Body, BodyFixture> second = pair.getSecond();
BodyFixture fixture1 = first.getFixture();
BodyFixture fixture2 = second.getFixture();
Transform transform1 = first.getBody().getTransform();
Transform transform2 = second.getBody().getTransform();
Convex convex2 = fixture2.getShape();
Convex convex1 = fixture1.getShape();
Penetration p = new Penetration();
if (np.detect(convex1, transform1, convex2, transform2, p)) {
return true;
}
}
}
return false;
}
ArrayList<CollisionListener> listeners = new ArrayList<>();
public void addListener(CollisionListener cl) {
listeners.add(cl);
}
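// Usage sketch (mirrors SampleD4J2 elsewhere in this repository, shown here for
// reference only): attach the state, register bodies with an id, then listen
// for narrowphase hits:
//   d4j.add(hbRect, MassType.INFINITE, 2);
//   d4j.add(hbCircle, MassType.INFINITE, 4);
//   d4j.addListener((collider, collided) -> System.out.println(collider + " hit " + collided));
// update(tpf) runs the broadphase/narrowphase chain at most about 60 times per
// second and notifies every registered CollisionListener with the colliding
// fixtures' user-data ids.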
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/extra/MSShadowControl.java<|end_filename|>
package com.pesegato.MonkeySheet.extra;
import com.jme3.math.Vector3f;
import com.jme3.scene.Spatial;
import com.pesegato.MonkeySheet.MSControl;
import com.pesegato.MonkeySheet.MSMaterialControl;
import com.pesegato.MonkeySheet.MonkeySheetAppState;
/**
* A shadow effect for MSControl
*/
public class MSShadowControl extends MSControl implements Tickable {
MSControl targetControl;
MSMaterialControl fxControl;
Spatial targetSpatial;
float currentAlpha = 1.0f;
float alphaDecay;
float offsetX;
float offsetY;
public MSShadowControl(MSControl targetControl, float delay, float offsetX, float offsetY) {
this.targetControl = targetControl;
this.targetSpatial = targetControl.getSpatial();
this.alphaDecay = 1f / delay;
this.position = targetControl.position;
this.anim = targetControl.anim;
this.offsetX = offsetX;
this.offsetY = offsetY;
}
public void setMaterial(MSMaterialControl fxControl) {
this.fxControl = fxControl;
}
public void update(float tpf) {
currentAlpha -= (alphaDecay * tpf);
fxControl.setAlpha(currentAlpha);
}
@Override
public void tick() {
log.debug("update {} {}", targetControl.animation, targetControl.position);
Vector3f trans = targetSpatial.getParent().getParent().getLocalTranslation();
spatial.setLocalTranslation(trans.x + offsetX, trans.y + offsetY, trans.z);
anim = MonkeySheetAppState.getAnim(targetControl.animation);
this.position = targetControl.position;
currentAlpha = 1.0f;
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/actions/MSBodyAction.java<|end_filename|>
package com.pesegato.MonkeySheet.actions;
import org.dyn4j.dynamics.Body;
import org.dyn4j.geometry.Transform;
import static com.pesegato.MonkeySheet.MSGlobals.SPRITE_SIZE;
@Deprecated
abstract public class MSBodyAction extends MSAction{
protected Body body;
public void setBody(Body body){
this.body=body;
}
/**
* This method moves the body by x * SPRITE_SIZE and y * SPRITE_SIZE
*
* @param x movement on the X
* @param y movement on the Y
*/
@Override
protected void moveSprite(float x, float y) {
Transform t=body.getTransform();
t.setTranslationX(t.getTranslationX()+(x * SPRITE_SIZE));
t.setTranslationY(t.getTranslationY()+(y * SPRITE_SIZE));
body.setTransform(t);
}
/**
* This method moves the body toward absolute target position finalX, finalY
* with speed factors x,y.
* For each axis, if speed is positive but target is behind then no movement is performed
* For each axis, if speed is negative but target is in front then no movement is performed
*
* @param x speed on the X
* @param y speed on the Y
* @param finalX target x coordinate
* @param finalY target y coordinate
* @return true if arrived at target position
*/
@Override
protected boolean moveFixSprite(float x, float y, float finalX, float finalY){
finalX=finalX*SPRITE_SIZE;
finalY=finalY*SPRITE_SIZE;
Transform t=body.getTransform();
float currentX= (float) t.getTranslationX();
float currentY= (float) t.getTranslationY();
float nextX=SPRITE_SIZE * x + currentX;
float nextY=SPRITE_SIZE * y + currentY;
if (x>0){
nextX=Math.min(nextX, finalX);
}
else if (x==0){
nextX=currentX;
}
else {
nextX=Math.max(nextX, finalX);
}
if (y>0){
nextY=Math.min(nextY, finalY);
}
else if (y==0){
nextY=currentY;
}
else {
nextY=Math.max(nextY, finalY);
}
t.setTranslationX(nextX);
t.setTranslationY(nextY);
body.setTransform(t);
return (nextX==finalX)&&(nextY==finalY);
}
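/*
 * Worked example (illustrative only; assumes a hypothetical SPRITE_SIZE of 64):
 * with the body at (100, 0), moveFixSprite(0.5f, 0, 2, 0) targets
 * finalX = 2 * 64 = 128 and computes nextX = 100 + 0.5 * 64 = 132, which is
 * clamped down to 128; the y axis stays at 0 because the speed is 0 and the
 * target is already reached, so the call returns true.
 */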
/**
* This method moves the body toward absolute target position finalX
* with speed factors x.
* If speed is positive but target is behind then no movement is performed
* If speed is negative but target is in front then no movement is performed
*
* @param x speed on the X
* @param finalX target x coordinate
* @return true if arrived at target position
*/
protected boolean moveFixXSprite(float x, float finalX){
finalX=finalX*SPRITE_SIZE;
Transform t=body.getTransform();
float currentX= (float) t.getTranslationX();
float nextX=SPRITE_SIZE * x + currentX;
if (x>0){
nextX=Math.min(nextX, finalX);
}
else if (x==0){
nextX=currentX;
}
else {
nextX=Math.max(nextX, finalX);
}
t.setTranslationX(nextX);
body.setTransform(t);
return nextX==finalX;
}
/**
* This method moves the body toward absolute target position finalY
* with speed factors y.
* If speed is positive but target is behind then no movement is performed
* If speed is negative but target is in front then no movement is performed
*
* @param y speed on the Y
* @param finalY target y coordinate
* @return true if arrived at target position
*/
protected boolean moveFixYSprite(float y, float finalY){
finalY=finalY*SPRITE_SIZE;
Transform t=body.getTransform();
float currentY= (float) t.getTranslationY();
float nextY=SPRITE_SIZE * y + currentY;
if (y>0){
nextY=Math.min(nextY, finalY);
}
else if (y==0){
nextY=currentY;
}
else {
nextY=Math.max(nextY, finalY);
}
t.setTranslationY(nextY);
body.setTransform(t);
return nextY==finalY;
}
/**
* This method tests if the body has reached the finalX position
* @param finalX x coordinate
* @return true if current position matches finalX
*/
protected boolean hasMovedFixXSprite(float finalX){
return (body.getTransform().getTranslationX()==finalX*SPRITE_SIZE);
}
/**
* This method tests if the body has reached the finalY position
* @param finalY y coordinate
* @return true if current position matches finalY
*/
protected boolean hasMovedFixYSprite(float finalY){
return (body.getTransform().getTranslationY()==finalY*SPRITE_SIZE);
}
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/Hueshift.frag<|end_filename|>
uniform sampler2D m_ColorMap;
uniform float g_Time;
uniform float m_HitTime;
uniform float m_AlphaValue;
uniform vec4 m_FogColor;
uniform float m_FogIntensity;
uniform float m_HueShift;
uniform bool m_GrayScale;
vec4 color;
varying vec2 texCoord;
vec3 rgb2hsv(vec3 c)
{
vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.x, c.r));
float d = q.x - min(q.w, q.y);
float e = 1.0e-10;
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
}
vec3 hsv2rgb(vec3 c)
{
vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
}
void main(){
/*
color = texture2D(m_ColorMap, texCoord);
gl_FragColor = mix(color, m_FogColor, m_FogIntensity);
gl_FragColor.a = color.a*m_AlphaValue;
*/
//vec3 vHSV = vec3(-100.0, 0.0, 0.0);
vec4 textureColor = texture2D(m_ColorMap, texCoord);
vec3 fragRGB = textureColor.rgb;
vec3 fragHSV = rgb2hsv(fragRGB).xyz;
fragHSV.x += m_HueShift;
//fragHSV.x += vHSV.x / 360.0;
//fragHSV.yz *= vHSV.yz;
//fragHSV.xyz = mod(fragHSV.xyz, 1.0);
fragRGB = hsv2rgb(fragHSV);
gl_FragColor = vec4(fragRGB, textureColor.w);
if (m_GrayScale) {
float gray = dot(gl_FragColor.rgb, vec3(0.299, 0.587, 0.114));
gl_FragColor = vec4(vec3(gray), gl_FragColor.a);
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSContainer.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.pesegato.goldmonkey.Animation;
import com.pesegato.goldmonkey.Container;
/**
* @author Pesegato
*/
public class MSContainer {
public int numTiles;
String[] sheets;
String name;
Container c;
public MSContainer(Container c) {
this.numTiles = c.size;
this.name = c.id;
setPath("Textures/MonkeySheet/");
this.c = c;
}
public Animation[] getAnimList() {
return MonkeySheetAppState.animationC.get(c);
}
public MSContainer setPath(String path) {
this.sheets = new String[]{path + MSGlobals.getCompression() + "/" + name + (MSGlobals.getExtension())};
return this;
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/batch/BTransform.java<|end_filename|>
package com.pesegato.MonkeySheet.batch;
import com.jme3.math.Vector2f;
public class BTransform {
float angle = 0;
float scale = 1;
Vector2f center = new Vector2f();
Vector2f offset = new Vector2f();
Vector2f trueOffset = new Vector2f();
public void setLocalRotation(float angle) {
this.angle = angle;
}
public void setLocalScale(float scale) {
this.scale = scale;
}
public void setOffset(Vector2f offset) {
this.offset = offset;
}
public void setTrueOffset(float x, float y) {
trueOffset.x = x;
trueOffset.y = y;
}
public Vector2f getLocalTranslation() {
return center;
}
public void setPosition(float x, float y) {
center.x = x;
center.y = y;
}
public void move(float x, float y) {
center.x += x;
center.y += y;
}
}
<|start_filename|>src/main/java/com/pesegato/collision/DebuggableBody.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.asset.AssetManager;
import com.jme3.math.ColorRGBA;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import org.dyn4j.dynamics.Body;
public abstract class DebuggableBody extends Body {
ColorRGBA color = ColorRGBA.Cyan;
public void setColor(ColorRGBA color) {
this.color = color;
}
public abstract Geometry makeHitboxMarker(AssetManager assetManager, Node n, ColorRGBA colH);
}
<|start_filename|>src/main/java/com/pesegato/collision/hitbox/HBCircle.java<|end_filename|>
package com.pesegato.collision.hitbox;
import com.jme3.asset.AssetManager;
import com.jme3.material.Material;
import com.jme3.material.RenderState;
import com.jme3.math.ColorRGBA;
import com.jme3.math.FastMath;
import com.jme3.scene.Geometry;
import com.jme3.scene.Node;
import com.jme3.scene.shape.Quad;
import com.pesegato.collision.DebuggableBody;
import com.pesegato.collision.Dyn4JShapeControl;
import org.dyn4j.collision.Filter;
import org.dyn4j.dynamics.BodyFixture;
import org.dyn4j.geometry.Circle;
import org.dyn4j.geometry.Convex;
import org.dyn4j.geometry.MassType;
import org.dyn4j.geometry.Rectangle;
import static com.pesegato.MonkeySheet.MSGlobals.SHOW_HITBOX;
import static com.pesegato.MonkeySheet.MSGlobals.SPRITE_SIZE;
public class HBCircle extends DebuggableBody {
String name = "";
int radius;
public long id;
public HBCircle(long id, int radius) {
this.id = id;
this.radius = radius;
addFixture(new BodyFixture(new Circle(radius)));
}
public HBCircle(String name, long id, int radius) {
this.name = name;
this.id = id;
this.radius = radius;
addFixture(new BodyFixture(new Circle(radius)));
}
public HBCircle(long id, Filter filter, int radius) {
this.id = id;
this.radius = radius;
BodyFixture bf=new BodyFixture(new Circle(radius));
bf.setFilter(filter);
addFixture(bf);
}
@Deprecated
public Node getNode(AssetManager assetM, ColorRGBA color) {
Node n = new Node();
if (SHOW_HITBOX) {
n.attachChild(makeHitboxMarker(assetM, n, color));
}
return n;
}
public Convex getConvex() {
return new Circle(radius);
}
@Override
public Geometry makeHitboxMarker(AssetManager assetManager, Node n, ColorRGBA color) {
Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
ColorRGBA col = color.clone();
col.a = .5f;
mat.setColor("Color", col);
mat.getAdditionalRenderState().setBlendMode(RenderState.BlendMode.Alpha);
Geometry geo = new Geometry(name, new Prism(radius , 5,12));
geo.setMaterial(mat);
geo.rotate(FastMath.PI/2,0,0);
return geo;
}
}
<|start_filename|>src/main/java/com/pesegato/timing/SimpleTimeable.java<|end_filename|>
package com.pesegato.timing;
public class SimpleTimeable implements Timeable{
@Override
public float getClockspeed() {
return 1;
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/batch/BGeometry.java<|end_filename|>
package com.pesegato.MonkeySheet.batch;
import com.jme3.math.Vector2f;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
public class BGeometry {
BNode parent;
public float QUAD_SIZE = 1;
float actualSize;
BTransform transform = new BTransform();
int bufPosition;
Vector2f ll = new Vector2f();
Vector2f lr = new Vector2f();
Vector2f ur = new Vector2f();
Vector2f ul = new Vector2f();
FloatBuffer vertexData, msPosData, alphaData;
IntBuffer idxData;
float[] vertices, msPos, alpha;
public BGeometry(BNode parent, int bufPosition, FloatBuffer vertexData, FloatBuffer texData, IntBuffer idxData, FloatBuffer msPosData, FloatBuffer alphaData) {
this.parent = parent;
this.bufPosition = bufPosition;
this.idxData = idxData;
this.vertexData = vertexData;
this.msPosData = msPosData;
this.alphaData = alphaData;
vertices = new float[12];
msPos = new float[4];
alpha = new float[4];
texData.position(bufPosition * 8);
texData.put(0);
texData.put(0);
texData.put(1);
texData.put(0);
texData.put(0);
texData.put(1);
texData.put(1);
texData.put(1);
idxData.position(6 * bufPosition);
int indexes[] = new int[6];
indexes[0] = 2 + 4 * bufPosition;
indexes[1] = 0 + 4 * bufPosition;
indexes[2] = 1 + 4 * bufPosition;
indexes[3] = 1 + 4 * bufPosition;
indexes[4] = 3 + 4 * bufPosition;
indexes[5] = 2 + 4 * bufPosition;
idxData.put(indexes, 0, 6);
alphaData.position(bufPosition * 4);
alphaData.put(1);
alphaData.put(1);
alphaData.put(1);
alphaData.put(1);
}
public void setSFrame(int newPos) {
msPosData.position(bufPosition * 4);
msPos[0] = newPos;
msPos[1] = newPos;
msPos[2] = newPos;
msPos[3] = newPos;
msPosData.put(msPos, 0, 4);
}
public void setAlpha(float a) {
alphaData.position(bufPosition * 4);
alphaData.put(a);
alphaData.put(a);
alphaData.put(a);
alphaData.put(a);
}
public void setQuadSize(float size) {
QUAD_SIZE = size;
}
public void removeFromParent() {
idxData.position(bufPosition * 6);
idxData.put(0);
idxData.put(0);
idxData.put(0);
idxData.put(0);
idxData.put(0);
idxData.put(0);
parent.slotBusy.add(bufPosition);
}
public BTransform getTransform() {
return transform;
}
public void applyTransform() {
vertexData.position(bufPosition * 12);
actualSize = QUAD_SIZE * transform.scale;
ll.x = (-actualSize / 2);
ll.y = (-actualSize / 2);
lr.x = (actualSize / 2);
lr.y = (-actualSize / 2);
ul.x = (-actualSize / 2);
ul.y = (actualSize / 2);
ur.x = (actualSize / 2);
ur.y = (actualSize / 2);
manage(ll);
manage(lr);
manage(ul);
manage(ur);
/*
ll.subtractLocal(transform.offset);
ll.rotateAroundOrigin(transform.angle, false);
ll.addLocal(transform.offset);
ll.addLocal(transform.center);
lr.subtractLocal(transform.offset);
lr.rotateAroundOrigin(transform.angle, false);
lr.addLocal(transform.offset);
lr.addLocal(transform.center);
ul.subtractLocal(transform.offset);
ul.rotateAroundOrigin(transform.angle, false);
ul.addLocal(transform.offset);
ul.addLocal(transform.center);
ur.subtractLocal(transform.offset);
ur.rotateAroundOrigin(transform.angle, false);
ur.addLocal(transform.offset);
ur.addLocal(transform.center);
*/
vertices[0] = ll.x;
vertices[1] = ll.y;
//vertices[2] = z;
vertices[3] = lr.x;
vertices[4] = lr.y;
//vertices[5] = z;
vertices[6] = ul.x;
vertices[7] = ul.y;
//vertices[8] = z;
vertices[9] = ur.x;
vertices[10] = ur.y;
//vertices[11] = z;
vertexData.put(vertices, 0, 12);
}
private void manage(Vector2f vx) {
vx.subtractLocal(transform.offset);
vx.rotateAroundOrigin(transform.angle, false);
vx.addLocal(transform.offset);
vx.addLocal(transform.center);
vx.addLocal(transform.trueOffset);
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/extra/PulseControl.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet.extra;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
import com.pesegato.MonkeySheet.MSMaterialControl;
/**
*
* An example effect for having a sprite 'pulsate' with a color.
*
* @author Pesegato
*/
public class PulseControl extends AbstractControl{
MSMaterialControl ms;
float pulseValue;
final float speed=4;
final float threshold=.5f;
public PulseControl(MSMaterialControl ms){
this.ms=ms;
}
boolean increasing=true;
@Override
protected void controlUpdate(float tpf) {
if (increasing){
pulseValue+=(speed*tpf);
if (pulseValue>threshold)
increasing=false;
}
else{
pulseValue-=(speed*tpf);
if (pulseValue<0)
increasing=true;
}
ms.setFogIntensity(pulseValue);
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
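/*
 * Usage sketch (not part of the original sources; the geometry, container and sprite
 * name below are illustrative assumptions): a PulseControl is attached next to an
 * existing MSMaterialControl and oscillates its fog intensity between 0 and ~0.5.
 *
 *     Geometry geo = MSGlobals.makeDefaultQuad(256, 256);
 *     MSMaterialControl mat = MSGlobals.makeSprite(assetManager, geo, heroContainer, "hero_idle");
 *     geo.addControl(new PulseControl(mat));
 *     guiNode.attachChild(geo);
 */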
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/actions/MSAction.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet.actions;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Spatial;
import com.jme3.scene.shape.Quad;
import com.pesegato.MonkeySheet.MSControl;
import static com.pesegato.MonkeySheet.MSGlobals.SPRITE_SIZE;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Pesegato
*/
public abstract class MSAction {
static Logger log = LoggerFactory.getLogger(MSAction.class);
protected float msTimer;
protected MSControl msc;
protected Spatial spatial;
public boolean hasEnded = false;
public boolean uncalledFinish = true;
public static Geometry createGeometry(String name, float scaleX, float scaleY) {
return new Geometry(name, new Quad(SPRITE_SIZE * scaleX, SPRITE_SIZE * scaleY));
}
/**
* This method moves the spatial by x * SPRITE_SIZE and y * SPRITE_SIZE
*
* @param x movement on the X
* @param y movement on the Y
*/
protected void moveSprite(float x, float y) {
spatial.move(SPRITE_SIZE * x, SPRITE_SIZE * y, 0);
}
/**
* This method moves the spatial toward absolute target position finalX, finalY
* with speed factors x,y.
* For each axis, if speed is positive but target is behind then no movement is performed
* For each axis, if speed is negative but target is in front then no movement is performed
*
* @param x speed on the X
* @param y speed on the Y
* @param finalX target x coordinate
* @param finalY target y coordinate
* @return true if arrived at target position
*/
protected boolean moveFixSprite(float x, float y, float finalX, float finalY) {
finalX=finalX*SPRITE_SIZE;
finalY=finalY*SPRITE_SIZE;
float currentX=spatial.getLocalTranslation().x;
float currentY=spatial.getLocalTranslation().y;
float nextX=SPRITE_SIZE * x + currentX;
float nextY=SPRITE_SIZE * y + currentY;
if (x>0){
nextX=Math.min(nextX, finalX);
}
else {
nextX=Math.max(nextX, finalX);
}
if (y>0){
nextY=Math.min(nextY, finalY);
}
else {
nextY=Math.max(nextY, finalY);
}
spatial.setLocalTranslation(nextX,nextY,0);
return (nextX==finalX)&&(nextY==finalY);
}
/**
* This method moves the spatial toward absolute target position finalX, finalY
* with speed factors x,y (specified in pixel units, not in SPRITE_SIZE cells).
* For each axis, if speed is positive but target is behind then no movement is performed
* For each axis, if speed is negative but target is in front then no movement is performed
*
* @param x speed on the X
* @param y speed on the Y
* @param finalX target x coordinate
* @param finalY target y coordinate
* @return true if arrived at target position
*/
protected boolean moveFixPixels(float x, float y, float finalX, float finalY) {
float currentX = spatial.getLocalTranslation().x;
float currentY = spatial.getLocalTranslation().y;
float nextX = x + currentX;
float nextY = y + currentY;
if (x > 0) {
nextX = Math.min(nextX, finalX);
} else {
nextX = Math.max(nextX, finalX);
}
if (y > 0) {
nextY = Math.min(nextY, finalY);
} else {
nextY = Math.max(nextY, finalY);
}
spatial.setLocalTranslation(nextX, nextY, 0);
return (nextX == finalX) && (nextY == finalY);
}
/**
* This method moves the spatial toward absolute target position finalX
* with speed factors x.
* If speed is positive but target is behind then no movement is performed
* If speed is negative but target is in front then no movement is performed
*
* @param x speed on the X
* @param finalX target x coordinate
* @return true if arrived at target position
* @deprecated use Sprite or Pixels
*/
protected boolean moveFixX(float x, float finalX){
finalX=finalX*SPRITE_SIZE;
float currentX=spatial.getLocalTranslation().x;
float currentY=spatial.getLocalTranslation().y;
float nextX=SPRITE_SIZE * x + currentX;
if (x>0){
nextX=Math.min(nextX, finalX);
}
else {
nextX=Math.max(nextX, finalX);
}
spatial.setLocalTranslation(nextX,currentY,0);
return nextX==finalX;
}
/**
* This method moves the spatial toward absolute target position finalX
* with speed factors x.
* If speed is positive but target is behind then no movement is performed
* If speed is negative but target is in front then no movement is performed
*
* @param x speed on the X
* @param finalX target x coordinate
* @return true if arrived at target position
*/
protected boolean moveFixXSprite(float x, float finalX) {
finalX = finalX * SPRITE_SIZE;
float currentX = spatial.getLocalTranslation().x;
float currentY = spatial.getLocalTranslation().y;
float nextX = SPRITE_SIZE * x + currentX;
if (x > 0) {
nextX = Math.min(nextX, finalX);
} else {
nextX = Math.max(nextX, finalX);
}
spatial.setLocalTranslation(nextX, currentY, 0);
return nextX == finalX;
}
/**
* This method moves the spatial toward absolute target position finalX
* with speed factors x.
* If speed is positive but target is behind then no movement is performed
* If speed is negative but target is in front then no movement is performed
*
* @param x speed on the X
* @param finalX target x coordinate
* @return true if arrived at target position
*/
protected boolean moveFixXPixels(float x, int finalX) {
int currentX = (int) spatial.getLocalTranslation().x;
int currentY = (int) spatial.getLocalTranslation().y;
float nextX = x + currentX;
if (x > 0) {
nextX = Math.min(nextX, finalX);
} else {
nextX = Math.max(nextX, finalX);
}
spatial.setLocalTranslation(nextX, currentY, 0);
return nextX == finalX;
}
/**
* This method moves the spatial toward absolute target position finalY
* with speed factors y.
* If speed is positive but target is behind then no movement is performed
* If speed is negative but target is in front then no movement is performed
*
* @param y speed on the Y
* @param finalY target y coordinate
* @return true if arrived at target position
* @deprecated use moveFixYPixels or moveFixYSprite instead
*/
protected boolean moveFixY(float y, float finalY){
finalY=finalY*SPRITE_SIZE;
float currentX=spatial.getLocalTranslation().x;
float currentY=spatial.getLocalTranslation().y;
float nextY=SPRITE_SIZE * y + currentY;
if (y>0){
nextY=Math.min(nextY, finalY);
}
else {
nextY=Math.max(nextY, finalY);
}
spatial.setLocalTranslation(currentX,nextY,0);
return nextY==finalY;
}
/**
* This method moves the spatial toward absolute target position finalY
* with speed factors y (multiplied by SPRITE_SIZE)
* If speed is positive but target is behind then no movement is performed
* If speed is negative but target is in front then no movement is performed
*
* @param y speed on the Y
* @param finalY target y coordinate
* @return true if arrived at target position
*/
protected boolean moveFixYSprite(float y, float finalY) {
finalY = finalY * SPRITE_SIZE;
float currentX = spatial.getLocalTranslation().x;
float currentY = spatial.getLocalTranslation().y;
float nextY = SPRITE_SIZE * y + currentY;
if (y > 0) {
nextY = Math.min(nextY, finalY);
} else {
nextY = Math.max(nextY, finalY);
}
spatial.setLocalTranslation(currentX, nextY, 0);
return nextY == finalY;
}
/**
* This method moves the spatial toward absolute target position finalY
* with speed factors y (specified in Pixel units).
* If speed is positive but target is behind then no movement is performed
* If speed is negative but target is in front then no movement is performed
*
* @param y speed on the Y
* @param finalY target y coordinate
* @return true if arrived at target position
*/
protected boolean moveFixYPixels(float y, int finalY) {
int currentX = (int) spatial.getLocalTranslation().x;
int currentY = (int) spatial.getLocalTranslation().y;
float nextY = y + currentY;
if (y > 0) {
nextY = Math.min(nextY, finalY);
} else {
nextY = Math.max(nextY, finalY);
}
spatial.setLocalTranslation(currentX, nextY, 0);
return nextY == finalY;
}
/**
* This method tests if the spatial has reached the finalX position
* @param finalX x coordinate
* @return true if current position matches finalX
*/
protected boolean hasMovedFixXSprite(float finalX){
return (spatial.getLocalTranslation().x==finalX*SPRITE_SIZE);
}
/**
* This method tests if the spatial has reached the finalX position
*
* @param finalX x coordinate
* @return true if current position matches finalX
*/
protected boolean hasMovedFixXPixels(float finalX) {
return (spatial.getLocalTranslation().x == finalX);
}
/**
* This method tests if the spatial has reached the finalX position
*
* @param finalX x coordinate
* @return true if current position matches finalX
* @deprecated use either Sprite or Pixels
*/
protected boolean hasMovedFixX(float finalX) {
return (spatial.getLocalTranslation().x == finalX * SPRITE_SIZE);
}
/**
* This method tests if the spatial has reached the finalY position (multiplied by SPRITE_SIZE)
* @param finalY y coordinate
* @return true if current position matches finalY
*/
protected boolean hasMovedFixYSprite(float finalY) {
return (spatial.getLocalTranslation().y==finalY*SPRITE_SIZE);
}
/**
* This method tests if the spatial has reached the finalY position (specified in Pixel units)
*
* @param finalY y coordinate
* @return true if current position matches finalY
*/
protected boolean hasMovedFixYPixels(float finalY) {
return (spatial.getLocalTranslation().y == finalY);
}
/**
* This method tests if the spatial has reached the finalY position
*
* @param finalY y coordinate
* @return true if current position matches finalY
* @deprecated use hasMovedFixYSprite or hasMovedFixYPixels instead
*/
protected boolean hasMovedFixY(float finalY) {
return (spatial.getLocalTranslation().y == finalY * SPRITE_SIZE);
}
protected Vector2f getUVector(Vector3f v, int x2, int y2) {
return new Vector2f(x2 - v.x, y2 - v.y).normalizeLocal();
}
protected void init(Spatial spatial) {
msTimer = 0;
hasEnded = false;
uncalledFinish = true;
this.spatial = spatial;
if (msc != null) {
msc.msAction = this;
}
whatPlay(msc);
init();
}
protected void controlUpdate(float tpf) {
msUpdate(tpf);
this.msTimer += tpf;
}
public void whatPlay(MSControl msc) {
}
abstract protected void msUpdate(float tpf);
protected boolean hasEnded() {
return hasEnded;
}
final public void terminatedAnim() {
hasEnded = true;
maybeEnd();
}
final public boolean maybeEnd() {
boolean ended=hasEnded();
log.trace("{} maybe end {} {}",msc, this, ended);
if (ended){
if (uncalledFinish) {
finish();
uncalledFinish = false;
}
}
return ended;
}
/**
* Called on start
*/
public void init() {
}
/**
* Called when trying to interrupt the current action
* @return true if can be interrupted
*/
public MSTransitionAction onInterruptAttempt(){
return null;
}
/**
* Called when switching to another MSAction before this is finished normally
*/
public void interrupted(){
}
/**
* Called when finishes normally
*/
public void finish() {
}
}
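/*
 * Minimal sketch of a concrete MSAction (not part of the original sources; the class
 * name and the chosen speeds are assumptions): msUpdate() drives the movement helpers
 * above, and setting hasEnded lets the owning action machine call finish() and move on.
 *
 *     public class WalkRightAction extends MSAction {
 *         @Override
 *         protected void msUpdate(float tpf) {
 *             // roughly one sprite cell per second on X, no Y movement, stop at cell x=5
 *             hasEnded = moveFixSprite(1f * tpf, 0f, 5f, 0f);
 *         }
 *
 *         @Override
 *         public void finish() {
 *             log.debug("reached the target column");
 *         }
 *     }
 */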
<|start_filename|>src/main/java/com/pesegato/collision/Dyn4jMEAppState.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.app.Application;
import com.jme3.app.state.BaseAppState;
@Deprecated
public class Dyn4jMEAppState extends BaseAppState {
D4JSpace spaces[];
public D4JSpace getPhysicsSpace(int index) {
return spaces[index];
}
@Override
protected void initialize(Application app) {
spaces = new D4JSpace[]{new D4JSpace(), new D4JSpace(), new D4JSpace()};
spaces[0].setName("Plane 0");
spaces[1].setName("Plane 1");
spaces[2].setName("Plane 2");
}
@Override
public void update(float tpf) {
updatePhysics(tpf);
updateDraw(tpf);
}
public void updateDraw(float tpf) {
for (D4JSpace space : spaces)
space.updateDraw(tpf);
}
public void updatePhysics(float tpf) {
for (D4JSpace space : spaces)
space.updatePhysics(tpf);
}
@Override
protected void cleanup(Application app) {
}
@Override
protected void onEnable() {
}
@Override
protected void onDisable() {
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSSpriteControl.java<|end_filename|>
package com.pesegato.MonkeySheet;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The most basic sprite
*/
public class MSSpriteControl extends AbstractControl {
public static Logger log = LoggerFactory.getLogger(MSControl.class);
public MTween anim;
public String animation;
public int position;
MSSpriteControl() {
}
public MSSpriteControl(String anim) {
this.anim = MonkeySheetAppState.getAnim(anim);
this.position = 0;
}
public MSSpriteControl(MTween anim, int position) {
this.anim = anim;
this.position = position;
}
@Override
protected void controlUpdate(float tpf) {
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/Anim.frag<|end_filename|>
uniform sampler2D m_ColorMap;
uniform float g_Time;
uniform float m_HitTime;
uniform float m_AlphaValue;
uniform vec4 m_GlowColor;
uniform vec4 m_FogColor;
uniform float m_FogIntensity;
vec4 color;
varying vec2 texCoord;
void main(){
color = texture2D(m_ColorMap, texCoord);
/* color.a = color.r * 0.7f;
vec4 overlay = vec4(0.5,0.8,1.0,1.0);
//gl_FragColor = max((1.0 - ((1.0 - color) / overlay)), 0.0);
gl_FragColor = color * overlay;//
*/
vec4 overlay = vec4(1.0,1.0,1.0,1.0);
gl_FragColor = mix(color, overlay, m_HitTime);//
gl_FragColor = mix(m_FogColor, color, m_FogIntensity);
//gl_FragColor = color;
//gl_FragColor.r = color.r*m_Pulse;
//gl_FragColor.g = m_Pulse;
//gl_FragColor.g = m_Pulse;
//gl_FragColor = m_GlowColor;
gl_FragColor.a = color.a*m_AlphaValue;
}
<|start_filename|>src/main/java/com/pesegato/timing/Timeable.java<|end_filename|>
package com.pesegato.timing;
public interface Timeable {
/**
*
* Implement this interface to have configurable timing.
*
* Time factor:
* 0: time stops
* 1: time runs normally
* <1: time runs slower
* >1: time runs faster
*
* @return time factor
*/
float getClockspeed();
}
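/*
 * Sketch (not part of the original sources): a Timeable used as a global slow-motion
 * switch. Since the returned factor multiplies tpf, 0.5 makes animations and actions
 * advance at half speed; install it via the public MonkeySheetAppState.timeable field.
 *
 *     public class SlowMotionTimeable implements Timeable {
 *         @Override
 *         public float getClockspeed() {
 *             return 0.5f;
 *         }
 *     }
 *
 *     MonkeySheetAppState.timeable = new SlowMotionTimeable();
 */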
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MonkeySheetAppState.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.google.common.base.Charsets;
import com.google.common.io.Resources;
import com.google.gson.Gson;
import com.jme3.app.Application;
import com.jme3.app.state.BaseAppState;
import com.pesegato.goldmonkey.Animation;
import com.pesegato.goldmonkey.Container;
import com.pesegato.goldmonkey.GM;
import com.pesegato.timing.SimpleTimeable;
import com.pesegato.timing.Timeable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileNotFoundException;
import java.util.HashMap;
/**
*
* @author Pesegato
*/
public class MonkeySheetAppState extends BaseAppState {
static Logger log = LoggerFactory.getLogger( MonkeySheetAppState.class );
private static float tickDuration=0.025f;
public static float tTPF = 0;
public static Timeable timeable=new SimpleTimeable();
@Override
protected void initialize(Application app) {
getStateManager().attach(new MSTimerAppState());
logBuildInfo();
}
static HashMap<String, MTween> anis = new HashMap<>();
public static MTween getAnim(String name){
return anis.get(name);
}
public static int getCenterX(String name){
return anis.get(name).centerX;
}
public static int getCenterY(String name){
return anis.get(name).centerY;
}
public void addAnim(MSContainer msCont, String name, int ani[], int hitbox[], int centerX, int centerY) {
anis.put(name,new MTween(msCont, name, ani, hitbox, msCont.numTiles, centerX, centerY));
}
public MSContainer initializeContainer(String name) {
Container c = MonkeySheetAppState.getContainer(name);
MSContainer container = new MSContainer(c);
for (Animation anim : animationC.get(c)) {
loadAnim(container, anim.id);
}
return container;
}
static HashMap<String, Container> containers;
static HashMap<Container, Animation[]> animationC;
static Container getContainer(String id) {
if (containers == null) {
try {
containers = new HashMap<>();
animations = new HashMap<>();
animationC = new HashMap<>();
Container[] data = new Gson().fromJson(GM.getJSON("MonkeySheet/Containers"), Container[].class);
for (Container obj : data) {
containers.put(obj.id, obj);
Animation[] aData = new Gson().fromJson(GM.getJSON("MonkeySheet/Animations/" + obj.id), Animation[].class);
for (Animation aniObj : aData) {
animations.put(aniObj.id, aniObj);
}
animationC.put(obj, aData);
}
} catch (FileNotFoundException ex) {
log.error(null, ex);
}
}
return containers.get(id);
}
static HashMap<String, Animation> animations;
public void loadAnim(MSContainer container, String anim){
addAnim(container,anim,animations.get(anim).frames,null,animations.get(anim).centerX,animations.get(anim).centerY);
}
@Override
public void update(float tpf){
tTPF += tpf*timeable.getClockspeed();
if (tTPF > tickDuration) {
tTPF = 0;
}
}
public static void setTickDuration(float tickDuration){
MonkeySheetAppState.tickDuration=tickDuration;
}
@Override
protected void cleanup(Application app) {
}
@Override
protected void onEnable() {
}
@Override
protected void onDisable() {
}
private static final String LIBNAME="MonkeySheet";
protected void logBuildInfo() {
try {
java.net.URL u = Resources.getResource(LIBNAME+".build.date");
String build = Resources.toString(u, Charsets.UTF_8);
log.info("MonkeySheet build date: " + build);
log.info("MonkeySheet build version: " + Resources.toString(Resources.getResource(LIBNAME+".build.version"), Charsets.UTF_8));
} catch( java.io.IOException e ) {
log.error( "Error reading build info", e );
}
}
}
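/*
 * Setup sketch (not part of the original sources; "hero" and "hero_idle" are assumed
 * ids): attach the app state from a SimpleApplication, load a container described by
 * the MonkeySheet/Containers and MonkeySheet/Animations/<id> JSON resources, then look
 * up animations by id.
 *
 *     MonkeySheetAppState ms = new MonkeySheetAppState();
 *     getStateManager().attach(ms);
 *     MSContainer hero = ms.initializeContainer("hero");   // registers every Animation of "hero"
 *     MTween idle = MonkeySheetAppState.getAnim("hero_idle");
 *     MonkeySheetAppState.setTickDuration(0.04f);          // one animation tick every 0.04 s
 */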
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSGlobals.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.google.common.base.Charsets;
import com.google.common.io.Resources;
import com.jme3.asset.AssetManager;
import com.jme3.material.Material;
import com.jme3.material.RenderState;
import com.jme3.scene.Geometry;
import com.jme3.scene.Spatial;
import com.jme3.scene.shape.Quad;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author Pesegato
*/
public class MSGlobals {
static Logger log = LoggerFactory.getLogger(MSGlobals.class);
public enum COMPRESSION_TYPE {
NONE,
BC7,
DXT5,
ASTC,
ETC2
}
public static final int SPRITE_SIZE = 256;
public static final int MS_WIDTH_480P = 720;
public static final int MS_HEIGHT_480P = 480;
public static final int MS_WIDTH_576P = 720;
public static final int MS_HEIGHT_576P = 576;
public static final int MS_WIDTH_720P = 1280;
public static final int MS_HEIGHT_720P = 720;
public static final int MS_WIDTH_1080P = 1920;
public static final int MS_HEIGHT_1080P = 1080;
public static int MS_WIDTH;
public static int MS_HEIGHT;
public static boolean SHOW_HITBOX = true;
private static COMPRESSION_TYPE USE_COMPRESSION = COMPRESSION_TYPE.NONE;
public static void setCompressedTexturesEnabled(COMPRESSION_TYPE compression) {
USE_COMPRESSION = compression;
}
public static COMPRESSION_TYPE getCompression() {
return USE_COMPRESSION;
}
public static String getExtension() {
switch (USE_COMPRESSION) {
case NONE:
return ".png";
case BC7:
case DXT5:
return ".dds";
case ASTC:
return ".astc";
case ETC2:
return ".etc";
}
return null;
}
public static String getComment(COMPRESSION_TYPE type) {
switch (type) {
case NONE:
return "Memory hungry";//&slow
case BC7:
return "Needs DirectX11";
case DXT5:
return "Looks bad";
case ASTC:
return "Unavailable";
case ETC2:
return "Unavailable";//Raspberry Pi?
}
return null;
}
public static void setResolution(String res) {
switch (res) {
case "480p":
MS_WIDTH = MS_WIDTH_480P;
MS_HEIGHT = MS_HEIGHT_480P;
break;
case "576p":
MS_WIDTH = MS_WIDTH_576P;
MS_HEIGHT = MS_HEIGHT_576P;
break;
case "720p":
MS_WIDTH = MS_WIDTH_720P;
MS_HEIGHT = MS_HEIGHT_720P;
break;
case "1080p":
MS_WIDTH = MS_WIDTH_1080P;
MS_HEIGHT = MS_HEIGHT_1080P;
break;
default:
log.error("Resolution unsupported: {}", res);
System.exit(1);
}
log.info("Resolution set to: {}", res);
}
public static Geometry makeDefaultQuad(int sizeX, int sizeY) {
return new Geometry("MSQuad", new Quad(sizeX, sizeY));
}
public static Material createDefaultMaterialWithAlpha(AssetManager assetManager, Geometry geo, String texture) {
Material material = createDefaultMaterialWithAlpha(assetManager, geo);
material.setTexture("ColorMap", assetManager.loadTexture(texture));
return material;
}
public static Material createDefaultMaterialWithAlpha(AssetManager assetManager, Geometry geo) {
Material material = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
material.getAdditionalRenderState().setBlendMode(RenderState.BlendMode.Alpha);
geo.setMaterial(material);
return material;
}
public static Material createDefaultMaterialNoAlpha(AssetManager assetManager, Geometry geo, String texture) {
Material material = createDefaultMaterialNoAlpha(assetManager, geo);
material.setTexture("ColorMap", assetManager.loadTexture(texture));
return material;
}
public static Material createDefaultMaterialNoAlpha(AssetManager assetManager, Geometry geo) {
Material material = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
geo.setMaterial(material);
return material;
}
public static MSMaterialControl makeSprite(AssetManager assetManager, Geometry geo, MSContainer container, String sprite) {
return new MSMaterialControl(assetManager, geo, container).setSprite(sprite);
}
/**
* This method moves the spatial toward absolute target position finalX, finalY
* with speed factors x,y (specified in pixel units, not in SPRITE_SIZE cells).
* For each axis, if speed is positive but target is behind then no movement is performed
* For each axis, if speed is negative but target is in front then no movement is performed
*
* @param spatial the Spatial to be moved
* @param x speed on the X
* @param y speed on the Y
* @param finalX target x coordinate
* @param finalY target y coordinate
* @return true if arrived at target position
*/
public static boolean simpleMoveFixPixels(Spatial spatial, float x, float y, float finalX, float finalY) {
float currentX = spatial.getLocalTranslation().x;
float currentY = spatial.getLocalTranslation().y;
float nextX = x + currentX;
float nextY = y + currentY;
if (x > 0) {
nextX = Math.min(nextX, finalX);
} else {
nextX = Math.max(nextX, finalX);
}
if (y > 0) {
nextY = Math.min(nextY, finalY);
} else {
nextY = Math.max(nextY, finalY);
}
spatial.setLocalTranslation(nextX, nextY, 0);
return (nextX == finalX) && (nextY == finalY);
}
/**
* This method scales the spatial toward target scale
* with speed factors x,y.
* For each axis, if speed is positive but target is smaller then no scale is performed
* For each axis, if speed is negative but target is bigger then no scale is performed
*
* @param spatial the Spatial to be moved
* @param x speed on the X
* @param y speed on the Y
* @param finalX target x scale
* @param finalY target y scale
* @return true if arrived at target scale
*/
public static boolean simpleScaleFixPixels(Spatial spatial, float x, float y, float finalX, float finalY) {
float currentX = spatial.getLocalScale().getX();
float currentY = spatial.getLocalScale().getY();
float nextX = x + currentX;
float nextY = y + currentY;
if (x > 0) {
nextX = Math.min(nextX, finalX);
} else {
nextX = Math.max(nextX, finalX);
}
if (y > 0) {
nextY = Math.min(nextY, finalY);
} else {
nextY = Math.max(nextY, finalY);
}
spatial.setLocalScale(nextX, nextY, 0);
return (nextX == finalX) && (nextY == finalY);
}
protected void logBuildInfo() {
try {
java.net.URL u = Resources.getResource("monkeysheet.build.date");
String build = Resources.toString(u, Charsets.UTF_8);
log.info("MonkeySheet build date: " + build);
} catch (java.io.IOException e) {
log.error("Error reading build info", e);
}
}
}
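/*
 * Sketch of building a sprite quad with the helpers above (not part of the original
 * sources; heroContainer and the animation id are assumptions):
 *
 *     MSGlobals.setResolution("720p");                                          // MS_WIDTH/MS_HEIGHT = 1280x720
 *     MSGlobals.setCompressedTexturesEnabled(MSGlobals.COMPRESSION_TYPE.NONE);  // plain .png sheets
 *     Geometry quad = MSGlobals.makeDefaultQuad(SPRITE_SIZE, SPRITE_SIZE);
 *     MSMaterialControl sprite = MSGlobals.makeSprite(assetManager, quad, heroContainer, "hero_idle");
 *     guiNode.attachChild(quad);
 */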
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSMaterialPlugin.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
/**
*
* @author Pesegato
*/
public abstract class MSMaterialPlugin extends AbstractControl{
public MSMaterialControl msmc;
public MSMaterialPlugin(MSMaterialControl msmc){
this.msmc=msmc;
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/batch/BGeometryControl.java<|end_filename|>
package com.pesegato.MonkeySheet.batch;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
public abstract class BGeometryControl extends AbstractControl {
protected BGeometry bgeo;
boolean mustInit = true;
protected BGeometryControl(BGeometry bgeo) {
this.bgeo = bgeo;
}
protected BGeometryControl(BNodeControl bnc, float posX, float posY) {
this.bgeo = bnc.getReusableQuad(posX, posY);
}
abstract protected void binit();
abstract protected void bupdate(float tpf);
@Override
protected void controlUpdate(float tpf) {
if (mustInit) {
binit();
mustInit = false;
}
bupdate(tpf);
bgeo.applyTransform();
//duration -= tpf;
//if (duration < 0) {
//setEnabled(false);
//}
}
@Override
public void setEnabled(boolean enabled) {
this.enabled = enabled;
if (!enabled) {
bgeo.removeFromParent();
spatial.removeControl(this);
}
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
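/*
 * Sketch of a concrete BGeometryControl (not part of the original sources; the class
 * name, batch control and positions are assumptions): it takes a reusable quad from a
 * BNodeControl, pins it in binit() and scrolls it left in bupdate(); the base class
 * then calls applyTransform() every frame.
 *
 *     public class ScrollingQuadControl extends BGeometryControl {
 *         public ScrollingQuadControl(BNodeControl batch) {
 *             super(batch, 0, 0);
 *         }
 *
 *         @Override
 *         protected void binit() {
 *             bgeo.setSFrame(0);                        // first cell of the sheet
 *             bgeo.getTransform().setPosition(300, 100);
 *         }
 *
 *         @Override
 *         protected void bupdate(float tpf) {
 *             bgeo.getTransform().move(-60 * tpf, 0);   // about 60 units per second to the left
 *         }
 *     }
 */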
<|start_filename|>src/main/java/com/pesegato/collision/Dyn4JShapeControl.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.Spatial;
import com.pesegato.collision.hitbox.HBRect;
import org.dyn4j.collision.CategoryFilter;
import org.dyn4j.collision.broadphase.BroadphaseDetector;
import org.dyn4j.dynamics.Body;
import org.dyn4j.dynamics.BodyFixture;
import org.dyn4j.geometry.Convex;
import org.dyn4j.geometry.MassType;
import org.dyn4j.geometry.Transform;
import org.dyn4j.geometry.Vector2;
@Deprecated
public class Dyn4JShapeControl extends IDyn4JControl {
protected Body body;
BodyFixture fixture;
//private World world;
BroadphaseDetector broadphase;
HBRect hbRect;
public Dyn4JShapeControl(Convex shape,
MassType massType,
HBRect hbRect
) {
this.hbRect=hbRect;
body = new Body();
fixture = new BodyFixture(shape);
body.addFixture(fixture);
body.setMass(massType);
body.setAtRestDetectionEnabled(false);
fixture.setUserData(hbRect.id);
}
public void setFilter(CategoryFilter cf){
fixture.setFilter(cf);
}
public Dyn4JShapeControl(Body body,
MassType massType,
long id
) {
this.body = body;
for (BodyFixture bf:body.getFixtures())
bf.setUserData(id);
body.setMass(massType);
body.setAtRestDetectionEnabled(true);
}
Dyn4JShapeControl(Convex shape,
MassType massType,
Double weight, //in kg/m
Double friction, // low = more slippery
Double restitution// more = more bouncy
) {
body = new Body();
fixture = new BodyFixture(shape);
fixture.setFriction(friction);
fixture.setRestitution(restitution);
fixture.setDensity(weight);
body.addFixture(fixture);
body.setMass(massType);
body.setAtRestDetectionEnabled(true);
}
public void addToWorld(BroadphaseDetector broadphase) {
this.broadphase = broadphase;
broadphase.add(body);
}
public void removeFromWorld() {
this.broadphase.remove(body);
}
/*
@Override
void addToWorld(World world) {
this.world = world;
world.addBody(body);
}
@Override
public void removeFromWorld() {
if (world==null)
return;
boolean removed=this.world.removeBody(body);
System.out.println("removed "+removed);
BroadphaseDetector bp = world.getBroadphaseDetector();
boolean stillThere = bp.contains(body);
for (BodyFixture fixture : body.getFixtures()) {
stillThere |= bp.contains(body, fixture);
}
if (stillThere) {
// I would need to see more code around the way the body is being removed
System.out.println("still there");
}
this.world=null;
}
*/
// more = more bouncy
void setRestitution(Double restitution) {
fixture.setRestitution(restitution);
}
// more = in kg/m
void setDensity(Double kg) {
fixture.setDensity(kg);
}
// low = more slippery
void setFriction(Double friction) {
fixture.setFriction(friction);
}
@Override
public void setSpatial(Spatial spatial) {
this.spatial = spatial;
body.translate(new Double(spatial.getLocalTranslation().x), new Double(spatial.getLocalTranslation().y));
//TODO: set initial rotation of the dyn4j-Body
}
@Override
protected void controlUpdate(float tpf) {
//Dyn4JAppState handles everything
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
public Body getBody(){
return body;
}
private Double lastAngle=-1d;
private Transform lastTransform = new Transform();
private final static Float negligibleAngleRotation = 0.001f;
void updatePhysics(BroadphaseDetector bp, float tpf){
if (bp.contains(body))
bp.update(body);
}
void updateDraw(float tpf) {
Vector2 vector2 = body.getTransform().getTranslation();
this.spatial.setLocalTranslation(
new Float(vector2.x),
new Float(vector2.y), 0f);
Transform transform = body.getTransform();
if (transform.getTranslation().x == lastTransform.getTranslation().x &&
transform.getTranslation().y == lastTransform.getTranslation().y) {
this.spatial.setLocalTranslation(
new Vector3f(
new Float(transform.getTranslation().x),
new Float(transform.getTranslation().y),
0f));
lastTransform=transform;
}
double angle = body.getTransform().getRotationAngle();
if (angle != lastAngle) {
Quaternion roll = new Quaternion();
roll.fromAngleAxis( new Float(angle) , Vector3f.UNIT_Z);
this.spatial.setLocalRotation(roll);
lastAngle = angle;
}
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/actions/MSSingleActionMachine.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet.actions;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
/**
*
* @author Pesegato
*/
public class MSSingleActionMachine extends AbstractControl {
MSAction action;
boolean inited=false;
public MSSingleActionMachine(MSAction action) {
this.action = action;
}
@Override
protected void controlUpdate(float tpf) {
if (!inited){
action.init(spatial);
inited=true;
}
action.controlUpdate(tpf);
if (action.hasEnded())
setEnabled(false);
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
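/*
 * Usage sketch (not part of the original sources; WalkRightAction is a hypothetical
 * MSAction subclass): the control initializes the action on its first update and
 * disables itself once the action reports it has ended.
 *
 *     spatial.addControl(new MSSingleActionMachine(new WalkRightAction()));
 */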
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/Particle2d.vert<|end_filename|>
uniform mat4 g_WorldViewProjectionMatrix;
attribute vec3 inPosition;
attribute vec4 inColor;
attribute vec4 inTexCoord;
varying vec4 color;
#ifdef USE_TEXTURE
varying vec4 texCoord;
#endif
#ifdef POINT_SPRITE
uniform mat4 g_WorldViewMatrix;
uniform mat4 g_WorldMatrix;
uniform vec3 g_CameraPosition;
uniform float m_Quadratic;
const float SIZE_MULTIPLIER = 4.0;
attribute float inSize;
#endif
void main(){
vec4 pos = vec4(inPosition, 1.0);
gl_Position = g_WorldViewProjectionMatrix * pos;
color = inColor;
#ifdef USE_TEXTURE
texCoord = inTexCoord;
#endif
#ifdef POINT_SPRITE
gl_PointSize = max(1.0, inSize);
color.a *= min(gl_PointSize, 1.0);
#endif
}
<|start_filename|>src/main/java/com/pesegato/collision/D4JSpace.java<|end_filename|>
package com.pesegato.collision;
import com.jme3.scene.Spatial;
import com.jme3.scene.control.AbstractControl;
import org.dyn4j.collision.CollisionItem;
import org.dyn4j.collision.CollisionPair;
import org.dyn4j.collision.broadphase.*;
import org.dyn4j.collision.manifold.ClippingManifoldSolver;
import org.dyn4j.collision.manifold.ManifoldSolver;
import org.dyn4j.collision.narrowphase.Gjk;
import org.dyn4j.collision.narrowphase.NarrowphaseDetector;
import org.dyn4j.collision.narrowphase.Penetration;
import org.dyn4j.dynamics.Body;
import org.dyn4j.dynamics.BodyFixture;
import org.dyn4j.geometry.Convex;
import org.dyn4j.geometry.Transform;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@Deprecated
public class D4JSpace {
//private World world;
private Set<Spatial> spatials = new HashSet<Spatial>();
int initialBodyCapacity=1024;
final BroadphaseFilter<CollisionItem<Body, BodyFixture>> broadphaseFilter = new CollisionItemBroadphaseFilter<Body, BodyFixture>();
final AABBProducer<CollisionItem<Body, BodyFixture>> aabbProducer = new CollisionItemAABBProducer<Body, BodyFixture>();
final AABBExpansionMethod<CollisionItem<Body, BodyFixture>> expansionMethod = new StaticValueAABBExpansionMethod<CollisionItem<Body, BodyFixture>>(0.2);
final BroadphaseDetector<CollisionItem<Body, BodyFixture>> broadphase = new DynamicAABBTree<CollisionItem<Body, BodyFixture>>(
broadphaseFilter,
aabbProducer,
expansionMethod,
initialBodyCapacity);
final CollisionItemBroadphaseDetector<Body, BodyFixture> broadphaseDetector = new CollisionItemBroadphaseDetectorAdapter<Body, BodyFixture>(broadphase);
NarrowphaseDetector np;
ManifoldSolver ms;
String name="unnamed space";
public void setName(String name){
this.name=name;
}
public D4JSpace(){
// world=new World();
// collision detection process:
// Broadphase -> Narrowphase -> Manifold generation
// create detection chain
np = new Gjk();
//NarrowphasePostProcessor npp = LinkPostProcessor(); // Only required if you use the Link shape
ms = new ClippingManifoldSolver();
broadphaseDetector.setUpdateTrackingEnabled(true);
}
public void add(Spatial spatial) {
if (spatial.getControl(IDyn4JControl.class) == null) throw new IllegalArgumentException("Cannot handle a spatial which doesn't carry a " + IDyn4JControl.class.getSimpleName());
synchronized(spatials) {
spatials.add(spatial);
IDyn4JControl ctl = spatial.getControl(IDyn4JControl.class);
ctl.addToWorld(broadphaseDetector);
}
}
public void remove(Spatial spatial) {
if (spatial == null || spatial.getControl(IDyn4JControl.class) == null) return;
synchronized (spatials) {
spatials.remove(spatial);
}
IDyn4JControl ctl = spatial.getControl(IDyn4JControl.class);
for (int i = spatial.getNumControls() - 1; i > -1; i--) {
//getNumControls changes for each cycle, because BGeometryBodyControl removes itself when disabled
((AbstractControl) spatial.getControl(i)).setEnabled(false);
}
ctl.removeFromWorld();
}
public void updateDraw(float tpf) {
synchronized(spatials) {
for (Spatial spatial: spatials){
IDyn4JControl ctl = spatial.getControl(IDyn4JControl.class);
if (ctl == null) { spatials.remove(spatial); return; } //evict nodes which have their Dyn4JShapeControl removed
ctl.updateDraw(tpf);
}
}
}
float tTPF=0;
public void updatePhysics(float tpf) {
//world.update(tpf, Integer.MAX_VALUE);
tTPF+=tpf;
if (tTPF>1/60f) {
synchronized(spatials) {
//System.out.println("*** numero di oggetti nel D4JSpace "+spatials.size());
for (Spatial spatial: spatials){
IDyn4JControl ctl = spatial.getControl(IDyn4JControl.class);
if (ctl == null) { spatials.remove(spatial); return; } //evict nodes which have their Dyn4JShapeControl removed
ctl.updatePhysics(broadphaseDetector, tpf);
}
}
tTPF=0;
//System.out.println("Collisions for "+name);
// when ready to detect
List<CollisionPair<CollisionItem<Body, BodyFixture>>> pairs = broadphaseDetector.detect();
for (CollisionPair<CollisionItem<Body, BodyFixture>> pair : pairs) {
// handle the pairs by using pair.getFirst().getBody() / pair.getSecond().getFixture() / etc.
CollisionItem<Body, BodyFixture> first = pair.getFirst();
CollisionItem<Body, BodyFixture> second = pair.getSecond();
BodyFixture fixture1 = first.getFixture();
BodyFixture fixture2 = second.getFixture();
Transform transform1 = first.getBody().getTransform();
Transform transform2 = second.getBody().getTransform();
Convex convex2 = fixture2.getShape();
Convex convex1 = fixture1.getShape();
Penetration p = new Penetration();
if (np.detect(convex1, transform1, convex2, transform2, p)) {
//System.out.println("Collision " + fixture1.getUserData() + " " + fixture2.getUserData());
for (CollisionListener listener:listeners){
listener.listen((Long)fixture1.getUserData(), (Long)fixture2.getUserData());
}
}
}
}
}
public boolean checkCollisionNP(Body a, Body b) {
for (BodyFixture bf1 : a.getFixtures()) {
for (BodyFixture bf2 : b.getFixtures()) {
if (np.detect(bf1.getShape(), a.getTransform(), bf2.getShape(), b.getTransform())) {
return true;
}
}
}
return false;
}
/*
Alternative solution, but this require the use of World class
public boolean checkCollisionAll(Body a, Body b){
return a.isInContact(b);
}
*/
/*
Another alternative
*/
public boolean checkCollisionAll(Body a, Body b){
List<CollisionPair<CollisionItem<Body, BodyFixture>>> pairs = broadphaseDetector.detect();
for (CollisionPair<CollisionItem<Body, BodyFixture>> pair : pairs) {
if ((pair.getFirst().getBody()==a)&&(pair.getSecond().getBody()==b)||
(pair.getFirst().getBody()==b)&&(pair.getSecond().getBody()==a)) {
CollisionItem<Body, BodyFixture> first = pair.getFirst();
CollisionItem<Body, BodyFixture> second = pair.getSecond();
BodyFixture fixture1 = first.getFixture();
BodyFixture fixture2 = second.getFixture();
Transform transform1 = first.getBody().getTransform();
Transform transform2 = second.getBody().getTransform();
Convex convex2 = fixture2.getShape();
Convex convex1 = fixture1.getShape();
Penetration p = new Penetration();
if (np.detect(convex1, transform1, convex2, transform2, p)) {
return true;
}
}
}
return false;
}
ArrayList<CollisionListener> listeners=new ArrayList<>();
public void addListener(CollisionListener cl){
listeners.add(cl);
}
}
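/*
 * Usage sketch (not part of the original sources; the spatial setup is an assumption):
 * a spatial is accepted only if it carries an IDyn4JControl, and narrowphase hits are
 * reported to listeners as the user-data ids of the two colliding fixtures.
 *
 *     D4JSpace space = new D4JSpace();
 *     space.setName("Plane 0");
 *     space.addListener((collider, collided) ->
 *             System.out.println("hit: " + collider + " vs " + collided));
 *     space.add(playerSpatial);          // playerSpatial has a Dyn4JShapeControl attached
 *
 *     // per frame, e.g. from an AppState's update():
 *     space.updatePhysics(tpf);
 *     space.updateDraw(tpf);
 */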
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSAnimationManager.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
/**
*
* @author Pesegato
*/
public interface MSAnimationManager {
public void whatPlay(MSControl msc);
}
<|start_filename|>src/main/java/com/pesegato/collision/CollisionListener.java<|end_filename|>
package com.pesegato.collision;
public interface CollisionListener {
void listen(long collider, long collided);
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSTimerAppState.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.jme3.app.Application;
import com.jme3.app.state.BaseAppState;
/**
*
* @author Pesegato
*/
public class MSTimerAppState extends BaseAppState{
@Override
protected void initialize(Application app) {
}
@Override
protected void cleanup(Application app) {
}
@Override
protected void onEnable() {
}
@Override
protected void onDisable() {
}
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/MyGlow.frag<|end_filename|>
uniform sampler2D m_ColorMap;
#if defined(NEED_TEXCOORD1)
varying vec2 texCoord1;
#else
varying vec2 texCoord;
#endif
uniform vec4 m_GlowColor;
void main(){
#ifdef HAS_GLOWCOLOR
vec4 color = texture2D(m_ColorMap, texCoord1);
gl_FragColor = mix(color, m_GlowColor, 0.5);
gl_FragColor.a = color.a;
#else
gl_FragColor = vec4(0.0);
#endif
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/AnimOLD.frag<|end_filename|>
uniform sampler2D m_ColorMap;
uniform float g_Time;
uniform float m_HitTime;
uniform vec4 m_GlowColor;
vec4 color;
varying vec2 texCoord;
void main(){
color = texture2D(m_ColorMap, texCoord);
/* color.a = color.r * 0.7f;
vec4 overlay = vec4(0.5,0.8,1.0,1.0);
//gl_FragColor = max((1.0 - ((1.0 - color) / overlay)), 0.0);
gl_FragColor = color * overlay;//
*/
vec4 overlay = vec4(1.0,1.0,1.0,1.0);
gl_FragColor = mix(color, overlay, m_HitTime);//
//gl_FragColor = m_GlowColor;
gl_FragColor.a = color.a;
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/actions/MSActionMachine.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet.actions;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.scene.control.AbstractControl;
import com.pesegato.MonkeySheet.MonkeySheetAppState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author Pesegato
* @deprecated use MSFiniteStateMachine instead
*/
public abstract class MSActionMachine extends AbstractControl {
static Logger log = LoggerFactory.getLogger(MSActionMachine.class);
MSAction[] actions;
MSAction currentAction;
public MSActionMachine(MSAction... actions) {
initActions(actions);
}
public void initActions(MSAction... actions){
this.actions = actions;
}
@Override
protected void controlUpdate(float tpf) {
if (currentAction == null) {
init();
nextAction();
}
if (MonkeySheetAppState.tTPF == 0) {
if (currentAction.maybeEnd()) {
nextAction();
}
}
tpf *= MonkeySheetAppState.timeable.getClockspeed();
msUpdate(tpf);
currentAction.controlUpdate(tpf);
}
protected <T extends MSAction> T startAction(Class <T> msActionClass){
for (MSAction act:actions){
if (msActionClass.isAssignableFrom(act.getClass())){
startAction(act);
return (T)act;
}
}
return null;
}
private void nextAction() {
startAction(actions[getNextAction()]);
}
private void startAction(MSAction action){
log.trace("start action {}",action);
if (currentAction==action)
return;
currentAction = action;
currentAction.init(spatial);
}
abstract protected void init();
abstract protected void msUpdate(float tpf);
abstract protected int getNextAction();
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
<|start_filename|>src/main/resources/MonkeySheet/MatDefs/Anim_simple.vert<|end_filename|>
uniform mat4 g_WorldViewProjectionMatrix;
uniform float m_SizeX;
uniform float m_SizeY;
uniform float m_Position;
uniform float m_FlipHorizontal;
attribute vec3 inPosition;
attribute vec2 inTexCoord;
attribute float inTexCoord2;
attribute float inTexCoord3;
varying float vAlpha;
varying vec2 texCoord;
void main(){
float t = m_Position;
#ifdef HAS_VERTEXSHEETPOS
t = inTexCoord2;
vAlpha = inTexCoord3;
#endif
float yAtlas = floor(t / m_SizeY);
float xAtlas = t - (yAtlas * m_SizeY);
//texCoord = vec2(xAtlas + inTexCoord.x, yAtlas + inTexCoord.y) / vec2(m_SizeX, m_SizeY);
texCoord = vec2(xAtlas + inTexCoord.x, (m_SizeY - 1.0 - yAtlas) + inTexCoord.y) / vec2(m_SizeX, m_SizeY);
gl_Position = g_WorldViewProjectionMatrix * vec4(inPosition, 1.0);
}
<|start_filename|>src/main/java/com/pesegato/goldmonkey/Container.java<|end_filename|>
package com.pesegato.goldmonkey;
public class Container {
public String id;
public int size;
public Container(String id, int size){
this.id=id;
this.size=size;
}
}
<|start_filename|>src/main/java/com/pesegato/MonkeySheet/MSControl.java<|end_filename|>
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.pesegato.MonkeySheet;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.pesegato.MonkeySheet.actions.MSAction;
import com.pesegato.timing.Timeable;
/**
* @author Pesegato
*/
public class MSControl extends MSSpriteControl {
boolean runOnce = false;
MSAnimationManager animManager;
public MSAction msAction;
public MSControl() {
}
public MSControl(String anim, Timeable timeable) {
playForever(anim);
MonkeySheetAppState.timeable = timeable;
}
public MSControl(MSAnimationManager animManager) {
this.animManager = animManager;
}
final public void playForever(String ani) {
anim = MonkeySheetAppState.getAnim(ani);
position = 0;
runOnce = false;
if (anim == null) {
log.warn("Running UNINITIALIZED animation {}, GOING TO CRASH VERY SOON!!!", ani);
return;
}
animation = ani;
log.debug("now playing FOREVER animation {}/{}", anim.msCont.name, ani);
}
public void playOnce(String ani) {
anim = MonkeySheetAppState.getAnim(ani);
position = 0;
runOnce = true;
if (anim == null) {
log.warn("Running UNINITIALIZED animation {}, GOING TO CRASH VERY SOON!!!", ani);
return;
}
animation = ani;
log.debug("now playing ONCE animation {}/{}", anim.msCont.name, ani);
}
@Override
protected void controlUpdate(float tpf) {
if (MonkeySheetAppState.tTPF == 0) {
log.trace("position: {}/{} {} - {}", anim.msCont.name, animation, position, anim.anim[position].position);
if (position < anim.anim.length - 1) {
position++;
} else if (runOnce) {
if (animManager != null) {
log.trace("loading animManager {}", animManager);
animManager.whatPlay(this);
}
if (msAction != null) {
log.trace("end of MSAction {}", msAction);
msAction.terminatedAnim();
msAction.whatPlay(this);
}
} else {
position = 0;
}
}
}
public int getCurrentHitbox() {
return anim.hitbox[position];
}
@Override
protected void controlRender(RenderManager rm, ViewPort vp) {
}
}
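/*
 * Usage sketch (not part of the original sources; the animation ids are assumptions):
 * an MSControl advances one frame of the named animation on every MonkeySheet tick.
 *
 *     MSControl ctl = new MSControl();
 *     ctl.playForever("hero_idle");      // loops until another animation is requested
 *     spriteGeometry.addControl(ctl);
 *
 *     // later, e.g. when the character is hit:
 *     ctl.playOnce("hero_hurt");         // plays once, then the MSAnimationManager/MSAction decides what plays next
 */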
<|start_filename|>src/main/java/com/pesegato/collision/hitbox/Prism.java<|end_filename|>
package com.pesegato.collision.hitbox;
import com.jme3.math.FastMath;
import com.jme3.math.Vector3f;
import com.jme3.scene.Mesh;
import com.jme3.scene.VertexBuffer;
import com.jme3.util.BufferUtils;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
public class Prism extends Mesh {
public Prism(float radius, float halfHeight, int sides) {
this(Vector3f.ZERO, radius, halfHeight, sides);
}
public Prism(Vector3f center, float radius, float halfHeight, int sides) {
FloatBuffer fb = BufferUtils.createFloatBuffer(sides * 18);
ShortBuffer idx = BufferUtils.createShortBuffer((sides + sides - 2) * 6);
FloatBuffer normal = BufferUtils.createFloatBuffer(sides * 18);
normal.position(6);
float angle = FastMath.TWO_PI / sides, x, z;
float lastX = 0,//FastMath.sin(0),
lastZ = 1;//FastMath.cos(0);
short c = 2;
x = FastMath.sin(angle);
z = FastMath.cos(angle) + 1;
float normVal = 1f / FastMath.sqrt(x * x + z * z);
for (int i = 0; i < sides - 1; i++, c += (short) 6) {
x = center.x + radius * lastX;
z = center.z + radius * lastZ;
for (int j = 0; j < 3; j++) {
fb.put(x).put(center.y + halfHeight).put(z);
fb.put(x).put(center.y - halfHeight).put(z);
}
x = FastMath.sin((i + 1) * angle);
z = FastMath.cos((i + 1) * angle);
lastX = (lastX + x) * normVal;
lastZ = (lastZ + z) * normVal;
normal.put(lastX).put(0).put(lastZ);
normal.put(lastX).put(0).put(lastZ);
normal.put(0).put(1).put(0);
normal.put(0).put(-1).put(0);
normal.put(lastX).put(0).put(lastZ);
normal.put(lastX).put(0).put(lastZ);
lastX = x;
lastZ = z;
//index for sides
idx.put(c).put((short) (c + 1)).put((short) (c + 4));
idx.put((short) (c + 5)).put((short) (c + 4)).put((short) (c + 1));
}
x = center.x + radius * lastX;
z = center.z + radius * lastZ;
for (int j = 0; j < 3; j++) {
fb.put(x).put(center.y + halfHeight).put(z);
fb.put(x).put(center.y - halfHeight).put(z);
}
x = 0;//FastMath.sin(0);
z = 1;//FastMath.cos(0);
lastX = (lastX + x) * normVal;
lastZ = (lastZ + z) * normVal;
normal.put(lastX).put(0).put(lastZ);
normal.put(lastX).put(0).put(lastZ);
normal.put(0).put(1).put(0);
normal.put(0).put(-1).put(0);
normal.flip();
normal.put(lastX).put(0).put(lastZ);
normal.put(lastX).put(0).put(lastZ);
idx.put(c).put((short) (c + 1)).put((short) 0);
idx.put((short) 1).put((short) 0).put((short) (c + 1));
c = 10; //index for caps
for (int i = 0; i < sides - 2; i++, c += (short) 6) {
idx.put((short) 4).put(c).put((short) (c + 6));
idx.put((short) 5).put((short) (c + 7)).put((short) (c + 1));
}
normal.rewind();
fb.flip();
idx.flip();
setBuffer(VertexBuffer.Type.Position, 3, fb);
setBuffer(VertexBuffer.Type.Index, 3, idx);
setBuffer(VertexBuffer.Type.Normal, 3, normal);
updateBound();
}
}
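// Hedged usage sketch (illustrative): wrap the mesh in a jME3 Geometry to render it, e.g.
//   Geometry g = new Geometry("prism", new Prism(/* radius */ 1f, /* halfHeight */ 0.5f, /* sides */ 6));
// The position/index buffers are flipped and the normal buffer rewound above, so the mesh is
// ready for use once updateBound() has run.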
| Pesegato/MonkeySheet |
<|start_filename|>lib/src/main/java/com/huya/pitaya/ui/status/PageStatusTransformer.kt<|end_filename|>
package com.huya.pitaya.ui.status
import android.os.Looper
import android.view.View
import android.view.ViewGroup
import android.view.ViewGroup.LayoutParams.MATCH_PARENT
import android.widget.FrameLayout
import androidx.annotation.MainThread
import androidx.constraintlayout.widget.ConstraintLayout
import androidx.coordinatorlayout.widget.CoordinatorLayout
import androidx.core.view.ViewCompat
import androidx.core.view.children
import androidx.fragment.app.Fragment
import androidx.lifecycle.Lifecycle.Event.ON_DESTROY
import androidx.lifecycle.Lifecycle.State.DESTROYED
import androidx.lifecycle.LifecycleObserver
import androidx.lifecycle.LifecycleOwner
import androidx.lifecycle.OnLifecycleEvent
import com.huya.pitaya.ui.status.PageStatusTransformer.Companion.newInstance
/**
* # 页面状态切换
* 1. 定义状态,返回状态要显示的视图
* 2. 切换状态
*
* # Page status transform
*
* 1. Define the status, declare the view of the status
*
* ```Kotlin
* val status = PageStatusTransformer.newInstance {
* "Status 1" {
* ViewStatus(view_for_status_1)
* }
* "Status 2" {
* ViewStatus(view_for_status_2)
* }
* "Status 3" {
* ViewStatus(view_for_status_3)
* }
* }
* ```
*
* 2. Transform the status
*
* ```Kotlin
* status.transform("Status 3")
* ```
*
* # 页面的状态
*
* - 每一种状态对应一个[PageDisplayStatus],切换状态意味着:
* - 当前状态调用 [PageDisplayStatus.showView]
* - 其他非当前状态调用 [PageDisplayStatus.hideView]
*
* - 状态可以通过[String]或者[Enum]来命名
* - 通过[newInstance]静态工厂构造时需要为所有可能使用的状态进行命名和定义。
* - 通过[ViewStatusBuilder.invoke]方法会把这些状态记录到状态机中。
*
* # PageDisplayStatus
* - Each state corresponds to a [PageDisplayStatus], which means that:
* - current state's [PageDisplayStatus.showView] is invoked.
* - other states's [PageDisplayStatus.hideView] are invoked.
*
* - The state is named by [String] or [Enum]
* - All possible states need to be named and defined through the [newInstance] static factory method.
* - The [ViewStatusBuilder.invoke] method will record these states to the state machine.
*
* # 状态的可见性
* - 通常只有命名为[currentStatusName]的当前状态是可见的,
* 其余所有状态都会因为[PageDisplayStatus.hideView]被调用而不可见;
* - 也可以通过设置[visibility]为 `false` 让所有状态(包括[currentStatusName])变得不可见,
* 设置为`true`可以让当前状态恢复可见。
*
* # Visibility of States
* - Usually, only the status named [currentStatusName] is visible.
* All other states will call [PageDisplayStatus.hideView];
* - You can also make all States (including [currentStatusName]) invisible by setting [visibility] to 'false',
* Set to 'true' to make the [currentStatusName] visible.
*
 * # Binding to a lifecycle
 * Optionally, a [LifecycleOwner] can be passed when constructing via [newInstance]; the [View]s held by the states are released on [Event.ON_DESTROY].
 * Note that [Fragment.getViewLifecycleOwner] should be passed rather than [Fragment.getLifecycle].
*
* @see SimpleStatus
* @see ViewStubStatus
* @see ReplacementViewStatus
*
* @author YvesCheung
* 2020/3/21
*/
@Suppress("MemberVisibilityCanBePrivate", "unused")
class PageStatusTransformer private constructor(
private val lifecycleOwner: LifecycleOwner? = null
) : LifecycleObserver {
init {
lifecycleOwner?.lifecycle?.addObserver(this)
}
private val statusList = mutableMapOf<String, PageDisplayStatus>()
/**
* Construct your page status:
*
* ```kotlin
* "yourStatusName" {
* PageDisplayStatus()
* }
* ```
*
* or
*
* ```
* YourStatusEnum {
* PageDisplayStatus()
* }
* ```
*
* 在Java中使用[JavaViewStatusBuilder]来适配这个DSL
*
* Use [JavaViewStatusBuilder] in Java.
*/
open inner class ViewStatusBuilder internal constructor() {
//DSL constructor
open operator fun String.invoke(createStatus: () -> PageDisplayStatus) {
statusList[this] = createStatus().also { status ->
if (status is ReplacementViewStatus) {
throw IllegalArgumentException(
"Use `PageStatusTransformer.newInstance" +
"(replaceTo = placeHolderView) {...}` instead."
)
}
}
}
//DSL constructor
open operator fun Enum<*>.invoke(createStatus: () -> PageDisplayStatus) {
this.name.invoke(createStatus)
}
}
/**
* Construct your page status:
*
* ```kotlin
* "yourStatusName" {
* PageDisplayStatus()
* }
* ```
*
* or
*
* ```
* YourStatusEnum {
* PageDisplayStatus()
* }
* ```
*
* 在Java中使用[JavaViewStatusBuilder]来适配这个DSL
*
* Use [JavaViewStatusBuilder] in Java.
*/
open inner class ReplacementViewStatusBuilder internal constructor(
/**
 * The `View` specified by the `replaceTo` parameter.
*/
val contentView: View
) : ViewStatusBuilder() {
private val tagKey get() = R.id.tag_page_status_transformer
override fun String.invoke(createStatus: () -> PageDisplayStatus) {
statusList[this] = createStatus().also { status ->
if (status is ReplacementViewStatus) {
status.parent = replaceParent(contentView)
}
}
}
private fun replaceParent(contentView: View): ParentInfo {
if (contentView is ViewGroup && contentView.getTag(tagKey) != null) {
return ParentInfo(contentView)
}
val grandParent = contentView.parent
when {
contentView is FrameLayout && contentView::class.java == FrameLayout::class.java -> {
contentView.setTag(tagKey, "Make from PageStatusTransformer")
return ParentInfo(contentView)
}
grandParent is FrameLayout && grandParent::class.java == FrameLayout::class.java -> {
grandParent.setTag(tagKey, "Make from PageStatusTransformer")
//add to the next position of $contentView
val index = grandParent.indexOfChild(contentView) + 1
return ParentInfo(grandParent, index)
}
grandParent is ViewGroup -> {
if (grandParent.getTag(tagKey) != null) {
return ParentInfo(grandParent)
}
val index = grandParent.indexOfChild(contentView)
val contentParam = contentView.layoutParams
grandParent.removeView(contentView)
val newParent = FrameLayout(grandParent.context)
newParent.id = ViewCompat.generateViewId()
grandParent.addView(newParent, index, contentParam)
newParent.addView(contentView, MATCH_PARENT, MATCH_PARENT)
newParent.setTag(tagKey, "Make from PageStatusTransformer")
resolveConstraintLayoutId(grandParent, contentView, newParent)
resolveCoordinatorLayoutDependency(grandParent)
return ParentInfo(newParent)
}
else -> {
throw IllegalStateException(
"$contentView must have a parent. " +
"Current parent is $grandParent"
)
}
}
}
/**
 * Children of a [ConstraintLayout] may reference the original view's id (e.g. layout_constraintBottom_toBottomOf="old id"),
 * so every occurrence of the old id must be replaced with the new one.
*
* Workaround for [ConstraintLayout].
*/
private fun resolveConstraintLayoutId(constraintLayout: ViewGroup, old: View, new: View) {
val oldId = old.id
val newId = new.id
if (oldId != View.NO_ID && constraintLayout is ConstraintLayout) {
constraintLayout.children.forEach { child ->
val lp = child.layoutParams
if (lp is ConstraintLayout.LayoutParams) {
if (lp.baselineToBaseline == oldId) lp.baselineToBaseline = newId
if (lp.leftToLeft == oldId) lp.leftToLeft = newId
if (lp.startToStart == oldId) lp.startToStart = newId
if (lp.leftToRight == oldId) lp.leftToRight = newId
if (lp.startToEnd == oldId) lp.startToEnd = newId
if (lp.topToTop == oldId) lp.topToTop = newId
if (lp.topToBottom == oldId) lp.topToBottom = newId
if (lp.rightToRight == oldId) lp.rightToRight = newId
if (lp.endToEnd == oldId) lp.endToEnd = newId
if (lp.rightToLeft == oldId) lp.rightToLeft = newId
if (lp.endToStart == oldId) lp.endToStart = newId
if (lp.bottomToBottom == oldId) lp.bottomToBottom = newId
if (lp.bottomToTop == oldId) lp.bottomToTop = newId
if (lp.circleConstraint == oldId) lp.circleConstraint = newId
}
}
}
}
/**
 * [CoordinatorLayout] builds a dependency chain of its child views during onMeasure:
 * [CoordinatorLayout.mDependencySortedChildren].
 * If that chain is not refreshed in time, it can still hold views that are no longer children,
 * which leads to a ClassCastException.
*
* Workaround for [CoordinatorLayout].
*/
private fun resolveCoordinatorLayoutDependency(coordinatorLayout: ViewGroup) {
if (coordinatorLayout is CoordinatorLayout) {
coordinatorLayout.measure(
coordinatorLayout.measuredWidthAndState,
coordinatorLayout.measuredHeightAndState
)
}
}
}
/**
* 当前显示的状态,通过[transform]切换
*
* The current display status switched through [transform].
*/
val currentStatusName: String?
get() = currentStatusAndParam?.first
@Volatile
private var currentStatusAndParam: Pair<String, Map<String, Any>>? = null
/**
* 控制当前状态的可见性。
* 该方法不影响通过[transform]切换状态,即使[visibility]为false,依然可以切换状态,但是状态不可见。
* 当[visibility]切换为true时,[currentStatusName]代表的状态就会显示。
*
* Toggles the visibility of the current state.
*
* This method does not affect the state switching through [transform].
* Even if [visibility] is false, the state can still be transformed, but the UI is invisible.
*
* When [visibility] is set to true, the status represented by [currentStatusName] will be displayed.
*/
var visibility: Boolean = true
@MainThread
set(visible) {
if (Looper.myLooper() != Looper.getMainLooper()) {
throw IllegalThreadStateException("Only the mainThread can change ui visibility")
}
if (field != visible) {
field = visible
if (lifecycleOwner?.lifecycle?.currentState == DESTROYED) {
//Can't perform this state after ON_DESTROY
return
}
if (visible) {
currentStatusAndParam?.let { (status, param) -> transform(status, param) }
} else {
statusList.values.forEach { status -> status.hideView() }
}
}
}
/**
* 切换状态
*
* Transform status.
*
* @param status status defined in [ViewStatusBuilder]
*/
@MainThread
@JvmOverloads
fun transform(status: Enum<*>, param: Map<String, Any> = emptyMap()) =
transform(status.name, param)
/**
* 切换状态
*
* Transform status.
*
* @param status status defined in [ViewStatusBuilder]
*/
@MainThread
@JvmOverloads
fun transform(status: String, param: Map<String, Any> = emptyMap()) {
if (lifecycleOwner?.lifecycle?.currentState == DESTROYED) {
//Can't perform this state after ON_DESTROY
return
}
if (visibility) {
val currentStatus = statusList[status]
when {
currentStatus == null -> {
throw IllegalArgumentException(
"PageStatusTransformer has status: '${statusList.keys}'," +
" but expect status is '$status'"
)
}
Looper.myLooper() != Looper.getMainLooper() -> {
throw IllegalThreadStateException("Only the mainThread can transform the ui status")
}
else -> {
currentStatusAndParam = status to param
statusList.filter { it.key != status }.forEach { (_, status) ->
status.hideView()
}
currentStatus.showView(param)
}
}
} else {
currentStatusAndParam = status to param
}
}
@MainThread
@OnLifecycleEvent(ON_DESTROY)
fun destroyView() {
statusList.clear()
}
companion object {
//static factory
@JvmStatic
@JvmOverloads
fun newInstance(
lifecycleOwner: LifecycleOwner? = null,
config: ViewStatusBuilder.() -> Unit
): PageStatusTransformer {
val instance = PageStatusTransformer(lifecycleOwner)
config(instance.ViewStatusBuilder())
return instance
}
//static factory
@JvmStatic
fun newInstance(
lifecycleOwner: LifecycleOwner? = null,
replaceTo: View,
config: ReplacementViewStatusBuilder.() -> Unit
): PageStatusTransformer {
val instance = PageStatusTransformer(lifecycleOwner)
config(instance.ReplacementViewStatusBuilder(replaceTo))
return instance
}
//static factory
@JvmStatic
fun newInstance(
replaceTo: View,
config: ReplacementViewStatusBuilder.() -> Unit
): PageStatusTransformer {
val instance = PageStatusTransformer()
config(instance.ReplacementViewStatusBuilder(replaceTo))
return instance
}
}
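// Hedged usage sketch (mirrors the KDoc examples above; `ViewStatus`, the views and the
// status names are placeholders, not verified APIs):
//   val status = PageStatusTransformer.newInstance(viewLifecycleOwner, replaceTo = contentView) {
//       "Loading" { ViewStatus(loadingView) }
//       "Error" { ViewStatus(errorView) }
//   }
//   status.transform("Loading")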
} | YvesCheung/PageStatusTransformer |
<|start_filename|>prefs/bfdRootListController.h<|end_filename|>
#import <Preferences/PSListController.h>
#import <Preferences/PSSpecifier.h>
#import <spawn.h>
@interface bfdRootListController : PSListController
@end
<|start_filename|>Makefile<|end_filename|>
TARGET := iphone:clang:13.5:12.0
GO_EASY_ON_ME = 1
ARCHS = arm64 arm64e
BFINJECT_SRC=DumpDecrypted.m bfdecrypt.m
MINIZIP_SRC=SSZipArchive/minizip/crypt.c \
SSZipArchive/minizip/ioapi.c \
SSZipArchive/minizip/ioapi_buf.c \
SSZipArchive/minizip/ioapi_mem.c \
SSZipArchive/minizip/minishared.c \
SSZipArchive/minizip/unzip.c \
SSZipArchive/minizip/zip.c \
SSZipArchive/minizip/aes/aes_ni.c \
SSZipArchive/minizip/aes/aescrypt.c \
SSZipArchive/minizip/aes/aeskey.c \
SSZipArchive/minizip/aes/aestab.c \
SSZipArchive/minizip/aes/fileenc.c \
SSZipArchive/minizip/aes/hmac.c \
SSZipArchive/minizip/aes/prng.c \
SSZipArchive/minizip/aes/pwd2key.c \
SSZipArchive/minizip/aes/sha1.c
SSZIPARCHIVE_SRC=SSZipArchive/SSZipArchive.m
include $(THEOS)/makefiles/common.mk
TWEAK_NAME = bfdecrypt
$(TWEAK_NAME)_FILES = bfdecrypt.m DumpDecrypted.m $(MINIZIP_SRC) $(SSZIPARCHIVE_SRC)
#
$(TWEAK_NAME)_CFLAGS = -fobjc-arc -I SSZipArchive -I SSZipArchive/minizip
$(TWEAK_NAME)_FRAMEWORKS += CoreFoundation IOKit Foundation JavaScriptCore UIKit Security CFNetwork CoreGraphics
#$(TWEAK_NAME)_EXTRA_FRAMEWORKS += Cephei
include $(THEOS_MAKE_PATH)/tweak.mk
SUBPROJECTS += prefs
include $(THEOS_MAKE_PATH)/aggregate.mk
<|start_filename|>prefs/Makefile<|end_filename|>
TARGET := iphone:clang:latest:11.2
include $(THEOS)/makefiles/common.mk
ARCHS = arm64 arm64e
BUNDLE_NAME = bfdecrypt
$(BUNDLE_NAME)_FILES = bfdRootListController.m
$(BUNDLE_NAME)_FRAMEWORKS = UIKit
$(BUNDLE_NAME)_PRIVATE_FRAMEWORKS = Preferences
$(BUNDLE_NAME)_INSTALL_PATH = /Library/PreferenceBundles
$(BUNDLE_NAME)_CFLAGS = -fobjc-arc
include $(THEOS_MAKE_PATH)/bundle.mk
internal-stage::
$(ECHO_NOTHING)mkdir -p $(THEOS_STAGING_DIR)/Library/PreferenceLoader/Preferences$(ECHO_END)
$(ECHO_NOTHING)cp entry.plist $(THEOS_STAGING_DIR)/Library/PreferenceLoader/Preferences/bfdecrypt.plist$(ECHO_END)
| p0358/bfdecrypt |
<|start_filename|>website/index.css<|end_filename|>
.center {
margin: auto;
max-width: 1200px;
padding: 10px;
}
.container-fluid {
padding: 30px;
}
body {
font-size: 18px;
}
.name {
font-size: 18px;
font-family: Helvetica;
font-style: normal;
font-variant: normal;
font-weight: 300;
line-height: 20px;
}
.affiliation {
font-size: 16px;
font-family: Helvetica;
font-style: normal;
font-variant: normal;
font-weight: 100;
line-height: 20px;
}
h4 {
font-size: 20px;
}
.sidenav {
padding: 10px;
position: fixed;
top: 120px;
width: 16.7%;
}
.barright {
right: 0;
}
.shift {
padding: 0 15px;
}
.vid-caption {
padding: 5px;
}
.vid-item {
min-height: 15vw;
}
.slidebutton {
font-size: 21px;
}
img.sr-inp {
display: none;
}
@media only screen and (max-width: 960px) {
.sidenav {
display: none;
}
}
.sidenav a:hover {
color: #064579;
}
.my-buttons {
padding: 10px;
}
.slick-prev:before, .slick-next:before{
background-color: #666666;
border-radius: 50%;
}
.grey {
background-color: #edf3fc;
}
.switch {
position: relative;
display: inline-block;
width: 60px;
height: 34px;
}
/* Hide default HTML checkbox */
.switch input {
opacity: 0;
width: 0;
height: 0;
}
/* The slider */
.slider {
position: absolute;
cursor: pointer;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: #ccc;
-webkit-transition: .4s;
transition: .4s;
}
.slider:before {
position: absolute;
content: "";
height: 26px;
width: 26px;
left: 4px;
bottom: 4px;
background-color: white;
-webkit-transition: .4s;
transition: .4s;
}
input:checked + .slider {
background-color: #2196F3;
}
input:focus + .slider {
box-shadow: 0 0 1px #2196F3;
}
input:checked + .slider:before {
-webkit-transform: translateX(26px);
-ms-transform: translateX(26px);
transform: translateX(26px);
}
/* Rounded sliders */
.slider.round {
border-radius: 34px;
}
.slider.round:before {
border-radius: 50%;
}
| zvict/HyperRIM |
<|start_filename|>.config/Code/User/keybindings.json<|end_filename|>
// Place your key bindings in this file to overwrite the defaults
[
{
"key": "ctrl+3",
"command": "editor.action.addCommentLine",
"when": "editorTextFocus && !editorReadonly"
},
{
"key": "ctrl+k ctrl+c",
"command": "-editor.action.addCommentLine",
"when": "editorTextFocus && !editorReadonly"
},
{
"key": "ctrl+r",
"command": "editor.action.goToDeclaration",
"when": "editorHasDefinitionProvider && editorTextFocus && !isInEmbeddedEditor"
},
{
"key": "f12",
"command": "-editor.action.goToDeclaration",
"when": "editorHasDefinitionProvider && editorTextFocus && !isInEmbeddedEditor"
},
{
"key": "ctrl+shift+3",
"command": "editor.action.removeCommentLine",
"when": "editorTextFocus && !editorReadonly"
},
{
"key": "ctrl+k ctrl+u",
"command": "-editor.action.removeCommentLine",
"when": "editorTextFocus && !editorReadonly"
},
{
"key": "ctrl+shift+1",
"command": "workbench.action.toggleSidebarVisibility"
},
{
"key": "ctrl+b",
"command": "-workbench.action.toggleSidebarVisibility"
},
{
"key": "ctrl+f9",
"command": "editor.debug.action.toggleBreakpoint",
"when": "debuggersAvailable && editorTextFocus"
},
{
"key": "f9",
"command": "editor.action.sortLinesAscending"
}
]
| mmphego/dot-files |
<|start_filename|>src/visualizer.jl<|end_filename|>
struct MechanismVisualizer{M <: MechanismState, V <: AbstractVisualizer}
state::M
visualizer::V
modcount::Int
function MechanismVisualizer(state::M, vis::V) where {M <: MechanismState, V <: AbstractVisualizer}
new{M, V}(state, vis, rbd.modcount(state.mechanism))
end
end
function MechanismVisualizer(state::MechanismState, source::AbstractGeometrySource=Skeleton(), vis::AbstractVisualizer=Visualizer())
vis = MechanismVisualizer(state, vis)
setelement!(vis, source)
vis
end
function setelement!(vis::MechanismVisualizer, source::AbstractGeometrySource)
elements = visual_elements(mechanism(vis), source)
_set_mechanism!(vis, elements)
_render_state!(vis)
end
MechanismVisualizer(m::Mechanism, args...) = MechanismVisualizer(MechanismState{Float64}(m), args...)
state(mvis::MechanismVisualizer) = mvis.state
mechanism(mvis::MechanismVisualizer) = mvis.state.mechanism
visualizer(mvis::MechanismVisualizer) = mvis.visualizer
to_affine_map(tform::Transform3D) = AffineMap(rotation(tform), translation(tform))
function _set_mechanism!(mvis::MechanismVisualizer, elements::AbstractVector{<:VisualElement})
for (i, element) in enumerate(elements)
setelement!(mvis, element, "geometry_$i")
end
end
Base.getindex(mvis::MechanismVisualizer, x...) = getindex(mvis.visualizer, x...)
# TODO: much of this information can be cached if this
# method becomes a performance bottleneck.
# We can probably just put `@memoize` from Memoize.jl right here.
function Base.getindex(mvis::MechanismVisualizer, frame::CartesianFrame3D)
body = rbd.body_fixed_frame_to_body(mechanism(mvis), frame)
mvis[body][string(frame)]
end
function Base.getindex(mvis::MechanismVisualizer, body::RigidBody)
path = _path(mechanism(mvis), body)
mvis[path...]
end
"""
setelement!(mvis::MechanismVisualizer, element::VisualElement, name::AbstractString="<element>")
Attach the given visual element to the visualizer.
The element's frame will determine how its geometry is attached to the scene
tree, so that any other geometries attached to the same body will all move together.
"""
function setelement!(mvis::MechanismVisualizer, element::VisualElement, name::AbstractString="<element>")
setelement!(mvis, element.frame, element.geometry, MeshLambertMaterial(color=element.color), name)
settransform!(mvis[element.frame][name], element.transform)
end
"""
setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, object::AbstractObject, name::AbstractString="<element>")
Attach the given geometric object (geometry + material) to the visualizer at the given frame
"""
function setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, object::AbstractObject, name::AbstractString="<element>")
body = rbd.body_fixed_frame_to_body(mechanism(mvis), frame)
definition = rbd.frame_definition(body, frame)
frame_vis = mvis[frame]
settransform!(frame_vis, to_affine_map(definition))
setobject!(frame_vis[name], object)
end
"""
setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, geometry::GeometryLike, name::AbstractString="<element>")
Attach the given geometry to the visualizer at the given frame, using its default material.
"""
function setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, geometry::GeometryLike, name::AbstractString="<element>")
setelement!(mvis, frame, Object(geometry), name)
end
"""
setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, geometry::GeometryLike, material::AbstractMaterial, name::AbstractString="<element>")
Construct an object with the given geometry and material and attach it to the visualizer
"""
function setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, geometry::GeometryLike, material::AbstractMaterial, name::AbstractString="<element>")
setelement!(mvis, frame, Object(geometry, material), name)
end
function setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, geometry::MeshFile, material::AbstractMaterial, name::AbstractString="<element>")
ext = lowercase(splitext(geometry.filename)[2])
# We load .dae files as MeshFileObject so that threejs can handle loading
# their built-in primitives, materials, and textures. All other meshes are
# loaded as MeshFileGeometry which uses MeshIO to load the mesh geometry in
# Julia (but does not currently handle any materials or textures).
if ext == ".dae"
obj = MeshFileObject(geometry.filename)
else
obj = Object(MeshFileGeometry(geometry.filename), material)
end
setelement!(mvis, frame, obj, name)
end
# Special cases for visualizing frames and points
"""
setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, scale::Real=0.5, name::AbstractString="<element>")
Add a Triad geometry with the given scale to the visualizer at the specified frame
"""
function setelement!(mvis::MechanismVisualizer, frame::CartesianFrame3D, scale::Real=0.5, name::AbstractString="<element>")
setelement!(mvis, frame, Triad(scale), name)
end
"""
setelement!(mvis::MechanismVisualizer, point::Point3D, radius::Real=0.05, name::AbstractString="<element>")
Add a HyperSphere geometry with the given radius to the visualizer at the given point
"""
function setelement!(mvis::MechanismVisualizer, point::Point3D, radius::Real=0.05, name::AbstractString="<element>")
setelement!(mvis, point.frame, HyperSphere(Point(point.v[1], point.v[2], point.v[3]), convert(eltype(point.v), radius)), name)
end
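# Hedged usage sketch (illustrative, not part of the original source): assuming `mvis` is a
# MechanismVisualizer and `body` is a RigidBody of its mechanism, the two helpers above can be
# used as
#   setelement!(mvis, default_frame(body), 0.5, "triad")
#   setelement!(mvis, Point3D(default_frame(body), 0.0, 0.0, 0.0), 0.05, "origin_marker")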
function _path(mechanism, body)
body_ancestors = ancestors(body, mechanism.tree)
path = string.(reverse(body_ancestors))
end
function _render_state!(mvis::MechanismVisualizer, state::MechanismState=mvis.state)
@assert mvis.state.mechanism === state.mechanism
if rbd.modcount(state.mechanism) != mvis.modcount
error("Mechanism has been modified after creating the visualizer. Please create a new MechanismVisualizer")
end
vis = mvis.visualizer
tree = mechanism(mvis).tree # TODO: tree accessor?
for body in vertices(tree)
if body == root(tree)
continue
else
parent = source(edge_to_parent(body, tree), tree)
tform = relative_transform(state, default_frame(body), default_frame(parent))
settransform!(mvis[body], to_affine_map(tform))
end
end
end
"""
set_configuration!(mvis::MechanismVisualizer, args...)
Set the configuration of the mechanism visualizer and re-render it.
# Examples
```julia-repl
julia> set_configuration!(vis, [1., 2., 3.])
```
```julia-repl
julia> set_configuration!(vis, findjoint(robot, "shoulder"), 1.0)
```
"""
function rbd.set_configuration!(mvis::MechanismVisualizer, args...)
set_configuration!(mvis.state, args...)
_render_state!(mvis)
end
rbd.configuration(mvis::MechanismVisualizer, args...) = configuration(mvis.state, args...)
MeshCat.render(mvis::MechanismVisualizer, args...; kw...) = MeshCat.render(mvis.visualizer, args...; kw...)
Base.open(mvis::MechanismVisualizer, args...; kw...) = open(mvis.visualizer, args...; kw...)
Base.wait(mvis::MechanismVisualizer) = wait(mvis.visualizer)
function Base.copyto!(mvis::MechanismVisualizer, state::Union{MechanismState, AbstractVector})
copyto!(mvis.state, state)
_render_state!(mvis)
end
| ferrolho/MeshCatMechanisms.jl |
<|start_filename|>EZLoadingActivity_iOS/EZLoadingActivity_iOS.h<|end_filename|>
//
// EZLoadingActivity_iOS.h
// EZLoadingActivity_iOS
//
// Created by toshi0383 on 12/14/15.
// Copyright © 2015 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
//! Project version number for EZLoadingActivity_iOS.
FOUNDATION_EXPORT double EZLoadingActivity_iOSVersionNumber;
//! Project version string for EZLoadingActivity_iOS.
FOUNDATION_EXPORT const unsigned char EZLoadingActivity_iOSVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <EZLoadingActivity_iOS/PublicHeader.h>
| Legoless/EZLoadingActivity |
<|start_filename|>Dockerfile<|end_filename|>
FROM unit9/base
MAINTAINER <NAME> <<EMAIL>>
RUN curl https://deb.nodesource.com/setup_10.x --output /tmp/node_setup && \
bash /tmp/node_setup && \
rm /tmp/node_setup && \
apt install nodejs
WORKDIR /app
RUN adduser --system --no-create-home --disabled-login --group app
ADD config/run /etc/service/backend/run
ADD node_modules /app/node_modules
ADD index.html index.js /app/
<|start_filename|>node_modules/unitycloudbuild-client/test/index.js<|end_filename|>
var chai = require('chai'),
should = chai.should,
expect = chai.expect,
ucbclient = require('../index').client;
var config = {
schemaurl: 'https://build-api.cloud.unity3d.com/api/v1/api.json'
}
var testClient;
before(function(done) {
ucbclient(config.schemaurl, function(client) {
testClient = client;
done();
});
});
describe('#client', function() {
it('is an object', function() {
expect(testClient).to.be.a('object');
});
it('has builds', function() {
expect(testClient).to.have.property('builds');
});
it('has buildtargets', function() {
expect(testClient).to.have.property('buildtargets');
});
it('has orgs', function() {
expect(testClient).to.have.property('orgs');
});
it('has users', function() {
expect(testClient).to.have.property('users');
});
it('has credentials', function() {
expect(testClient).to.have.property('credentials');
});
it('has shares', function() {
expect(testClient).to.have.property('shares');
});
it('fails to get self user without token', function(done) {
testClient.users.getUserSelf()
.then(function(user) {
})
.catch(function(error) {
expect(error.status).to.equal(401);
done();
});
});
});
<|start_filename|>index.js<|end_filename|>
// Options
var options = {
port: process.env.PORT || 80, // Heroku port or 80.
unityAPIBase: 'https://build-api.cloud.unity3d.com', // URI (e.g. href) recieved in web hook payload.
unityCloudAPIKey: process.env.UNITYCLOUD_KEY,
unityCloudSecret: process.env.UNITYCLOUD_SECRET,
appCenterHost: 'https://api.appcenter.ms',
appCenterAPIKey: process.env.APPCENTER_KEY,
logLevel: process.env.LOG_LEVEL || 'info'
};
// Imports
var path = require('path');
var fs = require('fs');
var express = require('express');
var app = express();
var http = require('http');
var https = require('https');
var server = http.Server(app);
var bodyParser = require('body-parser');
var najax = require('najax');
var FormData = require('form-data');
var url = require('url');
var HmacSHA256 = require('crypto-js/hmac-sha256');
var winston = require('winston');
// Setup logging
const logger = winston.createLogger({
level: options.logLevel,
format: winston.format.json(),
transports: [
new winston.transports.Console({
format: winston.format.combine(
winston.format.timestamp(),
winston.format.align(),
winston.format.splat(),
winston.format.printf(info => `${info.timestamp} ${info.level}: ${info.message}`)
)
})
]
});
// Run Server
server.listen(options.port, function () {
logger.info('listening on *:' + options.port);
});
// Configure Express
app.use('/public', express.static('public'));
// parse application/json
var jsonParser = bodyParser.json({
verify: function (req, res, buf, encoding) {
if (options.unityCloudSecret) {
var content = buf.toString();
var actualHmac = HmacSHA256(content, options.unityCloudSecret).toString();
var hmac = req.headers['x-unitycloudbuild-signature'];
if (hmac !== actualHmac) {
throw new Error('Invalid signature');
} else {
logger.info('Signature OK');
}
}
}
});
app.get('/', function (req, res) {
res.sendFile(path.join(__dirname, '/index.html'));
});
app.post('/build', jsonParser, async function (req, res) {
if (!req.body) {
return res.sendStatus(400);
}
logger.info('body: %j', req.body);
// Get Build API URL
var buildAPIURL = ((req.body.links || {}).api_self || {}).href;
if (!buildAPIURL) {
// URL not available.
res.setHeader('Content-Type', 'application/json');
res.send({
error: true,
message: 'No build link from Unity Cloud Build webhook'
});
logger.warn('No build link provided, ignoring request');
return;
} else {
// URL available.
res.setHeader('Content-Type', 'application/json');
res.send({
error: false,
message: 'Process begun for project "' + req.body.projectName + '" platform "' + req.body.buildTargetName + '".'
});
}
if (req.query.excludeTargets) {
var excludedTargets = req.query.excludeTargets.split(',').map((x) => x.trim());
if (excludedTargets.includes(req.body.buildTargetName)) {
logger.info('Target "%s" excluded, skipping', req.body.buildTargetName);
return;
}
}
if (req.query.includeTargets) {
var includedTargets = req.query.includeTargets.split(',').map((x) => x.trim());
if (!includedTargets.includes(req.body.buildTargetName)) {
logger.info('Target "%s" not included, skipping', req.body.buildTargetName);
return;
}
}
var { url, filename, notes } = await getBuildDetails(buildAPIURL);
var downloadedFilename = await downloadBinary(url, filename);
await uploadToAppCenter(downloadedFilename, notes, req.body.platform, req.query.ownerName, req.query.appName, req.query.team);
});
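// Hedged example of the webhook fields the handler above actually reads (values are illustrative):
//   { "projectName": "MyGame", "buildTargetName": "ios-release", "platform": "ios",
//     "links": { "api_self": { "href": "/api/v1/orgs/acme/projects/mygame/buildtargets/ios-release/builds/42" } } }
// Supported query parameters: ?ownerName=...&appName=...&team=...&includeTargets=a,b&excludeTargets=c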
function getBuildDetails (buildAPIURL) {
logger.info('getBuildDetails: start');
return new Promise((resolve, reject) =>
najax({
url: options.unityAPIBase + buildAPIURL,
type: 'GET',
headers: {
'Authorization': 'Basic ' + options.unityCloudAPIKey
},
success: function (data) {
var parsedData = JSON.parse(data);
var notes = '';
if (parsedData.changeset) {
notes += 'Commits:\n';
for (var commit of parsedData.changeset.reverse()) {
notes += ` - [${commit.commitId.substr(0, 8)}] ${commit.message}\n`;
}
}
var parsedUrl = url.parse(parsedData.links.download_primary.href);
var filename = '/tmp/' + path.basename(parsedUrl.pathname);
logger.info('getBuildDetails: finished');
resolve({url: parsedData.links.download_primary.href, filename: filename, notes: notes});
},
error: function (error) {
logger.error('Error when fetching build details: %j', error);
reject(error);
}
})
);
}
function downloadBinary (binaryURL, filename) {
logger.info('downloadBinary: start');
logger.info(' ' + binaryURL);
logger.info(' ' + filename);
return new Promise((resolve, reject) =>
deleteFile(filename, () =>
https.get(binaryURL, (res) => {
logger.info('statusCode: %j', res.statusCode);
logger.info('headers: %j', res.headers);
var writeStream = fs.createWriteStream(filename, {'flags': 'a'});
var len = parseInt(res.headers['content-length'], 10);
var cur = 0;
var total = len / 1048576; // 1048576 - bytes in 1Megabyte
res.on('data', (chunk) => {
cur += chunk.length;
writeStream.write(chunk, 'binary');
logger.debug('Downloading ' + (100.0 * cur / len).toFixed(2) + '%, Downloaded: ' + (cur / 1048576).toFixed(2) + ' mb, Total: ' + total.toFixed(2) + ' mb');
});
res.on('end', () => {
logger.info('downloadBinary: finished');
writeStream.end();
});
writeStream.on('finish', () => {
resolve(filename);
});
}).on('error', (e) => {
console.error(e);
reject(e);
})
)
);
}
async function uploadToAppCenter (filename, notes, platform, ownerName, appName, team) {
if (platform === 'android' || platform === 'ios') {
var { uploadId, uploadUrl } = await createAppCenterUpload(ownerName, appName);
await uploadFileToAppCenter(filename, uploadUrl);
var releaseUrl = await commitAppCenterUpload(ownerName, appName, uploadId);
await distributeAppCenterUpload(releaseUrl, team, notes);
} else {
logger.error('Platform not supported: %s', platform);
}
}
function createAppCenterUpload (ownerName, appName) {
logger.info('createAppCenterUpload: start');
var url = `${options.appCenterHost}/v0.1/apps/${ownerName}/${appName}/release_uploads`;
return new Promise((resolve, reject) =>
najax({
url: url,
method: 'POST',
headers: {
'X-API-Token': options.appCenterAPIKey,
'Content-Type': 'application/json'
},
success: function (data) {
var parsedData = JSON.parse(data);
logger.info('createAppCenterUpload: finished');
resolve({
uploadId: parsedData.upload_id,
uploadUrl: parsedData.upload_url
});
},
error: function (error) {
logger.error('Error when creating upload: %j', error);
reject(error);
}
})
);
}
function commitAppCenterUpload (ownerName, appName, uploadId) {
logger.info('commitAppCenterUpload: start');
var url = `${options.appCenterHost}/v0.1/apps/${ownerName}/${appName}/release_uploads/${uploadId}`;
return new Promise((resolve, reject) =>
najax({
url: url,
type: 'PATCH',
contentType: 'application/json',
data: { status: 'committed' },
headers: {
'X-API-Token': options.appCenterAPIKey
},
success: function (data) {
var parsedData = JSON.parse(data);
logger.info('commitAppCenterUpload: finished');
resolve(parsedData.release_url);
},
error: function (error) {
logger.error('Error when committing upload: %j', error);
reject(error);
}
})
);
}
function distributeAppCenterUpload (releaseUrl, team, notes) {
logger.info('distributeAppCenterUpload: start');
var url = `${options.appCenterHost}/${releaseUrl}`;
var data = {
release_notes: notes,
destination_name: team
};
return new Promise((resolve, reject) =>
najax({
url: url,
type: 'PATCH',
contentType: 'application/json',
data: data,
headers: {
'X-API-Token': options.appCenterAPIKey
},
success: function (data) {
var parsedData = JSON.parse(data);
logger.info('distributeAppCenterUpload: finished');
resolve(parsedData.release_url);
},
error: function (error) {
logger.error('Error when committing upload: %j', error);
reject(error);
}
})
);
}
function uploadFileToAppCenter (filename, uploadUrl) {
logger.info('uploadFileToAppCenter: start');
var readable = fs.createReadStream(filename);
readable.on('error', () => {
logger.error('Error reading binary file for upload to App Center');
});
// Create FormData
var form = new FormData();
form.append('ipa', readable);
var parsedUrl = url.parse(uploadUrl);
return new Promise((resolve, reject) => {
var req = form.submit({
host: parsedUrl.host,
path: parsedUrl.pathname + (parsedUrl.search ? parsedUrl.search : ''),
protocol: parsedUrl.protocol,
headers: {
'Accept': 'application/json',
'X-API-Token': options.appCenterAPIKey
}
}, function (err, res) {
if (err) {
logger.error('Error when uploading: %j', err);
reject(err);
}
if (res.statusCode !== 200 && res.statusCode !== 201 && res.statusCode !== 204) {
logger.info('Uploading failed with status ' + res.statusCode);
reject(err);
}
var jsonString = ''; // eslint-disable-line
res.on('data', (chunk) => {
jsonString += String.fromCharCode.apply(null, new Uint16Array(chunk));
});
res.on('end', () => {
logger.info('uploadFileToAppCenter: finished');
deleteFile(filename, resolve);
});
});
// Track upload progress.
var len = parseInt(req.getHeader('content-length'), 10);
var cur = 0;
var total = len / 1048576; // 1048576 - bytes in 1Megabyte
req.on('data', (chunk) => {
cur += chunk.length;
logger.debug('Uploading ' + (100.0 * cur / len).toFixed(2) + '%, Uploaded: ' + (cur / 1048576).toFixed(2) + ' mb, Total: ' + total.toFixed(2) + ' mb');
});
});
}
// Delete file, used to clear up any binary downloaded.
function deleteFile (filename, cb) {
fs.access(filename, function (err) {
if (!err || err.code !== 'ENOENT') {
// Delete File.
fs.unlink(filename, (err) => {
if (err) {
logger.error('Error when deleting file: %j', err);
}
cb();
});
} else {
cb();
}
});
}
<|start_filename|>node_modules/najax/lib/parse-options.js<|end_filename|>
var _ = require('lodash')
module.exports = function parseOptions (url, options, callback) {
var opts = {}
if (_.isString(url)) {
opts.url = url
} else {
_.extend(opts, url)
}
if (_.isFunction(options)) {
opts.success = options
} else {
if (_.isFunction(callback)) opts.success = callback
_.extend(opts, options)
}
// support legacy jquery options.type
if (!opts.method && opts.type) {
opts.method = opts.type
}
return opts
}
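// Hedged usage sketch (illustrative): the jQuery-style call forms normalize to one options object, e.g.
//   parseOptions('http://example.com', { type: 'POST' }, cb)
//   => { url: 'http://example.com', success: cb, type: 'POST', method: 'POST' }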
<|start_filename|>node_modules/swagger-client/lib/resolver.js<|end_filename|>
'use strict';
var SwaggerHttp = require('./http');
var _ = {
isObject: require('lodash-compat/lang/isObject'),
cloneDeep: require('lodash-compat/lang/cloneDeep'),
isArray: require('lodash-compat/lang/isArray'),
isString: require('lodash-compat/lang/isString')
};
/**
* Resolves a spec's remote references
*/
var Resolver = module.exports = function () {
this.failedUrls = [];
};
Resolver.prototype.processAllOf = function(root, name, definition, resolutionTable, unresolvedRefs, spec) {
var i, location, property;
definition['x-resolved-from'] = [ '#/definitions/' + name ];
var allOf = definition.allOf;
// the refs go first
allOf.sort(function(a, b) {
if(a.$ref && b.$ref) { return 0; }
else if(a.$ref) { return -1; }
else { return 1; }
});
for (i = 0; i < allOf.length; i++) {
property = allOf[i];
location = '/definitions/' + name + '/allOf';
this.resolveInline(root, spec, property, resolutionTable, unresolvedRefs, location);
}
};
Resolver.prototype.resolve = function (spec, arg1, arg2, arg3) {
this.spec = spec;
var root = arg1, callback = arg2, scope = arg3, opts = {}, location, i;
if(typeof arg1 === 'function') {
root = null;
callback = arg1;
scope = arg2;
}
var _root = root;
this.scope = (scope || this);
this.iteration = this.iteration || 0;
if(this.scope.options && this.scope.options.requestInterceptor){
opts.requestInterceptor = this.scope.options.requestInterceptor;
}
if(this.scope.options && this.scope.options.responseInterceptor){
opts.responseInterceptor = this.scope.options.responseInterceptor;
}
var name, path, property, propertyName;
var processedCalls = 0, resolvedRefs = {}, unresolvedRefs = {};
var resolutionTable = []; // store objects for dereferencing
spec.definitions = spec.definitions || {};
// definitions
for (name in spec.definitions) {
var definition = spec.definitions[name];
if(definition['$ref']) {
this.resolveInline(root, spec, definition, resolutionTable, unresolvedRefs, definition);
}
else {
for (propertyName in definition.properties) {
property = definition.properties[propertyName];
if (_.isArray(property.allOf)) {
this.processAllOf(root, name, property, resolutionTable, unresolvedRefs, spec);
}
else {
this.resolveTo(root, property, resolutionTable, '/definitions');
}
}
if (definition.allOf) {
this.processAllOf(root, name, definition, resolutionTable, unresolvedRefs, spec);
}
}
}
// shared parameters
spec.parameters = spec.parameters || {};
for(name in spec.parameters) {
var parameter = spec.parameters[name];
if (parameter.in === 'body' && parameter.schema) {
if(_.isArray(parameter.schema.allOf)) {
// move to a definition
var modelName = 'inline_model';
var name = modelName;
var done = false; var counter = 0;
while(!done) {
if(typeof spec.definitions[name] === 'undefined') {
done = true;
break;
}
name = modelName + '_' + counter;
counter ++;
}
spec.definitions[name] = { allOf: parameter.schema.allOf };
delete parameter.schema.allOf;
parameter.schema.$ref = '#/definitions/' + name;
this.processAllOf(root, name, spec.definitions[name], resolutionTable, unresolvedRefs, spec);
}
else {
this.resolveTo(root, parameter.schema, resolutionTable, location);
}
}
if (parameter.$ref) {
// parameter reference
this.resolveInline(root, spec, parameter, resolutionTable, unresolvedRefs, parameter.$ref);
}
}
// operations
for (name in spec.paths) {
var method, operation, responseCode;
path = spec.paths[name];
for (method in path) {
// operation reference
if(method === '$ref') {
// location = path[method];
location = '/paths' + name;
this.resolveInline(root, spec, path, resolutionTable, unresolvedRefs, location);
}
else {
operation = path[method];
var sharedParameters = path.parameters || [];
var parameters = operation.parameters || [];
for (i in sharedParameters) {
var parameter = sharedParameters[i];
parameters.unshift(parameter);
}
if(method !== 'parameters' && _.isObject(operation)) {
operation.parameters = operation.parameters || parameters;
}
for (i in parameters) {
var parameter = parameters[i];
location = '/paths' + name + '/' + method + '/parameters';
if (parameter.in === 'body' && parameter.schema) {
if(_.isArray(parameter.schema.allOf)) {
// move to a definition
var modelName = 'inline_model';
var name = modelName;
var done = false; var counter = 0;
while(!done) {
if(typeof spec.definitions[name] === 'undefined') {
done = true;
break;
}
name = modelName + '_' + counter;
counter ++;
}
spec.definitions[name] = { allOf: parameter.schema.allOf };
delete parameter.schema.allOf;
parameter.schema.$ref = '#/definitions/' + name;
this.processAllOf(root, name, spec.definitions[name], resolutionTable, unresolvedRefs, spec);
}
else {
this.resolveTo(root, parameter.schema, resolutionTable, location);
}
}
if (parameter.$ref) {
// parameter reference
this.resolveInline(root, spec, parameter, resolutionTable, unresolvedRefs, parameter.$ref);
}
}
for (responseCode in operation.responses) {
var response = operation.responses[responseCode];
location = '/paths' + name + '/' + method + '/responses/' + responseCode;
if(_.isObject(response)) {
if(response.$ref) {
// response reference
this.resolveInline(root, spec, response, resolutionTable, unresolvedRefs, location);
}
if (response.schema) {
var responseObj = response;
if(_.isArray(responseObj.schema.allOf)) {
// move to a definition
var modelName = 'inline_model';
var name = modelName;
var done = false; var counter = 0;
while(!done) {
if(typeof spec.definitions[name] === 'undefined') {
done = true;
break;
}
name = modelName + '_' + counter;
counter ++;
}
spec.definitions[name] = { allOf: responseObj.schema.allOf };
delete responseObj.schema.allOf;
delete responseObj.schema.type;
responseObj.schema.$ref = '#/definitions/' + name;
this.processAllOf(root, name, spec.definitions[name], resolutionTable, unresolvedRefs, spec);
}
else if('array' === responseObj.schema.type) {
if(responseObj.schema.items && responseObj.schema.items.$ref) {
// response reference
this.resolveInline(root, spec, responseObj.schema.items, resolutionTable, unresolvedRefs, location);
}
}
else {
this.resolveTo(root, response.schema, resolutionTable, location);
}
}
}
}
}
}
// clear them out to avoid multiple resolutions
path.parameters = [];
}
var expectedCalls = 0, toResolve = [];
// if the root is same as obj[i].root we can resolve locally
var all = resolutionTable;
var parts;
for(i = 0; i < all.length; i++) {
var a = all[i];
if(root === a.root) {
if(a.resolveAs === 'ref') {
// resolve any path walking
var joined = ((a.root || '') + '/' + a.key).split('/');
var normalized = [];
var url = '';
var k;
if(a.key.indexOf('../') >= 0) {
for(var j = 0; j < joined.length; j++) {
if(joined[j] === '..') {
normalized = normalized.slice(0, normalized.length-1);
}
else {
normalized.push(joined[j]);
}
}
for(k = 0; k < normalized.length; k ++) {
if(k > 0) {
url += '/';
}
url += normalized[k];
}
// we now have to remote resolve this because the path has changed
a.root = url;
toResolve.push(a);
}
else {
parts = a.key.split('#');
if(parts.length === 2) {
if(parts[0].indexOf('http:') === 0 || parts[0].indexOf('https:') === 0) {
a.root = parts[0];
}
location = parts[1].split('/');
var r;
var s = spec;
for(k = 0; k < location.length; k++) {
var part = location[k];
if(part !== '') {
s = s[part];
if(typeof s !== 'undefined') {
r = s;
}
else {
r = null;
break;
}
}
}
if(r === null) {
// must resolve this too
toResolve.push(a);
}
}
}
}
else {
if (a.resolveAs === 'inline') {
if(a.key && a.key.indexOf('#') === -1 && a.key.charAt(0) !== '/') {
// handle relative schema
parts = a.root.split('/');
location = '';
for(i = 0; i < parts.length - 1; i++) {
location += parts[i] + '/';
}
location += a.key;
a.root = location;
a.location = '';
}
toResolve.push(a);
}
}
}
else {
toResolve.push(a);
}
}
expectedCalls = toResolve.length;
// resolve anything that is local
for(var ii = 0; ii < toResolve.length; ii++) {
(function(item, spec, self) {
// NOTE: this used to be item.root === null, but I (@ponelat) have added a guard against .split, which means item.root can be ''
if(!item.root || item.root === root) {
// local resolve
self.resolveItem(spec, _root, resolutionTable, resolvedRefs, unresolvedRefs, item);
processedCalls += 1;
if(processedCalls === expectedCalls) {
self.finish(spec, root, resolutionTable, resolvedRefs, unresolvedRefs, callback, true);
}
}
else if(self.failedUrls.indexOf(item.root) === -1) {
var obj = {
useJQuery: false, // TODO
url: item.root,
method: 'get',
headers: {
accept: self.scope.swaggerRequestHeaders || 'application/json'
},
on: {
error: function (error) {
processedCalls += 1;
console.log('failed url: ' + obj.url);
self.failedUrls.push(obj.url);
unresolvedRefs[item.key] = {
root: item.root,
location: item.location
};
if (processedCalls === expectedCalls) {
self.finish(spec, _root, resolutionTable, resolvedRefs, unresolvedRefs, callback);
}
}, // jshint ignore:line
response: function (response) {
var swagger = response.obj;
self.resolveItem(swagger, item.root, resolutionTable, resolvedRefs, unresolvedRefs, item);
processedCalls += 1;
if (processedCalls === expectedCalls) {
self.finish(spec, _root, resolutionTable, resolvedRefs, unresolvedRefs, callback);
}
}
} // jshint ignore:line
};
if (scope && scope.clientAuthorizations) {
scope.clientAuthorizations.apply(obj);
}
new SwaggerHttp().execute(obj, opts);
}
else {
processedCalls += 1;
unresolvedRefs[item.key] = {
root: item.root,
location: item.location
};
if (processedCalls === expectedCalls) {
self.finish(spec, _root, resolutionTable, resolvedRefs, unresolvedRefs, callback);
}
}
}(toResolve[ii], spec, this));
}
if (Object.keys(toResolve).length === 0) {
this.finish(spec, _root, resolutionTable, resolvedRefs, unresolvedRefs, callback);
}
};
Resolver.prototype.resolveItem = function(spec, root, resolutionTable, resolvedRefs, unresolvedRefs, item) {
var path = item.location;
var location = spec, parts = path.split('/');
if(path !== '') {
for (var j = 0; j < parts.length; j++) {
var segment = parts[j];
if (segment.indexOf('~1') !== -1) {
segment = parts[j].replace(/~0/g, '~').replace(/~1/g, '/');
if (segment.charAt(0) !== '/') {
segment = '/' + segment;
}
}
if (typeof location === 'undefined' || location === null) {
break;
}
if (segment === '' && j === (parts.length - 1) && parts.length > 1) {
location = null;
break;
}
if (segment.length > 0) {
location = location[segment];
}
}
}
var resolved = item.key;
parts = item.key.split('/');
var resolvedName = parts[parts.length-1];
if(resolvedName.indexOf('#') >= 0) {
resolvedName = resolvedName.split('#')[1];
}
if (location !== null && typeof location !== 'undefined') {
resolvedRefs[resolved] = {
name: resolvedName,
obj: location,
key: item.key,
root: item.root
};
} else {
unresolvedRefs[resolved] = {
root: item.root,
location: item.location
};
}
};
Resolver.prototype.finish = function (spec, root, resolutionTable, resolvedRefs, unresolvedRefs, callback, localResolve) {
// walk resolution table and replace with resolved refs
var ref;
for (ref in resolutionTable) {
var item = resolutionTable[ref];
var key = item.key;
var resolvedTo = resolvedRefs[key];
if (resolvedTo) {
spec.definitions = spec.definitions || {};
if (item.resolveAs === 'ref') {
if (localResolve !== true) {
// don't retain root for local definitions
for (key in resolvedTo.obj) {
var abs = this.retainRoot(key, resolvedTo.obj[key], item.root);
resolvedTo.obj[key] = abs;
}
}
spec.definitions[resolvedTo.name] = resolvedTo.obj;
item.obj.$ref = '#/definitions/' + resolvedTo.name;
} else if (item.resolveAs === 'inline') {
var targetObj = item.obj;
targetObj['x-resolved-from'] = [ item.key ];
delete targetObj.$ref;
for (key in resolvedTo.obj) {
var abs = resolvedTo.obj[key];
if (localResolve !== true) {
// don't retain root for local definitions
abs = this.retainRoot(key, resolvedTo.obj[key], item.root);
}
targetObj[key] = abs;
}
}
}
}
var existingUnresolved = this.countUnresolvedRefs(spec);
if(existingUnresolved === 0 || this.iteration > 5) {
this.resolveAllOf(spec.definitions);
callback.call(this.scope, spec, unresolvedRefs);
}
else {
this.iteration += 1;
this.resolve(spec, root, callback, this.scope);
}
};
Resolver.prototype.countUnresolvedRefs = function(spec) {
var i;
var refs = this.getRefs(spec);
var keys = [];
var unresolvedKeys = [];
for(i in refs) {
if(i.indexOf('#') === 0) {
keys.push(i.substring(1));
}
else {
unresolvedKeys.push(i);
}
}
// verify possible keys
for (i = 0; i < keys.length; i++) {
var part = keys[i];
var parts = part.split('/');
var obj = spec;
for (var k = 0; k < parts.length; k++) {
var key = parts[k];
if(key !== '') {
obj = obj[key];
if(typeof obj === 'undefined') {
unresolvedKeys.push(part);
break;
}
}
}
}
return unresolvedKeys.length;
};
Resolver.prototype.getRefs = function(spec, obj) {
obj = obj || spec;
var output = {};
for(var key in obj) {
if (!obj.hasOwnProperty(key)) {
continue;
}
var item = obj[key];
if(key === '$ref' && typeof item === 'string') {
output[item] = null;
}
else if(_.isObject(item)) {
var o = this.getRefs(item);
for(var k in o) {
output[k] = null;
}
}
}
return output;
};
Resolver.prototype.retainRoot = function(origKey, obj, root) {
// walk object and look for relative $refs
if(_.isObject(obj)) {
for(var key in obj) {
var item = obj[key];
if (key === '$ref' && typeof item === 'string') {
// stop and inspect
if (item.indexOf('http:') !== 0 && item.indexOf('https:') !== 0) {
// TODO: check if root ends in '/'. If not, AND item has no protocol, make relative
var appendHash = true;
var oldRoot = root;
if (root) {
var lastChar = root.slice(-1);
if (lastChar !== '/' && (item.indexOf('#') !== 0 && item.indexOf('http:') !== 0 && item.indexOf('https:'))) {
appendHash = false;
var parts = root.split('\/');
parts = parts.splice(0, parts.length - 1);
root = '';
for (var i = 0; i < parts.length; i++) {
root += parts[i] + '/';
}
}
}
if (item.indexOf('#') !== 0 && appendHash) {
item = '#' + item;
}
item = (root || '') + item;
obj[key] = item;
}
}
else if (_.isObject(item)) {
this.retainRoot(key, item, root);
}
}
}
else if(_.isString(obj) && origKey === '$ref') {
// look at the ref?
if(obj.indexOf('http:') === -1 && obj.indexOf('https:') === -1) {
obj = root + obj;
}
}
return obj;
};
/**
* immediately in-lines local refs, queues remote refs
* for inline resolution
*/
Resolver.prototype.resolveInline = function (root, spec, property, resolutionTable, unresolvedRefs, location) {
var key = property.$ref, ref = property.$ref, i, p, p2, rs;
var rootTrimmed = false;
root = root || ''; // Guard against .split. @fehguy, you'll need to check if this logic fits
// More importantly, how do we gracefully handle relative urls when provided just a 'spec', not a 'url'?
if (ref) {
if(ref.indexOf('../') === 0) {
// reset root
p = ref.split('../');
p2 = root.split('/');
ref = '';
for(i = 0; i < p.length; i++) {
if(p[i] === '') {
p2 = p2.slice(0, p2.length-1);
}
else {
ref += p[i];
}
}
root = '';
for(i = 0; i < p2.length - 1; i++) {
if(i > 0) { root += '/'; }
root += p2[i];
}
rootTrimmed = true;
}
if(ref.indexOf('#') >= 0) {
if(ref.indexOf('/') === 0) {
rs = ref.split('#');
p = root.split('//');
p2 = p[1].split('/');
root = p[0] + '//' + p2[0] + rs[0];
location = rs[1];
}
else {
rs = ref.split('#');
if(rs[0] !== '') {
p2 = root.split('/');
p2 = p2.slice(0, p2.length - 1);
if(!rootTrimmed) {
root = '';
for (var k = 0; k < p2.length; k++) {
if(k > 0) { root += '/'; }
root += p2[k];
}
}
root += '/' + ref.split('#')[0];
}
location = rs[1];
}
}
if (ref.indexOf('http:') === 0 || ref.indexOf('https:') === 0) {
if(ref.indexOf('#') >= 0) {
root = ref.split('#')[0];
location = ref.split('#')[1];
}
else {
root = ref;
location = '';
}
resolutionTable.push({obj: property, resolveAs: 'inline', root: root, key: key, location: location});
} else if (ref.indexOf('#') === 0) {
location = ref.split('#')[1];
resolutionTable.push({obj: property, resolveAs: 'inline', root: root, key: key, location: location});
} else if (ref.indexOf('/') === 0 && ref.indexOf('#') === -1) {
location = ref;
var matches = root.match(/^https?\:\/\/([^\/?#]+)(?:[\/?#]|$)/i);
if(matches) {
root = matches[0] + ref.substring(1);
location = '';
}
resolutionTable.push({obj: property, resolveAs: 'inline', root: root, key: key, location: location});
}
else {
resolutionTable.push({obj: property, resolveAs: 'inline', root: root, key: key, location: location});
}
}
else if (property.type === 'array') {
this.resolveTo(root, property.items, resolutionTable, location);
}
};
Resolver.prototype.resolveTo = function (root, property, resolutionTable, location) {
var sp, i;
var ref = property.$ref;
var lroot = root;
if ((typeof ref !== 'undefined') && (ref !== null)) {
if(ref.indexOf('#') >= 0) {
var parts = ref.split('#');
// #/definitions/foo
// foo.json#/bar
if(parts[0] && ref.indexOf('/') === 0) {
}
else if(parts[0] && (parts[0].indexOf('http:') === 0 || parts[0].indexOf('https:') === 0)) {
lroot = parts[0];
ref = parts[1];
}
else if(parts[0] && parts[0].length > 0) {
// relative file
sp = root.split('/');
lroot = '';
for(i = 0; i < sp.length - 1; i++) {
lroot += sp[i] + '/';
}
lroot += parts[0];
}
else {
}
location = parts[1];
}
else if (ref.indexOf('http:') === 0 || ref.indexOf('https:') === 0) {
lroot = ref;
location = '';
}
else {
// relative file
sp = root.split('/');
lroot = '';
for(i = 0; i < sp.length - 1; i++) {
lroot += sp[i] + '/';
}
lroot += ref;
location = '';
}
resolutionTable.push({
obj: property, resolveAs: 'ref', root: lroot, key: ref, location: location
});
} else if (property.type === 'array') {
var items = property.items;
this.resolveTo(root, items, resolutionTable, location);
} else {
if(property && property.properties) {
var name = this.uniqueName('inline_model');
if (property.title) {
name = this.uniqueName(property.title);
}
delete property.title;
this.spec.definitions[name] = _.cloneDeep(property);
property['$ref'] = '#/definitions/' + name;
delete property.type;
delete property.properties;
}
}
};
Resolver.prototype.uniqueName = function(base) {
var name = base;
var count = 0;
while(true) {
if(!_.isObject(this.spec.definitions[name])) {
return name;
}
name = base + '_' + count;
count++;
}
};
Resolver.prototype.resolveAllOf = function(spec, obj, depth) {
depth = depth || 0;
obj = obj || spec;
var name;
for(var key in obj) {
if (!obj.hasOwnProperty(key)) {
continue;
}
var item = obj[key];
if(item === null) {
throw new TypeError('Swagger 2.0 does not support null types (' + obj + '). See https://github.com/swagger-api/swagger-spec/issues/229.');
}
if(typeof item === 'object') {
this.resolveAllOf(spec, item, depth + 1);
}
if(item && typeof item.allOf !== 'undefined') {
var allOf = item.allOf;
if(_.isArray(allOf)) {
var output = _.cloneDeep(item);
delete output.allOf;
output['x-composed'] = true;
if (typeof item['x-resolved-from'] !== 'undefined') {
output['x-resolved-from'] = item['x-resolved-from'];
}
for(var i = 0; i < allOf.length; i++) {
var component = allOf[i];
var source = 'self';
if(typeof component['x-resolved-from'] !== 'undefined') {
source = component['x-resolved-from'][0];
}
for(var part in component) {
if(!output.hasOwnProperty(part)) {
output[part] = _.cloneDeep(component[part]);
if(part === 'properties') {
for(name in output[part]) {
output[part][name]['x-resolved-from'] = source;
}
}
}
else {
if(part === 'properties') {
var properties = component[part];
for(name in properties) {
output.properties[name] = _.cloneDeep(properties[name]);
var resolvedFrom = properties[name]['x-resolved-from'];
if (typeof resolvedFrom === 'undefined' || resolvedFrom === 'self') {
resolvedFrom = source;
}
output.properties[name]['x-resolved-from'] = resolvedFrom;
}
}
else if(part === 'required') {
// merge & dedup the required array
var a = output.required.concat(component[part]);
for(var k = 0; k < a.length; ++k) {
for(var j = k + 1; j < a.length; ++j) {
if(a[k] === a[j]) { a.splice(j--, 1); }
}
}
output.required = a;
}
else if(part === 'x-resolved-from') {
output['x-resolved-from'].push(source);
}
else {
// TODO: need to merge this property
// console.log('what to do with ' + part)
}
}
}
}
obj[key] = output;
}
}
}
};
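// Illustration (not taken from the library's documentation): given
//   { allOf: [ { properties: { id: { type: 'integer' } }, required: ['id'] },
//              { properties: { name: { type: 'string' } }, required: ['name'] } ] }
// resolveAllOf replaces the schema with a single merged object containing both
// `id` and `name` under properties (each tagged with x-resolved-from),
// required = ['id', 'name'], and 'x-composed': true.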
<|start_filename|>package.json<|end_filename|>
{
"name": "unity-cloud-build-to-app-center",
"version": "1.0.0",
"description": "Receives a payload from Unity Cloud Build webhook. Downloads the binary and posts to the App Center API. Automating deployment from Unity Cloud Build to App Center.",
"main": "index.js",
"dependencies": {
"crypto-js": "^3.1.9-1",
"dotenv": "^2.0.0",
"express": "^4.14.0",
"swagger-client": "^2.1.16",
"unitycloudbuild-client": "^0.1.6",
"winston": "^3.0.0"
},
"devDependencies": {},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node index.js"
},
"author": "<NAME>",
"license": "ISC"
}
<|start_filename|>index.html<|end_filename|>
Unity Cloud Build -> App Center
Uses webhooks to pull binary (IPA/APK) from UCB and send to App Center.
<|start_filename|>node_modules/unitycloudbuild-client/index.js<|end_filename|>
var Swagger = require('swagger-client');
var apiClient = function(url, token, callback) {
new Swagger({
url: url,
usePromise: true,
authorizations: {
'basicAuth': new CustomAuthHeader(token)
}
})
.then(function (client) {
console.log('Unity Cloud Build client loaded OK.');
callback(client);
})
.catch(function(error) {
console.log('UCB error: ' + error);
});
};
exports.client = apiClient;
var CustomAuthHeader = function(token) {
// Encode "<token>:" as base64 for use in an HTTP Basic Authorization header.
this.enctoken = Buffer.from(token + ':').toString('base64');
};
CustomAuthHeader.prototype.apply = function(obj, authorizations) {
var headerValue = 'Basic ' + this.enctoken;
console.log('header value: ' + headerValue);
obj.headers["Authorization"] = headerValue;
};
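// Example usage (a sketch only; the spec URL below is a placeholder and the token
// is assumed to come from the environment; neither is defined in this module):
//
//   var ucb = require('unitycloudbuild-client');
//   ucb.client('https://build-api.cloud.unity3d.com/api/v1/api.json', process.env.UCB_API_TOKEN, function (client) {
//     // `client` is the initialized swagger-client instance.
//     console.log(Object.keys(client.apis));
//   });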
<|start_filename|>node_modules/najax/coverage/lcov-report/lib/najax.js.html<|end_filename|>
<!doctype html>
<html lang="en">
<head>
<title>Code coverage report for lib/najax.js</title>
<meta charset="utf-8" />
<link rel="stylesheet" href="../prettify.css" />
<link rel="stylesheet" href="../base.css" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<style type='text/css'>
.coverage-summary .sorter {
background-image: url(../sort-arrow-sprite.png);
}
</style>
</head>
<body>
<div class='wrapper'>
<div class='pad1'>
<h1>
<a href="../index.html">all files</a> / <a href="index.html">lib/</a> najax.js
</h1>
<div class='clearfix'>
<div class='fl pad1y space-right2'>
<span class="strong">86.55% </span>
<span class="quiet">Statements</span>
<span class='fraction'>103/119</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">78.82% </span>
<span class="quiet">Branches</span>
<span class='fraction'>67/85</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">94.12% </span>
<span class="quiet">Functions</span>
<span class='fraction'>16/17</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">86.96% </span>
<span class="quiet">Lines</span>
<span class='fraction'>100/115</span>
</div>
</div>
</div>
<div class='status-line high'></div>
<pre><table class="coverage">
<tr><td class="line-count quiet">1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246</td><td class="line-coverage quiet"><span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">63×</span>
<span class="cline-any cline-yes">3×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">2×</span>
<span class="cline-any cline-yes">2×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">7×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">6×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">5×</span>
<span class="cline-any cline-yes">59×</span>
<span class="cline-any cline-yes">5×</span>
<span class="cline-any cline-yes">54×</span>
<span class="cline-any cline-yes">5×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">256×</span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">2×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">62×</span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">58×</span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-no"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">54×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">2×</span>
<span class="cline-any cline-yes">2×</span>
<span class="cline-any cline-yes">2×</span>
<span class="cline-any cline-yes">2×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-yes">64×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">3×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-yes">4×</span>
<span class="cline-any cline-yes">51×</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">1×</span>
<span class="cline-any cline-neutral"> </span></td><td class="text"><pre class="prettyprint lang-js">/* najax
* jquery ajax-stye http requests in node
* https://github.com/alanclarke/najax
*/
var https = require('https')
var http = require('http')
var querystring = require('qs')
var url = require('url')
var zlib = require('zlib')
var $ = require('jquery-deferred')
var parseOptions = require('./parse-options')
var defaults = {
method: 'GET',
rejectUnauthorized: true,
processData: true,
data: '',
contentType: 'application/x-www-form-urlencoded',
headers: {},
setRequestHeader: function (name, value) {
this.headers[name] = value
}
}
/*
method overloading, can use:
-function(url, opts, callback) or
-function(url, callback)
-function(opts)
*/
function najax (uri, options, callback) {
var dfd = new $.Deferred()
var o = Object.assign({}, defaults, parseOptions(uri, options, callback))
var l = url.parse(o.url)
var ssl = l.protocol.indexOf('https') === 0
// DATA
// Per jquery docs / source: encoding is only done
// if processData is true (defaults to true)
// and the data is not already a string
// https://github.com/jquery/jquery/blob/master/src/ajax.js#L518
if (o.data && o.processData && o.method === 'GET') {
o.data = querystring.stringify(o.data)
} else if (o.data && o.processData && typeof o.data !== 'string' && o.method !== 'GET') {
switch (o.contentType) {
case 'application/json':
o.data = JSON.stringify(o.data)
break
case 'application/x-www-form-urlencoded':
o.data = querystring.stringify(o.data)
break
<span class="branch-2 cbranch-no" title="branch not covered" > default:</span>
<span class="cstat-no" title="statement not covered" > o.data = String(o.data)</span>
}
}
/* if get, use querystring method for data */
if (o.data) {
if (o.method === 'GET') {
<span class="missing-if-branch" title="if path not taken" >I</span>if (l.search) {
<span class="cstat-no" title="statement not covered" > l.search += '&' + o.data</span>
} else {
l.search = '?' + o.data
}
} else {
/* set data content type */
o.headers = Object.assign({
'Content-Type': o.contentType + ';charset=utf-8',
'Content-Length': Buffer.byteLength(o.data)
}, o.headers)
}
}
if (o.beforeSend) o.beforeSend(o)
options = {
host: l.hostname,
path: l.pathname + (l.search || ''),
method: o.method,
port: Number(l.port) || (ssl ? 443 : 80),
headers: o.headers,
rejectUnauthorized: o.rejectUnauthorized
}
// AUTHENTICATION
/* add authentication to http request */
if (l.auth) {
options.auth = l.auth
} else if (o.username && o.password) {
options.auth = o.username + ':' + o.password
} else if (o.auth) {
options.auth = o.auth
}
options.auth = o.auth || options.auth
options.agent = o.agent || options.agent
/* for debugging, method to get options and return */
<span class="missing-if-branch" title="if path not taken" >I</span>if (o.getopts) {
<span class="cstat-no" title="statement not covered" > var getopts = [ssl, options, o.data || false, o.success || false, o.error || false]</span>
<span class="cstat-no" title="statement not covered" > return getopts</span>
}
// REQUEST
function notImplemented (name) {
return <span class="fstat-no" title="function not covered" >function () {</span>
<span class="cstat-no" title="statement not covered" > console.error('najax: method jqXHR."' + name + '" not implemented')</span>
<span class="cstat-no" title="statement not covered" > console.trace()</span>
}
}
var jqXHR = {
readyState: 0,
status: 0,
statusText: 'error', // one of: "success", "notmodified", "error", "timeout", "abort", or "parsererror"
setRequestHeader: notImplemented('setRequestHeader'),
getAllResponseHeaders: notImplemented('getAllResponseHeaders'),
statusCode: notImplemented('statusCode'),
abort: notImplemented('abort')
}
var req = (ssl ? https : http).request(options, function (res) {
// Allow getting Response Headers from the XMLHTTPRequest object
dfd.getResponseHeader = jqXHR.getResponseHeader = function getResponseHeader (header) {
return res.headers[header.toLowerCase()]
}
dfd.getAllResponseHeaders = jqXHR.getAllResponseHeaders = function getAllResponseHeaders () {
var headers = []
for (var key in res.headers) {
headers.push(key + ': ' + res.headers[key])
}
return headers.join('\n')
}
function dataHandler (data) {
jqXHR.responseText = data
<span class="missing-if-branch" title="if path not taken" >I</span>if (o.dataType === 'json' || o.dataType === 'jsonp') {
// replace control characters
<span class="cstat-no" title="statement not covered" > try {</span>
<span class="cstat-no" title="statement not covered" > data = JSON.parse(data.replace(/[\cA-\cZ]/gi, ''))</span>
} catch (e) {
<span class="cstat-no" title="statement not covered" > return onError(e)</span>
}
}
var statusCode = res.statusCode
jqXHR.statusText = 'success'
<span class="missing-if-branch" title="if path not taken" >I</span>if (statusCode === 204 || options.method === 'HEAD') {
<span class="cstat-no" title="statement not covered" > jqXHR.statusText = 'nocontent'</span>
} else <span class="missing-if-branch" title="if path not taken" >I</span>if (statusCode === 304) {
<span class="cstat-no" title="statement not covered" > jqXHR.statusText = 'notmodified'</span>
}
// Determine if successful
// (per https://github.com/jquery/jquery/blob/master/src/ajax.js#L679)
var isSuccess = statusCode >= 200 && statusCode < 300 || <span class="branch-2 cbranch-no" title="branch not covered" >statusCode === 304</span>
// Set readyState
jqXHR.readyState = statusCode > 0 ? 4 : <span class="branch-1 cbranch-no" title="branch not covered" >0</span>
jqXHR.status = statusCode
<span class="missing-if-branch" title="else path not taken" >E</span>if (isSuccess) {
// success, statusText, jqXHR
dfd.resolve(data, jqXHR.statusText, jqXHR)
} else {
// jqXHR, statusText, error
// When an HTTP error occurs, errorThrown receives the textual portion of the
// HTTP status, such as "Not Found" or "Internal Server Error."
<span class="cstat-no" title="statement not covered" > jqXHR.statusText = 'error'</span>
<span class="cstat-no" title="statement not covered" > onError(new Error(http.STATUS_CODES[statusCode]))</span>
}
}
var chunks = []
res.on('data', function (chunk) { chunks.push(chunk) })
res.on('end', function () {
var buffer = Buffer.concat(chunks)
var encoding = res.headers['content-encoding']
if (encoding === 'gzip') {
zlib.gunzip(buffer, function (err, buffer) {
<span class="missing-if-branch" title="if path not taken" >I</span>if (err) {
<span class="cstat-no" title="statement not covered" > onError(err)</span>
} else {
dataHandler(buffer.toString())
}
})
} else if (encoding === 'deflate') {
zlib.inflate(buffer, function (err, buffer) {
<span class="missing-if-branch" title="if path not taken" >I</span>if (err) {
<span class="cstat-no" title="statement not covered" > onError(err)</span>
} else {
dataHandler(buffer.toString())
}
})
} else {
dataHandler(buffer.toString())
}
})
})
// ERROR
req.on('error', onError)
function onError (e) {
// Set data for the fake xhr object
<span class="missing-if-branch" title="if path not taken" >I</span>if (jqXHR.statusText === 'error') <span class="cstat-no" title="statement not covered" >jqXHR.responseText = e.stack</span>
// jqXHR, statusText, error
dfd.reject(jqXHR, jqXHR.statusText, e)
}
// SET TIMEOUT
if (o.timeout && o.timeout > 0) {
req.setTimeout(o.timeout, function () {
req.abort()
jqXHR.statusText = 'timeout'
onError(new Error('timeout'))
})
}
// SEND DATA
if (o.method !== 'GET' && o.data) req.write(o.data, 'utf-8')
req.end()
// DEFERRED
dfd.done(o.success)
dfd.done(o.complete)
dfd.fail(o.error)
dfd.fail(o.complete)
dfd.success = dfd.done
dfd.error = dfd.fail
return dfd
}
najax.defaults = function defaults (opts) {
return Object.assign(defaults, opts)
}
/* auto rest interface go! */
;['GET', 'POST', 'PUT', 'DELETE'].forEach(handleMethod)
function handleMethod (method) {
najax[method.toLowerCase()] = function methodHandler (uri, options, callback) {
return najax(Object.assign(parseOptions(uri, options, callback), { method: method }))
}
}
module.exports = najax
</pre></td></tr>
</table></pre>
<div class='push'></div><!-- for sticky footer -->
</div><!-- /wrapper -->
<div class='footer quiet pad2 space-top1 center small'>
Code coverage
generated by <a href="http://istanbul-js.org/" target="_blank">istanbul</a> at Mon Jun 06 2016 22:48:24 GMT+0100 (BST)
</div>
</div>
<script src="../prettify.js"></script>
<script>
window.onload = function () {
if (typeof prettyPrint === 'function') {
prettyPrint();
}
};
</script>
<script src="../sorter.js"></script>
</body>
</html>
| hugobozzshih007/unity-cloud-build-app-center |
<|start_filename|>questions.js<|end_filename|>
questions = [
//Reform vs Conserve
{ "question": "Homosexuality is against my personal values.",
"i18n":{"ko": "동성애는 나의 개인적인 도덕적 가치에 어긋난다.",
"ru": "гомосексуализм противоречит моим личным ценностям.",
"cn": "同性恋违背我的个人价值观。",
"ja": "同性愛は私の価値観に反する"},
"effect": { "a": -5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "A crackdown on pornography is beneficial to society.",
"i18n":{"ko": "포르노에 대한 단속은 사회에 도움이 된다.",
"ru": "Борьба с порнографией полезна для общества.",
"cn": "打击色情制品对社会有利。",
"ja": "ポルノの厳格な取り締まりは社会にとって有益だ"},
"effect": { "a": -5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "The usage of marijuana is immoral and should be criminalized.",
"i18n":{"ko": "대마초는 범죄화되어야 합니다.",
"ru": "Марихуана должна быть запрещена.",
"cn": "大麻的使用是不道德的,所以大麻应该是非法的。",
"ja": "大麻の使用は不道徳なので、処罰されるべきだ"},
"effect": { "a": -5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "I disagree with most of the values that Progressivism advocates.",
"i18n":{"ko": "나는 진보주의가 옹호하는 대부분의 가치관에 동의하지 않는다.",
"ru": "Отрицательные стороны быстрой реформы перевешивают её положительные.",
"cn": "我反对进步主义倡导的大多数价值观。",
"ja": "私はほとんどの進歩主義的な価値観に同意しない"},
"effect": { "a": -5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Abortion is murder and should be criminalized in most or all cases.",
"i18n":{"ko": "낙태의 합법화는 유익하다.",
"ru": "Легализация абортов вредна, потому что это убийство.",
"cn": "在大多数情况下,堕胎应该是非法的,因为它等同于谋杀。",
"ja": "多くのまたは全ての中絶は殺人であるから、処罰されるべきだ"},
"effect": { "a": -5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Gender is a social construct.",
"i18n":{"ko": "성별간의 역할 차이는 사회적으로 구성된 것이다.",
"ru": "гендер - это социальная конструкция.",
"cn": "性别是一种社会建构。",
"ja": "ジェンダーは社会的に構築されたものだ"},
"effect": { "a": 5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "I support (Third/Fourth Wave) Feminism.",
"i18n":{"ko": "나는 성소수자(3세대)와 정치적 올바름(4세대)을 위한 페미니즘을 지지한다.",
"ru": "Я поддерживаю третью/четвёртую волну Феминизма.",
"cn": "我支持(第三/第四波)女权主义。",
"ja": "(第三波/第四波)フェミニズムを支持している"},
"effect": { "a": 5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "The LGBT movement deserves more respect.",
"i18n":{"ko": "성소수자 운동은 더 존중 받을 만하다.",
"ru": "ЛГБТ-движение заслуживает большого уважения.",
"cn": "LGBT+运动应该得到更多的尊重。",
"ja": "LGBT運動はもっと評価されるべきだ"},
"effect": { "a": 5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "I support criminal justice reform in my country.",
"i18n":{"ko": "나는 우리나라의 형사 사법 개혁을 지지합니다.",
"ru": "Я поддерживаю реформу уголовного правосудия в моей стране.",
"cn": "我支持我的国家进行刑事司法改革。",
"ja": "私の国の刑事・司法は改革されるべきだ"},
"effect": { "a": 5, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
//Intervention vs Laissez-Faire
{ "question": "The freer the markets, the freer the people.",
"i18n":{"ko": "시장이 자유로울수록, 사람들은 자유로워진다.",
"ru": "чем свободнее рынок, тем свободнее люди.",
"cn": "在我看来,'市场越自由,人民越自由' 是正确的。",
"ja": "市場が自由なほど、人々も自由だ"},
"effect": { "a": 0, "b": -5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Economic regulation causes economic stagnation and the overall reduction of productivity.",
"i18n":{"ko": "경제 규제는 경제 침체와 전반적인 생산성 감소를 야기한다.",
"ru": "Экономическое регулирование со стороны правительства уменьшает положительные преимущества капитализма.",
"cn": "经济管制降低了资本主义的积极利益。",
"ja": "経済的規制は経済を停滞させ、全体的な生産効率を下げる"},
"effect": { "a": 0, "b": -5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Insurance should be used voluntarily by individuals rather than welfare.",
"i18n":{"ko": "보험은 복지보다는 개인이 자발적으로 사용해야 한다.",
"ru": "Страхование должно использоваться человеком добровольно.",
"cn": "公民应该用保险来代替政府福利。",
"ja": "保険は福祉の一部としてではなく、個人が自由意志に基づいて契約すべきものだ"},
"effect": { "a": 0, "b": -5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "The gain of companies within the market directly correlate to the gain of the individual.",
"i18n":{"ko": "기업의 이익은 개인의 이익과 직결된다.",
"ru": "Прибыль фирмы напрямую соотносится с прибылью индивида.",
"cn": "市场内公司的收益与个人的收益直接相关。",
"ja": "企業の市場における利潤が多いほど、個人も利益を得る"},
"effect": { "a": 0, "b": -5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Government is a bigger cause of economic inequality than Capitalism is.",
"i18n":{"ko": "우리는 가장 작은 정부와 함께 완전히 기능하는 시장을 가져야 한다.",
"ru": "Правительство является большей причиной экономического неравенства, чем капитализм.",
"cn": "与资本主义相比,政府是经济不平等的最大促成者。",
"ja": "経済的不平等のより大きな原因は、資本主義ではなく政府だ"},
"effect": { "a": 0, "b": -5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Economic regulation is necessary for a fair and equal society.",
"i18n":{"ko": "자유방임 경제는 독점을 형성시키고 계층의 양극화를 악화시킨다.",
"ru": "Экономика невмешательства приводит к образованию монополий и усугубляет классовое разделение.",
"cn": "公司的压迫比政府的压迫更令人关注。",
"ja": "公平・公正な社会を実現するためには、経済的な規制だ"},
"effect": { "a": 0, "b": 5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "I prefer a planned economy over a market economy.",
"i18n":{"ko": "나는 시장경제보다 계획경제를 선호한다.",
"ru": "Я предпочитаю плановую экономику рыночной.",
"cn": "计划经济优于市场经济。",
"ja": "私は市場経済よりも計画経済の方が好きだ。"},
"effect": { "a": 0, "b": 5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "It is necessary for the government to intervene in the economy to protect its consumers.",
"i18n":{"ko": "정부는 소비자를 보호하기 위해 경제에 개입할 필요가 있다.",
"ru": "Правительству необходимо вмешаться в экономику, чтобы защитить потребителей.",
"cn": "政府有必要干预经济以保护其消费者。",
"ja": "消費者を守るためには、政府は経済へ介入する必要がある"},
"effect": { "a": 0, "b": 5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "We need to have more welfare compared to what we have currently.",
"i18n":{"ko": "우리는 우리가 현재 가진 복지보다 더 많은 복지를 가질 필요가 있다.",
"ru": "Мы должны увеличить налоги в обмен на улучшение достатка.",
"cn": "与目前相比,我们需要有更多的福利。",
"ja": "私の国は今よりも福祉を充実させるべきだ"},
"effect": { "a": 0, "b": 5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "The minimum wage should be increased.",
"i18n":{"ko": "최저임금을 올려야 한다.",
"ru": "Минимальная заработная плата должна быть увеличена.",
"cn": "最低工资应该被提高。",
"ja": "最低賃金を引き上げるべきだ"},
"effect": { "a": 0, "b": 5, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0 } },
//Dictatorship vs Constitution
{ "question": "The state should be abolished.",
"i18n":{"ko": "국가는 폐지되어야 합니다.",
"ru": "Правительство должно быть упразднено.",
"cn": "政府应该被废除。",
"ja": "政府は廃止されるべきだ"},
"effect": { "a": 0, "b": 0, "c": -5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "I would rather have a constitutional monarchy than an absolute monarchy.",
"i18n":{"ko": "나는 차라리 절대왕정보다는 입헌군주제를 택할 것이다.",
"ru": "Я бы предпочел конституционную монархию, чем абсолютную.",
"cn": "我宁愿拥有君主立宪制,也不愿拥有绝对君主制。",
"ja": "絶対君主制に比べれば、立憲君主制のほうがマシだ"},
"effect": { "a": 0, "b": 0, "c": -5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "I support a constitution that limits the state's power.",
"i18n":{"ko": "강한 정부는 폭정의 위험을 증가시켜 국가를 매우 불안정하게 만든다.",
"ru": "Я поддерживаю конституцию, которая ограничивает власть государства.",
"cn": "我支持一个限制政府权力的宪法。",
"ja": "私は、国家の権力を制限する憲法を支持する"},
"effect": { "a": 0, "b": 0, "c": -5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Rights are inalienable and natural, as the government is not the source of our rights.",
"i18n":{"ko": "정부가 우리 권리의 원천이 아니기 때문에 권리는 양도할 수 없고 자연스럽습니다.",
"ru": "Права являются неотъемлемыми и естественными, поскольку правительство не является источником наших прав.",
"cn": "人权是不可剥夺和与生俱来的,因为政府不是我们权利的来源。",
"ja": "権利とは政府が付与するものではなく、すべての人が自然に持つ不可侵のものだ"},
"effect": { "a": 0, "b": 0, "c": -5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "The state can be best defined as a monopoly on violence.",
"i18n":{"ko": "국가는 폭력에 대한 독점으로 가장 잘 정의될 수 있습니다.",
"ru": "Более сильное правительство увеличивают риск тирании, что делает нацию очень изменчивой.",
"cn": "我宁愿选择民主而不是专政。",
"ja": "国家は暴力を独占する存在として、もっともよく説明できる"},
"effect": { "a": 0, "b": 0, "c": -5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "A strong government is required for a safe society.",
"i18n":{"ko": "안전한 사회를 위해 강력한 정부가 필요하다.",
"ru": "Для безопасности необходимо сильное правительство.",
"cn": "一个安全的社会需要强大的政府。",
"ja": "安全な社会を実現するには強い政府が不可欠だ"},
"effect": { "a": 0, "b": 0, "c": 5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "I prioritize security over liberty.",
"i18n":{"ko": "나는 자유보다 안보를 우선시한다.",
"ru": "Я отдаю приоритет безопасности, а не свободе.",
"cn": "我把安全置于自由之上。",
"ja": "私は自由よりも治安維持を優先する"},
"effect": { "a": 0, "b": 0, "c": 5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "I prefer a unitary state over a federal state.",
"i18n":{"ko": "나는 연방 주보다 단일 주를 선호합니다.",
"ru": "Я предпочитаю унитарное государство федеративному.",
"cn": "与联邦政府相比,我更喜欢单一制政府。",
"ja": "連邦国家よりも単一国家の方がよい"},
"effect": { "a": 0, "b": 0, "c": 5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Theoretically, a morally good dictator would bring more good than bad.",
"i18n":{"ko": "사심 없는 독재자의 통치는 내게 이상적이다.",
"ru": "Правление бескорыстного и самоотверженного диктатора идеально для меня.",
"cn": "一个无私的独裁者的统治对我来说是理想的。",
"ja": "理論上は、無欲な指導者による独裁は弊害よりも利益をもたらす"},
"effect": { "a": 0, "b": 0, "c": 5, "d": 0, "e": 0, "f": 0, "g": 0 } },
{ "question": "Autocratic governments are more efficient than democratic governments.",
"i18n":{"ko": "나는 민주주의보다 독재를 택하겠다.",
"ru": "Я бы предпочел диктатуру демократии.",
"cn": "专制政府比民主政府更有效率。",
"ja": "民主政治より専制政治の方が効率的だ"},
"effect": { "a": 0, "b": 0, "c": 5, "d": 0, "e": 0, "f": 0, "g": 0 } },
//Particular vs Universal
{ "question": "My nation's ideals are better than most other nations' ideals.",
"i18n":{"ko": "우리나라의 이상은 대부분 나라들의 이상보다 낫다.",
"ru": "Идеалы моей нации лучше идеалов большинства других наций.",
"cn": "我国家的理想比大多数其他国家的理想都要好。",
"ja": "私の国の理想はほかの国のそれもより優れている"},
"effect": { "a": 0, "b": 0, "c": 0, "d": -5, "e": 0, "f": 0, "g": 0 } },
{ "question": "The enemy of my enemy is my friend.",
"i18n":{"ko": "적의 적은 나의 친구이다.",
"ru": "Враг моего врага - мой друг.",
"cn": "我的敌人的敌人就是我的盟友。",
"ja": "外交において、敵の敵は味方である"},
"effect": { "a": 0, "b": 0, "c": 0, "d": -5, "e": 0, "f": 0, "g": 0 } },
{ "question": "Foreign intervention is sometimes necessary.",
"i18n":{"ko": "해외에 자유를 제공하기 위해 독재 정권을 침략하는 것은 정당하다.",
"ru": "Мы должны больше сосредоточиться на проблемах за пределами нашей страны.",
"cn": "我们应该更加关注我们国家以外的问题。",
"ja": "時には他国への介入必要だ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": -5, "e": 0, "f": 0, "g": 0 } },
{ "question": "Foreign officials have no right to dictate policy in another country.",
"i18n":{"ko": "외국 관리들은 다른 나라에서 정책을 지시할 권리가 없습니다. ",
"ru": "Иностранные чиновники не имеют права диктовать политику в другой стране.",
"cn": "外交官员不应干预另一个国家的事务。",
"ja": "外交官には、他国の政治について決定する権利はない"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 5, "e": 0, "f": 0, "g": 0 } },
{ "question": "My country's borders should be expanded.",
"i18n":{"ko": "조국의 국경을 넓혀야 합니다. ",
"ru": "Границы моей страны должны быть расширены.",
"cn": "我的国家的边界应该被扩大。",
"ja": "私の国はもっと国土を広げるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": -5, "e": 0, "f": 0, "g": 0 } },
{ "question": "We should avoid conflict when possible.",
"i18n":{"ko": "우리는 가능하면 갈등을 피해야 한다.",
"ru": "Мы должны избегать конфликтов, когда это возможно.",
"cn": "我们应该尽可能避免冲突。",
"ja": "紛争は原則として避けるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 5, "e": 0, "f": 0, "g": 0 } },
{ "question": "Political unions like the EU generally do more harm than good.",
"i18n":{"ko": "국제 동맹은 일반적으로 해롭다.",
"ru": "Международные организации, как правило, вредны.",
"cn": "国际联盟通常是有害的。",
"ja": "EUのような国際的な連合の多くは有害だ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 5, "e": 0, "f": 0, "g": 0 } },
{ "question": "I support isolationism.",
"i18n":{"ko": "저는 고립주의를 지지합니다.",
"ru": "Я поддерживаю изоляционизм.",
"cn": "我支持孤立主义。",
"ja": "私は孤立主義を支持する"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 5, "e": 0, "f": 0, "g": 0 } },
{ "question": "I support a non-interventionist foreign policy.",
"i18n":{"ko": "우리의 이상을 전파하기보다는 평화와 개별주의에 주로 힘써야 한다.",
"ru": "Я поддерживаю невмешательскую внешнюю политику.",
"cn": "我支持不干涉主义的外交政策。",
"ja": "私は非介入主義を支持する"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 5, "e": 0, "f": 0, "g": 0 } },
{ "question": "My country spends too much money on the military.",
"i18n":{"ko": "우리나라는 군대에 너무 많은 돈을 쓴다.",
"ru": "Моя страна тратит слишком много денег на вооруженные силы.",
"cn": "我的国家在军事上花了太多的钱。",
"ja": "私の国は軍事にお金をかけすぎている"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 5, "e": 0, "f": 0, "g": 0 } },
//Theocracy vs Secular
{ "question": "Public schools should not advocate for any religious belief.",
"i18n":{"ko": "공립학교는 어떤 종교적 신념도 옹호해서는 안 됩니다. ",
"ru": "Государственные школы не должны пропагандировать какие-либо религиозные убеждения. ",
"cn": "公立学校不应倡导任何宗教信仰。",
"ja": "公立学校は、いかなる宗教的信条も擁護してはならない"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": -5, "f": 0, "g": 0 } },
{ "question": "Religion should play a much smaller role in government.",
"i18n":{"ko": "종교는 현재에 비해 훨씬 더 작은 역할을 해야 한다.",
"ru": "Религия должна играть гораздо меньшую роль по сравнению с нынешней.",
"cn": "宗教的作用应该比现在小得多。",
"ja": "宗教は、今以上に政治に対する影響力を失うべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": -5, "f": 0, "g": 0 } },
{ "question": "Churches should be taxed the same way as how most other organizations are taxed.",
"i18n":{"ko": "대부분의 다른 조직들처럼 교회에도 세금이 부과되어야 한다.",
"ru": "Церкви должны облагаться такими же налогами, как и другие организации.",
"cn": "教会应该像大多数其他组织一样被征税。",
"ja": "宗教団体もほかの団体と同様の制度で課税されるべきだ。"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": -5, "f": 0, "g": 0 } },
{ "question": "The church and state needs to be seperated.",
"i18n":{"ko": "교회와 국가는 분리되어야 합니다.",
"ru": "Церковь и государство должны быть отделены друг от друга.",
"cn": "教会和政府应该分离。",
"ja": "国家と宗教団体は明確に分離されなければならない"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": -5, "f": 0, "g": 0 } },
{ "question": "All theocracies are tyrannies.",
"i18n":{"ko": "모든 신정은 폭정이다.",
"ru": "Все теократии - это тирании.",
"cn": "神权统治经常导致宗教的腐败和歪曲。",
"ja": "すべての神権政治は独裁だ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": -5, "f": 0, "g": 0 } },
{ "question": "There should be one or more official religion(s) in my country.",
"i18n":{"ko": "우리 나라에는 최소한 하나의 공식 종교가 있어야 합니다.",
"ru": "В моей стране должна быть официальная религия.",
"cn": "我国应该有一个或多个官方宗教。",
"ja": "私の国にも、ひとつまたは複数の国教があるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 5, "f": 0, "g": 0 } },
{ "question": "My religion should be spread as much as possible.",
"i18n":{"ko": "나의 종교는 가능한 한 많이 전파되어야 한다.",
"ru": "Мои религиозные ценности должны быть распространены как можно шире.",
"cn": "我的宗教信仰应尽可能地传播。",
"ja": "私の信仰はできる限り広められるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 5, "f": 0, "g": 0 } },
{ "question": "My nation is inherently a religious nation.",
"i18n":{"ko": "우리 민족은 종교 국가로 건국되었습니다.",
"ru": "Моя нация по своей сути является религиозной нацией.",
"cn": "我的国家本来就是一个宗教国家。",
"ja": "私の国は本来、宗教国家だ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 5, "f": 0, "g": 0 } },
{ "question": "Secularism is unhealthy for society.",
"i18n":{"ko": "세속주의는 사회에 해롭다.",
"ru": "Секуляризм вреден для общества.",
"cn": "世俗主义对社会是不健康的。",
"ja": "世俗主義は社会を不健全にする"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 5, "f": 0, "g": 0 } },
{ "question": "If I can vote, I would consider the candidates' religious beliefs when voting.",
"i18n":{"ko": "내가 투표할 수 있다면 투표할 때 후보자의 종교적 신념을 고려할 것입니다. ",
"ru": "Если я смогу голосовать, я буду учитывать религиозные убеждения кандидатов при голосовании.",
"cn": "如果我可以投票,我会在投票时考虑候选人的宗教信仰。",
"ja": "投票の際は、候補者の宗教的信条を考慮する"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 5, "f": 0, "g": 0 } },
//Acceleration vs Deceleration
{ "question": "Rapid technological advancement causes more harm than good.",
"i18n":{"ko": "급속한 기술 발전은 득보다 실이 많다.",
"ru": "Быстрый технический прогресс приносит больше вреда, чем пользы.",
"cn": "迅速的技术进步带来的弊大于利。",
"ja": "急激な技術発展は、恩恵よりも弊害をもたらす"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": -5, "g": 0 } },
{ "question": "Life before the industrial revolution was much better.",
"i18n":{"ko": "산업 혁명 이전의 삶은 지금보다 훨씬 더 좋았다.",
"ru": "Жизнь до промышленной революции была лучше.",
"cn": "工业革命之前的生活要好得多。",
"ja": "産業革命以前の生活はもっと良かった"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": -5, "g": 0 } },
{ "question": "Modern urban life is unfulfilling because of technological development.",
"i18n":{"ko": "삶이 너무 복잡해지고 있기 때문에 우리는 한 발짝 물러나야 한다.",
"ru": "Современная городская жизнь не приносит удовлетворения из-за развития технологий.",
"cn": "由于科技的发展,现代城市生活是不充实的。",
"ja": "現代の都市生活が満足いくものでないのは、技術発展のせいだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": -5, "g": 0 } },
{ "question": "If given the choice, I would reject an extension of my lifespan by centuries.",
"i18n":{"ko": "선택권이 주어진다면 나는 내 수명을 수백 년 연장하는 것을 거부할 것입니다.",
"ru": "Если бы мне предоставили выбор, я бы отказался от продления своей жизни на столетия.",
"cn": "如果可以选择,我将拒绝将我的寿命延长几个世纪。",
"ja": "もしそれが可能だったとしても、自分の寿命を何世紀も延ばせるような技術は拒否する"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": -5, "g": 0 } },
{ "question": "Transhumanism is an absurd idea.",
"i18n":{"ko": "트랜스휴머니즘(인간은 자신의 한계를 극복하기 위해 과학과 기술을 통해 인체를 개조해야 한다는 생각)은 터무니없다.",
"ru": "Трансгуманизм - это абсурдная идея.",
"cn": "超人类主义是荒谬的。",
"ja": "科学技術を用いた人体改造や拡張(トランスヒューマニズム)は馬鹿げた考えだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": -5, "g": 0 } },
{ "question": "It is foolish to expect renewable energy sources to be able to fully replace fossil fuels.",
"i18n":{"ko": "재생 가능 에너지원이 화석 연료를 완전히 대체할 수 있다고 기대하는 것은 어리석은 일입니다. ",
"ru": "Глупо ожидать, что возобновляемые источники энергии смогут полностью заменить ископаемое топливо.",
"cn": "可再生能源不能完全取代化石燃料。",
"ja": "再生可能エネルギーが化石燃料に完全に取って代わることを期待するのは愚かなことである"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 5, "g": 0 } },
{ "question": "We should accelerate our technological progress.",
"i18n":{"ko": "우리는 우리의 기술적 진보를 가속화해야 한다.",
"ru": "Мы должны ускорить наш технический прогресс.",
"cn": "我们应该加快技术进步。",
"ja": "技術発展は促進されるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 5, "g": 0 } },
{ "question": "Promoting nuclear energy is more effective than implementing environmental regulations.",
"i18n":{"ko": "원자력 발전은 환경 규제를 시행하는 것보다 더 효과적입니다.",
"ru": "Вместо того чтобы пытаться ограничивать фабрики, мы должны позволить технологии развиваться и найти более простое решение в будущем.",
"cn": "我们不应该试图限制工厂,而应该采用核能。",
"ja": "環境保護のためには、工場に規制をかけるよりも原子力を推進すべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 5, "g": 0 } },
{ "question": "Space colonization as an issue should be considered more seriously.",
"i18n":{"ko": "문제로서의 우주식민화는 보다 심각하게 고려되어야 한다. ",
"ru": "Космическая колонизация как проблема должна рассматриваться более серьезно.",
"cn": "我们应更认真地考虑太空定居这个问题。",
"ja": "宇宙開拓の可能性を、もっと真剣に考えるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 5, "g": 0 } },
{ "question": "Currently, people are too paranoid about AI.",
"i18n":{"ko": "현재 사람들은 AI에 대해 너무 편집증적입니다. ",
"ru": "Currently, people are too paranoid about AI.",
"cn": "我们应该加快技术进步。",
"ja": "人々はAIを恐れすぎている"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 5, "g": 0 } },
//Assimilation vs Multicultural
{ "question": "Governments should be as concerned about illegal immigrants as they are concerned about ordinary citizens.",
"i18n":{"ko": "불법 이민자들과 일반 시민들은 같은 대우를 받을 자격이 있다.",
"ru": "Нелегальные иммигранты и обычные граждане заслуживают одинакового обращения.",
"cn": "非法移民和普通公民应享有同样的待遇。",
"ja": "不法入国者は一般市民と同じ待遇を政府から受けられるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": -5 } },
{ "question": "Most, if not all societies should be a blend of different cultures.",
"i18n":{"ko": "모든 사회는 아니더라도 대부분의 경우 서로 다른 문화가 혼합되어야 합니다. ",
"ru": "Большинство, если не все общества, должны представлять собой смесь различных культур.",
"cn": "大多数的社会应该是融合不同的文化。",
"ja": "異なる文化の融合は望ましい"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": -5 } },
{ "question": "Immigration should be easier than it is now.",
"i18n":{"ko": "이민은 지금보다 쉬워야합니다. ",
"ru": "Иммиграция должна быть проще, чем сейчас.",
"cn": "移民应该比现在更容易。",
"ja": "移民の規制は緩和されるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": -5 } },
{ "question": "There shouldn't be an official language in my country.",
"i18n":{"ko": "우리나라에는 공식 언어가 없어야 합니다. ",
"ru": "В моей стране не должно быть официального языка.",
"cn": "我的国家不应该有官方语言。",
"ja": "私の国に公用語はないべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": -5 } },
{ "question": "Nativism is discriminatory in nature.",
"i18n":{"ko": "원주민주의는 본질적으로 차별적입니다. ",
"ru": "Нативизм носит дискриминационный характер.",
"cn": "本土主义在本质上是歧视性的。",
"ja": "排外主義は本質的に差別的だ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": -5 } },
{ "question": "In general, immigrants are bad for the country.",
"i18n":{"ko": "너무 많은 이민자는 경제적 쇠퇴와 이념적 급진화를 불러온다.",
"ru": "Нелегальная иммиграция в большинстве случаев является морально неправильной.",
"cn": "一般来说,移民对国家不利。",
"ja": "ほとんどの移民は国にとって有害だ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 5 } },
{ "question": "Immigrants should be expected to learn the majority language of the country.",
"i18n":{"ko": "이민자들은 그 나라의 대부분의 언어를 배워야 합니다.",
"ru": "Иммигранты должны изучать язык большинства населения страны.",
"cn": "移民应该被期望学习该国的多数语言。",
"ja": "移民は、その国でもっともよく使われる言語を習得すべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 5 } },
{ "question": "Immigrants can never really adapt to our way of life.",
"i18n":{"ko": "이민자들은 결코 우리의 생활 방식에 적응할 수 없습니다. ",
"ru": "Иммигранты никогда не смогут по-настоящему адаптироваться к нашему образу жизни.",
"cn": "移民永远无法完全适应我们的生活方式。",
"ja": "移民は、私たちの文化に本当の意味で順応することなどできない"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 5 } },
{ "question": "There should be a shared and unified cultural identity in my country.",
"i18n":{"ko": "우리나라에는 공유되고 통일된 문화적 정체성이 있어야 합니다. ",
"ru": "В моей стране должна быть общая и единая культурная идентичность.",
"cn": "在我国应该有一个共同的、统一的文化。",
"ja": "私の国では、共有され、統一された文化的アイデンティティがあるべきだ"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 5 } },
{ "question": "If we accept migrants at all, it is important that they assimilate into our culture.",
"i18n":{"ko": "우리가 이주민을 받아들인다면 그들이 우리 문화에 동화되는 것이 중요합니다. ",
"ru": "Если мы вообще принимаем мигрантов, важно, чтобы они ассимилировались в нашей культуре.",
"cn": "如果我们接受移民,他们应与我们的文化同化。",
"ja": "移民を受け入れるのならば、移民は私たちの文化に同化する必要がある"},
"effect": { "a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 5 } },
];
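// Scoring sketch (an assumption about how this data is meant to be consumed; it is
// not code taken from this repository). Each answer is treated as a multiplier in
// [-1, 1] (strongly disagree = -1, neutral = 0, strongly agree = 1) and the weighted
// effects are summed per axis (a..g).
function computeScores(answers) {
  var totals = { a: 0, b: 0, c: 0, d: 0, e: 0, f: 0, g: 0 };
  for (var i = 0; i < questions.length; i++) {
    var effect = questions[i].effect;
    for (var axis in effect) {
      if (effect.hasOwnProperty(axis)) {
        totals[axis] += effect[axis] * (answers[i] || 0);
      }
    }
  }
  return totals;
}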
<|start_filename|>ideologies.js<|end_filename|>
ideologies = [
//Totalitarianism
{ "name": "<a href='https://en.wikipedia.org/wiki/Fascism'> Fascism </a>",
"desc": "Fascism is a form of authoritarian ultranationalism characterized by dictatorial power, forcible suppression of opposition, and strong regimentation of society and of the economy, which came to prominence in early 20th-century Europe.",
"i18n": {
"ko": { "name": "파시즘",
"desc": "파시즘은 권위주의적 초국가주의를 수용하는 독재 이데올로기의 한 형태입니다. 조반니 젠틸레는 이 사상을 창시했으며, 유명한 파시스트로는 베니토 무솔리니가 있습니다." },
"ru": { "name": "Фашизм",
"desc": "Это форма диктаторской идеологии, которая представляет собой авторитарный ультранационализм. Основателем этой идеологии является Джованни Джентил, а самый известный фашист в истории - Бенито Муссолини." },
"cn": { "name": "法西斯主义",
"desc": "法西斯主义是专制主义的一种形式,它包含了专制的超民族主义。乔万尼•金泰勒(Giovanni Gentile)创立了这种意识形态,著名的法西斯主义者包括墨索里尼。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E3%83%95%E3%82%A1%E3%82%B7%E3%82%BA%E3%83%A0'>ファシズム</a>",
"desc": "ファシズム(Fascism)は、権威主義的なウルトラナショナリズム(超国家主義)的思想を持つ、独裁を支持する学派の一つです。ジェンティーレ(<NAME>)を始祖とします。有名なファシストとして、ムッソリーニ(<NAME>)がいます。" }},
"stats": { "a": 20, "b": 85, "c": 90, "d": 10, "e": 50, "f": 50, "g": 80 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Nazism'> National Socialism </a>",
"desc": "National Socialism, often shortened as Nazism, is a totalitarian ideology that has disdain for liberal democracy and the parliamentary system, and advocates for ideas like antisemitism, anti-communism, scientific racism, German nationalism, and eugenics.",
"i18n": {
"ko": { "name": "나치즘",
"desc": "나치즘은 인종과 민족에 근거하여 힘과 기회를 할당하는 파시즘의 한 형태입니다. 그것은 안톤 드렉슬러가 창당하고 아돌프 히틀러가 대중화한 나치당의 이념이었습니다." },
"ru": { "name": "Нацизм",
"desc": "Нацизм - это форма фашизма, которая присваивает власть и привелегии на основе расы и этнической принадлежности. Это была идеология Нацистской партии (НСДАП), которая основана Антоном Дрекслером и популяризирована Адольфом Гитлером." },
"cn": { "name": "纳粹主义",
"desc": "纳粹主义是一种基于种族和种族来分配权力和机会的法西斯主义形式。这是纳粹党的意识形态,它是由安东·德雷克斯勒(<NAME>)创立并由阿道夫·希特勒(<NAME>)推广的。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E3%83%8A%E3%83%81%E3%82%BA%E3%83%A0'>ナチズム</a>",
"desc": "ナチズム(Nazism)は人種や民族性を基に権力を分配することを求める、ファシズムの一派です。 国家社会主義ドイツ労働者党(ナチス)がイデオロギーとして採用した事で有名です。ドレクスラー(<NAME>)を始祖とし、ヒトラー(<NAME>)によって広められました。" }},
"stats": { "a": 0, "b": 80, "c": 100, "d": 10, "e": 50, "f": 50, "g": 100 }},
//Monarchism
{ "name": "<a href='https://en.wikipedia.org/wiki/Absolute_monarchy'> Absolute Monarchism </a>",
"desc": "Absolute Monarchism, also called Absolutism, is a form of monarchy in which the monarch holds supreme autocratic authority, principally not being restricted by written laws, legislature, or unwritten customs. These are often hereditary monarchies.",
"i18n": {
"ko": { "name": "절대 군주제",
"desc": "절대 군주제는 권위주의적 독재 이념입니다. 군주는 절대적인 권력을 가지고 있고, 지도자는 유전적으로 계승됩니다." },
"ru": { "name": "<NAME>",
"desc": "это авторитарная идеология. Монархи обладают абсолютной властью, а власть передаётся генетически." },
"cn": { "name": "绝对君主制",
"desc": "绝对君主制是专制独裁的意识形态。君主具有绝对权力,领导者是世袭的。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E7%B5%B6%E5%AF%BE%E5%90%9B%E4%B8%BB%E5%88%B6'>絶対君主制</a>",
"desc": "絶対君主主義(Absolute Monarchism)とは、権威主義的な独裁制の一つです。絶対的な権力を持つ君主による政治を理想とします。また、君主の地位は血縁を基に世襲されます。" }},
"stats": { "a": 30, "b": 50, "c": 80, "d": 80, "e": 50, "f": 30, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Constitutional_monarchy'> Constitutional Monarchism </a>",
"desc": "Constitutional Monarchism is a form of monarchy in which the monarch exercises authority in accordance with a written or unwritten constitution. Within Constitutional Monarchism, the monarch acts as a non-party political head of state under the constitution, whether written or unwritten.",
"i18n": {
"ko": { "name": "입헌 군주제",
"desc": "입헌 군주제는 군주제의 변형으로, 군주는 통치할 때 헌법을 따라야 합니다. 그것은 영국, 스페인, 스웨덴 등과 같은 유럽 국가에 존재합니다." },
"ru": { "name": "Конституционная Монархия",
"desc": "Это вариант монархизма, когда правящий монарх должен следовать конституции, чтобы править. Он присутствует в европейских странах, таких как Великобритания, Испания, Швеция и т. Д." },
"cn": { "name": "君主立宪制",
"desc": "君主立宪制是君主制的一种变体,统治君主需要遵循宪法才能统治。它存在于英国,西班牙,瑞典等欧洲国家。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E7%AB%8B%E6%86%B2%E5%90%9B%E4%B8%BB%E5%88%B6'>立憲君主主義</a>",
"desc": "立憲君主主義(Constitutional Monarchism)とは、君主が憲法に縛られる君主制の一派です。英国やスペイン、スウェーデン王国などヨーロッパの国々は、現在もこの体制下にあります。" }},
"stats": { "a": 30, "b": 30, "c": 60, "d": 40, "e": 50, "f": 50, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Theocracy'> Theocracy </a>",
"desc": "Theocracy is a form of government in which a deity of some type is recognized as the supreme ruling authority, giving divine guidance to human intermediaries that manage the day-to-day affairs of the government. In some religions, the ruler, usually a king, was regarded as the chosen favorite of God (or gods) and could not be questioned, sometimes even being regarded as the descendant of or a god in their own right.",
"i18n": {
"ko": { "name": "<NAME>",
"desc": "신정 군주제는 종교를 국가 수반으로 사용하는 군주제이며, 종종 종교 조직을 사용하여 통치합니다." },
"ru": { "name": "Теократия",
"desc": "Теократия - это форма правления, при которой власть в государстве находится в руках религиозного учреждения и духовенства. " },
"cn": { "name": "君权神授制",
"desc": "君权神授制是使用宗教作为国家统治工具的君主制,经常使用宗教组织来统治。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E7%A5%9E%E6%A8%A9%E6%94%BF%E6%B2%BB'>神権主義</a>",
"desc": "神権主義(Theocracy)とは、宗教や宗教的指導者を君主とする君主制を支持する思想です。多くの場合、宗教団体を用いた政治が行われます。" }},
"stats": { "a": 20, "b": 50, "c": 85, "d": 50, "e": 10, "f": 40, "g": 50 }},
//Liberals, Libertarians
{ "name": "<a href='https://en.wikipedia.org/wiki/Right-Libertarianism'> Libertarianism </a>",
"desc": "Libertarianism, or more specifically Right-Libertarianism, is a political philosophy and movement that upholds liberty as a core principle. Libertarians seek to maximize autonomy and political freedom, emphasizing free association, freedom of choice, individualism and voluntary association.",
"i18n": {
"ko": { "name": "자유주의",
"desc": "자유주의자들은 일반적으로 문화적으로는 진보적지만 경제적으로는 보수적입니다. 그들은 비자발적인 세금 폐지는 물론 더 작은 주/정부를 원합니다." },
"ru": { "name": "Либертарианство",
"desc": "Либертарианцы, как правило, культурно левые, но финансово консервативные, и хотят меньшего государства/правительства, а также отмены принудительных налогов." },
"cn": { "name": "自由主义",
"desc": "自由主义者通常在文化上是激进的,但在财政上是保守的,他们想要一个较小的政府,以及废除强制纳税。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E3%83%AA%E3%83%90%E3%82%BF%E3%83%AA%E3%82%A2%E3%83%8B%E3%82%BA%E3%83%A0'>リバタリアニズム</a>",
"desc": "リバタリアニズム(Libertarianism)は自由至上主義とも訳されます。リバタリアンの多くは文化については左翼的である一方、経済に関しては右派です。小さな国家/政府を支持し、自由意志で拒否できない税制の廃止を主張します。" }},
"stats": { "a": 60, "b": 10, "c": 20, "d": 70, "e": 50, "f": 80, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Individualist_Anarchism'> Individualist Anarchism </a>",
"desc": "Individualist anarchism is the branch of anarchism that emphasizes the individual and their will over external determinants such as groups, society, traditions and ideological systems.",
"i18n": {
"ko": { "name": "개인주의 아나키즘",
"desc": "개인주의 아나키즘은 개인주의를 강하게 옹호하는 무정부주의의 한 형태이며, 아나키즘 정부를 지지합니다." },
"ru": { "name": "Индивидуалистический анархизм",
"desc": "Индивидуалистический анархизм-это форма анархизма, которая решительно выступает за индивидуализм, а также поддерживает анархическое идеи." },
"cn": { "name": "个人无政府主义",
"desc": "个人无政府主义是强烈倡导个人主义并支持无政府主义的意识形态。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E5%80%8B%E4%BA%BA%E4%B8%BB%E7%BE%A9%E7%9A%84%E7%84%A1%E6%94%BF%E5%BA%9C%E4%B8%BB%E7%BE%A9'>個人主義的無政府主義</a>",
"desc": "個人主義的無政府主義(Individualist Anarchism)は、個人主義を強く主張する無政府主義の一派です。" }},
"stats": { "a": 50, "b": 20, "c": 5, "d": 65, "e": 50, "f": 60, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Agorism'> Agorism </a>",
"desc": "Agorism is a free-market anarchist political philosophy that supports bringing about a society in which all relations between people are voluntary exchanges, and aims to achieve this through counter-economics.",
"i18n": {
"ko": { "name": "아고리즘",
"desc": "아고리즘은 아나키즘의 분파로써 모든 인간 관계가 평화로운 혁명과 반경제적인 것에 의해 이루어져야한다고 생각합니다." },
"ru": { "name": "Агоризм",
"desc": "Агоризм - это анархистская политическая философия свободного рынка, которая поддерживает создание общества, в котором все отношения между людьми являются добровольными обменами, и стремится достичь этого с помощью контрэкономики. " },
"cn": { "name": "黑市主义(阿哥拉主义)",
"desc": "黑市主义者认为所有人类关系都应通过和平革命和反经济学来完成。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Agorism'>アゴリズム</a>",
"desc": "アゴリズム(Agorism)とは、ブラックマーケットやグレーマーケットを活用した非暴力革命を通し、あらゆる人間が能動的な契約に基づく交流を行う社会を実現することを志向する思想です。" }},
"stats": { "a": 50, "b": 40, "c": 0, "d": 80, "e": 50, "f": 60, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Anarcho-Capitalism'> Anarcho-Capitalism </a>",
"desc": "Anarcho-Capitalism is a political philosophy and economic theory that advocates the elimination of centralized states in favor of a system of private property enforced by private agencies, free markets and the right-libertarian interpretation of self-ownership, which extends the concept to include control of private property as part of the self.",
"i18n": {
"ko": { "name": "아나코-자본주의",
"desc": "아나코-자본주의는 개인의 권리와 자유에 매우 높은 우선순위를 두는 무정부주의 이념입니다. 그것은 무정부상태와 자본주의의 극단적인 형태이고, 세금을 도둑질의 한 형태로 간주합니다." },
"ru": { "name": "Анархо-Капитализм",
"desc": "Это анархистская идеология, которая ставит очень высокий приоритет правам и свободе личности. Это крайняя форма анархии и капитализма. Он рассматривает налогообложение как форму воровства." },
"cn": { "name": "无政府资本主义",
"desc": "无政府资本主义是一种无政府主义意识形态,在个人权利和自由方面具有很高的优先地位。它是无政府状态和资本主义的极端形式。它认为税收是盗窃的一种形式。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E7%84%A1%E6%94%BF%E5%BA%9C%E8%B3%87%E6%9C%AC%E4%B8%BB%E7%BE%A9'>無政府資本主義</a>",
"desc": "無政府資本主義(アナルコキャピタリズム;Anarcho-Capitalism)とは、個人の自由や権利をつよく優先する思想です。徴税は窃盗であるとつよく主張し、政府が廃止された自由市場に基づく資本主義を提唱します。" }},
"stats": { "a": 50, "b": 0, "c": 0, "d": 80, "e": 50, "f": 80, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Night-watchman_state'> Minarchism </a>",
"desc": "Minarchism maintains that the state's only legitimate function is the protection of individuals from aggression, theft, breach of contract, and fraud, and the only legitimate governmental institutions are the military, police, and courts. Similarly to other Libertarian ideologies, it emphasizes on freedom of speech, property, and voluntary association.",
"i18n": {
"ko": { "name": "소정부주의",
"desc": "소정부주의는 매우 작은 정부를 원하는 이념입니다. 그들은 정부의 목적이 단지 법을 집행하기 위한 것이라고 생각합니다. 이런 형태의 정부는 '야경꾼 국가'라고 불립니다." },
"ru": { "name": "Минархизм",
"desc": "это идеология, которая хочет иметь очень никзкое вмешательство правительства. Они думают, что цель правительства только в том, чтобы исполнять законы." },
"cn": { "name": "最小政府主義",
"desc": "最小政府主義是一个需要很小的政府的意识形态。他们认为政府的目的只是为了执行法律。这种政府被称为守夜人政府。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E6%9C%80%E5%B0%8F%E5%9B%BD%E5%AE%B6%E4%B8%BB%E7%BE%A9'>最小国家主義</a>",
"desc": "最小国家主義(ミナキズム;Minarchism)とは、国民を暴力やその他個人を害する行為からのみ守る政府(最小国家)を提唱する思想です。最小国家は夜警国家とも呼ばれます。" }},
"stats": { "a": 50, "b": 5, "c": 10, "d": 80, "e": 50, "f": 30, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Paleolibertarianism'> Paleolibertarianism </a>",
"desc": "Paleolibertarianism is a variant of Libertarianism which stresses inherent incompatibility between progressivism and the concept of liberty, as well as a focus on the importance of inherited culture as a means of maintaining order. ",
"i18n": {
"ko": { "name": "고자유주의",
"desc": "고자유주의는 기본적으로 자유주의이지만 문화적으로는 보수적입니다. 그들은 작은 정부를 원합니다." },
"ru": { "name": "Палеолибертарианство",
"desc": "Палеолибертарианцы в основном либетарианцы, но консервативны и в культурном отношении. Они хотят небольшое государство." },
"cn": { "name": "古希腊主义",
"desc": "古希腊主义者基本上是自由主义者,但在文化上也很保守。他们想要一个小国家。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Paleolibertarianism'>パレオ・リバタリアニズム</a>",
"desc": "パレオ・リバタリアニズム(Paleolibertarianism)とは、文化に対しては保守主義的姿勢を取るリバタリアン(自由至上主義)の一派です。小さな国家を主張します。" }},
"stats": { "a": 30, "b": 10, "c": 30, "d": 60, "e": 50, "f": 60, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Geolibertarianism'>Geolibertarianism </a>",
"desc": "Geolibertarianism is a political and economic ideology that integrates libertarianism with Georgism, and holds that although people should own the value they produce themselves, all land should belong equally to all members of society. It does not consider land as legitimate private property.",
"i18n": {
"ko": { "name": "토지-자유주의",
"desc": "토지-자유주의는 자유주의적인 가치를 가진 지공주의의 분파입니다 지공주의는 토지가 사유재산으로 간주되지 않고 세금과 공소유지를 받는 경제체제입니다." },
"ru": { "name": "Геолибертарианство",
"desc": "это джорджизм с либертарианскими ценностями. Джорджизм -это экономическая система, в которой земля не считается частной собственностью, а облагается налогом и находится в государственной собственности." },
"cn": { "name": "地缘自由主义",
"desc": "地缘自由主义是具有自由主义者价值观的乔治主义者。乔治主义是一种土地不被视为私有财产,而是共有且使用者被征税的经济体系。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Geolibertarianism'>ジオ・リバタリアニズム</a>",
"desc": "ジオ・リバタリアニズム(Geo-Libertarianism)は、土公主義(ジョージ主義;Georgism)的価値観を持つリバタリアニズム(自由至上主義)思想です。土公主義者は土地を公的所有されているものと見なし、土地所有者につよい税をかけ、土地税以外のあらゆる税の廃止を提唱します。" }},
"stats": { "a": 50, "b": 60, "c": 20, "d": 65, "e": 50, "f": 60, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Neoliberalism'> Neoliberalism </a>",
"desc": "Neoliberalism is generally associated with policies of economic liberalization, including privatization, deregulation, globalization, free trade, and tricke-down economics, and served as the opposition of Keynesian Economics.",
"i18n": {
"ko": { "name": "신자유주의",
"desc": "신자유주의는 자유시장 중심의 개혁 이념으로 대개 문화적으로 좌편향적입니다. 국가의 영향력을 줄이는 것도 이념의 큰 부분입니다." },
"ru": { "name": "Неолиберализм",
"desc": "это ориентированная на свободный рынок реформаторская идеология, которая обычно культурно левая. Снижение влияния государства также является большой частью идеологии." },
"cn": { "name": "新自由主义",
"desc": "新自由主义是一种以自由市场为导向的改革思想,通常在文化上是激进的。减少政府影响也是该思想的重要组成部分。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E6%96%B0%E8%87%AA%E7%94%B1%E4%B8%BB%E7%BE%A9'>新自由主義</a>",
"desc": "新自由主義(ネオリベラリズム;Neo-Liberalism)とは、、自由市場志向の社会改良を求める思想です。文化に対しては左翼的思想を持ちます。また、国家の権力を縮小することも主張します。" }},
"stats": { "a": 60, "b": 25, "c": 50, "d": 50, "e": 50, "f": 70, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Classical_Liberalism'> Classical Liberalism </a>",
"desc": "Classical liberalism is a political ideology and a branch of liberalism that advocates civil liberties under the rule of law with an emphasis on economic freedom. Highly influenced by the Enlightenment, it believes in a limited government that protects individual rights, and limited public services and welfare.",
"i18n": {
"ko": { "name": "고전적 자유주의",
"desc": "고전적 자유주의는 개인의 자유와 제한적인 정부를 자유사회의 기초로 봅니다. 그것은 자유주의적 이상을 가진 최초의 이념들 중 하나였습니다." },
"ru": { "name": "Классический либерализм",
"desc": "Класси́ческий либерали́зм — политическая идеология, ветвь либерализма, которая утверждает гражданские права и политическую свободу." },
"cn": { "name": "古典自由主义",
"desc": "古典自由主义将个人自由和有限的政府视为自由社会的基础。 它是最初具有自由主义理想的意识形态之一。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E5%8F%A4%E5%85%B8%E7%9A%84%E8%87%AA%E7%94%B1%E4%B8%BB%E7%BE%A9'>古典的自由主義</a>",
"desc": "古典的自由主義(Classical Liberalism)とは、個人の自由と小さな政府を前提とした自由社会を求める立場です。新自由主義などの、より新しい自由主義のもととなった思想で、これらと区別するために「古典的」と呼ばれます。" }},
"stats": { "a": 40, "b": 20, "c": 30, "d": 65, "e": 50, "f": 50, "g": 50 }},
//Traditionalism
{ "name": "<a href='https://en.wikipedia.org/wiki/Conservatism'> Conservatism </a>",
"desc": "Conservatism is a cultural, social, and political philosophy which seeks to promote and to preserve traditional social institutions. n Western culture, conservatives seek to preserve a range of institutions such as organized religion, parliamentary government, and property rights.",
"i18n": {
"ko": { "name": "보수주의",
"desc": "보수주의는 주로 현상유지에 관한 이념입니다. 보수주의자들은 대개 외교적 개입주의, 더 많은 군사비 지출, 더 낮은 세금을 지지합니다." },
"ru": { "name": "Консерватизм",
"desc": "это идеология, которая в основном направлена на сохранение статус-кво. Консерваторы обычно поддерживают дипломатический интервенционизм, увеличение военных расходов и снижение налогов." },
"cn": { "name": "保守主义",
"desc": "保守主义是一种倾向于主要是维护现状的意识形态。保守主义者通常支持外交干预主义,增加军费开支和降低税收。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E4%BF%9D%E5%AE%88'>保守主義</a>",
"desc": "保守主義(Conservatism)とは、現状維持を強く主張する思想の一つです。保守主義者の多くは他国への積極的な干渉を支持し(干渉主義)、軍事予算 の増加や税の引き下げを主張します。" }},
"stats": { "a": 30, "b": 20, "c": 40, "d": 30, "e": 50, "f": 50, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Neoconservatism'> Neoconservatism </a>",
"desc": "Neoconservatives typically advocate the promotion of democracy and interventionism in international affairs, including peace through strength, and are known for espousing disdain for communism and political radicalism.",
"i18n": {
"ko": { "name": "신보수주의",
"desc": "신보수주의는 자유주의 정당의 태평양주의 정책을 싫어하는 보수주의의 한 형태입니다. 신보수주의자들은 서구 국가들의 목표가 자신들의 모델을 세계에 알리는 것이라고 생각합니다. 그들은 더 국제적으로 초점을 맞추고 있습니다." },
"ru": { "name": "Неоконсерватизм",
"desc": "это идеология,в основе которой лежат идеи консерватизма, приспособленных к новым условиям общественного развития. Неоконсерваторы считают, что цель западных стран-донести до мира свою модель. Они более ориентированы на международный уровень." },
"cn": { "name": "新保守主义",
"desc": "新保守主义是保守主义的一种形式,它不喜欢自由党的太平洋主义政策。新保守主义者认为西方国家的目标是将自己的榜样推向世界。它们更加国际化。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E6%96%B0%E4%BF%9D%E5%AE%88%E4%B8%BB%E7%BE%A9'>新保守主義</a>",
"desc": "新保守主義(Neo-Conservatism)は、リベラル政党のハト派政策に反対する保守主義の一派です。新保守主義者は、西洋諸国の思想や政治体制を積極的に世界中に広めることを主張します。海外を強く意識しているのが特徴です。" }},
"stats": { "a": 20, "b": 20, "c": 65, "d": 15, "e": 50, "f": 60, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Centre-Right_Politics'> Centre-Right Politics </a>",
"desc": "Center-Right Politics lean to the right of the political spectrum, but are closer to the centre than others. It is influenced by economics liberalism and neoliberalism, but also supports certain left-leaning ideas such as a welfare state and egalitarianism.",
"i18n": {
"ko": { "name": "<NAME>",
"desc": "중도 우파는 우파 성향의 중도주의입니다. 온건 보수적인 것 말고는, 중도 우파는 대부분의 다른 견해에서는 중립적입니다." },
"ru": { "name": "<NAME>",
"desc": "Это люди умеренных правых взглядов. Помимо того, что они умеренно консервативны, они нейтральны в большинстве других взглядов." },
"cn": { "name": "中间派右翼主义",
"desc": "中间派右翼主义是右倾的中间派意识形态。除了适度的保守主义外,它们在大多数其他观点中都是中立的。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E4%B8%AD%E9%81%93%E5%8F%B3%E6%B4%BE'>中道右派</a>",
"desc": "中道右派(Centre-Rightism)の人は、文化についてはやや保守的な傾向をもつものの、その他の価値観については中立的な立場をとります。" }},
"stats": { "a": 60, "b": 50, "c": 50, "d": 50, "e": 50, "f": 50, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Reactionary'> Reactionary Conservatism </a>",
"desc": "Reactionary Conservatism, or Reactionaryism, favor a return to a previous political state of society that they believe possessed positive characteristics that are absent in contemporary society. In modern terms, Reactionary Conservatism refers to a highly traditional position that is opposed to social or political change, that tends to stand in opposition against Enlightenment ideas.",
"i18n": {
"ko": { "name": "반동 보수주의",
"desc": "반동주의는 구체제로 돌아가려는 정치 이념으로서 진보주의에 대한 반대 개념이다." },
"ru": { "name": "Политическая реакция",
"desc": "политическая реакция — общественное движение в направлении, резко противоположном предшествовавшему или современному общественному строю." },
"cn": { "name": "反动保守主义",
"desc": "反动保守主义是一种意识形态,其思想是保存文化并退后一步,以恢复过去的政府体制,无论是帝国主义还是古典保守主义。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E5%BE%A9%E5%8F%A4%E4%B8%BB%E7%BE%A9'>復古主義</a>",
"desc": "復古主義(Reactionary Conservatism)とは、文化の保守や過去の政治体制の復活を支持します。場合によっては帝国主義(imperialism)を主張することもあります。" }},
"stats": { "a": 0, "b": 20, "c": 80, "d": 20, "e": 50, "f": 50, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Paleoconservatism'> Paleoconservatism </a>",
"desc": "Paleoconservatism is a political philosophy and variety of conservatism in the United States stressing nationalism, Christian ethics, regionalism, and traditionalist conservatism. Their views overlap with the Old Right and Right-Wing Populism.",
"i18n": {
"ko": { "name": "고보수주의",
"desc": "고보수주의는 종교, 민족주의, 전통주의를 강조하는 보수주의의 변형입니다." },
"ru": { "name": "Палеоконсерватизм",
"desc": "Палеоконсерватизм-это разновидность консерватизма, подчеркивающая религиозность, национализм и традиционализм." },
"cn": { "name": "古典保守主义",
"desc": "古典保守主义是保守主义的一种变体,它强调宗教,民族主义和传统主义。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Paleoconservatism'>パレオ保守主義</a>",
"desc": "パレオ保守主義(Paleoconservatism)とは、保守主義のうち、宗教、国民、伝統を強く重視することを非常に強く訴える一派です。" }},
"stats": { "a": 25, "b": 40, "c": 60, "d": 35, "e": 50, "f": 40, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Right-wing_populism'> Right-Wing Populism </a>",
"desc": "Right-Wing Populism is a political ideology which combines right-wing politics and populist rhetoric and themes. The rhetoric often consists of anti-elitist and anti-intellectual sentiments, opposition to the Establishment, and speaking to the 'common people'.",
"i18n": {
"ko": { "name": "우익 포퓰리즘",
"desc": "우익 포퓰리즘은 우파적 가치와 포퓰리즘이 결합된 이념입니다. 우익 포퓰리스트들은 대개 민족주의적이고 보호주의적이며 이민에 대한 더 강력한 규제를 지지합니다." },
"ru": { "name": "<NAME>серватизм",
"desc": "Патернали́стский консервати́зм — ответвление консерватизма, отражающее идею того, что сообщества функционируют и развиваются взаимозависимо, и что люди внутри них имеют обязательства друг перед другом." },
"cn": { "name": "右翼民粹主义",
"desc": "右翼民粹主义是将右翼价值观与民粹主义相结合的意识形态。右翼民粹主义者通常是民族主义,保护主义,并支持更严格的移民限制。" },
"ja": { "name": "<a href='https://ja.wikipedia.org/wiki/%E5%8F%B3%E6%B4%BE%E3%83%9D%E3%83%94%E3%83%A5%E3%83%AA%E3%82%BA%E3%83%A0'>右翼ポピュリズム</a>",
"desc": "右翼ポピュリズム(Right-Wing Populism)とは、右翼的価値観をもったポピュリズム(大衆主義)です。ナショナリズム的な傾向を持ち、保護貿易と移民の規制を主張します、" }},
"stats": { "a": 30, "b": 55, "c": 65, "d": 55, "e": 50, "f": 50, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Paternalistic_conservatism'> Paternalistic Conservatism </a>",
"desc": "Paternalistic conservatism is a strand of conservatism with an emphasis on the paternalistic obligation of those who are privileged and wealthy to the poorer parts of society.",
"i18n": {
"ko": { "name": "우익 사회주의",
"desc": "우익 사회주의 는 시장 경제, 상업주의, 자유 방임, 내셔널리즘 등 우파적인 가치를 바탕으로 한 공동체주의를 의미한다." },
"ru": { "name": "Патерналистский консерватизм",
"desc": "это идеология, которая в основном направлена на сохранение статус-кво. Консерваторы обычно поддерживают дипломатический интервенционизм, увеличение военных расходов и снижение налогов." },
"cn": { "name": "家长式保守主义",
"desc": "家长式保守主义是保守主义的一个分支,主张富人有责任支持穷人。 " },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Paternalistic_conservatism'> 父性的保守主義</a>",
"desc": "父性的保守主義(Paternalistic Conservatism)は保守主義の一分野であり、貧しい人々を支援するために裕福な人々の義務を強調します。 " }},
"stats": { "a": 40, "b": 65, "c": 60, "d": 40, "e": 50, "f": 50, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Fiscal_conservatism'> Fiscal Conservatism </a>",
"desc": "Fiscal conservatism is a political and economic philosophy regarding fiscal policy and fiscal responsibility advocating privatization, low taxes, reduced government spending and minimal government debt.",
"i18n": {
"ko": { "name": "재정보수주의",
"desc": "재정보수주의 는 재정 정책과 관련하여 재정 책임의 옹호의 정치 철학이다. 재정보수주의자들은 종종 피고려 적자 지출과 전체 정부 지출 및 국가 부채의 감소뿐만 아니라 균형 예산을 중요시한다." },
"ru": { "name": "Фискальный консерватизм",
"desc": "Фискальный консерватизм, представляет собой политическую и экономическую философию, касающуюся фискальной политики и фискальной ответственности, выступающую за низкие налоги, сокращение государственных расходов и минимальный государственный долг." },
"cn": { "name": "财政保守主义",
"desc": "财政保守主义是用来形容主张避免赤字开支的保守主义财政政策。财政保守主义者往往支持减少整体政府开支、赤字和国债,以达到最重要的平衡预算。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Fiscal_conservatism'>財政保守主義</a>",
"desc": "財政保守主義(Fiscal Conservatism)は、低税、政府支出の削減、最小限の政府債務を提唱する、財政政策と財政責任に関する政治的および経済的哲学です。" }},
"stats": { "a": 40, "b": 10, "c": 40, "d": 20, "e": 50, "f": 50, "g": 50 }},
//Others
{ "name": "<a href='https://en.wikipedia.org/wiki/Rainbow_capitalism'> Pink Capitalism </a>",
"desc": "Pink Capitalism is the incorporation of the LGBT movement and sexual diversity into consumerism and capitalism, in order to create a market that is oriented and tolerated towards LGBT communities.",
"i18n": {
"ko": { "name": "핑크 자본주의",
"desc": "핑크 자본주의는 LGBT 운동, 성적 다양성, 핑크워싱을 자본주의에 통합한 것입니다." },
"ru": { "name": "Розовый капитализм",
"desc": "это идеология, которая стремится включить прогрессивизм в капитализм. Он поддерживает рынок невмешательства и, как правило, также поддерживает минимальное или несуществующее правительство." },
"cn": { "name": "粉色资本主义",
"desc": "粉色资本主义是一种旨在将进步主义纳入资本主义的意识形态。它支持自由放任的市场。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Rainbow_capitalism'>ピンク・キャピタリズム</a>",
"desc": "ピンク・キャピタリズム(Pink Capitalism)とは、レインボーキャピタリズムとも言われ、LGBTQの権利拡大を主張しつつ自由放任市場を支持する資本主義の一派です。" }},
"stats": { "a": 80, "b": 10, "c": 30, "d": 30, "e": 50, "f": 70, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Civic_Nationalism'> Civic Nationalism </a>",
"desc": "Civic Nationalism is a form of nationalism identified by political philosophers who believe in an inclusive form of nationalism that adheres to traditional liberal values of freedom, tolerance, equality, and individual rights.",
"i18n": {
"ko": { "name": "시민 민족주의",
"desc": "시민 민족주의는 민족주의와 자유주의를 결합합니다. 이는 자유주의, 개인주의, 평등과 같은 자유주의적 가치와 서양 기준의 전통적 가치를 지지하는 민족주의의 한 형태입니다." },
"ru": { "name": "Гражданский национализм",
"desc": "Гражданский национализм сочетает национализм с либерализмом. Это форма национализма, которая поддерживает либеральные и Традиционные (по западным стандартам) ценности, такие как Свобода, Индивидуальность и Равенство." },
"cn": { "name": "公民民族主义",
"desc": "公民民族主义将民族主义与自由主义相结合。这是一种支持自由主义和传统(按照西方标准)价值观(例如自由,个性和平等)的民族主义。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Civic_Nationalism'>市民民族主義</a>",
"desc": "市民民族主義(Civic Nationalism)とは、リベラリズム的思想を持ったナショナリズムです。市民の自由や個人主義、そして平等を主張します。" }},
"stats": { "a": 60, "b": 30, "c": 50, "d": 50, "e": 50, "f": 50, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Progressive_Conservatism'> Progressive Conservatism </a>",
"desc": "Progressive conservatism is a political ideology which combines conservative and progressive policies. It believes that social reform and progression in areas such as science, economics, education, technology and medicine were necessary to improve human living conditions, but also supports the preservation of tradition.",
"i18n": {
"ko": { "name": "진보적 보수주의",
"desc": "진보적 보수주의는 보수적 가치와 진보적 가치의 결합입니다. 진보적 보수주의자들은 사회가 개혁되어야 하지만 보수주의적인 사고를 통해 성취되어야 한다고 믿습니다." },
"ru": { "name": "Прогрессивный консерватизм",
"desc": "Прогрессивный консерватизм-это сочетание консервативных и прогрессивных ценностей. Он считает, что общество должно быть реформировано, но должно быть достигнуто с помощью консервативного мышления." },
"cn": { "name": "进步保守主义",
"desc": "进步保守主义是一个来自于保守主義的思想意识,它依附于保守主義政策的基础上合并了進步主義的制度政策。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Progressive_Conservatism'>革新的保守主義</a>",
"desc": "革新的保守主義(Progressive Conservatism)とは、社会改良主義的思想を持った保守です。社会に改良が必要であることを認めつつ、保守的態度を通した社会改革を求めます。" }},
"stats": { "a": 70, "b": 40, "c": 40, "d": 50, "e": 50, "f": 70, "g": 50 }},
{ "name": "<a href='https://en.wikipedia.org/wiki/Technocracy'> Technocracy </a>",
"desc": "Technocracy is a system of government in which a decision-maker or makers are elected by the population or appointed on the basis of their expertise in a given area of responsibility, particularly with regard to scientific or technical knowledge.",
"i18n": {
"ko": { "name": "기술관료제",
"desc": "기술관료제는 기술 전문가인 사람이 국가를 통치해야 한다는 사상을 지지하는 이념입니다." },
"ru": { "name": "Технократия",
"desc": "это идеология, которая продвигает идею о том, что люди, которые являются экспертами в области технологий, должны править нацией." },
"cn": { "name": "科技专制",
"desc": "在科技专制中,决策者是根据其在给定责任领域(尤其是在科学或技术知识方面)的专业知识来选拔的。" },
"ja": { "name": "<a href='https://en.wikipedia.org/wiki/Technocracy'>技術官僚主義</a>",
"desc": "技術官僚主義(テクノクラシー;Technocracy)あるいは技術家主義とは、高度な専門的知識を持つ技術家を官僚(テクノクラート)とし、テクノクラートを中心とした政治体制を求める思想です。" }},
"stats": { "a": 60, "b": 60, "c": 60, "d": 60, "e": 50, "f": 80, "g": 50 }}
];
| rightvaluestest/rightvaluestest.github.io |
<|start_filename|>docs/asset-manifest.json<|end_filename|>
{
"main.css": "static/css/main.ebe711c6.css",
"main.css.map": "static/css/main.ebe711c6.css.map",
"main.js": "static/js/main.3a666ba4.js",
"main.js.map": "static/js/main.3a666ba4.js.map"
} | WinsomeYuen/WeatherApp |
<|start_filename|>UTHAI-MPPC/uthai_mbed/stm32/src/main.cpp<|end_filename|>
#include <mbed.h>
DigitalOut led1(PB_10);
DigitalOut led2(PB_4);
DigitalOut led3(PB_5);
DigitalIn button1(PB_3);
DigitalIn button2(PA_10);
int main()
{
while (1)
{
if (button1.read() == 0 || button2.read() == 0)
{
led1.write(1);
led2.write(0);
led3.write(1);
}
else
{
led1.write(0);
led2.write(1);
led3.write(0);
}
}
}
<|start_filename|>kdlo/src/final_kd.cpp<|end_filename|>
#include <iostream>
#include "ros/ros.h"
#include "std_msgs/String.h"
#include "std_msgs/Float64.h"
#include "std_msgs/Float64MultiArray.h"
#include "geometry_msgs/PointStamped.h"
#include "trajectory_msgs/JointTrajectory.h"
#include "trajectory_msgs/JointTrajectoryPoint.h"
#include "sensor_msgs/JointState.h"
#include "sensor_msgs/Imu.h"
#include "control_msgs/JointControllerState.h"
#include "nav_msgs/Path.h"
#include "tf/transform_datatypes.h"
#include <sstream>
#include <kdl_parser/kdl_parser.hpp>
#include <kdl/jntarray.hpp>
#include <kdl/chainfksolverpos_recursive.hpp>
#include <kdl/treefksolverpos_recursive.hpp>
#include <kdl/chainjnttojacsolver.hpp>
#include <kdl/chainiksolverpos_nr.hpp>
#include <kdl/chainiksolverpos_nr_jl.hpp>
#include <kdl/chainiksolvervel_pinv.hpp>
#include <ctime>
#define foot_x_size 0.03
#define foot_x_size2 -0.04
#define foot_y_size 0.018
#define foot_y_size2 -0.018
#define stance_value 0.001
class point_mass
{
public:
double x = 0, y = 0, z = 0, mass = 0;
char state;
};
class stability
{
public:
bool x = 0, y = 0;
bool check()
{
return x && y;
}
};
class uthai_kd
{
// ros::NodeHandle n;
ros::Publisher pb_ankle_pitch_L,
pb_ankle_roll_L,
pb_hip_pitch_L,
pb_hip_roll_L,
pb_hip_yaw_L,
pb_knee_pitch_L,
pb_ankle_pitch_R,
pb_ankle_roll_R,
pb_hip_pitch_R,
pb_hip_roll_R,
pb_hip_yaw_R,
pb_knee_pitch_R,
pb_com,
pb_centroid,
pb_com_x,
pb_centroid_x,
pb_com_y,
pb_centroid_y,
pb_l_foot,
pb_r_foot,
pb_traj;
trajectory_msgs::JointTrajectoryPoint traj_point;
geometry_msgs::PointStamped ps, ps_centroid, ps_l_foot, ps_r_foot;
KDL::Tree uthai_tree;
KDL::JntArray r_jntarray;
KDL::JntArray l_jntarray;
double home[12] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
point_mass humanoid_CoM, centroid;
double r_roll, r_pitch, r_yaw,
l_roll, l_pitch, l_yaw,
imu_roll, imu_pitch, imu_yaw,
base_roll, base_pitch, base_yaw;
ros::Subscriber sub_imu;
ros::Subscriber read_rhip_y,
read_rhip_r,
read_rhip_p,
read_rknee_p,
read_rankle_p,
read_rankle_r,
read_lhip_y,
read_lhip_r,
read_lhip_p,
read_lknee_p,
read_lankle_p,
read_lankle_r;
// ros::Subscriber sub_jointstates = n.subscribe("uthai/joint_states", 1, &uthai_kd::read_state, this);
// void read_state(const sensor_msgs::JointState &msg)
// {
// std::cout << "success!"
// << "\n";
// rhip_y = msg.position[0];
// rhip_r = msg.position[1];
// rhip_p = msg.position[2];
// rknee_p = msg.position[3];
// rankle_p = msg.position[4];
// rankle_r = msg.position[5];
// lhip_y = msg.position[6];
// lhip_r = msg.position[7];
// lhip_p = msg.position[8];
// lknee_p = msg.position[9];
// lankle_p = msg.position[10];
// lankle_r = msg.position[11];
// }
// void read_imu(const sensor_msgs::Imu::ConstPtr &msg)
// {
// tf::Quaternion q;
// q.setX(msg->orientation.x);
// q.setY(msg->orientation.y);
// q.setZ(msg->orientation.z);
// q.setW(msg->orientation.w);
// tf::Matrix3x3 m(q);
// m.getRPY(imu_roll, imu_pitch, imu_yaw);
// std::cout << imu_roll << ", " << imu_pitch << ", " << imu_yaw << "\n";
// }
void fread_rhip_y(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[5] = msg->process_value;
l_jntarray.data[6] = msg->process_value;
}
void fread_rhip_r(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[4] = msg->process_value;
l_jntarray.data[7] = msg->process_value;
}
void fread_rhip_p(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[3] = msg->process_value;
l_jntarray.data[8] = msg->process_value;
}
void fread_rknee_p(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[2] = msg->process_value;
l_jntarray.data[9] = msg->process_value;
}
void fread_rankle_p(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[1] = msg->process_value;
l_jntarray.data[10] = msg->process_value;
}
void fread_rankle_r(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[0] = msg->process_value;
l_jntarray.data[11] = msg->process_value;
}
void fread_lhip_y(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[6] = msg->process_value;
l_jntarray.data[5] = msg->process_value;
}
void fread_lhip_r(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[7] = msg->process_value;
l_jntarray.data[4] = msg->process_value;
}
void fread_lhip_p(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[8] = msg->process_value;
l_jntarray.data[3] = msg->process_value;
}
void fread_lknee_p(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[9] = msg->process_value;
l_jntarray.data[2] = msg->process_value;
}
void fread_lankle_p(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[10] = msg->process_value;
l_jntarray.data[1] = msg->process_value;
}
void fread_lankle_r(const control_msgs::JointControllerState::ConstPtr &msg)
{
r_jntarray.data[11] = msg->process_value;
l_jntarray.data[0] = msg->process_value;
}
public:
bool gazebo = 1,
real = 0;
KDL::Chain r_leg, l_leg;
KDL::Frame r_foot, l_foot;
double rhip_y, rhip_r, rhip_p, rknee_p, rankle_p, rankle_r;
double lhip_y, lhip_r, lhip_p, lknee_p, lankle_p, lankle_r;
double T_rhip_y, T_rhip_r, T_rhip_p, T_rknee_p, T_rankle_p, T_rankle_r;
double T_lhip_y, T_lhip_r, T_lhip_p, T_lknee_p, T_lankle_p, T_lankle_r;
uthai_kd(ros::NodeHandle *n, std::string urdf, std::string base, std::string r_endf, std::string l_endf)
{
if (!kdl_parser::treeFromFile(urdf, uthai_tree))
{
std::cout << "Failed to construct kdl tree\n";
}
else
{
std::cout << "Success to construct kdl tree\n";
}
if (!uthai_tree.getChain(r_endf, l_endf, r_leg))
{
std::cout << "Failed to get r_leg kinematics chain\n";
}
if (!uthai_tree.getChain(l_endf, r_endf, l_leg))
{
std::cout << "Failed to get l_leg kinematics chain\n";
}
else
{
std::cout << "Success to get kinematics chain\n";
r_jntarray = KDL::JntArray(r_leg.getNrOfJoints());
l_jntarray = KDL::JntArray(l_leg.getNrOfJoints());
}
pb_ankle_pitch_L = n->advertise<std_msgs::Float64>("uthai/l_ankle_pitch_position/command", 10);
pb_ankle_roll_L = n->advertise<std_msgs::Float64>("uthai/l_ankle_roll_position/command", 10);
pb_hip_pitch_L = n->advertise<std_msgs::Float64>("uthai/l_hip_pitch_position/command", 10);
pb_hip_roll_L = n->advertise<std_msgs::Float64>("uthai/l_hip_roll_position/command", 10);
pb_hip_yaw_L = n->advertise<std_msgs::Float64>("uthai/l_hip_yaw_position/command", 10);
pb_knee_pitch_L = n->advertise<std_msgs::Float64>("uthai/l_knee_pitch_position/command", 10);
pb_ankle_pitch_R = n->advertise<std_msgs::Float64>("uthai/r_ankle_pitch_position/command", 10);
pb_ankle_roll_R = n->advertise<std_msgs::Float64>("uthai/r_ankle_roll_position/command", 10);
pb_hip_pitch_R = n->advertise<std_msgs::Float64>("uthai/r_hip_pitch_position/command", 10);
pb_hip_roll_R = n->advertise<std_msgs::Float64>("uthai/r_hip_roll_position/command", 10);
pb_hip_yaw_R = n->advertise<std_msgs::Float64>("uthai/r_hip_yaw_position/command", 10);
pb_knee_pitch_R = n->advertise<std_msgs::Float64>("uthai/r_knee_pitch_position/command", 10);
pb_com = n->advertise<geometry_msgs::PointStamped>("uthai/com", 10);
pb_centroid = n->advertise<geometry_msgs::PointStamped>("uthai/com_target", 10);
pb_com_x = n->advertise<std_msgs::Float64>("com/x", 10);
pb_centroid_x = n->advertise<std_msgs::Float64>("centroid/x", 10);
pb_com_y = n->advertise<std_msgs::Float64>("com/y", 10);
pb_centroid_y = n->advertise<std_msgs::Float64>("centroid/y", 10);
pb_l_foot = n->advertise<geometry_msgs::PointStamped>("uthai/l_foot", 10);
pb_r_foot = n->advertise<geometry_msgs::PointStamped>("uthai/r_foot", 10);
pb_traj = n->advertise<trajectory_msgs::JointTrajectory>("uthai/joint_command", 10);
// sub_imu = n->subscribe("uthai/sensor/imu", 1000, &uthai_kd::read_imu, this);
read_rhip_y = n->subscribe("uthai/r_hip_yaw_position/state", 1000, &uthai_kd::fread_rhip_y, this);
read_rhip_r = n->subscribe("uthai/r_hip_roll_position/state", 1000, &uthai_kd::fread_rhip_r, this);
read_rhip_p = n->subscribe("uthai/r_hip_pitch_position/state", 1000, &uthai_kd::fread_rhip_p, this);
read_rknee_p = n->subscribe("uthai/r_knee_pitch_position/state", 1000, &uthai_kd::fread_rknee_p, this);
read_rankle_p = n->subscribe("uthai/r_ankle_pitch_position/state", 1000, &uthai_kd::fread_rankle_p, this);
read_rankle_r = n->subscribe("uthai/r_ankle_roll_position/state", 1000, &uthai_kd::fread_rankle_r, this);
read_lhip_y = n->subscribe("uthai/l_hip_yaw_position/state", 1000, &uthai_kd::fread_lhip_y, this);
read_lhip_r = n->subscribe("uthai/l_hip_roll_position/state", 1000, &uthai_kd::fread_lhip_r, this);
read_lhip_p = n->subscribe("uthai/l_hip_pitch_position/state", 1000, &uthai_kd::fread_lhip_p, this);
read_lknee_p = n->subscribe("uthai/l_knee_pitch_position/state", 1000, &uthai_kd::fread_lknee_p, this);
read_lankle_p = n->subscribe("uthai/l_ankle_pitch_position/state", 1000, &uthai_kd::fread_lankle_p, this);
read_lankle_r = n->subscribe("uthai/l_ankle_roll_position/state", 1000, &uthai_kd::fread_lankle_r, this);
}
void set_T_jointpose(double *jnt)
{
T_rhip_y = jnt[0];
T_rhip_r = jnt[1];
T_rhip_p = jnt[2];
T_rknee_p = jnt[3];
T_rankle_p = 0 - (jnt[2] + jnt[3]);
T_rankle_r = -jnt[1];
T_lhip_y = jnt[6];
T_lhip_r = jnt[7];
T_lhip_p = jnt[8];
T_lknee_p = jnt[9];
T_lankle_p = 0 - (jnt[8] + jnt[9]);
T_lankle_r = -jnt[7];
}
void set_T_equal_jointpose()
{
T_rhip_y = rhip_y;
T_rhip_r = rhip_r;
T_rhip_p = rhip_p;
T_rknee_p = rknee_p;
T_rankle_p = rankle_p;
T_rankle_r = rankle_r;
T_lhip_y = lhip_y;
T_lhip_r = lhip_r;
T_lhip_p = lhip_p;
T_lknee_p = lknee_p;
T_lankle_p = lankle_p;
T_lankle_r = lankle_r;
}
void set_jointpose(double *jnt)
{
rhip_y = jnt[0];
rhip_r = jnt[1];
rhip_p = jnt[2];
rknee_p = jnt[3];
rankle_p = 0 - (jnt[2] + jnt[3]);
rankle_r = -jnt[1];
lhip_y = jnt[6];
lhip_r = jnt[7];
lhip_p = jnt[8];
lknee_p = jnt[9];
lankle_p = 0 - (jnt[8] + jnt[9]);
lankle_r = -jnt[7];
}
void add_jointpose(double *jnt)
{
rhip_y += jnt[0];
rhip_r += jnt[1];
rhip_p += jnt[2];
rknee_p += jnt[3];
rankle_p += 0 - (jnt[2] + jnt[3]);
rankle_r += -jnt[1];
lhip_y += jnt[6];
lhip_r += jnt[7];
lhip_p += jnt[8];
lknee_p += jnt[9];
lankle_p += 0 - (jnt[8] + jnt[9]);
lankle_r += -jnt[7];
}
void auto_ankle(char state)
{
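// Keep both soles flat: copy the stance leg's hip roll to the swing hip so the
// legs roll together, mirror hip roll into ankle roll, and set ankle pitch to
// cancel the combined hip and knee pitch.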
if (state == 'r')
lhip_r = rhip_r;
else if (state == 'l')
rhip_r = lhip_r;
rankle_r = -rhip_r;
lankle_r = -lhip_r;
rankle_p = 0 - (rhip_p + rknee_p);
lankle_p = 0 - (lhip_p + lknee_p);
// if (state == 'l')
// lankle_r = -lhip_r;
// if (state == 'r')
// rankle_r = -rhip_r;
}
void set_kdjointpose()
{
r_jntarray.data[0] = rankle_r; // r_ankle roll
r_jntarray.data[1] = rankle_p; // r_ankle pitch
r_jntarray.data[2] = rknee_p; // r_knee pitch
r_jntarray.data[3] = rhip_p; // r_hip pitch
r_jntarray.data[4] = rhip_r; // r_hip roll
r_jntarray.data[5] = rhip_y; // r_hip yaw
r_jntarray.data[6] = lhip_y; // l_hip yaw
r_jntarray.data[7] = lhip_r; // l_hip roll
r_jntarray.data[8] = lhip_p; // l_hip pitch
r_jntarray.data[9] = lknee_p; // l_knee pitch
r_jntarray.data[10] = lankle_p; // l_ankle pitch
r_jntarray.data[11] = lankle_r; // l_ankle roll
l_jntarray.data[0] = lankle_r; // l_ankle roll
l_jntarray.data[1] = lankle_p; // l_ankle pitch
l_jntarray.data[2] = lknee_p; // l_knee pitch
l_jntarray.data[3] = lhip_p; // l_hip pitch
l_jntarray.data[4] = lhip_r; // l_hip roll
l_jntarray.data[5] = lhip_y; // l_hip yaw
l_jntarray.data[6] = rhip_y; // r_hip yaw
l_jntarray.data[7] = rhip_r; // r_hip roll
l_jntarray.data[8] = rhip_p; // r_hip pitch
l_jntarray.data[9] = rknee_p; // r_knee pitch
l_jntarray.data[10] = rankle_p; // r_ankle pitch
l_jntarray.data[11] = rankle_r; // r_ankle roll
}
void joint_publish(ros::Rate *r)
{
if (gazebo)
{
std_msgs::Float64 radi;
radi.data = rhip_y;
pb_hip_yaw_R.publish(radi);
radi.data = rhip_r;
pb_hip_roll_R.publish(radi);
radi.data = rhip_p;
pb_hip_pitch_R.publish(radi);
radi.data = rknee_p;
pb_knee_pitch_R.publish(radi);
radi.data = rankle_p;
pb_ankle_pitch_R.publish(radi);
radi.data = rankle_r;
pb_ankle_roll_R.publish(radi);
radi.data = lhip_y;
pb_hip_yaw_L.publish(radi);
radi.data = lhip_r;
pb_hip_roll_L.publish(radi);
radi.data = lhip_p;
pb_hip_pitch_L.publish(radi);
radi.data = lknee_p;
pb_knee_pitch_L.publish(radi);
radi.data = lankle_p;
pb_ankle_pitch_L.publish(radi);
radi.data = lankle_r;
pb_ankle_roll_L.publish(radi);
}
if (real)
{
std::vector<double> temp;
trajectory_msgs::JointTrajectory joint_traj;
std_msgs::Float64 radi;
radi.data = rhip_y;
temp.push_back(radi.data);
radi.data = rhip_r;
temp.push_back(radi.data);
radi.data = rhip_p;
temp.push_back(radi.data);
radi.data = rknee_p;
temp.push_back(radi.data);
radi.data = rankle_p;
temp.push_back(radi.data);
radi.data = rankle_r;
temp.push_back(radi.data);
radi.data = lhip_y;
temp.push_back(radi.data);
radi.data = lhip_r;
temp.push_back(radi.data);
radi.data = lhip_p;
temp.push_back(radi.data);
radi.data = lknee_p;
temp.push_back(radi.data);
radi.data = lankle_p;
temp.push_back(radi.data);
radi.data = lankle_r;
temp.push_back(radi.data);
traj_point.positions = temp;
traj_point.time_from_start = ros::Duration(0.03333);
joint_traj.points.push_back(traj_point);
pb_traj.publish(joint_traj);
}
r->sleep();
}
void com_publish(char state, ros::Rate *r)
{
ps.point.x = humanoid_CoM.x;
ps.point.y = humanoid_CoM.y;
ps.point.z = 0;
if (state == 'r' || state == 'd')
{
ps.header.frame_id = "r_foot_ft_link";
ps_centroid.header.frame_id = "r_foot_ft_link";
}
else if (state == 'l')
{
ps.header.frame_id = "l_foot_ft_link";
ps_centroid.header.frame_id = "l_foot_ft_link";
}
ps_centroid.point.x = centroid.x;
ps_centroid.point.y = centroid.y;
ps_centroid.point.z = ps.point.z;
ps_l_foot.header.frame_id = "r_foot_ft_link";
ps_l_foot.point.x = l_foot.p.data[0];
ps_l_foot.point.y = l_foot.p.data[1];
ps_l_foot.point.z = l_foot.p.data[2];
ps_r_foot.header.frame_id = "l_foot_ft_link";
ps_r_foot.point.x = r_foot.p.data[0];
ps_r_foot.point.y = r_foot.p.data[1];
ps_r_foot.point.z = r_foot.p.data[2];
pb_com.publish(ps);
pb_centroid.publish(ps_centroid);
pb_l_foot.publish(ps_l_foot);
pb_r_foot.publish(ps_r_foot);
std_msgs::Float64 centroid_x, com_x, centroid_y, com_y;
centroid_x.data = centroid.x;
com_x.data = humanoid_CoM.x;
centroid_y.data = centroid.y;
com_y.data = humanoid_CoM.y;
pb_com_x.publish(com_x);
pb_centroid_x.publish(centroid_x);
pb_com_y.publish(com_y);
pb_centroid_y.publish(centroid_y);
r->sleep();
}
point_mass compute_com(char state, KDL::ChainFkSolverPos_recursive *r_leg_fksolver, KDL::ChainFkSolverPos_recursive *l_leg_fksolver, bool verbose = 0)
{
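// Whole-body CoM, expressed in the stance-foot frame: run forward kinematics
// segment by segment from the stance foot, transform each link's centre of
// gravity into that frame, accumulate it weighted by the link mass, and divide
// by the total mass.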
point_mass r_CoM, l_CoM;
if (state == 'r' || state == 'd')
{
for (int i = 0; i < r_leg.getNrOfSegments(); i++)
{
r_leg_fksolver->JntToCart(r_jntarray, l_foot, i + 1);
double r_roll, r_pitch, r_yaw;
l_foot.M.GetRPY(r_roll, r_pitch, r_yaw);
KDL::Rotation rot_inv = l_foot.M.Inverse();
KDL::Vector link_cog = r_leg.getSegment(i).getInertia().getCOG();
KDL::Vector link_cog_refbase;
KDL::Vector link_temp = rot_inv.operator*(l_foot.p);
link_cog_refbase.data[0] = link_temp.data[0] + link_cog.data[0];
link_cog_refbase.data[1] = link_temp.data[1] + link_cog.data[1];
link_cog_refbase.data[2] = link_temp.data[2] + link_cog.data[2];
link_cog_refbase.operator=(l_foot.M.operator*(link_cog_refbase));
r_CoM.x += link_cog_refbase.data[0] * r_leg.getSegment(i).getInertia().getMass();
r_CoM.y += link_cog_refbase.data[1] * r_leg.getSegment(i).getInertia().getMass();
r_CoM.z += link_cog_refbase.data[2] * r_leg.getSegment(i).getInertia().getMass();
// if (i == 9)
// {
// ps.header.frame_id = "r_foot_ft_link";
// ps.point.x = link_cog_refbase.data[0];
// ps.point.y = link_cog_refbase.data[1];
// ps.point.z = link_cog_refbase.data[2];
// pb_com.publish(ps);
// }
if (i == 6)
{
base_roll = r_roll;
base_pitch = r_pitch;
base_yaw = r_yaw;
}
// r_CoM.x += l_foot.p[0] * r_leg.getSegment(i).getInertia().getMass();
// r_CoM.y += l_foot.p[1] * r_leg.getSegment(i).getInertia().getMass();
// r_CoM.z += l_foot.p[2] * r_leg.getSegment(i).getInertia().getMass();
r_CoM.mass += r_leg.getSegment(i).getInertia().getMass();
if (verbose)
{
std::cout << std::setprecision(5) << i << ".) state = "
<< " " << r_leg.getSegment(i).getName() << " mass = " << r_leg.getSegment(i).getInertia().getMass()
<< " Rotation : " << r_roll << " " << r_pitch << " " << r_yaw
<< " Mass Trans : " << r_CoM.x << " " << r_CoM.y << " " << r_CoM.z << ""
<< " Translation main : " << l_foot.p[0] << " " << l_foot.p[1] << " " << l_foot.p[2] << "\n";
}
humanoid_CoM.x = r_CoM.x / r_CoM.mass;
humanoid_CoM.y = r_CoM.y / r_CoM.mass;
}
}
if (state == 'l')
{
for (int i = 0; i < l_leg.getNrOfSegments(); i++)
{
l_leg_fksolver->JntToCart(l_jntarray, r_foot, i + 1);
double l_roll, l_pitch, l_yaw;
r_foot.M.GetRPY(l_roll, l_pitch, l_yaw);
KDL::Rotation rot_inv = r_foot.M.Inverse();
KDL::Vector link_cog = l_leg.getSegment(i).getInertia().getCOG();
KDL::Vector link_cog_refbase;
KDL::Vector link_temp = rot_inv.operator*(r_foot.p);
link_cog_refbase.data[0] = link_temp.data[0] + link_cog.data[0];
link_cog_refbase.data[1] = link_temp.data[1] + link_cog.data[1];
link_cog_refbase.data[2] = link_temp.data[2] + link_cog.data[2];
link_cog_refbase.operator=(r_foot.M.operator*(link_cog_refbase));
l_CoM.x += link_cog_refbase.data[0] * l_leg.getSegment(i).getInertia().getMass();
l_CoM.y += link_cog_refbase.data[1] * l_leg.getSegment(i).getInertia().getMass();
l_CoM.z += link_cog_refbase.data[2] * l_leg.getSegment(i).getInertia().getMass();
// if (i == 9)
// {
// ps.header.frame_id = "l_foot_ft_link";
// ps.point.x = link_cog_refbase.data[0];
// ps.point.y = link_cog_refbase.data[1];
// ps.point.z = link_cog_refbase.data[2];
// pb_com.publish(ps);
// }
// l_CoM.x += r_foot.p[0] * l_leg.getSegment(i).getInertia().getMass();
// l_CoM.y += r_foot.p[1] * l_leg.getSegment(i).getInertia().getMass();
// l_CoM.z += r_foot.p[2] * l_leg.getSegment(i).getInertia().getMass();
l_CoM.mass += l_leg.getSegment(i).getInertia().getMass();
if (verbose)
{
std::cout
<< std::setprecision(5) << i << ".) state = "
<< " " << l_leg.getSegment(i).getName() << " mass = " << l_leg.getSegment(i).getInertia().getMass()
<< " Rotation : " << l_roll << " " << l_pitch << " " << l_yaw
<< " Mass Trans : " << l_CoM.x << " " << l_CoM.y << " " << l_CoM.z << ""
<< " Translation main : " << r_foot.p[0] << " " << r_foot.p[1] << " " << r_foot.p[2] << "\n";
}
humanoid_CoM.x = l_CoM.x / l_CoM.mass;
humanoid_CoM.y = l_CoM.y / l_CoM.mass;
}
}
return humanoid_CoM;
}
point_mass compute_centroid(char state, bool verbose = 0)
{
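// Support centroid used as the CoM target: the stance-foot origin during
// single support ('r'/'l'), or the midpoint between the feet during double
// support ('d', with the right foot as the reference frame).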
if (state == 'r' || state == 'l') // R leg is stance leg
{
centroid.x = 0;
centroid.y = 0;
centroid.z = 0;
}
else if (state == 'd') // R is main leg for double support
{
centroid.x = l_foot.p.data[0] / 2.0;
centroid.y = l_foot.p.data[1] / 2.0;
centroid.z = l_foot.p.data[2] / 2.0;
}
centroid.state = state;
if (verbose)
std::cout << centroid.state << " centroid : (" << centroid.x << ", " << centroid.y << ", " << centroid.z << ")\n";
return centroid;
}
stability quick_is_stable(char state, bool verbose = 0)
{
stability out;
if (verbose)
std::cout << "Humanoid_CoM : " << humanoid_CoM.x << ", " << humanoid_CoM.y << "\n";
if (centroid.x + foot_x_size2 < humanoid_CoM.x && humanoid_CoM.x < centroid.x + foot_x_size)
{
out.x = true;
if (verbose)
std::cout << " x is stable\n";
if (centroid.y + foot_y_size2 < humanoid_CoM.y && humanoid_CoM.y < centroid.y + foot_y_size)
{
out.y = true;
if (verbose)
std::cout << " y is stable\n";
}
}
return out;
}
bool uthai_will_go_on(char state, KDL::ChainFkSolverPos_recursive *r_fksolver, KDL::ChainFkSolverPos_recursive *l_fksolver, ros::Rate *rate, int freq = 200)
{
double jntstate[] = {(T_rhip_y - rhip_y) / freq,
(T_rhip_r - rhip_r) / freq,
(T_rhip_p - rhip_p) / freq,
(T_rknee_p - rknee_p) / freq,
(T_rankle_p - rankle_p) / freq,
(T_rankle_r - rankle_r) / freq,
(T_lhip_y - lhip_y) / freq,
(T_lhip_r - lhip_r) / freq,
(T_lhip_p - lhip_p) / freq,
(T_lknee_p - lknee_p) / freq,
(T_lankle_p - lankle_p) / freq,
(T_lankle_r - lankle_r) / freq};
stability humanoid_stability;
bool stable = false;
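// Balance control: while the joints are interpolated toward their targets, a
// clamped PID loop on the CoM-to-centroid error (x and y) adjusts the stance
// hip pitch and roll so the projected CoM stays over the support foot. Gains
// and limits are divided by freq so the per-step correction does not depend
// on the interpolation resolution.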
double Kp = 20.0 / freq,
Ki = 1.5 / freq,
Kd = 0.0 / freq,
e_x = 0.0,
e_y = 0.0,
de_x = 0.0,
de_y = 0.0,
ie_x = 0.0,
ie_y = 0.0,
P_limit = 0.2 / freq,
I_limit = 4.0 / freq,
D_limit = 200000.0 / freq;
for (int k = 0; k < freq; k++)
{
add_jointpose(jntstate);
auto_ankle(state);
set_kdjointpose();
// ros::spinOnce();
humanoid_CoM = compute_com(state, r_fksolver, l_fksolver, 0);
centroid = compute_centroid(state, 0);
// if (state == 'l')
// centroid.y -= 0.02;
// if (state == 'r')
// centroid.y += 0.02;
humanoid_stability = quick_is_stable(state, 0);
stable = humanoid_stability.check();
de_x = (humanoid_CoM.x - centroid.x) - e_x;
de_y = (humanoid_CoM.y - centroid.y) - e_y;
e_x = humanoid_CoM.x - centroid.x;
e_y = humanoid_CoM.y - centroid.y;
ie_x += e_x;
ie_y += e_y;
double P_x = (Kp * e_x),
P_y = (Kp * e_y),
I_x = (Ki * ie_x),
I_y = (Ki * ie_y),
D_x = (Kd * de_x),
D_y = (Kd * de_y);
if (P_x > P_limit)
P_x = P_limit;
else if (P_x < -P_limit)
P_x = -P_limit;
if (P_y > P_limit)
P_y = P_limit;
else if (P_y < -P_limit)
P_y = -P_limit;
if (ie_x > I_limit)
ie_x = I_limit;
else if (ie_x < -I_limit)
ie_x = -I_limit;
if (ie_y > I_limit)
ie_y = I_limit;
else if (ie_y < -I_limit)
ie_y = -I_limit;
if (D_x > D_limit)
D_x = D_limit;
else if (D_x < -D_limit)
D_x = -D_limit;
if (D_y > D_limit)
D_y = D_limit;
else if (D_y < -D_limit)
D_y = -D_limit;
if (state == 'r')
{
rhip_r += P_y + I_y + D_y;
rhip_p -= P_x + I_x + D_x;
// lankle_r += Kp * (0 - l_roll);
if (l_foot.p.data[1] < 0.16)
{
// std::cout << "l_foot.p.data[1] = " << l_foot.p.data[1] << " r_foot.p.data[1] = " << r_foot.p.data[1] << "\n";
lhip_r += Kp / 3 * (0.16 - l_foot.p.data[1]);
}
}
else if (state == 'l')
{
lhip_r += P_y + I_y + D_y;
lhip_p -= P_x + I_x + D_x;
// rankle_r += Kp * (0 - r_roll);
if (r_foot.p.data[1] > -0.16)
{
// std::cout << "l_foot.p.data[1] = " << l_foot.p.data[1] << " r_foot.p.data[1] = " << r_foot.p.data[1] << "\n";
rhip_r -= Kp / 3 * (-r_foot.p.data[1] + 0.16);
}
}
else if (state == 'd')
{
rhip_r += (P_y + I_y + D_y);
rhip_p -= (P_x + I_x + D_x);
lhip_r += (P_y + I_y + D_y);
lhip_p -= (P_x + I_x + D_x);
}
// auto_ankle(state);
joint_publish(rate);
com_publish(state, rate);
}
set_T_equal_jointpose();
return stable;
}
bool moveleg(double *target, char state, KDL::ChainFkSolverPos_recursive *r_fksolver, KDL::ChainFkSolverPos_recursive *l_fksolver, ros::Rate *rate, int freq = 200)
{
double jntstate[] = {(T_rhip_y - rhip_y) / freq,
(T_rhip_r - rhip_r) / freq,
(T_rhip_p - rhip_p) / freq,
(T_rknee_p - rknee_p) / freq,
(T_rankle_p - rankle_p) / freq,
(T_rankle_r - rankle_r) / freq,
(T_lhip_y - lhip_y) / freq,
(T_lhip_r - lhip_r) / freq,
(T_lhip_p - lhip_p) / freq,
(T_lknee_p - lknee_p) / freq,
(T_lankle_p - lankle_p) / freq,
(T_lankle_r - lankle_r) / freq};
stability humanoid_stability;
bool stable = false;
double Kp = 30.0 / freq,
Ki = 1.5 / freq,
Kd = 0.0 / freq,
e_x = 0.0,
e_y = 0.0,
de_x = 0.0,
de_y = 0.0,
ie_x = 0.0,
ie_y = 0.0,
te_x = 0.0,
te_y = 0.0,
te_z = 0.0,
P_limit = 0.2 / freq,
I_limit = 4.0 / freq,
D_limit = 200000.0 / freq;
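// Same clamped PID balance loop as in uthai_will_go_on, with an additional
// proportional term on the swing-foot position error (te_x, te_y, te_z) that
// steers the swing leg's hip and knee toward the requested foothold.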
for (int k = 0; k < freq; k++)
{
add_jointpose(jntstate);
// auto_ankle(state);
rankle_r = -rhip_r;
lankle_r = -lhip_r;
rankle_p = 0 - (rhip_p + rknee_p);
lankle_p = 0 - (lhip_p + lknee_p);
set_kdjointpose();
humanoid_CoM = compute_com(state, r_fksolver, l_fksolver, 0);
centroid = compute_centroid(state, 0);
if (state == 'l')
centroid.y -= 0.02;
if (state == 'r')
centroid.y += 0.02;
humanoid_stability = quick_is_stable(state, 0);
stable = humanoid_stability.check();
de_x = (humanoid_CoM.x - centroid.x) - e_x;
de_y = (humanoid_CoM.y - centroid.y) - e_y;
e_x = humanoid_CoM.x - centroid.x;
e_y = humanoid_CoM.y - centroid.y;
ie_x += e_x;
ie_y += e_y;
double P_x = (Kp * e_x),
P_y = (Kp * e_y),
I_x = (Ki * ie_x),
I_y = (Ki * ie_y),
D_x = (Kd * de_x),
D_y = (Kd * de_y);
if (P_x > P_limit)
P_x = P_limit;
else if (P_x < -P_limit)
P_x = -P_limit;
if (P_y > P_limit)
P_y = P_limit;
else if (P_y < -P_limit)
P_y = -P_limit;
if (ie_x > I_limit)
ie_x = I_limit;
else if (ie_x < -I_limit)
ie_x = -I_limit;
if (ie_y > I_limit)
ie_y = I_limit;
else if (ie_y < -I_limit)
ie_y = -I_limit;
if (D_x > D_limit)
D_x = D_limit;
else if (D_x < -D_limit)
D_x = -D_limit;
if (D_y > D_limit)
D_y = D_limit;
else if (D_y < -D_limit)
D_y = -D_limit;
if (state == 'r')
{
te_x = l_foot.p.data[0] - target[0];
te_y = l_foot.p.data[1] - target[1];
te_z = l_foot.p.data[2] - target[2];
rhip_r += P_y + I_y + D_y;
rhip_p -= P_x + I_x + D_x;
if (l_foot.p.data[1] < 0.16)
{
// std::cout << "l_foot.p.data[1] = " << l_foot.p.data[1] << " r_foot.p.data[1] = " << r_foot.p.data[1] << "\n";
lhip_r += Kp / 3 * (0.16 - l_foot.p.data[1]);
}
lhip_r -= Kp * te_y;
lhip_p += Kp * te_x;
lknee_p -= Kp * te_z;
}
else if (state == 'l')
{
te_x = r_foot.p.data[0] - target[0];
te_y = r_foot.p.data[1] - target[1];
te_z = r_foot.p.data[2] - target[2];
lhip_r += P_y + I_y + D_y;
lhip_p -= P_x + I_x + D_x;
if (r_foot.p.data[1] > -0.16)
{
// std::cout << "l_foot.p.data[1] = " << l_foot.p.data[1] << " r_foot.p.data[1] = " << r_foot.p.data[1] << "\n";
rhip_r -= Kp / 3 * (-r_foot.p.data[1] + 0.16);
}
rhip_r -= Kp * te_y;
rhip_p += Kp * te_x;
rknee_p -= Kp * te_z;
}
else if (state == 'd')
{
rhip_r += (P_y + I_y + D_y);
rhip_p -= (P_x + I_x + D_x);
lhip_r += (P_y + I_y + D_y);
lhip_p -= (P_x + I_x + D_x);
}
// auto_ankle(state);
joint_publish(rate);
com_publish(state, rate);
}
set_T_equal_jointpose();
return stable;
}
};
void get_path(const nav_msgs::Path::ConstPtr &msg, int sampling, ros::Rate *rate, uthai_kd *uthai, KDL::ChainFkSolverPos_recursive *r_fksolver, KDL::ChainFkSolverPos_recursive *l_fksolver)
{
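// One gait cycle per planned footstep: shift the CoM over the stance foot,
// bend the legs to lift the swing foot, drive it to the planned (x, y, yaw)
// pose, then settle back into double support before the next step.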
for (int idx = 2; idx < msg->poses.size(); idx++)
{
double x = msg->poses[idx].pose.position.x,
y = msg->poses[idx].pose.position.y,
roll, pitch, yaw;
tf::Quaternion q;
q.setX(msg->poses[idx].pose.orientation.x);
q.setY(msg->poses[idx].pose.orientation.y);
q.setZ(msg->poses[idx].pose.orientation.z);
q.setW(msg->poses[idx].pose.orientation.w);
tf::Matrix3x3 m(q);
m.getRPY(roll, pitch, yaw);
char foot = msg->poses[idx].header.frame_id[0];
char foot_stance;
if (foot == 'r')
foot_stance = 'l';
if (foot == 'l')
foot_stance = 'r';
std::cout << idx << ".) x = " << x << " , y = " << y << " , yaw = " << yaw << " , foot = " << foot << "\n";
uthai->uthai_will_go_on(foot_stance, r_fksolver, l_fksolver, rate, sampling * 1.5);
if (foot_stance == 'r')
{
// uthai->T_lhip_p = -0.7;
// uthai->T_lknee_p = 1.4;
uthai->T_rhip_p = -0.3;
uthai->T_rknee_p = 0.55;
uthai->T_lhip_p += -0.2;
uthai->T_lknee_p += 0.5;
uthai->T_lhip_y = yaw;
uthai->T_rhip_y = 0;
}
else if (foot_stance == 'l')
{
// uthai->T_rhip_p = -0.7;
// uthai->T_rknee_p = 1.4;
uthai->T_lhip_p = -0.3;
uthai->T_lknee_p = 0.55;
uthai->T_rhip_p += -0.2;
uthai->T_rknee_p += 0.5;
uthai->T_rhip_y = yaw;
uthai->T_lhip_y = 0;
}
uthai->uthai_will_go_on(foot_stance, r_fksolver, l_fksolver, rate, sampling);
double tfoot[] = {x, y, 0};
uthai->moveleg(tfoot, foot_stance, r_fksolver, l_fksolver, rate, sampling * 2);
uthai->uthai_will_go_on('d', r_fksolver, l_fksolver, rate, sampling);
}
uthai->T_rhip_p = -0.3;
uthai->T_rknee_p = 0.55;
uthai->T_lhip_p = -0.3;
uthai->T_lknee_p = 0.55;
uthai->uthai_will_go_on('d', r_fksolver, l_fksolver, rate, 100);
std::cout << "Finish! \n";
}
int main(int argc, char **argv)
{
ros::init(argc, argv, "kd_passion");
ros::NodeHandle nh;
ros::Rate rate(100); //30/100//80
std::string urdf_file;
nh.getParam("urdf_file", urdf_file);
uthai_kd uthai(&nh, urdf_file, "base_link", "r_foot_ft_link", "l_foot_ft_link");
KDL::ChainFkSolverPos_recursive r_fksolver(uthai.r_leg);
KDL::ChainFkSolverPos_recursive l_fksolver(uthai.l_leg);
// ros::Subscriber sub_path = nh.subscribe("uthai/footstep_path", 1000, get_path, &uthai, &r_fksolver, &l_fksolver);
nav_msgs::Path ptest;
int sampling = 200;
ros::Subscriber sub_path = nh.subscribe<nav_msgs::Path>("uthai/footstep_path", 100, boost::bind(get_path, _1, sampling, &rate, &uthai, &r_fksolver, &l_fksolver));
double home[] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
uthai.set_jointpose(home);
uthai.set_T_jointpose(home);
uthai.uthai_will_go_on('d', &r_fksolver, &l_fksolver, &rate, 70);
// sleep(5);
// uthai->T_rhip_y = -0.1; // start 1st loop
// uthai->T_lhip_y = 0.1;
uthai.T_rhip_p = -0.3;
uthai.T_rknee_p = 0.55;
uthai.T_lhip_p = -0.3;
uthai.T_lknee_p = 0.55;
uthai.uthai_will_go_on('d', &r_fksolver, &l_fksolver, &rate, 100);
std::clock_t begin = clock();
ros::spin();
std::clock_t end = clock();
std::cout << "elapsed time is " << double(end - begin) / CLOCKS_PER_SEC << "\n";
return 0;
}
// for (int i = 0; i < 5; i++)
// {
// uthai.T_rhip_p = -0.4;
// uthai.T_rknee_p = 0.75;
// uthai.uthai_will_go_on('l', &r_fksolver, &l_fksolver, &rate, sampling);
// uthai.T_rhip_p = -0.7;
// uthai.T_rknee_p = 1.4;
// uthai.uthai_will_go_on('l', &r_fksolver, &l_fksolver, &rate, sampling / 2);
// double tfoot[] = {0.1, -0.1, 0};
// uthai.moveleg(tfoot, 'l', &r_fksolver, &l_fksolver, &rate, sampling * 1.5);
// uthai.uthai_will_go_on('d', &r_fksolver, &l_fksolver, &rate, sampling);
// uthai.T_lhip_p = -0.4;
// uthai.T_lknee_p = 0.75;
// uthai.uthai_will_go_on('r', &r_fksolver, &l_fksolver, &rate, sampling);
// uthai.T_lhip_p = -0.7;
// uthai.T_lknee_p = 1.4;
// uthai.uthai_will_go_on('r', &r_fksolver, &l_fksolver, &rate, sampling / 2);
// tfoot[1] = 0.1;
// uthai.moveleg(tfoot, 'r', &r_fksolver, &l_fksolver, &rate, sampling * 1.5);
// uthai.uthai_will_go_on('d', &r_fksolver, &l_fksolver, &rate, sampling);
// }
<|start_filename|>kdl_parser/kdl_parser/test/test_inertia_rpy.cpp<|end_filename|>
/*********************************************************************
* Software License Agreement (BSD License)
*
* Copyright (c) 2015 Open Source Robotics Foundation, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/
/* Author: <NAME> */
#include <iostream>
#include <vector>
#include <gtest/gtest.h>
#include <kdl/jntarray.hpp>
#include <kdl/chainidsolver_recursive_newton_euler.hpp>
#include <ros/ros.h>
#include <ros/console.h>
#include "kdl_parser/kdl_parser.hpp"
int g_argc;
char ** g_argv;
class TestInertiaRPY : public testing::Test
{
public:
protected:
/// constructor
TestInertiaRPY()
{
}
/// Destructor
~TestInertiaRPY()
{
}
};
TEST_F(TestInertiaRPY, test_torques) {
// workaround for segfault issue with parsing 2 trees instantiated on the stack
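// The two URDF files passed as command-line arguments are expected to describe
// the same robot with link inertias expressed using different origin RPY
// values; the inverse-dynamics torques computed from both trees must match.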
KDL::Tree * tree_1 = new KDL::Tree;
KDL::Tree * tree_2 = new KDL::Tree;
KDL::JntArray torques_1;
KDL::JntArray torques_2;
{
ASSERT_TRUE(kdl_parser::treeFromFile(g_argv[1], *tree_1));
KDL::Vector gravity(0, 0, -9.81);
KDL::Chain chain;
std::cout << "number of joints: " << tree_1->getNrOfJoints() << std::endl;
std::cout << "number of segments: " << tree_1->getNrOfSegments() << std::endl;
ASSERT_TRUE(tree_1->getChain("base_link", "link2", chain));
KDL::ChainIdSolver_RNE solver(chain, gravity);
KDL::JntArray q(chain.getNrOfJoints());
KDL::JntArray qdot(chain.getNrOfJoints());
KDL::JntArray qdotdot(chain.getNrOfJoints());
std::vector<KDL::Wrench> wrenches(chain.getNrOfJoints());
solver.CartToJnt(q, qdot, qdotdot, wrenches, torques_1);
delete tree_1;
tree_1 = NULL;
}
{
ASSERT_TRUE(kdl_parser::treeFromFile(g_argv[2], *tree_2));
KDL::Vector gravity(0, 0, -9.81);
KDL::Chain chain;
ASSERT_TRUE(tree_2->getChain("base_link", "link2", chain));
KDL::ChainIdSolver_RNE solver(chain, gravity);
KDL::JntArray q(chain.getNrOfJoints());
KDL::JntArray qdot(chain.getNrOfJoints());
KDL::JntArray qdotdot(chain.getNrOfJoints());
std::vector<KDL::Wrench> wrenches(chain.getNrOfJoints());
solver.CartToJnt(q, qdot, qdotdot, wrenches, torques_2);
delete tree_2;
tree_2 = NULL;
}
ASSERT_TRUE(torques_1 == torques_2);
SUCCEED();
}
int main(int argc, char ** argv)
{
testing::InitGoogleTest(&argc, argv);
ros::init(argc, argv, "test_kdl_parser");
for (int i = 0; i < argc; ++i) {
std::cout << argv[i] << std::endl;
}
g_argc = argc;
g_argv = argv;
return RUN_ALL_TESTS();
}
| oeydev/UTHAI-Humanoid |
<|start_filename|>.bootstrap.mk<|end_filename|>
# Export all Make variables by default to sub-make as well as Shell calls.
#
# Note that you can still explicitly mark a variable with `unexport` and it is
# not going to be exported by Make, regardless of this setting.
#
# https://www.gnu.org/software/make/manual/html_node/Variables_002fRecursion.html
export
# Disable/enable various make features.
#
# https://www.gnu.org/software/make/manual/html_node/Options-Summary.html
MAKEFLAGS += --no-builtin-rules
MAKEFLAGS += --no-builtin-variables
MAKEFLAGS += --no-print-directory
MAKEFLAGS += --warn-undefined-variables
# Set `help` as the default goal to be used if no targets were specified on the command line
#
# https://www.gnu.org/software/make/manual/html_node/Special-Variables.html
.DEFAULT_GOAL:=help
# Delete the target of a rule if its recipe exits with an error.
#
# https://www.gnu.org/software/make/manual/html_node/Special-Targets.html
.DELETE_ON_ERROR:
# Disable the suffix functionality of make.
#
# https://www.gnu.org/software/make/manual/html_node/Suffix-Rules.html
.SUFFIXES:
# This executes all targets in a single shell. This improves performance, by
# not spawning a new shell for each line, and also allows us to write multiline
# commands like conditions and loops without escaping sequences.
#
# https://www.gnu.org/software/make/manual/html_node/One-Shell.html
.ONESHELL:
# This makes all targets silent by default, unless VERBOSE is set.
ifndef VERBOSE
.SILENT:
endif
# The shell that should be used to execute the recipes.
SHELL := bash
.SHELLFLAGS := -euo pipefail -c
# Determine the root directory of our codebase and export it, this allows easy
# file inclusion in both Bash and Make.
override ROOT := $(shell path="$(CURDIR)"; while [[ "$${path}" != "/" \
&& ! -f "$${path}/.bootstrap.mk" ]]; do path="$${path%/*}"; done; echo "$${path}")
## Built-in
# A generic help message that parses the available targets, and lists each one
# that has a comment on the same line with a ## prefix.
help: ## Display this help
readonly pad=$$(printf "%0.1s" "_"{1..25}); \
print_targets() { \
local -n targets_ref=$$1; \
if (( "$${#targets_ref[@]}" > 0 )); then \
declare -a keys=(); \
readarray -t keys < <(printf "%s\n" "$${!targets_ref[@]}" | sort -d); \
for target in "$${keys[@]}"; do \
printf "%s\n" "$${targets_ref[$$target]}"; \
done; \
fi; \
}; \
targets() { \
declare -A targets=(); \
local target_pattern='[^:]+::?[^#]*## +.*'; \
local section_pattern='^## .*'; \
for mk in "$$@"; do \
while read -r line; do \
if [[ "$${line}" =~ $${section_pattern} ]]; then \
print_targets targets; \
targets=(); \
local comment="$${line##*## }"; \
printf " \033[1m%s\033[0m\n" "$${comment}"; \
elif [[ "$${line}" =~ $${target_pattern} ]]; then \
local target="$${line%%:*}"; \
local comment="$${line##*## }"; \
if [ "$${targets[$${target}]+x}" ]; then \
targets["$${target}"]+=$$(printf "\n %$${#pad}s %s\n" "" "$${comment}"); \
else \
targets["$${target}"]=$$(printf " \033[0;32m%s\033[0m \033[0;90m%s\033[0m %s\n" "$${target}" "$${pad:$${#target}}" "$${comment}"); \
fi; \
fi; \
done < "$${mk}"; \
print_targets targets; \
targets=(); \
done; \
}; \
print_option() { \
printf " \033[0;32m%s\033[0m \033[0;90m%s\033[0m %s \033[0;33m[default: %s]\033[0m\n\n" "$$1" "$${pad:$${#1}}" "$$2" "$$3"; \
}; \
echo; \
echo -e "\033[0;33mUsage:\033[0m"; \
echo -e " make [flags...] [target...] [options...]"; \
echo; \
echo -e "\033[0;33mFlags:\033[0m"; \
echo -e " See; \033[1mmake --help\033[0m" \
echo; \
echo -e "\033[0;33mTargets:\033[0m"; \
targets $(MAKEFILE_LIST); \
echo -e "\n\033[0;33mOptions:\033[0m"; \
print_option 'VERBOSE=<mode>' 'Set mode to 1 for verbose output' '0'
.PHONY: help
# Target to update make-bootstrap once installed
self-update: ## Update make-bootstrap
echo "backing up existing bootstrap file..."
cp .bootstrap.mk ".bootstrap.mk.$(shell date +%s)"
echo "downloading latest version from github.com/dsiebel/make-bootstrap (master)"
curl --location --retry 3 --show-error --silent --output .bootstrap.mk \
"https://raw.githubusercontent.com/dsiebel/make-bootstrap/master/.bootstrap.mk"
echo "done!"
.PHONY: self-update
| Zebradil/make-bootstrap |
<|start_filename|>ios/Classes/TflitePlugin.h<|end_filename|>
#import <Flutter/Flutter.h>
@interface TflitePlugin : NSObject<FlutterPlugin>
@end
| diddledani/flutter_tflite |
<|start_filename|>kilo.cpp<|end_filename|>
/* Kilo -- A very simple editor in less than 1-kilo lines of code (as counted
* by "cloc"). Does not depend on libcurses, directly emits VT100
* escapes on the terminal.
*
* -----------------------------------------------------------------------
*
* Copyright (C) 2016 <NAME> <antirez at gmail dot com>
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#define KILO_VERSION "0.0.1"
#include <array>
#include <bitset>
#include <fcntl.h>
#include <fstream>
#include <memory>
#include <signal.h>
#include <sstream>
#include <stdexcept>
#include <stdint.h>
#include <string.h>
#include <string_view>
#include <sys/ioctl.h>
#include <sys/time.h>
#include <sys/types.h>
#include <termios.h>
#include <time.h>
#include <unistd.h>
#include <utility>
#include <vector>
/* Syntax highlight types */
enum class highlight : unsigned char {
normal,
nonprint,
comment, /* Single line comment. */
mlcomment, /* Multi-line comment. */
keyword1,
keyword2,
string,
number,
match, /* Search match. */
};
enum class highlight_flags { strings = 1, numbers = 2 };
namespace kilopp {
class file_descriptor {
public:
file_descriptor(int fd, bool should_close)
: fd(fd), should_close(should_close) {
if (fd == -1) {
throw std::runtime_error("Error opening a file");
}
}
file_descriptor(const file_descriptor&) = delete;
file_descriptor& operator=(const file_descriptor&) = delete;
void truncate(size_t size) {
if (ftruncate(fd, size) == -1) {
throw std::runtime_error("Error truncating");
}
}
template <typename T> void write(const T& buffer) {
if (::write(fd, buffer.data(), buffer.size()) == -1) {
throw std::runtime_error("Write error");
}
}
    // This overload is for string literals whose size is known at compile time.
template <std::size_t N> void write_string(const char (&buffer)[N]) {
        // Do not write the null termination character
if (::write(fd, buffer, N - 1) == -1) {
throw std::runtime_error("Write error");
}
}
~file_descriptor() {
if (should_close && fd != -1) {
close(fd);
}
}
private:
int fd = -1;
bool should_close;
};
struct syntax {
std::vector<std::string_view> extensions;
std::vector<std::string_view> keywords;
std::string_view singleline_comment_start;
std::string_view multiline_comment_start;
std::string_view multiline_comment_end;
std::bitset<2> flags;
};
/* This structure represents a single line of the file we are editing. */
struct erow {
erow(std::string&& line)
: rsize(0), chars(std::move(line)), hl_oc(0), m_hl(nullptr, ::free),
m_render(nullptr, ::free) {}
erow(const erow&) = delete;
erow& operator=(const erow&) = delete;
erow(erow&& other) = default;
erow& operator=(erow&& other) = default;
highlight* hl() { return m_hl.get(); }
const highlight* hl() const { return m_hl.get(); }
char* render() { return m_render.get(); }
const char* render() const { return m_render.get(); }
void realloc_hl() {
const auto old = m_hl.release();
m_hl.reset(static_cast<highlight*>(realloc(old, rsize)));
}
void alloc_render(size_t size) {
m_render.reset(static_cast<char*>(malloc(size)));
}
int rsize; /* Size of the rendered row. */
std::string chars; /* Row content. */
int hl_oc; /* Row had open comment at end in last syntax highlight
check. */
private:
std::unique_ptr<highlight, void (*)(void*)>
m_hl; /* Syntax highlight type for each character in render.*/
std::unique_ptr<char, void (*)(void*)>
m_render; /* Row content "rendered" for screen (for TABs). */
};
typedef struct hlcolor {
int r, g, b;
} hlcolor;
struct config {
size_t cx, cy; /* Cursor x and y position in characters */
size_t rowoff; /* Offset of row displayed. */
size_t coloff; /* Offset of column displayed. */
size_t screenrows; /* Number of rows that we can show */
size_t screencols; /* Number of cols that we can show */
std::vector<erow> row; /* Rows */
bool dirty; /* File modified but not saved. */
std::string filename; /* Currently open filename */
std::string status_message;
time_t statusmsg_time;
const struct syntax* syntax; /* Current syntax highlight, or nullptr. */
};
static struct config E;
class output {
public:
template <typename T, typename... Args>
std::string args(T first, Args... more) {
output << first;
return std::move(args(more...));
}
std::string args() { return std::move(output.str()); }
private:
std::stringstream output;
};
template <typename... Args> constexpr std::string format(Args... args) {
return std::move(output().args(args...));
}
template <typename... Args> void set_status(Args... args) {
output o;
E.status_message = std::move(o.args(args...));
E.statusmsg_time = time(nullptr);
}
constexpr std::string_view WELCOME("Kilo editor -- version " KILO_VERSION
"\x1b[0K\r\n");
/* =========================== Syntax highlights DB =========================
*
* In order to add a new syntax, define two arrays with a list of file name
* matches and keywords. The file name matches are used in order to match
* a given syntax with a given file name: if a match pattern starts with a
 * dot, it is matched as the last part of the filename, for example ".c".
 * Otherwise the pattern is just searched inside the filename (like "Makefile").
 *
 * The list of keywords to highlight is just a list of words; however, if a
 * keyword has a trailing '|' character, it is highlighted in a different
 * color, so that you can have two different sets of keywords.
 *
 * Finally add a stanza in the HLDB global variable with the two arrays
* of strings, and a set of flags in order to enable highlighting of
* comments and numbers.
*
* The characters for single and multi line comments must be exactly two
* and must be provided as well (see the C language example).
*
* There is no support to highlight patterns currently. */
/* C / C++ */
/* Here we define an array of syntax highlights by extensions, keywords,
* comments delimiters and flags. */
std::array<struct syntax, 1> HLDB = {
{/* C / C++ */
std::vector<std::string_view>{".c", ".h", ".cpp", ".hpp", ".cc"},
std::vector<std::string_view>{
/* C Keywords */
"auto", "break", "case", "continue", "default", "do", "else", "enum",
"extern", "for", "goto", "if", "register", "return", "sizeof",
"static", "struct", "switch", "typedef", "union", "volatile", "while",
"NULL",
/* C++ Keywords */
"alignas", "alignof", "and", "and_eq", "asm", "bitand", "bitor",
"class", "compl", "constexpr", "const_cast", "deltype", "delete",
"dynamic_cast", "explicit", "export", "false", "friend", "inline",
"mutable", "namespace", "new", "noexcept", "not", "not_eq", "nullptr",
"operator", "or", "or_eq", "private", "protected", "public",
"reinterpret_cast", "static_assert", "static_cast", "template", "this",
"thread_local", "throw", "true", "try", "typeid", "typename",
"virtual", "xor", "xor_eq",
/* C types */
"int|", "long|", "double|", "float|", "char|", "unsigned|", "signed|",
"void|", "short|", "auto|", "const|", "bool|"},
"//", "/*", "*/", std::bitset<2>("11")}};
/* ======================= Low level terminal handling ====================== */
namespace term {
enum KEY_ACTION {
KEY_NULL = 0, /* NULL */
CTRL_C = 3, /* Ctrl-c */
CTRL_D = 4, /* Ctrl-d */
CTRL_F = 6, /* Ctrl-f */
CTRL_H = 8, /* Ctrl-h */
TAB = 9, /* Tab */
CTRL_L = 12, /* Ctrl+l */
ENTER = 13, /* Enter */
CTRL_Q = 17, /* Ctrl-q */
CTRL_S = 19, /* Ctrl-s */
CTRL_U = 21, /* Ctrl-u */
ESC = 27, /* Escape */
BACKSPACE = 127, /* Backspace */
/* The following are just soft codes, not really reported by the
* terminal directly. */
ARROW_LEFT = 1000,
ARROW_RIGHT,
ARROW_UP,
ARROW_DOWN,
DEL_KEY,
HOME_KEY,
END_KEY,
PAGE_UP,
PAGE_DOWN
};
class raw_mode {
public:
raw_mode(const raw_mode&) = delete;
raw_mode& operator=(const raw_mode&) = delete;
raw_mode() {
struct termios raw;
if (!isatty(STDIN_FILENO)) {
throw std::runtime_error("stdin isn't a TTY");
}
if (tcgetattr(STDIN_FILENO, &previous_state) == -1) {
throw std::runtime_error(
"Unable to get the current terminal state");
}
raw = previous_state; /* modify the original mode */
/* input modes: no break, no CR to NL, no parity check, no strip char,
* no start/stop output control. */
raw.c_iflag &= ~(BRKINT | ICRNL | INPCK | ISTRIP | IXON);
/* output modes - disable post processing */
raw.c_oflag &= ~(OPOST);
/* control modes - set 8 bit chars */
raw.c_cflag |= (CS8);
        /* local modes - echoing off, canonical off, no extended functions,
* no signal chars (^Z,^C) */
raw.c_lflag &= ~(ECHO | ICANON | IEXTEN | ISIG);
/* control chars - set return condition: min number of bytes and timer.
*/
raw.c_cc[VMIN] = 0; /* Return each byte, or zero for timeout. */
        raw.c_cc[VTIME] = 1; /* 100 ms timeout (unit is tenths of a second). */
/* put terminal in raw mode after flushing */
if (tcsetattr(STDIN_FILENO, TCSAFLUSH, &raw) < 0) {
throw std::runtime_error("Unable to set the terminal to raw mode");
}
}
~raw_mode() {
file_descriptor(STDIN_FILENO, false).write_string("\x1b[0;0H\033[2J");
tcsetattr(STDIN_FILENO, TCSAFLUSH, &previous_state);
}
private:
struct termios previous_state;
};
/* Read a key from the terminal put in raw mode, trying to handle
* escape sequences. */
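/* For example, the Up arrow key typically arrives as the three bytes
 * ESC '[' 'A', and the Delete key as the four bytes ESC '[' '3' '~'. */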
int read_key(int fd) {
int nread;
char c;
std::array<char, 3> seq;
while ((nread = read(fd, &c, 1)) == 0)
;
if (nread == -1)
exit(1);
while (1) {
switch (c) {
case ESC: /* escape sequence */
/* If this is just an ESC, we'll timeout here. */
if (read(fd, seq.data(), 1) == 0)
return ESC;
if (read(fd, seq.data() + 1, 1) == 0)
return ESC;
/* ESC [ sequences. */
if (seq[0] == '[') {
if (seq[1] >= '0' && seq[1] <= '9') {
/* Extended escape, read additional byte. */
if (read(fd, seq.data() + 2, 1) == 0)
return ESC;
if (seq[2] == '~') {
switch (seq[1]) {
case '3':
return DEL_KEY;
case '5':
return PAGE_UP;
case '6':
return PAGE_DOWN;
}
}
} else {
switch (seq[1]) {
case 'A':
return ARROW_UP;
case 'B':
return ARROW_DOWN;
case 'C':
return ARROW_RIGHT;
case 'D':
return ARROW_LEFT;
case 'H':
return HOME_KEY;
case 'F':
return END_KEY;
}
}
}
/* ESC O sequences. */
else if (seq[0] == 'O') {
switch (seq[1]) {
case 'H':
return HOME_KEY;
case 'F':
return END_KEY;
}
}
break;
default:
return c;
}
}
}
/* Use the ESC [6n escape sequence to query the cursor position and return it
 * as a (rows, cols) pair. Throws a std::runtime_error if the terminal's reply
 * cannot be parsed. */
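/* For example, with the cursor on row 12, column 40 the terminal replies with
 * the bytes ESC [ 1 2 ; 4 0 R. */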
std::pair<int, int> get_cursor_position(int ifd, int ofd) {
auto result = std::pair<int, int>();
std::array<char, 32> buf;
unsigned int i = 0;
/* Report cursor location */
file_descriptor(ofd, false).write_string("\x1b[6n");
/* Read the response: ESC [ rows ; cols R */
while (i < sizeof(buf) - 1) {
if (read(ifd, buf.data() + i, 1) != 1)
break;
if (buf[i] == 'R')
break;
i++;
}
buf[i] = '\0';
/* Parse it. */
if (buf[0] != ESC || buf[1] != '[') {
throw std::runtime_error{"ioctl failed"};
}
if (sscanf(buf.data() + 2, "%d;%d", &result.first, &result.second) != 2) {
throw std::runtime_error{"ioctl failed"};
}
return result;
}
/* Try to get the size of the current terminal. If the ioctl() call fails the
 * function will query the terminal itself, storing the result in 'rows' and
 * 'cols'. */
void get_window_size(int ifd, int ofd, size_t& rows, size_t& cols) {
struct winsize ws;
if (ioctl(1, TIOCGWINSZ, &ws) == -1 || ws.ws_col == 0) {
/* ioctl() failed. Try to query the terminal itself. */
auto orig_position = get_cursor_position(ifd, ofd);
file_descriptor fd(ofd, false);
/* Go to right/bottom margin and get position. */
fd.write_string("\x1b[999C\x1b[999B");
auto position = get_cursor_position(ifd, ofd);
rows = position.first;
cols = position.second;
/* Restore position. */
fd.write(format("\x1b[", orig_position.first, ";", orig_position.second,
"H"));
} else {
cols = ws.ws_col;
rows = ws.ws_row;
}
}
} // namespace term
/* ====================== Syntax highlight color scheme ==================== */
constexpr bool is_separator(int c) {
return c == '\0' || isspace(c) || strchr(",.()+-/*=~%[];", c) != nullptr;
}
/* Return true if the specified row last char is part of a multi line comment
* that starts at this row or at one before, and does not end at the end
 * of the row but spans to the next row. */
bool has_open_comment(const erow& row) {
return (row.hl() && row.rsize &&
row.hl()[row.rsize - 1] == highlight::mlcomment &&
(row.rsize < 2 || (row.render()[row.rsize - 2] != '*' ||
row.render()[row.rsize - 1] != '/')));
}
/* Set every byte of row->hl() (that corresponds to every character in the line)
 * to the right syntax highlight type (a value of the highlight enum). */
void update_syntax(erow& row, size_t row_index) {
row.realloc_hl();
std::fill_n(row.hl(), row.rsize, highlight::normal);
if (E.syntax == nullptr)
return; /* No syntax, everything is highlight::normal. */
int i;
bool prev_sep, in_string, in_comment;
char* p;
const auto& keywords = E.syntax->keywords;
auto scs = E.syntax->singleline_comment_start;
auto mcs = E.syntax->multiline_comment_start;
auto mce = E.syntax->multiline_comment_end;
/* Point to the first non-space char. */
p = row.render();
i = 0; /* Current char offset */
while (*p && isspace(*p)) {
p++;
i++;
}
prev_sep = true; /* Tell the parser if 'i' points to start of word. */
in_string = false; /* Are we inside "" or '' ? */
in_comment = false; /* Are we inside multi-line comment? */
/* If the previous line has an open comment, this line starts
* with an open comment state. */
if (row_index > 0 && has_open_comment(E.row[row_index - 1]))
in_comment = true;
while (*p) {
/* Handle // comments. */
if (prev_sep && *p == scs[0] && *(p + 1) == scs[1]) {
/* From here to end is a comment */
std::fill_n(row.hl() + i, row.chars.size() - i, highlight::comment);
return;
}
/* Handle multi line comments. */
if (in_comment) {
row.hl()[i] = highlight::mlcomment;
if (*p == mce[0] && *(p + 1) == mce[1]) {
row.hl()[i + 1] = highlight::mlcomment;
p += 2;
i += 2;
in_comment = false;
prev_sep = true;
continue;
} else {
prev_sep = false;
p++;
i++;
continue;
}
} else if (*p == mcs[0] && *(p + 1) == mcs[1]) {
row.hl()[i] = highlight::mlcomment;
row.hl()[i + 1] = highlight::mlcomment;
p += 2;
i += 2;
in_comment = true;
prev_sep = false;
continue;
}
/* Handle "" and '' */
if (in_string) {
row.hl()[i] = highlight::string;
if (*p == '\\') {
row.hl()[i + 1] = highlight::string;
p += 2;
i += 2;
prev_sep = false;
continue;
}
if (*p == in_string)
in_string = false;
p++;
i++;
continue;
} else {
if (*p == '"' || *p == '\'') {
in_string = *p;
row.hl()[i] = highlight::string;
p++;
i++;
prev_sep = false;
continue;
}
}
/* Handle non printable chars. */
if (!isprint(*p)) {
row.hl()[i] = highlight::nonprint;
p++;
i++;
prev_sep = false;
continue;
}
/* Handle numbers */
if ((isdigit(*p) &&
(prev_sep || row.hl()[i - 1] == highlight::number)) ||
(*p == '.' && i > 0 && row.hl()[i - 1] == highlight::number)) {
row.hl()[i] = highlight::number;
p++;
i++;
prev_sep = false;
continue;
}
/* Handle keywords and lib calls */
if (prev_sep) {
for (auto word : keywords) {
const auto kw2 = word[word.size() - 1] == '|';
if (kw2) {
word.remove_suffix(1);
}
if (word == std::string_view(p, word.size()) &&
is_separator(*(p + word.size()))) {
/* Keyword */
std::fill_n(
row.hl() + i, word.size(),
(kw2 ? highlight::keyword1 : highlight::keyword2));
p += word.size();
i += word.size();
prev_sep = false;
break;
}
}
}
/* Not special chars */
prev_sep = is_separator(*p);
p++;
i++;
}
    /* Propagate syntax change to the next row if the open comment
* state changed. This may recursively affect all the following rows
* in the file. */
bool oc = has_open_comment(row);
if (row.hl_oc != oc && row_index + 1 < E.row.size())
update_syntax(E.row[row_index + 1], row_index + 1);
row.hl_oc = oc;
}
/* Maps syntax highlight token types to terminal colors. */
constexpr int syntax_to_color(highlight hl) {
switch (hl) {
case highlight::comment:
case highlight::mlcomment:
return 36; /* cyan */
case highlight::keyword2:
return 33; /* yellow */
case highlight::keyword1:
return 32; /* green */
case highlight::string:
return 35; /* magenta */
case highlight::number:
return 31; /* red */
case highlight::match:
        return 34; /* blue */
default:
return 37; /* white */
}
}
/* Select the syntax highlight scheme depending on the filename,
* setting it in the global state E.syntax. */
void select_syntax_highlight(const std::string_view&& filename) {
for (const auto& syntax : HLDB) {
for (const auto& raw_extension : syntax.extensions) {
std::string_view extension(raw_extension);
            auto position = filename.rfind(extension);
            if ((position != std::string_view::npos) &&
                (position + extension.length() == filename.length())) {
E.syntax = &syntax;
return;
}
}
}
}
/* ======================= Editor rows implementation ======================= */
/* Update the rendered version and the syntax highlight of a row. */
void update_row(erow& row, size_t row_index) {
unsigned int tabs = 0, nonprint = 0;
/* Create a version of the row we can directly print on the screen,
* respecting tabs, substituting non printable characters with '?'. */
for (const auto c : row.chars)
if (c == term::TAB)
tabs++;
unsigned long long allocsize =
(unsigned long long)row.chars.size() + tabs * 8 + nonprint * 9 + 1;
if (allocsize > UINT32_MAX) {
printf("Some line of the edited file is too long for kilo\n");
exit(1);
}
row.alloc_render(row.chars.size() + tabs * 8 + nonprint * 9 + 1);
auto idx = 0;
for (const auto c : row.chars) {
if (c == term::TAB) {
row.render()[idx++] = ' ';
while ((idx + 1) % 8 != 0)
row.render()[idx++] = ' ';
} else {
row.render()[idx++] = c;
}
}
row.rsize = idx;
row.render()[idx] = '\0';
/* Update the syntax highlighting attributes of the row. */
update_syntax(row, row_index);
}
/* Insert a row at the specified position, shifting the other rows on the bottom
* if required. */
void insert_row(size_t at, std::string&& line) {
if (at > E.row.size())
return;
E.row.emplace(E.row.begin() + at, std::move(line));
update_row(E.row[at], at);
E.dirty = true;
}
/* Remove the row at the specified position, shifting the remaining rows
 * up. */
void delete_row(size_t at) {
if (at >= E.row.size())
return;
E.row.erase(E.row.begin() + at);
E.dirty = true;
}
/* Turn the editor rows into a single buffer.
 * Returns a vector of characters containing the whole file, with a newline
 * appended after every row. */
std::vector<char> to_string() {
size_t length = 0;
/* Compute count of bytes */
for (auto const& row : E.row) {
length += row.chars.size() + 1; /* +1 is for "\n" at end of every row */
}
std::vector<char> result;
result.reserve(length);
for (auto const& row_struct : E.row) {
for (const auto c : row_struct.chars) {
result.push_back(c);
}
result.push_back('\n');
}
return result;
}
/* Insert a character at the specified position in a row, moving the remaining
* chars on the right if needed. */
void insert_character_to_row(erow& row, size_t char_index, int c,
size_t row_index) {
if (char_index > row.chars.size()) {
/* Pad the string with spaces if the insert location is outside the
* current length by more than a single character. */
int padlen = char_index - row.chars.size();
        /* Append padlen spaces so the row reaches the insert location. */
for (auto i = 0; i < padlen; ++i) {
row.chars.push_back(' ');
}
}
row.chars.insert(row.chars.begin() + char_index, static_cast<char>(c));
update_row(row, row_index);
E.dirty = true;
}
/* Append the string 's' at the end of a row */
void append_to_row(erow& row, const char* s, size_t row_index) {
row.chars.append(s);
update_row(row, row_index);
E.dirty = true;
}
/* Delete the character at offset 'at' from the specified row. */
void delete_character_from_row(erow& row, size_t at, size_t row_index) {
if (row.chars.size() <= at)
return;
row.chars.erase(at, 1);
update_row(row, row_index);
E.dirty = true;
}
/* Insert the specified char at the current prompt position. */
void insert_character(char c) {
const auto filerow = E.rowoff + E.cy;
const auto filecol = E.coloff + E.cx;
erow* row = (filerow >= E.row.size()) ? nullptr : &E.row[filerow];
/* If the row where the cursor is currently located does not exist in our
     * logical representation of the file, add enough empty rows as needed. */
if (!row) {
while (E.row.size() <= filerow)
insert_row(E.row.size(), "");
}
row = &E.row[filerow];
insert_character_to_row(*row, filecol, c, filerow);
if (E.cx == E.screencols - 1)
E.coloff++;
else
E.cx++;
E.dirty = true;
}
/* Inserting a newline is slightly complex as we have to handle inserting a
* newline in the middle of a line, splitting the line as needed. */
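/* For example, with the cursor on the 'w' of "hello world", pressing Enter
 * leaves "hello " on the current row and inserts a new row "world" below it. */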
void insert_newline(void) {
const auto filerow = E.rowoff + E.cy;
auto filecol = E.coloff + E.cx;
erow* row = (filerow >= E.row.size()) ? nullptr : &E.row[filerow];
if (!row) {
if (filerow == E.row.size()) {
insert_row(filerow, "");
goto fixcursor;
}
return;
}
/* If the cursor is over the current line size, we want to conceptually
* think it's just over the last character. */
if (filecol >= row->chars.size())
filecol = row->chars.size();
if (filecol == 0) {
insert_row(filerow, "");
} else {
/* We are in the middle of a line. Split it between two rows. */
const auto split = row->chars.substr(filecol);
insert_row(filerow + 1, split.c_str());
row = &E.row[filerow];
row->chars.erase(filecol);
update_row(*row, filerow);
}
fixcursor:
if (E.cy == E.screenrows - 1) {
E.rowoff++;
} else {
E.cy++;
}
E.cx = 0;
E.coloff = 0;
}
/* Delete the char at the current prompt position. */
void delete_character() {
const auto filerow = E.rowoff + E.cy;
auto filecol = E.coloff + E.cx;
erow* row = (filerow >= E.row.size()) ? nullptr : &E.row[filerow];
if (!row || (filecol == 0 && filerow == 0))
return;
if (filecol == 0) {
/* Handle the case of column 0, we need to move the current line
* on the right of the previous one. */
filecol = E.row[filerow - 1].chars.size();
auto row_index = filerow - 1;
append_to_row(E.row[row_index], row->chars.c_str(), row_index);
delete_row(filerow);
row = nullptr;
if (E.cy == 0)
E.rowoff--;
else
E.cy--;
E.cx = filecol;
if (E.cx >= E.screencols) {
int shift = (E.screencols - E.cx) + 1;
E.cx -= shift;
E.coloff += shift;
}
} else {
delete_character_from_row(*row, filecol - 1, filerow);
if (E.cx == 0 && E.coloff)
E.coloff--;
else
E.cx--;
}
if (row)
update_row(*row, filerow);
E.dirty = true;
}
/* Load the specified file into the editor memory. */
void open_file(const char* filename) {
E.dirty = false;
E.filename = filename;
std::ifstream infile(filename);
    std::string line;
while (std::getline(infile, line)) {
insert_row(E.row.size(), std::move(line));
}
E.dirty = false;
}
/* Save the current file on disk. Return 0 on success, 1 on error. */
int save(void) {
const auto buffer = to_string();
const auto length = buffer.size();
try {
file_descriptor fd(open(E.filename.c_str(), O_RDWR | O_CREAT, 0644),
true);
fd.truncate(length);
fd.write(buffer);
E.dirty = false;
        set_status(length, " bytes written on disk");
        return 0;
} catch (const std::runtime_error& e) {
set_status("Can't save! I/O error: ", strerror(errno));
}
return 1;
}
/* ============================= Terminal update ============================ */
/* This function writes the whole screen using VT100 escape characters
* starting from the logical state of the editor in the global state 'E'. */
void refresh_screen(void) {
erow* r;
// char buf[32];
std::stringstream output;
output << "\x1b[?25l"; /* Hide cursor. */
output << "\x1b[H"; /* Go home. */
for (size_t y = 0; y < E.screenrows; y++) {
const auto filerow = E.rowoff + y;
if (filerow >= E.row.size()) {
if (E.row.size() == 0 && y == E.screenrows / 3) {
int padding = (E.screencols - WELCOME.size()) / 2;
if (padding) {
output << '~';
padding--;
}
while (padding--)
output << ' ';
output << WELCOME;
} else {
output << "~\x1b[0K\r\n";
}
continue;
}
r = &E.row[filerow];
auto len = r->rsize - E.coloff;
auto current_color = -1;
if (len > 0) {
if (len > E.screencols)
len = E.screencols;
const auto c = r->render() + E.coloff;
const auto hl = r->hl() + E.coloff;
for (size_t j = 0; j < len; j++) {
if (hl[j] == highlight::nonprint) {
char sym;
output << "\x1b[7m";
if (c[j] <= 26)
sym = '@' + c[j];
else
sym = '?';
output << sym;
output << "\x1b[0m";
} else if (hl[j] == highlight::normal) {
if (current_color != -1) {
output << "\x1b[39m";
current_color = -1;
}
output << c[j];
} else {
int color = syntax_to_color(hl[j]);
if (color != current_color) {
output << "\x1b[" << color << 'm';
current_color = color;
}
output << c[j];
}
}
}
output << "\x1b[39m";
output << "\x1b[0K";
output << "\r\n";
}
/* Create a two rows status. First row: */
output << "\x1b[0K";
output << "\x1b[7m";
char status[80], rstatus[80];
size_t len =
snprintf(status, sizeof(status), "%.20s - %lu lines %s",
E.filename.c_str(), E.row.size(), E.dirty ? "(modified)" : "");
size_t rlen = snprintf(rstatus, sizeof(rstatus), "%lu/%lu",
E.rowoff + E.cy + 1, E.row.size());
if (len > E.screencols)
len = E.screencols;
output << status;
while (len < E.screencols) {
if (E.screencols - len == rlen) {
output << rstatus;
break;
} else {
output << ' ';
len++;
}
}
output << "\x1b[0m\r\n";
/* Second row depends on E.statusmsg and the status message update time. */
output << "\x1b[0K";
if (!E.status_message.empty() && time(nullptr) - E.statusmsg_time < 5)
output << E.status_message; //, msglen <= E.screencols ? msglen :
// E.screencols);
/* Put cursor at its current position. Note that the horizontal position
* at which the cursor is displayed may be different compared to 'E.cx'
* because of TABs. */
auto cx = 1;
const auto filerow = E.rowoff + E.cy;
erow* row = (filerow >= E.row.size()) ? nullptr : &E.row[filerow];
if (row) {
for (auto j = E.coloff; j < (E.cx + E.coloff); j++) {
if (j < row->chars.size() && row->chars[j] == term::TAB)
cx += 7 - ((cx) % 8);
cx++;
}
}
output << "\x1b[" << E.cy + 1 << ';' << cx << 'H';
output << "\x1b[?25h"; /* Show cursor. */
auto const data = output.str();
write(STDOUT_FILENO, data.data(), data.size());
}
/* =============================== Find mode ================================ */
#define KILO_QUERY_LEN 256
void find(int fd) {
char query[KILO_QUERY_LEN + 1] = {0};
int qlen = 0;
int last_match = -1; /* Last line where a match was found. -1 for none. */
int find_next = 0; /* if 1 search next, if -1 search prev. */
int saved_hl_line = -1; /* No saved HL */
char* saved_hl = nullptr;
#define FIND_RESTORE_HL \
do { \
if (saved_hl) { \
memcpy(E.row[saved_hl_line].hl(), saved_hl, \
E.row[saved_hl_line].rsize); \
free(saved_hl); \
saved_hl = nullptr; \
} \
} while (0)
/* Save the cursor position in order to restore it later. */
int saved_cx = E.cx, saved_cy = E.cy;
int saved_coloff = E.coloff, saved_rowoff = E.rowoff;
while (1) {
set_status("Search: ", query, " (Use ESC/Arrows/Enter)");
refresh_screen();
int c = term::read_key(fd);
if (c == term::DEL_KEY || c == term::CTRL_H || c == term::BACKSPACE) {
if (qlen != 0)
query[--qlen] = '\0';
last_match = -1;
} else if (c == term::ESC || c == term::ENTER) {
if (c == term::ESC) {
E.cx = saved_cx;
E.cy = saved_cy;
E.coloff = saved_coloff;
E.rowoff = saved_rowoff;
}
FIND_RESTORE_HL;
set_status();
return;
} else if (c == term::ARROW_RIGHT || c == term::ARROW_DOWN) {
find_next = 1;
} else if (c == term::ARROW_LEFT || c == term::ARROW_UP) {
find_next = -1;
} else if (isprint(c)) {
if (qlen < KILO_QUERY_LEN) {
query[qlen++] = c;
query[qlen] = '\0';
last_match = -1;
}
}
/* Search occurrence. */
if (last_match == -1)
find_next = 1;
if (find_next) {
char* match = nullptr;
int match_offset = 0;
int current = last_match;
for (size_t i = 0; i < E.row.size(); i++) {
current += find_next;
if (current == -1)
current = E.row.size() - 1;
else if (static_cast<size_t>(current) == E.row.size())
current = 0;
match = strstr(E.row[current].render(), query);
if (match) {
match_offset = match - E.row[current].render();
break;
}
}
find_next = 0;
/* Highlight */
FIND_RESTORE_HL;
if (match) {
erow* row = &E.row[current];
last_match = current;
if (row->hl()) {
saved_hl_line = current;
saved_hl = static_cast<char*>(malloc(row->rsize));
memcpy(saved_hl, row->hl(), row->rsize);
memset(row->hl() + match_offset,
static_cast<int>(highlight::match), qlen);
}
E.cy = 0;
E.cx = match_offset;
E.rowoff = current;
E.coloff = 0;
/* Scroll horizontally as needed. */
if (E.cx > E.screencols) {
int diff = E.cx - E.screencols;
E.cx -= diff;
E.coloff += diff;
}
}
}
}
}
/* ========================= Editor events handling ======================== */
/* Handle cursor position change because arrow keys were pressed. */
void move_cursor(int key) {
auto filerow = E.rowoff + E.cy;
auto filecol = E.coloff + E.cx;
size_t rowlen;
erow* row = (filerow >= E.row.size()) ? nullptr : &E.row[filerow];
switch (key) {
case term::ARROW_LEFT:
if (E.cx == 0) {
if (E.coloff) {
E.coloff--;
} else {
if (filerow > 0) {
E.cy--;
E.cx = E.row[filerow - 1].chars.size();
if (E.cx > E.screencols - 1) {
E.coloff = E.cx - E.screencols + 1;
E.cx = E.screencols - 1;
}
}
}
} else {
E.cx -= 1;
}
break;
case term::ARROW_RIGHT:
if (row && filecol < row->chars.size()) {
if (E.cx == E.screencols - 1) {
E.coloff++;
} else {
E.cx += 1;
}
} else if (row && filecol == row->chars.size()) {
E.cx = 0;
E.coloff = 0;
if (E.cy == E.screenrows - 1) {
E.rowoff++;
} else {
E.cy += 1;
}
}
break;
case term::ARROW_UP:
if (E.cy == 0) {
if (E.rowoff)
E.rowoff--;
} else {
E.cy -= 1;
}
break;
case term::ARROW_DOWN:
if (filerow < E.row.size()) {
if (E.cy == E.screenrows - 1) {
E.rowoff++;
} else {
E.cy += 1;
}
}
break;
}
    /* Fix cx if the current line does not have enough chars. */
filerow = E.rowoff + E.cy;
filecol = E.coloff + E.cx;
row = (filerow >= E.row.size()) ? nullptr : &E.row[filerow];
rowlen = row ? row->chars.size() : 0;
if (filecol > rowlen) {
        const auto remainder = filecol - rowlen;
        if (remainder > E.cx) {
            E.coloff -= remainder - E.cx;
            E.cx = 0;
        } else {
            E.cx -= remainder;
}
}
}
/* Process events arriving from the standard input, that is, the user typing
 * on the terminal. */
#define KILO_QUIT_TIMES 3
bool process_keypress(int fd) {
    /* When the file is modified, require Ctrl-q to be pressed N times
* before actually quitting. */
static int quit_times = KILO_QUIT_TIMES;
int c = term::read_key(fd);
switch (c) {
case term::ENTER: /* Enter */
insert_newline();
break;
case term::CTRL_C: /* Ctrl-c */
/* We ignore ctrl-c, it can't be so simple to lose the changes
* to the edited file. */
break;
case term::CTRL_Q: /* Ctrl-q */
/* Quit if the file was already saved. */
if (E.dirty && quit_times) {
set_status("WARNING!!! File has unsaved changes. "
"Press Ctrl-Q ",
quit_times, " more times to quit.");
quit_times--;
return true;
}
return false;
case term::CTRL_S: /* Ctrl-s */
save();
break;
case term::CTRL_F:
find(fd);
break;
case term::BACKSPACE: /* Backspace */
case term::CTRL_H: /* Ctrl-h */
case term::DEL_KEY:
delete_character();
break;
case term::PAGE_UP:
case term::PAGE_DOWN:
if (c == term::PAGE_UP && E.cy != 0)
E.cy = 0;
else if (c == term::PAGE_DOWN && E.cy != E.screenrows - 1)
E.cy = E.screenrows - 1;
{
int times = E.screenrows;
while (times--)
move_cursor(c == term::PAGE_UP ? term::ARROW_UP
: term::ARROW_DOWN);
}
break;
case term::ARROW_UP:
case term::ARROW_DOWN:
case term::ARROW_LEFT:
case term::ARROW_RIGHT:
move_cursor(c);
break;
case term::CTRL_L: /* ctrl+l, clear screen */
        /* Just refresh the line as a side effect. */
break;
case term::ESC:
/* Nothing to do for ESC in this mode. */
break;
default:
insert_character(c);
break;
}
quit_times = KILO_QUIT_TIMES; /* Reset it to the original value. */
return true;
}
int editorFileWasModified(void) { return E.dirty; }
void update_window_size(void) {
term::get_window_size(STDIN_FILENO, STDOUT_FILENO, E.screenrows,
E.screencols);
E.screenrows -= 2; /* Get room for status bar. */
}
void signal_handler(int unused __attribute__((unused))) {
update_window_size();
if (E.cy > E.screenrows)
E.cy = E.screenrows - 1;
if (E.cx > E.screencols)
E.cx = E.screencols - 1;
refresh_screen();
}
void init(void) {
E.cx = 0;
E.cy = 0;
E.rowoff = 0;
E.coloff = 0;
E.dirty = false;
E.syntax = nullptr;
update_window_size();
signal(SIGWINCH, signal_handler);
}
} // namespace kilopp
using namespace kilopp;
int main(int argc, char** argv) {
if (argc != 2) {
fprintf(stderr, "Usage: kilo <filename>\n");
exit(1);
}
init();
select_syntax_highlight(argv[1]);
open_file(argv[1]);
term::raw_mode rm;
set_status("HELP: Ctrl-S = save | Ctrl-Q = quit | Ctrl-F = find");
do {
refresh_screen();
} while (process_keypress(STDIN_FILENO));
return 0;
} | r-darwish/kilo |
<|start_filename|>core22/Models/POS/Models/InvenTran.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace core22.Models.POS.Models
{
public class InvenTran
{
public Guid InvenTranId { get; set; }
public string Number { get; set; }
public string Description { get; set; }
public Guid ProductId { get; set; }
public Product Product { get; set; }
public Guid TranSourceId { get; set; }
public string TranSourceNumber { get; set; }
public string TranSourceType { get; set; }
public int Quantity { get; set; }
public DateTimeOffset? InvenTranDate { get; set; } = DateTime.Now;
}
}
<|start_filename|>core22/Data/ApplicationDbContext.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
using core22.Models.POS.Models;
namespace core22.Data
{
public class ApplicationDbContext : IdentityDbContext
{
public ApplicationDbContext(DbContextOptions<ApplicationDbContext> options)
: base(options)
{
}
public DbSet<core22.Models.POS.Models.Customer> Customer { get; set; }
public DbSet<core22.Models.POS.Models.Vendor> Vendor { get; set; }
public DbSet<core22.Models.POS.Models.Product> Product { get; set; }
public DbSet<core22.Models.POS.Models.PurchaseOrder> PurchaseOrder { get; set; }
public DbSet<core22.Models.POS.Models.SalesOrder> SalesOrder { get; set; }
public DbSet<core22.Models.POS.Models.GoodsReceive> GoodsReceive { get; set; }
public DbSet<core22.Models.POS.Models.InvenTran> InvenTran { get; set; }
public DbSet<core22.Models.POS.Models.PurchaseOrderLine> PurchaseOrderLine { get; set; }
public DbSet<core22.Models.POS.Models.SalesOrderLine> SalesOrderLine { get; set; }
public DbSet<core22.Models.POS.Models.GoodsReceiveLine> GoodsReceiveLine { get; set; }
}
}
<|start_filename|>core22/Models/POS/Models/GoodsReceive.cs<|end_filename|>
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Threading.Tasks;
namespace core22.Models.POS.Models
{
public class GoodsReceive
{
public Guid GoodsReceiveId { get; set; }
[Required]
public string Number { get; set; }
public string Description { get; set; }
public DateTimeOffset? GoodsReceiveDate { get; set; } = DateTime.Now;
public Guid PurchaseOrderId { get; set; }
public PurchaseOrder PurchaseOrder { get; set; }
}
public class GoodsReceiveLine
{
public Guid GoodsReceiveLineId { get; set; }
public Guid GoodsReceiveId { get; set; }
[JsonIgnore]
public GoodsReceive GoodsReceive { get; set; }
public Guid PurchaseOrderLineId { get; set; }
public Guid ProductId { get; set; }
public Product Product { get; set; }
public int QtyPurchase { get; set; }
public int QtyReceive { get; set; }
public int QtyReceived { get; set; }
}
}
<|start_filename|>core22/Services/POS/Repository.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace core22.Services.POS
{
public class Repository : IRepository
{
public string GeneratePONumber()
{
string result = "";
try
{
                result = this.GenerateNumber("PO");
}
catch (Exception)
{
throw;
}
return result;
}
public string GenerateSONumber()
{
string result = "";
try
{
                result = this.GenerateNumber("SO");
}
catch (Exception)
{
throw;
}
return result;
}
public string GenerateGRNumber()
{
string result = "";
try
{
                result = this.GenerateNumber("GR");
}
catch (Exception)
{
throw;
}
return result;
}
public string GenerateInvenTranNumber()
{
string result = "";
try
{
                result = this.GenerateNumber("TRN");
}
catch (Exception)
{
throw;
}
return result;
}
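        // Builds a pseudo-unique document number such as "3F2A1B9-20190324#PO":
        // the first seven characters of a fresh GUID, today's date, and the module prefix.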
        private string GenerateNumber(string module)
{
string result = "";
try
{
result = Guid.NewGuid().ToString().Substring(0, 7).ToUpper() + "-" + DateTime.Now.ToString("yyyyMMdd") + "#" + module;
}
catch (Exception)
{
throw;
}
return result;
}
}
}
<|start_filename|>core22/Views/_ViewImports.cshtml<|end_filename|>
@using core22
@using core22.Models
@addTagHelper *, Microsoft.AspNetCore.Mvc.TagHelpers
<|start_filename|>core22/Controllers/Api/SalesOrderController.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using core22.Data;
using core22.Models.POS.Models;
namespace core22.Controllers.Api
{
[Route("api/[controller]")]
[ApiController]
public class SalesOrderController : ControllerBase
{
private readonly ApplicationDbContext _context;
private readonly Services.POS.IRepository _pos;
public SalesOrderController(ApplicationDbContext context, Services.POS.IRepository pos)
{
_context = context;
_pos = pos;
}
// GET: api/SalesOrder
[HttpGet]
public async Task<ActionResult<IEnumerable<SalesOrder>>> GetSalesOrder()
{
return await _context.SalesOrder.ToListAsync();
}
// GET: api/SalesOrder/5
[HttpGet("{id}")]
public async Task<ActionResult<SalesOrder>> GetSalesOrder(Guid id)
{
var salesOrder = await _context.SalesOrder.FindAsync(id);
if (salesOrder == null)
{
return NotFound();
}
return salesOrder;
}
// PUT: api/SalesOrder/5
[HttpPut("{id}")]
public async Task<IActionResult> PutSalesOrder(Guid id, SalesOrder salesOrder)
{
if (id != salesOrder.SalesOrderId)
{
return BadRequest();
}
_context.Entry(salesOrder).State = EntityState.Modified;
try
{
await _context.SaveChangesAsync();
}
catch (DbUpdateConcurrencyException)
{
if (!SalesOrderExists(id))
{
return NotFound();
}
else
{
throw;
}
}
return NoContent();
}
        // POST: api/SalesOrder/InitiateNewPOSTrans
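        // Creates an empty sales order with a generated SO number and, when one exists,
        // the first customer on file; posting an empty body returns 201 Created with the new order.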
[HttpPost("[action]")]
public async Task<IActionResult> InitiateNewPOSTrans()
{
SalesOrder salesOrder = new SalesOrder();
salesOrder.Number = _pos.GenerateSONumber();
salesOrder.SalesOrderDate = DateTime.Now;
            // Pick an arbitrary existing customer as the default for this order.
            Customer cust = await _context.Customer.FirstOrDefaultAsync();
if (cust != null)
{
salesOrder.CustomerId = cust.CustomerId;
}
_context.SalesOrder.Add(salesOrder);
await _context.SaveChangesAsync();
return CreatedAtAction("GetSalesOrder", new { id = salesOrder.SalesOrderId }, salesOrder);
}
// POST: api/SalesOrder
[HttpPost]
public async Task<ActionResult<SalesOrder>> PostSalesOrder(SalesOrder salesOrder)
{
_context.SalesOrder.Add(salesOrder);
await _context.SaveChangesAsync();
return CreatedAtAction("GetSalesOrder", new { id = salesOrder.SalesOrderId }, salesOrder);
}
// DELETE: api/SalesOrder/5
[HttpDelete("{id}")]
public async Task<ActionResult<SalesOrder>> DeleteSalesOrder(Guid id)
{
var salesOrder = await _context.SalesOrder.FindAsync(id);
if (salesOrder == null)
{
return NotFound();
}
_context.SalesOrder.Remove(salesOrder);
await _context.SaveChangesAsync();
return salesOrder;
}
private bool SalesOrderExists(Guid id)
{
return _context.SalesOrder.Any(e => e.SalesOrderId == id);
}
}
}
<|start_filename|>core22/Migrations/20190324230600_alltables.cs<|end_filename|>
using System;
using Microsoft.EntityFrameworkCore.Metadata;
using Microsoft.EntityFrameworkCore.Migrations;
namespace core22.Migrations
{
public partial class alltables : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.CreateTable(
name: "AspNetRoles",
columns: table => new
{
Id = table.Column<string>(nullable: false),
Name = table.Column<string>(maxLength: 256, nullable: true),
NormalizedName = table.Column<string>(maxLength: 256, nullable: true),
ConcurrencyStamp = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetRoles", x => x.Id);
});
migrationBuilder.CreateTable(
name: "AspNetUsers",
columns: table => new
{
Id = table.Column<string>(nullable: false),
UserName = table.Column<string>(maxLength: 256, nullable: true),
NormalizedUserName = table.Column<string>(maxLength: 256, nullable: true),
Email = table.Column<string>(maxLength: 256, nullable: true),
NormalizedEmail = table.Column<string>(maxLength: 256, nullable: true),
EmailConfirmed = table.Column<bool>(nullable: false),
PasswordHash = table.Column<string>(nullable: true),
SecurityStamp = table.Column<string>(nullable: true),
ConcurrencyStamp = table.Column<string>(nullable: true),
PhoneNumber = table.Column<string>(nullable: true),
PhoneNumberConfirmed = table.Column<bool>(nullable: false),
TwoFactorEnabled = table.Column<bool>(nullable: false),
LockoutEnd = table.Column<DateTimeOffset>(nullable: true),
LockoutEnabled = table.Column<bool>(nullable: false),
AccessFailedCount = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUsers", x => x.Id);
});
migrationBuilder.CreateTable(
name: "Customer",
columns: table => new
{
CustomerId = table.Column<Guid>(nullable: false),
Name = table.Column<string>(nullable: false),
Description = table.Column<string>(nullable: true),
Phone = table.Column<string>(nullable: true),
Email = table.Column<string>(nullable: true),
Address = table.Column<string>(nullable: true),
Address2 = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Customer", x => x.CustomerId);
});
migrationBuilder.CreateTable(
name: "Product",
columns: table => new
{
ProductId = table.Column<Guid>(nullable: false),
Name = table.Column<string>(nullable: false),
Description = table.Column<string>(nullable: true),
PriceSell = table.Column<decimal>(nullable: false),
PricePurchase = table.Column<decimal>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Product", x => x.ProductId);
});
migrationBuilder.CreateTable(
name: "Vendor",
columns: table => new
{
VendorId = table.Column<Guid>(nullable: false),
Name = table.Column<string>(nullable: false),
Description = table.Column<string>(nullable: true),
Phone = table.Column<string>(nullable: true),
Email = table.Column<string>(nullable: true),
Address = table.Column<string>(nullable: true),
Address2 = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_Vendor", x => x.VendorId);
});
migrationBuilder.CreateTable(
name: "AspNetRoleClaims",
columns: table => new
{
Id = table.Column<int>(nullable: false)
.Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
RoleId = table.Column<string>(nullable: false),
ClaimType = table.Column<string>(nullable: true),
ClaimValue = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetRoleClaims", x => x.Id);
table.ForeignKey(
name: "FK_AspNetRoleClaims_AspNetRoles_RoleId",
column: x => x.RoleId,
principalTable: "AspNetRoles",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "AspNetUserClaims",
columns: table => new
{
Id = table.Column<int>(nullable: false)
.Annotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn),
UserId = table.Column<string>(nullable: false),
ClaimType = table.Column<string>(nullable: true),
ClaimValue = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUserClaims", x => x.Id);
table.ForeignKey(
name: "FK_AspNetUserClaims_AspNetUsers_UserId",
column: x => x.UserId,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "AspNetUserLogins",
columns: table => new
{
LoginProvider = table.Column<string>(maxLength: 128, nullable: false),
ProviderKey = table.Column<string>(maxLength: 128, nullable: false),
ProviderDisplayName = table.Column<string>(nullable: true),
UserId = table.Column<string>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUserLogins", x => new { x.LoginProvider, x.ProviderKey });
table.ForeignKey(
name: "FK_AspNetUserLogins_AspNetUsers_UserId",
column: x => x.UserId,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "AspNetUserRoles",
columns: table => new
{
UserId = table.Column<string>(nullable: false),
RoleId = table.Column<string>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUserRoles", x => new { x.UserId, x.RoleId });
table.ForeignKey(
name: "FK_AspNetUserRoles_AspNetRoles_RoleId",
column: x => x.RoleId,
principalTable: "AspNetRoles",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_AspNetUserRoles_AspNetUsers_UserId",
column: x => x.UserId,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "AspNetUserTokens",
columns: table => new
{
UserId = table.Column<string>(nullable: false),
LoginProvider = table.Column<string>(maxLength: 128, nullable: false),
Name = table.Column<string>(maxLength: 128, nullable: false),
Value = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUserTokens", x => new { x.UserId, x.LoginProvider, x.Name });
table.ForeignKey(
name: "FK_AspNetUserTokens_AspNetUsers_UserId",
column: x => x.UserId,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "SalesOrder",
columns: table => new
{
SalesOrderId = table.Column<Guid>(nullable: false),
Number = table.Column<string>(nullable: false),
Description = table.Column<string>(nullable: true),
SalesOrderDate = table.Column<DateTimeOffset>(nullable: true),
CustomerId = table.Column<Guid>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_SalesOrder", x => x.SalesOrderId);
table.ForeignKey(
name: "FK_SalesOrder_Customer_CustomerId",
column: x => x.CustomerId,
principalTable: "Customer",
principalColumn: "CustomerId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "InvenTran",
columns: table => new
{
InvenTranId = table.Column<Guid>(nullable: false),
Number = table.Column<string>(nullable: true),
Description = table.Column<string>(nullable: true),
ProductId = table.Column<Guid>(nullable: false),
TranSourceId = table.Column<Guid>(nullable: false),
TranSourceNumber = table.Column<string>(nullable: true),
TranSourceType = table.Column<string>(nullable: true),
Quantity = table.Column<int>(nullable: false),
InvenTranDate = table.Column<DateTimeOffset>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_InvenTran", x => x.InvenTranId);
table.ForeignKey(
name: "FK_InvenTran_Product_ProductId",
column: x => x.ProductId,
principalTable: "Product",
principalColumn: "ProductId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "PurchaseOrder",
columns: table => new
{
PurchaseOrderId = table.Column<Guid>(nullable: false),
Number = table.Column<string>(nullable: false),
Description = table.Column<string>(nullable: true),
PurchaseOrderDate = table.Column<DateTimeOffset>(nullable: true),
VendorId = table.Column<Guid>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_PurchaseOrder", x => x.PurchaseOrderId);
table.ForeignKey(
name: "FK_PurchaseOrder_Vendor_VendorId",
column: x => x.VendorId,
principalTable: "Vendor",
principalColumn: "VendorId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "SalesOrderLine",
columns: table => new
{
SalesOrderLineId = table.Column<Guid>(nullable: false),
SalesOrderId = table.Column<Guid>(nullable: false),
ProductId = table.Column<Guid>(nullable: false),
Quantity = table.Column<int>(nullable: false),
Price = table.Column<decimal>(nullable: false),
SubTotal = table.Column<decimal>(nullable: false),
Discount = table.Column<decimal>(nullable: false),
Total = table.Column<decimal>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_SalesOrderLine", x => x.SalesOrderLineId);
table.ForeignKey(
name: "FK_SalesOrderLine_Product_ProductId",
column: x => x.ProductId,
principalTable: "Product",
principalColumn: "ProductId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_SalesOrderLine_SalesOrder_SalesOrderId",
column: x => x.SalesOrderId,
principalTable: "SalesOrder",
principalColumn: "SalesOrderId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "GoodsReceive",
columns: table => new
{
GoodsReceiveId = table.Column<Guid>(nullable: false),
Number = table.Column<string>(nullable: false),
Description = table.Column<string>(nullable: true),
GoodsReceiveDate = table.Column<DateTimeOffset>(nullable: true),
PurchaseOrderId = table.Column<Guid>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_GoodsReceive", x => x.GoodsReceiveId);
table.ForeignKey(
name: "FK_GoodsReceive_PurchaseOrder_PurchaseOrderId",
column: x => x.PurchaseOrderId,
principalTable: "PurchaseOrder",
principalColumn: "PurchaseOrderId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "PurchaseOrderLine",
columns: table => new
{
PurchaseOrderLineId = table.Column<Guid>(nullable: false),
PurchaseOrderId = table.Column<Guid>(nullable: false),
ProductId = table.Column<Guid>(nullable: false),
Quantity = table.Column<int>(nullable: false),
Price = table.Column<decimal>(nullable: false),
SubTotal = table.Column<decimal>(nullable: false),
Discount = table.Column<decimal>(nullable: false),
Total = table.Column<decimal>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_PurchaseOrderLine", x => x.PurchaseOrderLineId);
table.ForeignKey(
name: "FK_PurchaseOrderLine_Product_ProductId",
column: x => x.ProductId,
principalTable: "Product",
principalColumn: "ProductId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_PurchaseOrderLine_PurchaseOrder_PurchaseOrderId",
column: x => x.PurchaseOrderId,
principalTable: "PurchaseOrder",
principalColumn: "PurchaseOrderId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "GoodsReceiveLine",
columns: table => new
{
GoodsReceiveLineId = table.Column<Guid>(nullable: false),
GoodsReceiveId = table.Column<Guid>(nullable: false),
PurchaseOrderLineId = table.Column<Guid>(nullable: false),
ProductId = table.Column<Guid>(nullable: false),
QtyPurchase = table.Column<int>(nullable: false),
QtyReceive = table.Column<int>(nullable: false),
QtyReceived = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_GoodsReceiveLine", x => x.GoodsReceiveLineId);
table.ForeignKey(
name: "FK_GoodsReceiveLine_GoodsReceive_GoodsReceiveId",
column: x => x.GoodsReceiveId,
principalTable: "GoodsReceive",
principalColumn: "GoodsReceiveId",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_GoodsReceiveLine_Product_ProductId",
column: x => x.ProductId,
principalTable: "Product",
principalColumn: "ProductId",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateIndex(
name: "IX_AspNetRoleClaims_RoleId",
table: "AspNetRoleClaims",
column: "RoleId");
migrationBuilder.CreateIndex(
name: "RoleNameIndex",
table: "AspNetRoles",
column: "NormalizedName",
unique: true,
filter: "[NormalizedName] IS NOT NULL");
migrationBuilder.CreateIndex(
name: "IX_AspNetUserClaims_UserId",
table: "AspNetUserClaims",
column: "UserId");
migrationBuilder.CreateIndex(
name: "IX_AspNetUserLogins_UserId",
table: "AspNetUserLogins",
column: "UserId");
migrationBuilder.CreateIndex(
name: "IX_AspNetUserRoles_RoleId",
table: "AspNetUserRoles",
column: "RoleId");
migrationBuilder.CreateIndex(
name: "EmailIndex",
table: "AspNetUsers",
column: "NormalizedEmail");
migrationBuilder.CreateIndex(
name: "UserNameIndex",
table: "AspNetUsers",
column: "NormalizedUserName",
unique: true,
filter: "[NormalizedUserName] IS NOT NULL");
migrationBuilder.CreateIndex(
name: "IX_GoodsReceive_PurchaseOrderId",
table: "GoodsReceive",
column: "PurchaseOrderId");
migrationBuilder.CreateIndex(
name: "IX_GoodsReceiveLine_GoodsReceiveId",
table: "GoodsReceiveLine",
column: "GoodsReceiveId");
migrationBuilder.CreateIndex(
name: "IX_GoodsReceiveLine_ProductId",
table: "GoodsReceiveLine",
column: "ProductId");
migrationBuilder.CreateIndex(
name: "IX_InvenTran_ProductId",
table: "InvenTran",
column: "ProductId");
migrationBuilder.CreateIndex(
name: "IX_PurchaseOrder_VendorId",
table: "PurchaseOrder",
column: "VendorId");
migrationBuilder.CreateIndex(
name: "IX_PurchaseOrderLine_ProductId",
table: "PurchaseOrderLine",
column: "ProductId");
migrationBuilder.CreateIndex(
name: "IX_PurchaseOrderLine_PurchaseOrderId",
table: "PurchaseOrderLine",
column: "PurchaseOrderId");
migrationBuilder.CreateIndex(
name: "IX_SalesOrder_CustomerId",
table: "SalesOrder",
column: "CustomerId");
migrationBuilder.CreateIndex(
name: "IX_SalesOrderLine_ProductId",
table: "SalesOrderLine",
column: "ProductId");
migrationBuilder.CreateIndex(
name: "IX_SalesOrderLine_SalesOrderId",
table: "SalesOrderLine",
column: "SalesOrderId");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "AspNetRoleClaims");
migrationBuilder.DropTable(
name: "AspNetUserClaims");
migrationBuilder.DropTable(
name: "AspNetUserLogins");
migrationBuilder.DropTable(
name: "AspNetUserRoles");
migrationBuilder.DropTable(
name: "AspNetUserTokens");
migrationBuilder.DropTable(
name: "GoodsReceiveLine");
migrationBuilder.DropTable(
name: "InvenTran");
migrationBuilder.DropTable(
name: "PurchaseOrderLine");
migrationBuilder.DropTable(
name: "SalesOrderLine");
migrationBuilder.DropTable(
name: "AspNetRoles");
migrationBuilder.DropTable(
name: "AspNetUsers");
migrationBuilder.DropTable(
name: "GoodsReceive");
migrationBuilder.DropTable(
name: "Product");
migrationBuilder.DropTable(
name: "SalesOrder");
migrationBuilder.DropTable(
name: "PurchaseOrder");
migrationBuilder.DropTable(
name: "Customer");
migrationBuilder.DropTable(
name: "Vendor");
}
}
}
<|start_filename|>core22/Controllers/Api/PurchaseOrderController.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using core22.Data;
using core22.Models.POS.Models;
namespace core22.Controllers.Api
{
[Route("api/[controller]")]
[ApiController]
public class PurchaseOrderController : ControllerBase
{
private readonly ApplicationDbContext _context;
public PurchaseOrderController(ApplicationDbContext context)
{
_context = context;
}
// GET: api/PurchaseOrder
[HttpGet]
public async Task<ActionResult<IEnumerable<PurchaseOrder>>> GetPurchaseOrder()
{
return await _context.PurchaseOrder.ToListAsync();
}
// GET: api/PurchaseOrder/5
[HttpGet("{id}")]
public async Task<ActionResult<PurchaseOrder>> GetPurchaseOrder(Guid id)
{
var purchaseOrder = await _context.PurchaseOrder.FindAsync(id);
if (purchaseOrder == null)
{
return NotFound();
}
return purchaseOrder;
}
// PUT: api/PurchaseOrder/5
[HttpPut("{id}")]
public async Task<IActionResult> PutPurchaseOrder(Guid id, PurchaseOrder purchaseOrder)
{
if (id != purchaseOrder.PurchaseOrderId)
{
return BadRequest();
}
_context.Entry(purchaseOrder).State = EntityState.Modified;
try
{
await _context.SaveChangesAsync();
}
catch (DbUpdateConcurrencyException)
{
if (!PurchaseOrderExists(id))
{
return NotFound();
}
else
{
throw;
}
}
return NoContent();
}
// POST: api/PurchaseOrder
[HttpPost]
public async Task<ActionResult<PurchaseOrder>> PostPurchaseOrder(PurchaseOrder purchaseOrder)
{
_context.PurchaseOrder.Add(purchaseOrder);
await _context.SaveChangesAsync();
return CreatedAtAction("GetPurchaseOrder", new { id = purchaseOrder.PurchaseOrderId }, purchaseOrder);
}
// DELETE: api/PurchaseOrder/5
[HttpDelete("{id}")]
public async Task<ActionResult<PurchaseOrder>> DeletePurchaseOrder(Guid id)
{
var purchaseOrder = await _context.PurchaseOrder.FindAsync(id);
if (purchaseOrder == null)
{
return NotFound();
}
_context.PurchaseOrder.Remove(purchaseOrder);
await _context.SaveChangesAsync();
return purchaseOrder;
}
private bool PurchaseOrderExists(Guid id)
{
return _context.PurchaseOrder.Any(e => e.PurchaseOrderId == id);
}
}
}
<|start_filename|>core22/Controllers/Api/GoodsReceiveLineController.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using core22.Data;
using core22.Models.POS.Models;
namespace core22.Controllers.Api
{
[Route("api/[controller]")]
[ApiController]
public class GoodsReceiveLineController : ControllerBase
{
private readonly ApplicationDbContext _context;
private readonly IHttpContextAccessor _httpContext;
public GoodsReceiveLineController(ApplicationDbContext context, IHttpContextAccessor httpContext)
{
_context = context;
_httpContext = httpContext;
}
// GET: api/GoodsReceiveLine
[HttpGet]
public async Task<ActionResult<IEnumerable<GoodsReceiveLine>>> GetGoodsReceiveLine()
{
return await _context.GoodsReceiveLine.ToListAsync();
}
[HttpGet("[action]")]
public async Task<IActionResult> GetLines()
{
List<GoodsReceiveLine> lines = new List<GoodsReceiveLine>();
try
{
var paramGuidString = _httpContext.HttpContext.Request.Query["goodsReceiveId"].ToString();
Guid goodsReceiveId = new Guid(paramGuidString);
lines = await _context.GoodsReceiveLine.Include(x => x.Product).Where(x => x.GoodsReceiveId.Equals(goodsReceiveId)).ToListAsync();
return Ok(new { lines });
}
catch (Exception ex)
{
return StatusCode(500, new { error = ex.Message });
}
}
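// Illustrative request shape for the GetLines action above (editor sketch, not scaffolded code):
//   GET /api/GoodsReceiveLine/GetLines?goodsReceiveId=<guid>
// On success it responds with { "lines": [ ... ] }, each line including its related Product.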
// GET: api/GoodsReceiveLine/5
[HttpGet("{id}")]
public async Task<ActionResult<GoodsReceiveLine>> GetGoodsReceiveLine(Guid id)
{
var goodsReceiveLine = await _context.GoodsReceiveLine
.Include(x => x.Product)
.Where(x => x.GoodsReceiveLineId.Equals(id)).FirstOrDefaultAsync();
if (goodsReceiveLine == null)
{
return NotFound();
}
return goodsReceiveLine;
}
// PUT: api/GoodsReceiveLine/5
[HttpPut("{id}")]
public async Task<IActionResult> PutGoodsReceiveLine(Guid id, GoodsReceiveLine goodsReceiveLine)
{
if (id != goodsReceiveLine.GoodsReceiveLineId)
{
return BadRequest();
}
GoodsReceiveLine update = await _context.GoodsReceiveLine.FindAsync(id);
InvenTran tran = await _context.InvenTran.Where(x => x.TranSourceId.Equals(id)).FirstOrDefaultAsync();
if (update != null && tran != null)
{
update.QtyReceive = goodsReceiveLine.QtyReceive;
tran.Quantity = update.QtyReceive * 1;
_context.Update(tran);
_context.Update(update);
await _context.SaveChangesAsync();
}
return Ok(new { data = update });
}
// POST: api/GoodsReceiveLine
[HttpPost]
public async Task<ActionResult<GoodsReceiveLine>> PostGoodsReceiveLine(GoodsReceiveLine goodsReceiveLine)
{
_context.GoodsReceiveLine.Add(goodsReceiveLine);
await _context.SaveChangesAsync();
return CreatedAtAction("GetGoodsReceiveLine", new { id = goodsReceiveLine.GoodsReceiveLineId }, goodsReceiveLine);
}
// DELETE: api/GoodsReceiveLine/5
[HttpDelete("{id}")]
public async Task<ActionResult<GoodsReceiveLine>> DeleteGoodsReceiveLine(Guid id)
{
var goodsReceiveLine = await _context.GoodsReceiveLine.FindAsync(id);
if (goodsReceiveLine == null)
{
return NotFound();
}
_context.GoodsReceiveLine.Remove(goodsReceiveLine);
await _context.SaveChangesAsync();
return goodsReceiveLine;
}
private bool GoodsReceiveLineExists(Guid id)
{
return _context.GoodsReceiveLine.Any(e => e.GoodsReceiveLineId == id);
}
}
}
<|start_filename|>core22/Models/POS/Models/Customer.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Threading.Tasks;
namespace core22.Models.POS.Models
{
public class Customer
{
public Guid CustomerId { get; set; }
[Required]
public string Name { get; set; }
public string Description { get; set; }
public string Phone { get; set; }
public string Email { get; set; }
public string Address { get; set; }
public string Address2 { get; set; }
}
}
<|start_filename|>core22/Models/POS/Models/SalesOrder.cs<|end_filename|>
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Threading.Tasks;
namespace core22.Models.POS.Models
{
public class SalesOrder
{
public Guid SalesOrderId { get; set; }
[Required]
public string Number { get; set; }
public string Description { get; set; }
public DateTimeOffset? SalesOrderDate { get; set; } = DateTime.Now;
public Guid CustomerId { get; set; }
public Customer Customer { get; set; }
public virtual List<SalesOrderLine> SalesOrderLine { get; set; } = new List<SalesOrderLine>();
}
public class SalesOrderLine
{
public Guid SalesOrderLineId { get; set; }
public Guid SalesOrderId { get; set; }
[JsonIgnore]
public SalesOrder SalesOrder { get; set; }
public Guid ProductId { get; set; }
public Product Product { get; set; }
public int Quantity { get; set; }
public decimal Price { get; set; }
public decimal SubTotal { get; set; }
public decimal Discount { get; set; }
public decimal Total { get; set; }
}
}
<|start_filename|>core22/Controllers/Api/VendorController.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using core22.Data;
using core22.Models.POS.Models;
namespace core22.Controllers.Api
{
[Route("api/[controller]")]
[ApiController]
public class VendorController : ControllerBase
{
private readonly ApplicationDbContext _context;
public VendorController(ApplicationDbContext context)
{
_context = context;
}
// GET: api/Vendor
[HttpGet]
public async Task<ActionResult<IEnumerable<Vendor>>> GetVendor()
{
return await _context.Vendor.ToListAsync();
}
// GET: api/Vendor/5
[HttpGet("{id}")]
public async Task<ActionResult<Vendor>> GetVendor(Guid id)
{
var vendor = await _context.Vendor.FindAsync(id);
if (vendor == null)
{
return NotFound();
}
return vendor;
}
// PUT: api/Vendor/5
[HttpPut("{id}")]
public async Task<IActionResult> PutVendor(Guid id, Vendor vendor)
{
if (id != vendor.VendorId)
{
return BadRequest();
}
_context.Entry(vendor).State = EntityState.Modified;
try
{
await _context.SaveChangesAsync();
}
catch (DbUpdateConcurrencyException)
{
if (!VendorExists(id))
{
return NotFound();
}
else
{
throw;
}
}
return NoContent();
}
// POST: api/Vendor
[HttpPost]
public async Task<ActionResult<Vendor>> PostVendor(Vendor vendor)
{
_context.Vendor.Add(vendor);
await _context.SaveChangesAsync();
return CreatedAtAction("GetVendor", new { id = vendor.VendorId }, vendor);
}
// DELETE: api/Vendor/5
[HttpDelete("{id}")]
public async Task<ActionResult<Vendor>> DeleteVendor(Guid id)
{
var vendor = await _context.Vendor.FindAsync(id);
if (vendor == null)
{
return NotFound();
}
_context.Vendor.Remove(vendor);
await _context.SaveChangesAsync();
return vendor;
}
private bool VendorExists(Guid id)
{
return _context.Vendor.Any(e => e.VendorId == id);
}
}
}
<|start_filename|>core22/Controllers/Api/CustomerController.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using core22.Data;
using core22.Models.POS.Models;
namespace core22.Controllers.Api
{
[Route("api/[controller]")]
[ApiController]
public class CustomerController : ControllerBase
{
private readonly ApplicationDbContext _context;
public CustomerController(ApplicationDbContext context)
{
_context = context;
}
// GET: api/Customer
[HttpGet]
public async Task<ActionResult<IEnumerable<Customer>>> GetCustomer()
{
return await _context.Customer.ToListAsync();
}
// GET: api/Customer/5
[HttpGet("{id}")]
public async Task<ActionResult<Customer>> GetCustomer(Guid id)
{
var customer = await _context.Customer.FindAsync(id);
if (customer == null)
{
return NotFound();
}
return customer;
}
// PUT: api/Customer/5
[HttpPut("{id}")]
public async Task<IActionResult> PutCustomer(Guid id, Customer customer)
{
if (id != customer.CustomerId)
{
return BadRequest();
}
_context.Entry(customer).State = EntityState.Modified;
try
{
await _context.SaveChangesAsync();
}
catch (DbUpdateConcurrencyException)
{
if (!CustomerExists(id))
{
return NotFound();
}
else
{
throw;
}
}
return NoContent();
}
// POST: api/Customer
[HttpPost]
public async Task<ActionResult<Customer>> PostCustomer(Customer customer)
{
_context.Customer.Add(customer);
await _context.SaveChangesAsync();
return CreatedAtAction("GetCustomer", new { id = customer.CustomerId }, customer);
}
// DELETE: api/Customer/5
[HttpDelete("{id}")]
public async Task<ActionResult<Customer>> DeleteCustomer(Guid id)
{
var customer = await _context.Customer.FindAsync(id);
if (customer == null)
{
return NotFound();
}
_context.Customer.Remove(customer);
await _context.SaveChangesAsync();
return customer;
}
private bool CustomerExists(Guid id)
{
return _context.Customer.Any(e => e.CustomerId == id);
}
}
}
<|start_filename|>core22/Models/POS/Models/PurchaseOrder.cs<|end_filename|>
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Threading.Tasks;
namespace core22.Models.POS.Models
{
public class PurchaseOrder
{
public Guid PurchaseOrderId { get; set; }
[Required]
public string Number { get; set; }
public string Description { get; set; }
public DateTimeOffset? PurchaseOrderDate { get; set; } = DateTime.Now;
public Guid VendorId { get; set; }
public Vendor Vendor { get; set; }
public virtual List<PurchaseOrderLine> PurchaseOrderLine { get; set; } = new List<PurchaseOrderLine>();
}
public class PurchaseOrderLine
{
public Guid PurchaseOrderLineId { get; set; }
public Guid PurchaseOrderId { get; set; }
[JsonIgnore]
public PurchaseOrder PurchaseOrder { get; set; }
public Guid ProductId { get; set; }
public Product Product { get; set; }
public int Quantity { get; set; }
public decimal Price { get; set; }
public decimal SubTotal { get; set; }
public decimal Discount { get; set; }
public decimal Total { get; set; }
}
}
| aulkiller/POSWebASP |
<|start_filename|>router/http.go<|end_filename|>
package router
import (
"fmt"
"net/http"
"strings"
)
func init() {
port := getopt("PORT", getopt("HTTP_PORT", "80"))
Jobs.Register(&httpService{port}, "http")
}
type httpService struct {
port string
}
func (s *httpService) Name() string {
return fmt.Sprintf("http[%s]:%s",
strings.Join(HttpHandlers.Names(), ","), s.port)
}
func (s *httpService) Setup() error {
for name, handler := range HttpHandlers.All() {
h := handler()
http.Handle("/"+name, h)
http.Handle("/"+name+"/", h)
}
return nil
}
func (s *httpService) Run() error {
return http.ListenAndServe(":"+s.port, nil)
}
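// Usage sketch (editor addition, not part of the original file): with PORT or
// HTTP_PORT set (default "80"), a registered HttpHandler named "logs" is served
// at both /logs and /logs/. Assuming HttpHandler is a factory returning an
// http.Handler (as the handler() call in Setup implies), a plugin would register
// itself roughly like:
//
//	HttpHandlers.Register(func() http.Handler { return myHandler }, "logs")
//
// where myHandler is a hypothetical http.Handler implementation.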
<|start_filename|>router/extpoints.go<|end_filename|>
// generated by go-extpoints -- DO NOT EDIT
package router
import (
"reflect"
"sync"
"strings"
"runtime"
)
var registry = struct {
sync.Mutex
extpoints map[string]*extensionPoint
}{
extpoints: make(map[string]*extensionPoint),
}
type extensionPoint struct {
sync.Mutex
iface reflect.Type
components map[string]interface{}
}
func newExtensionPoint(iface interface{}) *extensionPoint {
ep := &extensionPoint{
iface: reflect.TypeOf(iface).Elem(),
components: make(map[string]interface{}),
}
registry.Lock()
defer registry.Unlock()
registry.extpoints[ep.iface.Name()] = ep
return ep
}
func (ep *extensionPoint) lookup(name string) (ext interface{}, ok bool) {
ep.Lock()
defer ep.Unlock()
ext, ok = ep.components[name]
return
}
func (ep *extensionPoint) all() map[string]interface{} {
ep.Lock()
defer ep.Unlock()
all := make(map[string]interface{})
for k, v := range ep.components {
all[k] = v
}
return all
}
func (ep *extensionPoint) register(component interface{}, name string) bool {
ep.Lock()
defer ep.Unlock()
if name == "" {
comType := reflect.TypeOf(component)
if comType.Kind() == reflect.Func {
nameParts := strings.Split(runtime.FuncForPC(
reflect.ValueOf(component).Pointer()).Name(), ".")
name = nameParts[len(nameParts)-1]
} else {
name = comType.Elem().Name()
}
}
_, exists := ep.components[name]
if exists {
return false
}
ep.components[name] = component
return true
}
func (ep *extensionPoint) unregister(name string) bool {
ep.Lock()
defer ep.Unlock()
_, exists := ep.components[name]
if !exists {
return false
}
delete(ep.components, name)
return true
}
func implements(component interface{}) []string {
var ifaces []string
typ := reflect.TypeOf(component)
for name, ep := range registry.extpoints {
if ep.iface.Kind() == reflect.Func && typ.AssignableTo(ep.iface) {
ifaces = append(ifaces, name)
}
if ep.iface.Kind() != reflect.Func && typ.Implements(ep.iface) {
ifaces = append(ifaces, name)
}
}
return ifaces
}
func Register(component interface{}, name string) []string {
registry.Lock()
defer registry.Unlock()
var ifaces []string
for _, iface := range implements(component) {
if ok := registry.extpoints[iface].register(component, name); ok {
ifaces = append(ifaces, iface)
}
}
return ifaces
}
func Unregister(name string) []string {
registry.Lock()
defer registry.Unlock()
var ifaces []string
for iface, extpoint := range registry.extpoints {
if ok := extpoint.unregister(name); ok {
ifaces = append(ifaces, iface)
}
}
return ifaces
}
// HttpHandler
var HttpHandlers = &httpHandlerExt{
newExtensionPoint(new(HttpHandler)),
}
type httpHandlerExt struct {
*extensionPoint
}
func (ep *httpHandlerExt) Unregister(name string) bool {
return ep.unregister(name)
}
func (ep *httpHandlerExt) Register(component HttpHandler, name string) bool {
return ep.register(component, name)
}
func (ep *httpHandlerExt) Lookup(name string) (HttpHandler, bool) {
ext, ok := ep.lookup(name)
if !ok {
return nil, ok
}
return ext.(HttpHandler), ok
}
func (ep *httpHandlerExt) All() map[string]HttpHandler {
all := make(map[string]HttpHandler)
for k, v := range ep.all() {
all[k] = v.(HttpHandler)
}
return all
}
func (ep *httpHandlerExt) Names() []string {
var names []string
for k := range ep.all() {
names = append(names, k)
}
return names
}
// AdapterFactory
var AdapterFactories = &adapterFactoryExt{
newExtensionPoint(new(AdapterFactory)),
}
type adapterFactoryExt struct {
*extensionPoint
}
func (ep *adapterFactoryExt) Unregister(name string) bool {
return ep.unregister(name)
}
func (ep *adapterFactoryExt) Register(component AdapterFactory, name string) bool {
return ep.register(component, name)
}
func (ep *adapterFactoryExt) Lookup(name string) (AdapterFactory, bool) {
ext, ok := ep.lookup(name)
if !ok {
return nil, ok
}
return ext.(AdapterFactory), ok
}
func (ep *adapterFactoryExt) All() map[string]AdapterFactory {
all := make(map[string]AdapterFactory)
for k, v := range ep.all() {
all[k] = v.(AdapterFactory)
}
return all
}
func (ep *adapterFactoryExt) Names() []string {
var names []string
for k := range ep.all() {
names = append(names, k)
}
return names
}
// AdapterTransport
var AdapterTransports = &adapterTransportExt{
newExtensionPoint(new(AdapterTransport)),
}
type adapterTransportExt struct {
*extensionPoint
}
func (ep *adapterTransportExt) Unregister(name string) bool {
return ep.unregister(name)
}
func (ep *adapterTransportExt) Register(component AdapterTransport, name string) bool {
return ep.register(component, name)
}
func (ep *adapterTransportExt) Lookup(name string) (AdapterTransport, bool) {
ext, ok := ep.lookup(name)
if !ok {
return nil, ok
}
return ext.(AdapterTransport), ok
}
func (ep *adapterTransportExt) All() map[string]AdapterTransport {
all := make(map[string]AdapterTransport)
for k, v := range ep.all() {
all[k] = v.(AdapterTransport)
}
return all
}
func (ep *adapterTransportExt) Names() []string {
var names []string
for k := range ep.all() {
names = append(names, k)
}
return names
}
// Job
var Jobs = &jobExt{
newExtensionPoint(new(Job)),
}
type jobExt struct {
*extensionPoint
}
func (ep *jobExt) Unregister(name string) bool {
return ep.unregister(name)
}
func (ep *jobExt) Register(component Job, name string) bool {
return ep.register(component, name)
}
func (ep *jobExt) Lookup(name string) (Job, bool) {
ext, ok := ep.lookup(name)
if !ok {
return nil, ok
}
return ext.(Job), ok
}
func (ep *jobExt) All() map[string]Job {
all := make(map[string]Job)
for k, v := range ep.all() {
all[k] = v.(Job)
}
return all
}
func (ep *jobExt) Names() []string {
var names []string
for k := range ep.all() {
names = append(names, k)
}
return names
}
// LogRouter
var LogRouters = &logRouterExt{
newExtensionPoint(new(LogRouter)),
}
type logRouterExt struct {
*extensionPoint
}
func (ep *logRouterExt) Unregister(name string) bool {
return ep.unregister(name)
}
func (ep *logRouterExt) Register(component LogRouter, name string) bool {
return ep.register(component, name)
}
func (ep *logRouterExt) Lookup(name string) (LogRouter, bool) {
ext, ok := ep.lookup(name)
if !ok {
return nil, ok
}
return ext.(LogRouter), ok
}
func (ep *logRouterExt) All() map[string]LogRouter {
all := make(map[string]LogRouter)
for k, v := range ep.all() {
all[k] = v.(LogRouter)
}
return all
}
func (ep *logRouterExt) Names() []string {
var names []string
for k := range ep.all() {
names = append(names, k)
}
return names
}
| gbolo/logspout |
<|start_filename|>uuidxx.h<|end_filename|>
#pragma once
#include <stdexcept>
#include <cstdint>
#include <string>
namespace uuidxx
{
enum class Variant
{
Nil,
Version1,
Version2,
Version3,
Version4,
Version5
};
class NotImplemented : public std::logic_error
{
public:
NotImplemented() : std::logic_error("Function not yet implemented") { };
};
union uuid
{
private:
static uuid Generatev4();
public:
uint64_t WideIntegers[2];
struct _internalData
{
uint32_t Data1;
uint16_t Data2;
uint16_t Data3;
uint8_t Data4[8];
} Uuid;
struct _byteRepresentation
{
uint8_t Data1[4];
uint8_t Data2[2];
uint8_t Data3[2];
uint8_t Data4[8];
} Bytes;
bool operator == (const uuid &guid2) const;
bool operator != (const uuid &guid2) const;
bool operator < (const uuid &guid2) const;
bool operator > (const uuid &guid2) const;
uuid() = default;
uuid(const char *uuidString);
uuid(const std::string &uuidString);
static uuid FromString(const char *uuidString);
static uuid FromString(const std::string &uuidString);
static inline uuid Generate(Variant v = Variant::Version4)
{
switch (v)
{
case Variant::Nil:
return uuid(nullptr); //special case;
case Variant::Version1:
case Variant::Version2:
case Variant::Version3:
case Variant::Version5:
throw new NotImplemented();
case Variant::Version4:
return Generatev4();
default:
throw new NotImplemented();
}
}
std::string ToString(bool withBraces = true) const;
};
static_assert(sizeof(uuid) == 2 * sizeof(int64_t), "Check uuid type declaration/padding!");
}
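// Illustrative usage sketch (editor addition, not part of the original header):
//
//     #include "uuidxx.h"
//     #include <iostream>
//
//     int main()
//     {
//         uuidxx::uuid id = uuidxx::uuid::Generate();          // version 4 by default
//         std::cout << id.ToString() << std::endl;             // e.g. "{XXXXXXXX-XXXX-...}"
//         uuidxx::uuid parsed = uuidxx::uuid::FromString(id.ToString(false));
//         return parsed == id ? 0 : 1;                         // round-trips to the same value
//     }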
<|start_filename|>uuidxx.cpp<|end_filename|>
#ifdef _WIN32
#define _CRT_SECURE_NO_WARNINGS
#endif
#include "uuidxx.h"
#include <random>
#include <stdio.h>
#include <inttypes.h>
#include <string.h>
using namespace std;
using namespace uuidxx;
bool uuid::operator == (const uuid & guid2) const
{
return memcmp(&guid2, this, sizeof(uuid)) == 0;
}
bool uuid::operator != (const uuid & guid2) const
{
return !(*this == guid2);
}
bool uuid::operator < (const uuid &guid2) const
{
return memcmp(this, &guid2, sizeof(uuid)) < 0;
}
bool uuid::operator > (const uuid &guid2) const
{
return memcmp(this, &guid2, sizeof(uuid)) > 0;
}
uuid::uuid (const std::string &uuidString)
: uuid(uuidString.c_str())
{
}
uuid::uuid (const char *uuidString)
{
if (uuidString == nullptr)
{
//special case, and prevents random bugs
memset(this, 0, sizeof(uuid));
return;
}
if (uuidString[0] == '{')
{
sscanf(uuidString, "{%08" SCNx32 "-%04" SCNx16 "-%04" SCNx16 "-%02" SCNx8 "%02" SCNx8 "-%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "}", &Uuid.Data1, &Uuid.Data2, &Uuid.Data3, &Uuid.Data4[0], &Uuid.Data4[1], &Uuid.Data4[2], &Uuid.Data4[3], &Uuid.Data4[4], &Uuid.Data4[5], &Uuid.Data4[6], &Uuid.Data4[7]);
}
else
{
sscanf(uuidString, "%08" SCNx32 "-%04" SCNx16 "-%04" SCNx16 "-%02" SCNx8 "%02" SCNx8 "-%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "%02" SCNx8 "", &Uuid.Data1, &Uuid.Data2, &Uuid.Data3, &Uuid.Data4[0], &Uuid.Data4[1], &Uuid.Data4[2], &Uuid.Data4[3], &Uuid.Data4[4], &Uuid.Data4[5], &Uuid.Data4[6], &Uuid.Data4[7]);
}
}
string uuid::ToString(bool withBraces) const
{
char buffer[39];
sprintf(buffer, "%s%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X%s", withBraces ? "{" : "", Uuid.Data1, Uuid.Data2, Uuid.Data3, Uuid.Data4[0], Uuid.Data4[1], Uuid.Data4[2], Uuid.Data4[3], Uuid.Data4[4], Uuid.Data4[5], Uuid.Data4[6], Uuid.Data4[7], withBraces ? "}" : "");
return buffer;
}
uuid uuid::FromString(const char *uuidString)
{
uuid temp(uuidString);
return temp;
}
uuid uuid::FromString(const std::string &uuidString)
{
uuid temp(uuidString.c_str());
return temp;
}
uuid uuid::Generatev4()
{
//mach-o does not support TLS and clang still has issues with thread_local
#if !defined(__APPLE__) && !defined(__clang__)
thread_local std::random_device rd;
thread_local auto gen = std::mt19937_64(rd());
#else
std::random_device rd;
std::mt19937_64 gen(rd());
#endif
std::uniform_int_distribution<uint64_t> dis64;
uuid newGuid;
newGuid.WideIntegers[0] = dis64(gen);
newGuid.WideIntegers[1] = dis64(gen);
//RFC4122 defines (pseudo)random uuids (in big-endian notation):
//MSB of DATA4[0] specifies the variant and should be 0b10 to indicate standard uuid,
//and MSB of DATA3 should be 0b0100 to indicate version 4
newGuid.Bytes.Data4[0] = (newGuid.Bytes.Data4[0] & 0x3F) | static_cast<uint8_t>(0x80);
newGuid.Bytes.Data3[1] = (newGuid.Bytes.Data3[1] & 0x0F) | static_cast<uint8_t>(0x40);
return newGuid;
}
| neosmart/uuidxx |
<|start_filename|>styles/colors.css<|end_filename|>
:root {
--color-text-main: #2b2c30;
--color-text-sub: #616269;
--color-text-off: #999;
--color-text-disabled: #ccc;
--color-text-placeholder: #ccc;
--color-text-link: #2b2c30;
--color-border-dark: #ccc;
--color-border: #ddd;
--color-border-light: #eee;
--color-gradient-purple: linear-gradient(to right bottom, #5630af, #3067af);
--color-gradient-purple-light: linear-gradient(
to right bottom,
#7650cf,
#5087cf
);
--color-gradient-blue: linear-gradient(to right bottom, #adaf30, #30af7f);
--color-purple: #331cbf;
--color-green: #2cc63e;
--color-bluegreen: #30af7f;
--color-gray: #ddd;
--color-gray-light: #eee;
--color-accent: #ff8d27;
--color-accent-light: #ff9d37;
--color-blue: #3067af;
--color-pink: #ff357f;
--color-bg-purple-light: #f7f7fc;
--color-bg-purple: #e7e7f3;
--color-bg-purple-dark: #cacae7;
--color-bg-blue: #e5eff9;
--color-bg-success: #e8fbe8;
--color-bg-error: #ffe9df;
--color-error: #d9534f;
}
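/*
  Usage sketch (editor addition, not part of the original stylesheet): these custom
  properties are meant to be consumed with var(), e.g. in a hypothetical component:

  .entry-title {
    color: var(--color-text-main);
    border-bottom: 1px solid var(--color-border-light);
  }

  .entry-title a:hover {
    color: var(--color-accent);
  }
*/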
| sohtsuka/microcms-blog-nextjs |
<|start_filename|>mm/malloc.h<|end_filename|>
// Copyright (c) 2016 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
#ifndef MALLOC_MM_H
#define MALLOC_MM_H
#include <stdlib.h>
typedef struct {
char *buf;
int offset;
int size;
} stats_buf;
void *mm_malloc(size_t);
void mm_free(void *);
char *mm_stats();
size_t mm_sizeat(void *);
size_t mm_size();
size_t mm_alloc_size();
int mm_free2os();
#endif
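/*
 * Illustrative usage sketch (editor addition, not part of the original header).
 * The entry points mirror malloc/free; mm_sizeat() reports the size of a specific
 * allocation, while mm_size()/mm_alloc_size() presumably report allocator-wide totals:
 *
 *     void *p = mm_malloc(128);
 *     if (p != NULL) {
 *         size_t sz = mm_sizeat(p);   // at least 128, depending on the allocator
 *         mm_free(p);
 *     }
 */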
<|start_filename|>skiplist/node.go<|end_filename|>
// +build !amd64
// Copyright (c) 2016-2021 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package skiplist
import (
"reflect"
"sync/atomic"
"unsafe"
)
//
// The default skiplist implementation:
// a) should support 64-bit platforms including aarch64, which has alignment restrictions
// (on arm64, 64-bit words accessed atomically should have 64-bit address alignment,
// otherwise the access will result in an alignment fault.)
// b) and can work with both golang memory (garbage collection safe) and user
// managed memory (e.g. jemalloc)
//
// Node layout:
// Nitro's two-phase deletion approach requires us to atomically update both the
// next pointer as well as the state of the current node.
//
// For user-managed memory, this can be addressed by using tagged pointers. However
// this relies on the fact that the platform's virtual addresses do not consume the
// entire 64 bits. To our knowledge, amd64 uses 48-bit VA addresses, and ARMv8.3
// with the large VA addressing mode (64K page size) can go up to 52 bits. This
// has the advantage of using word aligned operations which is a requirement for
// certain platforms.
//
// Below is the node layout we use for user managed memory
//
// <NodeMM struct>
// +------------+-----------+-----------+-------------+
// | level - 8b | itm - 8b | Link - 8b| Cache - 8b |<NodeRefMM>
// +------------+-----------+-----------+-------------+-------------+--------------+
// | tag ptr- 8b| tag ptr - 8b|
// +-------------+--------------+
//
// For golang memory, the same can be addressed by using an indirection pointer. A
// NodeRef pointer is stored at each skiplist level; it points to an object that contains
// both the state and the next pointer. This is the existing implementation.
//
// Below is the node layout we use for golang memory
//
// <Node struct>
// +------------+------------+-----------+-------------+-------------+
// | level - 8b | next - 8b | itm - 8b | Link - 8b | Cache - 8b |
// +------------+------------+-----------+-------------+-------------+
// | ----- |------------------+----------------+
// | NodeRef ptr - 8b | NodeRefptr - 8b|
// |------------------+----------------+
//
// Note: Although the golang indirection approach can work with user managed memory,
// it comes with the overhead of constant memory allocation/deallocation in
// case of conflicts, and SMR will not be straightforward. Reclaim in SMR also
// becomes easy if we allocate node memory as a single blob (NodeMM).
//
// Based on memory config used for skiplist, we cache the type information in the
// MSB of level field to save extra bytes. Currently MaxLevel is 32. But it can go
// up to 2^63 -1
//
// 52-bit Large VA address capability is supported from ARMv8.2 onwards (64KB page size)
const deletedFlag = uint64(1) << 52
const deletedFlagMask = ^deletedFlag
// memory management type, bit set for user managed memory
const mmFlag = int(1) << 62
const mmFlagMask = (^mmFlag)
var nodeHdrSizeMM = unsafe.Sizeof(NodeMM{})
var nodeRefSizeMM = unsafe.Sizeof(NodeRefMM{})
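// Illustrative sketch (editor addition, not part of the original implementation):
// packTaggedNext/unpackTaggedNext below are hypothetical helpers showing how a
// next-pointer and the deleted bit are combined into a single tagged 64-bit word
// for the user-managed-memory layout; the real logic lives in setNext/getNext.
func packTaggedNext(next *Node, deleted bool) uint64 {
	tag := uint64(uintptr(unsafe.Pointer(next)))
	if deleted {
		tag |= deletedFlag // bit 52 marks the link as logically deleted
	}
	return tag
}

func unpackTaggedNext(tag uint64) (next *Node, deleted bool) {
	next = (*Node)(unsafe.Pointer(uintptr(tag & deletedFlagMask)))
	deleted = tag&deletedFlag != 0
	return
}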
// Node represents skiplist entry
// This should reside in a single cache line (L1 cache 64bytes)
type Node struct {
level int // we use the 2nd highest bit to store memory type
itm unsafe.Pointer
Link unsafe.Pointer
Cache int64 // needed by plasma
next unsafe.Pointer // Points to [level+1]unsafe.Pointer
}
// NodeRef is a wrapper for node pointer
type NodeRef struct {
deleted bool
ptr *Node
}
// NodeMM represents skiplist entry from user managed memory.
// We skip the next pointer in the Node struct to save bytes
type NodeMM struct {
level int // we use the 63rd bit to store node type
itm unsafe.Pointer
Link unsafe.Pointer
Cache int64 // needed by plasma
}
// NodeRefMM is a wrapper for Node(MM) pointer tagged with deletedFlag
type NodeRefMM struct {
tagptr uint64
}
// for user managed memory
func (n *Node) setMM() {
n.level |= mmFlag
}
// this is inlined by go as seen from profile
func (n *Node) usesMM() bool {
return (n.level & mmFlag) != 0
}
// get a slice of NodeRef's containing golang pointers
func (n *Node) nextArray() (s []unsafe.Pointer) {
hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s))
hdr.Data = uintptr(n.next)
hdr.Len = n.Level() + 1
hdr.Cap = hdr.Len
return
}
// Level returns the level of a node in the skiplist
func (n Node) Level() int {
return n.level & mmFlagMask
}
// Size returns memory used by the node
func (n Node) Size() int {
if n.usesMM() {
return int(nodeHdrSizeMM + uintptr(n.Level()+1)*nodeRefSizeMM)
} else {
return int(unsafe.Sizeof(n) +
uintptr(n.Level()+1)*(unsafe.Sizeof(unsafe.Pointer(nil))+
unsafe.Sizeof(NodeRef{})))
}
}
// Item returns item held by the node
func (n *Node) Item() unsafe.Pointer {
return n.itm
}
// SetItem sets itm ptr
func (n *Node) SetItem(itm unsafe.Pointer) {
n.itm = itm
}
// SetLink can be used to set link pointer for the node
func (n *Node) SetLink(l *Node) {
n.Link = unsafe.Pointer(l)
}
// GetLink returns link pointer from the node
func (n *Node) GetLink() *Node {
return (*Node)(n.Link)
}
func allocNode(itm unsafe.Pointer, level int, fn MallocFn) *Node {
var n *Node
// we reserve level's MSB bit to cache node type
if level < 0 || level >= mmFlag {
return nil
}
if fn == nil {
next := make([]unsafe.Pointer, level+1)
n = &Node{
level: level,
next: unsafe.Pointer(&next[0]),
}
} else {
// NodeMM is cast as Node (the NodeMM allocation is not undersized)
n = (*Node)(fn(int(nodeHdrSizeMM + uintptr(level+1)*nodeRefSizeMM)))
if n == nil {
return nil
}
n.level = level
n.Link = nil
n.setMM() // malloced memory
}
n.Cache = 0
n.itm = itm
return n
}
func (n *Node) setNext(level int, ptr *Node, deleted bool) {
if n.usesMM() {
nodeRefAddr := uintptr(unsafe.Pointer(uintptr(unsafe.Pointer(n)) +
nodeHdrSizeMM + nodeRefSizeMM*uintptr(level)))
wordAddr := (*uint64)(unsafe.Pointer(nodeRefAddr))
tag := uint64(uintptr(unsafe.Pointer(ptr)))
if deleted {
tag |= deletedFlag
}
atomic.StoreUint64(wordAddr, tag)
} else {
next := n.nextArray()
next[level] = unsafe.Pointer(&NodeRef{ptr: ptr, deleted: deleted})
}
}
// GetNext returns next node in level 0
func (n *Node) GetNext() *Node {
var next *Node
var del bool
for next, del = n.getNext(0); del; next, del = next.getNext(0) {
}
return next
}
func (n *Node) getNext(level int) (*Node, bool) {
if n.usesMM() {
nodeRefAddr := uintptr(unsafe.Pointer(n)) + nodeHdrSizeMM + nodeRefSizeMM*uintptr(level)
wordAddr := (*uint64)(unsafe.Pointer(nodeRefAddr))
v := atomic.LoadUint64(wordAddr)
ptr := (*Node)(unsafe.Pointer(uintptr(v) & uintptr(deletedFlagMask)))
if ptr != nil {
return ptr, (v&deletedFlag != uint64(0))
}
} else {
next := n.nextArray()
ref := (*NodeRef)(atomic.LoadPointer(&next[level]))
if ref != nil {
return ref.ptr, ref.deleted
}
}
return nil, false
}
func (n *Node) dcasNext(level int, prevPtr, newPtr *Node, prevIsdeleted, newIsdeleted bool) bool {
var swapped bool
if n.usesMM() {
nodeRefAddr := uintptr(unsafe.Pointer(n)) + nodeHdrSizeMM + nodeRefSizeMM*uintptr(level)
wordAddr := (*uint64)(unsafe.Pointer(nodeRefAddr))
prevVal := uint64(uintptr(unsafe.Pointer(prevPtr)))
newVal := uint64(uintptr(unsafe.Pointer(newPtr)))
if prevIsdeleted {
prevVal |= deletedFlag
}
if newIsdeleted {
newVal |= deletedFlag
}
swapped = atomic.CompareAndSwapUint64(wordAddr, prevVal, newVal)
} else {
next := n.nextArray()
addr := &next[level]
ref := (*NodeRef)(atomic.LoadPointer(addr))
if (ref == nil) || (ref.ptr == prevPtr && ref.deleted == prevIsdeleted) {
swapped = atomic.CompareAndSwapPointer(addr, unsafe.Pointer(ref),
unsafe.Pointer(&NodeRef{ptr: newPtr, deleted: newIsdeleted}))
}
}
return swapped
}
// This can help debugging of memory reclaimer bugs
func debugMarkFree(n *Node) {
}
<|start_filename|>skiplist/node_alloc_amd64.go<|end_filename|>
//
// Copyright (c) 2016 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package skiplist
import (
"reflect"
"unsafe"
)
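// Editor note: nodeTypes below pre-declares one concrete struct layout per skiplist
// level (buf holds level+1 NodeRef entries). allocNode picks the matching type so
// that reflect.New allocations are properly typed for the Go GC, and the same
// per-level size is passed to a caller-supplied MallocFn when one is provided.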
var nodeTypes = [33]reflect.Type{
reflect.TypeOf(node0),
reflect.TypeOf(node1),
reflect.TypeOf(node2),
reflect.TypeOf(node3),
reflect.TypeOf(node4),
reflect.TypeOf(node5),
reflect.TypeOf(node6),
reflect.TypeOf(node7),
reflect.TypeOf(node8),
reflect.TypeOf(node9),
reflect.TypeOf(node10),
reflect.TypeOf(node11),
reflect.TypeOf(node12),
reflect.TypeOf(node13),
reflect.TypeOf(node14),
reflect.TypeOf(node15),
reflect.TypeOf(node16),
reflect.TypeOf(node17),
reflect.TypeOf(node18),
reflect.TypeOf(node19),
reflect.TypeOf(node20),
reflect.TypeOf(node21),
reflect.TypeOf(node22),
reflect.TypeOf(node23),
reflect.TypeOf(node24),
reflect.TypeOf(node25),
reflect.TypeOf(node26),
reflect.TypeOf(node27),
reflect.TypeOf(node28),
reflect.TypeOf(node29),
reflect.TypeOf(node30),
reflect.TypeOf(node31),
reflect.TypeOf(node32),
}
var node0 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [1]NodeRef
}
var node1 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [2]NodeRef
}
var node2 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [3]NodeRef
}
var node3 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [4]NodeRef
}
var node4 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [5]NodeRef
}
var node5 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [6]NodeRef
}
var node6 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [7]NodeRef
}
var node7 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [8]NodeRef
}
var node8 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [9]NodeRef
}
var node9 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [10]NodeRef
}
var node10 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [11]NodeRef
}
var node11 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [12]NodeRef
}
var node12 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [13]NodeRef
}
var node13 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [14]NodeRef
}
var node14 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [15]NodeRef
}
var node15 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [16]NodeRef
}
var node16 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [17]NodeRef
}
var node17 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [18]NodeRef
}
var node18 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [19]NodeRef
}
var node19 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [20]NodeRef
}
var node20 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [21]NodeRef
}
var node21 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [22]NodeRef
}
var node22 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [23]NodeRef
}
var node23 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [24]NodeRef
}
var node24 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [25]NodeRef
}
var node25 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [26]NodeRef
}
var node26 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [27]NodeRef
}
var node27 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [28]NodeRef
}
var node28 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [29]NodeRef
}
var node29 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [30]NodeRef
}
var node30 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [31]NodeRef
}
var node31 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [32]NodeRef
}
var node32 struct {
itm unsafe.Pointer
gc unsafe.Pointer
cache int64
buf [33]NodeRef
}
func allocNode(itm unsafe.Pointer, level int, malloc MallocFn) *Node {
var block unsafe.Pointer
if malloc == nil {
block = unsafe.Pointer(reflect.New(nodeTypes[level]).Pointer())
} else {
block = malloc(int(nodeTypes[level].Size()))
}
n := (*Node)(block)
n.level = uint16(level)
n.itm = itm
n.Link = nil
n.Cache = 0
return n
}
var freeBlockContent []byte
func init() {
l := int(nodeTypes[32].Size())
freeBlockContent = make([]byte, l)
for i := 0; i < l; i++ {
freeBlockContent[i] = 0xdd
}
}
// Fill free blocks with a const
// This can help debugging of memory reclaimer bugs
func debugMarkFree(n *Node) {
var block []byte
l := int(nodeTypes[n.level].Size())
sh := (*reflect.SliceHeader)(unsafe.Pointer(&block))
sh.Data = uintptr(unsafe.Pointer(n))
sh.Len = l
sh.Cap = l
copy(block, freeBlockContent)
}
| couchbase/nitro |
<|start_filename|>zmodules/Src/jobs.c<|end_filename|>
/*
* jobs.c - job control
*
* This file is part of zsh, the Z shell.
*
* Copyright (c) 1992-1997 <NAME>
* All rights reserved.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and to distribute modified versions of this software for any
* purpose, provided that the above copyright notice and the following
* two paragraphs appear in all copies of this software.
*
* In no event shall <NAME> or the Zsh Development Group be liable
* to any party for direct, indirect, special, incidental, or consequential
* damages arising out of the use of this software and its documentation,
* even if <NAME> and the Zsh Development Group have been advised of
* the possibility of such damage.
*
* <NAME> and the Zsh Development Group specifically disclaim any
* warranties, including, but not limited to, the implied warranties of
* merchantability and fitness for a particular purpose. The software
* provided hereunder is on an "as is" basis, and <NAME> and the
* Zsh Development Group have no obligation to provide maintenance,
* support, updates, enhancements, or modifications.
*
*/
#include "zsh.mdh"
#include "jobs.pro"
/* the process group of the shell at startup (equal to mypgrp, except
   when we started without being process group leader) */
/**/
mod_export pid_t origpgrp;
/* the process group of the shell */
/**/
mod_export pid_t mypgrp;
/* the job we are working on */
/**/
mod_export int thisjob;
/* the current job (+) */
/**/
mod_export int curjob;
/* the previous job (-) */
/**/
mod_export int prevjob;
/* the job table */
/**/
mod_export struct job *jobtab;
/* Size of the job table. */
/**/
mod_export int jobtabsize;
/* The highest numbered job in the jobtable */
/**/
mod_export int maxjob;
/* If we have entered a subshell, the original shell's job table. */
static struct job *oldjobtab;
/* The size of that. */
static int oldmaxjob;
/* shell timings */
/**/
#ifdef HAVE_GETRUSAGE
/**/
static struct rusage child_usage;
/**/
#else
/**/
static struct tms shtms;
/**/
#endif
/* 1 if ttyctl -f has been executed */
/**/
mod_export int ttyfrozen;
/* Previous values of errflag and breaks if the signal handler had to
* change them. And a flag saying if it did that. */
/**/
int prev_errflag, prev_breaks, errbrk_saved;
/**/
int numpipestats, pipestats[MAX_PIPESTATS];
/* Diff two timevals for elapsed-time computations */
/**/
static struct timeval *
dtime(struct timeval *dt, struct timeval *t1, struct timeval *t2)
{
dt->tv_sec = t2->tv_sec - t1->tv_sec;
dt->tv_usec = t2->tv_usec - t1->tv_usec;
if (dt->tv_usec < 0) {
dt->tv_usec += 1000000.0;
dt->tv_sec -= 1.0;
}
return dt;
}
/* change job table entry from stopped to running */
/**/
void
makerunning(Job jn)
{
Process pn;
jn->stat &= ~STAT_STOPPED;
for (pn = jn->procs; pn; pn = pn->next) {
#if 0
if (WIFSTOPPED(pn->status) &&
(!(jn->stat & STAT_SUPERJOB) || pn->next))
pn->status = SP_RUNNING;
#endif
if (WIFSTOPPED(pn->status))
pn->status = SP_RUNNING;
}
if (jn->stat & STAT_SUPERJOB)
makerunning(jobtab + jn->other);
}
/* Find process and job associated with pid. *
* Return 1 if search was successful, else return 0. */
/**/
int
findproc(pid_t pid, Job *jptr, Process *pptr, int aux)
{
Process pn;
int i;
*jptr = NULL;
*pptr = NULL;
for (i = 1; i <= maxjob; i++)
{
/*
* We are only interested in jobs with processes still
* marked as live. Careful in case there's an identical
* process number in a job we haven't quite got around
* to deleting.
*/
if (jobtab[i].stat & STAT_DONE)
continue;
for (pn = aux ? jobtab[i].auxprocs : jobtab[i].procs;
pn; pn = pn->next)
{
/*
* Make sure we match a process that's still running.
*
* When a job contains two pids, one terminated pid and one
* running pid, then the condition (jobtab[i].stat &
* STAT_DONE) will not stop these pids from being candidates
* for the findproc result (which is supposed to be a
* RUNNING pid), and if the terminated pid is an identical
* process number for the pid identifying the running
* process we are trying to find (after pid number
* wrapping), then we need to avoid returning the terminated
* pid, otherwise the shell would block and wait forever for
* the termination of the process which pid we were supposed
* to return in a different job.
*/
if (pn->pid == pid) {
*pptr = pn;
*jptr = jobtab + i;
if (pn->status == SP_RUNNING)
return 1;
}
}
}
return (*pptr && *jptr);
}
/* Does the given job number have any processes? */
/**/
int
hasprocs(int job)
{
Job jn;
if (job < 0) {
DPUTS(1, "job number invalid in hasprocs");
return 0;
}
jn = jobtab + job;
return jn->procs || jn->auxprocs;
}
/* Find the super-job of a sub-job. */
/**/
static int
super_job(int sub)
{
int i;
for (i = 1; i <= maxjob; i++)
if ((jobtab[i].stat & STAT_SUPERJOB) &&
jobtab[i].other == sub &&
jobtab[i].gleader)
return i;
return 0;
}
/**/
static int
handle_sub(int job, int fg)
{
/* job: superjob; sj: subjob. */
Job jn = jobtab + job, sj = jobtab + jn->other;
if ((sj->stat & STAT_DONE) || (!sj->procs && !sj->auxprocs)) {
struct process *p;
for (p = sj->procs; p; p = p->next) {
if (WIFSIGNALED(p->status)) {
if (jn->gleader != mypgrp && jn->procs->next)
killpg(jn->gleader, WTERMSIG(p->status));
else
kill(jn->procs->pid, WTERMSIG(p->status));
kill(sj->other, SIGCONT);
kill(sj->other, WTERMSIG(p->status));
break;
}
}
if (!p) {
int cp;
jn->stat &= ~STAT_SUPERJOB;
jn->stat |= STAT_WASSUPER;
if ((cp = ((WIFEXITED(jn->procs->status) ||
WIFSIGNALED(jn->procs->status)) &&
killpg(jn->gleader, 0) == -1))) {
Process p;
for (p = jn->procs; p->next; p = p->next);
jn->gleader = p->pid;
}
/* This deleted the job too early if the parent
shell waited for a command in a list that will
be executed by the sub-shell (e.g.: if we have
`ls|if true;then sleep 20;cat;fi' and ^Z the
sleep, the rest will be executed by a sub-shell,
but the parent shell gets notified for the
sleep.
deletejob(sj, 0); */
/* If this super-job contains only the sub-shell,
we have to attach the tty to its process group
now. */
if ((fg || thisjob == job) &&
(!jn->procs->next || cp || jn->procs->pid != jn->gleader))
attachtty(jn->gleader);
kill(sj->other, SIGCONT);
if (jn->stat & STAT_DISOWN)
{
deletejob(jn, 1);
}
}
curjob = jn - jobtab;
} else if (sj->stat & STAT_STOPPED) {
struct process *p;
jn->stat |= STAT_STOPPED;
for (p = jn->procs; p; p = p->next)
if (p->status == SP_RUNNING ||
(!WIFEXITED(p->status) && !WIFSIGNALED(p->status)))
p->status = sj->procs->status;
curjob = jn - jobtab;
printjob(jn, !!isset(LONGLISTJOBS), 1);
return 1;
}
return 0;
}
/* Get the latest usage information */
/**/
void
get_usage(void)
{
#ifdef HAVE_GETRUSAGE
getrusage(RUSAGE_CHILDREN, &child_usage);
#else
times(&shtms);
#endif
}
#if !defined HAVE_WAIT3 || !defined HAVE_GETRUSAGE
/* Update status of process that we have just WAIT'ed for */
/**/
void
update_process(Process pn, int status)
{
struct timezone dummy_tz;
#ifdef HAVE_GETRUSAGE
struct timeval childs = child_usage.ru_stime;
struct timeval childu = child_usage.ru_utime;
#else
long childs = shtms.tms_cstime;
long childu = shtms.tms_cutime;
#endif
/* get time-accounting info */
get_usage();
gettimeofday(&pn->endtime, &dummy_tz); /* record time process exited */
pn->status = status; /* save the status returned by WAIT */
#ifdef HAVE_GETRUSAGE
dtime(&pn->ti.ru_stime, &childs, &child_usage.ru_stime);
dtime(&pn->ti.ru_utime, &childu, &child_usage.ru_utime);
#else
pn->ti.st = shtms.tms_cstime - childs; /* compute process system space time */
pn->ti.ut = shtms.tms_cutime - childu; /* compute process user space time */
#endif
}
#endif
/*
* Called when the current shell is behaving as if it received
* an interactively generated signal (sig).
*
* As we got the signal or are pretending we did, we need to pretend
* anything attached to a CURSH process got it, too.
*/
/**/
void
check_cursh_sig(int sig)
{
int i, j;
if (!errflag)
return;
for (i = 1; i <= maxjob; i++) {
if ((jobtab[i].stat & (STAT_CURSH|STAT_DONE)) ==
STAT_CURSH) {
for (j = 0; j < 2; j++) {
Process pn = j ? jobtab[i].auxprocs : jobtab[i].procs;
for (; pn; pn = pn->next) {
if (pn->status == SP_RUNNING) {
kill(pn->pid, sig);
}
}
}
}
}
}
/**/
void
storepipestats(Job jn, int inforeground, int fixlastval)
{
int i, pipefail = 0, jpipestats[MAX_PIPESTATS];
Process p;
for (p = jn->procs, i = 0; p && i < MAX_PIPESTATS; p = p->next, i++) {
jpipestats[i] = (WIFSIGNALED(p->status) ?
0200 | WTERMSIG(p->status) :
(WIFSTOPPED(p->status) ?
0200 | WEXITSTATUS(p->status) :
WEXITSTATUS(p->status)));
if (jpipestats[i])
pipefail = jpipestats[i];
}
if (inforeground) {
memcpy(pipestats, jpipestats, sizeof(int)*i);
if ((jn->stat & STAT_CURSH) && i < MAX_PIPESTATS)
pipestats[i++] = lastval;
numpipestats = i;
}
if (fixlastval) {
if (jn->stat & STAT_CURSH) {
if (!lastval && isset(PIPEFAIL))
lastval = pipefail;
} else if (isset(PIPEFAIL))
lastval = pipefail;
}
}
/* Update status of job, possibly printing it */
/**/
void
update_job(Job jn)
{
Process pn;
int job;
int val = 0, status = 0;
int somestopped = 0, inforeground = 0;
for (pn = jn->auxprocs; pn; pn = pn->next) {
#ifdef WIFCONTINUED
if (WIFCONTINUED(pn->status))
pn->status = SP_RUNNING;
#endif
if (pn->status == SP_RUNNING)
return;
}
for (pn = jn->procs; pn; pn = pn->next) {
#ifdef WIFCONTINUED
if (WIFCONTINUED(pn->status)) {
jn->stat &= ~STAT_STOPPED;
pn->status = SP_RUNNING;
}
#endif
if (pn->status == SP_RUNNING) /* some processes in this job are running */
return; /* so no need to update job table entry */
if (WIFSTOPPED(pn->status)) /* some processes are stopped */
somestopped = 1; /* so job is not done, but entry needs updating */
if (!pn->next) /* last job in pipeline determines exit status */
val = (WIFSIGNALED(pn->status) ?
0200 | WTERMSIG(pn->status) :
(WIFSTOPPED(pn->status) ?
0200 | WEXITSTATUS(pn->status) :
WEXITSTATUS(pn->status)));
if (pn->pid == jn->gleader) /* if this process is process group leader */
status = pn->status;
}
job = jn - jobtab; /* compute job number */
if (somestopped) {
if (jn->stty_in_env && !jn->ty) {
jn->ty = (struct ttyinfo *) zalloc(sizeof(struct ttyinfo));
gettyinfo(jn->ty);
}
if (jn->stat & STAT_STOPPED) {
if (jn->stat & STAT_SUBJOB) {
/* If we have `cat foo|while read a; grep $a bar;done'
* and have hit ^Z, the sub-job is stopped, but the
* super-job may still be running, waiting to be stopped
* or to exit. So we have to send it a SIGTSTP. */
int i;
if ((i = super_job(job)))
killpg(jobtab[i].gleader, SIGTSTP);
}
return;
}
}
{ /* job is done or stopped, remember return value */
lastval2 = val;
/* If last process was run in the current shell, keep old status
* and let it handle its own traps, but always allow the test
* for the pgrp.
*/
if (jn->stat & STAT_CURSH)
inforeground = 1;
else if (job == thisjob) {
lastval = val;
inforeground = 2;
}
}
if (shout && shout != stderr && !ttyfrozen && !jn->stty_in_env &&
!zleactive && job == thisjob && !somestopped &&
!(jn->stat & STAT_NOSTTY))
gettyinfo(&shttyinfo);
if (isset(MONITOR)) {
pid_t pgrp = gettygrp(); /* get process group of tty */
/* is this job in the foreground of an interactive shell? */
if (mypgrp != pgrp && inforeground &&
(jn->gleader == pgrp || (pgrp > 1 && kill(-pgrp, 0) == -1))) {
if (list_pipe) {
if (somestopped || (pgrp > 1 && kill(-pgrp, 0) == -1)) {
attachtty(mypgrp);
/* check window size and adjust if necessary */
adjustwinsize(0);
} else {
/*
* Oh, dear, we're right in the middle of some confusion
* of shell jobs on the righthand side of a pipeline, so
* it's death to call attachtty() just yet. Mark the
* fact in the job, so that the attachtty() will be called
* when the job is finally deleted.
*/
jn->stat |= STAT_ATTACH;
}
/* If we have `foo|while true; (( x++ )); done', and hit
* ^C, we have to stop the loop, too. */
if ((val & 0200) && inforeground == 1 &&
((val & ~0200) == SIGINT || (val & ~0200) == SIGQUIT)) {
if (!errbrk_saved) {
errbrk_saved = 1;
prev_breaks = breaks;
prev_errflag = errflag;
}
breaks = loops;
errflag |= ERRFLAG_INT;
inerrflush();
}
} else {
attachtty(mypgrp);
/* check window size and adjust if necessary */
adjustwinsize(0);
}
}
} else if (list_pipe && (val & 0200) && inforeground == 1 &&
((val & ~0200) == SIGINT || (val & ~0200) == SIGQUIT)) {
if (!errbrk_saved) {
errbrk_saved = 1;
prev_breaks = breaks;
prev_errflag = errflag;
}
breaks = loops;
errflag |= ERRFLAG_INT;
inerrflush();
}
if (somestopped && jn->stat & STAT_SUPERJOB)
return;
jn->stat |= (somestopped) ? STAT_CHANGED | STAT_STOPPED :
STAT_CHANGED | STAT_DONE;
if (jn->stat & (STAT_DONE|STAT_STOPPED)) {
/* This may be redundant with printjob() but note that inforeground
* is true here for STAT_CURSH jobs even when job != thisjob, most
* likely because thisjob = -1 from exec.c:execsimple() trickery.
* However, if we reset lastval here we break it for printjob().
*/
storepipestats(jn, inforeground, 0);
}
if (!inforeground &&
(jn->stat & (STAT_SUBJOB | STAT_DONE)) == (STAT_SUBJOB | STAT_DONE)) {
int su;
if ((su = super_job(jn - jobtab)))
handle_sub(su, 0);
}
if ((jn->stat & (STAT_DONE | STAT_STOPPED)) == STAT_STOPPED) {
prevjob = curjob;
curjob = job;
}
if ((isset(NOTIFY) || job == thisjob) && (jn->stat & STAT_LOCKED)) {
if (printjob(jn, !!isset(LONGLISTJOBS), 0) &&
zleactive)
zleentry(ZLE_CMD_REFRESH);
}
if (sigtrapped[SIGCHLD] && job != thisjob)
dotrap(SIGCHLD);
/* When MONITOR is set, the foreground process runs in a different *
* process group from the shell, so the shell will not receive *
* terminal signals, therefore we pretend that the shell got *
* the signal too. */
if (inforeground == 2 && isset(MONITOR) && WIFSIGNALED(status)) {
int sig = WTERMSIG(status);
if (sig == SIGINT || sig == SIGQUIT) {
if (sigtrapped[sig]) {
dotrap(sig);
/* We keep the errflag as set or not by dotrap.
* This is to fulfil the promise to carry on
* with the jobs if trap returns zero.
* Setting breaks = loops ensures a consistent return
* status if inside a loop. Maybe the code in loops
* should be changed.
*/
if (errflag)
breaks = loops;
} else {
breaks = loops;
errflag |= ERRFLAG_INT;
}
check_cursh_sig(sig);
}
}
}
/* set the previous job to something reasonable */
/**/
static void
setprevjob(void)
{
int i;
for (i = maxjob; i; i--)
if ((jobtab[i].stat & STAT_INUSE) && (jobtab[i].stat & STAT_STOPPED) &&
!(jobtab[i].stat & STAT_SUBJOB) && i != curjob && i != thisjob) {
prevjob = i;
return;
}
for (i = maxjob; i; i--)
if ((jobtab[i].stat & STAT_INUSE) && !(jobtab[i].stat & STAT_SUBJOB) &&
i != curjob && i != thisjob) {
prevjob = i;
return;
}
prevjob = -1;
}
/**/
long
get_clktck(void)
{
static long clktck;
#ifdef _SC_CLK_TCK
if (!clktck)
/* fetch clock ticks per second from *
* sysconf only the first time */
clktck = sysconf(_SC_CLK_TCK);
#else
# ifdef __NeXT__
/* NeXTStep 3.3 defines CLK_TCK wrongly */
clktck = 60;
# else
# ifdef CLK_TCK
clktck = CLK_TCK;
# else
# ifdef HZ
clktck = HZ;
# else
clktck = 60;
# endif
# endif
# endif
#endif
return clktck;
}
/**/
static void
printhhmmss(double secs)
{
int mins = (int) secs / 60;
int hours = mins / 60;
secs -= 60 * mins;
mins -= 60 * hours;
if (hours)
fprintf(stderr, "%d:%02d:%05.2f", hours, mins, secs);
else if (mins)
fprintf(stderr, "%d:%05.2f", mins, secs);
else
fprintf(stderr, "%.3f", secs);
}
static void
printtime(struct timeval *real, child_times_t *ti, char *desc)
{
char *s;
double elapsed_time, user_time, system_time;
#ifdef HAVE_GETRUSAGE
double total_time;
#endif
int percent, desclen;
if (!desc)
{
desc = "";
desclen = 0;
}
else
{
desc = dupstring(desc);
unmetafy(desc, &desclen);
}
/* go ahead and compute these, since almost every TIMEFMT will have them */
elapsed_time = real->tv_sec + real->tv_usec / 1000000.0;
#ifdef HAVE_GETRUSAGE
user_time = ti->ru_utime.tv_sec + ti->ru_utime.tv_usec / 1000000.0;
system_time = ti->ru_stime.tv_sec + ti->ru_stime.tv_usec / 1000000.0;
total_time = user_time + system_time;
percent = 100.0 * total_time
/ (real->tv_sec + real->tv_usec / 1000000.0);
#else
{
long clktck = get_clktck();
user_time = ti->ut / (double) clktck;
system_time = ti->st / (double) clktck;
percent = 100.0 * (ti->ut + ti->st)
/ (clktck * real->tv_sec + clktck * real->tv_usec / 1000000.0);
}
#endif
queue_signals();
if (!(s = getsparam("TIMEFMT")))
s = DEFAULT_TIMEFMT;
else
s = unmetafy(s, NULL);
for (; *s; s++)
if (*s == '%')
switch (*++s) {
case 'E':
fprintf(stderr, "%4.2fs", elapsed_time);
break;
case 'U':
fprintf(stderr, "%4.2fs", user_time);
break;
case 'S':
fprintf(stderr, "%4.2fs", system_time);
break;
case 'm':
switch (*++s) {
case 'E':
fprintf(stderr, "%0.fms", elapsed_time * 1000.0);
break;
case 'U':
fprintf(stderr, "%0.fms", user_time * 1000.0);
break;
case 'S':
fprintf(stderr, "%0.fms", system_time * 1000.0);
break;
default:
fprintf(stderr, "%%m");
s--;
break;
}
break;
case 'u':
switch (*++s) {
case 'E':
fprintf(stderr, "%0.fus", elapsed_time * 1000000.0);
break;
case 'U':
fprintf(stderr, "%0.fus", user_time * 1000000.0);
break;
case 'S':
fprintf(stderr, "%0.fus", system_time * 1000000.0);
break;
default:
fprintf(stderr, "%%u");
s--;
break;
}
break;
case '*':
switch (*++s) {
case 'E':
printhhmmss(elapsed_time);
break;
case 'U':
printhhmmss(user_time);
break;
case 'S':
printhhmmss(system_time);
break;
default:
fprintf(stderr, "%%*");
s--;
break;
}
break;
case 'P':
fprintf(stderr, "%d%%", percent);
break;
#ifdef HAVE_STRUCT_RUSAGE_RU_NSWAP
case 'W':
fprintf(stderr, "%ld", ti->ru_nswap);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_IXRSS
case 'X':
fprintf(stderr, "%ld",
total_time ?
(long)(ti->ru_ixrss / total_time) :
(long)0);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_IDRSS
case 'D':
fprintf(stderr, "%ld",
total_time ?
(long) ((ti->ru_idrss
#ifdef HAVE_STRUCT_RUSAGE_RU_ISRSS
+ ti->ru_isrss
#endif
) / total_time) :
(long)0);
break;
#endif
#if defined(HAVE_STRUCT_RUSAGE_RU_IDRSS) || \
defined(HAVE_STRUCT_RUSAGE_RU_ISRSS) || \
defined(HAVE_STRUCT_RUSAGE_RU_IXRSS)
case 'K':
/* treat as D if X not available */
fprintf(stderr, "%ld",
total_time ?
(long) ((
#ifdef HAVE_STRUCT_RUSAGE_RU_IXRSS
ti->ru_ixrss
#else
0
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_IDRSS
+ ti->ru_idrss
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_ISRSS
+ ti->ru_isrss
#endif
) / total_time) :
(long)0);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_MAXRSS
case 'M':
fprintf(stderr, "%ld", ti->ru_maxrss / 1024);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_MAJFLT
case 'F':
fprintf(stderr, "%ld", ti->ru_majflt);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_MINFLT
case 'R':
fprintf(stderr, "%ld", ti->ru_minflt);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_INBLOCK
case 'I':
fprintf(stderr, "%ld", ti->ru_inblock);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_OUBLOCK
case 'O':
fprintf(stderr, "%ld", ti->ru_oublock);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_MSGRCV
case 'r':
fprintf(stderr, "%ld", ti->ru_msgrcv);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_MSGSND
case 's':
fprintf(stderr, "%ld", ti->ru_msgsnd);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_NSIGNALS
case 'k':
fprintf(stderr, "%ld", ti->ru_nsignals);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_NVCSW
case 'w':
fprintf(stderr, "%ld", ti->ru_nvcsw);
break;
#endif
#ifdef HAVE_STRUCT_RUSAGE_RU_NIVCSW
case 'c':
fprintf(stderr, "%ld", ti->ru_nivcsw);
break;
#endif
case 'J':
fwrite(desc, sizeof(char), desclen, stderr);
break;
case '%':
putc('%', stderr);
break;
case '\0':
s--;
break;
default:
fprintf(stderr, "%%%c", *s);
break;
} else
putc(*s, stderr);
unqueue_signals();
putc('\n', stderr);
fflush(stderr);
}
/**/
static void
dumptime(Job jn)
{
Process pn;
struct timeval dtimeval;
if (!jn->procs)
return;
for (pn = jn->procs; pn; pn = pn->next)
printtime(dtime(&dtimeval, &pn->bgtime, &pn->endtime), &pn->ti,
pn->text);
}
/* Check whether shell should report the amount of time consumed *
* by job. This will be the case if we have preceded the command *
* with the keyword time, or if REPORTTIME is non-negative and the *
* amount of time consumed by the job is greater than REPORTTIME */
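/* Illustrative example (not from this source): with REPORTTIME=5 set, any *
 * job consuming more than 5 seconds of combined user+system CPU time has *
 * its timing printed automatically, as if prefixed with `time'. */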
/**/
static int
should_report_time(Job j)
{
struct value vbuf;
Value v;
char *s = "REPORTTIME";
int save_errflag = errflag;
zlong reporttime = -1;
#ifdef HAVE_GETRUSAGE
char *sm = "REPORTMEMORY";
zlong reportmemory = -1;
#endif
/* if the time keyword was used */
if (j->stat & STAT_TIMED)
return 1;
queue_signals();
errflag = 0;
if ((v = getvalue(&vbuf, &s, 0)))
reporttime = getintvalue(v);
#ifdef HAVE_GETRUSAGE
if ((v = getvalue(&vbuf, &sm, 0)))
reportmemory = getintvalue(v);
#endif
errflag = save_errflag;
unqueue_signals();
if (reporttime < 0
#ifdef HAVE_GETRUSAGE
&& reportmemory < 0
#endif
)
return 0;
/* can this ever happen? */
if (!j->procs)
return 0;
if (zleactive)
return 0;
if (reporttime >= 0)
{
#ifdef HAVE_GETRUSAGE
reporttime -= j->procs->ti.ru_utime.tv_sec +
j->procs->ti.ru_stime.tv_sec;
if (j->procs->ti.ru_utime.tv_usec +
j->procs->ti.ru_stime.tv_usec >= 1000000)
reporttime--;
if (reporttime <= 0)
return 1;
#else
{
long clktck = get_clktck();
if ((j->procs->ti.ut + j->procs->ti.st) / clktck >= reporttime)
return 1;
}
#endif
}
#ifdef HAVE_GETRUSAGE
if (reportmemory >= 0 &&
j->procs->ti.ru_maxrss / 1024 > reportmemory)
return 1;
#endif
return 0;
}
/* !(lng & 3) means jobs *
* (lng & 1) means jobs -l *
* (lng & 2) means jobs -p
* (lng & 4) means jobs -d
*
* synch = 0 means asynchronous
* synch = 1 means synchronous
* synch = 2 means called synchronously from jobs
* synch = 3 means called synchronously from bg or fg
*
* Returns 1 if some output was done.
*
 * The function also deletes the job if it was done, even if it
 * is not printed.
*/
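/*
 * Illustrative flag values, as derived from bin_fg() further below:
 *   `jobs'      -> lng == 0, synch == 2
 *   `jobs -l'   -> lng == 1, synch == 2
 *   `jobs -p'   -> lng == 2, synch == 2
 *   `jobs -d'   -> lng |= 4
 *   `fg'/`bg'   -> synch == 3
 *   asynchronous state-change notification -> synch == 0
 */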
/**/
int
printjob(Job jn, int lng, int synch)
{
Process pn;
int job, len = 9, sig, sflag = 0, llen;
int conted = 0, lineleng = zterm_columns, skip = 0, doputnl = 0;
int doneprint = 0, skip_print = 0;
FILE *fout = (synch == 2 || !shout) ? stdout : shout;
if (synch > 1 && oldjobtab != NULL)
job = jn - oldjobtab;
else
job = jn - jobtab;
DPUTS3(job < 0 || job > (oldjobtab && synch > 1 ? oldmaxjob : maxjob),
"bogus job number, jn = %L, jobtab = %L, oldjobtab = %L",
(long)jn, (long)jobtab, (long)oldjobtab);
if (jn->stat & STAT_NOPRINT) {
skip_print = 1;
}
if (lng < 0) {
conted = 1;
lng = !!isset(LONGLISTJOBS);
}
/* find length of longest signame, check to see */
/* if we really need to print this job */
for (pn = jn->procs; pn; pn = pn->next) {
if (jn->stat & STAT_SUPERJOB &&
jn->procs->status == SP_RUNNING && !pn->next)
pn->status = SP_RUNNING;
if (pn->status != SP_RUNNING) {
if (WIFSIGNALED(pn->status)) {
sig = WTERMSIG(pn->status);
llen = strlen(sigmsg(sig));
if (WCOREDUMP(pn->status))
llen += 14;
if (llen > len)
len = llen;
if (sig != SIGINT && sig != SIGPIPE)
sflag = 1;
if (job == thisjob && sig == SIGINT)
doputnl = 1;
if (isset(PRINTEXITVALUE) && isset(SHINSTDIN)) {
sflag = 1;
skip_print = 0;
}
} else if (WIFSTOPPED(pn->status)) {
sig = WSTOPSIG(pn->status);
if ((int)strlen(sigmsg(sig)) > len)
len = strlen(sigmsg(sig));
if (job == thisjob && sig == SIGTSTP)
doputnl = 1;
} else if (isset(PRINTEXITVALUE) && isset(SHINSTDIN) &&
WEXITSTATUS(pn->status)) {
sflag = 1;
skip_print = 0;
}
}
}
if (skip_print) {
if (jn->stat & STAT_DONE) {
/* This looks silly, but see update_job() */
if (synch <= 1)
storepipestats(jn, job == thisjob, job == thisjob);
if (should_report_time(jn))
dumptime(jn);
deletejob(jn, 0);
if (job == curjob) {
curjob = prevjob;
prevjob = job;
}
if (job == prevjob)
setprevjob();
}
return 0;
}
/*
* - Always print if called from jobs
* - Otherwise, require MONITOR option ("jobbing") and some
* change of state
* - also either the shell is interactive or this is synchronous.
*/
if (synch == 2 ||
((interact || synch) && jobbing &&
((jn->stat & STAT_STOPPED) || sflag || job != thisjob))) {
int len2, fline = 1;
/* POSIX requires just the job text for bg and fg */
int plainfmt = (synch == 3) && isset(POSIXJOBS);
/* use special format for current job, except in `jobs' */
int thisfmt = job == thisjob && synch != 2;
Process qn;
if (!synch)
zleentry(ZLE_CMD_TRASH);
if (doputnl && !synch) {
doneprint = 1;
putc('\n', fout);
}
for (pn = jn->procs; pn;) {
len2 = (thisfmt ? 5 : 10) + len; /* 2 spaces */
if (lng & 3)
qn = pn->next;
else
for (qn = pn->next; qn; qn = qn->next) {
if (qn->status != pn->status)
break;
if ((int)strlen(qn->text) + len2 + ((qn->next) ? 3 : 0)
> lineleng)
break;
len2 += strlen(qn->text) + 2;
}
doneprint = 1;
if (!plainfmt) {
if (!thisfmt || lng) {
if (fline)
fprintf(fout, "[%ld] %c ",
(long)job,
(job == curjob) ? '+'
: (job == prevjob) ? '-' : ' ');
else
fprintf(fout, (job > 9) ? " " : " ");
} else
fprintf(fout, "zsh: ");
if (lng & 1)
fprintf(fout, "%ld ", (long) pn->pid);
else if (lng & 2) {
pid_t x = jn->gleader;
fprintf(fout, "%ld ", (long) x);
do
skip++;
while ((x /= 10));
skip++;
lng &= ~3;
} else
fprintf(fout, "%*s", skip, "");
if (pn->status == SP_RUNNING) {
if (!conted)
fprintf(fout, "running%*s", len - 7 + 2, "");
else
fprintf(fout, "continued%*s", len - 9 + 2, "");
}
else if (WIFEXITED(pn->status)) {
if (WEXITSTATUS(pn->status))
fprintf(fout, "exit %-4d%*s", WEXITSTATUS(pn->status),
len - 9 + 2, "");
else
fprintf(fout, "done%*s", len - 4 + 2, "");
} else if (WIFSTOPPED(pn->status))
fprintf(fout, "%-*s", len + 2,
sigmsg(WSTOPSIG(pn->status)));
else if (WCOREDUMP(pn->status))
fprintf(fout, "%s (core dumped)%*s",
sigmsg(WTERMSIG(pn->status)),
(int)(len - 14 + 2 -
strlen(sigmsg(WTERMSIG(pn->status)))), "");
else
fprintf(fout, "%-*s", len + 2,
sigmsg(WTERMSIG(pn->status)));
}
for (; pn != qn; pn = pn->next) {
char *txt = dupstring(pn->text);
int txtlen;
unmetafy(txt, &txtlen);
fwrite(txt, sizeof(char), txtlen, fout);
if (pn->next)
fputs(" | ", fout);
}
putc('\n', fout);
fline = 0;
}
fflush(fout);
} else if (doputnl && interact && !synch) {
doneprint = 1;
putc('\n', fout);
fflush(fout);
}
/* print "(pwd now: foo)" messages: with (lng & 4) we are printing
* the directory where the job is running, otherwise the current directory
*/
if ((lng & 4) || (interact && job == thisjob &&
jn->pwd && strcmp(jn->pwd, pwd))) {
doneprint = 1;
fprintf(fout, "(pwd %s: ", (lng & 4) ? "" : "now");
fprintdir(((lng & 4) && jn->pwd) ? jn->pwd : pwd, fout);
fprintf(fout, ")\n");
fflush(fout);
}
/* delete job if done */
if (jn->stat & STAT_DONE) {
/* This looks silly, but see update_job() */
if (synch <= 1)
storepipestats(jn, job == thisjob, job == thisjob);
if (should_report_time(jn))
dumptime(jn);
deletejob(jn, 0);
if (job == curjob) {
curjob = prevjob;
prevjob = job;
}
if (job == prevjob)
setprevjob();
} else
jn->stat &= ~STAT_CHANGED;
return doneprint;
}
/* Add a file to be deleted or fd to be closed to the current job */
/**/
void
addfilelist(const char *name, int fd)
{
Jobfile jf = (Jobfile)zalloc(sizeof(struct jobfile));
LinkList ll = jobtab[thisjob].filelist;
if (!ll)
ll = jobtab[thisjob].filelist = znewlinklist();
if (name)
{
jf->u.name = ztrdup(name);
jf->is_fd = 0;
}
else
{
jf->u.fd = fd;
jf->is_fd = 1;
}
zaddlinknode(ll, jf);
}
/* Clean up pipes no longer needed associated with a job */
/**/
void
pipecleanfilelist(LinkList filelist, int proc_subst_only)
{
LinkNode node;
if (!filelist)
return;
node = firstnode(filelist);
while (node) {
Jobfile jf = (Jobfile)getdata(node);
if (jf->is_fd &&
(!proc_subst_only || fdtable[jf->u.fd] == FDT_PROC_SUBST)) {
LinkNode next = nextnode(node);
zclose(jf->u.fd);
(void)remnode(filelist, node);
zfree(jf, sizeof(*jf));
node = next;
} else
incnode(node);
}
}
/* Finished with list of files for a job */
/**/
void
deletefilelist(LinkList file_list, int disowning)
{
Jobfile jf;
if (file_list) {
while ((jf = (Jobfile)getlinknode(file_list))) {
if (jf->is_fd) {
if (!disowning)
zclose(jf->u.fd);
} else {
if (!disowning)
unlink(jf->u.name);
zsfree(jf->u.name);
}
zfree(jf, sizeof(*jf));
}
zfree(file_list, sizeof(struct linklist));
}
}
/**/
void
freejob(Job jn, int deleting)
{
struct process *pn, *nx;
pn = jn->procs;
jn->procs = NULL;
for (; pn; pn = nx) {
nx = pn->next;
zfree(pn, sizeof(struct process));
}
pn = jn->auxprocs;
jn->auxprocs = NULL;
for (; pn; pn = nx) {
nx = pn->next;
zfree(pn, sizeof(struct process));
}
if (jn->ty)
zfree(jn->ty, sizeof(struct ttyinfo));
if (jn->pwd)
zsfree(jn->pwd);
jn->pwd = NULL;
if (jn->stat & STAT_WASSUPER) {
/* careful in case we shrink and move the job table */
int job = jn - jobtab;
if (deleting)
deletejob(jobtab + jn->other, 0);
else
freejob(jobtab + jn->other, 0);
jn = jobtab + job;
}
jn->gleader = jn->other = 0;
jn->stat = jn->stty_in_env = 0;
jn->filelist = NULL;
jn->ty = NULL;
/* Find the new highest job number. */
if (maxjob == jn - jobtab) {
while (maxjob && !(jobtab[maxjob].stat & STAT_INUSE))
maxjob--;
}
}
/*
* We are actually finished with this job, rather
* than freeing it to make space.
*
* If "disowning" is set, files associated with the job are not
* actually deleted --- and won't be as there is nothing left
* to clear up.
*/
/**/
void
deletejob(Job jn, int disowning)
{
deletefilelist(jn->filelist, disowning);
if (jn->stat & STAT_ATTACH) {
attachtty(mypgrp);
adjustwinsize(0);
}
if (jn->stat & STAT_SUPERJOB) {
Job jno = jobtab + jn->other;
if (jno->stat & STAT_SUBJOB)
jno->stat |= STAT_SUBJOB_ORPHANED;
}
freejob(jn, 1);
}
/*
* Add a process to the current job.
* The third argument is 1 if we are adding a process which is not
* part of the main pipeline but an auxiliary process used for
* handling MULTIOS or process substitution. We will wait for it
* but not display job information about it.
*/
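/*
 * Illustrative example (an assumption, not stated in this source): a helper
 * process spawned to service a multio redirection such as `ls >f1 >f2' would
 * be added with aux == 1; it is waited for but never listed by `jobs'.
 */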
/**/
void
addproc(pid_t pid, char *text, int aux, struct timeval *bgtime)
{
Process pn, *pnlist;
DPUTS(thisjob == -1, "No valid job in addproc.");
pn = (Process) zshcalloc(sizeof *pn);
pn->pid = pid;
if (text)
strcpy(pn->text, text);
else
*pn->text = '\0';
pn->status = SP_RUNNING;
pn->next = NULL;
if (!aux)
{
pn->bgtime = *bgtime;
/* if this is the first process we are adding to *
* the job, then it's the group leader. */
if (!jobtab[thisjob].gleader)
jobtab[thisjob].gleader = pid;
/* attach this process to end of process list of current job */
pnlist = &jobtab[thisjob].procs;
}
else
pnlist = &jobtab[thisjob].auxprocs;
if (*pnlist) {
Process n;
for (n = *pnlist; n->next; n = n->next);
n->next = pn;
} else {
/* first process for this job */
*pnlist = pn;
}
/* If the first process in the job finished before any others were *
* added, maybe STAT_DONE got set incorrectly. This can happen if *
* a $(...) was waited for and the last existing job in the *
* pipeline was already finished. We need to be very careful that *
* there was no call to printjob() between then and now, else *
* the job will already have been deleted from the table. */
jobtab[thisjob].stat &= ~STAT_DONE;
}
/* Check if we have files to delete. We need to check this to see *
* if it's all right to exec a command without forking in the last *
* component of subshells or after the `-c' option. */
/**/
int
havefiles(void)
{
int i;
for (i = 1; i <= maxjob; i++)
if (jobtab[i].stat && jobtab[i].filelist)
return 1;
return 0;
}
/*
* Wait for a particular process.
* wait_cmd indicates this is from the interactive wait command,
* in which case the behaviour is a little different: the command
* itself can be interrupted by a trapped signal.
*/
/**/
int
waitforpid(pid_t pid, int wait_cmd)
{
int first = 1, q = queue_signal_level();
/* child_block() around this loop in case #ifndef WNOHANG */
dont_queue_signals();
child_block(); /* unblocked in signal_suspend() */
queue_traps(wait_cmd);
/* This function should never be called with a pid that is not a
* child of the current shell. Consequently, if kill(0, pid)
* fails here with ESRCH, the child has already been reaped. In
* the loop body, we expect this to happen in signal_suspend()
* via zhandler(), after which this test terminates the loop.
*/
while (!errflag && (kill(pid, 0) >= 0 || errno != ESRCH)) {
if (first)
first = 0;
else if (!wait_cmd)
kill(pid, SIGCONT);
last_signal = -1;
signal_suspend(SIGCHLD, wait_cmd);
if (last_signal != SIGCHLD && wait_cmd && last_signal >= 0 &&
(sigtrapped[last_signal] & ZSIG_TRAPPED)) {
/* wait command interrupted, but no error: return */
restore_queue_signals(q);
return 128 + last_signal;
}
child_block();
}
unqueue_traps();
child_unblock();
restore_queue_signals(q);
return 0;
}
/*
* Wait for a job to finish.
* wait_cmd indicates this is from the wait builtin; see
* wait_cmd in waitforpid().
*/
/**/
static int
zwaitjob(int job, int wait_cmd)
{
int q = queue_signal_level();
Job jn = jobtab + job;
child_block(); /* unblocked during signal_suspend() */
queue_traps(wait_cmd);
dont_queue_signals();
if (jn->procs || jn->auxprocs) { /* if any forks were done */
jn->stat |= STAT_LOCKED;
if (jn->stat & STAT_CHANGED)
printjob(jn, !!isset(LONGLISTJOBS), 1);
if (jn->filelist) {
/*
* The main shell is finished with any file descriptors used
* for process substitution associated with this job: close
* them to indicate to listeners there's no more input.
*
* Note we can't safely delete temporary files yet as these
* are directly visible to other processes. However,
* we can't deadlock on the fact that those still exist, so
* that's not a problem.
*/
pipecleanfilelist(jn->filelist, 0);
}
while (!(errflag & ERRFLAG_ERROR) && jn->stat &&
!(jn->stat & STAT_DONE) &&
!(interact && (jn->stat & STAT_STOPPED))) {
signal_suspend(SIGCHLD, wait_cmd);
if (last_signal != SIGCHLD && wait_cmd && last_signal >= 0 &&
(sigtrapped[last_signal] & ZSIG_TRAPPED))
{
/* builtin wait interrupted by trapped signal */
restore_queue_signals(q);
return 128 + last_signal;
}
/* Commenting this out makes ^C-ing a job started by a function
stop the whole function again. But I guess it will stop
something else from working properly, we have to find out
what this might be. --oberon
When attempting to separate errors and interrupts, we
assumed because of the previous comment it would be OK
to remove ERRFLAG_ERROR and leave ERRFLAG_INT set, since
that's the one related to ^C. But that doesn't work.
There's something more here we don't understand. --pws
The change above to ignore ERRFLAG_INT in the loop test
solves a problem wherein child processes that ignore the
INT signal were never waited-for. Clearing the flag here
still seems the wrong thing, but perhaps ERRFLAG_INT
should be saved and restored around signal_suspend() to
prevent it being lost within a signal trap? --Bart
errflag = 0; */
if (subsh) {
killjb(jn, SIGCONT);
jn->stat &= ~STAT_STOPPED;
}
if (jn->stat & STAT_SUPERJOB)
if (handle_sub(jn - jobtab, 1))
break;
child_block();
}
} else {
deletejob(jn, 0);
pipestats[0] = lastval;
numpipestats = 1;
}
restore_queue_signals(q);
unqueue_traps();
child_unblock();
return 0;
}
/* wait for running job to finish */
/**/
void
waitjobs(void)
{
Job jn = jobtab + thisjob;
DPUTS(thisjob == -1, "No valid job in waitjobs.");
if (jn->procs || jn->auxprocs)
zwaitjob(thisjob, 0);
else {
deletejob(jn, 0);
pipestats[0] = lastval;
numpipestats = 1;
}
thisjob = -1;
}
/* clear job table when entering subshells */
/**/
mod_export void
clearjobtab(int monitor)
{
int i;
if (isset(POSIXJOBS))
oldmaxjob = 0;
for (i = 1; i <= maxjob; i++) {
/*
* See if there is a jobtable worth saving.
* We never free the saved version; it only happens
* once for each subshell of a shell with job control,
* so doesn't create a leak.
*/
if (monitor && !isset(POSIXJOBS) && jobtab[i].stat)
oldmaxjob = i+1;
else if (jobtab[i].stat & STAT_INUSE)
freejob(jobtab + i, 0);
}
if (monitor && oldmaxjob) {
int sz = oldmaxjob * sizeof(struct job);
if (oldjobtab)
free(oldjobtab);
oldjobtab = (struct job *)zalloc(sz);
memcpy(oldjobtab, jobtab, sz);
/* Don't report any job we're part of */
if (thisjob != -1 && thisjob < oldmaxjob)
memset(oldjobtab+thisjob, 0, sizeof(struct job));
}
memset(jobtab, 0, jobtabsize * sizeof(struct job)); /* zero out table */
maxjob = 0;
/*
* Although we don't have job control in subshells, we
 * sometimes need control structures for other purposes such
* as multios. Grab a job for this purpose; any will do
* since we've freed them all up (so there's no question
* of problems with the job table size here).
*/
thisjob = initjob();
}
static int initnewjob(int i)
{
jobtab[i].stat = STAT_INUSE;
if (jobtab[i].pwd) {
zsfree(jobtab[i].pwd);
jobtab[i].pwd = NULL;
}
jobtab[i].gleader = 0;
if (i > maxjob)
maxjob = i;
return i;
}
/* Get a free entry in the job table and initialize it. */
/**/
int
initjob(void)
{
int i;
for (i = 1; i <= maxjob; i++)
if (!jobtab[i].stat)
return initnewjob(i);
if (maxjob + 1 < jobtabsize)
return initnewjob(maxjob+1);
if (expandjobtab())
return initnewjob(i);
zerr("job table full or recursion limit exceeded");
return -1;
}
/**/
void
setjobpwd(void)
{
int i;
for (i = 1; i <= maxjob; i++)
if (jobtab[i].stat && !jobtab[i].pwd)
jobtab[i].pwd = ztrdup(pwd);
}
/* print pids for & */
/**/
void
spawnjob(void)
{
Process pn;
DPUTS(thisjob == -1, "No valid job in spawnjob.");
/* if we are not in a subshell */
if (!subsh) {
if (curjob == -1 || !(jobtab[curjob].stat & STAT_STOPPED)) {
curjob = thisjob;
setprevjob();
} else if (prevjob == -1 || !(jobtab[prevjob].stat & STAT_STOPPED))
prevjob = thisjob;
if (jobbing && jobtab[thisjob].procs) {
FILE *fout = shout ? shout : stdout;
fprintf(fout, "[%d]", thisjob);
for (pn = jobtab[thisjob].procs; pn; pn = pn->next)
fprintf(fout, " %ld", (long) pn->pid);
fprintf(fout, "\n");
fflush(fout);
}
}
if (!hasprocs(thisjob))
deletejob(jobtab + thisjob, 0);
else {
jobtab[thisjob].stat |= STAT_LOCKED;
pipecleanfilelist(jobtab[thisjob].filelist, 0);
}
thisjob = -1;
}
/**/
void
shelltime(void)
{
struct timezone dummy_tz;
struct timeval dtimeval, now;
child_times_t ti;
#ifndef HAVE_GETRUSAGE
struct tms buf;
#endif
gettimeofday(&now, &dummy_tz);
#ifdef HAVE_GETRUSAGE
getrusage(RUSAGE_SELF, &ti);
#else
times(&buf);
ti.ut = buf.tms_utime;
ti.st = buf.tms_stime;
#endif
printtime(dtime(&dtimeval, &shtimer, &now), &ti, "shell");
#ifdef HAVE_GETRUSAGE
getrusage(RUSAGE_CHILDREN, &ti);
#else
ti.ut = buf.tms_cutime;
ti.st = buf.tms_cstime;
#endif
printtime(&dtimeval, &ti, "children");
}
/* see if jobs need printing */
/**/
void
scanjobs(void)
{
int i;
for (i = 1; i <= maxjob; i++)
if (jobtab[i].stat & STAT_CHANGED)
printjob(jobtab + i, !!isset(LONGLISTJOBS), 1);
}
/**** job control builtins ****/
/* This simple function indicates whether or not s may represent *
* a number. It returns true iff s consists purely of digits and *
* minuses. Note that minus may appear more than once, and the empty *
* string will produce a `true' response. */
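/* Illustrative examples: isanum("123") and isanum("-1") are true, and so is *
 * isanum("") as noted above; isanum("%1") and isanum("1a") are false. */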
/**/
static int
isanum(char *s)
{
while (*s == '-' || idigit(*s))
s++;
return *s == '\0';
}
/* Make sure we have a suitable current and previous job set. */
/**/
static void
setcurjob(void)
{
if (curjob == thisjob ||
(curjob != -1 && !(jobtab[curjob].stat & STAT_INUSE))) {
curjob = prevjob;
setprevjob();
if (curjob == thisjob ||
(curjob != -1 && !((jobtab[curjob].stat & STAT_INUSE) &&
curjob != thisjob))) {
curjob = prevjob;
setprevjob();
}
}
}
/* Convert a job specifier ("%%", "%1", "%foo", "%?bar?", etc.) *
* to a job number. */
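/* Illustrative examples, matching the cases handled below: *
 * "%%" or "%+" -> current job; "%-" -> previous job; *
 * "%3" -> job number 3; "%?foo" -> most recent job whose text contains *
 * "foo"; "%vi" or "vi" -> job whose command line starts with "vi". */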
/**/
mod_export int
getjob(const char *s, const char *prog)
{
int jobnum, returnval, mymaxjob;
Job myjobtab;
if (oldjobtab) {
myjobtab = oldjobtab;
mymaxjob = oldmaxjob;
} else {
myjobtab = jobtab;
mymaxjob = maxjob;
}
/* if there is no %, treat as a name */
if (*s != '%')
goto jump;
s++;
/* "%%", "%+" and "%" all represent the current job */
if (*s == '%' || *s == '+' || !*s) {
if (curjob == -1) {
if (prog)
zwarnnam(prog, "no current job");
returnval = -1;
goto done;
}
returnval = curjob;
goto done;
}
/* "%-" represents the previous job */
if (*s == '-') {
if (prevjob == -1) {
if (prog)
zwarnnam(prog, "no previous job");
returnval = -1;
goto done;
}
returnval = prevjob;
goto done;
}
/* a digit here means we have a job number */
if (idigit(*s)) {
jobnum = atoi(s);
if (jobnum && jobnum <= mymaxjob && myjobtab[jobnum].stat &&
!(myjobtab[jobnum].stat & STAT_SUBJOB) &&
/*
* If running jobs in a subshell, we are allowed to
* refer to the "current" job (it's not really the
* current job in the subshell). It's possible we
* should reset thisjob to -1 on entering the subshell.
*/
(myjobtab == oldjobtab || jobnum != thisjob)) {
returnval = jobnum;
goto done;
}
if (prog)
zwarnnam(prog, "%%%s: no such job", s);
returnval = -1;
goto done;
}
/* "%?" introduces a search string */
if (*s == '?') {
struct process *pn;
for (jobnum = mymaxjob; jobnum >= 0; jobnum--)
if (myjobtab[jobnum].stat &&
!(myjobtab[jobnum].stat & STAT_SUBJOB) &&
jobnum != thisjob)
for (pn = myjobtab[jobnum].procs; pn; pn = pn->next)
if (strstr(pn->text, s + 1)) {
returnval = jobnum;
goto done;
}
if (prog)
zwarnnam(prog, "job not found: %s", s);
returnval = -1;
goto done;
}
jump:
/* anything else is a job name, specified as a string that begins the
job's command */
if ((jobnum = findjobnam(s)) != -1) {
returnval = jobnum;
goto done;
}
/* if we get here, it is because none of the above succeeded and went
to done */
zwarnnam(prog, "job not found: %s", s);
returnval = -1;
done:
return returnval;
}
#ifndef HAVE_SETPROCTITLE
/* For jobs -Z (which modifies the shell's name as seen in ps listings). *
* hackzero is the start of the safely writable space, and hackspace is *
* its length, excluding a final NUL terminator that will always be left. */
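/* Illustrative example (not from this source): `jobs -Z "myprog: phase 2"' *
 * overwrites the start of argv[0] (and, space permitting, the following *
 * environment strings) so that ps(1) shows the new title. */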
static char *hackzero;
static int hackspace;
#endif
/* Initialise job handling. */
/**/
void
init_jobs(char **argv, char **envp)
{
#ifndef HAVE_SETPROCTITLE
char *p, *q;
#endif
size_t init_bytes = MAXJOBS_ALLOC*sizeof(struct job);
/*
* Initialise the job table. If this fails, we're in trouble.
*/
jobtab = (struct job *)zalloc(init_bytes);
if (!jobtab) {
zerr("failed to allocate job table, aborting.");
exit(1);
}
jobtabsize = MAXJOBS_ALLOC;
memset(jobtab, 0, init_bytes);
#ifndef HAVE_SETPROCTITLE
/*
* Initialise the jobs -Z system. The technique is borrowed from
* perl: check through the argument and environment space, to see
* how many of the strings are in contiguous space. This determines
* the value of hackspace.
*/
hackzero = *argv;
p = strchr(hackzero, 0);
while(*++argv) {
q = *argv;
if(q != p+1)
goto done;
p = strchr(q, 0);
}
#if !defined(HAVE_PUTENV) && !defined(USE_SET_UNSET_ENV)
for(; *envp; envp++) {
q = *envp;
if(q != p+1)
goto done;
p = strchr(q, 0);
}
#endif
done:
hackspace = p - hackzero;
#endif
}
/*
* We have run out of space in the job table.
* Expand it by an additional MAXJOBS_ALLOC slots.
*/
/*
* An arbitrary limit on the absolute maximum size of the job table.
* This prevents us taking over the entire universe.
* Ought to be a multiple of MAXJOBS_ALLOC, but doesn't need to be.
*/
#define MAX_MAXJOBS 1000
/**/
int
expandjobtab(void)
{
int newsize = jobtabsize + MAXJOBS_ALLOC;
struct job *newjobtab;
if (newsize > MAX_MAXJOBS)
return 0;
newjobtab = (struct job *)zrealloc(jobtab, newsize * sizeof(struct job));
if (!newjobtab)
return 0;
/*
* Clear the new section of the table; this is necessary for
* the jobs to appear unused.
*/
memset(newjobtab + jobtabsize, 0, MAXJOBS_ALLOC * sizeof(struct job));
jobtab = newjobtab;
jobtabsize = newsize;
return 1;
}
/*
* See if we can reduce the job table. We can if we go over
* a MAXJOBS_ALLOC boundary. However, we leave a boundary,
* currently 20 jobs, so that we have a place for immediate
* expansion and don't play ping pong with the job table size.
*/
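/*
 * Worked example (MAXJOBS_ALLOC assumed to be 50 purely for illustration):
 * with maxjob == 23, jobbound rounds up to 50; the table is shrunk only if
 * jobtabsize currently exceeds that and the bound still leaves more than
 * 20 free slots above maxjob.
 */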
/**/
void
maybeshrinkjobtab(void)
{
int jobbound;
queue_signals();
jobbound = maxjob + MAXJOBS_ALLOC - (maxjob % MAXJOBS_ALLOC);
if (jobbound < jobtabsize && jobbound > maxjob + 20) {
struct job *newjobtab;
/* Hope this can't fail, but anyway... */
newjobtab = (struct job *)zrealloc(jobtab,
jobbound*sizeof(struct job));
if (newjobtab) {
jobtab = newjobtab;
jobtabsize = jobbound;
}
}
unqueue_signals();
}
/*
* Definitions for the background process stuff recorded below.
* This would be more efficient as a hash, but
* - that's quite heavyweight for something not needed very often
* - we need some kind of ordering as POSIX allows us to limit
* the size of the list to the value of _SC_CHILD_MAX and clearly
* we want to clear the oldest first
* - cases with a long list of background jobs where the user doesn't
* wait for a large number, and then does wait for one (the only
* inefficient case) are rare
* - in the context of waiting for an external process, looping
* over a list isn't so very inefficient.
* Enough excuses already.
*/
/* Data in the link list, a key (process ID) / value (exit status) pair. */
struct bgstatus {
pid_t pid;
int status;
};
typedef struct bgstatus *Bgstatus;
/* The list of those entries */
static LinkList bgstatus_list;
/* Count of entries. Reaches value of _SC_CHILD_MAX and stops. */
static long bgstatus_count;
/*
* Remove and free a bgstatus entry.
*/
static void rembgstatus(LinkNode node)
{
zfree(remnode(bgstatus_list, node), sizeof(struct bgstatus));
bgstatus_count--;
}
/*
* Record the status of a background process that exited so we
* can execute the builtin wait for it.
*
* We can't execute the wait builtin for something that exited in the
* foreground as it's not visible to the user, so don't bother recording.
*/
/**/
void
addbgstatus(pid_t pid, int status)
{
static long child_max;
Bgstatus bgstatus_entry;
if (!child_max) {
#ifdef _SC_CHILD_MAX
child_max = sysconf(_SC_CHILD_MAX);
if (!child_max) /* paranoia */
#endif
{
/* Be inventive */
child_max = 1024L;
}
}
if (!bgstatus_list) {
bgstatus_list = znewlinklist();
/*
* We're not always robust about memory failures, but
* this is pretty deep in the shell basics to be failing owing
* to memory, and a failure to wait is reported loudly, so test
* and fail silently here.
*/
if (!bgstatus_list)
return;
}
if (bgstatus_count == child_max) {
/* Overflow. List is in order, remove first */
rembgstatus(firstnode(bgstatus_list));
}
bgstatus_entry = (Bgstatus)zalloc(sizeof(*bgstatus_entry));
if (!bgstatus_entry) {
/* See note above */
return;
}
bgstatus_entry->pid = pid;
bgstatus_entry->status = status;
if (!zaddlinknode(bgstatus_list, bgstatus_entry)) {
zfree(bgstatus_entry, sizeof(*bgstatus_entry));
return;
}
bgstatus_count++;
}
/*
* See if pid has a recorded exit status.
* Note we make no guarantee that the PIDs haven't wrapped, so this
* may not be the right process.
*
* This is only used by wait, which must only work on each
* pid once, so we need to remove the entry if we find it.
*/
static int getbgstatus(pid_t pid)
{
LinkNode node;
Bgstatus bgstatus_entry;
if (!bgstatus_list)
return -1;
for (node = firstnode(bgstatus_list); node; incnode(node)) {
bgstatus_entry = (Bgstatus)getdata(node);
if (bgstatus_entry->pid == pid) {
int status = bgstatus_entry->status;
rembgstatus(node);
return status;
}
}
return -1;
}
/* bg, disown, fg, jobs, wait: most of the job control commands are *
* here. They all take the same type of argument. Exception: wait can *
* take a pid or a job specifier, whereas the others only work on jobs. */
/**/
int
bin_fg(char *name, char **argv, Options ops, int func)
{
int job, lng, firstjob = -1, retval = 0, ofunc = func;
if (OPT_ISSET(ops,'Z')) {
int len;
if(isset(RESTRICTED)) {
zwarnnam(name, "-Z is restricted");
return 1;
}
if(!argv[0] || argv[1]) {
zwarnnam(name, "-Z requires one argument");
return 1;
}
queue_signals();
unmetafy(*argv, &len);
#ifdef HAVE_SETPROCTITLE
setproctitle("%s", *argv);
#else
if(len > hackspace)
len = hackspace;
memcpy(hackzero, *argv, len);
memset(hackzero + len, 0, hackspace - len);
#endif
unqueue_signals();
return 0;
}
if (func == BIN_JOBS) {
lng = (OPT_ISSET(ops,'l')) ? 1 : (OPT_ISSET(ops,'p')) ? 2 : 0;
if (OPT_ISSET(ops,'d'))
lng |= 4;
} else {
lng = !!isset(LONGLISTJOBS);
}
if ((func == BIN_FG || func == BIN_BG) && !jobbing) {
/* oops... maybe bg and fg should have been disabled? */
zwarnnam(name, "no job control in this shell.");
return 1;
}
queue_signals();
/*
* In case any processes changed state recently, wait for them.
* This updates stopped processes (but we should have been
* signalled about those, up to inevitable races), and also
* continued processes if that feature is available.
*/
wait_for_processes();
/* If necessary, update job table. */
if (unset(NOTIFY))
scanjobs();
if (func != BIN_JOBS || isset(MONITOR) || !oldmaxjob)
setcurjob();
if (func == BIN_JOBS)
/* If you immediately type "exit" after "jobs", this *
* will prevent zexit from complaining about stopped jobs */
stopmsg = 2;
if (!*argv) {
/* This block handles all of the default cases (no arguments). bg,
fg and disown act on the current job, and jobs and wait act on all the
jobs. */
if (func == BIN_FG || func == BIN_BG || func == BIN_DISOWN) {
/* W.r.t. the above comment, we'd better have a current job at this
point or else. */
if (curjob == -1 || (jobtab[curjob].stat & STAT_NOPRINT)) {
zwarnnam(name, "no current job");
unqueue_signals();
return 1;
}
firstjob = curjob;
} else if (func == BIN_JOBS) {
/* List jobs. */
struct job *jobptr;
int curmaxjob, ignorejob;
if (unset(MONITOR) && oldmaxjob) {
jobptr = oldjobtab;
curmaxjob = oldmaxjob ? oldmaxjob - 1 : 0;
ignorejob = 0;
} else {
jobptr = jobtab;
curmaxjob = maxjob;
ignorejob = thisjob;
}
for (job = 0; job <= curmaxjob; job++, jobptr++)
if (job != ignorejob && jobptr->stat) {
if ((!OPT_ISSET(ops,'r') && !OPT_ISSET(ops,'s')) ||
(OPT_ISSET(ops,'r') && OPT_ISSET(ops,'s')) ||
(OPT_ISSET(ops,'r') &&
!(jobptr->stat & STAT_STOPPED)) ||
(OPT_ISSET(ops,'s') && jobptr->stat & STAT_STOPPED))
printjob(jobptr, lng, 2);
}
unqueue_signals();
return 0;
} else { /* Must be BIN_WAIT, so wait for all jobs */
for (job = 0; job <= maxjob; job++)
if (job != thisjob && jobtab[job].stat &&
!(jobtab[job].stat & STAT_NOPRINT))
retval = zwaitjob(job, 1);
unqueue_signals();
return retval;
}
}
/* Defaults have been handled. We now have an argument or two, or three...
In the default case for bg, fg and disown, the argument will be provided by
the above routine. We now loop over the arguments. */
for (; (firstjob != -1) || *argv; (void)(*argv && argv++)) {
int stopped, ocj = thisjob, jstat;
func = ofunc;
if (func == BIN_WAIT && isanum(*argv)) {
/* wait can take a pid; the others can't. */
pid_t pid = (long)atoi(*argv);
Job j;
Process p;
if (findproc(pid, &j, &p, 0)) {
if (j->stat & STAT_STOPPED) {
retval = (killjb(j, SIGCONT) != 0);
if (retval == 0)
makerunning(j);
}
if (retval == 0) {
/*
* returns 0 for normal exit, else signal+128
* in which case we should return that status.
*/
retval = waitforpid(pid, 1);
}
if (retval == 0) {
if ((retval = getbgstatus(pid)) < 0) {
retval = lastval2;
}
}
} else if ((retval = getbgstatus(pid)) < 0) {
zwarnnam(name, "pid %d is not a child of this shell", pid);
/* presumably lastval2 doesn't tell us a heck of a lot? */
retval = 1;
}
thisjob = ocj;
continue;
}
if (func != BIN_JOBS && oldjobtab != NULL) {
zwarnnam(name, "can't manipulate jobs in subshell");
unqueue_signals();
return 1;
}
/* The only type of argument allowed now is a job spec. Check it. */
job = (*argv) ? getjob(*argv, name) : firstjob;
firstjob = -1;
if (job == -1) {
retval = 1;
break;
}
jstat = oldjobtab ? oldjobtab[job].stat : jobtab[job].stat;
if (!(jstat & STAT_INUSE) ||
(jstat & STAT_NOPRINT)) {
zwarnnam(name, "%s: no such job", *argv);
unqueue_signals();
return 1;
}
/* If AUTO_CONTINUE is set (automatically make stopped jobs running
* on disown), we actually do a bg and then delete the job table entry. */
if (isset(AUTOCONTINUE) && func == BIN_DISOWN &&
jstat & STAT_STOPPED)
func = BIN_BG;
/* We have a job number. Now decide what to do with it. */
switch (func) {
case BIN_FG:
case BIN_BG:
case BIN_WAIT:
if (func == BIN_BG) {
jobtab[job].stat |= STAT_NOSTTY;
jobtab[job].stat &= ~STAT_CURSH;
}
if ((stopped = (jobtab[job].stat & STAT_STOPPED))) {
makerunning(jobtab + job);
if (func == BIN_BG) {
/* Set $! to indicate this was backgrounded */
Process pn = jobtab[job].procs;
for (;;) {
Process next = pn->next;
if (!next) {
lastpid = (zlong) pn->pid;
break;
}
pn = next;
}
}
} else if (func == BIN_BG) {
/* Silly to bg a job already running. */
zwarnnam(name, "job already in background");
thisjob = ocj;
unqueue_signals();
return 1;
}
/* It's time to shuffle the jobs around! Reset the current job,
and pick a sensible secondary job. */
if (curjob == job) {
curjob = prevjob;
prevjob = (func == BIN_BG) ? -1 : job;
}
if (prevjob == job || prevjob == -1)
setprevjob();
if (curjob == -1) {
curjob = prevjob;
setprevjob();
}
if (func != BIN_WAIT)
/* for bg and fg -- show the job we are operating on */
printjob(jobtab + job, (stopped) ? -1 : lng, 3);
if (func != BIN_BG) { /* fg or wait */
if (jobtab[job].pwd && strcmp(jobtab[job].pwd, pwd)) {
FILE *fout = (func == BIN_JOBS || !shout) ? stdout : shout;
fprintf(fout, "(pwd : ");
fprintdir(jobtab[job].pwd, fout);
fprintf(fout, ")\n");
fflush(fout);
}
if (func != BIN_WAIT) { /* fg */
thisjob = job;
if ((jobtab[job].stat & STAT_SUPERJOB) &&
((!jobtab[job].procs->next ||
(jobtab[job].stat & STAT_SUBLEADER) ||
killpg(jobtab[job].gleader, 0) == -1)) &&
jobtab[jobtab[job].other].gleader)
attachtty(jobtab[jobtab[job].other].gleader);
else
attachtty(jobtab[job].gleader);
}
}
if (stopped) {
if (func != BIN_BG && jobtab[job].ty)
settyinfo(jobtab[job].ty);
killjb(jobtab + job, SIGCONT);
}
if (func == BIN_WAIT)
{
retval = zwaitjob(job, 1);
if (!retval)
retval = lastval2;
}
else if (func != BIN_BG) {
/*
* HERE: there used not to be an "else" above. How
* could it be right to wait for the foreground job
* when we've just been told to wait for another
* job (and done it)?
*/
waitjobs();
retval = lastval2;
} else if (ofunc == BIN_DISOWN)
deletejob(jobtab + job, 1);
break;
case BIN_JOBS:
printjob(job + (oldjobtab ? oldjobtab : jobtab), lng, 2);
break;
case BIN_DISOWN:
if (jobtab[job].stat & STAT_SUPERJOB) {
jobtab[job].stat |= STAT_DISOWN;
continue;
}
if (jobtab[job].stat & STAT_STOPPED) {
char buf[20], *pids = "";
if (jobtab[job].stat & STAT_SUPERJOB) {
Process pn;
for (pn = jobtab[jobtab[job].other].procs; pn; pn = pn->next) {
sprintf(buf, " -%d", pn->pid);
pids = dyncat(pids, buf);
}
for (pn = jobtab[job].procs; pn->next; pn = pn->next) {
sprintf(buf, " %d", pn->pid);
pids = dyncat(pids, buf);
}
if (!jobtab[jobtab[job].other].procs && pn) {
sprintf(buf, " %d", pn->pid);
pids = dyncat(pids, buf);
}
} else {
sprintf(buf, " -%d", jobtab[job].gleader);
pids = buf;
}
zwarnnam(name,
#ifdef USE_SUSPENDED
"warning: job is suspended, use `kill -CONT%s' to resume",
#else
"warning: job is stopped, use `kill -CONT%s' to resume",
#endif
pids);
}
deletejob(jobtab + job, 1);
break;
}
thisjob = ocj;
}
unqueue_signals();
return retval;
}
static const struct {
const char *name;
int num;
} alt_sigs[] = {
#if defined(SIGCHLD) && defined(SIGCLD)
#if SIGCHLD == SIGCLD
{ "CLD", SIGCLD },
#endif
#endif
#if defined(SIGPOLL) && defined(SIGIO)
#if SIGPOLL == SIGIO
{ "IO", SIGIO },
#endif
#endif
#if !defined(SIGERR)
/*
* If SIGERR is not defined by the operating system, use it
* as an alias for SIGZERR.
*/
{ "ERR", SIGZERR },
#endif
{ NULL, 0 }
};
/* kill: send a signal to a process. The process(es) may be specified *
* by job specifier (see above) or pid. A signal, defaulting to *
* SIGTERM, may be specified by name or number, preceded by a dash. */
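/* Illustrative examples: *
 * kill %1 sends SIGTERM (the default) to job 1; *
 * kill -HUP 1234 sends SIGHUP to pid 1234; *
 * kill -s INT %2 sends SIGINT to job 2; *
 * kill -l lists the known signal names. */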
/**/
int
bin_kill(char *nam, char **argv, UNUSED(Options ops), UNUSED(int func))
{
int sig = SIGTERM;
int returnval = 0;
/* check for, and interpret, a signal specifier */
if (*argv && **argv == '-') {
if (idigit((*argv)[1])) {
char *endp;
/* signal specified by number */
sig = zstrtol(*argv + 1, &endp, 10);
if (*endp) {
zwarnnam(nam, "invalid signal number: %s", *argv);
return 1;
}
} else if ((*argv)[1] != '-' || (*argv)[2]) {
char *signame;
/* with argument "-l" display the list of signal names */
if ((*argv)[1] == 'l' && (*argv)[2] == '\0') {
if (argv[1]) {
while (*++argv) {
sig = zstrtol(*argv, &signame, 10);
if (signame == *argv) {
if (!strncmp(signame, "SIG", 3))
signame += 3;
for (sig = 1; sig <= SIGCOUNT; sig++)
if (!strcasecmp(sigs[sig], signame))
break;
if (sig > SIGCOUNT) {
int i;
for (i = 0; alt_sigs[i].name; i++)
if (!strcasecmp(alt_sigs[i].name, signame))
{
sig = alt_sigs[i].num;
break;
}
}
if (sig > SIGCOUNT) {
zwarnnam(nam, "unknown signal: SIG%s",
signame);
returnval++;
} else
printf("%d\n", sig);
} else {
if (*signame) {
zwarnnam(nam, "unknown signal: SIG%s",
signame);
returnval++;
} else {
if (WIFSIGNALED(sig))
sig = WTERMSIG(sig);
else if (WIFSTOPPED(sig))
sig = WSTOPSIG(sig);
if (1 <= sig && sig <= SIGCOUNT)
printf("%s\n", sigs[sig]);
else
printf("%d\n", sig);
}
}
}
return returnval;
}
printf("%s", sigs[1]);
for (sig = 2; sig <= SIGCOUNT; sig++)
printf(" %s", sigs[sig]);
putchar('\n');
return 0;
}
if ((*argv)[1] == 'n' && (*argv)[2] == '\0') {
char *endp;
if (!*++argv) {
zwarnnam(nam, "-n: argument expected");
return 1;
}
sig = zstrtol(*argv, &endp, 10);
if (*endp) {
zwarnnam(nam, "invalid signal number: %s", *argv);
return 1;
}
} else {
if (!((*argv)[1] == 's' && (*argv)[2] == '\0'))
signame = *argv + 1;
else if (!(*++argv)) {
zwarnnam(nam, "-s: argument expected");
return 1;
} else
signame = *argv;
if (!*signame) {
zwarnnam(nam, "-: signal name expected");
return 1;
}
signame = casemodify(signame, CASMOD_UPPER);
if (!strncmp(signame, "SIG", 3))
signame+=3;
/* check for signal matching specified name */
for (sig = 1; sig <= SIGCOUNT; sig++)
if (!strcmp(*(sigs + sig), signame))
break;
if (*signame == '0' && !signame[1])
sig = 0;
if (sig > SIGCOUNT) {
int i;
for (i = 0; alt_sigs[i].name; i++)
if (!strcmp(alt_sigs[i].name, signame))
{
sig = alt_sigs[i].num;
break;
}
}
if (sig > SIGCOUNT) {
zwarnnam(nam, "unknown signal: SIG%s", signame);
zwarnnam(nam, "type kill -l for a list of signals");
return 1;
}
}
}
argv++;
}
/* Discard the standard "-" and "--" option breaks */
if (*argv && (*argv)[0] == '-' && (!(*argv)[1] || (*argv)[1] == '-'))
argv++;
if (!*argv) {
zwarnnam(nam, "not enough arguments");
return 1;
}
queue_signals();
setcurjob();
/* Remaining arguments specify processes. Loop over them, and send the
signal (number sig) to each process. */
for (; *argv; argv++) {
if (**argv == '%') {
/* job specifier introduced by '%' */
int p;
if ((p = getjob(*argv, nam)) == -1) {
returnval++;
continue;
}
if (killjb(jobtab + p, sig) == -1) {
zwarnnam("kill", "kill %s failed: %e", *argv, errno);
returnval++;
continue;
}
/* automatically update the job table if sending a SIGCONT to a
job, and send the job a SIGCONT if sending it a non-stopping
signal. */
if (jobtab[p].stat & STAT_STOPPED) {
#ifndef WIFCONTINUED
/* With WIFCONTINUED we find this out properly */
if (sig == SIGCONT)
makerunning(jobtab + p);
#endif
if (sig != SIGKILL && sig != SIGCONT && sig != SIGTSTP
&& sig != SIGTTOU && sig != SIGTTIN && sig != SIGSTOP)
killjb(jobtab + p, SIGCONT);
}
} else if (!isanum(*argv)) {
zwarnnam("kill", "illegal pid: %s", *argv);
returnval++;
} else {
int pid = atoi(*argv);
if (kill(pid, sig) == -1) {
zwarnnam("kill", "kill %s failed: %e", *argv, errno);
returnval++;
}
#ifndef WIFCONTINUED
else if (sig == SIGCONT) {
Job jn;
Process pn;
/* With WIFCONTINUED we find this out properly */
if (findproc(pid, &jn, &pn, 0)) {
if (WIFSTOPPED(pn->status))
pn->status = SP_RUNNING;
}
}
#endif
}
}
unqueue_signals();
return returnval < 126 ? returnval : 1;
}
/* Get a signal number from a string */
/**/
mod_export int
getsignum(const char *s)
{
int x, i;
/* check for a signal specified by number */
x = atoi(s);
if (idigit(*s) && x >= 0 && x < VSIGCOUNT)
return x;
/* search for signal by name */
if (!strncmp(s, "SIG", 3))
s += 3;
for (i = 0; i < VSIGCOUNT; i++)
if (!strcmp(s, sigs[i]))
return i;
for (i = 0; alt_sigs[i].name; i++)
{
if (!strcmp(s, alt_sigs[i].name))
return alt_sigs[i].num;
}
/* no matching signal */
return -1;
}
/* Get the name for a signal. */
/**/
mod_export const char *
getsigname(int sig)
{
if (sigtrapped[sig] & ZSIG_ALIAS)
{
int i;
for (i = 0; alt_sigs[i].name; i++)
if (sig == alt_sigs[i].num)
return alt_sigs[i].name;
}
else
return sigs[sig];
/* shouldn't reach here */
#ifdef DEBUG
dputs("Bad alias flag for signal");
#endif
return "";
}
/* Get the function node for a trap, taking care about alternative names */
/**/
HashNode
gettrapnode(int sig, int ignoredisable)
{
char fname[20];
HashNode hn;
HashNode (*getptr)(HashTable ht, const char *name);
int i;
if (ignoredisable)
getptr = shfunctab->getnode2;
else
getptr = shfunctab->getnode;
sprintf(fname, "TRAP%s", sigs[sig]);
if ((hn = getptr(shfunctab, fname)))
return hn;
for (i = 0; alt_sigs[i].name; i++) {
if (alt_sigs[i].num == sig) {
sprintf(fname, "TRAP%s", alt_sigs[i].name);
if ((hn = getptr(shfunctab, fname)))
return hn;
}
}
return NULL;
}
/* Remove a TRAP function under any name for the signal */
/**/
void
removetrapnode(int sig)
{
HashNode hn = gettrapnode(sig, 1);
if (hn) {
shfunctab->removenode(shfunctab, hn->nam);
shfunctab->freenode(hn);
}
}
/* Suspend this shell */
/**/
int
bin_suspend(char *name, UNUSED(char **argv), Options ops, UNUSED(int func))
{
/* won't suspend a login shell, unless forced */
if (islogin && !OPT_ISSET(ops,'f')) {
zwarnnam(name, "can't suspend login shell");
return 1;
}
if (jobbing) {
/* stop ignoring signals */
signal_default(SIGTTIN);
signal_default(SIGTSTP);
signal_default(SIGTTOU);
/* Move ourselves back to the process group we came from */
release_pgrp();
}
/* suspend ourselves with a SIGTSTP */
killpg(origpgrp, SIGTSTP);
if (jobbing) {
acquire_pgrp();
/* restore signal handling */
signal_ignore(SIGTTOU);
signal_ignore(SIGTSTP);
signal_ignore(SIGTTIN);
}
return 0;
}
/* find a job named s */
/**/
int
findjobnam(const char *s)
{
int jobnum;
for (jobnum = maxjob; jobnum >= 0; jobnum--)
if (!(jobtab[jobnum].stat & (STAT_SUBJOB | STAT_NOPRINT)) &&
jobtab[jobnum].stat && jobtab[jobnum].procs && jobnum != thisjob &&
jobtab[jobnum].procs->text[0] && strpfx(s, jobtab[jobnum].procs->text))
return jobnum;
return -1;
}
/* make sure we are a process group leader by creating a new process
group if necessary */
/**/
void
acquire_pgrp(void)
{
long ttpgrp;
sigset_t blockset, oldset;
if ((mypgrp = GETPGRP()) >= 0) {
long lastpgrp = mypgrp;
sigemptyset(&blockset);
sigaddset(&blockset, SIGTTIN);
sigaddset(&blockset, SIGTTOU);
sigaddset(&blockset, SIGTSTP);
oldset = signal_block(blockset);
while ((ttpgrp = gettygrp()) != -1 && ttpgrp != mypgrp) {
mypgrp = GETPGRP();
if (mypgrp == mypid) {
if (!interact)
break; /* attachtty() will be a no-op, give up */
signal_setmask(oldset);
attachtty(mypgrp); /* Might generate SIGT* */
signal_block(blockset);
}
if (mypgrp == gettygrp())
break;
signal_setmask(oldset);
if (read(0, NULL, 0) != 0) {} /* Might generate SIGT* */
signal_block(blockset);
mypgrp = GETPGRP();
if (mypgrp == lastpgrp && !interact)
break; /* Unlikely that pgrp will ever change */
lastpgrp = mypgrp;
}
if (mypgrp != mypid) {
if (setpgrp(0, 0) == 0) {
mypgrp = mypid;
attachtty(mypgrp);
} else
opts[MONITOR] = 0;
}
signal_setmask(oldset);
} else
opts[MONITOR] = 0;
}
/* revert back to the process group we came from (before acquire_pgrp) */
/**/
void
release_pgrp(void)
{
if (origpgrp != mypgrp) {
/* in linux pid namespaces, origpgrp may never have been set */
if (origpgrp) {
attachtty(origpgrp);
setpgrp(0, origpgrp);
}
mypgrp = origpgrp;
}
}
<|start_filename|>zmodules/Src/signames1.awk<|end_filename|>
# This is an awk script which finds out what the possibilities for
# the signal names are, and dumps them out so that cpp can turn them
# into numbers. Since we don't need to decide here what the
# real signals are, we can afford to be generous about definitions,
# in case the definitions are in terms of other definitions.
# However, we need to avoid definitions with parentheses, which will
# mess up the syntax.
BEGIN { printf "#include <signal.h>\n\n" }
/^[\t ]*#[\t ]*define[\t _]*SIG[A-Z][A-Z0-9]*[\t ][\t ]*[^(\t ]/ {
sigindex = index($0, "SIG")
sigtail = substr($0, sigindex, 80)
split(sigtail, tmp)
signam = substr(tmp[1], 4, 20)
if (substr($0, sigindex-1, 1) == "_")
printf("XXNAMES XXSIG%s _SIG%s\n", signam, signam)
else
printf("XXNAMES XXSIG%s SIG%s\n", signam, signam)
}
<|start_filename|>doc/zsdoc/zinit.zsh.html<|end_filename|>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta name="generator" content="Asciidoctor 2.0.16" />
<title>zinit.zsh(1)</title>
<link
rel="stylesheet"
href="https://fonts.googleapis.com/css?family=Open+Sans:300,300italic,400,400italic,600,600italic%7CNoto+Serif:400,400italic,700,700italic%7CDroid+Sans+Mono:400,700"
/>
<style>
/*! Asciidoctor default stylesheet | MIT License | https://asciidoctor.org */
/* Uncomment the following line when using as a custom stylesheet */
/* @import "https://fonts.googleapis.com/css?family=Open+Sans:300,300italic,400,400italic,600,600italic%7CNoto+Serif:400,400italic,700,700italic%7CDroid+Sans+Mono:400,700"; */
html {
font-family: sans-serif;
-webkit-text-size-adjust: 100%;
}
a {
background: none;
}
a:focus {
outline: thin dotted;
}
a:active,
a:hover {
outline: 0;
}
h1 {
font-size: 2em;
margin: 0.67em 0;
}
b,
strong {
font-weight: bold;
}
abbr {
font-size: 0.9em;
}
abbr[title] {
cursor: help;
border-bottom: 1px dotted #dddddf;
text-decoration: none;
}
dfn {
font-style: italic;
}
hr {
height: 0;
}
mark {
background: #ff0;
color: #000;
}
code,
kbd,
pre,
samp {
font-family: monospace;
font-size: 1em;
}
pre {
white-space: pre-wrap;
}
q {
quotes: "\201C""\201D""\2018""\2019";
}
small {
font-size: 80%;
}
sub,
sup {
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sup {
top: -0.5em;
}
sub {
bottom: -0.25em;
}
img {
border: 0;
}
svg:not(:root) {
overflow: hidden;
}
figure {
margin: 0;
}
audio,
video {
display: inline-block;
}
audio:not([controls]) {
display: none;
height: 0;
}
fieldset {
border: 1px solid silver;
margin: 0 2px;
padding: 0.35em 0.625em 0.75em;
}
legend {
border: 0;
padding: 0;
}
button,
input,
select,
textarea {
font-family: inherit;
font-size: 100%;
margin: 0;
}
button,
input {
line-height: normal;
}
button,
select {
text-transform: none;
}
button,
html input[type="button"],
input[type="reset"],
input[type="submit"] {
-webkit-appearance: button;
cursor: pointer;
}
button[disabled],
html input[disabled] {
cursor: default;
}
input[type="checkbox"],
input[type="radio"] {
padding: 0;
}
button::-moz-focus-inner,
input::-moz-focus-inner {
border: 0;
padding: 0;
}
textarea {
overflow: auto;
vertical-align: top;
}
table {
border-collapse: collapse;
border-spacing: 0;
}
*,
::before,
::after {
box-sizing: border-box;
}
html,
body {
font-size: 100%;
}
body {
background: #fff;
color: rgba(0, 0, 0, 0.8);
padding: 0;
margin: 0;
font-family: "Noto Serif", "DejaVu Serif", serif;
line-height: 1;
position: relative;
cursor: auto;
-moz-tab-size: 4;
-o-tab-size: 4;
tab-size: 4;
word-wrap: anywhere;
-moz-osx-font-smoothing: grayscale;
-webkit-font-smoothing: antialiased;
}
a:hover {
cursor: pointer;
}
img,
object,
embed {
max-width: 100%;
height: auto;
}
object,
embed {
height: 100%;
}
img {
-ms-interpolation-mode: bicubic;
}
.left {
float: left !important;
}
.right {
float: right !important;
}
.text-left {
text-align: left !important;
}
.text-right {
text-align: right !important;
}
.text-center {
text-align: center !important;
}
.text-justify {
text-align: justify !important;
}
.hide {
display: none;
}
img,
object,
svg {
display: inline-block;
vertical-align: middle;
}
textarea {
height: auto;
min-height: 50px;
}
select {
width: 100%;
}
.subheader,
.admonitionblock td.content > .title,
.audioblock > .title,
.exampleblock > .title,
.imageblock > .title,
.listingblock > .title,
.literalblock > .title,
.stemblock > .title,
.openblock > .title,
.paragraph > .title,
.quoteblock > .title,
table.tableblock > .title,
.verseblock > .title,
.videoblock > .title,
.dlist > .title,
.olist > .title,
.ulist > .title,
.qlist > .title,
.hdlist > .title {
line-height: 1.45;
color: #7a2518;
font-weight: 400;
margin-top: 0;
margin-bottom: 0.25em;
}
div,
dl,
dt,
dd,
ul,
ol,
li,
h1,
h2,
h3,
#toctitle,
.sidebarblock > .content > .title,
h4,
h5,
h6,
pre,
form,
p,
blockquote,
th,
td {
margin: 0;
padding: 0;
}
a {
color: #2156a5;
text-decoration: underline;
line-height: inherit;
}
a:hover,
a:focus {
color: #1d4b8f;
}
a img {
border: 0;
}
p {
line-height: 1.6;
margin-bottom: 1.25em;
text-rendering: optimizeLegibility;
}
p aside {
font-size: 0.875em;
line-height: 1.35;
font-style: italic;
}
h1,
h2,
h3,
#toctitle,
.sidebarblock > .content > .title,
h4,
h5,
h6 {
font-family: "Open Sans", "DejaVu Sans", sans-serif;
font-weight: 300;
font-style: normal;
color: #ba3925;
text-rendering: optimizeLegibility;
margin-top: 1em;
margin-bottom: 0.5em;
line-height: 1.0125em;
}
h1 small,
h2 small,
h3 small,
#toctitle small,
.sidebarblock > .content > .title small,
h4 small,
h5 small,
h6 small {
font-size: 60%;
color: #e99b8f;
line-height: 0;
}
h1 {
font-size: 2.125em;
}
h2 {
font-size: 1.6875em;
}
h3,
#toctitle,
.sidebarblock > .content > .title {
font-size: 1.375em;
}
h4,
h5 {
font-size: 1.125em;
}
h6 {
font-size: 1em;
}
hr {
border: solid #dddddf;
border-width: 1px 0 0;
clear: both;
margin: 1.25em 0 1.1875em;
}
em,
i {
font-style: italic;
line-height: inherit;
}
strong,
b {
font-weight: bold;
line-height: inherit;
}
small {
font-size: 60%;
line-height: inherit;
}
code {
font-family: "Droid Sans Mono", "DejaVu Sans Mono", monospace;
font-weight: 400;
color: rgba(0, 0, 0, 0.9);
}
ul,
ol,
dl {
line-height: 1.6;
margin-bottom: 1.25em;
list-style-position: outside;
font-family: inherit;
}
ul,
ol {
margin-left: 1.5em;
}
ul li ul,
ul li ol {
margin-left: 1.25em;
margin-bottom: 0;
}
ul.square li ul,
ul.circle li ul,
ul.disc li ul {
list-style: inherit;
}
ul.square {
list-style-type: square;
}
ul.circle {
list-style-type: circle;
}
ul.disc {
list-style-type: disc;
}
ol li ul,
ol li ol {
margin-left: 1.25em;
margin-bottom: 0;
}
dl dt {
margin-bottom: 0.3125em;
font-weight: bold;
}
dl dd {
margin-bottom: 1.25em;
}
blockquote {
margin: 0 0 1.25em;
padding: 0.5625em 1.25em 0 1.1875em;
border-left: 1px solid #ddd;
}
blockquote,
blockquote p {
line-height: 1.6;
color: rgba(0, 0, 0, 0.85);
}
@media screen and (min-width: 768px) {
h1,
h2,
h3,
#toctitle,
.sidebarblock > .content > .title,
h4,
h5,
h6 {
line-height: 1.2;
}
h1 {
font-size: 2.75em;
}
h2 {
font-size: 2.3125em;
}
h3,
#toctitle,
.sidebarblock > .content > .title {
font-size: 1.6875em;
}
h4 {
font-size: 1.4375em;
}
}
table {
background: #fff;
margin-bottom: 1.25em;
border: 1px solid #dedede;
word-wrap: normal;
}
table thead,
table tfoot {
background: #f7f8f7;
}
table thead tr th,
table thead tr td,
table tfoot tr th,
table tfoot tr td {
padding: 0.5em 0.625em 0.625em;
font-size: inherit;
color: rgba(0, 0, 0, 0.8);
text-align: left;
}
table tr th,
table tr td {
padding: 0.5625em 0.625em;
font-size: inherit;
color: rgba(0, 0, 0, 0.8);
}
table tr.even,
table tr.alt {
background: #f8f8f7;
}
table thead tr th,
table tfoot tr th,
table tbody tr td,
table tr td,
table tfoot tr td {
line-height: 1.6;
}
h1,
h2,
h3,
#toctitle,
.sidebarblock > .content > .title,
h4,
h5,
h6 {
line-height: 1.2;
word-spacing: -0.05em;
}
h1 strong,
h2 strong,
h3 strong,
#toctitle strong,
.sidebarblock > .content > .title strong,
h4 strong,
h5 strong,
h6 strong {
font-weight: 400;
}
.center {
margin-left: auto;
margin-right: auto;
}
.stretch {
width: 100%;
}
.clearfix::before,
.clearfix::after,
.float-group::before,
.float-group::after {
content: " ";
display: table;
}
.clearfix::after,
.float-group::after {
clear: both;
}
:not(pre).nobreak {
word-wrap: normal;
}
:not(pre).nowrap {
white-space: nowrap;
}
:not(pre).pre-wrap {
white-space: pre-wrap;
}
:not(pre):not([class^="L"]) > code {
font-size: 0.9375em;
font-style: normal !important;
letter-spacing: 0;
padding: 0.1em 0.5ex;
word-spacing: -0.15em;
background: #f7f7f8;
border-radius: 4px;
line-height: 1.45;
text-rendering: optimizeSpeed;
}
pre {
color: rgba(0, 0, 0, 0.9);
font-family: "Droid Sans Mono", "DejaVu Sans Mono", monospace;
line-height: 1.45;
text-rendering: optimizeSpeed;
}
pre code,
pre pre {
color: inherit;
font-size: inherit;
line-height: inherit;
}
pre > code {
display: block;
}
pre.nowrap,
pre.nowrap pre {
white-space: pre;
word-wrap: normal;
}
em em {
font-style: normal;
}
strong strong {
font-weight: 400;
}
.keyseq {
color: rgba(51, 51, 51, 0.8);
}
kbd {
font-family: "Droid Sans Mono", "DejaVu Sans Mono", monospace;
display: inline-block;
color: rgba(0, 0, 0, 0.8);
font-size: 0.65em;
line-height: 1.45;
background: #f7f7f7;
border: 1px solid #ccc;
border-radius: 3px;
box-shadow: 0 1px 0 rgba(0, 0, 0, 0.2), inset 0 0 0 0.1em #fff;
margin: 0 0.15em;
padding: 0.2em 0.5em;
vertical-align: middle;
position: relative;
top: -0.1em;
white-space: nowrap;
}
.keyseq kbd:first-child {
margin-left: 0;
}
.keyseq kbd:last-child {
margin-right: 0;
}
.menuseq,
.menuref {
color: #000;
}
.menuseq b:not(.caret),
.menuref {
font-weight: inherit;
}
.menuseq {
word-spacing: -0.02em;
}
.menuseq b.caret {
font-size: 1.25em;
line-height: 0.8;
}
.menuseq i.caret {
font-weight: bold;
text-align: center;
width: 0.45em;
}
b.button::before,
b.button::after {
position: relative;
top: -1px;
font-weight: 400;
}
b.button::before {
content: "[";
padding: 0 3px 0 2px;
}
b.button::after {
content: "]";
padding: 0 2px 0 3px;
}
p a > code:hover {
color: rgba(0, 0, 0, 0.9);
}
#header,
#content,
#footnotes,
#footer {
width: 100%;
margin: 0 auto;
max-width: 62.5em;
*zoom: 1;
position: relative;
padding-left: 0.9375em;
padding-right: 0.9375em;
}
#header::before,
#header::after,
#content::before,
#content::after,
#footnotes::before,
#footnotes::after,
#footer::before,
#footer::after {
content: " ";
display: table;
}
#header::after,
#content::after,
#footnotes::after,
#footer::after {
clear: both;
}
#content {
margin-top: 1.25em;
}
#content::before {
content: none;
}
#header > h1:first-child {
color: rgba(0, 0, 0, 0.85);
margin-top: 2.25rem;
margin-bottom: 0;
}
#header > h1:first-child + #toc {
margin-top: 8px;
border-top: 1px solid #dddddf;
}
#header > h1:only-child,
body.toc2 #header > h1:nth-last-child(2) {
border-bottom: 1px solid #dddddf;
padding-bottom: 8px;
}
#header .details {
border-bottom: 1px solid #dddddf;
line-height: 1.45;
padding-top: 0.25em;
padding-bottom: 0.25em;
padding-left: 0.25em;
color: rgba(0, 0, 0, 0.6);
display: flex;
flex-flow: row wrap;
}
#header .details span:first-child {
margin-left: -0.125em;
}
#header .details span.email a {
color: rgba(0, 0, 0, 0.85);
}
#header .details br {
display: none;
}
#header .details br + span::before {
content: "\00a0\2013\00a0";
}
#header .details br + span.author::before {
content: "\00a0\22c5\00a0";
color: rgba(0, 0, 0, 0.85);
}
#header .details br + span#revremark::before {
content: "\00a0|\00a0";
}
#header #revnumber {
text-transform: capitalize;
}
#header #revnumber::after {
content: "\00a0";
}
#content > h1:first-child:not([class]) {
color: rgba(0, 0, 0, 0.85);
border-bottom: 1px solid #dddddf;
padding-bottom: 8px;
margin-top: 0;
padding-top: 1rem;
margin-bottom: 1.25rem;
}
#toc {
border-bottom: 1px solid #e7e7e9;
padding-bottom: 0.5em;
}
#toc > ul {
margin-left: 0.125em;
}
#toc ul.sectlevel0 > li > a {
font-style: italic;
}
#toc ul.sectlevel0 ul.sectlevel1 {
margin: 0.5em 0;
}
#toc ul {
font-family: "Open Sans", "DejaVu Sans", sans-serif;
list-style-type: none;
}
#toc li {
line-height: 1.3334;
margin-top: 0.3334em;
}
#toc a {
text-decoration: none;
}
#toc a:active {
text-decoration: underline;
}
#toctitle {
color: #7a2518;
font-size: 1.2em;
}
@media screen and (min-width: 768px) {
#toctitle {
font-size: 1.375em;
}
body.toc2 {
padding-left: 15em;
padding-right: 0;
}
#toc.toc2 {
margin-top: 0 !important;
background: #f8f8f7;
position: fixed;
width: 15em;
left: 0;
top: 0;
border-right: 1px solid #e7e7e9;
border-top-width: 0 !important;
border-bottom-width: 0 !important;
z-index: 1000;
padding: 1.25em 1em;
height: 100%;
overflow: auto;
}
#toc.toc2 #toctitle {
margin-top: 0;
margin-bottom: 0.8rem;
font-size: 1.2em;
}
#toc.toc2 > ul {
font-size: 0.9em;
margin-bottom: 0;
}
#toc.toc2 ul ul {
margin-left: 0;
padding-left: 1em;
}
#toc.toc2 ul.sectlevel0 ul.sectlevel1 {
padding-left: 0;
margin-top: 0.5em;
margin-bottom: 0.5em;
}
body.toc2.toc-right {
padding-left: 0;
padding-right: 15em;
}
body.toc2.toc-right #toc.toc2 {
border-right-width: 0;
border-left: 1px solid #e7e7e9;
left: auto;
right: 0;
}
}
@media screen and (min-width: 1280px) {
body.toc2 {
padding-left: 20em;
padding-right: 0;
}
#toc.toc2 {
width: 20em;
}
#toc.toc2 #toctitle {
font-size: 1.375em;
}
#toc.toc2 > ul {
font-size: 0.95em;
}
#toc.toc2 ul ul {
padding-left: 1.25em;
}
body.toc2.toc-right {
padding-left: 0;
padding-right: 20em;
}
}
#content #toc {
border: 1px solid #e0e0dc;
margin-bottom: 1.25em;
padding: 1.25em;
background: #f8f8f7;
border-radius: 4px;
}
#content #toc > :first-child {
margin-top: 0;
}
#content #toc > :last-child {
margin-bottom: 0;
}
#footer {
max-width: none;
background: rgba(0, 0, 0, 0.8);
padding: 1.25em;
}
#footer-text {
color: hsla(0, 0%, 100%, 0.8);
line-height: 1.44;
}
#content {
margin-bottom: 0.625em;
}
.sect1 {
padding-bottom: 0.625em;
}
@media screen and (min-width: 768px) {
#content {
margin-bottom: 1.25em;
}
.sect1 {
padding-bottom: 1.25em;
}
}
.sect1:last-child {
padding-bottom: 0;
}
.sect1 + .sect1 {
border-top: 1px solid #e7e7e9;
}
#content h1 > a.anchor,
h2 > a.anchor,
h3 > a.anchor,
#toctitle > a.anchor,
.sidebarblock > .content > .title > a.anchor,
h4 > a.anchor,
h5 > a.anchor,
h6 > a.anchor {
position: absolute;
z-index: 1001;
width: 1.5ex;
margin-left: -1.5ex;
display: block;
text-decoration: none !important;
visibility: hidden;
text-align: center;
font-weight: 400;
}
#content h1 > a.anchor::before,
h2 > a.anchor::before,
h3 > a.anchor::before,
#toctitle > a.anchor::before,
.sidebarblock > .content > .title > a.anchor::before,
h4 > a.anchor::before,
h5 > a.anchor::before,
h6 > a.anchor::before {
content: "\00A7";
font-size: 0.85em;
display: block;
padding-top: 0.1em;
}
#content h1:hover > a.anchor,
#content h1 > a.anchor:hover,
h2:hover > a.anchor,
h2 > a.anchor:hover,
h3:hover > a.anchor,
#toctitle:hover > a.anchor,
.sidebarblock > .content > .title:hover > a.anchor,
h3 > a.anchor:hover,
#toctitle > a.anchor:hover,
.sidebarblock > .content > .title > a.anchor:hover,
h4:hover > a.anchor,
h4 > a.anchor:hover,
h5:hover > a.anchor,
h5 > a.anchor:hover,
h6:hover > a.anchor,
h6 > a.anchor:hover {
visibility: visible;
}
#content h1 > a.link,
h2 > a.link,
h3 > a.link,
#toctitle > a.link,
.sidebarblock > .content > .title > a.link,
h4 > a.link,
h5 > a.link,
h6 > a.link {
color: #ba3925;
text-decoration: none;
}
#content h1 > a.link:hover,
h2 > a.link:hover,
h3 > a.link:hover,
#toctitle > a.link:hover,
.sidebarblock > .content > .title > a.link:hover,
h4 > a.link:hover,
h5 > a.link:hover,
h6 > a.link:hover {
color: #a53221;
}
details,
.audioblock,
.imageblock,
.literalblock,
.listingblock,
.stemblock,
.videoblock {
margin-bottom: 1.25em;
}
details {
margin-left: 1.25rem;
}
details > summary {
cursor: pointer;
display: block;
position: relative;
line-height: 1.6;
margin-bottom: 0.625rem;
-webkit-tap-highlight-color: transparent;
}
details > summary::before {
content: "";
border: solid transparent;
border-left: solid;
border-width: 0.3em 0 0.3em 0.5em;
position: absolute;
top: 0.5em;
left: -1.25rem;
transform: translateX(15%);
}
details[open] > summary::before {
border: solid transparent;
border-top: solid;
border-width: 0.5em 0.3em 0;
transform: translateY(15%);
}
details > summary::after {
content: "";
width: 1.25rem;
height: 1em;
position: absolute;
top: 0.3em;
left: -1.25rem;
}
.admonitionblock td.content > .title,
.audioblock > .title,
.exampleblock > .title,
.imageblock > .title,
.listingblock > .title,
.literalblock > .title,
.stemblock > .title,
.openblock > .title,
.paragraph > .title,
.quoteblock > .title,
table.tableblock > .title,
.verseblock > .title,
.videoblock > .title,
.dlist > .title,
.olist > .title,
.ulist > .title,
.qlist > .title,
.hdlist > .title {
text-rendering: optimizeLegibility;
text-align: left;
font-family: "Noto Serif", "DejaVu Serif", serif;
font-size: 1rem;
font-style: italic;
}
table.tableblock.fit-content > caption.title {
white-space: nowrap;
width: 0;
}
.paragraph.lead > p,
#preamble > .sectionbody > [class="paragraph"]:first-of-type p {
font-size: 1.21875em;
line-height: 1.6;
color: rgba(0, 0, 0, 0.85);
}
.admonitionblock > table {
border-collapse: separate;
border: 0;
background: none;
width: 100%;
}
.admonitionblock > table td.icon {
text-align: center;
width: 80px;
}
.admonitionblock > table td.icon img {
max-width: none;
}
.admonitionblock > table td.icon .title {
font-weight: bold;
font-family: "Open Sans", "DejaVu Sans", sans-serif;
text-transform: uppercase;
}
.admonitionblock > table td.content {
padding-left: 1.125em;
padding-right: 1.25em;
border-left: 1px solid #dddddf;
color: rgba(0, 0, 0, 0.6);
word-wrap: anywhere;
}
.admonitionblock > table td.content > :last-child > :last-child {
margin-bottom: 0;
}
.exampleblock > .content {
border: 1px solid #e6e6e6;
margin-bottom: 1.25em;
padding: 1.25em;
background: #fff;
border-radius: 4px;
}
.exampleblock > .content > :first-child {
margin-top: 0;
}
.exampleblock > .content > :last-child {
margin-bottom: 0;
}
.sidebarblock {
border: 1px solid #dbdbd6;
margin-bottom: 1.25em;
padding: 1.25em;
background: #f3f3f2;
border-radius: 4px;
}
.sidebarblock > :first-child {
margin-top: 0;
}
.sidebarblock > :last-child {
margin-bottom: 0;
}
.sidebarblock > .content > .title {
color: #7a2518;
margin-top: 0;
text-align: center;
}
.exampleblock > .content > :last-child > :last-child,
.exampleblock > .content .olist > ol > li:last-child > :last-child,
.exampleblock > .content .ulist > ul > li:last-child > :last-child,
.exampleblock > .content .qlist > ol > li:last-child > :last-child,
.sidebarblock > .content > :last-child > :last-child,
.sidebarblock > .content .olist > ol > li:last-child > :last-child,
.sidebarblock > .content .ulist > ul > li:last-child > :last-child,
.sidebarblock > .content .qlist > ol > li:last-child > :last-child {
margin-bottom: 0;
}
.literalblock pre,
.listingblock > .content > pre {
border-radius: 4px;
overflow-x: auto;
padding: 1em;
font-size: 0.8125em;
}
@media screen and (min-width: 768px) {
.literalblock pre,
.listingblock > .content > pre {
font-size: 0.90625em;
}
}
@media screen and (min-width: 1280px) {
.literalblock pre,
.listingblock > .content > pre {
font-size: 1em;
}
}
.literalblock pre,
.listingblock > .content > pre:not(.highlight),
.listingblock > .content > pre[class="highlight"],
.listingblock > .content > pre[class^="highlight "] {
background: #f7f7f8;
}
.literalblock.output pre {
color: #f7f7f8;
background: rgba(0, 0, 0, 0.9);
}
.listingblock > .content {
position: relative;
}
.listingblock code[data-lang]::before {
display: none;
content: attr(data-lang);
position: absolute;
font-size: 0.75em;
top: 0.425rem;
right: 0.5rem;
line-height: 1;
text-transform: uppercase;
color: inherit;
opacity: 0.5;
}
.listingblock:hover code[data-lang]::before {
display: block;
}
.listingblock.terminal pre .command::before {
content: attr(data-prompt);
padding-right: 0.5em;
color: inherit;
opacity: 0.5;
}
.listingblock.terminal pre .command:not([data-prompt])::before {
content: "$";
}
.listingblock pre.highlightjs {
padding: 0;
}
.listingblock pre.highlightjs > code {
padding: 1em;
border-radius: 4px;
}
.listingblock pre.prettyprint {
border-width: 0;
}
.prettyprint {
background: #f7f7f8;
}
pre.prettyprint .linenums {
line-height: 1.45;
margin-left: 2em;
}
pre.prettyprint li {
background: none;
list-style-type: inherit;
padding-left: 0;
}
pre.prettyprint li code[data-lang]::before {
opacity: 1;
}
pre.prettyprint li:not(:first-child) code[data-lang]::before {
display: none;
}
table.linenotable {
border-collapse: separate;
border: 0;
margin-bottom: 0;
background: none;
}
table.linenotable td[class] {
color: inherit;
vertical-align: top;
padding: 0;
line-height: inherit;
white-space: normal;
}
table.linenotable td.code {
padding-left: 0.75em;
}
table.linenotable td.linenos {
border-right: 1px solid;
opacity: 0.35;
padding-right: 0.5em;
}
pre.pygments .lineno {
border-right: 1px solid;
opacity: 0.35;
display: inline-block;
margin-right: 0.75em;
}
pre.pygments .lineno::before {
content: "";
margin-right: -0.125em;
}
.quoteblock {
margin: 0 1em 1.25em 1.5em;
display: table;
}
.quoteblock:not(.excerpt) > .title {
margin-left: -1.5em;
margin-bottom: 0.75em;
}
.quoteblock blockquote,
.quoteblock p {
color: rgba(0, 0, 0, 0.85);
font-size: 1.15rem;
line-height: 1.75;
word-spacing: 0.1em;
letter-spacing: 0;
font-style: italic;
text-align: justify;
}
.quoteblock blockquote {
margin: 0;
padding: 0;
border: 0;
}
.quoteblock blockquote::before {
content: "\201c";
float: left;
font-size: 2.75em;
font-weight: bold;
line-height: 0.6em;
margin-left: -0.6em;
color: #7a2518;
text-shadow: 0 1px 2px rgba(0, 0, 0, 0.1);
}
.quoteblock blockquote > .paragraph:last-child p {
margin-bottom: 0;
}
.quoteblock .attribution {
margin-top: 0.75em;
margin-right: 0.5ex;
text-align: right;
}
.verseblock {
margin: 0 1em 1.25em;
}
.verseblock pre {
font-family: "Open Sans", "DejaVu Sans", sans-serif;
font-size: 1.15rem;
color: rgba(0, 0, 0, 0.85);
font-weight: 300;
text-rendering: optimizeLegibility;
}
.verseblock pre strong {
font-weight: 400;
}
.verseblock .attribution {
margin-top: 1.25rem;
margin-left: 0.5ex;
}
.quoteblock .attribution,
.verseblock .attribution {
font-size: 0.9375em;
line-height: 1.45;
font-style: italic;
}
.quoteblock .attribution br,
.verseblock .attribution br {
display: none;
}
.quoteblock .attribution cite,
.verseblock .attribution cite {
display: block;
letter-spacing: -0.025em;
color: rgba(0, 0, 0, 0.6);
}
.quoteblock.abstract blockquote::before,
.quoteblock.excerpt blockquote::before,
.quoteblock .quoteblock blockquote::before {
display: none;
}
.quoteblock.abstract blockquote,
.quoteblock.abstract p,
.quoteblock.excerpt blockquote,
.quoteblock.excerpt p,
.quoteblock .quoteblock blockquote,
.quoteblock .quoteblock p {
line-height: 1.6;
word-spacing: 0;
}
.quoteblock.abstract {
margin: 0 1em 1.25em;
display: block;
}
.quoteblock.abstract > .title {
margin: 0 0 0.375em;
font-size: 1.15em;
text-align: center;
}
.quoteblock.excerpt > blockquote,
.quoteblock .quoteblock {
padding: 0 0 0.25em 1em;
border-left: 0.25em solid #dddddf;
}
.quoteblock.excerpt,
.quoteblock .quoteblock {
margin-left: 0;
}
.quoteblock.excerpt blockquote,
.quoteblock.excerpt p,
.quoteblock .quoteblock blockquote,
.quoteblock .quoteblock p {
color: inherit;
font-size: 1.0625rem;
}
.quoteblock.excerpt .attribution,
.quoteblock .quoteblock .attribution {
color: inherit;
font-size: 0.85rem;
text-align: left;
margin-right: 0;
}
p.tableblock:last-child {
margin-bottom: 0;
}
td.tableblock > .content {
margin-bottom: 1.25em;
word-wrap: anywhere;
}
td.tableblock > .content > :last-child {
margin-bottom: -1.25em;
}
table.tableblock,
th.tableblock,
td.tableblock {
border: 0 solid #dedede;
}
table.grid-all > * > tr > * {
border-width: 1px;
}
table.grid-cols > * > tr > * {
border-width: 0 1px;
}
table.grid-rows > * > tr > * {
border-width: 1px 0;
}
table.frame-all {
border-width: 1px;
}
table.frame-ends {
border-width: 1px 0;
}
table.frame-sides {
border-width: 0 1px;
}
table.frame-none > colgroup + * > :first-child > *,
table.frame-sides > colgroup + * > :first-child > * {
border-top-width: 0;
}
table.frame-none > :last-child > :last-child > *,
table.frame-sides > :last-child > :last-child > * {
border-bottom-width: 0;
}
table.frame-none > * > tr > :first-child,
table.frame-ends > * > tr > :first-child {
border-left-width: 0;
}
table.frame-none > * > tr > :last-child,
table.frame-ends > * > tr > :last-child {
border-right-width: 0;
}
table.stripes-all tr,
table.stripes-odd tr:nth-of-type(odd),
table.stripes-even tr:nth-of-type(even),
table.stripes-hover tr:hover {
background: #f8f8f7;
}
th.halign-left,
td.halign-left {
text-align: left;
}
th.halign-right,
td.halign-right {
text-align: right;
}
th.halign-center,
td.halign-center {
text-align: center;
}
th.valign-top,
td.valign-top {
vertical-align: top;
}
th.valign-bottom,
td.valign-bottom {
vertical-align: bottom;
}
th.valign-middle,
td.valign-middle {
vertical-align: middle;
}
table thead th,
table tfoot th {
font-weight: bold;
}
tbody tr th {
background: #f7f8f7;
}
tbody tr th,
tbody tr th p,
tfoot tr th,
tfoot tr th p {
color: rgba(0, 0, 0, 0.8);
font-weight: bold;
}
p.tableblock > code:only-child {
background: none;
padding: 0;
}
p.tableblock {
font-size: 1em;
}
ol {
margin-left: 1.75em;
}
ul li ol {
margin-left: 1.5em;
}
dl dd {
margin-left: 1.125em;
}
dl dd:last-child,
dl dd:last-child > :last-child {
margin-bottom: 0;
}
ol > li p,
ul > li p,
ul dd,
ol dd,
.olist .olist,
.ulist .ulist,
.ulist .olist,
.olist .ulist {
margin-bottom: 0.625em;
}
ul.checklist,
ul.none,
ol.none,
ul.no-bullet,
ol.no-bullet,
ol.unnumbered,
ul.unstyled,
ol.unstyled {
list-style-type: none;
}
ul.no-bullet,
ol.no-bullet,
ol.unnumbered {
margin-left: 0.625em;
}
ul.unstyled,
ol.unstyled {
margin-left: 0;
}
ul.checklist > li > p:first-child {
margin-left: -1em;
}
ul.checklist > li > p:first-child > .fa-square-o:first-child,
ul.checklist > li > p:first-child > .fa-check-square-o:first-child {
width: 1.25em;
font-size: 0.8em;
position: relative;
bottom: 0.125em;
}
ul.checklist > li > p:first-child > input[type="checkbox"]:first-child {
margin-right: 0.25em;
}
ul.inline {
display: flex;
flex-flow: row wrap;
list-style: none;
margin: 0 0 0.625em -1.25em;
}
ul.inline > li {
margin-left: 1.25em;
}
.unstyled dl dt {
font-weight: 400;
font-style: normal;
}
ol.arabic {
list-style-type: decimal;
}
ol.decimal {
list-style-type: decimal-leading-zero;
}
ol.loweralpha {
list-style-type: lower-alpha;
}
ol.upperalpha {
list-style-type: upper-alpha;
}
ol.lowerroman {
list-style-type: lower-roman;
}
ol.upperroman {
list-style-type: upper-roman;
}
ol.lowergreek {
list-style-type: lower-greek;
}
.hdlist > table,
.colist > table {
border: 0;
background: none;
}
.hdlist > table > tbody > tr,
.colist > table > tbody > tr {
background: none;
}
td.hdlist1,
td.hdlist2 {
vertical-align: top;
padding: 0 0.625em;
}
td.hdlist1 {
font-weight: bold;
padding-bottom: 1.25em;
}
td.hdlist2 {
word-wrap: anywhere;
}
.literalblock + .colist,
.listingblock + .colist {
margin-top: -0.5em;
}
.colist td:not([class]):first-child {
padding: 0.4em 0.75em 0;
line-height: 1;
vertical-align: top;
}
.colist td:not([class]):first-child img {
max-width: none;
}
.colist td:not([class]):last-child {
padding: 0.25em 0;
}
.thumb,
.th {
line-height: 0;
display: inline-block;
border: 4px solid #fff;
box-shadow: 0 0 0 1px #ddd;
}
.imageblock.left {
margin: 0.25em 0.625em 1.25em 0;
}
.imageblock.right {
margin: 0.25em 0 1.25em 0.625em;
}
.imageblock > .title {
margin-bottom: 0;
}
.imageblock.thumb,
.imageblock.th {
border-width: 6px;
}
.imageblock.thumb > .title,
.imageblock.th > .title {
padding: 0 0.125em;
}
.image.left,
.image.right {
margin-top: 0.25em;
margin-bottom: 0.25em;
display: inline-block;
line-height: 0;
}
.image.left {
margin-right: 0.625em;
}
.image.right {
margin-left: 0.625em;
}
a.image {
text-decoration: none;
display: inline-block;
}
a.image object {
pointer-events: none;
}
sup.footnote,
sup.footnoteref {
font-size: 0.875em;
position: static;
vertical-align: super;
}
sup.footnote a,
sup.footnoteref a {
text-decoration: none;
}
sup.footnote a:active,
sup.footnoteref a:active {
text-decoration: underline;
}
#footnotes {
padding-top: 0.75em;
padding-bottom: 0.75em;
margin-bottom: 0.625em;
}
#footnotes hr {
width: 20%;
min-width: 6.25em;
margin: -0.25em 0 0.75em;
border-width: 1px 0 0;
}
#footnotes .footnote {
padding: 0 0.375em 0 0.225em;
line-height: 1.3334;
font-size: 0.875em;
margin-left: 1.2em;
margin-bottom: 0.2em;
}
#footnotes .footnote a:first-of-type {
font-weight: bold;
text-decoration: none;
margin-left: -1.05em;
}
#footnotes .footnote:last-of-type {
margin-bottom: 0;
}
#content #footnotes {
margin-top: -0.625em;
margin-bottom: 0;
padding: 0.75em 0;
}
.gist .file-data > table {
border: 0;
background: #fff;
width: 100%;
margin-bottom: 0;
}
.gist .file-data > table td.line-data {
width: 99%;
}
div.unbreakable {
page-break-inside: avoid;
}
.big {
font-size: larger;
}
.small {
font-size: smaller;
}
.underline {
text-decoration: underline;
}
.overline {
text-decoration: overline;
}
.line-through {
text-decoration: line-through;
}
.aqua {
color: #00bfbf;
}
.aqua-background {
background: #00fafa;
}
.black {
color: #000;
}
.black-background {
background: #000;
}
.blue {
color: #0000bf;
}
.blue-background {
background: #0000fa;
}
.fuchsia {
color: #bf00bf;
}
.fuchsia-background {
background: #fa00fa;
}
.gray {
color: #606060;
}
.gray-background {
background: #7d7d7d;
}
.green {
color: #006000;
}
.green-background {
background: #007d00;
}
.lime {
color: #00bf00;
}
.lime-background {
background: #00fa00;
}
.maroon {
color: #600000;
}
.maroon-background {
background: #7d0000;
}
.navy {
color: #000060;
}
.navy-background {
background: #00007d;
}
.olive {
color: #606000;
}
.olive-background {
background: #7d7d00;
}
.purple {
color: #600060;
}
.purple-background {
background: #7d007d;
}
.red {
color: #bf0000;
}
.red-background {
background: #fa0000;
}
.silver {
color: #909090;
}
.silver-background {
background: #bcbcbc;
}
.teal {
color: #006060;
}
.teal-background {
background: #007d7d;
}
.white {
color: #bfbfbf;
}
.white-background {
background: #fafafa;
}
.yellow {
color: #bfbf00;
}
.yellow-background {
background: #fafa00;
}
span.icon > .fa {
cursor: default;
}
a span.icon > .fa {
cursor: inherit;
}
.admonitionblock td.icon [class^="fa icon-"] {
font-size: 2.5em;
text-shadow: 1px 1px 2px rgba(0, 0, 0, 0.5);
cursor: default;
}
.admonitionblock td.icon .icon-note::before {
content: "\f05a";
color: #19407c;
}
.admonitionblock td.icon .icon-tip::before {
content: "\f0eb";
text-shadow: 1px 1px 2px rgba(155, 155, 0, 0.8);
color: #111;
}
.admonitionblock td.icon .icon-warning::before {
content: "\f071";
color: #bf6900;
}
.admonitionblock td.icon .icon-caution::before {
content: "\f06d";
color: #bf3400;
}
.admonitionblock td.icon .icon-important::before {
content: "\f06a";
color: #bf0000;
}
.conum[data-value] {
display: inline-block;
color: #fff !important;
background: rgba(0, 0, 0, 0.8);
border-radius: 50%;
text-align: center;
font-size: 0.75em;
width: 1.67em;
height: 1.67em;
line-height: 1.67em;
font-family: "Open Sans", "DejaVu Sans", sans-serif;
font-style: normal;
font-weight: bold;
}
.conum[data-value] * {
color: #fff !important;
}
.conum[data-value] + b {
display: none;
}
.conum[data-value]::after {
content: attr(data-value);
}
pre .conum[data-value] {
position: relative;
top: -0.125em;
}
b.conum * {
color: inherit !important;
}
.conum:not([data-value]):empty {
display: none;
}
dt,
th.tableblock,
td.content,
div.footnote {
text-rendering: optimizeLegibility;
}
h1,
h2,
p,
td.content,
span.alt,
summary {
letter-spacing: -0.01em;
}
p strong,
td.content strong,
div.footnote strong {
letter-spacing: -0.005em;
}
p,
blockquote,
dt,
td.content,
span.alt,
summary {
font-size: 1.0625rem;
}
p {
margin-bottom: 1.25rem;
}
.sidebarblock p,
.sidebarblock dt,
.sidebarblock td.content,
p.tableblock {
font-size: 1em;
}
.exampleblock > .content {
background: #fffef7;
border-color: #e0e0dc;
box-shadow: 0 1px 4px #e0e0dc;
}
.print-only {
display: none !important;
}
@page {
margin: 1.25cm 0.75cm;
}
@media print {
* {
box-shadow: none !important;
text-shadow: none !important;
}
html {
font-size: 80%;
}
a {
color: inherit !important;
text-decoration: underline !important;
}
a.bare,
a[href^="#"],
a[href^="mailto:"] {
text-decoration: none !important;
}
a[href^="http:"]:not(.bare)::after,
a[href^="https:"]:not(.bare)::after {
content: "(" attr(href) ")";
display: inline-block;
font-size: 0.875em;
padding-left: 0.25em;
}
abbr[title] {
border-bottom: 1px dotted;
}
abbr[title]::after {
content: " (" attr(title) ")";
}
pre,
blockquote,
tr,
img,
object,
svg {
page-break-inside: avoid;
}
thead {
display: table-header-group;
}
svg {
max-width: 100%;
}
p,
blockquote,
dt,
td.content {
font-size: 1em;
orphans: 3;
widows: 3;
}
h2,
h3,
#toctitle,
.sidebarblock > .content > .title {
page-break-after: avoid;
}
#header,
#content,
#footnotes,
#footer {
max-width: none;
}
#toc,
.sidebarblock,
.exampleblock > .content {
background: none !important;
}
#toc {
border-bottom: 1px solid #dddddf !important;
padding-bottom: 0 !important;
}
body.book #header {
text-align: center;
}
body.book #header > h1:first-child {
border: 0 !important;
margin: 2.5em 0 1em;
}
body.book #header .details {
border: 0 !important;
display: block;
padding: 0 !important;
}
body.book #header .details span:first-child {
margin-left: 0 !important;
}
body.book #header .details br {
display: block;
}
body.book #header .details br + span::before {
content: none !important;
}
body.book #toc {
border: 0 !important;
text-align: left !important;
padding: 0 !important;
margin: 0 !important;
}
body.book #toc,
body.book #preamble,
body.book h1.sect0,
body.book .sect1 > h2 {
page-break-before: always;
}
.listingblock code[data-lang]::before {
display: block;
}
#footer {
padding: 0 0.9375em;
}
.hide-on-print {
display: none !important;
}
.print-only {
display: block !important;
}
.hide-for-print {
display: none !important;
}
.show-for-print {
display: inherit !important;
}
}
@media amzn-kf8, print {
#header > h1:first-child {
margin-top: 1.25rem;
}
.sect1 {
padding: 0 !important;
}
.sect1 + .sect1 {
border: 0;
}
#footer {
background: none;
}
#footer-text {
color: rgba(0, 0, 0, 0.6);
font-size: 0.9em;
}
}
@media amzn-kf8 {
#header,
#content,
#footnotes,
#footer {
padding: 0;
}
}
</style>
</head>
<body class="article">
<div id="header">
<h1>zinit.zsh(1)</h1>
</div>
<div id="content">
<div class="sect1">
<h2 id="_name">NAME</h2>
<div class="sectionbody">
<div class="paragraph">
<p>zinit.zsh - a shell script</p>
</div>
</div>
</div>
<div class="sect1">
<h2 id="_synopsis">SYNOPSIS</h2>
<div class="sectionbody">
<div class="paragraph">
<p>Documentation automatically generated with `zshelldoc'</p>
</div>
</div>
</div>
<div class="sect1">
<h2 id="_functions">FUNCTIONS</h2>
<div class="sectionbody">
<div class="literalblock">
<div class="content">
<pre>
@autoload
pmodload
zicdclear
zicdreplay
zicompdef
zicompinit
zinit
.zinit-add-fpath
.zinit-add-report
.zinit-any-to-pid
.zinit-any-to-user-plugin
.zinit-compdef-clear
.zinit-compdef-replay
+zinit-deploy-message
.zinit-diff
.zinit-diff-env
.zinit-diff-functions
.zinit-diff-options
.zinit-diff-parameter
.zinit-find-other-matches
.zinit-formatter-bar
.zinit-formatter-bar-util
.zinit-formatter-pid
.zinit-formatter-th-bar
.zinit-formatter-url
.zinit-get-mtime-into
.zinit-get-object-path
.zinit-ice
.zinit-load
.zinit-load-ices
.zinit-load-object
.zinit-load-plugin
.zinit-load-snippet
.zinit-main-message-formatter
+zinit-message
.zinit-pack-ice
.zinit-parse-opts
+zinit-prehelp-usage-message
.zinit-prepare-home
@zinit-register-annex
@zinit-register-hook
.zinit-register-plugin
:zinit-reload-and-run
.zinit-run
.zinit-run-task
-zinit_scheduler_add_sh
.zinit-set-m-func
.zinit-setup-params
.zinit-submit-turbo
@zinit-substitute
:zinit-tmp-subst-alias
:zinit-tmp-subst-autoload
:zinit-tmp-subst-bindkey
:zinit-tmp-subst-compdef
.zinit-tmp-subst-off
.zinit-tmp-subst-on
:zinit-tmp-subst-zle
:zinit-tmp-subst-zstyle
.zinit-util-shands-path
zpcdclear
zpcdreplay
zpcompdef
zpcompinit
zplugin
@zsh-plugin-run-on-unload
@zsh-plugin-run-on-update
AUTOLOAD add-zsh-hook
AUTOLOAD compinit
AUTOLOAD is-at-least
PRECMD-HOOK @zinit-scheduler</pre
>
</div>
</div>
</div>
</div>
<div class="sect1">
<h2 id="_details">DETAILS</h2>
<div class="sectionbody">
<div class="sect2">
<h3 id="_script_body">Script Body</h3>
<div class="paragraph">
<p>Has 213 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
Script-Body
|-- add-zsh-hook
|-- is-at-least
|-- zinit-autoload.zsh/.zinit-module
|-- +zinit-message
`-- @zinit-register-hook</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>add-zsh-hook</em>, <em>alias</em>,
<em>autoload</em>, <em>export</em>, <em>is-at-least</em>,
<em>setopt</em>, <em>source</em>, <em>zmodload</em>,
<em>zstyle</em>
</p>
</div>
<div class="paragraph">
<p>
<em>Exports (environment):</em> PMSPEC
<strong class="big">//</strong> ZPFX
<strong class="big">//</strong> ZSH_CACHE_DIR
</p>
</div>
</div>
<div class="sect2">
<h3 id="_autoload">@autoload</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: @autoload. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 3 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
@autoload
`-- :zinit-tmp-subst-autoload
|-- is-at-least
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_pmodload">pmodload</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: pmodload. [[[
Compatibility with Prezto. Calls can be recursive.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 15 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>pmodload</pre>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>zstyle</em></p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
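<div class="paragraph">
<p>
A hedged usage sketch (the module names below are placeholders taken
from typical Prezto setups, not from this script):
</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
# Load Prezto modules through Zinit's compatibility wrapper:
pmodload 'environment' 'terminal' 'editor'</pre>
</div>
</div>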
</div>
<div class="sect2">
<h3 id="_zicdclear">zicdclear</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: zicdclear. [[[
A wrapper for `zinit cdclear -q' which can be called from hook
ices like the atinit'', atload'', etc. ices.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zicdclear</pre>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zicdreplay">zicdreplay</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: zicdreplay. [[[
A function that can be invoked from within `atinit', `atload', etc.
ice-mod. It works like `zinit cdreplay', which cannot be invoked
from such hook ices.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zicdreplay</pre>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
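<div class="paragraph">
<p>
A hedged example of invoking it from a hook ice in turbo mode (the
plugin spec is a placeholder):
</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
# Replay the compdefs gathered so far once the plugin has loaded:
zinit ice wait lucid atload'zicdreplay'
zinit light user/completions-plugin</pre>
</div>
</div>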
</div>
<div class="sect2">
<h3 id="_zicompdef">zicompdef</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: zicompdef. [[[
Stores a compdef for replay with `zicdreplay' (turbo mode) or
with `zinit cdreplay' (normal mode). A utility function for an
as-yet undefined use case.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 1 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zicompinit">zicompinit</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: zicompinit. [[[
A function that can be invoked from within `atinit', `atload', etc.
ice-mod. It runs `autoload compinit; compinit' and respects
ZINIT[ZCOMPDUMP_PATH] and ZINIT[COMPINIT_OPTS].</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zicompinit
`-- compinit</pre
>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>autoload</em>, <em>compinit</em></p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
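<div class="paragraph">
<p>
A common usage sketch, assuming the usual turbo-mode pattern (the
plugin spec is only illustrative):
</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
# Initialize the completion system from an atinit'' hook and then
# replay any compdefs recorded earlier:
zinit ice wait lucid atinit'zicompinit; zicdreplay'
zinit light user/plugin</pre>
</div>
</div>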
</div>
<div class="sect2">
<h3 id="_zinit">zinit</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: zinit. [[[
Main function directly exposed to the user; obtains the subcommand and
its arguments, and has shell completion.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 560 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit
|-- compinit
|-- zinit-autoload.zsh/.zinit-cdisable
|-- zinit-autoload.zsh/.zinit-cenable
|-- zinit-autoload.zsh/.zinit-clear-completions
|-- zinit-autoload.zsh/.zinit-compiled
|-- zinit-autoload.zsh/.zinit-compile-uncompile-all
|-- zinit-autoload.zsh/.zinit-help
|-- zinit-autoload.zsh/.zinit-list-bindkeys
|-- zinit-autoload.zsh/.zinit-list-compdef-replay
|-- zinit-autoload.zsh/.zinit-ls
|-- zinit-autoload.zsh/.zinit-module
|-- zinit-autoload.zsh/.zinit-recently
|-- zinit-autoload.zsh/.zinit-search-completions
|-- zinit-autoload.zsh/.zinit-self-update
|-- zinit-autoload.zsh/.zinit-show-all-reports
|-- zinit-autoload.zsh/.zinit-show-completions
|-- zinit-autoload.zsh/.zinit-show-debug-report
|-- zinit-autoload.zsh/.zinit-show-registered-plugins
|-- zinit-autoload.zsh/.zinit-show-report
|-- zinit-autoload.zsh/.zinit-show-times
|-- zinit-autoload.zsh/.zinit-show-zstatus
|-- zinit-autoload.zsh/.zinit-uncompile-plugin
|-- zinit-autoload.zsh/.zinit-uninstall-completions
|-- zinit-autoload.zsh/.zinit-unload
|-- zinit-autoload.zsh/.zinit-update-or-status
|-- zinit-autoload.zsh/.zinit-update-or-status-all
|-- zinit-install.zsh/.zinit-compile-plugin
|-- zinit-install.zsh/.zinit-compinit
|-- zinit-install.zsh/.zinit-forget-completion
|-- zinit-install.zsh/.zinit-install-completions
|-- +zinit-message
`-- +zinit-prehelp-usage-message
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>autoload</em>, <em>compinit</em>,
<em>eval</em>, <em>setopt</em>, <em>source</em>
</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zplugin</pre>
</div>
</div>
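<div class="paragraph">
<p>
A few representative invocations; the subcommands shown correspond to
functions listed in this document, and the plugin spec is a placeholder:
</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit load user/plugin     # load with tracking/reporting
zinit light user/plugin    # load without tracking (faster)
zinit update user/plugin   # update the given plugin
zinit self-update          # update zinit itself</pre>
</div>
</div>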
<div class="paragraph">
<div class="title">zinit-add-fpath</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>FUNCTION: .zinit-add-fpath. [[[</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 10 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-add-fpath</pre>
</div>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zinit</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-add-report</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-add-report. [[[
Adds a report line for given plugin.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - uspl2, i.e. user/plugin
$2, ... - the text</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 3 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-plugin
.zinit-load-snippet
:zinit-tmp-subst-alias
:zinit-tmp-subst-autoload
:zinit-tmp-subst-bindkey
:zinit-tmp-subst-compdef
:zinit-tmp-subst-zle
:zinit-tmp-subst-zstyle</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-any-to-pid</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>FUNCTION: .zinit-any-to-pid. [[[</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 22 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-any-to-pid</pre>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit-side.zsh/.zinit-any-colorify-as-uspl2
zinit-side.zsh/.zinit-exists-physically-message
zinit-side.zsh/.zinit-first</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-any-to-user-plugin</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-any-to-user-plugin. [[[
Allows elastic plugin-spec across the code.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - plugin spec (4 formats: user---plugin, user/plugin, user, plugin)
$2 - plugin (only when $1 - i.e. user - given)</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>Returns user and plugin in $reply.</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 29 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-add-fpath
.zinit-get-object-path
.zinit-load
.zinit-run
:zinit-tmp-subst-autoload
zinit-autoload.zsh/.zinit-any-to-uspl2
zinit-autoload.zsh/.zinit-changes
zinit-autoload.zsh/.zinit-compiled
zinit-autoload.zsh/.zinit-compile-uncompile-all
zinit-autoload.zsh/.zinit-create
zinit-autoload.zsh/.zinit-delete
zinit-autoload.zsh/.zinit-find-completions-of-plugin
zinit-autoload.zsh/.zinit-glance
zinit-autoload.zsh/.zinit-show-report
zinit-autoload.zsh/.zinit-stress
zinit-autoload.zsh/.zinit-uncompile-plugin
zinit-autoload.zsh/.zinit-unload
zinit-autoload.zsh/.zinit-unregister-plugin
zinit-autoload.zsh/.zinit-update-all-parallel
zinit-autoload.zsh/.zinit-update-or-status-all
zinit-autoload.zsh/.zinit-update-or-status
zinit-install.zsh/.zinit-install-completions
zinit-side.zsh/.zinit-any-colorify-as-uspl2
zinit-side.zsh/.zinit-compute-ice
zinit-side.zsh/.zinit-exists-physically-message
zinit-side.zsh/.zinit-exists-physically
zinit-side.zsh/.zinit-first</pre
>
</div>
</div>
<div class="paragraph">
<p><em>Environment variables used:</em> ZPFX</p>
</div>
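<div class="paragraph">
<p>
An illustrative sketch of the accepted spec formats (the values are
placeholders); the result is returned in the $reply array:
</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-any-to-user-plugin "user---plugin"   # triple-dash form
.zinit-any-to-user-plugin "user/plugin"     # slash form
.zinit-any-to-user-plugin "user" "plugin"   # user and plugin given separately
.zinit-any-to-user-plugin "plugin"          # plugin only, no user part
# For the first three calls: reply=( user plugin )</pre>
</div>
</div>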
<div class="paragraph">
<div class="title">zinit-compdef-clear</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-compdef-clear. [[[
Implements user-exposed functionality to clear gathered compdefs.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 3 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-compdef-clear
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zicdclear
zinit
zpcdclear</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-compdef-replay</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub>
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-compdef-replay. [[[
Runs the gathered compdef calls. This makes it possible to run
`compinit' after loading plugins.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 17 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-compdef-replay
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>compdef</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zicdreplay
zinit
zpcdreplay</pre
>
</div>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_deploy_message">+zinit-deploy-message</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: +zinit-deploy-message. [[[
Deploys a sub-prompt message to be displayed OR a `zle
.reset-prompt' call to be invoked</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 13 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>read</em>, <em>zle</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-snippet
.zinit-load
zinit-autoload.zsh/.zinit-recall</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-diff</div>
<p><sub>~</sub><sub>~</sub><sub>~</sub>~~</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-diff. [[[
Performs diff actions of all types</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 4 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-diff</pre>
</div>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-load-plugin</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-diff-env</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub>
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-diff-env. [[[
Implements detection of change in PATH and FPATH.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - user/plugin (i.e. uspl2 format)
$2 - command, can be "begin" or "end"</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 18 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-diff
.zinit-load-plugin</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-diff-functions</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub>
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-diff-functions. [[[
Implements detection of newly created functions. Performs the
data gathering; the computation is done in *-compute().</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - user/plugin (i.e. uspl2 format)
$2 - command, can be "begin" or "end"</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 8 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-diff</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-diff-options</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-diff-options. [[[
Implements detection of change in option state. Performs the
data gathering; the computation is done in *-compute().</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - user/plugin (i.e. uspl2 format)
$2 - command, can be "begin" or "end"</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 7 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-diff</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-diff-parameter</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub>
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-diff-parameter. [[[
Implements detection of change in any parameter's existence and type.
Performs the data gathering; the computation is done in *-compute().</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - user/plugin (i.e. uspl2 format)
$2 - command, can be "begin" or "end"</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 9 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-diff</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-find-other-matches</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-find-other-matches. [[[
A plugin's main source file is in general `name.plugin.zsh'. However,
other naming conventions exist; if that file is not found, this
function examines the other conventions in the most sensible order.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 17 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-plugin
.zinit-load-snippet
zinit-side.zsh/.zinit-first</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-formatter-bar</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: .zinit-formatter-bar. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-formatter-bar</pre>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
<div class="paragraph">
<div class="title">zinit-formatter-bar-util</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>FUNCTION: .zinit-formatter-bar-util. [[[</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 7 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-formatter-bar
.zinit-formatter-th-bar</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-formatter-pid</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: .zinit-formatter-pid. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 11 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-formatter-pid
`-- zinit-side.zsh/.zinit-any-colorify-as-uspl2</pre
>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>source</em></p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
<div class="paragraph">
<div class="title">zinit-formatter-th-bar</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: .zinit-formatter-th-bar. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-formatter-th-bar</pre>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
<div class="paragraph">
<div class="title">zinit-formatter-url</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: .zinit-formatter-url. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 19 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
<div class="paragraph">
<div class="title">zinit-get-mtime-into</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub>
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>FUNCTION: .zinit-get-mtime-into. [[[</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 7 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
Script-Body
zinit-autoload.zsh/.zinit-self-update
zinit-autoload.zsh/.zinit-update-or-status-all</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-get-object-path</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>FUNCTION: .zinit-get-object-path. [[[</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 28 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-get-object-path</pre>
</div>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-ices
.zinit-load-snippet
.zinit-run
zinit
zinit-autoload.zsh/.zinit-get-path
zinit-install.zsh/.zinit-setup-plugin-dir
zinit-install.zsh/.zinit-update-snippet
zinit-side.zsh/.zinit-first
zinit-side.zsh/.zinit-two-paths</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-ice</div>
<p><sub>~</sub><sub>~</sub><sub>~</sub>~</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-ice. [[[
Parses the ICE specification and puts the result into the global ICE hash.
The ice-spec is valid for the next command only (i.e. it "melts"), but it
can then stick to the plugin and activate e.g. at update.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 13 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zinit</pre>
</div>
</div>
<div class="paragraph">
<p><em>Environment variables used:</em> ZPFX</p>
</div>
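<div class="paragraph">
<p>
A hedged sketch of how an ice-spec melts after the next command (the
plugin specs are placeholders):
</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit ice wait'1' lucid    # the ice applies to the next command only
zinit load user/plugin-a   # loaded with wait'1' lucid
zinit load user/plugin-b   # loaded with no ices - the spec has melted</pre>
</div>
</div>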
<div class="paragraph">
<div class="title">zinit-load</div>
<p><sub>~</sub><sub>~</sub><sub>~</sub>~~</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-load. [[[
Implements the exposed-to-user action of loading a plugin.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - plugin spec (4 formats: user---plugin, user/plugin, user, plugin)
$2 - plugin name, if the third format is used</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 92 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load
|-- +zinit-deploy-message
|-- zinit-install.zsh/.zinit-get-package
`-- zinit-install.zsh/.zinit-setup-plugin-dir</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>eval</em>, <em>setopt</em>,
<em>source</em>, <em>zle</em>
</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-object
.zinit-run-task</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-load-ices</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>FUNCTION: .zinit-load-ices. [[[</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 22 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-load-ices</pre>
</div>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zinit</pre>
</div>
</div>
<div class="paragraph">
<p><em>Environment variables used:</em> ZPFX</p>
</div>
<div class="paragraph">
<div class="title">zinit-load-object</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>FUNCTION: .zinit-load-object. [[[</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 12 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-load-object</pre>
</div>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zinit</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-load-plugin</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-load-plugin. [[[
Lower-level function for loading a plugin.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - user
$2 - plugin
$3 - mode (light or load)</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 127 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-plugin
`-- :zinit-tmp-subst-autoload
|-- is-at-least
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>eval</em>, <em>setopt</em>,
<em>source</em>, <em>unfunction</em>, <em>zle</em>
</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-load</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-load-snippet</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: .zinit-load-snippet. [[[
Implements the exposed-to-user action of loading a snippet.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>$1 - url (can be local, absolute path).</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 203 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-snippet
|-- +zinit-deploy-message
|-- zinit-install.zsh/.zinit-download-snippet
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>autoload</em>, <em>eval</em>,
<em>setopt</em>, <em>source</em>, <em>unfunction</em>,
<em>zparseopts</em>, <em>zstyle</em>
</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
pmodload
.zinit-load-object
.zinit-load
.zinit-run-task</pre
>
</div>
</div>
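<div class="paragraph">
<p>
The user-facing entry point is typically the `snippet' subcommand; a
hedged sketch (the URL and path below are illustrative):
</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit snippet OMZ::plugins/git/git.plugin.zsh   # shorthand remote URL
zinit snippet /absolute/path/to/file.zsh        # local absolute path</pre>
</div>
</div>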
<div class="paragraph">
<div class="title">zinit-main-message-formatter</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: +zinit-message-formatter [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 18 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_message">+zinit-message</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: +zinit-message. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 14 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
Script-Body
.zinit-compdef-clear
.zinit-compdef-replay
.zinit-load-snippet
+zinit-prehelp-usage-message
.zinit-register-plugin
.zinit-run
.zinit-set-m-func
:zinit-tmp-subst-autoload
zinit
zinit-autoload.zsh/.zinit-build-module
zinit-autoload.zsh/.zinit-cd
zinit-autoload.zsh/.zinit-self-update
zinit-autoload.zsh/.zinit-show-zstatus
zinit-autoload.zsh/.zinit-uninstall-completions
zinit-autoload.zsh/.zinit-update-all-parallel
zinit-autoload.zsh/.zinit-update-or-status-all
zinit-autoload.zsh/.zinit-update-or-status
zinit-autoload.zsh/.zinit-wait-for-update-jobs
zinit-install.zsh/ziextract
zinit-install.zsh/.zinit-compile-plugin
zinit-install.zsh/.zinit-compinit
zinit-install.zsh/.zinit-download-file-stdout
zinit-install.zsh/.zinit-download-snippet
zinit-install.zsh/.zinit-extract
zinit-install.zsh/.zinit-get-cygwin-package
zinit-install.zsh/.zinit-get-latest-gh-r-url-part
zinit-install.zsh/.zinit-get-package
zinit-install.zsh/.zinit-install-completions
zinit-install.zsh/∞zinit-ps-on-update-hook
zinit-install.zsh/∞zinit-reset-hook
zinit-install.zsh/.zinit-setup-plugin-dir
zinit-install.zsh/.zinit-update-snippet
zinit-side.zsh/.zinit-countdown
zinit-side.zsh/.zinit-exists-physically-message</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-pack-ice</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub>
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-pack-ice. [[[
Remembers all ice-mods and assigns them to a concrete plugin. An ice
spec is in general forgotten by the second-next command (that's why
it's called "ice" - it melts), however it sticks to the object (plugin
or snippet) mentioned in the next command – for later use with e.g.
`zinit update ...'.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 3 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-snippet
.zinit-load
@zsh-plugin-run-on-unload
@zsh-plugin-run-on-update
zinit-install.zsh/.zinit-update-snippet
zinit-side.zsh/.zinit-compute-ice</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-parse-opts</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub>~~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: +zinit-parse-opts. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 2 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit
zinit-autoload.zsh/.zinit-delete</pre
>
</div>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_prehelp_usage_message">
+zinit-prehelp-usage-message
</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: +zinit-prehelp-usage-message. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 38 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
+zinit-prehelp-usage-message
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit
zinit-autoload.zsh/.zinit-delete</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-prepare-home</div>
<p>
<sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub><sub>~</sub
><sub>~</sub>~
</p>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-prepare-home. [[[
Creates all directories needed by Zinit, first checking whether they
already exist.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 37 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-prepare-home
|-- zinit-autoload.zsh/.zinit-clear-completions
`-- zinit-install.zsh/.zinit-compinit</pre
>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>source</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>Script-Body</pre>
</div>
</div>
<div class="paragraph">
<p><em>Environment variables used:</em> ZPFX</p>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_register_annex">@zinit-register-annex</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: @zinit-register-annex. [[[
Registers the z-annex inside Zinit – i.e. a Zinit extension</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 8 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_register_hook">@zinit-register-hook</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: @zinit-register-hook. [[[
Registers the z-annex inside Zinit – i.e. a Zinit extension.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 4 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>Script-Body</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-register-plugin</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-register-plugin. [[[
Adds the plugin to ZINIT_REGISTERED_PLUGINS array and to the
zsh_loaded_plugins array (managed according to the plugin standard:
http://z-shell.github.io/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html).</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 23 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-register-plugin
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-load</pre>
</div>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_reload_and_run">:zinit-reload-and-run</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: :zinit-reload-and-run. [[[
Marks given function ($3) for autoloading, and executes it triggering the
load. $1 is the fpath dedicated to the function, $2 are autoload options.
This function replaces "autoload -X", because using that on older Zsh
versions causes problems with traps.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
So basically one creates a function stub that calls :zinit-reload-and-run()
instead of "autoload -X".</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - FPATH dedicated to function
$2 - autoload options
$3 - function name (one that needs autoloading)</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>Author: <NAME></pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 11 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>autoload</em>, <em>unfunction</em></p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
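<div class="paragraph">
<p>A sketch of the stub pattern described above (the function name, fpath and argument forwarding are illustrative, not taken from the source):</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
# Stub created instead of using `autoload -X': on first call it marks
# my-func for autoloading from the given fpath and then runs it.
my-func() {
    :zinit-reload-and-run "/path/to/functions-dir" "-Uz" my-func "$@"
}</pre>
</div>
</div>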
<div class="paragraph">
<div class="title">zinit-run</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: .zinit-run. [[[
Run code inside the plugin's folder.
It uses the `correct' parameter from the enclosing zinit() scope.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 24 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-run
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>eval</em>, <em>setopt</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zinit</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-run-task</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-run-task. [[[
A backend, worker function of .zinit-scheduler. It obtains the task's
index and a few of its properties (like the type: plugin, snippet,
service plugin, service snippet) and executes it, first checking for
additional conditions (like a non-numeric wait'' ice).</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - the pass number, either 1st or 2nd pass
$2 - the time assigned to the task
$3 - type: plugin, snippet, service plugin, service snippet
$4 - task's index in the ZINIT[WAIT_ICE_...] fields
$5 - mode: load or light
$6 - the plugin-spec or snippet URL or alias name (from id-as'')</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 46 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-run-task
`-- zinit-autoload.zsh/.zinit-unload</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>eval</em>, <em>source</em>, <em>zle</em>,
<em>zpty</em>
</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>@zinit-scheduler</pre>
</div>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_scheduler">@zinit-scheduler</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: @zinit-scheduler. [[[
Searches for timed-out tasks and executes them. There's an array of tasks
waiting for execution; this scheduler manages them, detects which ones
should be run at the current moment, and decides whether to remove them
from the array after execution.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
$1 - if "following", then it is non-first (second and more)
invocation of the scheduler; this results in chain of `sched'
invocations that results in repetitive @zinit-scheduler activity.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
if "burst", then all tasks are marked timeout and executed one
by one; this is handy if e.g. a docker image starts up and
needs to install all turbo-mode plugins without any hesitation
(delay), i.e. "burst" allows to run package installations from
script, not from prompt.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>
Has 75 line(s). <strong>Is a precmd hook</strong>. Calls
functions:
</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
@zinit-scheduler
`-- add-zsh-hook</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>add-zsh-hook</em>, <em>sched</em>,
<em>setopt</em>, <em>zle</em>
</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
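<div class="paragraph">
<p>A sketch of the "burst" mode mentioned above, e.g. in a non-interactive provisioning script (normally the scheduler is invoked by Zinit itself via precmd/sched, not by the user):</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
# Force all pending turbo-mode tasks to run immediately,
# e.g. when building a docker image:
@zinit-scheduler burst</pre>
</div>
</div>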
</div>
<div class="sect2">
<h3 id="_zinit_scheduler_add_sh">-zinit_scheduler_add_sh</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: -zinit_scheduler_add_sh. [[[
Copies a task into the ZINIT_RUN array; called when a task times out.
A small function run from a pattern in a /-substitution as a math
function.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 7 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
<div class="paragraph">
<div class="title">zinit-set-m-func</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: .zinit-set-m-func. [[[
Sets and withdraws the temporary, atclone/atpull time function `m`.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 17 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-set-m-func
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-snippet
.zinit-load
zinit-autoload.zsh/.zinit-update-or-status</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-setup-params</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: .zinit-setup-params. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 3 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
.zinit-load-snippet
.zinit-load</pre
>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-submit-turbo</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-submit-turbo. [[[
If `zinit load`, `zinit light` or `zinit snippet` is
preceded with `wait', `load', `unload' or `on-update-of`/`subscribe'
ice-mods, then the plugin or snippet is to be loaded in turbo-mode,
and this function adds it to the internal data structures, so that
@zinit-scheduler can run (load, unload) it as a task.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 16 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zinit</pre>
</div>
</div>
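<div class="paragraph">
<p>For example, the following user-level commands are routed through this function because of the wait'' ice (the plugin spec is hypothetical):</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit ice wait'1' lucid
zinit light user/plugin   # loaded about 1 s after the prompt, by @zinit-scheduler</pre>
</div>
</div>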
</div>
<div class="sect2">
<h3 id="_zinit_substitute">@zinit-substitute</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: @zinit-substitute. [[[</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 40 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zinit-autoload.zsh/.zinit-at-eval
zinit-install.zsh/∞zinit-atclone-hook
zinit-install.zsh/.zinit-at-eval
zinit-install.zsh/∞zinit-cp-hook
zinit-install.zsh/∞zinit-extract-hook
zinit-install.zsh/.zinit-get-package
zinit-install.zsh/∞zinit-make-ee-hook
zinit-install.zsh/∞zinit-make-e-hook
zinit-install.zsh/∞zinit-make-hook
zinit-install.zsh/∞zinit-mv-hook</pre
>
</div>
</div>
<div class="paragraph">
<p><em>Environment variables used:</em> ZPFX</p>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_tmp_subst_alias">:zinit-tmp-subst-alias</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: :zinit-tmp-subst-alias. [[[
Function defined to hijack plugin's calls to the `alias' builtin.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
The hijacking is to gather report data (which is used in unload).</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 36 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>:zinit-tmp-subst-alias</pre>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>alias</em>, <em>setopt</em>,
<em>zparseopts</em>
</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_tmp_subst_autoload">:zinit-tmp-subst-autoload</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: :zinit-tmp-subst-autoload. [[[
Function defined to hijack plugin's calls to the `autoload' builtin.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
The hijacking is not only to gather report data, but also to
run a custom `autoload' function that doesn't need FPATH.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 111 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
:zinit-tmp-subst-autoload
|-- is-at-least
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>autoload</em>, <em>eval</em>,
<em>is-at-least</em>, <em>setopt</em>, <em>zparseopts</em>
</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
@autoload
.zinit-load-plugin</pre
>
</div>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_tmp_subst_bindkey">:zinit-tmp-subst-bindkey</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: :zinit-tmp-subst-bindkey. [[[
Function defined to hijack plugin's calls to the `bindkey' builtin.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
The hijacking is to gather report data (which is used in unload).</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 120 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
:zinit-tmp-subst-bindkey
`-- is-at-least</pre
>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>bindkey</em>, <em>is-at-least</em>,
<em>setopt</em>, <em>zparseopts</em>
</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_tmp_subst_compdef">:zinit-tmp-subst-compdef</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: :zinit-tmp-subst-compdef. [[[
Function defined to hijack plugin's calls to the `compdef' function.
The hijacking is not only for reporting, but also to save compdef
calls so that `compinit' can be called after loading plugins.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 6 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>:zinit-tmp-subst-compdef</pre>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em></p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
<div class="paragraph">
<div class="title">zinit-tmp-subst-off</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-tmp-subst-off. [[[
Turn off temporary substitution of functions completely for a given mode ("load", "light",
"light-b" (i.e. the `trackbinds' mode) or "compdef").</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 21 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em>, <em>unfunction</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-load-plugin</pre>
</div>
</div>
<div class="paragraph">
<div class="title">zinit-tmp-subst-on</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-tmp-subst-on. [[[
Turn on temporary substitution of builtins and functions according to the passed
mode ("load", "light", "light-b" or "compdef"). The temporary substitution is
used to gather report data, and to hijack `autoload', `bindkey' and
`compdef' calls.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 32 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>source</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-load-plugin</pre>
</div>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_tmp_subst_zle">:zinit-tmp-subst-zle</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: :zinit-tmp-subst-zle. [[[
Function defined to hijack plugin's calls to the `zle' builtin.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
The hijacking is to gather report data (which is used in unload).</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 36 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>:zinit-tmp-subst-zle</pre>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em>, <em>zle</em></p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zinit_tmp_subst_zstyle">:zinit-tmp-subst-zstyle</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: :zinit-tmp-subst-zstyle. [[[
Function defined to hijack plugin's calls to the `zstyle' builtin.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
The hijacking is to gather report data (which is used in unload).</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 23 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>:zinit-tmp-subst-zstyle</pre>
</div>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>setopt</em>, <em>zparseopts</em>,
<em>zstyle</em>
</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
<div class="paragraph">
<div class="title">zinit-util-shands-path</div>
</div>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
FUNCTION: .zinit-util-shands-path. [[[
Replaces parts of path with %HOME, etc.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 9 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>setopt</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>.zinit-any-to-pid</pre>
</div>
</div>
<div class="paragraph">
<p><em>Environment variables used:</em> ZPFX</p>
</div>
</div>
<div class="sect2">
<h3 id="_zpcdclear">zpcdclear</h3>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zpcdclear</pre>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zpcdreplay">zpcdreplay</h3>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>zpcdreplay</pre>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zpcompdef">zpcompdef</h3>
<div class="paragraph">
<p>Has 1 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zpcompinit">zpcompinit</h3>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zpcompinit
`-- compinit</pre
>
</div>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>autoload</em>, <em>compinit</em></p>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
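<div class="paragraph">
<p>A typical usage sketch from a user's .zshrc, placed after all plugins have been loaded:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
# Initialise the completion system, then replay any compdef calls
# that plugins issued before compinit was available.
zpcompinit
zpcdreplay</pre>
</div>
</div>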
</div>
<div class="sect2">
<h3 id="_zplugin">zplugin</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>Compatibility functions. [[[</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 1 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zplugin
`-- zinit
|-- compinit
|-- zinit-autoload.zsh/.zinit-cdisable
|-- zinit-autoload.zsh/.zinit-cenable
|-- zinit-autoload.zsh/.zinit-clear-completions
|-- zinit-autoload.zsh/.zinit-compiled
|-- zinit-autoload.zsh/.zinit-compile-uncompile-all
|-- zinit-autoload.zsh/.zinit-help
|-- zinit-autoload.zsh/.zinit-list-bindkeys
|-- zinit-autoload.zsh/.zinit-list-compdef-replay
|-- zinit-autoload.zsh/.zinit-ls
|-- zinit-autoload.zsh/.zinit-module
|-- zinit-autoload.zsh/.zinit-recently
|-- zinit-autoload.zsh/.zinit-search-completions
|-- zinit-autoload.zsh/.zinit-self-update
|-- zinit-autoload.zsh/.zinit-show-all-reports
|-- zinit-autoload.zsh/.zinit-show-completions
|-- zinit-autoload.zsh/.zinit-show-debug-report
|-- zinit-autoload.zsh/.zinit-show-registered-plugins
|-- zinit-autoload.zsh/.zinit-show-report
|-- zinit-autoload.zsh/.zinit-show-times
|-- zinit-autoload.zsh/.zinit-show-zstatus
|-- zinit-autoload.zsh/.zinit-uncompile-plugin
|-- zinit-autoload.zsh/.zinit-uninstall-completions
|-- zinit-autoload.zsh/.zinit-unload
|-- zinit-autoload.zsh/.zinit-update-or-status
|-- zinit-autoload.zsh/.zinit-update-or-status-all
|-- zinit-install.zsh/.zinit-compile-plugin
|-- zinit-install.zsh/.zinit-compinit
|-- zinit-install.zsh/.zinit-forget-completion
|-- zinit-install.zsh/.zinit-install-completions
|-- +zinit-message
`-- +zinit-prehelp-usage-message
`-- +zinit-message</pre
>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zsh_plugin_run_on_unload">@zsh-plugin-run-on-unload</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: @zsh-plugin-run-on-unload. [[[
The Plugin Standard required mechanism, see:
http://zdharma.org/Zsh-100-Commits-Club/Zsh-Plugin-Standard.html</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 2 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>@zsh-plugin-run-on-unload</pre>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
</div>
<div class="sect2">
<h3 id="_zsh_plugin_run_on_update">@zsh-plugin-run-on-update</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
]]]
FUNCTION: @zsh-plugin-run-on-update. [[[
The Plugin Standard required mechanism</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 2 line(s). Calls functions:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>@zsh-plugin-run-on-update</pre>
</div>
</div>
<div class="paragraph">
<p>
Not called by script or any function (may be e.g. a hook, a Zle
widget, etc.).
</p>
</div>
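<div class="paragraph">
<p>A sketch of how a plugin following the standard might call it; the signature is assumed from the Plugin Standard and the command passed is purely illustrative:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
# Inside a plugin file: ask the plugin manager to run the given
# snippet of code whenever the plugin gets updated.
@zsh-plugin-run-on-update 'echo "plugin updated, rebuilding cache"'</pre>
</div>
</div>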
</div>
<div class="sect2">
<h3 id="_add_zsh_hook">add-zsh-hook</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
Add to HOOK the given FUNCTION.
HOOK is one of chpwd, precmd, preexec, periodic, zshaddhistory,
zshexit, zsh_directory_name (the _functions subscript is not required).</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
With -d, remove the function from the hook instead; delete the hook
variable if it is empty.</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
-D behaves like -d, but pattern characters are active in the
function name, so any matching function will be deleted from the hook.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 93 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Uses feature(s): <em>autoload</em>, <em>getopts</em></p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
Script-Body
@zinit-scheduler</pre
>
</div>
</div>
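<div class="paragraph">
<p>For reference, typical usage of the options described above (the hook function is hypothetical):</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
autoload -Uz add-zsh-hook
my-precmd() { print -Pn '%~'; }    # hypothetical hook function
add-zsh-hook precmd my-precmd      # register it for the precmd hook
add-zsh-hook -d precmd my-precmd   # later: remove it again</pre>
</div>
</div>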
</div>
<div class="sect2">
<h3 id="_compinit">compinit</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
Initialisation for new style completion. This mainly contains some helper
functions and setup. Everything else is split into different files that
will automatically be made autoloaded (see the end of this file). The
names of the files that will be considered for autoloading are those that
begin with an underscore (like `_condition').</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>
The first line of each of these files is read and must indicate what
should be done with its contents:</pre
>
</div>
</div>
<div class="literalblock">
<div class="content">
<pre>`#compdef <names ...>'</pre>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 549 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>
Uses feature(s): <em>autoload</em>, <em>bindkey</em>,
<em>compdef</em>, <em>compdump</em>, <em>eval</em>,
<em>read</em>, <em>setopt</em>, <em>unfunction</em>,
<em>zle</em>, <em>zstyle</em>
</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
zicompinit
zinit
zpcompinit</pre
>
</div>
</div>
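<div class="paragraph">
<p>As an example of the first-line convention mentioned above, a completion file for a hypothetical `mytool' command would start like this:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
#compdef mytool
# _mytool: completion for the (hypothetical) mytool command
_arguments '-v[verbose output]' '1:file:_files'</pre>
</div>
</div>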
</div>
<div class="sect2">
<h3 id="_is_at_least">is-at-least</h3>
<div class="quoteblock">
<blockquote>
<div class="literalblock">
<div class="content">
<pre>
Test whether $ZSH_VERSION (or some value of your choice, if a second argument
is provided) is greater than or equal to x.y.z-r (in argument one). In fact,
it'll accept any dot/dash-separated string of numbers as its second argument
and compare it to the dot/dash-separated first argument. Leading non-number
parts of a segment (such as the "zefram" in 3.1.2-zefram4) are not considered
when the comparison is done; only the numbers matter. Any left-out segments
in the first argument that are present in the version string compared are
considered as zeroes, eg 3 == 3.0 == 3.0.0 == 3.0.0.0 and so on.</pre
>
</div>
</div>
</blockquote>
</div>
<div class="paragraph">
<p>Has 56 line(s). Doesn’t call other functions.</p>
</div>
<div class="paragraph">
<p>Called by:</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
Script-Body
:zinit-tmp-subst-autoload
:zinit-tmp-subst-bindkey</pre
>
</div>
</div>
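<div class="paragraph">
<p>A short usage sketch (the $version parameter in the second call is hypothetical):</p>
</div>
<div class="literalblock">
<div class="content">
<pre>
autoload -Uz is-at-least
if is-at-least 5.3; then
    print "running on zsh 5.3 or newer"
fi
# Compare an arbitrary version string instead of $ZSH_VERSION:
is-at-least 2.6-17 $version</pre>
</div>
</div>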
</div>
</div>
</div>
</div>
<div id="footer">
<div id="footer-text">Last updated 2021-11-09 23:19:31 UTC</div>
</div>
</body>
</html>
<|start_filename|>zmodules/Src/signals.c<|end_filename|>
/*
* signals.c - signals handling code
*
* This file is part of zsh, the Z shell.
*
* Copyright (c) 1992-1997 <NAME>
* All rights reserved.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and to distribute modified versions of this software for any
* purpose, provided that the above copyright notice and the following
* two paragraphs appear in all copies of this software.
*
* In no event shall <NAME> or the Zsh Development Group be liable
* to any party for direct, indirect, special, incidental, or consequential
* damages arising out of the use of this software and its documentation,
* even if <NAME> and the Zsh Development Group have been advised of
* the possibility of such damage.
*
* <NAME> and the Zsh Development Group specifically disclaim any
* warranties, including, but not limited to, the implied warranties of
* merchantability and fitness for a particular purpose. The software
* provided hereunder is on an "as is" basis, and <NAME> and the
* Zsh Development Group have no obligation to provide maintenance,
* support, updates, enhancements, or modifications.
*
*/
#include "zsh.mdh"
#include "signals.pro"
/* Array describing the state of each signal: an element contains *
* 0 for the default action or some ZSIG_* flags ored together. */
/**/
mod_export int sigtrapped[VSIGCOUNT];
/*
* Trap programme lists for each signal.
*
* If (sigtrapped[sig] & ZSIG_FUNC) is set, this isn't used.
* The corresponding shell function is used instead.
*
* Otherwise, if sigtrapped[sig] is not zero, this is NULL when a signal
* is to be ignored, and if not NULL contains the programme list to be
* eval'd.
*/
/**/
mod_export Eprog siglists[VSIGCOUNT];
/* Total count of trapped signals */
/**/
mod_export int nsigtrapped;
/* Running an exit trap? */
/**/
int in_exit_trap;
/*
* Flag that exit trap has been set in POSIX mode.
* The setter's expectation is therefore that it is run
* on programme exit, not function exit.
*/
/**/
static int exit_trap_posix;
/* Variables used by signal queueing */
/**/
mod_export int queueing_enabled, queue_front, queue_rear;
/**/
mod_export int signal_queue[MAX_QUEUE_SIZE];
/**/
mod_export sigset_t signal_mask_queue[MAX_QUEUE_SIZE];
#ifdef DEBUG
/**/
mod_export int queue_in;
#endif
/* Variables used by trap queueing */
/**/
mod_export int trap_queueing_enabled, trap_queue_front, trap_queue_rear;
/**/
mod_export int trap_queue[MAX_QUEUE_SIZE];
/* This is only used on machines that don't understand signal sets. *
* On SYSV machines this will represent the signals that are blocked *
* (held) using sighold. On machines which can't block signals at *
* all, we will simulate this by ignoring them and remembering them *
* in this variable. */
#if !defined(POSIX_SIGNALS) && !defined(BSD_SIGNALS)
static sigset_t blocked_set;
#endif
#ifdef POSIX_SIGNALS
# define signal_jmp_buf sigjmp_buf
# define signal_setjmp(b) sigsetjmp((b),1)
# define signal_longjmp(b,n) siglongjmp((b),(n))
#else
# define signal_jmp_buf jmp_buf
# define signal_setjmp(b) setjmp(b)
# define signal_longjmp(b,n) longjmp((b),(n))
#endif
#ifdef NO_SIGNAL_BLOCKING
# define signal_process(sig) signal_ignore(sig)
# define signal_reset(sig) install_handler(sig)
#else
# define signal_process(sig) ;
# define signal_reset(sig) ;
#endif
/* Install signal handler for given signal. *
* If possible, we want to make sure that interrupted *
* system calls are not restarted. */
/**/
mod_export void
install_handler(int sig)
{
#ifdef POSIX_SIGNALS
struct sigaction act;
act.sa_handler = (SIGNAL_HANDTYPE) zhandler;
sigemptyset(&act.sa_mask); /* only block sig while in handler */
act.sa_flags = 0;
# ifdef SA_INTERRUPT /* SunOS 4.x */
if (interact)
act.sa_flags |= SA_INTERRUPT; /* make sure system calls are not restarted */
# endif
sigaction(sig, &act, (struct sigaction *)NULL);
#else
# ifdef BSD_SIGNALS
struct sigvec vec;
vec.sv_handler = (SIGNAL_HANDTYPE) zhandler;
vec.sv_mask = sigmask(sig); /* mask out this signal while in handler */
# ifdef SV_INTERRUPT
vec.sv_flags = SV_INTERRUPT; /* make sure system calls are not restarted */
# endif
sigvec(sig, &vec, (struct sigvec *)NULL);
# else
# ifdef SYSV_SIGNALS
/* we want sigset rather than signal because it will *
* block sig while in handler. signal usually doesn't */
sigset(sig, zhandler);
# else /* NO_SIGNAL_BLOCKING (bummer) */
signal(sig, zhandler);
# endif /* SYSV_SIGNALS */
# endif /* BSD_SIGNALS */
#endif /* POSIX_SIGNALS */
}
/* enable ^C interrupts */
/**/
mod_export void
intr(void)
{
if (interact)
install_handler(SIGINT);
}
/* disable ^C interrupts */
#if 0 /**/
void
nointr(void)
{
if (interact)
signal_ignore(SIGINT);
}
#endif
/* temporarily block ^C interrupts */
/**/
mod_export void
holdintr(void)
{
if (interact)
signal_block(signal_mask(SIGINT));
}
/* release ^C interrupts */
/**/
mod_export void
noholdintr(void)
{
if (interact)
signal_unblock(signal_mask(SIGINT));
}
/* create a signal mask containing *
* only the given signal */
/**/
mod_export sigset_t
signal_mask(int sig)
{
sigset_t set;
sigemptyset(&set);
if (sig)
sigaddset(&set, sig);
return set;
}
/* Block the signals in the given signal *
* set. Return the old signal set. */
/**/
#ifndef BSD_SIGNALS
/**/
mod_export sigset_t
signal_block(sigset_t set)
{
sigset_t oset;
#ifdef POSIX_SIGNALS
sigprocmask(SIG_BLOCK, &set, &oset);
#else
# ifdef SYSV_SIGNALS
int i;
oset = blocked_set;
for (i = 1; i <= NSIG; ++i) {
if (sigismember(&set, i) && !sigismember(&blocked_set, i)) {
sigaddset(&blocked_set, i);
sighold(i);
}
}
# else /* NO_SIGNAL_BLOCKING */
/* We will just ignore signals if the system doesn't have *
* the ability to block them. */
int i;
oset = blocked_set;
for (i = 1; i <= NSIG; ++i) {
if (sigismember(&set, i) && !sigismember(&blocked_set, i)) {
sigaddset(&blocked_set, i);
signal_ignore(i);
}
}
# endif /* SYSV_SIGNALS */
#endif /* POSIX_SIGNALS */
return oset;
}
/**/
#endif /* BSD_SIGNALS */
/* Unblock the signals in the given signal *
* set. Return the old signal set. */
/**/
mod_export sigset_t
signal_unblock(sigset_t set)
{
sigset_t oset;
#ifdef POSIX_SIGNALS
sigprocmask(SIG_UNBLOCK, &set, &oset);
#else
# ifdef BSD_SIGNALS
sigfillset(&oset);
oset = sigsetmask(oset);
sigsetmask(oset & ~set);
# else
# ifdef SYSV_SIGNALS
int i;
oset = blocked_set;
for (i = 1; i <= NSIG; ++i) {
if (sigismember(&set, i) && sigismember(&blocked_set, i)) {
sigdelset(&blocked_set, i);
sigrelse(i);
}
}
# else /* NO_SIGNAL_BLOCKING */
/* On systems that can't block signals, we are just ignoring them. So *
* to unblock signals, we just reenable the signal handler for them. */
int i;
oset = blocked_set;
for (i = 1; i <= NSIG; ++i) {
if (sigismember(&set, i) && sigismember(&blocked_set, i)) {
sigdelset(&blocked_set, i);
install_handler(i);
}
}
# endif /* SYSV_SIGNALS */
# endif /* BSD_SIGNALS */
#endif /* POSIX_SIGNALS */
return oset;
}
/* set the process signal mask to *
* be the given signal mask */
/**/
mod_export sigset_t
signal_setmask(sigset_t set)
{
sigset_t oset;
#ifdef POSIX_SIGNALS
sigprocmask(SIG_SETMASK, &set, &oset);
#else
# ifdef BSD_SIGNALS
oset = sigsetmask(set);
# else
# ifdef SYSV_SIGNALS
int i;
oset = blocked_set;
for (i = 1; i <= NSIG; ++i) {
if (sigismember(&set, i) && !sigismember(&blocked_set, i)) {
sigaddset(&blocked_set, i);
sighold(i);
} else if (!sigismember(&set, i) && sigismember(&blocked_set, i)) {
sigdelset(&blocked_set, i);
sigrelse(i);
}
}
# else /* NO_SIGNAL_BLOCKING */
int i;
oset = blocked_set;
for (i = 1; i < NSIG; ++i) {
if (sigismember(&set, i) && !sigismember(&blocked_set, i)) {
sigaddset(&blocked_set, i);
signal_ignore(i);
} else if (!sigismember(&set, i) && sigismember(&blocked_set, i)) {
sigdelset(&blocked_set, i);
install_handler(i);
}
}
# endif /* SYSV_SIGNALS */
# endif /* BSD_SIGNALS */
#endif /* POSIX_SIGNALS */
return oset;
}
#if defined(NO_SIGNAL_BLOCKING)
static int suspend_longjmp = 0;
static signal_jmp_buf suspend_jmp_buf;
#endif
/**/
int
signal_suspend(UNUSED(int sig), int wait_cmd)
{
int ret;
#if defined(POSIX_SIGNALS) || defined(BSD_SIGNALS)
sigset_t set;
# if defined(POSIX_SIGNALS) && defined(BROKEN_POSIX_SIGSUSPEND)
sigset_t oset;
# endif
sigemptyset(&set);
/* SIGINT from the terminal driver needs to interrupt "wait"
* and to cause traps to fire, but otherwise should not be
* handled by the shell until after any foreground job has
* a chance to decide whether to exit on that signal.
*/
if (!(wait_cmd || isset(TRAPSASYNC) ||
(sigtrapped[SIGINT] & ~ZSIG_IGNORED)))
sigaddset(&set, SIGINT);
#endif /* POSIX_SIGNALS || BSD_SIGNALS */
#ifdef POSIX_SIGNALS
# ifdef BROKEN_POSIX_SIGSUSPEND
sigprocmask(SIG_SETMASK, &set, &oset);
ret = pause();
sigprocmask(SIG_SETMASK, &oset, NULL);
# else /* not BROKEN_POSIX_SIGSUSPEND */
ret = sigsuspend(&set);
# endif /* BROKEN_POSIX_SIGSUSPEND */
#else /* not POSIX_SIGNALS */
# ifdef BSD_SIGNALS
ret = sigpause(set);
# else
# ifdef SYSV_SIGNALS
ret = sigpause(sig);
# else /* NO_SIGNAL_BLOCKING */
/* need to use signal_longjmp to make this race-free *
* between the child_unblock() and pause() */
if (signal_setjmp(suspend_jmp_buf) == 0) {
suspend_longjmp = 1; /* we want to signal_longjmp after catching signal */
child_unblock(); /* do we need to do wait_cmd stuff as well? */
ret = pause();
}
suspend_longjmp = 0; /* turn off using signal_longjmp since we are past *
* the pause() function. */
# endif /* SYSV_SIGNALS */
# endif /* BSD_SIGNALS */
#endif /* POSIX_SIGNALS */
return ret;
}
/* last signal we handled: race prone, or what? */
/**/
int last_signal;
/*
* Wait for any processes that have changed state.
*
* The main use for this is in the SIGCHLD handler. However,
* we also use it to pick up status changes of jobs when
* updating jobs.
*/
/**/
void
wait_for_processes(void)
{
/* keep WAITING until no more child processes to reap */
for (;;) {
/* save the errno, since WAIT may change it */
int old_errno = errno;
int status;
Job jn;
Process pn;
pid_t pid;
pid_t *procsubpid = &cmdoutpid;
int *procsubval = &cmdoutval;
int cont = 0;
struct execstack *es = exstack;
/*
* Reap the child process.
* If we want usage information, we need to use wait3.
*/
#if defined(HAVE_WAIT3) || defined(HAVE_WAITPID)
# ifdef WCONTINUED
# define WAITFLAGS (WNOHANG|WUNTRACED|WCONTINUED)
# else
# define WAITFLAGS (WNOHANG|WUNTRACED)
# endif
#endif
#ifdef HAVE_WAIT3
# ifdef HAVE_GETRUSAGE
struct rusage ru;
pid = wait3((void *)&status, WAITFLAGS, &ru);
# else
pid = wait3((void *)&status, WAITFLAGS, NULL);
# endif
#else
# ifdef HAVE_WAITPID
pid = waitpid(-1, &status, WAITFLAGS);
# else
pid = wait(&status);
# endif
#endif
if (!pid) /* no more children to reap */
break;
/* check if child returned was from process substitution */
for (;;) {
if (pid == *procsubpid) {
*procsubpid = 0;
if (WIFSIGNALED(status))
*procsubval = (0200 | WTERMSIG(status));
else
*procsubval = WEXITSTATUS(status);
use_cmdoutval = 1;
get_usage();
cont = 1;
break;
}
if (!es)
break;
procsubpid = &es->cmdoutpid;
procsubval = &es->cmdoutval;
es = es->next;
}
if (cont)
continue;
/* check for WAIT error */
if (pid == -1) {
if (errno != ECHILD)
zerr("wait failed: %e", errno);
/* WAIT changed errno, so restore the original */
errno = old_errno;
break;
}
/* This is necessary to be sure queueing_enabled > 0 when
* we enter printjob() from update_job(), so that we don't
* decrement to zero in should_report_time() and improperly
* run other handlers in the middle of processing this one */
queue_signals();
/*
* Find the process and job containing this pid and
* update it.
*/
if (findproc(pid, &jn, &pn, 0)) {
if (((jn->stat & STAT_BUILTIN) ||
(list_pipe &&
(thisjob == -1 ||
(jobtab[thisjob].stat & STAT_BUILTIN)))) &&
WIFSTOPPED(status) && WSTOPSIG(status) == SIGTSTP) {
killjb(jn, SIGCONT);
zwarn("job can't be suspended");
} else {
#if defined(HAVE_WAIT3) && defined(HAVE_GETRUSAGE)
struct timezone dummy_tz;
gettimeofday(&pn->endtime, &dummy_tz);
#ifdef WIFCONTINUED
if (WIFCONTINUED(status))
pn->status = SP_RUNNING;
else
#endif
pn->status = status;
pn->ti = ru;
#else
update_process(pn, status);
#endif
if (WIFEXITED(status) &&
pn->pid == jn->gleader &&
killpg(pn->pid, 0) == -1) {
jn->gleader = 0;
if (!(jn->stat & STAT_NOSTTY)) {
/*
* This PID was in control of the terminal;
* reclaim terminal now it has exited.
* It's still possible some future forked
* process of this job will become group
* leader, however.
*/
attachtty(mypgrp);
}
}
}
update_job(jn);
} else if (findproc(pid, &jn, &pn, 1)) {
pn->status = status;
update_job(jn);
} else {
/* If not found, update the shell record of time spent by
* children in sub processes anyway: otherwise, this
* will get added on to the next found process that
* terminates.
*/
get_usage();
}
/*
* Accumulate a list of older jobs. We only do this for
* background jobs, which is something in the job table
* that's not marked as in the current shell or as shell builtin
* and is not equal to the current foreground job.
*/
if (jn && !(jn->stat & (STAT_CURSH|STAT_BUILTIN)) &&
jn - jobtab != thisjob) {
int val = (WIFSIGNALED(status) ?
0200 | WTERMSIG(status) :
(WIFSTOPPED(status) ?
0200 | WEXITSTATUS(status) :
WEXITSTATUS(status)));
addbgstatus(pid, val);
}
unqueue_signals();
}
}
/* the signal handler */
/**/
mod_export void
zhandler(int sig)
{
sigset_t newmask, oldmask;
#if defined(NO_SIGNAL_BLOCKING)
int do_jump;
signal_jmp_buf jump_to;
#endif
last_signal = sig;
signal_process(sig);
sigfillset(&newmask);
/* Block all signals temporarily */
oldmask = signal_block(newmask);
#if defined(NO_SIGNAL_BLOCKING)
/* do we need to longjmp to signal_suspend */
do_jump = suspend_longjmp;
/* In case a SIGCHLD somehow arrives */
suspend_longjmp = 0;
/* Traps can cause nested signal_suspend() */
if (sig == SIGCHLD) {
if (do_jump) {
/* Copy suspend_jmp_buf */
jump_to = suspend_jmp_buf;
}
}
#endif
/* Are we queueing signals now? */
if (queueing_enabled) {
int temp_rear = ++queue_rear % MAX_QUEUE_SIZE;
DPUTS(temp_rear == queue_front, "BUG: signal queue full");
/* Make sure it's not full (extremely unlikely) */
if (temp_rear != queue_front) {
/* ok, not full, so add to queue */
queue_rear = temp_rear;
/* save signal caught */
signal_queue[queue_rear] = sig;
/* save current signal mask */
signal_mask_queue[queue_rear] = oldmask;
}
signal_reset(sig);
return;
}
/* Reset signal mask, signal traps ok now */
signal_setmask(oldmask);
switch (sig) {
case SIGCHLD:
wait_for_processes();
break;
case SIGPIPE:
if (!handletrap(SIGPIPE)) {
if (!interact)
_exit(SIGPIPE);
else if (!isatty(SHTTY)) {
stopmsg = 1;
zexit(SIGPIPE, 1);
}
}
break;
case SIGHUP:
if (!handletrap(SIGHUP)) {
stopmsg = 1;
zexit(SIGHUP, 1);
}
break;
case SIGINT:
if (!handletrap(SIGINT)) {
if ((isset(PRIVILEGED) || isset(RESTRICTED)) &&
isset(INTERACTIVE) && (noerrexit & NOERREXIT_SIGNAL))
zexit(SIGINT, 1);
if (list_pipe || chline || simple_pline) {
breaks = loops;
errflag |= ERRFLAG_INT;
inerrflush();
check_cursh_sig(SIGINT);
}
lastval = 128 + SIGINT;
}
break;
#ifdef SIGWINCH
case SIGWINCH:
adjustwinsize(1); /* check window size and adjust */
(void) handletrap(SIGWINCH);
break;
#endif
case SIGALRM:
if (!handletrap(SIGALRM)) {
int idle = ttyidlegetfn(NULL);
int tmout = getiparam("TMOUT");
if (idle >= 0 && idle < tmout)
alarm(tmout - idle);
else {
/*
* We want to exit now.
* Cancel all errors, including a user interrupt
* which is now redundant.
*/
errflag = noerrs = 0;
zwarn("timeout");
stopmsg = 1;
zexit(SIGALRM, 1);
}
}
break;
default:
(void) handletrap(sig);
break;
} /* end of switch(sig) */
signal_reset(sig);
/* This is used to make signal_suspend() race-free */
#if defined(NO_SIGNAL_BLOCKING)
if (do_jump)
signal_longjmp(jump_to, 1);
#endif
} /* handler */
/* SIGHUP any jobs left running */
/**/
void
killrunjobs(int from_signal)
{
int i, killed = 0;
if (unset(HUP))
return;
for (i = 1; i <= maxjob; i++)
if ((from_signal || i != thisjob) && (jobtab[i].stat & STAT_LOCKED) &&
!(jobtab[i].stat & STAT_NOPRINT) &&
!(jobtab[i].stat & STAT_STOPPED)) {
if (jobtab[i].gleader != getpid() &&
killpg(jobtab[i].gleader, SIGHUP) != -1)
killed++;
}
if (killed)
zwarn("warning: %d jobs SIGHUPed", killed);
}
/* send a signal to a job (simply involves kill if monitoring is on) */
/**/
int
killjb(Job jn, int sig)
{
Process pn;
int err = 0;
if (jobbing) {
if (jn->stat & STAT_SUPERJOB) {
if (sig == SIGCONT) {
for (pn = jobtab[jn->other].procs; pn; pn = pn->next)
if (killpg(pn->pid, sig) == -1)
if (kill(pn->pid, sig) == -1 && errno != ESRCH)
err = -1;
/*
* Note this does not kill the last process,
* which is assumed to be the one controlling the
* subjob, i.e. the forked zsh that was originally
* list_pipe_pid...
*/
for (pn = jn->procs; pn->next; pn = pn->next)
if (kill(pn->pid, sig) == -1 && errno != ESRCH)
err = -1;
/*
* ...we only continue that once the external processes
* currently associated with the subjob are finished.
*/
if (!jobtab[jn->other].procs && pn)
if (kill(pn->pid, sig) == -1 && errno != ESRCH)
err = -1;
return err;
}
if (killpg(jobtab[jn->other].gleader, sig) == -1 && errno != ESRCH)
err = -1;
if (killpg(jn->gleader, sig) == -1 && errno != ESRCH)
err = -1;
return err;
}
else
return killpg(jn->gleader, sig);
}
for (pn = jn->procs; pn; pn = pn->next) {
/*
* Do not kill this job's process if it's already dead as its
* pid could have been reused by the system.
* As the PID doesn't exist don't return an error.
*/
if (pn->status == SP_RUNNING || WIFSTOPPED(pn->status)) {
/*
* kill -0 on a job is pointless. We still call kill() for each process
* in case the user cares about it but we ignore its outcome.
*/
if ((err = kill(pn->pid, sig)) == -1 && errno != ESRCH && sig != 0)
return -1;
}
}
return err;
}
/*
* List for saving traps. We don't usually have that many traps
* at once, so just use a linked list.
*/
struct savetrap {
int sig, flags, local, posix;
void *list;
};
static LinkList savetraps;
static int dontsavetrap;
/*
* Save the current trap by copying it. This does nothing to
* the existing value of sigtrapped or siglists.
*/
static void
dosavetrap(int sig, int level)
{
struct savetrap *st;
st = (struct savetrap *)zalloc(sizeof(*st));
st->sig = sig;
st->local = level;
st->posix = (sig == SIGEXIT) ? exit_trap_posix : 0;
if ((st->flags = sigtrapped[sig]) & ZSIG_FUNC) {
/*
* Get the old function: this assumes we haven't added
* the new one yet.
*/
Shfunc shf, newshf = NULL;
if ((shf = (Shfunc)gettrapnode(sig, 1))) {
/* Copy the node for saving */
newshf = (Shfunc) zshcalloc(sizeof(*newshf));
newshf->node.nam = ztrdup(shf->node.nam);
newshf->node.flags = shf->node.flags;
newshf->funcdef = dupeprog(shf->funcdef, 0);
if (shf->node.flags & PM_LOADDIR) {
dircache_set(&newshf->filename, shf->filename);
} else {
newshf->filename = ztrdup(shf->filename);
}
if (shf->sticky) {
newshf->sticky = sticky_emulation_dup(shf->sticky, 0);
} else
newshf->sticky = 0;
if (shf->node.flags & PM_UNDEFINED)
newshf->funcdef->shf = newshf;
}
#ifdef DEBUG
else dputs("BUG: no function present with function trap flag set.");
#endif
DPUTS(siglists[sig], "BUG: function signal has eval list, too.");
st->list = newshf;
} else if (sigtrapped[sig]) {
st->list = siglists[sig] ? dupeprog(siglists[sig], 0) : NULL;
} else {
DPUTS(siglists[sig], "BUG: siglists not null for untrapped signal");
st->list = NULL;
}
if (!savetraps)
savetraps = znewlinklist();
/*
* Put this at the front of the list
*/
zinsertlinknode(savetraps, (LinkNode)savetraps, st);
}
/*
* Set a trap: note this does not handle manipulation of
* the function table for TRAPNAL functions.
*
* sig is the signal number.
*
* l is the list to be eval'd for a trap defined with the "trap"
* builtin and should be NULL for a function trap.
*
* flags includes any additional flags to be or'd into sigtrapped[sig],
* in particular ZSIG_FUNC; the basic flags will be assigned within
* settrap.
*/
/**/
mod_export int
settrap(int sig, Eprog l, int flags)
{
if (sig == -1)
return 1;
if (jobbing && (sig == SIGTTOU || sig == SIGTSTP || sig == SIGTTIN)) {
zerr("can't trap SIG%s in interactive shells", sigs[sig]);
return 1;
}
/*
* Call unsettrap() unconditionally, to make sure trap is saved
* if necessary.
*/
queue_signals();
unsettrap(sig);
DPUTS((flags & ZSIG_FUNC) && l,
"BUG: trap function has passed eval list, too");
siglists[sig] = l;
if (!(flags & ZSIG_FUNC) && empty_eprog(l)) {
sigtrapped[sig] = ZSIG_IGNORED;
if (sig && sig <= SIGCOUNT &&
#ifdef SIGWINCH
sig != SIGWINCH &&
#endif
sig != SIGCHLD)
signal_ignore(sig);
} else {
nsigtrapped++;
sigtrapped[sig] = ZSIG_TRAPPED;
if (sig && sig <= SIGCOUNT &&
#ifdef SIGWINCH
sig != SIGWINCH &&
#endif
sig != SIGCHLD)
install_handler(sig);
}
sigtrapped[sig] |= flags;
/*
* Note that introducing the locallevel does not affect whether
* sigtrapped[sig] is zero or not, i.e. a test without a mask
* works just the same.
*/
if (sig == SIGEXIT) {
/* Make POSIX behaviour of EXIT trap sticky */
exit_trap_posix = isset(POSIXTRAPS);
/* POSIX exit traps are not local. */
if (!exit_trap_posix)
sigtrapped[sig] |= (locallevel << ZSIG_SHIFT);
}
else
sigtrapped[sig] |= (locallevel << ZSIG_SHIFT);
unqueue_signals();
return 0;
}
/**/
void
unsettrap(int sig)
{
HashNode hn;
queue_signals();
hn = removetrap(sig);
if (hn)
shfunctab->freenode(hn);
unqueue_signals();
}
/**/
HashNode
removetrap(int sig)
{
int trapped;
if (sig == -1 ||
(jobbing && (sig == SIGTTOU || sig == SIGTSTP || sig == SIGTTIN)))
return NULL;
queue_signals();
trapped = sigtrapped[sig];
/*
* Note that we save the trap here even if there isn't an existing
* one, to aid in removing this one. However, if there's
* already one at the current locallevel we just overwrite it.
*
* Note we save EXIT traps based on the *current* setting of
* POSIXTRAPS --- so if there is POSIX EXIT trap set but
* we are in native mode it can be saved, replaced by a function
* trap, and then restored.
*/
if (!dontsavetrap &&
(sig == SIGEXIT ? !isset(POSIXTRAPS) : isset(LOCALTRAPS)) &&
locallevel &&
(!trapped || locallevel > (sigtrapped[sig] >> ZSIG_SHIFT)))
dosavetrap(sig, locallevel);
if (!trapped) {
unqueue_signals();
return NULL;
}
if (sigtrapped[sig] & ZSIG_TRAPPED)
nsigtrapped--;
sigtrapped[sig] = 0;
if (sig == SIGINT && interact) {
/* PWS 1995/05/16: added test for interactive, also noholdintr() *
* as subshells ignoring SIGINT have it blocked from delivery */
intr();
noholdintr();
} else if (sig == SIGHUP)
install_handler(sig);
else if (sig == SIGPIPE && interact && !forklevel)
install_handler(sig);
else if (sig && sig <= SIGCOUNT &&
#ifdef SIGWINCH
sig != SIGWINCH &&
#endif
sig != SIGCHLD)
signal_default(sig);
if (sig == SIGEXIT)
exit_trap_posix = 0;
/*
* At this point we free the appropriate structs. If we don't
* want that to happen then either the function should already have been
* removed from shfunctab, or the entry in siglists should have been set
* to NULL. This is no longer necessary for saving traps as that
 * copies the structures, so here we remove the originals.
* That causes a little inefficiency, but a good deal more reliability.
*/
if (trapped & ZSIG_FUNC) {
HashNode node = gettrapnode(sig, 1);
/*
* As in dosavetrap(), don't call removeshfuncnode() because
* that calls back into unsettrap();
*/
if (node)
removehashnode(shfunctab, node->nam);
unqueue_signals();
return node;
} else if (siglists[sig]) {
freeeprog(siglists[sig]);
siglists[sig] = NULL;
}
unqueue_signals();
return NULL;
}
/**/
void
starttrapscope(void)
{
/* No special SIGEXIT behaviour inside another trap. */
if (intrap)
return;
/*
* SIGEXIT needs to be restored at the current locallevel,
* so give it the next higher one. dosavetrap() is called
* automatically where necessary.
*/
if (sigtrapped[SIGEXIT] && !exit_trap_posix) {
locallevel++;
unsettrap(SIGEXIT);
locallevel--;
}
}
/*
* Reset traps after the end of a function: must be called after
* endparamscope() so that the locallevel has been decremented.
*/
/**/
void
endtrapscope(void)
{
LinkNode ln;
struct savetrap *st;
int exittr = 0;
void *exitfn = NULL;
/*
* Remember the exit trap, but don't run it until
* after all the other traps have been put back.
* Don't do this inside another trap.
*/
if (!intrap &&
!exit_trap_posix && (exittr = sigtrapped[SIGEXIT])) {
if (exittr & ZSIG_FUNC) {
exitfn = removehashnode(shfunctab, "TRAPEXIT");
} else {
exitfn = siglists[SIGEXIT];
siglists[SIGEXIT] = NULL;
}
if (sigtrapped[SIGEXIT] & ZSIG_TRAPPED)
nsigtrapped--;
sigtrapped[SIGEXIT] = 0;
}
if (savetraps) {
while ((ln = firstnode(savetraps)) &&
(st = (struct savetrap *) ln->dat) &&
st->local > locallevel) {
int sig = st->sig;
remnode(savetraps, ln);
if (st->flags && (st->list != NULL)) {
/* prevent settrap from saving this */
dontsavetrap++;
if (st->flags & ZSIG_FUNC)
settrap(sig, NULL, ZSIG_FUNC);
else
settrap(sig, (Eprog) st->list, 0);
if (sig == SIGEXIT)
exit_trap_posix = st->posix;
dontsavetrap--;
/*
* counting of nsigtrapped should presumably be handled
* in settrap...
*/
DPUTS((sigtrapped[sig] ^ st->flags) & ZSIG_TRAPPED,
"BUG: settrap didn't restore correct ZSIG_TRAPPED");
if ((sigtrapped[sig] = st->flags) & ZSIG_FUNC)
shfunctab->addnode(shfunctab, ((Shfunc)st->list)->node.nam,
(Shfunc) st->list);
} else if (sigtrapped[sig]) {
/*
* Don't restore the old state if someone has set a
* POSIX-style exit trap --- allow this to propagate.
*/
if (sig != SIGEXIT || !exit_trap_posix)
unsettrap(sig);
}
zfree(st, sizeof(*st));
}
}
if (exittr) {
/*
* We already made sure this wasn't set as a POSIX exit trap.
* We respect the user's intention when the trap in question
* was set.
*/
dotrapargs(SIGEXIT, &exittr, exitfn);
if (exittr & ZSIG_FUNC)
shfunctab->freenode((HashNode)exitfn);
else
freeeprog(exitfn);
}
DPUTS(!locallevel && savetraps && firstnode(savetraps),
"BUG: still saved traps outside all function scope");
}
/*
* Decide whether a trap needs handling.
* If so, see if the trap should be run now or queued.
* Return 1 if the trap has been or will be handled.
* This only needs to be called in place of dotrap() in the
* signal handler, since it's only while waiting for children
* to exit that we queue traps.
*/
/**/
static int
handletrap(int sig)
{
if (!sigtrapped[sig])
return 0;
if (trap_queueing_enabled)
{
/* Code borrowed from signal queueing */
int temp_rear = ++trap_queue_rear % MAX_QUEUE_SIZE;
DPUTS(temp_rear == trap_queue_front, "BUG: trap queue full");
/* If queue is not full... */
if (temp_rear != trap_queue_front) {
trap_queue_rear = temp_rear;
trap_queue[trap_queue_rear] = sig;
}
return 1;
}
dotrap(sig);
if (sig == SIGALRM)
{
int tmout;
/*
* Reset the alarm.
* It seems slightly more natural to do this when the
* trap is run, rather than when it's queued, since
* the user doesn't see the latter.
*/
if ((tmout = getiparam("TMOUT")))
alarm(tmout);
}
return 1;
}
/*
* Queue traps if they shouldn't be run asynchronously, i.e.
* we're not in the wait builtin and TRAPSASYNC isn't set, when
* waiting for children to exit.
*
* Note that unlike signal queuing this should only be called
* in single matching pairs and can't be nested. It is
* only needed when waiting for a job or process to finish.
*
* There is presumably a race setting this up: we shouldn't be running
* traps between forking a foreground process and this point, either.
*/
/**/
void
queue_traps(int wait_cmd)
{
if (!isset(TRAPSASYNC) && !wait_cmd) {
/*
* Traps need to be handled synchronously, so
* enable queueing.
*/
trap_queueing_enabled = 1;
}
}
/*
* Disable trap queuing and run the traps.
*/
/**/
void
unqueue_traps(void)
{
trap_queueing_enabled = 0;
while (trap_queue_front != trap_queue_rear) {
trap_queue_front = (trap_queue_front + 1) % MAX_QUEUE_SIZE;
(void) handletrap(trap_queue[trap_queue_front]);
}
}
/* Execute a trap function for a given signal, possibly
* with non-standard sigtrapped & siglists values
*/
/* Are we already executing a trap? */
/**/
int intrap;
/* Is the current trap a function? */
/**/
int trapisfunc;
/*
* If the current trap is not a function, at what function depth
* did the trap get called?
*/
/**/
int traplocallevel;
/*
* sig is the signal number.
* *sigtr is the value to be taken as the field in sigtrapped (since
* that may have changed by this point if we are exiting).
* sigfn is an Eprog with a non-function eval list, or a Shfunc
* with a function trap. It may be NULL with an ignored signal.
*/
/**/
static void
dotrapargs(int sig, int *sigtr, void *sigfn)
{
LinkList args;
char *name, num[4];
int obreaks = breaks;
int oretflag = retflag;
int olastval = lastval;
int isfunc;
int traperr, new_trap_state, new_trap_return;
/* if signal is being ignored or the trap function *
* is NULL, then return *
* *
* Also return if errflag is set. In fact, the code in the *
* function will test for this, but this way we keep status flags *
* intact without working too hard. Special cases (e.g. calling *
* a trap for SIGINT after the error flag was set) are handled *
* by the calling code. (PWS 1995/06/08). *
* *
* This test is now replicated in dotrap(). */
if ((*sigtr & ZSIG_IGNORED) || !sigfn || errflag)
return;
/*
* Never execute special (synchronous) traps inside other traps.
* This can cause unexpected code execution when more than one
* of these is set.
*
* The down side is that it's harder to debug traps. I don't think
* that's a big issue.
*/
if (intrap) {
switch (sig) {
case SIGEXIT:
case SIGDEBUG:
case SIGZERR:
return;
}
}
queue_signals(); /* Any time we manage memory or global state */
intrap++;
*sigtr |= ZSIG_IGNORED;
zcontext_save();
/* execsave will save the old trap_return and trap_state */
execsave();
breaks = retflag = 0;
traplocallevel = locallevel;
runhookdef(BEFORETRAPHOOK, NULL);
if (*sigtr & ZSIG_FUNC) {
int osc = sfcontext, old_incompfunc = incompfunc;
HashNode hn = gettrapnode(sig, 0);
args = znewlinklist();
/*
* In case of multiple names, try to get
* a hint of the name in use from the function table.
* In special cases, e.g. EXIT traps, the function
* has already been removed. Then it's OK to
* use the standard name.
*/
if (hn) {
name = ztrdup(hn->nam);
} else {
name = (char *) zalloc(5 + strlen(sigs[sig]));
sprintf(name, "TRAP%s", sigs[sig]);
}
zaddlinknode(args, name);
sprintf(num, "%d", sig);
zaddlinknode(args, num);
trap_return = -1; /* incremented by doshfunc */
trap_state = TRAP_STATE_PRIMED;
trapisfunc = isfunc = 1;
sfcontext = SFC_SIGNAL;
incompfunc = 0;
doshfunc((Shfunc)sigfn, args, 1); /* manages signal queueing */
sfcontext = osc;
incompfunc= old_incompfunc;
freelinklist(args, (FreeFunc) NULL);
zsfree(name);
} else {
trap_return = -2; /* not incremented, used at current level */
trap_state = TRAP_STATE_PRIMED;
trapisfunc = isfunc = 0;
execode((Eprog)sigfn, 1, 0, "trap"); /* manages signal queueing */
}
runhookdef(AFTERTRAPHOOK, NULL);
traperr = errflag;
/* Grab values before they are restored */
new_trap_state = trap_state;
new_trap_return = trap_return;
execrestore();
zcontext_restore();
if (new_trap_state == TRAP_STATE_FORCE_RETURN &&
/* zero return from function isn't special */
!(isfunc && new_trap_return == 0)) {
if (isfunc) {
breaks = loops;
/*
* For SIGINT we behave the same as the default behaviour
* i.e. we set the error bit indicating an interrupt.
* We do this with SIGQUIT, too, even though we don't
* handle SIGQUIT by default. That's to try to make
* it behave a bit more like its normal behaviour when
* the trap handler has told us that's what it wants.
*/
if (sig == SIGINT || sig == SIGQUIT)
errflag |= ERRFLAG_INT;
else
errflag |= ERRFLAG_ERROR;
}
lastval = new_trap_return;
/* return triggered */
retflag = 1;
} else {
if (traperr && !EMULATION(EMULATE_SH))
lastval = 1;
else {
/*
* With no explicit forced return, we keep the
* lastval from before the trap ran.
*/
lastval = olastval;
}
if (try_tryflag) {
if (traperr)
errflag |= ERRFLAG_ERROR;
else
errflag &= ~ERRFLAG_ERROR;
}
breaks += obreaks;
/* return not triggered: restore old flag */
retflag = oretflag;
if (breaks > loops)
breaks = loops;
}
/*
* If zle was running while the trap was executed, see if we
* need to restore the display.
*/
if (zleactive && resetneeded)
zleentry(ZLE_CMD_REFRESH);
if (*sigtr != ZSIG_IGNORED)
*sigtr &= ~ZSIG_IGNORED;
intrap--;
unqueue_signals();
}
/* Standard call to execute a trap for a given signal. */
/**/
void
dotrap(int sig)
{
void *funcprog;
int q = queue_signal_level();
if (sigtrapped[sig] & ZSIG_FUNC) {
HashNode hn = gettrapnode(sig, 0);
if (hn)
funcprog = hn;
else {
#ifdef DEBUG
dputs("BUG: running function trap which has escaped.");
#endif
funcprog = NULL;
}
} else
funcprog = siglists[sig];
/*
* Copied from dotrapargs().
* (In fact, the gain from duplicating this appears to be virtually
* zero. Not sure why it's here.)
*/
if ((sigtrapped[sig] & ZSIG_IGNORED) || !funcprog || errflag)
return;
dont_queue_signals();
if (sig == SIGEXIT)
++in_exit_trap;
dotrapargs(sig, sigtrapped+sig, funcprog);
if (sig == SIGEXIT)
--in_exit_trap;
restore_queue_signals(q);
}
<|start_filename|>zmodules/Src/builtin.c<|end_filename|>
/*
* builtin.c - builtin commands
*
* This file is part of zsh, the Z shell.
*
* Copyright (c) 1992-1997 <NAME>
* All rights reserved.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and to distribute modified versions of this software for any
* purpose, provided that the above copyright notice and the following
* two paragraphs appear in all copies of this software.
*
* In no event shall <NAME> or the Zsh Development Group be liable
* to any party for direct, indirect, special, incidental, or consequential
* damages arising out of the use of this software and its documentation,
* even if <NAME> and the Zsh Development Group have been advised of
* the possibility of such damage.
*
* <NAME> and the Zsh Development Group specifically disclaim any
* warranties, including, but not limited to, the implied warranties of
* merchantability and fitness for a particular purpose. The software
* provided hereunder is on an "as is" basis, and <NAME> and the
* Zsh Development Group have no obligation to provide maintenance,
* support, updates, enhancements, or modifications.
*
*/
/* this is defined so we get the prototype for open_memstream */
#define _GNU_SOURCE 1
#include "zsh.mdh"
#include "builtin.pro"
/* Builtins in the main executable */
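/*
 * Each BUILTIN() entry gives, in order: the command name, flags, the
 * handler function, minimum and maximum argument counts (-1 meaning
 * unlimited), a function identifier passed to the handler, the option
 * string and any options enabled by default.  In the option string a
 * character followed by ':' takes a mandatory argument, '::' an
 * optional argument in the same word, and ':%' an optional numeric
 * argument in the same or the next word; see execbuiltin() below.
 */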
static struct builtin builtins[] =
{
BIN_PREFIX("-", BINF_DASH),
BIN_PREFIX("builtin", BINF_BUILTIN),
BIN_PREFIX("command", BINF_COMMAND),
BIN_PREFIX("exec", BINF_EXEC),
BIN_PREFIX("noglob", BINF_NOGLOB),
BUILTIN("[", BINF_HANDLES_OPTS, bin_test, 0, -1, BIN_BRACKET, NULL, NULL),
BUILTIN(".", BINF_PSPECIAL, bin_dot, 1, -1, 0, NULL, NULL),
BUILTIN(":", BINF_PSPECIAL, bin_true, 0, -1, 0, NULL, NULL),
BUILTIN("alias", BINF_MAGICEQUALS | BINF_PLUSOPTS, bin_alias, 0, -1, 0, "Lgmrs", NULL),
BUILTIN("autoload", BINF_PLUSOPTS, bin_functions, 0, -1, 0, "dmktrRTUwWXz", "u"),
BUILTIN("bg", 0, bin_fg, 0, -1, BIN_BG, NULL, NULL),
BUILTIN("break", BINF_PSPECIAL, bin_break, 0, 1, BIN_BREAK, NULL, NULL),
BUILTIN("bye", 0, bin_break, 0, 1, BIN_EXIT, NULL, NULL),
BUILTIN("cd", BINF_SKIPINVALID | BINF_SKIPDASH | BINF_DASHDASHVALID, bin_cd, 0, 2, BIN_CD, "qsPL", NULL),
BUILTIN("chdir", BINF_SKIPINVALID | BINF_SKIPDASH | BINF_DASHDASHVALID, bin_cd, 0, 2, BIN_CD, "qsPL", NULL),
BUILTIN("continue", BINF_PSPECIAL, bin_break, 0, 1, BIN_CONTINUE, NULL, NULL),
BUILTIN("declare", BINF_PLUSOPTS | BINF_MAGICEQUALS | BINF_PSPECIAL | BINF_ASSIGN, (HandlerFunc)bin_typeset, 0, -1, 0, "AE:%F:%HL:%R:%TUZ:%afghi:%klmp:%rtuxz", NULL),
BUILTIN("dirs", 0, bin_dirs, 0, -1, 0, "clpv", NULL),
BUILTIN("disable", 0, bin_enable, 0, -1, BIN_DISABLE, "afmprs", NULL),
BUILTIN("disown", 0, bin_fg, 0, -1, BIN_DISOWN, NULL, NULL),
BUILTIN("echo", BINF_SKIPINVALID, bin_print, 0, -1, BIN_ECHO, "neE", "-"),
BUILTIN("emulate", 0, bin_emulate, 0, -1, 0, "lLR", NULL),
BUILTIN("enable", 0, bin_enable, 0, -1, BIN_ENABLE, "afmprs", NULL),
BUILTIN("eval", BINF_PSPECIAL, bin_eval, 0, -1, BIN_EVAL, NULL, NULL),
BUILTIN("exit", BINF_PSPECIAL, bin_break, 0, 1, BIN_EXIT, NULL, NULL),
BUILTIN("export", BINF_PLUSOPTS | BINF_MAGICEQUALS | BINF_PSPECIAL | BINF_ASSIGN, (HandlerFunc)bin_typeset, 0, -1, 0, "E:%F:%HL:%R:%TUZ:%afhi:%lp:%rtu", "xg"),
BUILTIN("false", 0, bin_false, 0, -1, 0, NULL, NULL),
/*
* We used to behave as if the argument to -e was optional.
* But that's actually not useful, so it's more consistent to
* cause an error.
*/
BUILTIN("fc", 0, bin_fc, 0, -1, BIN_FC, "aAdDe:EfiIlLmnpPrRt:W", NULL),
BUILTIN("fg", 0, bin_fg, 0, -1, BIN_FG, NULL, NULL),
BUILTIN("float", BINF_PLUSOPTS | BINF_MAGICEQUALS | BINF_PSPECIAL | BINF_ASSIGN, (HandlerFunc)bin_typeset, 0, -1, 0, "E:%F:%HL:%R:%Z:%ghlp:%rtux", "E"),
BUILTIN("functions", BINF_PLUSOPTS, bin_functions, 0, -1, 0, "kmMstTuUWx:z", NULL),
BUILTIN("getln", 0, bin_read, 0, -1, 0, "ecnAlE", "zr"),
BUILTIN("getopts", 0, bin_getopts, 2, -1, 0, NULL, NULL),
BUILTIN("hash", BINF_MAGICEQUALS, bin_hash, 0, -1, 0, "Ldfmrv", NULL),
#ifdef ZSH_HASH_DEBUG
BUILTIN("hashinfo", 0, bin_hashinfo, 0, 0, 0, NULL, NULL),
#endif
BUILTIN("history", 0, bin_fc, 0, -1, BIN_FC, "adDEfiLmnpPrt:", "l"),
BUILTIN("integer", BINF_PLUSOPTS | BINF_MAGICEQUALS | BINF_PSPECIAL | BINF_ASSIGN, (HandlerFunc)bin_typeset, 0, -1, 0, "HL:%R:%Z:%ghi:%lp:%rtux", "i"),
BUILTIN("jobs", 0, bin_fg, 0, -1, BIN_JOBS, "dlpZrs", NULL),
BUILTIN("kill", BINF_HANDLES_OPTS, bin_kill, 0, -1, 0, NULL, NULL),
BUILTIN("let", 0, bin_let, 1, -1, 0, NULL, NULL),
BUILTIN("local", BINF_PLUSOPTS | BINF_MAGICEQUALS | BINF_PSPECIAL | BINF_ASSIGN, (HandlerFunc)bin_typeset, 0, -1, 0, "AE:%F:%HL:%R:%TUZ:%ahi:%lp:%rtux", NULL),
BUILTIN("log", 0, bin_log, 0, 0, 0, NULL, NULL),
BUILTIN("logout", 0, bin_break, 0, 1, BIN_LOGOUT, NULL, NULL),
#if defined(ZSH_MEM) & defined(ZSH_MEM_DEBUG)
BUILTIN("mem", 0, bin_mem, 0, 0, 0, "v", NULL),
#endif
#if defined(ZSH_PAT_DEBUG)
BUILTIN("patdebug", 0, bin_patdebug, 1, -1, 0, "p", NULL),
#endif
BUILTIN("popd", BINF_SKIPINVALID | BINF_SKIPDASH | BINF_DASHDASHVALID, bin_cd, 0, 1, BIN_POPD, "q", NULL),
BUILTIN("print", BINF_PRINTOPTS, bin_print, 0, -1, BIN_PRINT, "abcC:Df:ilmnNoOpPrRsSu:v:x:X:z-", NULL),
BUILTIN("printf", BINF_SKIPINVALID | BINF_SKIPDASH, bin_print, 1, -1, BIN_PRINTF, "v:", NULL),
BUILTIN("pushd", BINF_SKIPINVALID | BINF_SKIPDASH | BINF_DASHDASHVALID, bin_cd, 0, 2, BIN_PUSHD, "qsPL", NULL),
BUILTIN("pushln", 0, bin_print, 0, -1, BIN_PRINT, NULL, "-nz"),
BUILTIN("pwd", 0, bin_pwd, 0, 0, 0, "rLP", NULL),
BUILTIN("r", 0, bin_fc, 0, -1, BIN_R, "IlLnr", NULL),
BUILTIN("read", 0, bin_read, 0, -1, 0, "cd:ek:%lnpqrst:%zu:AE", NULL),
BUILTIN("readonly", BINF_PLUSOPTS | BINF_MAGICEQUALS | BINF_PSPECIAL | BINF_ASSIGN, (HandlerFunc)bin_typeset, 0, -1, BIN_READONLY, "AE:%F:%HL:%R:%TUZ:%afghi:%lptux", "r"),
BUILTIN("rehash", 0, bin_hash, 0, 0, 0, "df", "r"),
BUILTIN("return", BINF_PSPECIAL, bin_break, 0, 1, BIN_RETURN, NULL, NULL),
BUILTIN("set", BINF_PSPECIAL | BINF_HANDLES_OPTS, bin_set, 0, -1, 0, NULL, NULL),
BUILTIN("setopt", 0, bin_setopt, 0, -1, BIN_SETOPT, NULL, NULL),
BUILTIN("shift", BINF_PSPECIAL, bin_shift, 0, -1, 0, "p", NULL),
BUILTIN("source", BINF_PSPECIAL, bin_dot, 1, -1, 0, NULL, NULL),
BUILTIN("suspend", 0, bin_suspend, 0, 0, 0, "f", NULL),
BUILTIN("test", BINF_HANDLES_OPTS, bin_test, 0, -1, BIN_TEST, NULL, NULL),
BUILTIN("ttyctl", 0, bin_ttyctl, 0, 0, 0, "fu", NULL),
BUILTIN("times", BINF_PSPECIAL, bin_times, 0, 0, 0, NULL, NULL),
BUILTIN("trap", BINF_PSPECIAL | BINF_HANDLES_OPTS, bin_trap, 0, -1, 0, NULL, NULL),
BUILTIN("true", 0, bin_true, 0, -1, 0, NULL, NULL),
BUILTIN("type", 0, bin_whence, 0, -1, 0, "ampfsSw", "v"),
BUILTIN("typeset", BINF_PLUSOPTS | BINF_MAGICEQUALS | BINF_PSPECIAL | BINF_ASSIGN, (HandlerFunc)bin_typeset, 0, -1, 0, "AE:%F:%HL:%R:%TUZ:%afghi:%klp:%rtuxmz", NULL),
BUILTIN("umask", 0, bin_umask, 0, 1, 0, "S", NULL),
BUILTIN("unalias", 0, bin_unhash, 0, -1, BIN_UNALIAS, "ams", NULL),
BUILTIN("unfunction", 0, bin_unhash, 1, -1, BIN_UNFUNCTION, "m", "f"),
BUILTIN("unhash", 0, bin_unhash, 1, -1, BIN_UNHASH, "adfms", NULL),
BUILTIN("unset", BINF_PSPECIAL, bin_unset, 1, -1, BIN_UNSET, "fmv", NULL),
BUILTIN("unsetopt", 0, bin_setopt, 0, -1, BIN_UNSETOPT, NULL, NULL),
BUILTIN("wait", 0, bin_fg, 0, -1, BIN_WAIT, NULL, NULL),
BUILTIN("whence", 0, bin_whence, 0, -1, 0, "acmpvfsSwx:", NULL),
BUILTIN("where", 0, bin_whence, 0, -1, 0, "pmsSwx:", "ca"),
BUILTIN("which", 0, bin_whence, 0, -1, 0, "ampsSwx:", "c"),
BUILTIN("zmodload", 0, bin_zmodload, 0, -1, 0, "AFRILP:abcfdilmpsue", NULL),
BUILTIN("zcompile", 0, bin_zcompile, 0, -1, 0, "tUMRcmzka", NULL),
};
/****************************************/
/* Builtin Command Hash Table Functions */
/****************************************/
/* hash table containing builtin commands */
/**/
mod_export HashTable builtintab;
/**/
void
createbuiltintable(void)
{
builtintab = newhashtable(85, "builtintab", NULL);
builtintab->hash = hasher;
builtintab->emptytable = NULL;
builtintab->filltable = NULL;
builtintab->cmpnodes = strcmp;
builtintab->addnode = addhashnode;
builtintab->getnode = gethashnode;
builtintab->getnode2 = gethashnode2;
builtintab->removenode = removehashnode;
builtintab->disablenode = disablehashnode;
builtintab->enablenode = enablehashnode;
builtintab->freenode = freebuiltinnode;
builtintab->printnode = printbuiltinnode;
(void)addbuiltins("zsh", builtins, sizeof(builtins)/sizeof(*builtins));
}
/* Print a builtin */
/**/
static void
printbuiltinnode(HashNode hn, int printflags)
{
Builtin bn = (Builtin) hn;
if (printflags & PRINT_WHENCE_WORD) {
printf("%s: builtin\n", bn->node.nam);
return;
}
if (printflags & PRINT_WHENCE_CSH) {
printf("%s: shell built-in command\n", bn->node.nam);
return;
}
if (printflags & PRINT_WHENCE_VERBOSE) {
printf("%s is a shell builtin\n", bn->node.nam);
return;
}
/* default is name only */
printf("%s\n", bn->node.nam);
}
/**/
static void
freebuiltinnode(HashNode hn)
{
Builtin bn = (Builtin) hn;
if(!(bn->node.flags & BINF_ADDED)) {
zsfree(bn->node.nam);
zsfree(bn->optstr);
zfree(bn, sizeof(struct builtin));
}
}
/**/
void
init_builtins(void)
{
if (!EMULATION(EMULATE_ZSH)) {
HashNode hn = reswdtab->getnode2(reswdtab, "repeat");
if (hn)
reswdtab->disablenode(hn, 0);
}
}
/* Make sure we have space for a new option and increment. */
#define OPT_ALLOC_CHUNK 16
/**/
static int
new_optarg(Options ops)
{
/* Argument index must be a non-zero 6-bit number. */
if (ops->argscount == 63)
return 1;
if (ops->argsalloc == ops->argscount) {
char **newptr =
(char **)zhalloc((ops->argsalloc + OPT_ALLOC_CHUNK) *
sizeof(char *));
if (ops->argsalloc)
memcpy(newptr, ops->args, ops->argsalloc * sizeof(char *));
ops->args = newptr;
ops->argsalloc += OPT_ALLOC_CHUNK;
}
ops->argscount++;
return 0;
}
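/*
 * Option parsing below packs its results into ops.ind[]: the bottom two
 * bits record whether the option was given with '-' (1) or '+' (2), and
 * if the option took an argument its 1-based index into ops.args is
 * stored in the remaining bits (hence the 6-bit limit checked above).
 */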
/* execute a builtin handler function after parsing the arguments */
/**/
int
execbuiltin(LinkList args, LinkList assigns, Builtin bn)
{
char *pp, *name, *optstr;
int flags, argc, execop, xtr = isset(XTRACE);
struct options ops;
/* initialise options structure */
memset(ops.ind, 0, MAX_OPS*sizeof(unsigned char));
ops.args = NULL;
ops.argscount = ops.argsalloc = 0;
/* initialize some local variables */
name = (char *) ugetnode(args);
if (!bn->handlerfunc) {
DPUTS(1, "Missing builtin detected too late");
deletebuiltin(bn->node.nam);
return 1;
}
/* get some information about the command */
flags = bn->node.flags;
optstr = bn->optstr;
/* Set up the argument list. */
/* count the arguments */
argc = countlinknodes(args);
{
/*
* Keep all arguments, including options, in an array.
* We don't actually need the option part of the argument
* after option processing, but it makes XTRACE output
* much simpler.
*/
VARARR(char *, argarr, argc + 1);
char **argv;
/*
* Get the actual arguments, into argv. Remember argarr
* may be an array declaration, depending on the compiler.
*/
argv = argarr;
while ((*argv++ = (char *)ugetnode(args)));
argv = argarr;
/* Sort out the options. */
if (optstr) {
char *arg = *argv;
int sense; /* 1 for -x, 0 for +x */
/* while arguments look like options ... */
while (arg &&
/* Must begin with - or maybe + */
((sense = (*arg == '-')) ||
((flags & BINF_PLUSOPTS) && *arg == '+'))) {
/* Digits aren't arguments unless the command says they are. */
if (!(flags & BINF_KEEPNUM) && idigit(arg[1]))
break;
/* For cd and friends, a single dash is not an option. */
if ((flags & BINF_SKIPDASH) && !arg[1])
break;
if ((flags & BINF_DASHDASHVALID) && !strcmp(arg, "--")) {
/*
* Need to skip this before checking whether this is
* really an option.
*/
argv++;
break;
}
/*
* Unrecognised options to echo etc. are not really
* options.
*
* Note this flag is not smart enough to handle option
* arguments. In fact, ideally it shouldn't be added
* to any new builtins, to preserve standard option
* handling as much as possible.
*/
if (flags & BINF_SKIPINVALID) {
char *p = arg;
while (*++p && strchr(optstr, (int) *p));
if (*p)
break;
}
/* handle -- or - (ops.ind['-']), and +
* (ops.ind['-'] and ops.ind['+']) */
if (arg[1] == '-')
arg++;
if (!arg[1]) {
ops.ind['-'] = 1;
if (!sense)
ops.ind['+'] = 1;
}
/* save options in ops, as long as they are in bn->optstr */
while (*++arg) {
char *optptr;
if ((optptr = strchr(optstr, execop = (int)*arg))) {
ops.ind[(int)*arg] = (sense) ? 1 : 2;
if (optptr[1] == ':') {
char *argptr = NULL;
if (optptr[2] == ':') {
if (arg[1])
argptr = arg+1;
/* Optional argument in same word*/
} else if (optptr[2] == '%') {
/* Optional numeric argument in same
* or next word. */
if (arg[1] && idigit(arg[1]))
argptr = arg+1;
else if (argv[1] && idigit(*argv[1]))
argptr = arg = *++argv;
} else {
/* Mandatory argument */
if (arg[1])
argptr = arg+1;
else if ((arg = *++argv))
argptr = arg;
else {
zwarnnam(name, "argument expected: -%c",
execop);
return 1;
}
}
if (argptr) {
if (new_optarg(&ops)) {
zwarnnam(name,
"too many option arguments");
return 1;
}
ops.ind[execop] |= ops.argscount << 2;
ops.args[ops.argscount-1] = argptr;
while (arg[1])
arg++;
}
}
} else
break;
}
/* The above loop may have exited on an invalid option. (We *
* assume that any option requiring metafication is invalid.) */
if (*arg) {
if(*arg == Meta)
*++arg ^= 32;
zwarnnam(name, "bad option: %c%c", "+-"[sense], *arg);
return 1;
}
arg = *++argv;
/* for the "print" builtin, the options after -R are treated as
options to "echo" */
if ((flags & BINF_PRINTOPTS) && ops.ind['R'] &&
!ops.ind['f']) {
optstr = "ne";
flags |= BINF_SKIPINVALID;
}
/* the option -- indicates the end of the options */
if (ops.ind['-'])
break;
}
} else if (!(flags & BINF_HANDLES_OPTS) && *argv &&
!strcmp(*argv, "--")) {
ops.ind['-'] = 1;
argv++;
}
/* handle built-in options, for overloaded handler functions */
if ((pp = bn->defopts)) {
while (*pp) {
/* only if not already set */
if (!ops.ind[(int)*pp])
ops.ind[(int)*pp] = 1;
pp++;
}
}
/* Fix the argument count by subtracting option arguments */
argc -= argv - argarr;
if (errflag) {
errflag &= ~ERRFLAG_ERROR;
return 1;
}
/* check that the argument count lies within the specified bounds */
if (argc < bn->minargs || (argc > bn->maxargs && bn->maxargs != -1)) {
zwarnnam(name, (argc < bn->minargs)
? "not enough arguments" : "too many arguments");
return 1;
}
/* display execution trace information, if required */
if (xtr) {
/* Use full argument list including options for trace output */
char **fullargv = argarr;
printprompt4();
fprintf(xtrerr, "%s", name);
while (*fullargv) {
fputc(' ', xtrerr);
quotedzputs(*fullargv++, xtrerr);
}
if (assigns) {
LinkNode node;
for (node = firstnode(assigns); node; incnode(node)) {
Asgment asg = (Asgment)node;
fputc(' ', xtrerr);
quotedzputs(asg->name, xtrerr);
if (asg->flags & ASG_ARRAY) {
fprintf(xtrerr, "=(");
if (asg->value.array) {
if (asg->flags & ASG_KEY_VALUE) {
LinkNode keynode, valnode;
keynode = firstnode(asg->value.array);
for (;;) {
if (!keynode)
break;
valnode = nextnode(keynode);
if (!valnode)
break;
fputc('[', xtrerr);
quotedzputs((char *)getdata(keynode),
xtrerr);
fprintf(xtrerr, "]=");

quotedzputs((char *)getdata(valnode),
xtrerr);
keynode = nextnode(valnode);
}
} else {
LinkNode arrnode;
for (arrnode = firstnode(asg->value.array);
arrnode;
incnode(arrnode)) {
fputc(' ', xtrerr);
quotedzputs((char *)getdata(arrnode),
xtrerr);
}
}
}
fprintf(xtrerr, " )");
} else if (asg->value.scalar) {
fputc('=', xtrerr);
quotedzputs(asg->value.scalar, xtrerr);
}
}
}
fputc('\n', xtrerr);
fflush(xtrerr);
}
/* call the handler function, and return its return value */
if (flags & BINF_ASSIGN)
{
/*
* Takes two sets of arguments.
*/
HandlerFuncAssign assignfunc = (HandlerFuncAssign)bn->handlerfunc;
return (*(assignfunc)) (name, argv, assigns, &ops, bn->funcid);
}
else
{
return (*(bn->handlerfunc)) (name, argv, &ops, bn->funcid);
}
}
}
/* Enable/disable an element in one of the internal hash tables. *
* With no arguments, it lists all the currently enabled/disabled *
* elements in that particular hash table. */
/**/
int
bin_enable(char *name, char **argv, Options ops, int func)
{
HashTable ht;
HashNode hn;
ScanFunc scanfunc;
Patprog pprog;
int flags1 = 0, flags2 = 0;
int match = 0, returnval = 0;
/* Find out which hash table we are working with. */
if (OPT_ISSET(ops,'p')) {
return pat_enables(name, argv, func == BIN_ENABLE);
} else if (OPT_ISSET(ops,'f'))
ht = shfunctab;
else if (OPT_ISSET(ops,'r'))
ht = reswdtab;
else if (OPT_ISSET(ops,'s'))
ht = sufaliastab;
else if (OPT_ISSET(ops,'a'))
ht = aliastab;
else
ht = builtintab;
/* Do we want to enable or disable? */
if (func == BIN_ENABLE) {
flags2 = DISABLED;
scanfunc = ht->enablenode;
} else {
flags1 = DISABLED;
scanfunc = ht->disablenode;
}
/* Given no arguments, print the names of the enabled/disabled elements *
* in this hash table. If func == BIN_ENABLE, then scanhashtable will *
* print nodes NOT containing the DISABLED flag, else scanhashtable will *
* print nodes containing the DISABLED flag. */
if (!*argv) {
queue_signals();
scanhashtable(ht, 1, flags1, flags2, ht->printnode, 0);
unqueue_signals();
return 0;
}
/* With -m option, treat arguments as glob patterns. */
if (OPT_ISSET(ops,'m')) {
for (; *argv; argv++) {
queue_signals();
/* parse pattern */
tokenize(*argv);
if ((pprog = patcompile(*argv, PAT_STATIC, 0)))
match += scanmatchtable(ht, pprog, 0, 0, 0, scanfunc, 0);
else {
untokenize(*argv);
zwarnnam(name, "bad pattern : %s", *argv);
returnval = 1;
}
unqueue_signals();
}
/* If we didn't match anything, we return 1. */
if (!match)
returnval = 1;
return returnval;
}
/* Take arguments literally -- do not glob */
queue_signals();
for (; *argv; argv++) {
if ((hn = ht->getnode2(ht, *argv))) {
scanfunc(hn, 0);
} else {
zwarnnam(name, "no such hash table element: %s", *argv);
returnval = 1;
}
}
unqueue_signals();
return returnval;
}
/* set: either set the shell options, or set the shell arguments, *
* or declare an array, or show various things */
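/*
 * Typical invocations handled here: "set -o nounset" or "set +x" to
 * change options, "set -A arr one two" to assign an array, "set -- ..."
 * to replace the positional parameters, and a bare "set" to list
 * parameters.
 */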
/**/
int
bin_set(char *nam, char **args, UNUSED(Options ops), UNUSED(int func))
{
int action, optno, array = 0, hadopt = 0,
hadplus = 0, hadend = 0, sort = 0;
char **x, *arrayname = NULL;
/* Obsolescent sh compatibility: set - is the same as set +xv *
* and set - args is the same as set +xv -- args */
if (!EMULATION(EMULATE_ZSH) && *args && **args == '-' && !args[0][1]) {
dosetopt(VERBOSE, 0, 0, opts);
dosetopt(XTRACE, 0, 0, opts);
if (!args[1])
return 0;
}
/* loop through command line options (begins with "-" or "+") */
while (*args && (**args == '-' || **args == '+')) {
action = (**args == '-');
hadplus |= !action;
if(!args[0][1])
*args = "--";
while (*++*args) {
if(**args == Meta)
*++*args ^= 32;
if(**args != '-' || action)
hadopt = 1;
/* The pseudo-option `--' signifies the end of options. */
if (**args == '-') {
hadend = 1;
args++;
goto doneoptions;
} else if (**args == 'o') {
if (!*++*args)
args++;
if (!*args) {
printoptionstates(hadplus);
inittyptab();
return 0;
}
if(!(optno = optlookup(*args)))
zerrnam(nam, "no such option: %s", *args);
else if(dosetopt(optno, action, 0, opts))
zerrnam(nam, "can't change option: %s", *args);
break;
} else if(**args == 'A') {
if(!*++*args)
args++;
array = action ? 1 : -1;
arrayname = *args;
if (!arrayname)
goto doneoptions;
else if (!isset(KSHARRAYS))
{
args++;
goto doneoptions;
}
break;
} else if (**args == 's')
sort = action ? 1 : -1;
else {
if (!(optno = optlookupc(**args)))
zerrnam(nam, "bad option: -%c", **args);
else if(dosetopt(optno, action, 0, opts))
zerrnam(nam, "can't change option: -%c", **args);
}
}
args++;
}
if (errflag)
return 1;
doneoptions:
inittyptab();
/* Show the parameters, possibly with values */
queue_signals();
if (!arrayname)
{
if (!hadopt && !*args)
scanhashtable(paramtab, 1, 0, 0, paramtab->printnode,
hadplus ? PRINT_NAMEONLY : 0);
if (array) {
/* display arrays */
scanhashtable(paramtab, 1, PM_ARRAY, 0, paramtab->printnode,
hadplus ? PRINT_NAMEONLY : 0);
}
if (!*args && !hadend) {
unqueue_signals();
return 0;
}
}
if (sort)
strmetasort(args, sort < 0 ? SORTIT_BACKWARDS : 0, NULL);
if (array) {
/* create an array with the specified elements */
char **a = NULL, **y;
int len = arrlen(args);
if (array < 0 && (a = getaparam(arrayname)) && arrlen_gt(a, len)) {
a += len;
len += arrlen(a);
}
for (x = y = zalloc((len + 1) * sizeof(char *)); len--;) {
if (!*args)
args = a;
*y++ = ztrdup(*args++);
}
*y++ = NULL;
setaparam(arrayname, x);
} else {
/* set shell arguments */
freearray(pparams);
pparams = zarrdup(args);
}
unqueue_signals();
return 0;
}
/**** directory-handling builtins ****/
/**/
int doprintdir = 0; /* set in exec.c (for autocd) */
/* pwd: display the name of the current directory */
/**/
int
bin_pwd(UNUSED(char *name), UNUSED(char **argv), Options ops, UNUSED(int func))
{
if (OPT_ISSET(ops,'r') || OPT_ISSET(ops,'P') ||
(isset(CHASELINKS) && !OPT_ISSET(ops,'L')))
printf("%s\n", zgetcwd());
else {
zputs(pwd, stdout);
putchar('\n');
}
return 0;
}
/* the directory stack */
/**/
mod_export LinkList dirstack;
/* dirs: list the directory stack, or replace it with a provided list */
/**/
int
bin_dirs(UNUSED(char *name), char **argv, Options ops, UNUSED(int func))
{
LinkList l;
queue_signals();
/* with -v, -p or no arguments display the directory stack */
if (!(*argv || OPT_ISSET(ops,'c')) || OPT_ISSET(ops,'v') ||
OPT_ISSET(ops,'p')) {
LinkNode node;
char *fmt;
int pos = 1;
/* with the -v option, display a numbered list, starting at zero */
if (OPT_ISSET(ops,'v')) {
printf("0\t");
fmt = "\n%d\t";
/* with the -p option, display entries one per line */
} else if (OPT_ISSET(ops,'p'))
fmt = "\n";
else
fmt = " ";
if (OPT_ISSET(ops,'l'))
zputs(pwd, stdout);
else
fprintdir(pwd, stdout);
for (node = firstnode(dirstack); node; incnode(node)) {
printf(fmt, pos++);
if (OPT_ISSET(ops,'l'))
zputs(getdata(node), stdout);
else
fprintdir(getdata(node), stdout);
}
unqueue_signals();
putchar('\n');
return 0;
}
/* replace the stack with the specified directories */
l = znewlinklist();
while (*argv)
zaddlinknode(l, ztrdup(*argv++));
freelinklist(dirstack, freestr);
dirstack = l;
unqueue_signals();
return 0;
}
/* cd, chdir, pushd, popd */
/**/
void
set_pwd_env(void)
{
Param pm;
/* update the PWD and OLDPWD shell parameters */
pm = (Param) paramtab->getnode(paramtab, "PWD");
if (pm && PM_TYPE(pm->node.flags) != PM_SCALAR) {
pm->node.flags &= ~PM_READONLY;
unsetparam_pm(pm, 0, 1);
}
pm = (Param) paramtab->getnode(paramtab, "OLDPWD");
if (pm && PM_TYPE(pm->node.flags) != PM_SCALAR) {
pm->node.flags &= ~PM_READONLY;
unsetparam_pm(pm, 0, 1);
}
assignsparam("PWD", ztrdup(pwd), 0);
assignsparam("OLDPWD", ztrdup(oldpwd), 0);
pm = (Param) paramtab->getnode(paramtab, "PWD");
if (!(pm->node.flags & PM_EXPORTED))
addenv(pm, pwd);
pm = (Param) paramtab->getnode(paramtab, "OLDPWD");
if (!(pm->node.flags & PM_EXPORTED))
addenv(pm, oldpwd);
}
/* set if we are resolving links to their true paths */
static int chasinglinks;
/* The main pwd changing function. The real work is done by other *
* functions. cd_get_dest() does the initial argument processing; *
* cd_do_chdir() actually changes directory, if possible; cd_new_pwd() *
* does the ancillary processing associated with actually changing *
* directory. */
/**/
int
bin_cd(char *nam, char **argv, Options ops, int func)
{
LinkNode dir;
struct stat st1, st2;
if (isset(RESTRICTED)) {
zwarnnam(nam, "restricted");
return 1;
}
doprintdir = (doprintdir == -1);
chasinglinks = OPT_ISSET(ops,'P') ||
(isset(CHASELINKS) && !OPT_ISSET(ops,'L'));
queue_signals();
zpushnode(dirstack, ztrdup(pwd));
if (!(dir = cd_get_dest(nam, argv, OPT_ISSET(ops,'s'), func))) {
zsfree(getlinknode(dirstack));
unqueue_signals();
return 1;
}
cd_new_pwd(func, dir, OPT_ISSET(ops, 'q'));
if (stat(unmeta(pwd), &st1) < 0) {
setjobpwd();
zsfree(pwd);
pwd = NULL;
pwd = metafy(zgetcwd(), -1, META_DUP);
} else if (stat(".", &st2) < 0) {
if (chdir(unmeta(pwd)) < 0)
zwarn("unable to chdir(%s): %e", pwd, errno);
} else if (st1.st_ino != st2.st_ino || st1.st_dev != st2.st_dev) {
if (chasinglinks) {
setjobpwd();
zsfree(pwd);
pwd = NULL;
pwd = metafy(zgetcwd(), -1, META_DUP);
} else if (chdir(unmeta(pwd)) < 0)
zwarn("unable to chdir(%s): %e", pwd, errno);
}
unqueue_signals();
return 0;
}
/* Get directory to chdir to */
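/*
 * Argument forms handled here: no argument (chdir to $HOME, or for
 * pushd swap the top two stack entries unless PUSHDTOHOME is set);
 * "-" for $OLDPWD; "+n"/"-n" for the n'th directory stack entry (the
 * meaning of + and - swaps with PUSHDMINUS); and the two-argument form
 * "cd old new", which substitutes the first occurrence of old with new
 * in the current directory name.
 */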
/**/
static LinkNode
cd_get_dest(char *nam, char **argv, int hard, int func)
{
LinkNode dir = NULL;
LinkNode target;
char *dest;
if (!argv[0]) {
if (func == BIN_POPD && !nextnode(firstnode(dirstack))) {
zwarnnam(nam, "directory stack empty");
return NULL;
}
if (func == BIN_PUSHD && unset(PUSHDTOHOME))
dir = nextnode(firstnode(dirstack));
if (dir)
zinsertlinknode(dirstack, dir, getlinknode(dirstack));
else if (func != BIN_POPD) {
if (!home) {
zwarnnam(nam, "HOME not set");
return NULL;
}
zpushnode(dirstack, ztrdup(home));
}
} else if (!argv[1]) {
int dd;
char *end;
doprintdir++;
if (argv[0][1] && (argv[0][0] == '+' || argv[0][0] == '-')
&& strspn(argv[0]+1, "0123456789") == strlen(argv[0]+1)) {
dd = zstrtol(argv[0] + 1, &end, 10);
if (*end == '\0') {
if ((argv[0][0] == '+') ^ isset(PUSHDMINUS))
for (dir = firstnode(dirstack); dir && dd; dd--, incnode(dir));
else
for (dir = lastnode(dirstack); dir != (LinkNode) dirstack && dd;
dd--, dir = prevnode(dir));
if (!dir || dir == (LinkNode) dirstack) {
zwarnnam(nam, "no such entry in dir stack");
return NULL;
}
}
}
if (!dir)
zpushnode(dirstack, ztrdup(strcmp(argv[0], "-")
? (doprintdir--, argv[0]) : oldpwd));
} else {
char *u, *d;
int len1, len2, len3;
if (!(u = strstr(pwd, argv[0]))) {
zwarnnam(nam, "string not in pwd: %s", argv[0]);
return NULL;
}
len1 = strlen(argv[0]);
len2 = strlen(argv[1]);
len3 = u - pwd;
d = (char *)zalloc(len3 + len2 + strlen(u + len1) + 1);
strncpy(d, pwd, len3);
strcpy(d + len3, argv[1]);
strcat(d, u + len1);
zpushnode(dirstack, d);
doprintdir++;
}
target = dir;
if (func == BIN_POPD) {
if (!dir) {
target = dir = firstnode(dirstack);
} else if (dir != firstnode(dirstack)) {
return dir;
}
dir = nextnode(dir);
}
if (!dir) {
dir = firstnode(dirstack);
}
if (!dir || !getdata(dir)) {
DPUTS(1, "Directory not set, not detected early enough");
return NULL;
}
if (!(dest = cd_do_chdir(nam, getdata(dir), hard))) {
if (!target)
zsfree(getlinknode(dirstack));
if (func == BIN_POPD)
zsfree(remnode(dirstack, dir));
return NULL;
}
if (dest != (char *)getdata(dir)) {
zsfree(getdata(dir));
setdata(dir, dest);
}
return target ? target : dir;
}
/* Change to given directory, if possible. This function works out *
* exactly how the directory should be interpreted, including cdpath *
* and CDABLEVARS. For each possible interpretation of the given *
* path, this calls cd_try_chdir(), which attempts to chdir to that *
* particular path. */
/**/
static char *
cd_do_chdir(char *cnam, char *dest, int hard)
{
char **pp, *ret;
int hasdot = 0, eno = ENOENT;
/*
* nocdpath indicates that cdpath should not be used.
* This is the case iff dest is a relative path
* whose first segment is . or .., but if the path is
* absolute then cdpath won't be used anyway.
*/
int nocdpath;
#ifdef __CYGWIN__
/*
* Normalize path under Cygwin to avoid messing with
* DOS style names with drives in them
*/
static char buf[PATH_MAX+1];
#ifdef HAVE_CYGWIN_CONV_PATH
cygwin_conv_path(CCP_WIN_A_TO_POSIX | CCP_RELATIVE, dest, buf,
PATH_MAX);
#else
#ifndef _SYS_CYGWIN_H
void cygwin_conv_to_posix_path(const char *, char *);
#endif
cygwin_conv_to_posix_path(dest, buf);
#endif
dest = buf;
#endif
nocdpath = dest[0] == '.' &&
(dest[1] == '/' || !dest[1] || (dest[1] == '.' &&
(dest[2] == '/' || !dest[2])));
/*
* If we have an absolute path, use it as-is only
*/
if (*dest == '/') {
if ((ret = cd_try_chdir(NULL, dest, hard)))
return ret;
zwarnnam(cnam, "%e: %s", errno, dest);
return NULL;
}
/*
* If cdpath is being used, check it for ".".
* Don't bother doing this if POSIXCD is set, we don't
* need to know (though it doesn't actually matter).
*/
if (!nocdpath && !isset(POSIXCD))
for (pp = cdpath; *pp; pp++)
if (!(*pp)[0] || ((*pp)[0] == '.' && (*pp)[1] == '\0'))
hasdot = 1;
/*
* If
* (- there is no . in cdpath
* - or cdpath is not being used)
* - and the POSIXCD option is not set
* try the directory as-is (i.e. from .)
*/
if (!hasdot && !isset(POSIXCD)) {
if ((ret = cd_try_chdir(NULL, dest, hard)))
return ret;
if (errno != ENOENT)
eno = errno;
}
/* if cdpath is being used, try given directory relative to each element in
cdpath in turn */
if (!nocdpath)
for (pp = cdpath; *pp; pp++) {
if ((ret = cd_try_chdir(*pp, dest, hard))) {
if (isset(POSIXCD)) {
/*
* For POSIX we need to print the directory
* any time CDPATH was used, except in the
* special case of an empty segment being
* treated as a ".".
*/
if (**pp)
doprintdir++;
} else {
if (strcmp(*pp, ".")) {
doprintdir++;
}
}
return ret;
}
if (errno != ENOENT)
eno = errno;
}
/*
* POSIX requires us to check "." after CDPATH rather than before.
*/
if (isset(POSIXCD)) {
if ((ret = cd_try_chdir(NULL, dest, hard)))
return ret;
if (errno != ENOENT)
eno = errno;
}
/* handle the CDABLEVARS option */
if ((ret = cd_able_vars(dest))) {
if ((ret = cd_try_chdir(NULL, ret,hard))) {
doprintdir++;
return ret;
}
if (errno != ENOENT)
eno = errno;
}
/* If we got here, it means that we couldn't chdir to any of the
multitudinous possible paths allowed by zsh. We've run out of options!
Add more here! */
zwarnnam(cnam, "%e: %s", eno, dest);
return NULL;
}
/* If the CDABLEVARS option is set, return the new *
* interpretation of the given path. */
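/*
 * For example, with CDABLEVARS set and "mydir=/some/path", "cd mydir"
 * behaves like "cd /some/path"; any trailing "/subdir" is appended to
 * the expansion.
 */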
/**/
char *
cd_able_vars(char *s)
{
char *rest, save;
if (isset(CDABLEVARS)) {
for (rest = s; *rest && *rest != '/'; rest++);
save = *rest;
*rest = 0;
s = getnameddir(s);
*rest = save;
if (s && *rest)
s = dyncat(s, rest);
return s;
}
return NULL;
}
/* Attempt to change to a single given directory. The directory, *
* for the convenience of the calling function, may be provided in *
* two parts, which must be concatenated before attempting to chdir. *
* Returns NULL if the chdir fails. If the directory change is *
* possible, it is performed, and a pointer to the new full pathname *
* is returned. */
/**/
static char *
cd_try_chdir(char *pfix, char *dest, int hard)
{
char *buf;
int dlen, dochaselinks = 0;
/* handle directory prefix */
if (pfix && *pfix) {
if (*pfix == '/') {
#ifdef __CYGWIN__
/* NB: Don't turn "/"+"bin" into "//"+"bin" by mistake! "//bin" may *
* not be what user really wants (probably wants "/bin"), but *
* "//bin" could be valid too (see fixdir())! This is primarily for *
* handling CDPATH correctly. Likewise for "//"+"bin" not becoming *
* "///bin" (aka "/bin"). */
int root = pfix[1] == '\0' || (pfix[1] == '/' && pfix[2] == '\0');
buf = tricat(pfix, ( root ? "" : "/" ), dest);
#else
buf = tricat(pfix, "/", dest);
#endif
} else {
int pfl = strlen(pfix);
dlen = strlen(pwd);
if (dlen == 1 && *pwd == '/')
dlen = 0;
buf = zalloc(dlen + pfl + strlen(dest) + 3);
if (dlen)
strcpy(buf, pwd);
buf[dlen] = '/';
strcpy(buf + dlen + 1, pfix);
buf[dlen + 1 + pfl] = '/';
strcpy(buf + dlen + pfl + 2, dest);
}
} else if (*dest == '/')
buf = ztrdup(dest);
else {
dlen = strlen(pwd);
if (pwd[dlen-1] == '/')
--dlen;
buf = zalloc(dlen + strlen(dest) + 2);
strcpy(buf, pwd);
buf[dlen] = '/';
strcpy(buf + dlen + 1, dest);
}
/* Normalise path. See the definition of fixdir() for what this means.
* We do not do this if we are chasing links.
*/
if (!chasinglinks)
dochaselinks = fixdir(buf);
else
unmetafy(buf, &dlen);
/* We try the full path first. If that fails, try the
* argument to cd relatively. This is useful if the cwd
* or a parent directory is renamed in the interim.
*/
if (lchdir(buf, NULL, hard) &&
(pfix || *dest == '/' || lchdir(unmeta(dest), NULL, hard))) {
free(buf);
return NULL;
}
/* the chdir succeeded, so decide if we should force links to be chased */
if (dochaselinks)
chasinglinks = 1;
return metafy(buf, -1, META_NOALLOC);
}
/* do the extra processing associated with changing directory */
/**/
static void
cd_new_pwd(int func, LinkNode dir, int quiet)
{
char *new_pwd, *s;
int dirstacksize;
if (func == BIN_PUSHD)
rolllist(dirstack, dir);
new_pwd = remnode(dirstack, dir);
if (func == BIN_POPD && firstnode(dirstack)) {
zsfree(new_pwd);
new_pwd = getlinknode(dirstack);
} else if (func == BIN_CD && unset(AUTOPUSHD))
zsfree(getlinknode(dirstack));
if (chasinglinks) {
s = findpwd(new_pwd);
if (s) {
zsfree(new_pwd);
new_pwd = s;
}
}
if (isset(PUSHDIGNOREDUPS)) {
LinkNode n;
for (n = firstnode(dirstack); n; incnode(n)) {
if (!strcmp(new_pwd, getdata(n))) {
zsfree(remnode(dirstack, n));
break;
}
}
}
/* shift around the pwd variables, to make oldpwd and pwd relate to the
current (i.e. new) pwd */
zsfree(oldpwd);
oldpwd = pwd;
setjobpwd();
pwd = new_pwd;
set_pwd_env();
if (isset(INTERACTIVE) || isset(POSIXCD)) {
if (func != BIN_CD && isset(INTERACTIVE)) {
if (unset(PUSHDSILENT) && !quiet)
printdirstack();
} else if (doprintdir) {
fprintdir(pwd, stdout);
putchar('\n');
}
}
/* execute the chpwd function */
fflush(stdout);
fflush(stderr);
if (!quiet)
callhookfunc("chpwd", NULL, 1, NULL);
dirstacksize = getiparam("DIRSTACKSIZE");
/* handle directory stack sizes out of range */
if (dirstacksize > 0) {
int remove = countlinknodes(dirstack) -
(dirstacksize < 2 ? 2 : dirstacksize);
while (remove-- >= 0)
zsfree(remnode(dirstack, lastnode(dirstack)));
}
}
/* Print the directory stack */
/**/
static void
printdirstack(void)
{
LinkNode node;
fprintdir(pwd, stdout);
for (node = firstnode(dirstack); node; incnode(node)) {
putchar(' ');
fprintdir(getdata(node), stdout);
}
putchar('\n');
}
/* Normalise a path. Segments consisting of ., and foo/.. *
* combinations, are removed and the path is unmetafied.
* Returns 1 if we found a ../ path which should force links to
* be chased, 0 otherwise.
*/
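/*
 * For example "/usr//local/./bin/../lib" normalises to "/usr/local/lib"
 * provided the intermediate directories exist; if a "foo/.." prefix
 * turns out not to be a real directory, the rest of the path is copied
 * through untouched and 1 is returned to force links to be chased.
 */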
/**/
int
fixdir(char *src)
{
char *dest = src, *d0 = dest;
#ifdef __CYGWIN__
char *s0 = src;
#endif
/* This function is always called with a path containing at
* least one slash, either because one was input by the user or
* because the caller has prepended either pwd or a cdpath dir.
* If asked to make a relative change and pwd is set to ".",
* the current directory has been removed out from under us,
* so force links to be chased.
*
* Ordinarily we can't get here with "../" as the first component
* but handle the silly special case of ".." in cdpath.
*
* Order of comparisons here looks funny, but it short-circuits
* most rapidly in the event of a false condition. Set to 2
* here so we still obey the (lack of) CHASEDOTS option after
* the first "../" is preserved (test chasedots > 1 below).
*/
int chasedots = (src[0] == '.' && pwd[0] == '.' && pwd[1] == '\0' &&
(src[1] == '/' || (src[1] == '.' && src[2] == '/'))) * 2;
/*** if have RFS superroot directory ***/
#ifdef HAVE_SUPERROOT
/* allow /.. segments to remain */
while (*src == '/' && src[1] == '.' && src[2] == '.' &&
(!src[3] || src[3] == '/')) {
*dest++ = '/';
*dest++ = '.';
*dest++ = '.';
src += 3;
}
#endif
for (;;) {
/* compress multiple /es into single */
if (*src == '/') {
#ifdef __CYGWIN__
/* allow leading // under cygwin, but /// still becomes / */
if (src == s0 && src[1] == '/' && src[2] != '/')
*dest++ = *src++;
#endif
*dest++ = *src++;
while (*src == '/')
src++;
}
/* if we are at the end of the input path, remove a trailing / (if it
exists), and return ct */
if (!*src) {
while (dest > d0 + 1 && dest[-1] == '/')
dest--;
*dest = '\0';
return chasedots;
}
if (src[0] == '.' && src[1] == '.' &&
(src[2] == '\0' || src[2] == '/')) {
if (isset(CHASEDOTS) || chasedots > 1) {
chasedots = 1;
/* and treat as normal path segment */
} else {
if (dest > d0 + 1) {
/*
* remove a foo/.. combination:
* first check foo exists, else return.
*/
struct stat st;
*dest = '\0';
if (stat(d0, &st) < 0 || !S_ISDIR(st.st_mode)) {
char *ptrd, *ptrs;
if (dest == src)
*dest = '.';
for (ptrs = src, ptrd = dest; *ptrs; ptrs++, ptrd++)
*ptrd = (*ptrs == Meta) ? (*++ptrs ^ 32) : *ptrs;
*ptrd = '\0';
return 1;
}
for (dest--; dest > d0 + 1 && dest[-1] != '/'; dest--);
if (dest[-1] != '/')
dest--;
}
src++;
while (*++src == '/');
continue;
}
}
if (src[0] == '.' && (src[1] == '/' || src[1] == '\0')) {
/* skip a . section */
while (*++src == '/');
} else {
/* copy a normal segment into the output */
while (*src != '/' && *src != '\0')
if ((*dest++ = *src++) == Meta)
dest[-1] = *src++ ^ 32;
}
}
/* unreached */
}
/**/
mod_export void
printqt(char *str)
{
/* Print str, but turn any single quote into '\'' or ''. */
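/* For example the string  a'b  is printed as  a'\''b  by default,
 * or as  a''b  when RCQUOTES is set. */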
for (; *str; str++)
if (*str == '\'')
printf(isset(RCQUOTES) ? "''" : "'\\''");
else
putchar(*str);
}
/**/
mod_export void
printif(char *str, int c)
{
/* If flag c has an argument, print that */
if (str) {
printf(" -%c ", c);
quotedzputs(str, stdout);
}
}
/**** history list functions ****/
/* fc, history, r */
/**/
int
bin_fc(char *nam, char **argv, Options ops, int func)
{
zlong first = -1, last = -1;
int retval;
char *s;
struct asgment *asgf = NULL, *asgl = NULL;
Patprog pprog = NULL;
/* fc is only permitted in interactive shells */
#ifdef FACIST_INTERACTIVE
if (!interact) {
zwarnnam(nam, "not interactive shell");
return 1;
}
#endif
if (OPT_ISSET(ops,'p')) {
char *hf = "";
zlong hs = DEFAULT_HISTSIZE;
zlong shs = 0;
int level = OPT_ISSET(ops,'a') ? locallevel : -1;
if (*argv) {
hf = *argv++;
if (*argv) {
char *check;
hs = zstrtol(*argv++, &check, 10);
if (*check) {
zwarnnam("fc", "HISTSIZE must be an integer");
return 1;
}
if (*argv) {
shs = zstrtol(*argv++, &check, 10);
if (*check) {
zwarnnam("fc", "SAVEHIST must be an integer");
return 1;
}
} else
shs = hs;
if (*argv) {
zwarnnam("fc", "too many arguments");
return 1;
}
} else {
hs = histsiz;
shs = savehistsiz;
}
}
if (!pushhiststack(hf, hs, shs, level))
return 1;
if (*hf) {
struct stat st;
if (stat(hf, &st) >= 0 || errno != ENOENT)
readhistfile(hf, 1, HFILE_USE_OPTIONS);
}
return 0;
}
if (OPT_ISSET(ops,'P')) {
if (*argv) {
zwarnnam("fc", "too many arguments");
return 1;
}
return !saveandpophiststack(-1, HFILE_USE_OPTIONS);
}
/* with the -m option, the first argument is taken *
* as a pattern that history lines have to match */
if (*argv && OPT_ISSET(ops,'m')) {
tokenize(*argv);
if (!(pprog = patcompile(*argv++, 0, NULL))) {
zwarnnam(nam, "invalid match pattern");
return 1;
}
}
queue_signals();
if (OPT_ISSET(ops,'R')) {
/* read history from a file */
readhistfile(*argv, 1, OPT_ISSET(ops,'I') ? HFILE_SKIPOLD : 0);
unqueue_signals();
return 0;
}
if (OPT_ISSET(ops,'W')) {
/* write history to a file */
savehistfile(*argv, 1, OPT_ISSET(ops,'I') ? HFILE_SKIPOLD : 0);
unqueue_signals();
return 0;
}
if (OPT_ISSET(ops,'A')) {
/* append history to a file */
savehistfile(*argv, 1, HFILE_APPEND |
(OPT_ISSET(ops,'I') ? HFILE_SKIPOLD : 0));
unqueue_signals();
return 0;
}
if (zleactive) {
unqueue_signals();
zwarnnam(nam, "no interactive history within ZLE");
return 1;
}
/* put foo=bar type arguments into the substitution list */
while (*argv && equalsplit(*argv, &s)) {
Asgment a = (Asgment) zhalloc(sizeof *a);
if (!**argv) {
zwarnnam(nam, "invalid replacement pattern: =%s", s);
return 1;
}
if (!asgf)
asgf = asgl = a;
else {
asgl->node.next = &a->node;
asgl = a;
}
a->name = *argv;
a->flags = 0;
a->value.scalar = s;
a->node.next = a->node.prev = NULL;
argv++;
}
/* interpret and check first history line specifier */
if (*argv) {
first = fcgetcomm(*argv);
if (first == -1) {
unqueue_signals();
return 1;
}
argv++;
}
/* interpret and check second history line specifier */
if (*argv) {
last = fcgetcomm(*argv);
if (last == -1) {
unqueue_signals();
return 1;
}
argv++;
}
/* There is a maximum of two history specifiers. At least, there *
* will be as long as the history list is one-dimensional. */
if (*argv) {
unqueue_signals();
zwarnnam("fc", "too many arguments");
return 1;
}
/* default values of first and last, and range checking */
if (last == -1) {
if (OPT_ISSET(ops,'l') && first < curhist) {
/*
* When listing base our calculations on curhist,
* to show anything added since the edited history line.
* Also, in that case curhist will have been modified
* past the current history line; then we want to
* show everything, because the user expects to
* see the result of "print -s". Otherwise, we subtract
* -1 from the line, because the user doesn't usually expect
* to see the command line that caused history to be
* listed.
*/
last = (curline.histnum == curhist) ? addhistnum(curhist,-1,0)
: curhist;
if (last < firsthist())
last = firsthist();
}
else
last = first;
}
if (first == -1) {
/*
* When listing, we want to see everything that's been
* added to the history, including by print -s, so use
* curhist.
* When reexecuting, we want to restrict to the last edited
* command line to avoid giving the user a nasty turn
* if some helpful soul ran "print -s 'rm -rf /'".
*/
first = OPT_ISSET(ops,'l')? addhistnum(curhist,-16,0)
: addhistnum(curline.histnum,-1,0);
if (first < 1)
first = 1;
if (last < first)
last = first;
}
if (OPT_ISSET(ops,'l')) {
/* list the required part of the history */
retval = fclist(stdout, ops, first, last, asgf, pprog, 0);
unqueue_signals();
}
else {
/* edit history file, and (if successful) use the result as a new command */
int tempfd;
FILE *out;
char *fil;
retval = 1;
if ((tempfd = gettempfile(NULL, 1, &fil)) < 0
|| ((out = fdopen(tempfd, "w")) == NULL)) {
unqueue_signals();
zwarnnam("fc", "can't open temp file: %e", errno);
} else {
/*
* Nasty behaviour results if we use the current history
* line here. Treat it as if it doesn't exist, unless
* that gives us an empty range.
*/
if (last >= curhist) {
last = curhist - 1;
if (first > last) {
unqueue_signals();
zwarnnam("fc",
"current history line would recurse endlessly, aborted");
fclose(out);
unlink(fil);
return 1;
}
}
ops->ind['n'] = 1; /* No line numbers here. */
if (!fclist(out, ops, first, last, asgf, pprog, 1)) {
char *editor;
if (func == BIN_R)
editor = "-";
else if (OPT_HASARG(ops, 'e'))
editor = OPT_ARG(ops, 'e');
else
editor = getsparam("FCEDIT");
if (!editor)
editor = getsparam("EDITOR");
if (!editor)
editor = DEFAULT_FCEDIT;
unqueue_signals();
if (fcedit(editor, fil)) {
if (stuff(fil))
zwarnnam("fc", "%e: %s", errno, fil);
else {
loop(0,1);
retval = lastval;
}
}
} else
unqueue_signals();
}
unlink(fil);
}
return retval;
}
/* History handling functions: these are called by ZLE, as well as *
* the actual builtins. fcgetcomm() gets a history line, specified *
* either by number or leading string. fcsubs() performs a given *
* set of simple old=new substitutions on a given command line. *
* fclist() outputs a given range of history lines to a text file. */
/* get the history event associated with s */
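/*
 * For example "fc -l 30 40" uses absolute event numbers, "fc -l -5"
 * counts back from the current event, and "fc vi" looks up the most
 * recent event whose command line begins with "vi".
 */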
/**/
static zlong
fcgetcomm(char *s)
{
zlong cmd;
/* First try to match a history number. Negative *
* numbers indicate reversed numbering. */
if ((cmd = atoi(s)) != 0 || *s == '0') {
if (cmd < 0)
cmd = addhistnum(curline.histnum,cmd,HIST_FOREIGN);
if (cmd < 0)
cmd = 0;
return cmd;
}
/* not a number, so search by string */
cmd = hcomsearch(s);
if (cmd == -1)
zwarnnam("fc", "event not found: %s", s);
return cmd;
}
/* Perform old=new substitutions. Uses the asgment structure from zsh.h, *
* which is essentially a linked list of string,replacement pairs. */
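/*
 * For example "r foo=bar" re-executes the previous command with every
 * occurrence of "foo" replaced by "bar".
 */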
/**/
static int
fcsubs(char **sp, struct asgment *sub)
{
char *oldstr, *newstr, *oldpos, *newpos, *newmem, *s = *sp;
int subbed = 0;
/* loop through the linked list */
while (sub) {
oldstr = sub->name;
newstr = sub->value.scalar;
sub = (Asgment)sub->node.next;
oldpos = s;
/* loop over occurrences of oldstr in s, replacing them with newstr */
while ((newpos = (char *)strstr(oldpos, oldstr))) {
newmem = (char *) zhalloc(1 + (newpos - s)
+ strlen(newstr) + strlen(newpos + strlen(oldstr)));
ztrncpy(newmem, s, newpos - s);
strcat(newmem, newstr);
oldpos = newmem + strlen(newmem);
strcat(newmem, newpos + strlen(oldstr));
s = newmem;
subbed = 1;
}
}
*sp = s;
return subbed;
}
/* Print a series of history events to a file. The file pointer is *
* given by f, and the required range of events by first and last. *
* subs is an optional list of foo=bar substitutions to perform on the *
* history lines before output. com is an optional comp structure *
* that the history lines are required to match. n, r, D and d are *
* options: n indicates that each line should be numbered. r indicates *
* that the lines should be output in reverse order (newest first). *
* D indicates that the real time taken by each command should be *
* output. d indicates that the time of execution of each command *
* should be output; d>1 means that the date should be output too; d>3 *
* means that mm/dd/yyyy form should be used for the dates, as opposed *
* to dd.mm.yyyy form; d>7 means that yyyy-mm-dd form should be used. */
/**/
static int
fclist(FILE *f, Options ops, zlong first, zlong last,
struct asgment *subs, Patprog pprog, int is_command)
{
int fclistdone = 0, xflags = 0;
zlong tmp;
char *s, *tdfmt, *timebuf;
Histent ent;
/* reverse range if required */
if (OPT_ISSET(ops,'r')) {
tmp = last;
last = first;
first = tmp;
}
if (is_command && first > last) {
zwarnnam("fc", "history events can't be executed backwards, aborted");
if (f != stdout)
fclose(f);
return 1;
}
ent = gethistent(first, first < last? GETHIST_DOWNWARD : GETHIST_UPWARD);
if (!ent || (first < last? ent->histnum > last : ent->histnum < last)) {
if (first == last) {
char buf[DIGBUFSIZE];
convbase(buf, first, 10);
zwarnnam("fc", "no such event: %s", buf);
} else
zwarnnam("fc", "no events in that range");
if (f != stdout)
fclose(f);
return 1;
}
if (OPT_ISSET(ops,'d') || OPT_ISSET(ops,'f') ||
OPT_ISSET(ops,'E') || OPT_ISSET(ops,'i') ||
OPT_ISSET(ops,'t')) {
if (OPT_ISSET(ops,'t')) {
tdfmt = OPT_ARG(ops,'t');
} else if (OPT_ISSET(ops,'i')) {
tdfmt = "%Y-%m-%d %H:%M";
} else if (OPT_ISSET(ops,'E')) {
tdfmt = "%f.%-m.%Y %H:%M";
} else if (OPT_ISSET(ops,'f')) {
tdfmt = "%-m/%f/%Y %H:%M";
} else {
tdfmt = "%H:%M";
}
timebuf = zhalloc(256);
} else {
tdfmt = timebuf = NULL;
}
/* xflags exclude events */
if (OPT_ISSET(ops,'L')) {
xflags |= HIST_FOREIGN;
}
if (OPT_ISSET(ops,'I')) {
xflags |= HIST_READ;
}
for (;;) {
if (ent->node.flags & xflags)
s = NULL;
else
s = dupstring(ent->node.nam);
/* this if does the pattern matching, if required */
if (s && (!pprog || pattry(pprog, s))) {
/* perform substitution */
fclistdone |= (subs ? fcsubs(&s, subs) : 1);
/* do numbering */
if (!OPT_ISSET(ops,'n')) {
char buf[DIGBUFSIZE];
convbase(buf, ent->histnum, 10);
fprintf(f, "%5s%c ", buf,
ent->node.flags & HIST_FOREIGN ? '*' : ' ');
}
/* output actual time (and possibly date) of execution of the
command, if required */
if (tdfmt != NULL) {
struct tm *ltm;
int len;
ltm = localtime(&ent->stim);
if ((len = ztrftime(timebuf, 256, tdfmt, ltm, 0L)) >= 0) {
fwrite(timebuf, 1, len, f);
fprintf(f, " ");
}
}
/* display the time taken by the command, if required */
if (OPT_ISSET(ops,'D')) {
long diff;
diff = (ent->ftim) ? ent->ftim - ent->stim : 0;
fprintf(f, "%ld:%02ld ", diff / 60, diff % 60);
}
/* output the command */
if (f == stdout) {
nicezputs(s, f);
putc('\n', f);
} else {
int len;
unmetafy(s, &len);
fwrite(s, 1, len, f);
putc('\n', f);
}
}
/* move on to the next history line, or quit the loop */
if (first < last) {
if (!(ent = down_histent(ent)) || ent->histnum > last)
break;
}
else {
if (!(ent = up_histent(ent)) || ent->histnum < last)
break;
}
}
/* final processing */
if (f != stdout)
fclose(f);
if (!fclistdone) {
if (subs)
zwarnnam("fc", "no substitutions performed");
else if (xflags || pprog)
zwarnnam("fc", "no matching events found");
return 1;
}
return 0;
}
/* edit a history file */
/**/
static int
fcedit(char *ename, char *fn)
{
char *s;
if (!strcmp(ename, "-"))
return 1;
s = tricat(ename, " ", fn);
execstring(s, 1, 0, "fc");
zsfree(s);
return !lastval;
}
/**** parameter builtins ****/
/* Separate an argument into name=value parts, returning them in an *
* asgment structure. Because the asgment structure used is global, *
* only one of these can be active at a time. The string s gets placed *
* in this global structure, so it needs to be in permanent memory. */
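/*
 * For example "foo=bar" yields name "foo" and scalar value "bar", while
 * a bare "foo" yields a NULL value; an argument beginning with '=' is
 * rejected as a bad assignment.
 */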
/**/
static Asgment
getasg(char ***argvp, LinkList assigns)
{
char *s = **argvp;
static struct asgment asg;
/* sanity check for valid argument */
if (!s) {
if (assigns) {
Asgment asgp = (Asgment)firstnode(assigns);
if (!asgp)
return NULL;
(void)uremnode(assigns, &asgp->node);
return asgp;
}
return NULL;
}
/* check if name is empty */
if (*s == '=') {
zerr("bad assignment");
return NULL;
}
asg.name = s;
asg.flags = 0;
/* search for `=' */
for (; *s && *s != '='; s++);
/* found `=', so return with a value */
if (*s) {
*s = '\0';
asg.value.scalar = s + 1;
} else {
/* didn't find `=', so we only have a name */
asg.value.scalar = NULL;
}
(*argvp)++;
return &asg;
}
/* for new special parameters */
enum {
NS_NONE,
NS_NORMAL,
NS_SECONDS
};
static const struct gsu_scalar tiedarr_gsu =
{ tiedarrgetfn, tiedarrsetfn, tiedarrunsetfn };
/* Install a base if we are turning on a numeric option with an argument */
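/*
 * For example "typeset -i 16 hexval" selects output base 16 for an
 * integer, and "typeset -F 3 f" selects 3 decimal places for a fixed
 * point float; integer bases outside 2..36 are rejected below.
 */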
static int
typeset_setbase(const char *name, Param pm, Options ops, int on, int always)
{
char *arg = NULL;
if ((on & PM_INTEGER) && OPT_HASARG(ops,'i'))
arg = OPT_ARG(ops,'i');
else if ((on & PM_EFLOAT) && OPT_HASARG(ops,'E'))
arg = OPT_ARG(ops,'E');
else if ((on & PM_FFLOAT) && OPT_HASARG(ops,'F'))
arg = OPT_ARG(ops,'F');
if (arg) {
char *eptr;
int base = (int)zstrtol(arg, &eptr, 10);
if (*eptr) {
if (on & PM_INTEGER)
zwarnnam(name, "bad base value: %s", arg);
else
zwarnnam(name, "bad precision value: %s", arg);
return 1;
}
if ((on & PM_INTEGER) && (base < 2 || base > 36)) {
zwarnnam(name, "invalid base (must be 2 to 36 inclusive): %d",
base);
return 1;
}
pm->base = base;
} else if (always)
pm->base = 0;
return 0;
}
/* Install a width if we are turning on a padding option with an argument */
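/*
 * For example "typeset -Z 5 n" pads n with leading zeroes to width 5,
 * and "typeset -L 10 s" left-justifies s in a field of width 10.
 */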
static int
typeset_setwidth(const char * name, Param pm, Options ops, int on, int always)
{
char *arg = NULL;
if ((on & PM_LEFT) && OPT_HASARG(ops,'L'))
arg = OPT_ARG(ops,'L');
else if ((on & PM_RIGHT_B) && OPT_HASARG(ops,'R'))
arg = OPT_ARG(ops,'R');
else if ((on & PM_RIGHT_Z) && OPT_HASARG(ops,'Z'))
arg = OPT_ARG(ops,'Z');
if (arg) {
char *eptr;
pm->width = (int)zstrtol(arg, &eptr, 10);
if (*eptr) {
zwarnnam(name, "bad width value: %s", arg);
return 1;
}
} else if (always)
pm->width = 0;
return 0;
}
/* function to set a single parameter */
/**/
static Param
typeset_single(char *cname, char *pname, Param pm, UNUSED(int func),
int on, int off, int roff, Asgment asg, Param altpm,
Options ops, int joinchar)
{
int usepm, tc, keeplocal = 0, newspecial = NS_NONE, readonly, dont_set = 0;
char *subscript;
/*
* Do we use the existing pm? Note that this isn't the end of the
* story, because if we try and create a new pm at the same
* locallevel as an unset one we use the pm struct anyway: that's
* handled in createparam(). Here we just avoid using it for the
* present tests if it's unset.
*
* POSIXBUILTINS horror: we need to retain the 'readonly' or 'export'
* flags of an unset parameter.
*/
usepm = pm && (!(pm->node.flags & PM_UNSET) ||
(isset(POSIXBUILTINS) &&
(pm->node.flags & (PM_READONLY|PM_EXPORTED))));
/*
* We need to compare types with an existing pm if special,
* even if that's unset
*/
if (!usepm && pm && (pm->node.flags & PM_SPECIAL))
usepm = 2; /* indicate that we preserve the PM_UNSET flag */
/*
* Don't use an existing param if
* - the local level has changed, and
* - we are really localizing the parameter
*/
if (usepm && locallevel != pm->level && (on & PM_LOCAL)) {
/*
* If the original parameter was special and we're creating
* a new one, we need to keep it special.
*
* The -h (hide) flag prevents an existing special being made
* local. It can be applied either to the special or in the
* typeset/local statement for the local variable.
*/
if ((pm->node.flags & PM_SPECIAL)
&& !(on & PM_HIDE) && !(pm->node.flags & PM_HIDE & ~off))
newspecial = NS_NORMAL;
usepm = 0;
}
/* attempting a type conversion, or making a tied colonarray? */
tc = 0;
if (ASG_ARRAYP(asg) && PM_TYPE(on) == PM_SCALAR &&
!(usepm && (PM_TYPE(pm->node.flags) & (PM_ARRAY|PM_HASHED))))
on |= PM_ARRAY;
if (usepm && ASG_ARRAYP(asg) && newspecial == NS_NONE &&
PM_TYPE(pm->node.flags) != PM_ARRAY &&
PM_TYPE(pm->node.flags) != PM_HASHED) {
if (on & (PM_EFLOAT|PM_FFLOAT|PM_INTEGER)) {
zerrnam(cname, "%s: can't assign array value to non-array", pname);
return NULL;
}
if (pm->node.flags & PM_SPECIAL) {
zerrnam(cname, "%s: can't assign array value to non-array special", pname);
return NULL;
}
tc = 1;
usepm = 0;
}
else if (usepm || newspecial != NS_NONE) {
int chflags = ((off & pm->node.flags) | (on & ~pm->node.flags)) &
(PM_INTEGER|PM_EFLOAT|PM_FFLOAT|PM_HASHED|
PM_ARRAY|PM_TIED|PM_AUTOLOAD);
/* keep the parameter if just switching between floating types */
if ((tc = chflags && chflags != (PM_EFLOAT|PM_FFLOAT)))
usepm = 0;
}
/*
* Extra checks if converting the type of a parameter, or if
* trying to remove readonlyness. It's dangerous doing either
* with a special or a parameter which isn't loaded yet (which
* may be special when it is loaded; we can't tell yet).
*/
if ((readonly =
((usepm || newspecial != NS_NONE) &&
(off & pm->node.flags & PM_READONLY))) ||
tc) {
if (pm->node.flags & PM_SPECIAL) {
int err = 1;
if (!readonly && !strcmp(pname, "SECONDS"))
{
/*
* We allow SECONDS to change type between integer
* and floating point. If we are creating a new
* local copy we check the type here and allow
* a new special to be created with that type.
* We then need to make sure the correct type
* for the special is restored at the end of the scope.
* If we are changing the type of an existing
* parameter, we do the whole thing here.
*/
if (newspecial != NS_NONE)
{
/*
* The first test allows `typeset' to copy the
* existing type. This is the usual behaviour
* for making special parameters local.
*/
if (PM_TYPE(on) == 0 || PM_TYPE(on) == PM_INTEGER ||
PM_TYPE(on) == PM_FFLOAT || PM_TYPE(on) == PM_EFLOAT)
{
newspecial = NS_SECONDS;
err = 0; /* and continue */
tc = 0; /* but don't do a normal conversion */
}
} else if (!setsecondstype(pm, on, off)) {
if (asg->value.scalar &&
!(pm = assignsparam(
pname, ztrdup(asg->value.scalar), 0)))
return NULL;
usepm = 1;
err = 0;
}
}
if (err)
{
zerrnam(cname, "%s: can't change type of a special parameter",
pname);
return NULL;
}
} else if (pm->node.flags & PM_AUTOLOAD) {
zerrnam(cname, "%s: can't change type of autoloaded parameter",
pname);
return NULL;
}
}
else if (newspecial != NS_NONE && strcmp(pname, "SECONDS") == 0)
newspecial = NS_SECONDS;
if (isset(POSIXBUILTINS)) {
/*
* Stricter rules about retaining readonly attribute in this case.
*/
if ((on & (PM_READONLY|PM_EXPORTED)) &&
(!usepm || (pm->node.flags & PM_UNSET)) &&
!ASG_VALUEP(asg))
on |= PM_UNSET;
else if (usepm && (pm->node.flags & PM_READONLY) &&
!(on & PM_READONLY)) {
zerr("read-only variable: %s", pm->node.nam);
return NULL;
}
/* This is handled by createparam():
if (usepm && (pm->node.flags & PM_EXPORTED) && !(off & PM_EXPORTED))
on |= PM_EXPORTED;
*/
}
/*
* A parameter will be local if
* 1. we are re-using an existing local parameter
* or
* 2. we are not using an existing parameter, but
* i. there is already a parameter, which will be hidden
* or
* ii. we are creating a new local parameter
*/
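/*
* Sketch of the common cases (names illustrative): inside a function,
* `typeset foo' hides an existing global foo behind a new local (2.i),
* `local bar' creates a fresh local (2.ii), and repeating `typeset foo'
* at the same local level re-uses the local just created (1).
*/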
if (usepm) {
if ((asg->flags & ASG_ARRAY) ?
!(PM_TYPE(pm->node.flags) & (PM_ARRAY|PM_HASHED)) :
(asg->value.scalar && (PM_TYPE(pm->node.flags &
(PM_ARRAY|PM_HASHED))))) {
zerrnam(cname, "%s: inconsistent type for assignment", pname);
return NULL;
}
on &= ~PM_LOCAL;
if (!on && !roff && !ASG_VALUEP(asg)) {
if (OPT_ISSET(ops,'p'))
paramtab->printnode(&pm->node, PRINT_TYPESET);
else if (!OPT_ISSET(ops,'g') &&
(unset(TYPESETSILENT) || OPT_ISSET(ops,'m')))
paramtab->printnode(&pm->node, PRINT_INCLUDEVALUE);
return pm;
}
if ((pm->node.flags & PM_RESTRICTED) && isset(RESTRICTED)) {
zerrnam(cname, "%s: restricted", pname);
return pm;
}
if ((on & PM_UNIQUE) && !(pm->node.flags & PM_READONLY & ~off)) {
Param apm;
char **x;
if (PM_TYPE(pm->node.flags) == PM_ARRAY) {
x = (*pm->gsu.a->getfn)(pm);
uniqarray(x);
if (pm->node.flags & PM_SPECIAL) {
if (zheapptr(x))
x = zarrdup(x);
(*pm->gsu.a->setfn)(pm, x);
} else if (pm->ename && x)
arrfixenv(pm->ename, x);
} else if (PM_TYPE(pm->node.flags) == PM_SCALAR && pm->ename &&
(apm =
(Param) paramtab->getnode(paramtab, pm->ename))) {
x = (*apm->gsu.a->getfn)(apm);
uniqarray(x);
if (x)
arrfixenv(pm->node.nam, x);
}
}
if (usepm == 2) /* do not change the PM_UNSET flag */
pm->node.flags = (pm->node.flags | (on & ~PM_READONLY)) & ~off;
else {
/*
* Keep the PM_UNSET flag if we are applying readonly in POSIX
* mode; otherwise clear it.
*/
if (!(on & PM_READONLY) || !isset(POSIXBUILTINS))
off |= PM_UNSET;
pm->node.flags = (pm->node.flags |
(on & ~PM_READONLY)) & ~off;
}
if (on & (PM_LEFT | PM_RIGHT_B | PM_RIGHT_Z)) {
if (typeset_setwidth(cname, pm, ops, on, 0))
return NULL;
}
if (on & (PM_INTEGER | PM_EFLOAT | PM_FFLOAT)) {
if (typeset_setbase(cname, pm, ops, on, 0))
return NULL;
}
if (!(pm->node.flags & (PM_ARRAY|PM_HASHED))) {
if (pm->node.flags & PM_EXPORTED) {
if (!(pm->node.flags & PM_UNSET) && !pm->env && !ASG_VALUEP(asg))
addenv(pm, getsparam(pname));
} else if (pm->env && !(pm->node.flags & PM_HASHELEM))
delenv(pm);
DPUTS(ASG_ARRAYP(asg), "BUG: typeset got array value where scalar expected");
if (asg->value.scalar &&
!(pm = assignsparam(pname, ztrdup(asg->value.scalar), 0)))
return NULL;
} else if (asg->flags & ASG_ARRAY) {
int flags = (asg->flags & ASG_KEY_VALUE) ? ASSPM_KEY_VALUE : 0;
if (!(pm = assignaparam(pname, asg->value.array ?
zlinklist2array(asg->value.array) :
mkarray(NULL), flags)))
return NULL;
}
if (errflag)
return NULL;
pm->node.flags |= (on & PM_READONLY);
if (OPT_ISSET(ops,'p'))
paramtab->printnode(&pm->node, PRINT_TYPESET);
return pm;
}
if ((asg->flags & ASG_ARRAY) ?
!(on & (PM_ARRAY|PM_HASHED)) :
(asg->value.scalar && (on & (PM_ARRAY|PM_HASHED)))) {
zerrnam(cname, "%s: inconsistent type for assignment", pname);
return NULL;
}
/*
* We're here either because we're creating a new parameter,
* or we're adding a parameter at a different local level,
* or we're converting the type of a parameter. In the
* last case only, we need to delete the old parameter.
*/
if (tc) {
/* Maintain existing readonly/exported status... */
on |= ~off & (PM_READONLY|PM_EXPORTED) & pm->node.flags;
/* ...but turn off existing readonly so we can delete it */
pm->node.flags &= ~PM_READONLY;
/*
* If we're just changing the type, we should keep the
* variable at the current level of localness.
*/
keeplocal = pm->level;
/*
* Try to carry over a value, but not when changing from,
* to, or between non-scalar types.
*
* (We can do better now, but it does have user-visible
* implications.)
*/
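/*
* E.g. (sketch): `x=3; typeset -i x' leaves x holding 3 as an integer,
* whereas a conversion from, to, or between array and hash types does
* not carry the old value across.
*/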
if (!ASG_VALUEP(asg) && !((pm->node.flags|on) & (PM_ARRAY|PM_HASHED))) {
asg->value.scalar = dupstring(getsparam(pname));
asg->flags = 0;
}
/* pname may point to pm->nam which is about to disappear */
pname = dupstring(pname);
unsetparam_pm(pm, 0, 1);
}
if (newspecial != NS_NONE) {
Param tpm, pm2;
if ((pm->node.flags & PM_RESTRICTED) && isset(RESTRICTED)) {
zerrnam(cname, "%s: restricted", pname);
return pm;
}
if (pm->node.flags & PM_SINGLE) {
zerrnam(cname, "%s: can only have a single instance", pname);
return pm;
}
/*
* For specials, we keep the same struct but zero everything.
* Maybe it would be easier to create a new struct but copy
* the get/set methods.
*/
tpm = (Param) zshcalloc(sizeof *tpm);
tpm->node.nam = pm->node.nam;
if (pm->ename &&
(pm2 = (Param) paramtab->getnode(paramtab, pm->ename)) &&
pm2->level == locallevel) {
/* This is getting silly, but anyway: if one of a path/PATH
* pair has already been made local at the current level, we
* have to make sure that the other one does not have its value
* saved: since that comes from an internal variable it will
* already reflect the local value, so restoring it on exit
* would be wrong.
*
* This problem is also why we make sure we have a copy
* of the environment entry in tpm->env, rather than relying
* on the restored value to provide it.
*/
tpm->node.flags = pm->node.flags | PM_NORESTORE;
} else {
copyparam(tpm, pm, 1);
}
tpm->old = pm->old;
tpm->level = pm->level;
tpm->base = pm->base;
tpm->width = pm->width;
if (pm->env)
delenv(pm);
tpm->env = NULL;
pm->old = tpm;
/*
* The remaining on/off flags should be harmless to use,
* because we've checked for unpleasant surprises above.
*/
pm->node.flags = (PM_TYPE(pm->node.flags) | on | PM_SPECIAL) & ~off;
/*
* Readonlyness of special parameters must be preserved.
*/
pm->node.flags |= tpm->node.flags & PM_READONLY;
if (newspecial == NS_SECONDS) {
/* We save off the raw internal value of the SECONDS var */
tpm->u.dval = getrawseconds();
setsecondstype(pm, on, off);
}
/*
* Final tweak: if we've turned on one of the flags with
* numbers, we should use the appropriate integer.
*/
if (on & (PM_LEFT|PM_RIGHT_B|PM_RIGHT_Z)) {
if (typeset_setwidth(cname, pm, ops, on, 1))
return NULL;
}
if (on & (PM_INTEGER|PM_EFLOAT|PM_FFLOAT)) {
if (typeset_setbase(cname, pm, ops, on, 1))
return NULL;
}
} else if ((subscript = strchr(pname, '['))) {
if (on & PM_READONLY) {
zerrnam(cname,
"%s: can't create readonly array elements", pname);
return NULL;
} else if ((on & PM_LOCAL) && locallevel) {
*subscript = 0;
pm = (Param) (paramtab == realparamtab ?
/* getnode2() to avoid autoloading */
paramtab->getnode2(paramtab, pname) :
paramtab->getnode(paramtab, pname));
*subscript = '[';
if (!pm || pm->level != locallevel) {
zerrnam(cname,
"%s: can't create local array elements", pname);
return NULL;
}
}
if (PM_TYPE(on) == PM_SCALAR && !ASG_ARRAYP(asg)) {
/*
* This will either complain about bad identifiers, or will set
* a hash element or array slice. This once worked by accident,
* creating a stray parameter along the way via createparam(),
* now called below in the isident() branch.
*/
if (!(pm = assignsparam(
pname,
ztrdup(asg->value.scalar ? asg->value.scalar : ""), 0)))
return NULL;
dont_set = 1;
asg->flags = 0;
keeplocal = 0;
on = pm->node.flags;
} else if (PM_TYPE(on) == PM_ARRAY && ASG_ARRAYP(asg)) {
int flags = (asg->flags & ASG_KEY_VALUE) ? ASSPM_KEY_VALUE : 0;
if (!(pm = assignaparam(pname, asg->value.array ?
zlinklist2array(asg->value.array) :
mkarray(NULL), flags)))
return NULL;
dont_set = 1;
keeplocal = 0;
on = pm->node.flags;
} else {
zerrnam(cname,
"%s: inconsistent array element or slice assignment", pname);
return NULL;
}
}
/*
* As we can hide existing parameters, we allow a name if
* it's not a normal identifier but is one of the special
* set found in the parameter table. The second test is
* because we can set individual positional parameters;
* however "0" is not a positional parameter and is OK.
*
* It would be neater to extend isident() and be clearer
* about where we allow various parameter types. It's
* not entirely clear to me that isident() should reject
* specially named parameters given that it accepts digits.
*/
else if ((isident(pname) || paramtab->getnode(paramtab, pname))
&& (!idigit(*pname) || !strcmp(pname, "0"))) {
/*
* Create a new node for a parameter with the flags in `on' minus the
* readonly flag
*/
pm = createparam(pname, on & ~PM_READONLY);
if (!pm) {
if (on & (PM_LEFT | PM_RIGHT_B | PM_RIGHT_Z |
PM_INTEGER | PM_EFLOAT | PM_FFLOAT))
zerrnam(cname, "can't change variable attribute: %s", pname);
return NULL;
}
if (on & (PM_LEFT | PM_RIGHT_B | PM_RIGHT_Z)) {
if (typeset_setwidth(cname, pm, ops, on, 0))
return NULL;
}
if (on & (PM_INTEGER | PM_EFLOAT | PM_FFLOAT)) {
if (typeset_setbase(cname, pm, ops, on, 0))
return NULL;
}
} else {
if (idigit(*pname))
zerrnam(cname, "not an identifier: %s", pname);
else
zerrnam(cname, "not valid in this context: %s", pname);
return NULL;
}
if (altpm && PM_TYPE(pm->node.flags) == PM_SCALAR) {
/*
* It seems safer to set this here than in createparam(),
* to make sure we only ever use the colonarr functions
* when u.data is correctly set.
*/
struct tieddata *tdp = (struct tieddata *)
zalloc(sizeof(struct tieddata));
if (!tdp)
return NULL;
tdp->joinchar = joinchar;
tdp->arrptr = &altpm->u.arr;
pm->gsu.s = &tiedarr_gsu;
pm->u.data = tdp;
}
if (keeplocal)
pm->level = keeplocal;
else if (on & PM_LOCAL)
pm->level = locallevel;
if (ASG_VALUEP(asg) && !dont_set) {
Param ipm = pm;
if (pm->node.flags & (PM_ARRAY|PM_HASHED)) {
char **arrayval;
int flags = (asg->flags & ASG_KEY_VALUE) ? ASSPM_KEY_VALUE : 0;
if (!ASG_ARRAYP(asg)) {
/*
* Attempt to assign a scalar value to an array.
* This can happen if the array is special.
* We'll be lenient and guess what the user meant.
* This is how normal assignment works.
*/
if (*asg->value.scalar) {
/* Array with one value */
arrayval = mkarray(ztrdup(asg->value.scalar));
} else {
/* Empty array */
arrayval = mkarray(NULL);
}
} else if (asg->value.array)
arrayval = zlinklist2array(asg->value.array);
else
arrayval = mkarray(NULL);
if (!(pm=assignaparam(pname, arrayval, flags)))
return NULL;
} else {
DPUTS(ASG_ARRAYP(asg), "BUG: inconsistent array value for scalar");
if (!(pm = assignsparam(pname, ztrdup(asg->value.scalar), 0)))
return NULL;
}
if (pm != ipm) {
DPUTS(ipm->node.flags != pm->node.flags,
"BUG: parameter recreated with wrong flags");
unsetparam_pm(ipm, 0, 1);
}
} else if (newspecial != NS_NONE &&
!(pm->old->node.flags & (PM_NORESTORE|PM_READONLY))) {
/*
* We need to use the special setting function to re-initialise
* the special parameter to empty.
*/
switch (PM_TYPE(pm->node.flags)) {
case PM_SCALAR:
pm->gsu.s->setfn(pm, ztrdup(""));
break;
case PM_INTEGER:
/*
* Restricted integers are dangerous to initialize to 0,
* so don't do that.
*/
if (!(pm->old->node.flags & PM_RESTRICTED))
pm->gsu.i->setfn(pm, 0);
break;
case PM_EFLOAT:
case PM_FFLOAT:
pm->gsu.f->setfn(pm, 0.0);
break;
case PM_ARRAY:
pm->gsu.a->setfn(pm, mkarray(NULL));
break;
case PM_HASHED:
pm->gsu.h->setfn(pm, newparamtable(17, pm->node.nam));
break;
}
}
pm->node.flags |= (on & PM_READONLY);
if (OPT_ISSET(ops,'p'))
paramtab->printnode(&pm->node, PRINT_TYPESET);
return pm;
}
/*
* declare, export, float, integer, local, readonly, typeset
*
* Note the difference in interface from most builtins, covered by the
* BINF_ASSIGN builtin flag. This is only made use of by builtins
* called by reserved word, which only covers declare, local, readonly
* and typeset. Otherwise assigns is NULL.
*/
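/*
* Illustrative front-end usage handled by this builtin (a sketch, not a
* complete list of options; names are illustrative):
*   typeset -i count=0          # integer scalar
*   local -a words              # array, local to the calling function
*   readonly name=zsh           # scalar that can no longer be assigned
*   typeset -A opts             # associative array
* When invoked via the reserved-word interface (e.g. `typeset arr=(a b)'),
* the parsed assignments arrive in `assigns' rather than in argv.
*/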
/**/
int
bin_typeset(char *name, char **argv, LinkList assigns, Options ops, int func)
{
Param pm;
Asgment asg;
Patprog pprog;
char *optstr = TYPESET_OPTSTR;
int on = 0, off = 0, roff, bit = PM_ARRAY;
int i;
int returnval = 0, printflags = 0;
int hasargs;
/* hash -f is really the builtin `functions' */
if (OPT_ISSET(ops,'f'))
return bin_functions(name, argv, ops, func);
/* POSIX handles "readonly" specially */
if (func == BIN_READONLY && isset(POSIXBUILTINS) && !OPT_PLUS(ops, 'g'))
ops->ind['g'] = 1;
/* Translate the options into PM_* flags. *
* Unfortunately, this depends on the order *
* these flags are defined in zsh.h */
for (; *optstr; optstr++, bit <<= 1)
{
int optval = STOUC(*optstr);
if (OPT_MINUS(ops,optval))
on |= bit;
else if (OPT_PLUS(ops,optval))
off |= bit;
}
roff = off;
/* Sanity checks on the options. Remove conflicting options. */
if (on & PM_FFLOAT) {
off |= PM_UPPER | PM_ARRAY | PM_HASHED | PM_INTEGER | PM_EFLOAT;
/* Allow `float -F' to work even though float sets -E by default */
on &= ~PM_EFLOAT;
}
if (on & PM_EFLOAT)
off |= PM_UPPER | PM_ARRAY | PM_HASHED | PM_INTEGER | PM_FFLOAT;
if (on & PM_INTEGER)
off |= PM_UPPER | PM_ARRAY | PM_HASHED | PM_EFLOAT | PM_FFLOAT;
/*
* Allowing -Z with -L is a feature: left justify, suppressing
* leading zeroes.
*/
if (on & (PM_LEFT|PM_RIGHT_Z))
off |= PM_RIGHT_B;
if (on & PM_RIGHT_B)
off |= PM_LEFT | PM_RIGHT_Z;
if (on & PM_UPPER)
off |= PM_LOWER;
if (on & PM_LOWER)
off |= PM_UPPER;
if (on & PM_HASHED)
off |= PM_ARRAY;
if (on & PM_TIED)
off |= PM_INTEGER | PM_EFLOAT | PM_FFLOAT | PM_ARRAY | PM_HASHED;
on &= ~off;
queue_signals();
/* Given no arguments, list whatever the options specify. */
if (OPT_ISSET(ops,'p')) {
printflags |= PRINT_TYPESET;
if (OPT_HASARG(ops,'p')) {
char *eptr;
int pflag = (int)zstrtol(OPT_ARG(ops,'p'), &eptr, 10);
if (pflag == 1 && !*eptr)
printflags |= PRINT_LINE;
else if (pflag || *eptr) {
zwarnnam(name, "bad argument to -p: %s", OPT_ARG(ops,'p'));
unqueue_signals();
return 1;
}
/* -p0 treated as -p for consistency */
}
}
hasargs = *argv != NULL || (assigns && firstnode(assigns));
if (!hasargs) {
if (!OPT_ISSET(ops,'p')) {
if (!(on|roff))
printflags |= PRINT_TYPE;
if (roff || OPT_ISSET(ops,'+'))
printflags |= PRINT_NAMEONLY;
}
scanhashtable(paramtab, 1, on|roff, 0, paramtab->printnode, printflags);
unqueue_signals();
return 0;
}
if (!(OPT_ISSET(ops,'g') || OPT_ISSET(ops,'x') || OPT_ISSET(ops,'m')) ||
OPT_PLUS(ops,'g') || *name == 'l' ||
(!isset(GLOBALEXPORT) && !OPT_ISSET(ops,'g')))
on |= PM_LOCAL;
if (on & PM_TIED) {
Param apm;
struct asgment asg0, asg2;
char *oldval = NULL, *joinstr;
int joinchar, nargs;
if (OPT_ISSET(ops,'m')) {
zwarnnam(name, "incompatible options for -T");
unqueue_signals();
return 1;
}
on &= ~off;
nargs = arrlen(argv) + (assigns ? countlinknodes(assigns) : 0);
if (nargs < 2) {
zwarnnam(name, "-T requires names of scalar and array");
unqueue_signals();
return 1;
}
if (nargs > 3) {
zwarnnam(name, "too many arguments for -T");
unqueue_signals();
return 1;
}
if (!(asg = getasg(&argv, assigns))) {
unqueue_signals();
return 1;
}
asg0 = *asg;
if (ASG_ARRAYP(&asg0)) {
unqueue_signals();
zwarnnam(name, "first argument of tie must be scalar: %s",
asg0.name);
return 1;
}
if (!(asg = getasg(&argv, assigns))) {
unqueue_signals();
return 1;
}
if (!ASG_ARRAYP(asg) && asg->value.scalar) {
unqueue_signals();
zwarnnam(name, "second argument of tie must be array: %s",
asg->name);
return 1;
}
if (!strcmp(asg0.name, asg->name)) {
unqueue_signals();
zerrnam(name, "can't tie a variable to itself: %s", asg0.name);
return 1;
}
if (strchr(asg0.name, '[') || strchr(asg->name, '[')) {
unqueue_signals();
zerrnam(name, "can't tie array elements: %s", asg0.name);
return 1;
}
if (ASG_VALUEP(asg) && ASG_VALUEP(&asg0)) {
unqueue_signals();
zerrnam(name, "only one tied parameter can have value: %s", asg0.name);
return 1;
}
/*
* Third argument, if given, is character used to join
* the elements of the array in the scalar.
*/
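/*
* For example (sketch, names illustrative): `typeset -T TEXINPUTS texinputs'
* ties the pair with the default ':', while `typeset -T FOO foo ,' joins
* the array elements with a comma in the scalar.
*/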
if (*argv)
joinstr = *argv;
else if (assigns && firstnode(assigns)) {
Asgment nextasg = (Asgment)firstnode(assigns);
if (ASG_ARRAYP(nextasg) || ASG_VALUEP(nextasg)) {
zwarnnam(name, "third argument of tie must be join character");
unqueue_signals();
return 1;
}
joinstr = nextasg->name;
} else
joinstr = NULL;
if (!joinstr)
joinchar = ':';
else if (!*joinstr)
joinchar = 0;
else if (*joinstr == Meta)
joinchar = joinstr[1] ^ 32;
else
joinchar = *joinstr;
/*
* Keep the old value of the scalar. We need to do this
* here as if it is already tied to the same array it
* will be unset when we retie the array. This is all
* so that typeset -T is idempotent.
*
* We also need to remember here whether the damn thing is
* exported and pass that along. Isn't the world complicated?
*/
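/*
* Sketch of why this matters: running `typeset -T FOO foo' twice in a
* row should leave an existing $FOO value intact rather than wiping it
* when the array is re-tied.
*/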
if ((pm = (Param) paramtab->getnode(paramtab, asg0.name))
&& !(pm->node.flags & PM_UNSET)
&& (locallevel == pm->level || !(on & PM_LOCAL))) {
if (pm->node.flags & PM_TIED) {
unqueue_signals();
if (PM_TYPE(pm->node.flags) != PM_SCALAR) {
zwarnnam(name, "already tied as non-scalar: %s", asg0.name);
} else if (!strcmp(asg->name, pm->ename)) {
/*
* Already tied in the fashion requested.
*/
struct tieddata *tdp = (struct tieddata*)pm->u.data;
int flags = (asg->flags & ASG_KEY_VALUE) ?
ASSPM_KEY_VALUE : 0;
/* Update join character */
tdp->joinchar = joinchar;
if (asg0.value.scalar)
assignsparam(asg0.name, ztrdup(asg0.value.scalar), 0);
else if (asg->value.array)
assignaparam(
asg->name, zlinklist2array(asg->value.array),flags);
return 0;
} else {
zwarnnam(name, "can't tie already tied scalar: %s",
asg0.name);
}
return 1;
}
if (!asg0.value.scalar && !asg->value.array &&
!(PM_TYPE(pm->node.flags) & (PM_ARRAY|PM_HASHED)))
oldval = ztrdup(getsparam(asg0.name));
on |= (pm->node.flags & PM_EXPORTED);
}
/*
* Create the tied array; this is normal except that
* it has the PM_TIED flag set. Do it first because
* we need the address.
*
* Don't attempt to set it yet, it's too early
* to be exported properly.
*/
asg2.name = asg->name;
asg2.flags = 0;
asg2.value.array = (LinkList)0;
if (!(apm=typeset_single(name, asg->name,
(Param)paramtab->getnode(paramtab,
asg->name),
func, (on | PM_ARRAY) & ~PM_EXPORTED,
off, roff, &asg2, NULL, ops, 0))) {
if (oldval)
zsfree(oldval);
unqueue_signals();
return 1;
}
/*
* Create the tied colonarray. We make it as a normal scalar
* and fix up the oddities later.
*/
if (!(pm=typeset_single(name, asg0.name,
(Param)paramtab->getnode(paramtab,
asg0.name),
func, on, off, roff, &asg0, apm,
ops, joinchar))) {
if (oldval)
zsfree(oldval);
unsetparam_pm(apm, 1, 1);
unqueue_signals();
return 1;
}
/*
* pm->ename is only deleted when the struct is, so
* we need to free it here if it already exists.
*/
if (pm->ename)
zsfree(pm->ename);
pm->ename = ztrdup(asg->name);
if (apm->ename)
zsfree(apm->ename);
apm->ename = ztrdup(asg0.name);
if (asg->value.array) {
int flags = (asg->flags & ASG_KEY_VALUE) ? ASSPM_KEY_VALUE : 0;
assignaparam(asg->name, zlinklist2array(asg->value.array), flags);
} else if (oldval)
assignsparam(asg0.name, oldval, 0);
unqueue_signals();
return 0;
}
if (off & PM_TIED) {
unqueue_signals();
zerrnam(name, "use unset to remove tied variables");
return 1;
}
/* With the -m option, treat arguments as glob patterns */
if (OPT_ISSET(ops,'m')) {
if (!OPT_ISSET(ops,'p')) {
if (!(on|roff))
printflags |= PRINT_TYPE;
if (!on)
printflags |= PRINT_NAMEONLY;
}
while ((asg = getasg(&argv, assigns))) {
LinkList pmlist = newlinklist();
LinkNode pmnode;
tokenize(asg->name); /* expand argument */
if (!(pprog = patcompile(asg->name, 0, NULL))) {
untokenize(asg->name);
zwarnnam(name, "bad pattern : %s", asg->name);
returnval = 1;
continue;
}
if (OPT_PLUS(ops,'m') && !ASG_VALUEP(asg)) {
scanmatchtable(paramtab, pprog, 1, on|roff, 0,
paramtab->printnode, printflags);
continue;
}
/*
* Search through the parameter table and change all parameters
* matching the glob pattern to have these flags and/or value.
* Bad news: if the parameter gets altered, e.g. by
* a type conversion, then paramtab can be shifted around,
* so we need to store the parameters to alter on a separate
* list for later use.
*/
for (i = 0; i < paramtab->hsize; i++) {
for (pm = (Param) paramtab->nodes[i]; pm;
pm = (Param) pm->node.next) {
if (((pm->node.flags & PM_RESTRICTED) && isset(RESTRICTED)) ||
(pm->node.flags & PM_UNSET))
continue;
if (pattry(pprog, pm->node.nam))
addlinknode(pmlist, pm);
}
}
for (pmnode = firstnode(pmlist); pmnode; incnode(pmnode)) {
pm = (Param) getdata(pmnode);
if (!typeset_single(name, pm->node.nam, pm, func, on, off, roff,
asg, NULL, ops, 0))
returnval = 1;
}
}
unqueue_signals();
return returnval;
}
/* Take arguments literally. Don't glob */
while ((asg = getasg(&argv, assigns))) {
HashNode hn = (paramtab == realparamtab ?
/* getnode2() to avoid autoloading */
paramtab->getnode2(paramtab, asg->name) :
paramtab->getnode(paramtab, asg->name));
if (OPT_ISSET(ops,'p')) {
if (hn)
paramtab->printnode(hn, printflags);
else {
zwarnnam(name, "no such variable: %s", asg->name);
returnval = 1;
}
continue;
}
if (!typeset_single(name, asg->name, (Param)hn,
func, on, off, roff, asg, NULL,
ops, 0))
returnval = 1;
}
unqueue_signals();
return returnval;
}
/* Helper for bin_functions() when run as "autoload -X" */
/**/
int
eval_autoload(Shfunc shf, char *name, Options ops, int func)
{
if (!(shf->node.flags & PM_UNDEFINED))
return 1;
if (shf->funcdef) {
freeeprog(shf->funcdef);
shf->funcdef = &dummy_eprog;
}
if (OPT_MINUS(ops,'X')) {
char *fargv[3];
fargv[0] = name;
fargv[1] = "\"$@\"";
fargv[2] = 0;
shf->funcdef = mkautofn(shf);
return bin_eval(name, fargv, ops, func);
}
return !loadautofn(shf, (OPT_ISSET(ops,'k') ? 2 :
(OPT_ISSET(ops,'z') ? 0 : 1)), 1,
OPT_ISSET(ops,'d'));
}
/* Helper for bin_functions() for -X and -r options */
/**/
static int
check_autoload(Shfunc shf, char *name, Options ops, int func)
{
if (OPT_ISSET(ops,'X'))
{
return eval_autoload(shf, name, ops, func);
}
if ((OPT_ISSET(ops,'r') || OPT_ISSET(ops,'R')) &&
(shf->node.flags & PM_UNDEFINED))
{
char *dir_path;
if (shf->filename && (shf->node.flags & PM_LOADDIR)) {
char *spec_path[2];
spec_path[0] = shf->filename;
spec_path[1] = NULL;
if (getfpfunc(shf->node.nam, NULL, &dir_path, spec_path, 1)) {
/* shf->filename is already correct. */
return 0;
}
if (!OPT_ISSET(ops,'d')) {
if (OPT_ISSET(ops,'R')) {
zerr("%s: function definition file not found",
shf->node.nam);
return 1;
}
return 0;
}
}
if (getfpfunc(shf->node.nam, NULL, &dir_path, NULL, 1)) {
dircache_set(&shf->filename, NULL);
if (*dir_path != '/') {
dir_path = zhtricat(metafy(zgetcwd(), -1, META_HEAPDUP),
"/", dir_path);
dir_path = xsymlink(dir_path, 1);
}
dircache_set(&shf->filename, dir_path);
shf->node.flags |= PM_LOADDIR;
return 0;
}
if (OPT_ISSET(ops,'R')) {
zerr("%s: function definition file not found",
shf->node.nam);
return 1;
}
/* with -r, we don't flag an error, just let it be found later. */
}
return 0;
}
/* List a user-defined math function. */
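/*
* The output re-creates a `functions -M' command that would define the
* function again, e.g. (sketch) a function registered with
* `functions -M zmax 2 2 max_impl' is listed as exactly that line;
* trailing arguments are omitted when they hold their default values.
*/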
static void
listusermathfunc(MathFunc p)
{
int showargs;
if (p->module)
showargs = 3;
else if (p->maxargs != (p->minargs ? p->minargs : -1))
showargs = 2;
else if (p->minargs)
showargs = 1;
else
showargs = 0;
printf("functions -M%s %s", (p->flags & MFF_STR) ? "s" : "", p->name);
if (showargs) {
printf(" %d", p->minargs);
showargs--;
}
if (showargs) {
printf(" %d", p->maxargs);
showargs--;
}
if (showargs) {
/*
* the shell function implementing a user math function (held in the
* module field) need not consist of ident characters, so quote it
*/
putchar(' ');
quotedzputs(p->module, stdout);
showargs--;
}
putchar('\n');
}
static void
add_autoload_function(Shfunc shf, char *funcname)
{
char *nam;
if (*funcname == '/' && funcname[1] &&
(nam = strrchr(funcname, '/')) && nam[1] &&
(shf->node.flags & PM_UNDEFINED)) {
char *dir;
nam = strrchr(funcname, '/');
if (nam == funcname) {
dir = "/";
} else {
*nam++ = '\0';
dir = funcname;
}
dircache_set(&shf->filename, NULL);
dircache_set(&shf->filename, dir);
shf->node.flags |= PM_LOADDIR;
shf->node.flags |= PM_ABSPATH_USED;
shfunctab->addnode(shfunctab, ztrdup(nam), shf);
} else {
Shfunc shf2;
Funcstack fs;
const char *calling_f = NULL;
char buf[PATH_MAX+1];
/* Find calling function */
for (fs = funcstack; fs; fs = fs->prev) {
if (fs->tp == FS_FUNC && fs->name && (!shf->node.nam || 0 != strcmp(fs->name,shf->node.nam))) {
calling_f = fs->name;
break;
}
}
/* Get its directory */
if (calling_f) {
/* Should contain load directory, and be loaded via absolute path */
if ((shf2 = (Shfunc) shfunctab->getnode2(shfunctab, calling_f))
&& (shf2->node.flags & PM_LOADDIR) && (shf2->node.flags & PM_ABSPATH_USED)
&& shf2->filename)
{
if (strlen(shf2->filename) + strlen(funcname) + 1 < PATH_MAX)
{
sprintf(buf, "%s/%s", shf2->filename, funcname);
/* Set containing directory if the function file
* exists (do normal FPATH processing otherwise) */
if (!access(buf, R_OK)) {
dircache_set(&shf->filename, NULL);
dircache_set(&shf->filename, shf2->filename);
shf->node.flags |= PM_LOADDIR;
shf->node.flags |= PM_ABSPATH_USED;
}
}
}
}
shfunctab->addnode(shfunctab, ztrdup(funcname), shf);
}
}
/* Display or change the attributes of shell functions. *
* If called as autoload, it will define a new autoloaded *
* (undefined) shell function. */
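/*
* Illustrative usage (a sketch, not exhaustive; function names are
* illustrative):
*   autoload -Uz compinit        # define compinit as autoloaded (undefined)
*   functions -t myfunc          # enable execution tracing for myfunc
*   functions -M zmax 2 2        # register zmax as a user math function
*   functions                    # list all defined functions
*/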
/**/
int
bin_functions(char *name, char **argv, Options ops, int func)
{
Patprog pprog;
Shfunc shf;
int i, returnval = 0;
int on = 0, off = 0, pflags = 0, roff, expand = 0;
/* Do we have any flags defined? */
if (OPT_PLUS(ops,'u'))
off |= PM_UNDEFINED;
else if (OPT_MINUS(ops,'u') || OPT_ISSET(ops,'X'))
on |= PM_UNDEFINED;
if (OPT_MINUS(ops,'U'))
on |= PM_UNALIASED|PM_UNDEFINED;
else if (OPT_PLUS(ops,'U'))
off |= PM_UNALIASED;
if (OPT_MINUS(ops,'t'))
on |= PM_TAGGED;
else if (OPT_PLUS(ops,'t'))
off |= PM_TAGGED;
if (OPT_MINUS(ops,'T'))
on |= PM_TAGGED_LOCAL;
else if (OPT_PLUS(ops,'T'))
off |= PM_TAGGED_LOCAL;
if (OPT_MINUS(ops,'W'))
on |= PM_WARNNESTED;
else if (OPT_PLUS(ops,'W'))
off |= PM_WARNNESTED;
roff = off;
if (OPT_MINUS(ops,'z')) {
on |= PM_ZSHSTORED;
off |= PM_KSHSTORED;
} else if (OPT_PLUS(ops,'z')) {
off |= PM_ZSHSTORED;
roff |= PM_ZSHSTORED;
}
if (OPT_MINUS(ops,'k')) {
on |= PM_KSHSTORED;
off |= PM_ZSHSTORED;
} else if (OPT_PLUS(ops,'k')) {
off |= PM_KSHSTORED;
roff |= PM_KSHSTORED;
}
if (OPT_MINUS(ops,'d')) {
on |= PM_CUR_FPATH;
off |= PM_CUR_FPATH;
} else if (OPT_PLUS(ops,'d')) {
off |= PM_CUR_FPATH;
roff |= PM_CUR_FPATH;
}
if ((off & PM_UNDEFINED) || (OPT_ISSET(ops,'k') && OPT_ISSET(ops,'z')) ||
(OPT_ISSET(ops,'x') && !OPT_HASARG(ops,'x')) ||
(OPT_MINUS(ops,'X') && (OPT_ISSET(ops,'m') || !scriptname))) {
zwarnnam(name, "invalid option(s)");
return 1;
}
if (OPT_ISSET(ops,'x')) {
char *eptr;
expand = (int)zstrtol(OPT_ARG(ops,'x'), &eptr, 10);
if (*eptr) {
zwarnnam(name, "number expected after -x");
return 1;
}
if (expand == 0) /* no indentation at all */
expand = -1;
}
if (OPT_PLUS(ops,'f') || roff || OPT_ISSET(ops,'+'))
pflags |= PRINT_NAMEONLY;
if (OPT_MINUS(ops,'M') || OPT_PLUS(ops,'M')) {
MathFunc p, q, prev;
/*
* Add/remove/list function as mathematical.
*/
if (on || off || pflags || OPT_ISSET(ops,'X') || OPT_ISSET(ops,'u')
|| OPT_ISSET(ops,'U') || OPT_ISSET(ops,'w')) {
zwarnnam(name, "invalid option(s)");
return 1;
}
if (!*argv) {
/* List functions. */
queue_signals();
for (p = mathfuncs; p; p = p->next)
if (p->flags & MFF_USERFUNC)
listusermathfunc(p);
unqueue_signals();
} else if (OPT_ISSET(ops,'m')) {
/* List matching functions. */
for (; *argv; argv++) {
queue_signals();
tokenize(*argv);
if ((pprog = patcompile(*argv, PAT_STATIC, 0))) {
for (p = mathfuncs, q = NULL; p; q = p) {
MathFunc next;
do {
next = NULL;
if ((p->flags & MFF_USERFUNC) &&
pattry(pprog, p->name)) {
if (OPT_PLUS(ops,'M')) {
next = p->next;
removemathfunc(q, p);
p = next;
} else
listusermathfunc(p);
}
/* if we deleted one, retry with the new p */
} while (next);
if (p)
p = p->next;
}
} else {
untokenize(*argv);
zwarnnam(name, "bad pattern : %s", *argv);
returnval = 1;
}
unqueue_signals();
}
} else if (OPT_PLUS(ops,'M')) {
/* Delete functions. -m is allowed but is handled above. */
for (; *argv; argv++) {
queue_signals();
for (p = mathfuncs, q = NULL; p; q = p, p = p->next) {
if (!strcmp(p->name, *argv)) {
if (!(p->flags & MFF_USERFUNC)) {
zwarnnam(name, "+M %s: is a library function",
*argv);
returnval = 1;
break;
}
removemathfunc(q, p);
break;
}
}
unqueue_signals();
}
} else {
/* Add a function */
int minargs, maxargs;
char *funcname = *argv++;
char *modname = NULL;
char *ptr;
if (OPT_ISSET(ops,'s')) {
minargs = maxargs = 1;
} else {
minargs = 0;
maxargs = -1;
}
ptr = itype_end(funcname, IIDENT, 0);
if (idigit(*funcname) || funcname == ptr || *ptr) {
zwarnnam(name, "-M %s: bad math function name", funcname);
return 1;
}
if (*argv) {
minargs = (int)zstrtol(*argv, &ptr, 0);
if (minargs < 0 || *ptr) {
zwarnnam(name, "-M: invalid min number of arguments: %s",
*argv);
return 1;
}
if (OPT_ISSET(ops,'s') && minargs != 1) {
zwarnnam(name, "-Ms: must take a single string argument");
return 1;
}
maxargs = minargs;
argv++;
}
if (*argv) {
maxargs = (int)zstrtol(*argv, &ptr, 0);
if (maxargs < -1 ||
(maxargs != -1 && maxargs < minargs) ||
*ptr) {
zwarnnam(name,
"-M: invalid max number of arguments: %s",
*argv);
return 1;
}
if (OPT_ISSET(ops,'s') && maxargs != 1) {
zwarnnam(name, "-Ms: must take a single string argument");
return 1;
}
argv++;
}
if (*argv)
modname = *argv++;
if (*argv) {
zwarnnam(name, "-M: too many arguments");
return 1;
}
p = (MathFunc)zshcalloc(sizeof(struct mathfunc));
p->name = ztrdup(funcname);
p->flags = MFF_USERFUNC;
if (OPT_ISSET(ops,'s'))
p->flags |= MFF_STR;
p->module = modname ? ztrdup(modname) : NULL;
p->minargs = minargs;
p->maxargs = maxargs;
queue_signals();
for (q = mathfuncs, prev = NULL; q; prev = q, q = q->next) {
if (!strcmp(q->name, funcname)) {
removemathfunc(prev, q);
break;
}
}
p->next = mathfuncs;
mathfuncs = p;
unqueue_signals();
}
return returnval;
}
if (OPT_MINUS(ops,'X')) {
Funcstack fs;
char *funcname = NULL;
int ret;
if (*argv && argv[1]) {
zwarnnam(name, "-X: too many arguments");
return 1;
}
queue_signals();
for (fs = funcstack; fs; fs = fs->prev) {
if (fs->tp == FS_FUNC) {
/*
* dupstring here is paranoia but unlikely to be
* problematic
*/
funcname = dupstring(fs->name);
break;
}
}
if (!funcname)
{
zerrnam(name, "bad autoload");
ret = 1;
} else {
if ((shf = (Shfunc) shfunctab->getnode(shfunctab, funcname))) {
DPUTS(!shf->funcdef,
"BUG: Calling autoload from empty function");
} else {
shf = (Shfunc) zshcalloc(sizeof *shf);
shfunctab->addnode(shfunctab, ztrdup(funcname), shf);
}
if (*argv) {
dircache_set(&shf->filename, NULL);
dircache_set(&shf->filename, *argv);
on |= PM_LOADDIR;
}
shf->node.flags = on;
ret = eval_autoload(shf, funcname, ops, func);
}
unqueue_signals();
return ret;
} else if (!*argv) {
/* If no arguments given, we will print functions. If flags *
* are given, we will print only functions containing these *
* flags, else we'll print them all. */
int ret = 0;
queue_signals();
if (OPT_ISSET(ops,'U') && !OPT_ISSET(ops,'u'))
on &= ~PM_UNDEFINED;
scanshfunc(1, on|off, DISABLED, shfunctab->printnode,
pflags, expand);
unqueue_signals();
return ret;
}
/* With the -m option, treat arguments as glob patterns */
if (OPT_ISSET(ops,'m')) {
on &= ~PM_UNDEFINED;
for (; *argv; argv++) {
queue_signals();
/* expand argument */
tokenize(*argv);
if ((pprog = patcompile(*argv, PAT_STATIC, 0))) {
/* with no options, just print all functions matching the glob pattern */
if (!(on|off) && !OPT_ISSET(ops,'X')) {
scanmatchshfunc(pprog, 1, 0, DISABLED,
shfunctab->printnode, pflags, expand);
} else {
/* apply the options to all functions matching the glob pattern */
for (i = 0; i < shfunctab->hsize; i++) {
for (shf = (Shfunc) shfunctab->nodes[i]; shf;
shf = (Shfunc) shf->node.next)
if (pattry(pprog, shf->node.nam) &&
!(shf->node.flags & DISABLED)) {
shf->node.flags = (shf->node.flags |
(on & ~PM_UNDEFINED)) & ~off;
if (check_autoload(shf, shf->node.nam,
ops, func)) {
returnval = 1;
}
}
}
}
} else {
untokenize(*argv);
zwarnnam(name, "bad pattern : %s", *argv);
returnval = 1;
}
unqueue_signals();
}
return returnval;
}
/* Take the arguments literally -- do not glob */
queue_signals();
for (; *argv; argv++) {
if (OPT_ISSET(ops,'w'))
returnval = dump_autoload(name, *argv, on, ops, func);
else if ((shf = (Shfunc) shfunctab->getnode(shfunctab, *argv))) {
/* if any flag was given */
if (on|off) {
/* turn on/off the given flags */
shf->node.flags = (shf->node.flags | (on & ~PM_UNDEFINED)) & ~off;
if (check_autoload(shf, shf->node.nam, ops, func))
returnval = 1;
} else
/* no flags, so just print */
printshfuncexpand(&shf->node, pflags, expand);
} else if (on & PM_UNDEFINED) {
int signum = -1, ok = 1;
if (!strncmp(*argv, "TRAP", 4) &&
(signum = getsignum(*argv + 4)) != -1) {
/*
* Because of the possibility of alternative names,
* we must remove the trap explicitly.
*/
removetrapnode(signum);
}
if (**argv == '/') {
char *base = strrchr(*argv, '/') + 1;
if (*base &&
(shf = (Shfunc) shfunctab->getnode(shfunctab, base))) {
char *dir;
/* turn on/off the given flags */
shf->node.flags =
(shf->node.flags | (on & ~PM_UNDEFINED)) & ~off;
if (shf->node.flags & PM_UNDEFINED) {
/* update path if not yet loaded */
if (base == *argv + 1)
dir = "/";
else {
dir = *argv;
base[-1] = '\0';
}
dircache_set(&shf->filename, NULL);
dircache_set(&shf->filename, dir);
}
if (check_autoload(shf, shf->node.nam, ops, func))
returnval = 1;
continue;
}
}
/* Add a new undefined (autoloaded) function to the *
* hash table with the corresponding flags set. */
shf = (Shfunc) zshcalloc(sizeof *shf);
shf->node.flags = on;
shf->funcdef = mkautofn(shf);
shfunc_set_sticky(shf);
add_autoload_function(shf, *argv);
if (signum != -1) {
if (settrap(signum, NULL, ZSIG_FUNC)) {
shfunctab->removenode(shfunctab, *argv);
shfunctab->freenode(&shf->node);
returnval = 1;
ok = 0;
}
}
if (ok && check_autoload(shf, shf->node.nam, ops, func))
returnval = 1;
} else
returnval = 1;
}
unqueue_signals();
return returnval;
}
/**/
Eprog
mkautofn(Shfunc shf)
{
Eprog p;
p = (Eprog) zalloc(sizeof(*p));
p->len = 5 * sizeof(wordcode);
p->prog = (Wordcode) zalloc(p->len);
p->strs = NULL;
p->shf = shf;
p->npats = 0;
p->nref = 1; /* allocated from permanent storage */
p->pats = (Patprog *) p->prog;
p->flags = EF_REAL;
p->dump = NULL;
p->prog[0] = WCB_LIST((Z_SYNC | Z_END), 0);
p->prog[1] = WCB_SUBLIST(WC_SUBLIST_END, 0, 3);
p->prog[2] = WCB_PIPE(WC_PIPE_END, 0);
p->prog[3] = WCB_AUTOFN();
p->prog[4] = WCB_END();
return p;
}
/* unset: unset parameters */
/**/
int
bin_unset(char *name, char **argv, Options ops, int func)
{
Param pm, next;
Patprog pprog;
char *s;
int match = 0, returnval = 0;
int i;
/* unset -f is the same as unfunction */
if (OPT_ISSET(ops,'f'))
return bin_unhash(name, argv, ops, func);
/* with -m option, treat arguments as glob patterns */
if (OPT_ISSET(ops,'m')) {
while ((s = *argv++)) {
queue_signals();
/* expand */
tokenize(s);
if ((pprog = patcompile(s, PAT_STATIC, NULL))) {
/* Go through the parameter table, and unset any matches */
for (i = 0; i < paramtab->hsize; i++) {
for (pm = (Param) paramtab->nodes[i]; pm; pm = next) {
/* record pointer to next, since we may free this one */
next = (Param) pm->node.next;
if ((!(pm->node.flags & PM_RESTRICTED) ||
unset(RESTRICTED)) &&
pattry(pprog, pm->node.nam)) {
unsetparam_pm(pm, 0, 1);
match++;
}
}
}
} else {
untokenize(s);
zwarnnam(name, "bad pattern : %s", s);
returnval = 1;
}
unqueue_signals();
}
/* If we didn't match anything, we return 1. */
if (!match)
returnval = 1;
return returnval;
}
/* do not glob -- unset the given parameter */
queue_signals();
while ((s = *argv++)) {
char *ss = strchr(s, '['), *subscript = 0;
if (ss) {
char *sse;
*ss = 0;
if ((sse = parse_subscript(ss+1, 1, ']'))) {
*sse = 0;
subscript = dupstring(ss+1);
*sse = ']';
remnulargs(subscript);
untokenize(subscript);
}
}
if ((ss && !subscript) || !isident(s)) {
if (ss)
*ss = '[';
zerrnam(name, "%s: invalid parameter name", s);
returnval = 1;
continue;
}
pm = (Param) (paramtab == realparamtab ?
/* getnode2() to avoid autoloading */
paramtab->getnode2(paramtab, s) :
paramtab->getnode(paramtab, s));
/*
* Unsetting an unset variable is not an error.
* This appears to be reasonably standard behaviour.
*/
if (!pm)
continue;
else if ((pm->node.flags & PM_RESTRICTED) && isset(RESTRICTED)) {
zerrnam(name, "%s: restricted", pm->node.nam);
returnval = 1;
} else if (ss) {
if (PM_TYPE(pm->node.flags) == PM_HASHED) {
HashTable tht = paramtab;
if ((paramtab = pm->gsu.h->getfn(pm)))
unsetparam(subscript);
paramtab = tht;
} else if (PM_TYPE(pm->node.flags) == PM_SCALAR ||
PM_TYPE(pm->node.flags) == PM_ARRAY) {
struct value vbuf;
vbuf.isarr = (PM_TYPE(pm->node.flags) == PM_ARRAY ?
SCANPM_ARRONLY : 0);
vbuf.pm = pm;
vbuf.flags = 0;
vbuf.start = 0;
vbuf.end = -1;
vbuf.arr = 0;
*ss = '[';
if (getindex(&ss, &vbuf, SCANPM_ASSIGNING) == 0 &&
vbuf.pm && !(vbuf.pm->node.flags & PM_UNSET)) {
if (PM_TYPE(pm->node.flags) == PM_SCALAR) {
setstrvalue(&vbuf, ztrdup(""));
} else {
/* start is after the element for reverse index */
int start = vbuf.start - !!(vbuf.flags & VALFLAG_INV);
if (arrlen_gt(vbuf.pm->u.arr, start)) {
char *arr[2];
arr[0] = "";
arr[1] = 0;
setarrvalue(&vbuf, zarrdup(arr));
}
}
}
returnval = errflag;
errflag &= ~ERRFLAG_ERROR;
} else {
zerrnam(name, "%s: invalid element for unset", s);
returnval = 1;
}
} else {
if (unsetparam_pm(pm, 0, 1))
returnval = 1;
}
if (ss)
*ss = '[';
}
unqueue_signals();
return returnval;
}
/* type, whence, which, command */
static LinkList matchednodes;
static void
fetchcmdnamnode(HashNode hn, UNUSED(int printflags))
{
Cmdnam cn = (Cmdnam) hn;
addlinknode(matchednodes, cn->node.nam);
}
/**/
int
bin_whence(char *nam, char **argv, Options ops, int func)
{
HashNode hn;
Patprog pprog;
int returnval = 0;
int printflags = 0;
int aliasflags;
int csh, all, v, wd;
int informed = 0;
int expand = 0;
char *cnam, **allmatched = 0;
/* Check some option information */
csh = OPT_ISSET(ops,'c');
v = OPT_ISSET(ops,'v');
all = OPT_ISSET(ops,'a');
wd = OPT_ISSET(ops,'w');
if (OPT_ISSET(ops,'x')) {
char *eptr;
expand = (int)zstrtol(OPT_ARG(ops,'x'), &eptr, 10);
if (*eptr) {
zwarnnam(nam, "number expected after -x");
return 1;
}
if (expand == 0) /* no indentation at all */
expand = -1;
}
if (OPT_ISSET(ops,'w'))
printflags |= PRINT_WHENCE_WORD;
else if (OPT_ISSET(ops,'c'))
printflags |= PRINT_WHENCE_CSH;
else if (OPT_ISSET(ops,'v'))
printflags |= PRINT_WHENCE_VERBOSE;
else
printflags |= PRINT_WHENCE_SIMPLE;
if (OPT_ISSET(ops,'f'))
printflags |= PRINT_WHENCE_FUNCDEF;
if (func == BIN_COMMAND)
if (OPT_ISSET(ops,'V')) {
printflags = aliasflags = PRINT_WHENCE_VERBOSE;
v = 1;
} else {
aliasflags = PRINT_LIST;
printflags = PRINT_WHENCE_SIMPLE;
v = 0;
}
else
aliasflags = printflags;
/* With -m option -- treat arguments as glob patterns */
if (OPT_ISSET(ops,'m')) {
cmdnamtab->filltable(cmdnamtab);
if (all) {
pushheap();
matchednodes = newlinklist();
}
queue_signals();
for (; *argv; argv++) {
/* parse the pattern */
tokenize(*argv);
if (!(pprog = patcompile(*argv, PAT_STATIC, NULL))) {
untokenize(*argv);
zwarnnam(nam, "bad pattern : %s", *argv);
returnval = 1;
continue;
}
if (!OPT_ISSET(ops,'p')) {
/* -p option is for path search only. *
* We're not using it, so search for ... */
/* aliases ... */
informed +=
scanmatchtable(aliastab, pprog, 1, 0, DISABLED,
aliastab->printnode, printflags);
/* and reserved words ... */
informed +=
scanmatchtable(reswdtab, pprog, 1, 0, DISABLED,
reswdtab->printnode, printflags);
/* and shell functions... */
informed +=
scanmatchshfunc(pprog, 1, 0, DISABLED,
shfunctab->printnode, printflags, expand);
/* and builtins. */
informed +=
scanmatchtable(builtintab, pprog, 1, 0, DISABLED,
builtintab->printnode, printflags);
}
/* Done searching for `internal' commands, if the -p option *
* was not used. Now search the path. */
informed +=
scanmatchtable(cmdnamtab, pprog, 1, 0, 0,
(all ? fetchcmdnamnode : cmdnamtab->printnode),
printflags);
run_queued_signals();
}
unqueue_signals();
if (all) {
allmatched = argv = zlinklist2array(matchednodes);
matchednodes = NULL;
popheap();
} else
return returnval || !informed;
}
/* Take arguments literally -- do not glob */
queue_signals();
for (; *argv; argv++) {
if (!OPT_ISSET(ops,'p') && !allmatched) {
char *suf;
/* Look for alias */
if ((hn = aliastab->getnode(aliastab, *argv))) {
aliastab->printnode(hn, aliasflags);
informed = 1;
if (!all)
continue;
}
/* Look for suffix alias */
if ((suf = strrchr(*argv, '.')) && suf[1] &&
suf > *argv && suf[-1] != Meta &&
(hn = sufaliastab->getnode(sufaliastab, suf+1))) {
sufaliastab->printnode(hn, printflags);
informed = 1;
if (!all)
continue;
}
/* Look for reserved word */
if ((hn = reswdtab->getnode(reswdtab, *argv))) {
reswdtab->printnode(hn, printflags);
informed = 1;
if (!all)
continue;
}
/* Look for shell function */
if ((hn = shfunctab->getnode(shfunctab, *argv))) {
printshfuncexpand(hn, printflags, expand);
informed = 1;
if (!all)
continue;
}
/* Look for builtin command */
if ((hn = builtintab->getnode(builtintab, *argv))) {
builtintab->printnode(hn, printflags);
informed = 1;
if (!all)
continue;
}
/* Look for commands that have been added to the *
* cmdnamtab with the builtin `hash foo=bar'. */
if ((hn = cmdnamtab->getnode(cmdnamtab, *argv)) && (hn->flags & HASHED)) {
cmdnamtab->printnode(hn, printflags);
informed = 1;
if (!all)
continue;
}
}
/* Option -a is to search the entire path, *
* rather than just looking for one match. */
if (all && **argv != '/') {
char **pp, *buf;
pushheap();
for (pp = path; *pp; pp++) {
if (**pp) {
buf = zhtricat(*pp, "/", *argv);
} else buf = dupstring(*argv);
if (iscom(buf)) {
if (wd) {
printf("%s: command\n", *argv);
} else {
if (v && !csh) {
zputs(*argv, stdout), fputs(" is ", stdout);
quotedzputs(buf, stdout);
} else
zputs(buf, stdout);
if (OPT_ISSET(ops,'s') || OPT_ISSET(ops, 'S'))
print_if_link(buf, OPT_ISSET(ops, 'S'));
fputc('\n', stdout);
}
informed = 1;
}
}
if (!informed && (wd || v || csh)) {
/* this is information and not an error so, as in csh, use stdout */
zputs(*argv, stdout);
puts(wd ? ": none" : " not found");
returnval = 1;
}
popheap();
} else if (func == BIN_COMMAND && OPT_ISSET(ops,'p') &&
(hn = builtintab->getnode(builtintab, *argv))) {
/*
* Special case for "command -p[vV]" which needs to
* show a builtin in preference to an external command.
*/
builtintab->printnode(hn, printflags);
informed = 1;
} else if ((cnam = findcmd(*argv, 1,
func == BIN_COMMAND &&
OPT_ISSET(ops,'p')))) {
/* Found external command. */
if (wd) {
printf("%s: command\n", *argv);
} else {
if (v && !csh) {
zputs(*argv, stdout), fputs(" is ", stdout);
quotedzputs(cnam, stdout);
} else
zputs(cnam, stdout);
if (OPT_ISSET(ops,'s') || OPT_ISSET(ops,'S'))
print_if_link(cnam, OPT_ISSET(ops,'S'));
fputc('\n', stdout);
}
informed = 1;
} else {
/* Not found at all. That's not an error as such so this goes to stdout */
if (v || csh || wd)
zputs(*argv, stdout), puts(wd ? ": none" : " not found");
returnval = 1;
}
}
if (allmatched)
freearray(allmatched);
unqueue_signals();
return returnval || !informed;
}
/**** command & named directory hash table builtins ****/
/*****************************************************************
* hash -- explicitly hash a command. *
* 1) Given no arguments, list the hash table. *
* 2) The -m option prints out commands in the hash table that *
* match a given glob pattern. *
* 3) The -f option causes the entire path to be added to the *
* hash table (cannot be combined with any arguments). *
* 4) The -r option causes the entire hash table to be discarded *
* (cannot be combined with any arguments). *
* 5) Given argument of the form foo=bar, add element to command *
* hash table, so that when `foo' is entered, then `bar' is *
* executed. *
* 6) Given arguments not of the previous form, add it to the *
* command hash table as if it were being executed. *
* 7) The -d option causes analogous things to be done using *
* the named directory hash table. *
*****************************************************************/
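/*
* Illustrative usage matching the cases above (a sketch; paths and names
* are illustrative):
*   hash                          # (1) list the command hash table
*   hash -m 'z*'                  # (2) show hashed commands matching z*
*   hash -f                       # (3) hash everything in $PATH
*   hash -r                       # (4) empty the table
*   hash vi=/usr/local/bin/vim    # (5) make `vi' run that path
*   hash -d src=~/zsh/Src         # (7) named directory, usable as ~src
*/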
/**/
int
bin_hash(char *name, char **argv, Options ops, UNUSED(int func))
{
HashTable ht;
Patprog pprog;
Asgment asg;
int returnval = 0;
int printflags = 0;
if (OPT_ISSET(ops,'d'))
ht = nameddirtab;
else
ht = cmdnamtab;
if (OPT_ISSET(ops,'r') || OPT_ISSET(ops,'f')) {
/* -f and -r can't be used with any arguments */
if (*argv) {
zwarnnam("hash", "too many arguments");
return 1;
}
/* empty the hash table */
if (OPT_ISSET(ops,'r'))
ht->emptytable(ht);
/* fill the hash table in a standard way */
if (OPT_ISSET(ops,'f'))
ht->filltable(ht);
return 0;
}
if (OPT_ISSET(ops,'L')) printflags |= PRINT_LIST;
/* Given no arguments, display current hash table. */
if (!*argv) {
queue_signals();
scanhashtable(ht, 1, 0, 0, ht->printnode, printflags);
unqueue_signals();
return 0;
}
queue_signals();
while (*argv) {
void *hn;
if (OPT_ISSET(ops,'m')) {
/* with the -m option, treat the argument as a glob pattern */
tokenize(*argv); /* expand */
if ((pprog = patcompile(*argv, PAT_STATIC, NULL))) {
/* display matching hash table elements */
scanmatchtable(ht, pprog, 1, 0, 0, ht->printnode, printflags);
} else {
untokenize(*argv);
zwarnnam(name, "bad pattern : %s", *argv);
returnval = 1;
}
argv++;
continue;
}
if (!(asg = getasg(&argv, NULL))) {
zwarnnam(name, "bad assignment");
returnval = 1;
break;
} else if (ASG_VALUEP(asg)) {
if(isset(RESTRICTED)) {
zwarnnam(name, "restricted: %s", asg->value.scalar);
returnval = 1;
} else {
/* The argument is of the form foo=bar, *
* so define an entry for the table. */
if(OPT_ISSET(ops,'d')) {
/* shouldn't return NULL if asg->name is not NULL */
if (*itype_end(asg->name, IUSER, 0)) {
zwarnnam(name,
"invalid character in directory name: %s",
asg->name);
returnval = 1;
continue;
} else {
Nameddir nd = hn = zshcalloc(sizeof *nd);
nd->node.flags = 0;
nd->dir = ztrdup(asg->value.scalar);
}
} else {
Cmdnam cn = hn = zshcalloc(sizeof *cn);
cn->node.flags = HASHED;
cn->u.cmd = ztrdup(asg->value.scalar);
}
ht->addnode(ht, ztrdup(asg->name), hn);
if(OPT_ISSET(ops,'v'))
ht->printnode(hn, 0);
}
} else if (!(hn = ht->getnode2(ht, asg->name))) {
/* With no `=value' part to the argument, *
* work out what it ought to be. */
if(OPT_ISSET(ops,'d')) {
if(!getnameddir(asg->name)) {
zwarnnam(name, "no such directory name: %s", asg->name);
returnval = 1;
}
} else {
if (!hashcmd(asg->name, path)) {
zwarnnam(name, "no such command: %s", asg->name);
returnval = 1;
}
}
if(OPT_ISSET(ops,'v') && (hn = ht->getnode2(ht, asg->name)))
ht->printnode(hn, 0);
} else if(OPT_ISSET(ops,'v'))
ht->printnode(hn, 0);
}
unqueue_signals();
return returnval;
}
/* unhash: remove specified elements from a hash table */
/**/
int
bin_unhash(char *name, char **argv, Options ops, int func)
{
HashTable ht;
HashNode hn, nhn;
Patprog pprog;
int match = 0, returnval = 0, all = 0;
int i;
/* Check which hash table we are working with. */
if (func == BIN_UNALIAS) {
if (OPT_ISSET(ops,'s'))
ht = sufaliastab; /* suffix aliases */
else
ht = aliastab; /* aliases */
if (OPT_ISSET(ops, 'a')) {
if (*argv) {
zwarnnam(name, "-a: too many arguments");
return 1;
}
all = 1;
} else if (!*argv) {
zwarnnam(name, "not enough arguments");
return 1;
}
} else if (OPT_ISSET(ops,'d'))
ht = nameddirtab; /* named directories */
else if (OPT_ISSET(ops,'f'))
ht = shfunctab; /* shell functions */
else if (OPT_ISSET(ops,'s'))
ht = sufaliastab; /* suffix aliases, must precede aliases */
else if (func == BIN_UNHASH && (OPT_ISSET(ops,'a')))
ht = aliastab; /* aliases */
else
ht = cmdnamtab; /* external commands */
if (all) {
queue_signals();
for (i = 0; i < ht->hsize; i++) {
for (hn = ht->nodes[i]; hn; hn = nhn) {
/* record pointer to next, since we may free this one */
nhn = hn->next;
ht->freenode(ht->removenode(ht, hn->nam));
}
}
unqueue_signals();
return 0;
}
/* With -m option, treat arguments as glob patterns. *
* "unhash -m '*'" is legal, but not recommended. */
if (OPT_ISSET(ops,'m')) {
for (; *argv; argv++) {
queue_signals();
/* expand argument */
tokenize(*argv);
if ((pprog = patcompile(*argv, PAT_STATIC, NULL))) {
/* remove all nodes matching glob pattern */
for (i = 0; i < ht->hsize; i++) {
for (hn = ht->nodes[i]; hn; hn = nhn) {
/* record pointer to next, since we may free this one */
nhn = hn->next;
if (pattry(pprog, hn->nam)) {
ht->freenode(ht->removenode(ht, hn->nam));
match++;
}
}
}
} else {
untokenize(*argv);
zwarnnam(name, "bad pattern : %s", *argv);
returnval = 1;
}
unqueue_signals();
}
/* If we didn't match anything, we return 1. */
if (!match)
returnval = 1;
return returnval;
}
/* Take arguments literally -- do not glob */
queue_signals();
for (; *argv; argv++) {
if ((hn = ht->removenode(ht, *argv))) {
ht->freenode(hn);
} else if (func == BIN_UNSET && isset(POSIXBUILTINS)) {
/* POSIX: unset: "Unsetting a variable or function that was *
* not previously set shall not be considered an error." */
returnval = 0;
} else {
zwarnnam(name, "no such hash table element: %s", *argv);
returnval = 1;
}
}
unqueue_signals();
return returnval;
}
/**** alias builtins ****/
/* alias: display or create aliases. */
/**/
int
bin_alias(char *name, char **argv, Options ops, UNUSED(int func))
{
Alias a;
Patprog pprog;
Asgment asg;
int returnval = 0;
int flags1 = 0, flags2 = DISABLED;
int printflags = 0;
int type_opts;
HashTable ht = aliastab;
/* Did we specify the type of alias? */
type_opts = OPT_ISSET(ops, 'r') + OPT_ISSET(ops, 'g') +
OPT_ISSET(ops, 's');
if (type_opts) {
if (type_opts > 1) {
zwarnnam(name, "illegal combination of options");
return 1;
}
if (OPT_ISSET(ops,'g'))
flags1 |= ALIAS_GLOBAL;
else
flags2 |= ALIAS_GLOBAL;
if (OPT_ISSET(ops, 's')) {
/*
* Although we keep suffix aliases in a different table,
* it is useful to be able to distinguish Alias structures
* without reference to the table, so we have a separate
* flag, too.
*/
flags1 |= ALIAS_SUFFIX;
ht = sufaliastab;
} else
flags2 |= ALIAS_SUFFIX;
}
if (OPT_ISSET(ops,'L'))
printflags |= PRINT_LIST;
else if (OPT_PLUS(ops,'g') || OPT_PLUS(ops,'r') || OPT_PLUS(ops,'s') ||
OPT_PLUS(ops,'m') || OPT_ISSET(ops,'+'))
printflags |= PRINT_NAMEONLY;
/* In the absence of arguments, list all aliases. If a command *
* line flag is specified, list only those of that type. */
if (!*argv) {
queue_signals();
scanhashtable(ht, 1, flags1, flags2, ht->printnode, printflags);
unqueue_signals();
return 0;
}
/* With the -m option, treat the arguments as *
* glob patterns of aliases to display. */
if (OPT_ISSET(ops,'m')) {
for (; *argv; argv++) {
queue_signals();
tokenize(*argv); /* expand argument */
if ((pprog = patcompile(*argv, PAT_STATIC, NULL))) {
/* display the matching aliases */
scanmatchtable(ht, pprog, 1, flags1, flags2,
ht->printnode, printflags);
} else {
untokenize(*argv);
zwarnnam(name, "bad pattern : %s", *argv);
returnval = 1;
}
unqueue_signals();
}
return returnval;
}
/* Take arguments literally. Don't glob */
queue_signals();
while ((asg = getasg(&argv, NULL))) {
if (asg->value.scalar && !OPT_ISSET(ops,'L')) {
/* The argument is of the form foo=bar and we are not *
* forcing a listing with -L, so define an alias */
ht->addnode(ht, ztrdup(asg->name),
createaliasnode(ztrdup(asg->value.scalar), flags1));
} else if ((a = (Alias) ht->getnode(ht, asg->name))) {
/* display alias if appropriate */
if (!type_opts || ht == sufaliastab ||
(OPT_ISSET(ops,'r') &&
!(a->node.flags & (ALIAS_GLOBAL|ALIAS_SUFFIX))) ||
(OPT_ISSET(ops,'g') && (a->node.flags & ALIAS_GLOBAL)))
ht->printnode(&a->node, printflags);
} else
returnval = 1;
}
unqueue_signals();
return returnval;
}
/**** miscellaneous builtins ****/
/* true, : (colon) */
/**/
int
bin_true(UNUSED(char *name), UNUSED(char **argv), UNUSED(Options ops), UNUSED(int func))
{
return 0;
}
/* false builtin */
/**/
int
bin_false(UNUSED(char *name), UNUSED(char **argv), UNUSED(Options ops), UNUSED(int func))
{
return 1;
}
/* the zle buffer stack */
/**/
mod_export LinkList bufstack;
/* echo, print, printf, pushln */
#define print_val(VAL) \
if (prec >= 0) \
count += fprintf(fout, spec, width, prec, VAL); \
else \
count += fprintf(fout, spec, width, VAL);
/*
* Because of the use of getkeystring() to interpret the arguments,
* the elements of args spend a large part of the function unmetafied
* with the lengths in len. This may have seemed a good idea once.
* As we are stuck with this for now, we need to be very careful
* deciding what state args is in.
*/
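/*
* Illustrative usage of the print family (a sketch, not exhaustive):
*   print -C 2 -- *.c             # list arguments in two columns
*   print -u2 "warning"           # write to file descriptor 2
*   print -P '%n@%m'              # prompt-expand the argument
*   printf '%8.3f\n' 3.14159      # printf-style formatting
*   print -s "some command"       # place the argument on the history list
*/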
/**/
int
bin_print(char *name, char **args, Options ops, int func)
{
int flen, width, prec, type, argc, n, narg, curlen = 0;
int nnl = 0, fmttrunc = 0, ret = 0, maxarg = 0, nc = 0;
int flags[6], *len, visarr = 0;
char *start, *endptr, *c, *d, *flag, *buf = NULL, spec[14], *fmt = NULL;
char **first, **argp, *curarg, *flagch = "'0+- #", save = '\0', nullstr = '\0';
size_t rcount = 0, count = 0;
size_t *cursplit = 0, *splits = 0;
FILE *fout = stdout;
#ifdef HAVE_OPEN_MEMSTREAM
size_t mcount;
#define ASSIGN_MSTREAM(BUF,FOUT) \
do { \
if ((FOUT = open_memstream(&BUF, &mcount)) == NULL) { \
zwarnnam(name, "open_memstream failed"); \
return 1; \
} \
} while (0)
/*
* Some implementations of open_memstream() have a bug such that,
* if fflush() is followed by fclose(), another NUL byte is written
* to the buffer at the wrong position. Therefore we must fclose()
* before reading.
*/
#define READ_MSTREAM(BUF,FOUT) \
((fclose(FOUT) == 0) ? mcount : (size_t)-1)
#define CLOSE_MSTREAM(FOUT) 0
#else /* simulate HAVE_OPEN_MEMSTREAM */
#define ASSIGN_MSTREAM(BUF,FOUT) \
do { \
int tempfd; \
char *tmpf; \
if ((tempfd = gettempfile(NULL, 1, &tmpf)) < 0) { \
zwarnnam(name, "can't open temp file: %e", errno); \
return 1; \
} \
unlink(tmpf); \
if ((FOUT = fdopen(tempfd, "w+")) == NULL) { \
close(tempfd); \
zwarnnam(name, "can't open temp file: %e", errno); \
return 1; \
} \
} while (0)
#define READ_MSTREAM(BUF,FOUT) \
((((count = ftell(FOUT)), (BUF = (char *)zalloc(count + 1))) && \
((fseek(FOUT, 0L, SEEK_SET) == 0) && !(BUF[count] = '\0')) && \
(fread(BUF, 1, count, FOUT) == count)) ? count : (size_t)-1)
#define CLOSE_MSTREAM(FOUT) fclose(FOUT)
#endif
#define IS_MSTREAM(FOUT) \
(FOUT != stdout && \
(OPT_ISSET(ops,'z') || OPT_ISSET(ops,'s') || OPT_ISSET(ops,'v')))
/* Testing EBADF special-cases >&- redirections */
#define CLOSE_CLEANLY(FOUT) \
(IS_MSTREAM(FOUT) ? CLOSE_MSTREAM(FOUT) == 0 : \
((FOUT == stdout) ? (fflush(FOUT) == 0 || errno == EBADF) : \
(fclose(FOUT) == 0))) /* implies error for -u on a closed fd */
Histent ent;
mnumber mnumval;
double doubleval;
int intval;
zlong zlongval;
zulong zulongval;
char *stringval;
/* Error check option combinations and option arguments */
if (OPT_ISSET(ops, 'z') +
OPT_ISSET(ops, 's') + OPT_ISSET(ops, 'S') +
OPT_ISSET(ops, 'v') > 1) {
zwarnnam(name, "only one of -s, -S, -v, or -z allowed");
return 1;
}
if ((OPT_ISSET(ops, 'z') | OPT_ISSET(ops, 's') | OPT_ISSET(ops, 'S')) +
(OPT_ISSET(ops, 'c') | OPT_ISSET(ops, 'C')) > 1) {
zwarnnam(name, "-c or -C not allowed with -s, -S, or -z");
return 1;
}
if ((OPT_ISSET(ops, 'z') | OPT_ISSET(ops, 'v') |
OPT_ISSET(ops, 's') | OPT_ISSET(ops, 'S')) +
(OPT_ISSET(ops, 'p') | OPT_ISSET(ops, 'u')) > 1) {
zwarnnam(name, "-p or -u not allowed with -s, -S, -v, or -z");
return 1;
}
/*
if (OPT_ISSET(ops, 'f') &&
(OPT_ISSET(ops, 'S') || OPT_ISSET(ops, 'c') || OPT_ISSET(ops, 'C'))) {
zwarnnam(name, "-f not allowed with -c, -C, or -S");
return 1;
}
*/
/* -C -- number of columns */
if (!fmt && OPT_ISSET(ops,'C')) {
char *eptr, *argptr = OPT_ARG(ops,'C');
nc = (int)zstrtol(argptr, &eptr, 10);
if (*eptr) {
zwarnnam(name, "number expected after -%c: %s", 'C', argptr);
return 1;
}
if (nc <= 0) {
zwarnnam(name, "invalid number of columns: %s", argptr);
return 1;
}
}
if (func == BIN_PRINTF) {
if (!strcmp(*args, "--") && !*++args) {
zwarnnam(name, "not enough arguments");
return 1;
}
fmt = *args++;
} else if (func == BIN_ECHO && isset(BSDECHO))
ops->ind['E'] = 1;
else if (OPT_HASARG(ops,'f'))
fmt = OPT_ARG(ops,'f');
if (fmt)
fmt = getkeystring(fmt, &flen, OPT_ISSET(ops,'b') ? GETKEYS_BINDKEY :
GETKEYS_PRINTF_FMT, &fmttrunc);
first = args;
/* -m option -- treat the first argument as a pattern and remove
* arguments not matching */
if (OPT_ISSET(ops,'m')) {
Patprog pprog;
char **t, **p;
if (!*args) {
zwarnnam(name, "no pattern specified");
return 1;
}
queue_signals();
tokenize(*args);
if (!(pprog = patcompile(*args, PAT_STATIC, NULL))) {
untokenize(*args);
zwarnnam(name, "bad pattern: %s", *args);
unqueue_signals();
return 1;
}
for (t = p = ++args; *p; p++)
if (pattry(pprog, *p))
*t++ = *p;
*t = NULL;
first = args;
unqueue_signals();
if (fmt && !*args) return 0;
}
/* compute lengths, and interpret according to -P, -D, -e, etc. */
argc = arrlen(args);
len = (int *) hcalloc(argc * sizeof(int));
for (n = 0; n < argc; n++) {
/* first \ sequences */
if (fmt ||
(!OPT_ISSET(ops,'e') &&
(OPT_ISSET(ops,'R') || OPT_ISSET(ops,'r') || OPT_ISSET(ops,'E'))))
unmetafy(args[n], &len[n]);
else {
int escape_how;
if (OPT_ISSET(ops,'b'))
escape_how = GETKEYS_BINDKEY;
else if (func != BIN_ECHO && !OPT_ISSET(ops,'e'))
escape_how = GETKEYS_PRINT;
else
escape_how = GETKEYS_ECHO;
args[n] = getkeystring(args[n], &len[n], escape_how, &nnl);
if (nnl) {
/* If there was a \c escape, make this the last arg. */
argc = n + 1;
args[argc] = NULL;
}
}
/* -P option -- interpret as a prompt sequence */
if (OPT_ISSET(ops,'P')) {
/*
* promptexpand uses permanent storage: to avoid
* messy memory management, stick it on the heap
* instead.
*/
char *str = unmetafy(
promptexpand(metafy(args[n], len[n], META_NOALLOC),
0, NULL, NULL, NULL),
&len[n]);
args[n] = dupstrpfx(str, len[n]);
free(str);
}
/* -D option -- interpret as a directory, and use ~ */
if (OPT_ISSET(ops,'D')) {
Nameddir d;
queue_signals();
/* TODO: finddir takes a metafied file */
d = finddir(args[n]);
if (d) {
int dirlen = strlen(d->dir);
char *arg = zhalloc(len[n] - dirlen + strlen(d->node.nam) + 2);
sprintf(arg, "~%s%s", d->node.nam, args[n] + dirlen);
args[n] = arg;
len[n] = strlen(args[n]);
}
unqueue_signals();
}
}
/* -o and -O -- sort the arguments */
if (OPT_ISSET(ops,'o') || OPT_ISSET(ops,'O')) {
int flags;
if (fmt && !*args)
return 0;
flags = OPT_ISSET(ops,'i') ? SORTIT_IGNORING_CASE : 0;
if (OPT_ISSET(ops,'O'))
flags |= SORTIT_BACKWARDS;
strmetasort(args, flags, len);
}
/* -u and -p -- output to other than standard output */
if ((OPT_HASARG(ops,'u') || OPT_ISSET(ops,'p')) &&
/* rule out conflicting options -- historical precedence */
((!fmt && (OPT_ISSET(ops,'c') || OPT_ISSET(ops,'C'))) ||
!(OPT_ISSET(ops, 'z') || OPT_ISSET(ops, 'v') ||
OPT_ISSET(ops, 's') || OPT_ISSET(ops, 'S')))) {
int fdarg, fd;
if (OPT_ISSET(ops, 'p')) {
fdarg = coprocout;
if (fdarg < 0) {
zwarnnam(name, "-p: no coprocess");
return 1;
}
} else {
char *argptr = OPT_ARG(ops,'u'), *eptr;
/* Handle undocumented feature that -up worked */
if (!strcmp(argptr, "p")) {
fdarg = coprocout;
if (fdarg < 0) {
zwarnnam(name, "-p: no coprocess");
return 1;
}
} else {
fdarg = (int)zstrtol(argptr, &eptr, 10);
if (*eptr) {
zwarnnam(name, "number expected after -u: %s", argptr);
return 1;
}
}
}
if ((fd = dup(fdarg)) < 0) {
zwarnnam(name, "bad file number: %d", fdarg);
return 1;
}
if ((fout = fdopen(fd, "w")) == 0) {
close(fd);
zwarnnam(name, "bad mode on fd %d", fd);
return 1;
}
}
if (OPT_ISSET(ops, 'v') ||
(fmt && (OPT_ISSET(ops,'z') || OPT_ISSET(ops,'s'))))
ASSIGN_MSTREAM(buf,fout);
/* -c -- output in columns */
if (!fmt && (OPT_ISSET(ops,'c') || OPT_ISSET(ops,'C'))) {
int l, nr, sc, n, t, i;
#ifdef MULTIBYTE_SUPPORT
int *widths;
if (isset(MULTIBYTE)) {
int *wptr;
/*
* We need the character widths to align output in
* columns.
*/
wptr = widths = (int *) zhalloc(argc * sizeof(int));
for (i = 0; i < argc && args[i]; i++, wptr++) {
int l = len[i], width = 0;
char *aptr = args[i];
mbstate_t mbs;
memset(&mbs, 0, sizeof(mbstate_t));
while (l > 0) {
wchar_t wc;
size_t cnt;
int wcw;
/*
* Prevent misaligned columns due to escape sequences by
* skipping over them. Octals \033 and \233 are the
* possible escape characters recognized by ANSI.
*
* It ought to be possible to do this in the case
* of prompt expansion by propagating the information
* about escape sequences (currently we strip this
* out).
*/
if (*aptr == '\033' || *aptr == '\233') {
for (aptr++, l--;
l && !isalpha(STOUC(*aptr));
aptr++, l--)
;
aptr++;
l--;
continue;
}
cnt = mbrtowc(&wc, aptr, l, &mbs);
if (cnt == MB_INCOMPLETE || cnt == MB_INVALID)
{
/* treat as ordinary string */
width += l;
break;
}
wcw = WCWIDTH(wc);
/* treat unprintable as 0 */
if (wcw > 0)
width += wcw;
/* skip over NUL normally */
if (cnt == 0)
cnt = 1;
aptr += cnt;
l -= cnt;
}
widths[i] = width;
}
}
else
widths = len;
#else
int *widths = len;
#endif
if (OPT_ISSET(ops,'C')) {
/*
* n: number of elements
* nc: number of columns (above)
* nr: number of rows
*/
n = arrlen(args);
nr = (n + nc - 1) / nc;
/*
* i: loop counter
* l: maximum length seen
*
* Ignore lengths in last column since they don't affect
* the separation.
*/
for (i = l = 0; i < argc; i++) {
if (OPT_ISSET(ops, 'a')) {
if ((i % nc) == nc - 1)
continue;
} else {
if (i >= nr * (nc - 1))
break;
}
if (l < widths[i])
l = widths[i];
}
sc = l + 2;
}
else
{
/*
* n: loop counter
* l: maximum length seen
*/
for (n = l = 0; n < argc; n++)
if (l < widths[n])
l = widths[n];
/*
* sc: column width
* nc: number of columns (at least one)
*/
sc = l + 2;
nc = (zterm_columns + 1) / sc;
if (!nc)
nc = 1;
nr = (n + nc - 1) / nc;
}
if (OPT_ISSET(ops,'a')) /* print across, i.e. columns first */
n = 0;
for (i = 0; i < nr; i++) {
if (OPT_ISSET(ops,'a'))
{
int ic;
for (ic = 0; ic < nc && n < argc; ic++, n++)
{
fwrite(args[n], len[n], 1, fout);
l = widths[n];
if (n < argc)
for (; l < sc; l++)
fputc(' ', fout);
}
}
else
{
n = i;
do {
fwrite(args[n], len[n], 1, fout);
l = widths[n];
for (t = nr; t && n < argc; t--, n++);
if (n < argc)
for (; l < sc; l++)
fputc(' ', fout);
} while (n < argc);
}
fputc(OPT_ISSET(ops,'N') ? '\0' : '\n', fout);
}
if (IS_MSTREAM(fout) && (rcount = READ_MSTREAM(buf,fout)) == -1)
ret = 1;
if (!CLOSE_CLEANLY(fout) || ret) {
zwarnnam(name, "write error: %e", errno);
ret = 1;
}
if (buf) {
/* assert: we must be doing -v at this point */
queue_signals();
if (ret)
free(buf);
else
setsparam(OPT_ARG(ops, 'v'),
metafy(buf, rcount, META_REALLOC));
unqueue_signals();
}
return ret;
}
/* normal output */
if (!fmt) {
if (OPT_ISSET(ops, 'z') || OPT_ISSET(ops, 'v') ||
OPT_ISSET(ops, 's') || OPT_ISSET(ops, 'S')) {
/*
* We don't want the arguments unmetafied after all.
*/
for (n = 0; n < argc; n++)
metafy(args[n], len[n], META_NOALLOC);
}
/* -z option -- push the arguments onto the editing buffer stack */
if (OPT_ISSET(ops,'z')) {
queue_signals();
zpushnode(bufstack, sepjoin(args, NULL, 0));
unqueue_signals();
return 0;
}
/* -s option -- add the arguments to the history list */
if (OPT_ISSET(ops,'s') || OPT_ISSET(ops,'S')) {
int nwords = 0, nlen, iwords;
char **pargs = args;
queue_signals();
while (*pargs++)
nwords++;
if (nwords) {
if (OPT_ISSET(ops,'S')) {
int wordsize;
short *words;
if (nwords > 1) {
zwarnnam(name, "option -S takes a single argument");
unqueue_signals();
return 1;
}
words = NULL;
wordsize = 0;
histsplitwords(*args, &words, &wordsize, &nwords, 1);
ent = prepnexthistent();
ent->words = (short *)zalloc(nwords*sizeof(short));
memcpy(ent->words, words, nwords*sizeof(short));
free(words);
ent->nwords = nwords/2;
} else {
ent = prepnexthistent();
ent->words = (short *)zalloc(nwords*2*sizeof(short));
ent->nwords = nwords;
nlen = iwords = 0;
for (pargs = args; *pargs; pargs++) {
ent->words[iwords++] = nlen;
nlen += strlen(*pargs);
ent->words[iwords++] = nlen;
nlen++;
}
}
} else {
ent = prepnexthistent();
ent->words = (short *)NULL;
}
ent->node.nam = zjoin(args, ' ', 0);
ent->stim = ent->ftim = time(NULL);
ent->node.flags = 0;
addhistnode(histtab, ent->node.nam, ent);
unqueue_signals();
return 0;
}
if (OPT_HASARG(ops, 'x') || OPT_HASARG(ops, 'X')) {
char *eptr;
int expand, startpos = 0;
int all = OPT_HASARG(ops, 'X');
char *xarg = all ? OPT_ARG(ops, 'X') : OPT_ARG(ops, 'x');
expand = (int)zstrtol(xarg, &eptr, 10);
if (*eptr || expand <= 0) {
zwarnnam(name, "positive integer expected after -%c: %s", 'x',
xarg);
return 1;
}
for (; *args; args++, len++) {
startpos = zexpandtabs(*args, *len, expand, startpos, fout,
all);
if (args[1]) {
if (OPT_ISSET(ops, 'l')) {
fputc('\n', fout);
startpos = 0;
} else if (OPT_ISSET(ops,'N')) {
fputc('\0', fout);
} else {
fputc(' ', fout);
startpos++;
}
}
}
} else {
for (; *args; args++, len++) {
fwrite(*args, *len, 1, fout);
if (args[1])
fputc(OPT_ISSET(ops,'l') ? '\n' :
OPT_ISSET(ops,'N') ? '\0' : ' ', fout);
}
}
if (!(OPT_ISSET(ops,'n') || nnl ||
(OPT_ISSET(ops, 'v') && !OPT_ISSET(ops, 'l'))))
fputc(OPT_ISSET(ops,'N') ? '\0' : '\n', fout);
if (IS_MSTREAM(fout) && (rcount = READ_MSTREAM(buf,fout)) == -1)
ret = 1;
if (!CLOSE_CLEANLY(fout) || ret) {
zwarnnam(name, "write error: %e", errno);
ret = 1;
}
if (buf) {
/* assert: we must be doing -v at this point */
queue_signals();
if (ret)
free(buf);
else
setsparam(OPT_ARG(ops, 'v'),
metafy(buf, rcount, META_REALLOC));
unqueue_signals();
}
return ret;
}
/*
* All the remaining code in this function is for printf-style
* output (printf itself, or print -f). We still have to handle
* special cases of printing to a ZLE buffer or the history, however.
*/
if (OPT_ISSET(ops,'v')) {
struct value vbuf;
char* s = OPT_ARG(ops,'v');
Value v = getvalue(&vbuf, &s, 0);
visarr = v && PM_TYPE(v->pm->node.flags) == PM_ARRAY;
}
/* printf style output */
*spec = '%';
argp = args;
do {
rcount = count;
if (argp > args && visarr) { /* reusing format string */
if (!splits)
cursplit = splits = (size_t *)zhalloc(sizeof(size_t) *
(arrlen(args) / (argp - args) + 1));
*cursplit++ = count;
}
if (maxarg) {
first += maxarg;
argc -= maxarg;
maxarg = 0;
}
for (c = fmt; c-fmt < flen; c++) {
if (*c != '%') {
putc(*c, fout);
++count;
continue;
}
start = c++;
if (*c == '%') {
putc('%', fout);
++count;
continue;
}
type = prec = -1;
width = 0;
curarg = NULL;
d = spec + 1;
if (*c >= '1' && *c <= '9') {
narg = strtoul(c, &endptr, 0);
if (*endptr == '$') {
c = endptr + 1;
DPUTS(narg <= 0, "specified zero or negative arg");
if (narg > argc) {
zwarnnam(name, "%d: argument specifier out of range",
narg);
if (fout != stdout)
fclose(fout);
#ifdef HAVE_OPEN_MEMSTREAM
if (buf)
free(buf);
#endif
return 1;
} else {
if (narg > maxarg) maxarg = narg;
curarg = *(first + narg - 1);
curlen = len[first - args + narg - 1];
}
}
}
/* copy only one of each flag as spec has finite size */
memset(flags, 0, sizeof(flags));
while (*c && (flag = strchr(flagch, *c))) {
if (!flags[flag - flagch]) {
flags[flag - flagch] = 1;
*d++ = *c;
}
c++;
}
if (idigit(*c)) {
width = strtoul(c, &endptr, 0);
c = endptr;
} else if (*c == '*') {
if (idigit(*++c)) {
narg = strtoul(c, &endptr, 0);
if (*endptr == '$') {
c = endptr + 1;
if (narg > argc || narg <= 0) {
zwarnnam(name,
"%d: argument specifier out of range",
narg);
if (fout != stdout)
fclose(fout);
#ifdef HAVE_OPEN_MEMSTREAM
if (buf)
free(buf);
#endif
return 1;
} else {
if (narg > maxarg) maxarg = narg;
argp = first + narg - 1;
}
}
}
if (*argp) {
width = (int)mathevali(*argp++);
if (errflag) {
errflag &= ~ERRFLAG_ERROR;
ret = 1;
}
}
}
*d++ = '*';
if (*c == '.') {
if (*++c == '*') {
if (idigit(*++c)) {
narg = strtoul(c, &endptr, 0);
if (*endptr == '$') {
c = endptr + 1;
if (narg > argc || narg <= 0) {
zwarnnam(name,
"%d: argument specifier out of range",
narg);
if (fout != stdout)
fclose(fout);
#ifdef HAVE_OPEN_MEMSTREAM
if (buf)
free(buf);
#endif
return 1;
} else {
if (narg > maxarg) maxarg = narg;
argp = first + narg - 1;
}
}
}
if (*argp) {
prec = (int)mathevali(*argp++);
if (errflag) {
errflag &= ~ERRFLAG_ERROR;
ret = 1;
}
}
} else if (idigit(*c)) {
prec = strtoul(c, &endptr, 0);
c = endptr;
} else
prec = 0;
if (prec >= 0) *d++ = '.', *d++ = '*';
}
/* ignore any size modifier */
if (*c == 'l' || *c == 'L' || *c == 'h') c++;
if (!curarg && *argp) {
curarg = *argp;
curlen = len[argp++ - args];
}
d[1] = '\0';
switch (*d = *c) {
case 'c':
if (curarg)
intval = *curarg;
else
intval = 0;
print_val(intval);
break;
case 's':
case 'b':
if (curarg) {
char *b, *ptr;
int lbytes, lchars, lleft;
#ifdef MULTIBYTE_SUPPORT
mbstate_t mbs;
#endif
if (*c == 'b') {
b = getkeystring(metafy(curarg, curlen, META_USEHEAP),
&lbytes,
OPT_ISSET(ops,'b') ? GETKEYS_BINDKEY :
GETKEYS_PRINTF_ARG, &nnl);
} else {
b = curarg;
lbytes = curlen;
}
/*
* Handle width/precision here and use fwrite so that
* nul characters can be output.
*
* First, examine width of string given that it
* may contain multibyte characters. The output
* widths are for characters, so we need to count
* (in lchars). However, if we need to truncate
* the string we need the width in bytes (in lbytes).
*/
ptr = b;
#ifdef MULTIBYTE_SUPPORT
memset(&mbs, 0, sizeof(mbs));
#endif
for (lchars = 0, lleft = lbytes; lleft > 0; lchars++) {
int chars;
if (lchars == prec) {
/* Truncate at this point. */
lbytes = ptr - b;
break;
}
#ifdef MULTIBYTE_SUPPORT
if (isset(MULTIBYTE)) {
chars = mbrlen(ptr, lleft, &mbs);
if (chars < 0) {
/*
* Invalid/incomplete character at this
* point. Assume all the rest are a
* single byte. That's about the best we
* can do.
*/
lchars += lleft;
lbytes = (ptr - b) + lleft;
break;
} else if (chars == 0) {
/* NUL, handle as real character */
chars = 1;
}
}
else /* use the non-multibyte code below */
#endif
chars = 1; /* compiler can optimise this...*/
lleft -= chars;
ptr += chars;
}
if (width > 0 && flags[3]) width = -width;
if (width > 0 && lchars < width)
count += fprintf(fout, "%*c", width - lchars, ' ');
count += fwrite(b, 1, lbytes, fout);
if (width < 0 && lchars < -width)
count += fprintf(fout, "%*c", -width - lchars, ' ');
if (nnl) {
/* If the %b arg had a \c escape, truncate the fmt. */
flen = c - fmt + 1;
fmttrunc = 1;
}
} else if (width)
count += fprintf(fout, "%*c", width, ' ');
break;
case 'q':
stringval = curarg ?
quotestring(metafy(curarg, curlen, META_USEHEAP),
QT_BACKSLASH_SHOWNULL) : &nullstr;
*d = 's';
print_val(unmetafy(stringval, &curlen));
break;
case 'd':
case 'i':
type=1;
break;
case 'e':
case 'E':
case 'f':
case 'g':
case 'G':
type=2;
break;
case 'o':
case 'u':
case 'x':
case 'X':
type=3;
break;
case 'n':
if (curarg) setiparam(curarg, count - rcount);
break;
default:
if (*c) {
save = c[1];
c[1] = '\0';
}
zwarnnam(name, "%s: invalid directive", start);
if (*c) c[1] = save;
/* Why do we care about a clean close here? */
if (!CLOSE_CLEANLY(fout))
zwarnnam(name, "write error: %e", errno);
#ifdef HAVE_OPEN_MEMSTREAM
if (buf)
free(buf);
#endif
return 1;
}
if (type > 0) {
if (curarg && (*curarg == '\'' || *curarg == '"' )) {
convchar_t cc;
#ifdef MULTIBYTE_SUPPORT
if (isset(MULTIBYTE)) {
mb_charinit();
(void)mb_metacharlenconv(metafy(curarg+1, curlen-1,
META_USEHEAP), &cc);
}
else
cc = WEOF;
if (cc == WEOF)
cc = (curlen > 1) ? STOUC(curarg[1]) : 0;
#else
cc = (curlen > 1) ? STOUC(curarg[1]) : 0;
#endif
if (type == 2) {
doubleval = cc;
print_val(doubleval);
} else {
intval = cc;
print_val(intval);
}
} else {
switch (type) {
case 1:
#ifdef ZSH_64_BIT_TYPE
*d++ = 'l';
#endif
*d++ = 'l', *d++ = *c, *d = '\0';
zlongval = (curarg) ? mathevali(curarg) : 0;
if (errflag) {
zlongval = 0;
errflag &= ~ERRFLAG_ERROR;
ret = 1;
}
print_val(zlongval)
break;
case 2:
if (curarg) {
char *eptr;
/*
* First attempt to parse as a floating
* point constant. If we go through
* a math evaluation, we can lose
* mostly unimportant information
* that people in standards organizations
* worry about.
*/
doubleval = strtod(curarg, &eptr);
/*
* If it didn't parse as a constant,
* parse it as an expression.
*/
if (*eptr != '\0') {
mnumval = matheval(curarg);
doubleval = (mnumval.type & MN_FLOAT) ?
mnumval.u.d : (double)mnumval.u.l;
}
} else doubleval = 0;
if (errflag) {
doubleval = 0;
errflag &= ~ERRFLAG_ERROR;
ret = 1;
}
/* force consistent form for Inf/NaN output */
if (isnan(doubleval))
count += fputs("nan", fout);
else if (isinf(doubleval))
count += fputs((doubleval < 0.0) ? "-inf" : "inf", fout);
else
print_val(doubleval)
break;
case 3:
#ifdef ZSH_64_BIT_UTYPE
*d++ = 'l';
#endif
*d++ = 'l', *d++ = *c, *d = '\0';
if (!curarg)
zulongval = (zulong)0;
else if (!zstrtoul_underscore(curarg, &zulongval))
zulongval = mathevali(curarg);
if (errflag) {
zulongval = 0;
errflag &= ~ERRFLAG_ERROR;
ret = 1;
}
print_val(zulongval)
}
}
}
if (maxarg && (argp - first > maxarg))
maxarg = argp - first;
}
if (maxarg) argp = first + maxarg;
/* if there are remaining args, reuse format string */
} while (*argp && argp != first && !fmttrunc && !OPT_ISSET(ops,'r'));
if (IS_MSTREAM(fout)) {
queue_signals();
if ((rcount = READ_MSTREAM(buf,fout)) == -1) {
zwarnnam(name, "i/o error: %e", errno);
if (buf)
free(buf);
} else {
if (visarr && splits) {
char **arrayval = zshcalloc((cursplit - splits + 2) * sizeof(char *));
for (;cursplit >= splits; cursplit--) {
int start = cursplit == splits ? 0 : cursplit[-1];
arrayval[cursplit - splits] =
metafy(buf + start, count - start, META_DUP);
count = start;
}
setaparam(OPT_ARG(ops, 'v'), arrayval);
free(buf);
} else {
stringval = metafy(buf, rcount, META_REALLOC);
if (OPT_ISSET(ops,'z')) {
zpushnode(bufstack, stringval);
} else if (OPT_ISSET(ops,'v')) {
setsparam(OPT_ARG(ops, 'v'), stringval);
} else {
ent = prepnexthistent();
ent->node.nam = stringval;
ent->stim = ent->ftim = time(NULL);
ent->node.flags = 0;
ent->words = (short *)NULL;
addhistnode(histtab, ent->node.nam, ent);
}
}
}
unqueue_signals();
}
if (!CLOSE_CLEANLY(fout))
{
zwarnnam(name, "write error: %e", errno);
ret = 1;
}
return ret;
}
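/*
 * Illustrative usage sketch, not part of the original source: the
 * printf-style loop above reuses the format string until the arguments
 * are exhausted, and -v/-z/-s redirect the formatted result instead of
 * writing it to the output stream. Assuming standard zsh behaviour:
 *
 *   printf '%s=%s\n' a 1 b 2      # format reused: prints "a=1" then "b=2"
 *   print -f '%08.3f\n' 3.14159   # prints "0003.142"
 *   print -v var -f '%x' 255      # stores "ff" in $var instead of printing
 *   print -C 3 one two three four five   # output arranged in three columns
 */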
/* shift builtin */
/**/
int
bin_shift(char *name, char **argv, Options ops, UNUSED(int func))
{
int num = 1, l, ret = 0;
char **s;
/* optional argument can be either numeric or an array */
queue_signals();
if (*argv && !getaparam(*argv)) {
num = mathevali(*argv++);
if (errflag) {
unqueue_signals();
return 1;
}
}
if (num < 0) {
unqueue_signals();
zwarnnam(name, "argument to shift must be non-negative");
return 1;
}
if (*argv) {
for (; *argv; argv++)
if ((s = getaparam(*argv))) {
if (arrlen_lt(s, num)) {
zwarnnam(name, "shift count must be <= $#");
ret++;
continue;
}
if (OPT_ISSET(ops,'p')) {
char **s2, **src, **dst;
int count;
l = arrlen(s);
src = s;
dst = s2 = (char **)zalloc((l - num + 1) * sizeof(char *));
for (count = l - num; count; count--)
*dst++ = ztrdup(*src++);
*dst = NULL;
s = s2;
} else {
s = zarrdup(s + num);
}
setaparam(*argv, s);
}
} else {
if (num > (l = arrlen(pparams))) {
zwarnnam(name, "shift count must be <= $#");
ret = 1;
} else {
s = zalloc((l - num + 1) * sizeof(char *));
if (OPT_ISSET(ops,'p')) {
memcpy(s, pparams, (l - num) * sizeof(char *));
s[l-num] = NULL;
while (num--)
zsfree(pparams[l-1-num]);
} else {
memcpy(s, pparams + num, (l - num + 1) * sizeof(char *));
while (num--)
zsfree(pparams[num]);
}
zfree(pparams, (l + 1) * sizeof(char *));
pparams = s;
}
}
unqueue_signals();
return ret;
}
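/*
 * Illustrative usage sketch, not part of the original source, assuming
 * the semantics implemented above:
 *
 *   set -- a b c d
 *   shift 2            # positional parameters become: c d
 *   arr=(1 2 3 4 5)
 *   shift 2 arr        # arr becomes: 3 4 5
 *   shift -p arr       # -p shifts from the end: arr becomes: 3 4
 */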
/*
* Position of getopts option within OPTIND argument with multiple options.
*/
/**/
int optcind;
/* getopts: automagical option handling for shell scripts */
/**/
int
bin_getopts(UNUSED(char *name), char **argv, UNUSED(Options ops), UNUSED(int func))
{
int lenstr, lenoptstr, quiet, lenoptbuf;
char *optstr = unmetafy(*argv++, &lenoptstr), *var = *argv++;
char **args = (*argv) ? argv : pparams;
char *str, optbuf[2] = " ", *p, opch;
/* zoptind keeps count of the current argument number. The *
* user can set it to zero to start a new option parse. */
if (zoptind < 1) {
/* first call */
zoptind = 1;
optcind = 0;
}
if (arrlen_lt(args, zoptind))
/* no more options */
return 1;
/* leading ':' in optstr means don't print an error message */
quiet = *optstr == ':';
optstr += quiet;
lenoptstr -= quiet;
/* find place in relevant argument */
str = unmetafy(dupstring(args[zoptind - 1]), &lenstr);
if (!lenstr) /* Definitely not an option. */
return 1;
if(optcind >= lenstr) {
optcind = 0;
if(!args[zoptind++])
return 1;
str = unmetafy(dupstring(args[zoptind - 1]), &lenstr);
}
if(!optcind) {
if(lenstr < 2 || (*str != '-' && *str != '+'))
return 1;
if(lenstr == 2 && str[0] == '-' && str[1] == '-') {
zoptind++;
return 1;
}
optcind++;
}
opch = str[optcind++];
if(str[0] == '+') {
optbuf[0] = '+';
lenoptbuf = 2;
} else
lenoptbuf = 1;
optbuf[lenoptbuf - 1] = opch;
/* check for legality */
if(opch == ':' || !(p = memchr(optstr, opch, lenoptstr))) {
p = "?";
err:
zsfree(zoptarg);
setsparam(var, ztrdup(p));
if(quiet) {
zoptarg = metafy(optbuf, lenoptbuf, META_DUP);
} else {
zwarn(*p == '?' ? "bad option: %c%c" :
"argument expected after %c%c option",
"?-+"[lenoptbuf], opch);
zoptarg=ztrdup("");
}
return 0;
}
/* check for required argument */
if(p[1] == ':') {
if(optcind == lenstr) {
if(!args[zoptind]) {
p = ":";
goto err;
}
p = ztrdup(args[zoptind++]);
} else
p = metafy(str+optcind, lenstr-optcind, META_DUP);
/*
* Careful: I've just changed the following two lines from
* optcind = ztrlen(args[zoptind - 1]);
* and it's a rigorous theorem that every change in getopts breaks
* something. See zsh-workers/9095 for the bug fixed here.
* PWS 2000/05/02
*/
optcind = 0;
zoptind++;
zsfree(zoptarg);
zoptarg = p;
} else {
zsfree(zoptarg);
zoptarg = ztrdup("");
}
setsparam(var, metafy(optbuf, lenoptbuf, META_DUP));
return 0;
}
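/*
 * Illustrative usage sketch, not part of the original source, showing the
 * standard getopts calling convention handled above (a leading ':' in the
 * option string selects the quiet error behaviour):
 *
 *   while getopts ":ab:c" opt; do
 *       case $opt in
 *           a|c) echo "flag -$opt" ;;
 *           b)   echo "-b with argument $OPTARG" ;;
 *           :)   echo "missing argument for -$OPTARG" ;;
 *           \?)  echo "unknown option -$OPTARG" ;;
 *       esac
 *   done
 *   shift $((OPTIND - 1))
 */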
/* Flag that we should exit the shell as soon as all functions return. */
/**/
mod_export int
exit_pending;
/* Shell level at which we exit if exit_pending */
/**/
mod_export int
exit_level;
/* break, bye, continue, exit, logout, return -- most of these take *
* one numeric argument, and the other (logout) is related to return. *
* (return is treated as a logout when in a login shell.) */
/**/
int
bin_break(char *name, char **argv, UNUSED(Options ops), int func)
{
int num = lastval, nump = 0, implicit;
/* handle one optional numeric argument */
implicit = !*argv;
if (*argv) {
num = mathevali(*argv++);
nump = 1;
}
if (nump > 0 && (func == BIN_CONTINUE || func == BIN_BREAK) && num <= 0) {
zerrnam(name, "argument is not positive: %d", num);
return 1;
}
switch (func) {
case BIN_CONTINUE:
if (!loops) { /* continue is only permitted in loops */
zerrnam(name, "not in while, until, select, or repeat loop");
return 1;
}
contflag = 1; /* FALLTHROUGH */
case BIN_BREAK:
if (!loops) { /* break is only permitted in loops */
zerrnam(name, "not in while, until, select, or repeat loop");
return 1;
}
breaks = nump ? minimum(num,loops) : 1;
break;
case BIN_RETURN:
if ((isset(INTERACTIVE) && isset(SHINSTDIN))
|| locallevel || sourcelevel) {
retflag = 1;
breaks = loops;
lastval = num;
if (trap_state == TRAP_STATE_PRIMED && trap_return == -2
/*
* With POSIX, "return" on its own in a trap doesn't
* update $? --- we keep the status from before the
* trap.
*/
&& !(isset(POSIXTRAPS) && implicit)) {
trap_state = TRAP_STATE_FORCE_RETURN;
trap_return = lastval;
}
return lastval;
}
zexit(num, 0); /* else treat return as logout/exit */
break;
case BIN_LOGOUT:
if (unset(LOGINSHELL)) {
zerrnam(name, "not login shell");
return 1;
}
/*FALLTHROUGH*/
case BIN_EXIT:
if (locallevel > forklevel && shell_exiting != -1) {
/*
* We don't exit directly from functions to allow tidying
* up, in particular EXIT traps. We still need to perform
* the usual interactive tests to see if we can exit at
* all, however.
*
* If we are forked, we exit the shell at the function depth
* at which we became a subshell, hence the comparison.
*
* If we are already exiting... give this all up as
* a bad job.
*/
if (stopmsg || (zexit(0,2), !stopmsg)) {
retflag = 1;
breaks = loops;
exit_pending = (num << 1) | 1;
exit_level = locallevel;
}
} else
zexit(num, 0);
break;
}
return 0;
}
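/*
 * Illustrative usage sketch, not part of the original source, assuming
 * the semantics implemented above:
 *
 *   for i in 1 2 3; do
 *       for j in a b c; do
 *           [[ $j == b ]] && break 2   # leaves both loops
 *       done
 *   done
 *   fn() { return 3; }; fn; echo $?    # prints 3
 *   continue 0                         # error: argument is not positive
 */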
/* we have printed a 'you have stopped (running) jobs.' message */
/**/
mod_export int stopmsg;
/* check to see if user has jobs running/stopped */
/**/
static void
checkjobs(void)
{
int i;
for (i = 1; i <= maxjob; i++)
if (i != thisjob && (jobtab[i].stat & STAT_LOCKED) &&
!(jobtab[i].stat & STAT_NOPRINT) &&
(isset(CHECKRUNNINGJOBS) || jobtab[i].stat & STAT_STOPPED))
break;
if (i <= maxjob) {
if (jobtab[i].stat & STAT_STOPPED) {
#ifdef USE_SUSPENDED
zerr("you have suspended jobs.");
#else
zerr("you have stopped jobs.");
#endif
} else
zerr("you have running jobs.");
stopmsg = 1;
}
}
/*
* -1 if the shell is already committed to exit.
* positive if zexit() was already called.
*/
/**/
int shell_exiting;
/* exit the shell. val is the return value of the shell. *
* from_where is
* 1 if zexit is called because of a signal
* 2 if we can't actually exit yet (e.g. functions need
* terminating) but should perform the usual interactive tests.
*/
/**/
mod_export void
zexit(int val, int from_where)
{
/* Don't do anything recursively: see below */
if (shell_exiting == -1)
return;
if (isset(MONITOR) && !stopmsg && from_where != 1) {
scanjobs(); /* check if jobs need printing */
if (isset(CHECKJOBS))
checkjobs(); /* check if any jobs are running/stopped */
if (stopmsg) {
stopmsg = 2;
return;
}
}
/* Positive shell_exiting means we have been here before */
if (from_where == 2 || (shell_exiting++ && from_where))
return;
/*
* We're now committed to exiting. Set shell_exiting to -1 to
* indicate we shouldn't do any recursive processing.
*/
shell_exiting = -1;
/*
* We want to do all remaining processing regardless of preceding
* errors, even user interrupts.
*/
errflag = 0;
if (isset(MONITOR)) {
/* send SIGHUP to any jobs left running */
killrunjobs(from_where == 1);
}
if (isset(RCS) && interact) {
if (!nohistsave) {
int writeflags = HFILE_USE_OPTIONS;
if (from_where == 1)
writeflags |= HFILE_NO_REWRITE;
saveandpophiststack(1, writeflags);
savehistfile(NULL, 1, writeflags);
}
if (islogin && !subsh) {
sourcehome(".zlogout");
#ifdef GLOBAL_ZLOGOUT
if (isset(RCS) && isset(GLOBALRCS))
source(GLOBAL_ZLOGOUT);
#endif
}
}
lastval = val;
/*
* Now that we are committed to exiting, any previous state
* is irrelevant. Ensure trap can run.
*/
errflag = intrap = 0;
if (sigtrapped[SIGEXIT])
dotrap(SIGEXIT);
callhookfunc("zshexit", NULL, 1, NULL);
runhookdef(EXITHOOK, NULL);
if (opts[MONITOR] && interact && (SHTTY != -1)) {
release_pgrp();
}
if (mypid != getpid())
_exit(val);
else
exit(val);
}
/* . (dot), source */
/**/
int
bin_dot(char *name, char **argv, UNUSED(Options ops), UNUSED(int func))
{
char **old, *old0 = NULL;
int diddot = 0, dotdot = 0;
char *s, **t, *enam, *arg0, *buf;
struct stat st;
enum source_return ret;
if (!*argv)
return 0;
old = pparams;
/* get arguments for the script */
if (argv[1])
pparams = zarrdup(argv + 1);
enam = arg0 = ztrdup(*argv);
if (isset(FUNCTIONARGZERO)) {
old0 = argzero;
argzero = ztrdup(arg0);
}
s = unmeta(enam);
errno = ENOENT;
ret = SOURCE_NOT_FOUND;
/* for source only, check in current directory first */
if (*name != '.' && access(s, F_OK) == 0
&& stat(s, &st) >= 0 && !S_ISDIR(st.st_mode)) {
diddot = 1;
ret = source(enam);
}
if (ret == SOURCE_NOT_FOUND) {
/* use a path with / in it */
for (s = arg0; *s; s++)
if (*s == '/') {
if (*arg0 == '.') {
if (arg0 + 1 == s)
++diddot;
else if (arg0[1] == '.' && arg0 + 2 == s)
++dotdot;
}
ret = source(arg0);
break;
}
if (!*s || (ret == SOURCE_NOT_FOUND &&
isset(PATHDIRS) && diddot < 2 && dotdot == 0)) {
pushheap();
/* search path for script */
for (t = path; *t; t++) {
if (!(*t)[0] || ((*t)[0] == '.' && !(*t)[1])) {
if (diddot)
continue;
diddot = 1;
buf = dupstring(arg0);
} else
buf = zhtricat(*t, "/", arg0);
s = unmeta(buf);
if (access(s, F_OK) == 0 && stat(s, &st) >= 0
&& !S_ISDIR(st.st_mode)) {
ret = source(enam = buf);
break;
}
}
popheap();
}
}
/* clean up and return */
if (argv[1]) {
freearray(pparams);
pparams = old;
}
if (ret == SOURCE_NOT_FOUND) {
if (isset(POSIXBUILTINS)) {
/* hard error in POSIX (we'll exit later) */
zerrnam(name, "%e: %s", errno, enam);
} else {
zwarnnam(name, "%e: %s", errno, enam);
}
}
zsfree(arg0);
if (old0) {
zsfree(argzero);
argzero = old0;
}
return ret == SOURCE_OK ? lastval : 128 - ret;
}
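/*
 * Illustrative usage sketch, not part of the original source, of the
 * search order implemented above:
 *
 *   source setup.zsh     # "source": tries ./setup.zsh first, then $path
 *   . setup.zsh          # ".": no current-directory check; searches $path
 *   . ./setup.zsh        # a '/' in the name: tried as given
 *   . lib/setup.zsh      # with PATH_DIRS set, also searched along $path
 */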
/*
* common for bin_emulate and bin_eval
*/
static int
eval(char **argv)
{
Eprog prog;
char *oscriptname = scriptname;
int oineval = ineval, fpushed;
struct funcstack fstack;
/*
* If EVALLINENO is not set, we use the line number of the
* environment and must flag this up to exec.c. Otherwise,
* we use a special script name to indicate the special line number.
*/
ineval = !isset(EVALLINENO);
if (!ineval) {
scriptname = "(eval)";
fstack.prev = funcstack;
fstack.name = scriptname;
fstack.caller = funcstack ? funcstack->name : dupstring(argzero);
fstack.lineno = lineno;
fstack.tp = FS_EVAL;
/*
* To get file line numbers, we need to know if parent is
* the original script/shell or a sourced file, in which
* case we use the line number raw, or a function or eval,
* in which case we need to deduce where that came from.
*
* This replicates the logic for working out the information
* for $funcfiletrace---eval is similar to an inlined function
* call from a tracing perspective.
*/
if (!funcstack || funcstack->tp == FS_SOURCE) {
fstack.flineno = fstack.lineno;
fstack.filename = fstack.caller;
} else {
fstack.flineno = funcstack->flineno + lineno;
/*
* Line numbers in eval start from 1, not zero,
* so offset by one to get line in file.
*/
if (funcstack->tp == FS_EVAL)
fstack.flineno--;
fstack.filename = funcstack->filename;
if (!fstack.filename)
fstack.filename = "";
}
funcstack = &fstack;
fpushed = 1;
} else
fpushed = 0;
prog = parse_string(zjoin(argv, ' ', 1), 1);
if (prog) {
if (wc_code(*prog->prog) != WC_LIST) {
/* No code to execute */
lastval = 0;
} else {
execode(prog, 1, 0, "eval");
if (errflag && !lastval)
lastval = errflag;
}
} else {
lastval = 1;
}
if (fpushed)
funcstack = funcstack->prev;
errflag &= ~ERRFLAG_ERROR;
scriptname = oscriptname;
ineval = oineval;
return lastval;
}
/* emulate: set emulation mode and optionally evaluate shell code */
/**/
int
bin_emulate(char *nam, char **argv, Options ops, UNUSED(int func))
{
int opt_L = OPT_ISSET(ops, 'L');
int opt_R = OPT_ISSET(ops, 'R');
int opt_l = OPT_ISSET(ops, 'l');
int saveemulation, savehackchar;
int ret = 1, new_emulation;
unsigned int savepatterns;
char saveopts[OPT_SIZE], new_opts[OPT_SIZE];
char *cmd = 0;
const char *shname = *argv;
LinkList optlist;
LinkNode optnode;
Emulation_options save_sticky;
OptIndex *on_ptr, *off_ptr;
/* without arguments just print current emulation */
if (!shname) {
if (opt_L || opt_R) {
zwarnnam(nam, "not enough arguments");
return 1;
}
switch(SHELL_EMULATION()) {
case EMULATE_CSH:
shname = "csh";
break;
case EMULATE_KSH:
shname = "ksh";
break;
case EMULATE_SH:
shname = "sh";
break;
default:
shname = "zsh";
break;
}
printf("%s\n", shname);
return 0;
}
/* with single argument set current emulation */
if (!argv[1]) {
char *cmdopts;
if (opt_l) {
cmdopts = (char *)zhalloc(OPT_SIZE);
memcpy(cmdopts, opts, OPT_SIZE);
} else
cmdopts = opts;
emulate(shname, opt_R, &emulation, cmdopts);
if (opt_L)
cmdopts[LOCALOPTIONS] = cmdopts[LOCALTRAPS] =
cmdopts[LOCALPATTERNS] = 1;
if (opt_l) {
list_emulate_options(cmdopts, opt_R);
return 0;
}
clearpatterndisables();
return 0;
}
if (opt_l) {
zwarnnam(nam, "too many arguments for -l");
return 1;
}
argv++;
memcpy(saveopts, opts, sizeof(opts));
memcpy(new_opts, opts, sizeof(opts));
savehackchar = keyboardhackchar;
emulate(shname, opt_R, &new_emulation, new_opts);
optlist = newlinklist();
if (parseopts(nam, &argv, new_opts, &cmd, optlist, 0)) {
ret = 1;
goto restore;
}
/* parseopts() has consumed anything that looks like an option */
if (*argv) {
zwarnnam(nam, "unknown argument %s", *argv);
goto restore;
}
savepatterns = savepatterndisables();
/*
* All emulations start with an empty set of pattern disables,
* hence no special "sticky" behaviour is required.
*/
clearpatterndisables();
saveemulation = emulation;
emulation = new_emulation;
memcpy(opts, new_opts, sizeof(opts));
/* If "-c command" is given, evaluate command using specified
* emulation mode.
*/
if (cmd) {
if (opt_L) {
zwarnnam(nam, "option -L incompatible with -c");
goto restore2;
}
*--argv = cmd; /* on stack, never free()d, see execbuiltin() */
} else {
if (opt_L)
opts[LOCALOPTIONS] = opts[LOCALTRAPS] = opts[LOCALPATTERNS] = 1;
return 0;
}
save_sticky = sticky;
sticky = hcalloc(sizeof(*sticky));
sticky->emulation = emulation;
for (optnode = firstnode(optlist); optnode; incnode(optnode)) {
/* Data is index into new_opts */
char *optptr = (char *)getdata(optnode);
if (*optptr)
sticky->n_on_opts++;
else
sticky->n_off_opts++;
}
if (sticky->n_on_opts)
on_ptr = sticky->on_opts =
zhalloc(sticky->n_on_opts * sizeof(*sticky->on_opts));
else
on_ptr = NULL;
if (sticky->n_off_opts)
off_ptr = sticky->off_opts = zhalloc(sticky->n_off_opts *
sizeof(*sticky->off_opts));
else
off_ptr = NULL;
for (optnode = firstnode(optlist); optnode; incnode(optnode)) {
/* Data is index into new_opts */
char *optptr = (char *)getdata(optnode);
int optno = optptr - new_opts;
if (*optptr)
*on_ptr++ = optno;
else
*off_ptr++ = optno;
}
ret = eval(argv);
sticky = save_sticky;
restore2:
emulation = saveemulation;
memcpy(opts, saveopts, sizeof(opts));
restorepatterndisables(savepatterns);
restore:
keyboardhackchar = savehackchar;
inittyptab(); /* restore banghist */
return ret;
}
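/*
 * Illustrative usage sketch, not part of the original source, assuming
 * the behaviour implemented above:
 *
 *   emulate                 # print the current emulation, e.g. "zsh"
 *   emulate -L sh           # switch to sh emulation, options local to scope
 *   emulate zsh -c 'func() { ... }'   # define func with sticky zsh options
 *   emulate -lR zsh         # list the options a full -R reset would apply
 */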
/* eval: simple evaluation */
/**/
mod_export int ineval;
/**/
int
bin_eval(UNUSED(char *nam), char **argv, UNUSED(Options ops), UNUSED(int func))
{
return eval(argv);
}
static char *zbuf;
static int readfd;
/* Read a character from readfd, or from the buffer zbuf. Return EOF on end of
file/buffer. */
/* read: get a line of input, or (for compctl functions) return some *
* useful data about the state of the editing line. The -E and -e *
* options mean that the result should be sent to stdout. -e means, *
* in addition, that the result should not actually be assigned to *
* the specified parameters. */
/**/
int
bin_read(char *name, char **args, Options ops, UNUSED(int func))
{
char *reply, *readpmpt;
int bsiz, c = 0, gotnl = 0, al = 0, first, nchars = 1, bslash, keys = 0;
int haso = 0; /* true if /dev/tty has been opened specially */
int isem = !strcmp(term, "emacs"), izle = zleactive;
char *buf, *bptr, *firstarg, *zbuforig;
LinkList readll = newlinklist();
FILE *oshout = NULL;
int readchar = -1, val, resettty = 0;
struct ttyinfo saveti;
char d;
long izle_timeout = 0;
#ifdef MULTIBYTE_SUPPORT
wchar_t delim = L'\n', wc;
mbstate_t mbs;
char *laststart;
size_t ret;
#else
char delim = '\n';
#endif
if (OPT_HASARG(ops,c='k')) {
char *eptr, *optarg = OPT_ARG(ops,c);
nchars = (int)zstrtol(optarg, &eptr, 10);
if (*eptr) {
zwarnnam(name, "number expected after -%c: %s", c, optarg);
return 1;
}
}
/* This `*args++ : *args' looks a bit weird, but it works around a bug
* in gcc-2.8.1 under DU 4.0. */
firstarg = (*args && **args == '?' ? *args++ : *args);
reply = *args ? *args++ : OPT_ISSET(ops,'A') ? "reply" : "REPLY";
if (OPT_ISSET(ops,'A') && *args) {
zwarnnam(name, "only one array argument allowed");
return 1;
}
/* handle compctl case */
if(OPT_ISSET(ops,'l') || OPT_ISSET(ops,'c'))
return compctlreadptr(name, args, ops, reply);
if ((OPT_ISSET(ops,'k') || OPT_ISSET(ops,'q')) &&
!OPT_ISSET(ops,'u') && !OPT_ISSET(ops,'p')) {
if (!zleactive) {
if (SHTTY == -1) {
/* need to open /dev/tty specially */
if ((SHTTY = open("/dev/tty", O_RDWR|O_NOCTTY)) != -1) {
haso = 1;
oshout = shout;
init_shout();
}
} else if (!shout) {
/* We need an output FILE* on the tty */
init_shout();
}
/* We should have a SHTTY opened by now. */
if (SHTTY == -1) {
/* Unfortunately, we didn't. */
fprintf(stderr, "not interactive and can't open terminal\n");
fflush(stderr);
return 1;
}
if (unset(INTERACTIVE))
gettyinfo(&shttyinfo);
/* attach to the tty */
attachtty(mypgrp);
if (!isem)
setcbreak();
readfd = SHTTY;
}
keys = 1;
} else if (OPT_HASARG(ops,'u') && !OPT_ISSET(ops,'p')) {
/* -u means take input from the specified file descriptor. */
char *eptr, *argptr = OPT_ARG(ops,'u');
/* The old code handled -up, but that was never documented. Still...*/
if (!strcmp(argptr, "p")) {
readfd = coprocin;
if (readfd < 0) {
zwarnnam(name, "-p: no coprocess");
return 1;
}
} else {
readfd = (int)zstrtol(argptr, &eptr, 10);
if (*eptr) {
zwarnnam(name, "number expected after -%c: %s", 'u', argptr);
return 1;
}
}
#if 0
/* This code is left as a warning to future generations --- pws. */
for (readfd = 9; readfd && !OPT_ISSET(ops,readfd + '0'); --readfd);
#endif
izle = 0;
} else if (OPT_ISSET(ops,'p')) {
readfd = coprocin;
if (readfd < 0) {
zwarnnam(name, "-p: no coprocess");
return 1;
}
izle = 0;
} else
readfd = izle = 0;
if (OPT_ISSET(ops,'s') && SHTTY != -1) {
struct ttyinfo ti;
gettyinfo(&ti);
saveti = ti;
resettty = 1;
#ifdef HAS_TIO
ti.tio.c_lflag &= ~ECHO;
#else
ti.sgttyb.sg_flags &= ~ECHO;
#endif
settyinfo(&ti);
}
/* handle prompt */
if (firstarg) {
for (readpmpt = firstarg;
*readpmpt && *readpmpt != '?'; readpmpt++);
if (*readpmpt++) {
if (keys || isatty(0)) {
zputs(readpmpt, (shout ? shout : stderr));
fflush(shout ? shout : stderr);
}
readpmpt[-1] = '\0';
}
}
if (OPT_ISSET(ops,'d')) {
char *delimstr = OPT_ARG(ops,'d');
#ifdef MULTIBYTE_SUPPORT
wint_t wi;
if (isset(MULTIBYTE)) {
mb_charinit();
(void)mb_metacharlenconv(delimstr, &wi);
}
else
wi = WEOF;
if (wi != WEOF)
delim = (wchar_t)wi;
else
delim = (wchar_t)((delimstr[0] == Meta) ?
delimstr[1] ^ 32 : delimstr[0]);
#else
delim = (delimstr[0] == Meta) ? delimstr[1] ^ 32 : delimstr[0];
#endif
if (SHTTY != -1) {
struct ttyinfo ti;
gettyinfo(&ti);
if (! resettty) {
saveti = ti;
resettty = 1;
}
#ifdef HAS_TIO
ti.tio.c_lflag &= ~ICANON;
ti.tio.c_cc[VMIN] = 1;
ti.tio.c_cc[VTIME] = 0;
#else
ti.sgttyb.sg_flags |= CBREAK;
#endif
settyinfo(&ti);
}
}
if (OPT_ISSET(ops,'t')) {
zlong timeout = 0;
if (OPT_HASARG(ops,'t')) {
mnumber mn = zero_mnumber;
mn = matheval(OPT_ARG(ops,'t'));
if (errflag)
return 1;
if (mn.type == MN_FLOAT) {
mn.u.d *= 1e6;
timeout = (zlong)mn.u.d;
} else {
timeout = (zlong)mn.u.l * (zlong)1000000;
}
}
if (izle) {
/*
* Timeout is in 100ths of a second rather than us.
* See calc_timeout() in zle_main for format of this.
*/
timeout = -(timeout/(zlong)10000 + 1L);
izle_timeout = (long)timeout;
#ifdef LONG_MAX
/* saturate if range exceeded */
if ((zlong)izle_timeout != timeout)
izle_timeout = LONG_MAX;
#endif
} else {
if (readfd == -1 ||
!read_poll(readfd, &readchar, keys && !zleactive,
timeout)) {
if (keys && !zleactive && !isem)
settyinfo(&shttyinfo);
else if (resettty && SHTTY != -1)
settyinfo(&saveti);
if (haso) {
fclose(shout);
shout = oshout;
SHTTY = -1;
}
return OPT_ISSET(ops,'q') ? 2 : 1;
}
}
}
#ifdef MULTIBYTE_SUPPORT
memset(&mbs, 0, sizeof(mbs));
#endif
/*
* option -k means read only a given number of characters (default 1)
* option -q means get one character, and interpret it as a Y or N
*/
if (OPT_ISSET(ops,'k') || OPT_ISSET(ops,'q')) {
int eof = 0;
/* allocate buffer space for result */
#ifdef MULTIBYTE_SUPPORT
bptr = buf = (char *)zalloc(nchars*MB_CUR_MAX+1);
#else
bptr = buf = (char *)zalloc(nchars+1);
#endif
do {
if (izle) {
zleentry(ZLE_CMD_GET_KEY, izle_timeout, NULL, &val);
if (val < 0) {
eof = 1;
break;
}
*bptr = (char) val;
#ifdef MULTIBYTE_SUPPORT
if (isset(MULTIBYTE)) {
ret = mbrlen(bptr++, 1, &mbs);
if (ret == MB_INVALID)
memset(&mbs, 0, sizeof(mbs));
/* treat invalid as single character */
if (ret != MB_INCOMPLETE)
nchars--;
continue;
} else {
bptr++;
nchars--;
}
#else
bptr++;
nchars--;
#endif
} else {
/* If read returns 0, is end of file */
if (readchar >= 0) {
*bptr = readchar;
val = 1;
readchar = -1;
} else {
while ((val = read(readfd, bptr, nchars)) < 0) {
if (errno != EINTR ||
errflag || retflag || breaks || contflag)
break;
}
if (val <= 0) {
eof = 1;
break;
}
}
#ifdef MULTIBYTE_SUPPORT
if (isset(MULTIBYTE)) {
while (val > 0) {
ret = mbrlen(bptr, val, &mbs);
if (ret == MB_INCOMPLETE) {
bptr += val;
break;
} else {
if (ret == MB_INVALID) {
memset(&mbs, 0, sizeof(mbs));
/* treat as single byte */
ret = 1;
}
else if (ret == 0) /* handle null as normal char */
ret = 1;
else if (ret > (size_t)val) {
/* Some mbrlen()s return the full char len */
ret = val;
}
nchars--;
val -= ret;
bptr += ret;
}
}
continue;
}
#endif
/* decrement number of characters read from number required */
nchars -= val;
/* increment pointer past read characters */
bptr += val;
}
} while (nchars > 0);
if (!izle && !OPT_ISSET(ops,'u') && !OPT_ISSET(ops,'p')) {
/* dispose of result appropriately, etc. */
if (isem)
while (val > 0 && read(SHTTY, &d, 1) == 1 && d != '\n');
else {
settyinfo(&shttyinfo);
resettty = 0;
}
if (haso) {
fclose(shout); /* close(SHTTY) */
shout = oshout;
SHTTY = -1;
}
}
if (OPT_ISSET(ops,'q'))
{
/*
* Keep eof as status but status is now whether we read
* 'y' or 'Y'. If we timed out, status is 2.
*/
if (eof)
eof = 2;
else
eof = (bptr - buf != 1 || (buf[0] != 'y' && buf[0] != 'Y'));
buf[0] = eof ? 'n' : 'y';
bptr = buf + 1;
}
if (OPT_ISSET(ops,'e') || OPT_ISSET(ops,'E'))
fwrite(buf, bptr - buf, 1, stdout);
if (!OPT_ISSET(ops,'e'))
setsparam(reply, metafy(buf, bptr - buf, META_REALLOC));
else
zfree(buf, bptr - buf + 1);
if (resettty && SHTTY != -1)
settyinfo(&saveti);
return eof;
}
/* All possible special types of input have been exhausted. Take one line,
and assign words to the parameters until they run out. Leftover words go
onto the last parameter. If an array is specified, all the words become
separate elements of the array. */
zbuforig = zbuf = (!OPT_ISSET(ops,'z')) ? NULL :
(nonempty(bufstack)) ? (char *) getlinknode(bufstack) : ztrdup("");
first = 1;
bslash = 0;
while (*args || (OPT_ISSET(ops,'A') && !gotnl)) {
sigset_t s = child_unblock();
buf = bptr = (char *)zalloc(bsiz = 64);
#ifdef MULTIBYTE_SUPPORT
laststart = buf;
ret = MB_INCOMPLETE;
#endif
/* get input, a character at a time */
while (!gotnl) {
c = zread(izle, &readchar, izle_timeout);
/* \ at the end of a line indicates a continuation *
* line, except in raw mode (-r option) */
#ifdef MULTIBYTE_SUPPORT
if (c == EOF) {
/* not waiting to be completed any more */
ret = 0;
break;
}
*bptr = (char)c;
if (isset(MULTIBYTE)) {
ret = mbrtowc(&wc, bptr, 1, &mbs);
if (!ret) /* NULL */
ret = 1;
} else {
ret = 1;
wc = (wchar_t)c;
}
if (ret != MB_INCOMPLETE) {
if (ret == MB_INVALID) {
memset(&mbs, 0, sizeof(mbs));
/* Treat this as a single character */
wc = (wchar_t)c;
laststart = bptr;
}
if (bslash && wc == delim) {
bslash = 0;
continue;
}
if (wc == delim)
break;
/*
* `first' is non-zero if any separator we encounter is a
* non-whitespace separator, which means that anything
* (even an empty string) between, before or after separators
* is significant. If it is zero, we have a whitespace
* separator, which shouldn't cause extra empty strings to
* be emitted. Hence the test for (*buf || first) when
* we assign the result of reading a word.
*/
if (!bslash && wcsitype(wc, ISEP)) {
if (bptr != buf ||
(!(c < 128 && iwsep(c)) && first)) {
first |= !(c < 128 && iwsep(c));
break;
}
first |= !(c < 128 && iwsep(c));
continue;
}
bslash = (wc == L'\\' && !bslash && !OPT_ISSET(ops,'r'));
if (bslash)
continue;
first = 0;
}
if (imeta(STOUC(*bptr))) {
bptr[1] = bptr[0] ^ 32;
bptr[0] = Meta;
bptr += 2;
}
else
bptr++;
if (ret != MB_INCOMPLETE)
laststart = bptr;
#else
if (c == EOF)
break;
if (bslash && c == delim) {
bslash = 0;
continue;
}
if (c == delim)
break;
/*
* `first' is non-zero if any separator we encounter is a
* non-whitespace separator, which means that anything
* (even an empty string) between, before or after separators
* is significant. If it is zero, we have a whitespace
* separator, which shouldn't cause extra empty strings to
* be emitted. Hence the test for (*buf || first) when
* we assign the result of reading a word.
*/
if (!bslash && isep(c)) {
if (bptr != buf || (!iwsep(c) && first)) {
first |= !iwsep(c);
break;
}
first |= !iwsep(c);
continue;
}
bslash = c == '\\' && !bslash && !OPT_ISSET(ops,'r');
if (bslash)
continue;
first = 0;
if (imeta(c)) {
*bptr++ = Meta;
*bptr++ = c ^ 32;
} else
*bptr++ = c;
#endif
/* increase the buffer size, if necessary */
if (bptr >= buf + bsiz - 1) {
int blen = bptr - buf;
#ifdef MULTIBYTE_SUPPORT
int llen = laststart - buf;
#endif
buf = realloc(buf, bsiz *= 2);
bptr = buf + blen;
#ifdef MULTIBYTE_SUPPORT
laststart = buf + llen;
#endif
}
}
signal_setmask(s);
#ifdef MULTIBYTE_SUPPORT
if (c == EOF) {
gotnl = 1;
*bptr = '\0'; /* see below */
} else if (ret == MB_INCOMPLETE) {
/*
* We can only get here if there is an EOF in the
* middle of a character... safest to keep the debris,
* I suppose.
*/
*bptr = '\0';
} else {
if (wc == delim)
gotnl = 1;
*laststart = '\0';
}
#else
if (c == delim || c == EOF)
gotnl = 1;
*bptr = '\0';
#endif
/* dispose of word appropriately */
if (OPT_ISSET(ops,'e') ||
/*
* When we're doing an array assignment, we'll
* handle echoing at that point. In all other
* cases (including -A with no assignment)
* we'll do it here.
*/
(OPT_ISSET(ops,'E') && !OPT_ISSET(ops,'A'))) {
zputs(buf, stdout);
putchar('\n');
}
if (!OPT_ISSET(ops,'e') && (*buf || first || gotnl)) {
if (OPT_ISSET(ops,'A')) {
addlinknode(readll, buf);
al++;
} else
setsparam(reply, buf);
} else
free(buf);
if (!OPT_ISSET(ops,'A'))
reply = *args++;
}
/* handle EOF */
if (c == EOF) {
if (readfd == coprocin) {
close(coprocin);
close(coprocout);
coprocin = coprocout = -1;
}
}
/* final assignment (and display) of array parameter */
if (OPT_ISSET(ops,'A')) {
char **pp, **p = NULL;
LinkNode n;
p = (OPT_ISSET(ops,'e') ? (char **)NULL
: (char **)zalloc((al + 1) * sizeof(char *)));
for (pp = p, n = firstnode(readll); n; incnode(n)) {
if (OPT_ISSET(ops,'E')) {
zputs((char *) getdata(n), stdout);
putchar('\n');
}
if (p)
*pp++ = (char *)getdata(n);
else
zsfree(getdata(n));
}
if (p) {
*pp++ = NULL;
setaparam(reply, p);
}
if (resettty && SHTTY != -1)
settyinfo(&saveti);
return c == EOF;
}
buf = bptr = (char *)zalloc(bsiz = 64);
#ifdef MULTIBYTE_SUPPORT
laststart = buf;
ret = MB_INCOMPLETE;
#endif
/* any remaining part of the line goes into one parameter */
bslash = 0;
if (!gotnl) {
sigset_t s = child_unblock();
for (;;) {
c = zread(izle, &readchar, izle_timeout);
#ifdef MULTIBYTE_SUPPORT
if (c == EOF) {
/* not waiting to be completed any more */
ret = 0;
break;
}
*bptr = (char)c;
if (isset(MULTIBYTE)) {
ret = mbrtowc(&wc, bptr, 1, &mbs);
if (!ret) /* NULL */
ret = 1;
} else {
ret = 1;
wc = (wchar_t)c;
}
if (ret != MB_INCOMPLETE) {
if (ret == MB_INVALID) {
memset(&mbs, 0, sizeof(mbs));
/* Treat this as a single character */
wc = (wchar_t)c;
laststart = bptr;
}
/*
* \ at the end of a line introduces a continuation line,
* except in raw mode (-r option)
*/
if (bslash && wc == delim) {
bslash = 0;
continue;
}
if (wc == delim && !zbuf)
break;
if (!bslash && bptr == buf && wcsitype(wc, ISEP)) {
if (c < 128 && iwsep(c))
continue;
else if (!first) {
first = 1;
continue;
}
}
bslash = (wc == L'\\' && !bslash && !OPT_ISSET(ops,'r'));
if (bslash)
continue;
}
if (imeta(STOUC(*bptr))) {
bptr[1] = bptr[0] ^ 32;
bptr[0] = Meta;
bptr += 2;
}
else
bptr++;
if (ret != MB_INCOMPLETE)
laststart = bptr;
#else
/* \ at the end of a line introduces a continuation line, except in
raw mode (-r option) */
if (bslash && c == delim) {
bslash = 0;
continue;
}
if (c == EOF || (c == delim && !zbuf))
break;
if (!bslash && isep(c) && bptr == buf) {
if (iwsep(c))
continue;
else if (!first) {
first = 1;
continue;
}
}
bslash = c == '\\' && !bslash && !OPT_ISSET(ops,'r');
if (bslash)
continue;
if (imeta(c)) {
*bptr++ = Meta;
*bptr++ = c ^ 32;
} else
*bptr++ = c;
#endif
/* increase the buffer size, if necessary */
if (bptr >= buf + bsiz - 1) {
int blen = bptr - buf;
#ifdef MULTIBYTE_SUPPORT
int llen = laststart - buf;
#endif
buf = realloc(buf, bsiz *= 2);
bptr = buf + blen;
#ifdef MULTIBYTE_SUPPORT
laststart = buf + llen;
#endif
}
}
signal_setmask(s);
}
#ifdef MULTIBYTE_SUPPORT
if (ret != MB_INCOMPLETE)
bptr = laststart;
#endif
/*
* Strip trailing IFS whitespace.
* iwsep can only be certain single-byte ASCII bytes, but we
* must check the byte isn't metafied.
*/
while (bptr > buf) {
if (bptr > buf + 1 && bptr[-2] == Meta) {
/* non-ASCII, can't be IWSEP */
break;
} else if (iwsep(bptr[-1]))
bptr--;
else
break;
}
*bptr = '\0';
if (resettty && SHTTY != -1)
settyinfo(&saveti);
/* final assignment of reply, etc. */
if (OPT_ISSET(ops,'e') || OPT_ISSET(ops,'E')) {
zputs(buf, stdout);
putchar('\n');
}
if (!OPT_ISSET(ops,'e'))
setsparam(reply, buf);
else
zsfree(buf);
if (zbuforig) {
char first = *zbuforig;
zsfree(zbuforig);
if (!first)
return 1;
} else if (c == EOF) {
if (readfd == coprocin) {
close(coprocin);
close(coprocout);
coprocin = coprocout = -1;
}
return 1;
}
/*
* The following is to ensure a failure to set the parameter
* causes a non-zero status return. There are arguments for
* turning a non-zero status into errflag more widely.
*/
return errflag;
}
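/*
 * Illustrative usage sketch, not part of the original source, assuming
 * the option handling implemented above:
 *
 *   read line                        # one line into $line (default: REPLY)
 *   read -k 1 -s key                 # one key from the terminal, no echo
 *   if read -q '?Continue? '; then   # y/Y gives status 0
 *       ...
 *   fi
 *   read -d : -A fields              # read up to ':', split into an array
 *   read -t 5 -r line                # give up after 5 seconds, raw mode
 *   IFS=: read user pass rest <<< "$entry"   # leftover words go to $rest
 */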
/**/
static int
zread(int izle, int *readchar, long izle_timeout)
{
char cc, retry = 0;
int ret;
if (izle) {
int c;
zleentry(ZLE_CMD_GET_KEY, izle_timeout, NULL, &c);
return (c < 0 ? EOF : c);
}
/* use zbuf if possible */
if (zbuf) {
/* If zbuf points to anything, it points to the next character in the
buffer. This may be a null byte to indicate EOF. If reading from the
buffer, move on the buffer pointer. */
if (*zbuf == Meta)
return zbuf++, STOUC(*zbuf++ ^ 32);
else
return (*zbuf) ? STOUC(*zbuf++) : EOF;
}
if (*readchar >= 0) {
cc = *readchar;
*readchar = -1;
return STOUC(cc);
}
for (;;) {
/* read a character from readfd */
ret = read(readfd, &cc, 1);
switch (ret) {
case 1:
/* return the character read */
return STOUC(cc);
case -1:
#if defined(EAGAIN) || defined(EWOULDBLOCK)
if (!retry && readfd == 0 && (
# ifdef EAGAIN
errno == EAGAIN
# ifdef EWOULDBLOCK
||
# endif /* EWOULDBLOCK */
# endif /* EAGAIN */
# ifdef EWOULDBLOCK
errno == EWOULDBLOCK
# endif /* EWOULDBLOCK */
) && setblock_stdin()) {
retry = 1;
continue;
} else
#endif /* EAGAIN || EWOULDBLOCK */
if (errno == EINTR && !(errflag || retflag || breaks || contflag))
continue;
break;
}
return EOF;
}
}
/* holds arguments for testlex() */
/**/
char **testargs, **curtestarg;
/* test, [: the old-style general purpose logical expression builtin */
/**/
void
testlex(void)
{
if (tok == LEXERR)
return;
tokstr = *(curtestarg = testargs);
if (!*testargs) {
/* if tok is already zero, reading past the end: error */
tok = tok ? NULLTOK : LEXERR;
return;
} else if (!strcmp(*testargs, "-o"))
tok = DBAR;
else if (!strcmp(*testargs, "-a"))
tok = DAMPER;
else if (!strcmp(*testargs, "!"))
tok = BANG;
else if (!strcmp(*testargs, "("))
tok = INPAR;
else if (!strcmp(*testargs, ")"))
tok = OUTPAR;
else
tok = STRING;
testargs++;
}
/**/
int
bin_test(char *name, char **argv, UNUSED(Options ops), int func)
{
char **s;
Eprog prog;
struct estate state;
int nargs, sense = 0, ret;
/* if "test" was invoked as "[", it needs a matching "]" *
* which is subsequently ignored */
if (func == BIN_BRACKET) {
for (s = argv; *s; s++);
if (s == argv || strcmp(s[-1], "]")) {
zwarnnam(name, "']' expected");
return 2;
}
s[-1] = NULL;
}
/* an empty argument list evaluates to false (1) */
if (!*argv)
return 1;
/*
* Implement some XSI extensions to POSIX here.
* See
* http://pubs.opengroup.org/onlinepubs/9699919799/utilities/test.html
*/
nargs = arrlen(argv);
if (nargs == 3 || nargs == 4)
{
/*
* As parentheses are an extension, we need to be careful ---
* if this is a three-argument expression that could
* be a binary operator, prefer that.
*/
if (!strcmp(argv[0], "(") && !strcmp(argv[nargs-1],")") &&
(nargs != 3 || !is_cond_binary_op(argv[1]))) {
argv[nargs-1] = NULL;
argv++;
}
if (nargs == 4 && !strcmp("!", argv[0])) {
sense = 1;
argv++;
}
}
zcontext_save();
testargs = argv;
tok = NULLTOK;
condlex = testlex;
testlex();
prog = parse_cond();
condlex = zshlex;
if (errflag) {
errflag &= ~ERRFLAG_ERROR;
zcontext_restore();
return 2;
}
if (!prog || tok == LEXERR) {
zwarnnam(name, tokstr ? "parse error" : "argument expected");
zcontext_restore();
return 2;
}
zcontext_restore();
if (*curtestarg) {
zwarnnam(name, "too many arguments");
return 2;
}
/* syntax is OK, so evaluate */
state.prog = prog;
state.pc = prog->prog;
state.strs = prog->strs;
ret = evalcond(&state, name);
if (ret < 2 && sense)
ret = ! ret;
return ret;
}
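/*
 * Illustrative usage sketch, not part of the original source, of the
 * test/[ builtin above, including the XSI 3/4-argument special cases:
 *
 *   [ -n "$x" ] && echo non-empty
 *   test "$a" = "$b" || echo differ
 *   [ ! "$a" = "$b" ]        # 4 arguments starting with !: negated result
 *   [ "(" = ")" ]            # 3 arguments with a binary operator: treated
 *                            # as a comparison, not parenthesised grouping
 *   [ ]                      # empty expression: exit status 1
 */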
/* display a time, provided in units of 1/60s, as minutes and seconds */
#define pttime(X) printf("%ldm%ld.%02lds",((long) (X))/(60 * clktck),\
((long) (X))/clktck%60,\
((long) (X))*100/clktck%100)
/* times: display, in a two-line format, the times provided by times(3) */
/**/
int
bin_times(UNUSED(char *name), UNUSED(char **argv), UNUSED(Options ops), UNUSED(int func))
{
struct tms buf;
long clktck = get_clktck();
/* get time accounting information */
if (times(&buf) == -1)
return 1;
pttime(buf.tms_utime); /* user time */
putchar(' ');
pttime(buf.tms_stime); /* system time */
putchar('\n');
pttime(buf.tms_cutime); /* user time, children */
putchar(' ');
pttime(buf.tms_cstime); /* system time, children */
putchar('\n');
return 0;
}
/* trap: set/unset signal traps */
/**/
int
bin_trap(char *name, char **argv, UNUSED(Options ops), UNUSED(int func))
{
Eprog prog;
char *arg, *s;
int sig;
if (*argv && !strcmp(*argv, "--"))
argv++;
/* If given no arguments, list all currently-set traps */
if (!*argv) {
queue_signals();
for (sig = 0; sig < VSIGCOUNT; sig++) {
if (sigtrapped[sig] & ZSIG_FUNC) {
HashNode hn;
if ((hn = gettrapnode(sig, 0)))
shfunctab->printnode(hn, 0);
DPUTS(!hn, "BUG: I did not find any trap functions!");
} else if (sigtrapped[sig]) {
const char *name = getsigname(sig);
if (!siglists[sig])
printf("trap -- '' %s\n", name);
else {
s = getpermtext(siglists[sig], NULL, 0);
printf("trap -- ");
quotedzputs(s, stdout);
printf(" %s\n", name);
zsfree(s);
}
}
}
unqueue_signals();
return 0;
}
/* If we have a signal number, unset the specified *
* signals. With only -, remove all traps. */
if ((getsignum(*argv) != -1) || (!strcmp(*argv, "-") && argv++)) {
if (!*argv) {
for (sig = 0; sig < VSIGCOUNT; sig++)
unsettrap(sig);
} else {
for (; *argv; argv++) {
sig = getsignum(*argv);
if (sig == -1) {
zwarnnam(name, "undefined signal: %s", *argv);
break;
}
unsettrap(sig);
}
}
return *argv != NULL;
}
/* Sort out the command to execute on trap */
arg = *argv++;
if (!*arg)
prog = &dummy_eprog;
else if (!(prog = parse_string(arg, 1))) {
zwarnnam(name, "couldn't parse trap command");
return 1;
}
/* set traps */
for (; *argv; argv++) {
Eprog t;
int flags;
sig = getsignum(*argv);
if (sig == -1) {
zwarnnam(name, "undefined signal: %s", *argv);
break;
}
if (idigit(**argv) ||
!strcmp(sigs[sig], *argv) ||
(!strncmp("SIG", *argv, 3) && !strcmp(sigs[sig], *argv+3))) {
/* The signal was specified by number or by canonical name (with
* or without SIG prefix).
*/
flags = 0;
}
else {
/*
* Record that the signal is used under an assumed name.
* If we ever have more than one alias per signal this
* will need improving.
*/
flags = ZSIG_ALIAS;
}
t = dupeprog(prog, 0);
if (settrap(sig, t, flags))
freeeprog(t);
}
return *argv != NULL;
}
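/*
 * Illustrative usage sketch, not part of the original source, assuming
 * the behaviour implemented above:
 *
 *   trap                          # list current traps in re-usable form
 *   trap 'echo caught' INT TERM   # run command on SIGINT or SIGTERM
 *   trap '' HUP                   # ignore SIGHUP
 *   trap - INT                    # restore the default for SIGINT
 *   trap 'rm -f $tmp' EXIT        # cleanup when the shell exits (SIGEXIT)
 */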
/**/
int
bin_ttyctl(UNUSED(char *name), UNUSED(char **argv), Options ops, UNUSED(int func))
{
if (OPT_ISSET(ops,'f'))
ttyfrozen = 1;
else if (OPT_ISSET(ops,'u'))
ttyfrozen = 0;
else
printf("tty is %sfrozen\n", ttyfrozen ? "" : "not ");
return 0;
}
/* let -- mathematical evaluation */
/**/
int
bin_let(UNUSED(char *name), char **argv, UNUSED(Options ops), UNUSED(int func))
{
mnumber val = zero_mnumber;
while (*argv)
val = matheval(*argv++);
/* Errors in math evaluation in let are non-fatal. */
errflag &= ~ERRFLAG_ERROR;
/* should test for fabs(val.u.d) < epsilon? */
return (val.type == MN_INTEGER) ? val.u.l == 0 : val.u.d == 0.0;
}
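/*
 * Illustrative usage sketch, not part of the original source: only the
 * last expression determines the exit status, and a zero result means
 * failure (status 1), as implemented above:
 *
 *   let 'x = 2 * 21' 'y = x + 0'   # status 0 (y is 42, non-zero)
 *   let 'x - x'; echo $?           # prints 1
 */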
/* umask command. umask may be specified as octal digits, or in the *
* symbolic form that chmod(1) uses. Well, a subset of it. Remember *
* that only the bottom nine bits of umask are used, so there's no *
* point allowing the set{u,g}id and sticky bits to be specified. */
/**/
int
bin_umask(char *nam, char **args, Options ops, UNUSED(int func))
{
mode_t um;
char *s = *args;
/* Get the current umask. */
um = umask(0);
umask(um);
/* No arguments means to display the current setting. */
if (!s) {
if (OPT_ISSET(ops,'S')) {
char *who = "ugo";
while (*who) {
char *what = "rwx";
printf("%c=", *who++);
while (*what) {
if (!(um & 0400))
putchar(*what);
um <<= 1;
what++;
}
putchar(*who ? ',' : '\n');
}
} else {
if (um & 0700)
putchar('0');
printf("%03o\n", (unsigned)um);
}
return 0;
}
if (idigit(*s)) {
/* Simple digital umask. */
um = zstrtol(s, &s, 8);
if (*s) {
zwarnnam(nam, "bad umask");
return 1;
}
} else {
/* Symbolic notation -- slightly complicated. */
int whomask, umaskop, mask;
/* More than one symbolic argument may be used at once, each separated
by commas. */
for (;;) {
/* First part of the argument -- who does this apply to?
u=owner, g=group, o=other. */
whomask = 0;
while (*s == 'u' || *s == 'g' || *s == 'o' || *s == 'a')
if (*s == 'u')
s++, whomask |= 0700;
else if (*s == 'g')
s++, whomask |= 0070;
else if (*s == 'o')
s++, whomask |= 0007;
else if (*s == 'a')
s++, whomask |= 0777;
/* Default whomask is everyone. */
if (!whomask)
whomask = 0777;
/* Operation may be +, - or =. */
umaskop = (int)*s;
if (!(umaskop == '+' || umaskop == '-' || umaskop == '=')) {
if (umaskop)
zwarnnam(nam, "bad symbolic mode operator: %c", umaskop);
else
zwarnnam(nam, "bad umask");
return 1;
}
/* Permissions mask -- r=read, w=write, x=execute. */
mask = 0;
while (*++s && *s != ',')
if (*s == 'r')
mask |= 0444 & whomask;
else if (*s == 'w')
mask |= 0222 & whomask;
else if (*s == 'x')
mask |= 0111 & whomask;
else {
zwarnnam(nam, "bad symbolic mode permission: %c", *s);
return 1;
}
/* Apply parsed argument to um. */
if (umaskop == '+')
um &= ~mask;
else if (umaskop == '-')
um |= mask;
else /* umaskop == '=' */
um = (um | (whomask)) & ~mask;
if (*s == ',')
s++;
else
break;
}
if (*s) {
zwarnnam(nam, "bad character in symbolic mode: %c", *s);
return 1;
}
}
/* Finally, set the new umask. */
umask(um);
return 0;
}
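/*
 * Illustrative usage sketch, not part of the original source, assuming
 * the octal and symbolic forms parsed above:
 *
 *   umask            # print the current mask, e.g. 022
 *   umask -S         # symbolic form, e.g. u=rwx,g=rx,o=rx
 *   umask 077        # only the owner gets access to new files
 *   umask u=rwx,go=  # same as 077, expressed symbolically
 *   umask g+w        # additionally allow group write (clears bit 020)
 */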
/* Generic builtin for facilities not available on this OS */
/**/
mod_export int
bin_notavail(char *nam, UNUSED(char **argv), UNUSED(Options ops), UNUSED(int func))
{
zwarnnam(nam, "not available on this system");
return 1;
}
<|start_filename|>zmodules/Src/input.c<|end_filename|>
/*
* input.c - read and store lines of input
*
* This file is part of zsh, the Z shell.
*
* Copyright (c) 1992-1997 <NAME>
* All rights reserved.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and to distribute modified versions of this software for any
* purpose, provided that the above copyright notice and the following
* two paragraphs appear in all copies of this software.
*
* In no event shall <NAME> or the Zsh Development Group be liable
* to any party for direct, indirect, special, incidental, or consequential
* damages arising out of the use of this software and its documentation,
* even if <NAME> and the Zsh Development Group have been advised of
* the possibility of such damage.
*
* <NAME> and the Zsh Development Group specifically disclaim any
* warranties, including, but not limited to, the implied warranties of
* merchantability and fitness for a particular purpose. The software
* provided hereunder is on an "as is" basis, and <NAME> and the
* Zsh Development Group have no obligation to provide maintenance,
* support, updates, enhancements, or modifications.
*
*/
/*
* This file deals with input buffering, supplying characters to the
* history expansion code a character at a time. Input is stored on a
* stack, which allows insertion of strings into the input, possibly with
* flags marking the end of alias expansion, with minimal copying of
* strings. The same stack is used to record the fact that the input
* is a history or alias expansion and to store the alias while it is in use.
*
* Input is taken either from zle, if appropriate, or read directly from
* the input file, or may be supplied by some other part of the shell (such
* as `eval' or $(...) substitution). In the last case, it should be
* supplied by pushing a new level onto the stack, via inpush(input_string,
* flag, alias); if the current input really needs to be altered, use
* inputsetline(input_string, flag). `Flag' can include or's of INP_FREE
* (if the input string is to be freed when used), INP_CONT (if the input
* is to continue onto what's already in the input queue), INP_ALIAS
* (push supplied alias onto stack) or INP_HIST (ditto, but used to
* mark history expansion). `alias' is ignored unless INP_ALIAS or
* INP_HIST is supplied. INP_ALIAS is always set if INP_HIST is.
*
* Note that the input string is itself used as the input buffer: it is not
* copied, nor is it ever written back to, so using a constant string
* should work. Consequently, when passing areas of memory from the heap
* it is necessary that that heap last as long as the operation of reading
* the string. After the string is read, the stack should be popped with
* inpop(), which effectively flushes any unread input as well as restoring
* the previous input state.
*
* The internal flags INP_ALCONT and INP_HISTCONT show that the stack
* element was pushed by an alias or history expansion; they should not
* be needed elsewhere.
*
* The global variable inalmore is set to indicate aliases should
* continue to be expanded because the last alias expansion ended
* in a space. It is only reset after a complete word was read
* without expanding a new alias, in exalias().
*
* PWS 1996/12/10
*/
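/* Rough usage sketch (illustrative, not from the original source): a caller
 * that wants the lexer to read from a string typically brackets the work
 * with the stack operations described above, along the lines of
 *
 *     inpush(ztrdup(str), INP_FREE, NULL);
 *     ... run the lexer/parser, which consumes characters via ingetc() ...
 *     inpop();
 *
 * The exact flags vary by caller; see inpush(), ingetc() and inpop() below
 * for the authoritative behaviour. */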
#ifdef HAVE_STDIO_H
#include <stdio.h>
#endif
#include "zsh.mdh"
#include "input.pro"
/* the shell input fd */
/**/
int SHIN;
/* buffered shell input for non-interactive shells */
/**/
FILE *bshin;
/* != 0 means we are reading input from a string */
/**/
int strin;
/* total # of characters waiting to be read. */
/**/
mod_export int inbufct;
/* the flags controlling the input routines in input.c: see INP_* in zsh.h */
/**/
int inbufflags;
static char *inbuf; /* Current input buffer */
static char *inbufptr; /* Pointer into input buffer */
static char *inbufpush; /* Character at which to re-push alias */
static int inbufleft; /* Characters left in current input
stack element */
/* Input must be stacked since the input queue is used by
* various different parts of the shell.
*/
struct instacks {
char *buf, *bufptr;
Alias alias;
int bufleft, bufct, flags;
};
static struct instacks *instack, *instacktop;
/*
* Input stack size. We need to push the stack for aliases, history
* expansion, and reading from internal strings: only if these operations
* are nested do we need more than one extra level. Thus we shouldn't need
* too much space as a rule. Initially, INSTACK_INITIAL is allocated; if
* more is required, an extra INSTACK_EXPAND is added each time.
*/
#define INSTACK_INITIAL 4
#define INSTACK_EXPAND 4
static int instacksz = INSTACK_INITIAL;
/* Read a line from bshin. Convert tokens and *
* null characters to Meta c^32 character pairs. */
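/* For instance (illustrative): a NUL byte read from the input is stored as
 * the two bytes Meta, 0x20 (i.e. 0x00 ^ 32), which later unmetafication
 * turns back into the original byte. */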
/**/
mod_export char *
shingetline(void)
{
char *line = NULL;
int ll = 0;
int c;
char buf[BUFSIZ];
char *p;
int q = queue_signal_level();
p = buf;
winch_unblock();
dont_queue_signals();
for (;;) {
/* Can't fgets() here because we need to accept '\0' bytes */
do {
errno = 0;
c = fgetc(bshin);
} while (c < 0 && errno == EINTR);
if (c < 0 || c == '\n') {
winch_block();
restore_queue_signals(q);
if (c == '\n')
*p++ = '\n';
if (p > buf) {
*p++ = '\0';
line = zrealloc(line, ll + (p - buf));
memcpy(line + ll, buf, p - buf);
}
return line;
}
if (imeta(c)) {
*p++ = Meta;
*p++ = c ^ 32;
} else
*p++ = c;
if (p >= buf + BUFSIZ - 1) {
winch_block();
queue_signals();
line = zrealloc(line, ll + (p - buf) + 1);
memcpy(line + ll, buf, p - buf);
ll += p - buf;
line[ll] = '\0';
p = buf;
winch_unblock();
dont_queue_signals();
}
}
}
/* Get the next character from the input.
* Will call inputline() to get a new line where necessary.
*/
/**/
int
ingetc(void)
{
int lastc = ' ';
if (lexstop)
return ' ';
for (;;) {
if (inbufleft) {
inbufleft--;
inbufct--;
if (itok(lastc = STOUC(*inbufptr++)))
continue;
if (((inbufflags & INP_LINENO) || !strin) && lastc == '\n')
lineno++;
break;
}
/*
* See if we have reached the end of input
* (due to an error, or to reading from a single string).
* Check the remaining characters left, since if there aren't
* any we don't want to pop the stack---it'll mark any aliases
* as not in use before we've finished processing.
*/
if (!inbufct && (strin || errflag)) {
lexstop = 1;
break;
}
/* If the next element down the input stack is a continuation of
* this, use it.
*/
if (inbufflags & INP_CONT) {
inpoptop();
continue;
}
/* As a last resort, get some more input */
if (inputline())
break;
}
if (!lexstop)
zshlex_raw_add(lastc);
return lastc;
}
/* Read a line from the current command stream and store it as input */
/**/
static int
inputline(void)
{
char *ingetcline, **ingetcpmptl = NULL, **ingetcpmptr = NULL;
int context = ZLCON_LINE_START;
/* If reading code interactively, work out the prompts. */
if (interact && isset(SHINSTDIN)) {
if (!isfirstln) {
ingetcpmptl = &prompt2;
if (rprompt2)
ingetcpmptr = &rprompt2;
context = ZLCON_LINE_CONT;
}
else {
ingetcpmptl = &prompt;
if (rprompt)
ingetcpmptr = &rprompt;
}
}
if (!(interact && isset(SHINSTDIN) && SHTTY != -1 && isset(USEZLE))) {
/*
* If not using zle, read the line straight from the input file.
* Possibly we don't get the whole line at once: in that case,
* we get another chunk with the next call to inputline().
*/
if (interact && isset(SHINSTDIN)) {
/*
* We may still be interactive (e.g. running under emacs),
* so output a prompt if necessary. We don't know enough
* about the input device to be able to handle an rprompt,
* though.
*/
char *pptbuf;
int pptlen;
pptbuf = unmetafy(promptexpand(ingetcpmptl ? *ingetcpmptl : NULL,
0, NULL, NULL, NULL), &pptlen);
write_loop(2, pptbuf, pptlen);
free(pptbuf);
}
ingetcline = shingetline();
} else {
/*
* Since we may have to read multiple lines before getting
* a complete piece of input, we tell zle not to restore the
* original tty settings after reading each chunk. Instead,
* this is done when the history mechanism for the current input
* terminates, which is not until we have the whole input.
* This is supposed to minimise problems on systems that clobber
* typeahead when the terminal settings are altered.
* pws 1998/03/12
*/
int flags = ZLRF_HISTORY|ZLRF_NOSETTY;
if (isset(IGNOREEOF))
flags |= ZLRF_IGNOREEOF;
ingetcline = zleentry(ZLE_CMD_READ, ingetcpmptl, ingetcpmptr,
flags, context);
histdone |= HISTFLAG_SETTY;
}
if (!ingetcline) {
return lexstop = 1;
}
if (errflag) {
free(ingetcline);
errflag |= ERRFLAG_ERROR;
return lexstop = 1;
}
if (isset(VERBOSE)) {
/* Output the whole line read so far. */
zputs(ingetcline, stderr);
fflush(stderr);
}
if (keyboardhackchar && *ingetcline &&
ingetcline[strlen(ingetcline) - 1] == '\n' &&
interact && isset(SHINSTDIN) &&
SHTTY != -1 && ingetcline[1])
{
char *stripptr = ingetcline + strlen(ingetcline) - 2;
if (*stripptr == keyboardhackchar) {
/* Junk an unwanted character at the end of the line.
(key too close to return key) */
int ct = 1; /* force odd */
char *ptr;
if (keyboardhackchar == '\'' || keyboardhackchar == '"' ||
keyboardhackchar == '`') {
/*
* for the chars above, also require an odd count before
* junking
*/
for (ct = 0, ptr = ingetcline; *ptr; ptr++)
if (*ptr == keyboardhackchar)
ct++;
}
if (ct & 1) {
stripptr[0] = '\n';
stripptr[1] = '\0';
}
}
}
isfirstch = 1;
if ((inbufflags & INP_APPEND) && inbuf) {
/*
* We need new input but need to be able to back up
* over the old input, so append this line.
* Pushing the line onto the stack doesn't have the right
* effect.
*
* This is quite a simple and inefficient fix, but currently
* we only need it when backing up over a multi-line $((...
* that turned out to be a command substitution rather than
* a math substitution, which is a very special case.
* So it's not worth rewriting.
*/
char *oinbuf = inbuf;
int newlen = strlen(ingetcline);
int oldlen = (int)(inbufptr - inbuf) + inbufleft;
if (inbufflags & INP_FREE) {
inbuf = realloc(inbuf, oldlen + newlen + 1);
} else {
inbuf = zalloc(oldlen + newlen + 1);
memcpy(inbuf, oinbuf, oldlen);
}
inbufptr += inbuf - oinbuf;
strcpy(inbuf + oldlen, ingetcline);
free(ingetcline);
inbufleft += newlen;
inbufct += newlen;
inbufflags |= INP_FREE;
} else {
/* Put this into the input channel. */
inputsetline(ingetcline, INP_FREE);
}
return 0;
}
/*
* Put a string in the input queue:
* inbuf is only freeable if the flags include INP_FREE.
*/
/**/
static void
inputsetline(char *str, int flags)
{
queue_signals();
if ((inbufflags & INP_FREE) && inbuf) {
free(inbuf);
}
inbuf = inbufptr = str;
inbufleft = strlen(inbuf);
/*
* inbufct must reflect the total number of characters left,
* as it is used by other parts of the shell, so we need to take account
* of whether the input stack continues, and whether there
* is an extra space to add on at the end.
*/
if (flags & INP_CONT)
inbufct += inbufleft;
else
inbufct = inbufleft;
inbufflags = flags;
unqueue_signals();
}
/*
* Backup one character of the input.
* The last character can always be backed up, provided we didn't just
* expand an alias or a history reference.
* In fact, the character is ignored and the previous character is used.
* (If that's wrong, the bug is in the calling code. Use the #ifdef DEBUG
* code to check.)
*/
/**/
void
inungetc(int c)
{
if (!lexstop) {
if (inbufptr != inbuf) {
#ifdef DEBUG
/* Just for debugging: enable only if foul play suspected. */
if (inbufptr[-1] != (char) c)
fprintf(stderr, "Warning: backing up wrong character.\n");
#endif
/* Just decrement the pointer: if it's not the same
* character being pushed back, we're in trouble anyway.
*/
inbufptr--;
inbufct++;
inbufleft++;
if (((inbufflags & INP_LINENO) || !strin) && c == '\n')
lineno--;
}
else if (!(inbufflags & INP_CONT)) {
#ifdef DEBUG
/* Just for debugging */
fprintf(stderr, "Attempt to inungetc() at start of input.\n");
#endif
zerr("Garbled input at %c (binary file as commands?)", c);
return;
}
else {
/*
* The character is being backed up from a previous input stack
* layer. However, there was an expansion in the middle, so we
* can't back up where we want to. Instead, we just push it
* onto the input stack as an extra character.
*/
char *cback = (char *)zshcalloc(2);
cback[0] = (char) c;
inpush(cback, INP_FREE|INP_CONT, NULL);
}
/* If we are back at the start of a segment,
* we may need to restore an alias popped from the stack.
* Note this may be a dummy (history expansion) entry.
*/
if (inbufptr == inbufpush &&
(inbufflags & (INP_ALCONT|INP_HISTCONT))) {
/*
* Go back up the stack over all entries which were alias
* expansions and were pushed with nothing remaining to read.
*/
do {
if (instacktop->alias)
instacktop->alias->inuse = 1;
instacktop++;
} while ((instacktop->flags & (INP_ALCONT|INP_HISTCONT))
&& !instacktop->bufleft);
if (inbufflags & INP_HISTCONT)
inbufflags = INP_CONT|INP_ALIAS|INP_HIST;
else
inbufflags = INP_CONT|INP_ALIAS;
inbufleft = 0;
inbuf = inbufptr = "";
}
zshlex_raw_back();
}
}
/* stuff a whole file into the input queue and print it */
/**/
int
stuff(char *fn)
{
FILE *in;
char *buf;
off_t len;
if (!(in = fopen(unmeta(fn), "r"))) {
zerr("can't open %s", fn);
return 1;
}
fseek(in, 0, 2);
len = ftell(in);
fseek(in, 0, 0);
buf = (char *)zalloc(len + 1);
if (!(fread(buf, len, 1, in))) {
zerr("read error on %s", fn);
fclose(in);
zfree(buf, len + 1);
return 1;
}
fclose(in);
buf[len] = '\0';
fwrite(buf, len, 1, stderr);
fflush(stderr);
inputsetline(metafy(buf, len, META_REALLOC), INP_FREE);
return 0;
}
/* flush input queue */
/**/
void
inerrflush(void)
{
while (!lexstop && inbufct)
ingetc();
}
/* Set some new input onto a new element of the input stack */
/**/
mod_export void
inpush(char *str, int flags, Alias inalias)
{
if (!instack) {
/* Initial stack allocation */
instack = (struct instacks *)zalloc(instacksz*sizeof(struct instacks));
instacktop = instack;
}
instacktop->buf = inbuf;
instacktop->bufptr = inbufptr;
instacktop->bufleft = inbufleft;
instacktop->bufct = inbufct;
inbufflags &= ~(INP_ALCONT|INP_HISTCONT);
if (flags & (INP_ALIAS|INP_HIST)) {
/*
* Text is expansion for history or alias, so continue
* back to old level when done. Also mark stack top
* as alias continuation so as to back up if necessary,
* and mark alias as in use.
*/
flags |= INP_CONT|INP_ALIAS;
if (flags & INP_HIST)
instacktop->flags = inbufflags | INP_HISTCONT;
else
instacktop->flags = inbufflags | INP_ALCONT;
if ((instacktop->alias = inalias))
inalias->inuse = 1;
} else {
/* If we are continuing an alias expansion, record the alias
* expansion in new set of flags (do we need this?)
*/
if (((instacktop->flags = inbufflags) & INP_ALIAS) &&
(flags & INP_CONT))
flags |= INP_ALIAS;
}
instacktop++;
if (instacktop == instack + instacksz) {
/* Expand the stack */
instack = (struct instacks *)
realloc(instack,
(instacksz + INSTACK_EXPAND)*sizeof(struct instacks));
instacktop = instack + instacksz;
instacksz += INSTACK_EXPAND;
}
/*
* We maintain the entry above the highest one with real
* text as a flag to inungetc() that it can stop re-pushing the stack.
*/
instacktop->flags = 0;
inbufpush = inbuf = NULL;
inputsetline(str, flags);
}
/* Remove the top element of the stack */
/**/
static void
inpoptop(void)
{
if (!lexstop) {
inbufflags &= ~(INP_ALCONT|INP_HISTCONT);
while (inbufptr > inbuf) {
inbufptr--;
inbufct++;
inbufleft++;
/*
* As elsewhere in input and history mechanisms:
* unwinding aliases and unwinding history have different
* implications as aliases are after the lexer while
* history is before, but they're both pushed onto
* the input stack.
*/
if ((inbufflags & (INP_ALIAS|INP_HIST|INP_RAW_KEEP)) == INP_ALIAS)
zshlex_raw_back();
}
}
if (inbuf && (inbufflags & INP_FREE))
free(inbuf);
instacktop--;
inbuf = instacktop->buf;
inbufptr = inbufpush = instacktop->bufptr;
inbufleft = instacktop->bufleft;
inbufct = instacktop->bufct;
inbufflags = instacktop->flags;
if (!(inbufflags & (INP_ALCONT|INP_HISTCONT)))
return;
if (instacktop->alias) {
char *t = instacktop->alias->text;
/* a real alias: mark it as unused. */
instacktop->alias->inuse = 0;
if (*t && t[strlen(t) - 1] == ' ') {
inalmore = 1;
histbackword();
}
}
}
/* Remove the top element of the stack and all its continuations. */
/**/
mod_export void
inpop(void)
{
int remcont;
do {
remcont = inbufflags & INP_CONT;
inpoptop();
} while (remcont);
}
/*
* Expunge any aliases from the input stack; they shouldn't appear
* in the history and need to be flushed explicitly when we encounter
* an error.
*/
/**/
void
inpopalias(void)
{
while (inbufflags & INP_ALIAS)
inpoptop();
}
/*
* Get pointer to remaining string to read.
*/
/**/
char *
ingetptr(void)
{
return inbufptr;
}
/*
* Check if the current input line, including continuations, is
* expanding an alias. This does not detect alias expansions that
* have been fully processed and popped from the input stack.
* If there is an alias, the most recently expanded is returned,
* else NULL.
*/
/**/
char *input_hasalias(void)
{
int flags = inbufflags;
struct instacks *instackptr = instacktop;
for (;;)
{
if (!(flags & INP_CONT))
break;
instackptr--;
if (instackptr->alias)
return instackptr->alias->node.nam;
flags = instackptr->flags;
}
return NULL;
}
<|start_filename|>Makefile<|end_filename|>
LOGPATH = doc/zsdoc/logs
LOGFILE = $(LOGPATH)/$(shell date --iso=seconds)
all: zinit.zsh.zwc zinit-side.zsh.zwc zinit-install.zsh.zwc zinit-autoload.zsh.zwc
%.zwc: %
doc/zcompile $<
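# Usage note (illustrative): plain `make' builds the *.zwc word-code files
# via the pattern rule above; `make doc' regenerates the asciidoc sources,
# `make html' renders them, and `make clean' removes the generated files.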
# Tests moved to gitlab-ci.
#alltest: test testB testC testD testE
#test:
# make VERBOSE=$(VERBOSE) NODIFF=$(NODIFF) DEBUG=$(DEBUG) OPTDUMP=$(OPTDUMP) OPTS=$(OPTS) -C test test
#testB:
# make VERBOSE=$(VERBOSE) NODIFF=$(NODIFF) DEBUG=$(DEBUG) OPTDUMP=$(OPTDUMP) OPTS="kshglob" -C test test
#testC:
# make VERBOSE=$(VERBOSE) NODIFF=$(NODIFF) DEBUG=$(DEBUG) OPTDUMP=$(OPTDUMP) OPTS="noextendedglob" -C test test
#testD:
# make VERBOSE=$(VERBOSE) NODIFF=$(NODIFF) DEBUG=$(DEBUG) OPTDUMP=$(OPTDUMP) OPTS="ksharrays" -C test test
#testE:
# make VERBOSE=$(VERBOSE) NODIFF=$(NODIFF) DEBUG=$(DEBUG) OPTDUMP=$(OPTDUMP) OPTS="ignoreclosebraces" -C test test
doc: zinit.zsh zinit-side.zsh zinit-install.zsh zinit-autoload.zsh
rm -rf doc/zsdoc/data doc/zsdoc/*.adoc
cd doc && \
zsd -v --scomm --cignore \
'(\#*FUNCTION:*{{{*|\#[[:space:]]#}}}*)' \
../zinit.zsh ../zinit-side.zsh ../zinit-install.zsh ../zinit-autoload.zsh
echo $(LOGFILE)
cd ..
html: doc
cd doc/zsdoc && \
asciidoctor zinit.zsh.adoc && \
asciidoctor zinit-side.zsh.adoc && \
asciidoctor zinit-install.zsh.adoc && \
asciidoctor zinit-autoload.zsh.adoc
echo $(LOGFILE)
cd ..
clean:
rm -f zinit.zsh.zwc zinit-side.zsh.zwc zinit-install.zsh.zwc zinit-autoload.zsh.zwc
rm -rf doc/zsdoc/data
.PHONY: all clean doc
# vim:noet:sts=8:ts=8
<|start_filename|>zmodules/Src/utils.c<|end_filename|>
/*
* utils.c - miscellaneous utilities
*
* This file is part of zsh, the Z shell.
*
* Copyright (c) 1992-1997 <NAME>
* All rights reserved.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and to distribute modified versions of this software for any
* purpose, provided that the above copyright notice and the following
* two paragraphs appear in all copies of this software.
*
* In no event shall <NAME> or the Zsh Development Group be liable
* to any party for direct, indirect, special, incidental, or consequential
* damages arising out of the use of this software and its documentation,
* even if <NAME> and the Zsh Development Group have been advised of
* the possibility of such damage.
*
* <NAME> and the Zsh Development Group specifically disclaim any
* warranties, including, but not limited to, the implied warranties of
* merchantability and fitness for a particular purpose. The software
* provided hereunder is on an "as is" basis, and <NAME> and the
* Zsh Development Group have no obligation to provide maintenance,
* support, updates, enhancements, or modifications.
*
*/
#include "zsh.mdh"
#include "utils.pro"
/* name of script being sourced */
/**/
mod_export char *scriptname; /* is sometimes a function name */
/* filename of script or other file containing code source e.g. autoload */
/**/
mod_export char *scriptfilename;
/* != 0 if we are in a new style completion function */
/**/
mod_export int incompfunc;
#ifdef MULTIBYTE_SUPPORT
struct widechar_array {
wchar_t *chars;
size_t len;
};
typedef struct widechar_array *Widechar_array;
/*
* The wordchars variable turned into a wide character array.
* This is much more convenient for testing.
*/
static struct widechar_array wordchars_wide;
/*
* The same for the separators (IFS) array.
*/
static struct widechar_array ifs_wide;
/* Function to set one of the above from the multibyte array */
static void
set_widearray(char *mb_array, Widechar_array wca)
{
if (wca->chars) {
free(wca->chars);
wca->chars = NULL;
}
wca->len = 0;
if (!isset(MULTIBYTE))
return;
if (mb_array) {
VARARR(wchar_t, tmpwcs, strlen(mb_array));
wchar_t *wcptr = tmpwcs;
wint_t wci;
mb_charinit();
while (*mb_array) {
int mblen;
if (STOUC(*mb_array) <= 0x7f) {
mb_array++;
*wcptr++ = (wchar_t)*mb_array;
continue;
}
mblen = mb_metacharlenconv(mb_array, &wci);
if (!mblen)
break;
/* No good unless all characters are convertible */
if (wci == WEOF)
return;
*wcptr++ = (wchar_t)wci;
#ifdef DEBUG
/*
* This generates a warning from the compiler (and is
* indeed useless) if chars are unsigned. It's
* extreme paranoia anyway.
*/
if (wcptr[-1] < 0)
fprintf(stderr, "BUG: Bad cast to wchar_t\n");
#endif
mb_array += mblen;
}
wca->len = wcptr - tmpwcs;
wca->chars = (wchar_t *)zalloc(wca->len * sizeof(wchar_t));
wmemcpy(wca->chars, tmpwcs, wca->len);
}
}
#endif
/* Print an error
The following functions use the following printf-like format codes
(implemented by zerrmsg()):
Code   Argument types       Prints
%s     const char *         C string (null terminated)
%l     const char *, int    C string of given length (null not required)
%L     long                 decimal value
%d     int                  decimal value
%%     (none)               literal '%'
%c     int                  character at that codepoint
%e     int                  strerror() message (argument is typically 'errno')
*/
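/* For example (illustrative only): a caller reporting a failed open might use
       zwarnnam(nam, "%s: %e", filename, errno);
   which prints the builtin's name, the file name and the strerror() text
   for errno, using the codes listed above. */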
static void
zwarning(const char *cmd, const char *fmt, va_list ap)
{
if (isatty(2))
zleentry(ZLE_CMD_TRASH);
char *prefix = scriptname ? scriptname : (argzero ? argzero : "");
if (cmd) {
if (unset(SHINSTDIN) || locallevel) {
nicezputs(prefix, stderr);
fputc((unsigned char)':', stderr);
}
nicezputs(cmd, stderr);
fputc((unsigned char)':', stderr);
} else {
/*
* scriptname is set when sourcing scripts, so that we get the
* correct name instead of the generic name of whatever
* program/script is running. It's also set in shell functions,
* so test locallevel, too.
*/
nicezputs((isset(SHINSTDIN) && !locallevel) ? "zsh" : prefix, stderr);
fputc((unsigned char)':', stderr);
}
zerrmsg(stderr, fmt, ap);
}
/**/
mod_export void
zerr(VA_ALIST1(const char *fmt))
VA_DCL
{
va_list ap;
VA_DEF_ARG(const char *fmt);
if (errflag || noerrs) {
if (noerrs < 2)
errflag |= ERRFLAG_ERROR;
return;
}
errflag |= ERRFLAG_ERROR;
VA_START(ap, fmt);
VA_GET_ARG(ap, fmt, const char *);
zwarning(NULL, fmt, ap);
va_end(ap);
}
/**/
mod_export void
zerrnam(VA_ALIST2(const char *cmd, const char *fmt))
VA_DCL
{
va_list ap;
VA_DEF_ARG(const char *cmd);
VA_DEF_ARG(const char *fmt);
if (errflag || noerrs)
return;
errflag |= ERRFLAG_ERROR;
VA_START(ap, fmt);
VA_GET_ARG(ap, cmd, const char *);
VA_GET_ARG(ap, fmt, const char *);
zwarning(cmd, fmt, ap);
va_end(ap);
}
/**/
mod_export void
zwarn(VA_ALIST1(const char *fmt))
VA_DCL
{
va_list ap;
VA_DEF_ARG(const char *fmt);
if (errflag || noerrs)
return;
VA_START(ap, fmt);
VA_GET_ARG(ap, fmt, const char *);
zwarning(NULL, fmt, ap);
va_end(ap);
}
/**/
mod_export void
zwarnnam(VA_ALIST2(const char *cmd, const char *fmt))
VA_DCL
{
va_list ap;
VA_DEF_ARG(const char *cmd);
VA_DEF_ARG(const char *fmt);
if (errflag || noerrs)
return;
VA_START(ap, fmt);
VA_GET_ARG(ap, cmd, const char *);
VA_GET_ARG(ap, fmt, const char *);
zwarning(cmd, fmt, ap);
va_end(ap);
}
#ifdef DEBUG
/**/
mod_export void
dputs(VA_ALIST1(const char *message))
VA_DCL
{
char *filename;
FILE *file;
va_list ap;
VA_DEF_ARG(const char *message);
VA_START(ap, message);
VA_GET_ARG(ap, message, const char *);
if ((filename = getsparam_u("ZSH_DEBUG_LOG")) != NULL &&
(file = fopen(filename, "a")) != NULL) {
zerrmsg(file, message, ap);
fclose(file);
} else
zerrmsg(stderr, message, ap);
va_end(ap);
}
#endif /* DEBUG */
#ifdef __CYGWIN__
/*
* This works around an occasional problem with dllwrap on Cygwin, seen
* on at least two installations. It fails to find the last symbol
* exported in alphabetical order (in our case zwarnnam). Until this is
* properly categorised and fixed we add a dummy symbol at the end.
*/
mod_export void
zz_plural_z_alpha(void)
{
}
#endif
/**/
void
zerrmsg(FILE *file, const char *fmt, va_list ap)
{
const char *str;
int num;
#ifdef DEBUG
long lnum;
#endif
#ifdef HAVE_STRERROR_R
#define ERRBUFSIZE (80)
int olderrno;
char errbuf[ERRBUFSIZE];
#endif
char *errmsg;
if ((unset(SHINSTDIN) || locallevel) && lineno) {
#if defined(ZLONG_IS_LONG_LONG) && defined(PRINTF_HAS_LLD)
fprintf(file, "%lld: ", lineno);
#else
fprintf(file, "%ld: ", (long)lineno);
#endif
} else
fputc((unsigned char)' ', file);
while (*fmt)
if (*fmt == '%') {
fmt++;
switch (*fmt++) {
case 's':
str = va_arg(ap, const char *);
nicezputs(str, file);
break;
case 'l': {
char *s;
str = va_arg(ap, const char *);
num = va_arg(ap, int);
num = metalen(str, num);
s = zhalloc(num + 1);
memcpy(s, str, num);
s[num] = '\0';
nicezputs(s, file);
break;
}
#ifdef DEBUG
case 'L':
lnum = va_arg(ap, long);
fprintf(file, "%ld", lnum);
break;
#endif
case 'd':
num = va_arg(ap, int);
fprintf(file, "%d", num);
break;
case '%':
putc('%', file);
break;
case 'c':
num = va_arg(ap, int);
#ifdef MULTIBYTE_SUPPORT
mb_charinit();
zputs(wcs_nicechar(num, NULL, NULL), file);
#else
zputs(nicechar(num), file);
#endif
break;
case 'e':
/* print the corresponding message for this errno */
num = va_arg(ap, int);
if (num == EINTR) {
fputs("interrupt\n", file);
errflag |= ERRFLAG_ERROR;
return;
}
errmsg = strerror(num);
/* If the message is not about I/O problems, it looks better *
* if we uncapitalize the first letter of the message */
if (num == EIO)
fputs(errmsg, file);
else {
fputc(tulower(errmsg[0]), file);
fputs(errmsg + 1, file);
}
break;
/* When adding format codes, update the comment above zwarning(). */
}
} else {
putc(*fmt == Meta ? *++fmt ^ 32 : *fmt, file);
fmt++;
}
putc('\n', file);
fflush(file);
}
/*
* Wrapper for setupterm() and del_curterm().
* These are called from terminfo.c and termcap.c.
*/
static int term_count; /* reference count of cur_term */
/**/
mod_export void
zsetupterm(void)
{
#ifdef HAVE_SETUPTERM
int errret;
DPUTS(term_count < 0 || (term_count > 0 && !cur_term),
"inconsistent term_count and/or cur_term");
/*
* Just because we can't set up the terminal doesn't
* mean the module hasn't booted---TERM may change,
* and it should be handled dynamically---so ignore errors here.
*/
if (term_count++ == 0)
(void)setupterm((char *)0, 1, &errret);
#endif
}
/**/
mod_export void
zdeleteterm(void)
{
#ifdef HAVE_SETUPTERM
DPUTS(term_count < 1 || !cur_term,
"inconsistent term_count and/or cur_term");
if (--term_count == 0)
del_curterm(cur_term);
#endif
}
/* Output a single character, for the termcap routines. *
* This is used instead of putchar since it can be a macro. */
/**/
mod_export int
putraw(int c)
{
putc(c, stdout);
return 0;
}
/* Output a single character, for the termcap routines. */
/**/
mod_export int
putshout(int c)
{
putc(c, shout);
return 0;
}
#ifdef MULTIBYTE_SUPPORT
/*
* Turn a character into a visible representation thereof. The visible
* string is put together in a static buffer, and this function returns
* a pointer to it. Printable characters stand for themselves, DEL is
* represented as "^?", newline and tab are represented as "\n" and
* "\t", and normal control characters are represented in "^C" form.
* Characters with bit 7 set, if unprintable, are represented as "\M-"
* followed by the visible representation of the character with bit 7
* stripped off. Tokens are interpreted, rather than being treated as
* literal characters.
*
* Note that the returned string is metafied, so that it must be
* treated like any other zsh internal string (and not, for example,
* output directly).
*
* This function is used even if MULTIBYTE_SUPPORT is defined: we
* use it as a fallback in case we couldn't identify a wide character
* in a multibyte string.
*/
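/* Illustrative examples (assumed, with PRINT_EIGHT_BIT unset): under the
 * rules above, 0x01 becomes "^A" (or "\C-A" when quotable), 0x7f becomes
 * "^?", '\n' becomes "\n", and the unprintable byte 0x81 becomes "\M-^A". */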
/**/
mod_export char *
nicechar_sel(int c, int quotable)
{
static char buf[10];
char *s = buf;
c &= 0xff;
if (ZISPRINT(c))
goto done;
if (c & 0x80) {
if (isset(PRINTEIGHTBIT))
goto done;
*s++ = '\\';
*s++ = 'M';
*s++ = '-';
c &= 0x7f;
if(ZISPRINT(c))
goto done;
}
if (c == 0x7f) {
if (quotable) {
*s++ = '\\';
*s++ = 'C';
*s++ = '-';
} else
*s++ = '^';
c = '?';
} else if (c == '\n') {
*s++ = '\\';
c = 'n';
} else if (c == '\t') {
*s++ = '\\';
c = 't';
} else if (c < 0x20) {
if (quotable) {
*s++ = '\\';
*s++ = 'C';
*s++ = '-';
} else
*s++ = '^';
c += 0x40;
}
done:
/*
* The resulting string is still metafied, so check if
* we are returning a character in the range that needs metafication.
* This can't happen if the character is printed "nicely", so
* this results in a maximum of two bytes total (plus the null).
*/
if (imeta(c)) {
*s++ = Meta;
*s++ = c ^ 32;
} else
*s++ = c;
*s = 0;
return buf;
}
/**/
mod_export char *
nicechar(int c)
{
return nicechar_sel(c, 0);
}
#else /* MULTIBYTE_SUPPORT */
/**/
mod_export char *
nicechar(int c)
{
static char buf[10];
char *s = buf;
c &= 0xff;
if (ZISPRINT(c))
goto done;
if (c & 0x80) {
if (isset(PRINTEIGHTBIT))
goto done;
*s++ = '\\';
*s++ = 'M';
*s++ = '-';
c &= 0x7f;
if(ZISPRINT(c))
goto done;
}
if (c == 0x7f) {
*s++ = '\\';
*s++ = 'C';
*s++ = '-';
c = '?';
} else if (c == '\n') {
*s++ = '\\';
c = 'n';
} else if (c == '\t') {
*s++ = '\\';
c = 't';
} else if (c < 0x20) {
*s++ = '\\';
*s++ = 'C';
*s++ = '-';
c += 0x40;
}
done:
/*
* The resulting string is still metafied, so check if
* we are returning a character in the range that needs metafication.
* This can't happen if the character is printed "nicely", so
* this results in a maximum of two bytes total (plus the null).
*/
if (imeta(c)) {
*s++ = Meta;
*s++ = c ^ 32;
} else
*s++ = c;
*s = 0;
return buf;
}
#endif /* MULTIBYTE_SUPPORT */
/*
* Return 1 if nicechar() would reformat this character.
*/
/**/
mod_export int
is_nicechar(int c)
{
c &= 0xff;
if (ZISPRINT(c))
return 0;
if (c & 0x80)
return !isset(PRINTEIGHTBIT);
return (c == 0x7f || c == '\n' || c == '\t' || c < 0x20);
}
/**/
#ifdef MULTIBYTE_SUPPORT
static mbstate_t mb_shiftstate;
/*
* Initialise multibyte state: called before a sequence of
* wcs_nicechar(), mb_metacharlenconv(), or
* mb_charlenconv().
*/
/**/
mod_export void
mb_charinit(void)
{
memset(&mb_shiftstate, 0, sizeof(mb_shiftstate));
}
/*
* The number of bytes we need to allocate for a "nice" representation
* of a multibyte character.
*
* We double MB_CUR_MAX to take account of the fact that
* we may need to metafy. In fact the representation probably
* doesn't allow every character to be in the meta range, but
* we don't need to be too pedantic.
*
* The 12 is for the output of a UCS-4 code; we don't actually
* need this at the same time as MB_CUR_MAX, but again it's
* not worth calculating more exactly.
*/
#define NICECHAR_MAX (12 + 2*MB_CUR_MAX)
/*
* Input a wide character. Output a printable representation,
* which is a metafied multibyte string. With widthp return
* the printing width.
*
* swide, if non-NULL, is used to help the completion code, which needs
* to know the printing width of each part of the representation.
* *swide is set to the part of the returned string where the wide
* character starts. Any string up to that point is ASCII characters,
* so the width of it is (*swide - <return_value>). Anything left is
* a single wide character corresponding to the remaining width.
* Either the initial ASCII part or the wide character part may be empty
* (but not both). (Note the complication that the wide character
* part may contain metafied characters.)
*
* The caller needs to call mb_charinit() before the first call, to
* set up the multibyte shift state for a range of characters.
*/
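/* Usage sketch (illustrative): a caller printing a wide character while
 * tracking the cursor column might do, roughly,
 *
 *     size_t width;
 *     char *nice = wcs_nicechar(wc, &width, NULL);
 *     zputs(nice, stdout);
 *     column += width;
 *
 * remembering that the returned string is metafied, so it should go through
 * zputs() or similar rather than being written out directly. */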
/**/
mod_export char *
wcs_nicechar_sel(wchar_t c, size_t *widthp, char **swidep, int quotable)
{
static char *buf;
static int bufalloc = 0, newalloc;
char *s, *mbptr;
int ret = 0;
VARARR(char, mbstr, MB_CUR_MAX);
/*
* We want buf to persist beyond the return. MB_CUR_MAX and hence
* NICECHAR_MAX may not be constant, so we have to allocate this at
* run time. (We could probably get away with just allocating a
* large buffer, in practice.) For efficiency, only reallocate if
* we really need to, since this function will be called frequently.
*/
newalloc = NICECHAR_MAX;
if (bufalloc != newalloc)
{
bufalloc = newalloc;
buf = (char *)zrealloc(buf, bufalloc);
}
s = buf;
if (!WC_ISPRINT(c) && (c < 0x80 || !isset(PRINTEIGHTBIT))) {
if (c == 0x7f) {
if (quotable) {
*s++ = '\\';
*s++ = 'C';
*s++ = '-';
} else
*s++ = '^';
c = '?';
} else if (c == L'\n') {
*s++ = '\\';
c = 'n';
} else if (c == L'\t') {
*s++ = '\\';
c = 't';
} else if (c < 0x20) {
if (quotable) {
*s++ = '\\';
*s++ = 'C';
*s++ = '-';
} else
*s++ = '^';
c += 0x40;
} else if (c >= 0x80) {
ret = -1;
}
}
if (ret != -1)
ret = wcrtomb(mbstr, c, &mb_shiftstate);
if (ret == -1) {
memset(&mb_shiftstate, 0, sizeof(mb_shiftstate));
/*
* Can't or don't want to convert character: use UCS-2 or
* UCS-4 code in print escape format.
*
* This comparison fails and generates a compiler warning
* if wchar_t is 16 bits, but the code is still correct.
*/
if (c >= 0x10000) {
sprintf(buf, "\\U%.8x", (unsigned int)c);
if (widthp)
*widthp = 10;
} else if (c >= 0x100) {
sprintf(buf, "\\u%.4x", (unsigned int)c);
if (widthp)
*widthp = 6;
} else {
strcpy(buf, nicechar((int)c));
/*
* There may be metafied characters from nicechar(),
* so compute width and end position independently.
*/
if (widthp)
*widthp = ztrlen(buf);
if (swidep)
*swidep = buf + strlen(buf);
return buf;
}
if (swidep)
*swidep = widthp ? buf + *widthp : buf;
return buf;
}
if (widthp) {
int wcw = WCWIDTH(c);
*widthp = (s - buf);
if (wcw >= 0)
*widthp += wcw;
else
(*widthp)++;
}
if (swidep)
*swidep = s;
for (mbptr = mbstr; ret; s++, mbptr++, ret--) {
DPUTS(s >= buf + NICECHAR_MAX,
"BUG: buffer too small in wcs_nicechar");
if (imeta(*mbptr)) {
*s++ = Meta;
DPUTS(s >= buf + NICECHAR_MAX,
"BUG: buffer too small for metafied char in wcs_nicechar");
*s = *mbptr ^ 32;
} else {
*s = *mbptr;
}
}
*s = 0;
return buf;
}
/**/
mod_export char *
wcs_nicechar(wchar_t c, size_t *widthp, char **swidep)
{
return wcs_nicechar_sel(c, widthp, swidep, 0);
}
/*
* Return 1 if wcs_nicechar() would reformat this character for display.
*/
/**/
mod_export int is_wcs_nicechar(wchar_t c)
{
if (!WC_ISPRINT(c) && (c < 0x80 || !isset(PRINTEIGHTBIT))) {
if (c == 0x7f || c == L'\n' || c == L'\t' || c < 0x20)
return 1;
if (c >= 0x80) {
return (c >= 0x100);
}
}
return 0;
}
/**/
mod_export int
zwcwidth(wint_t wc)
{
int wcw;
/* assume a single-byte character if not valid */
if (wc == WEOF || unset(MULTIBYTE))
return 1;
wcw = WCWIDTH(wc);
/* if not printable, assume width 1 */
if (wcw < 0)
return 1;
return wcw;
}
/**/
#endif /* MULTIBYTE_SUPPORT */
/*
* Search the path for prog and return the file name.
* The returned value is unmetafied and in the unmeta storage
* area (N.B. should be duplicated if not used immediately and not
* equal to *namep).
*
* If namep is not NULL, *namep is set to the metafied programme
* name, which is in heap storage.
*/
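/* Illustrative call: pathprog("ls", NULL) walks the shell's $path and might
 * return "/bin/ls" (unmetafied, in the unmeta area), or NULL if no matching
 * non-directory file is found in any path component. */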
/**/
char *
pathprog(char *prog, char **namep)
{
char **pp, ppmaxlen = 0, *buf, *funmeta;
struct stat st;
for (pp = path; *pp; pp++)
{
int len = strlen(*pp);
if (len > ppmaxlen)
ppmaxlen = len;
}
buf = zhalloc(ppmaxlen + strlen(prog) + 2);
for (pp = path; *pp; pp++) {
sprintf(buf, "%s/%s", *pp, prog);
funmeta = unmeta(buf);
if (access(funmeta, F_OK) == 0 &&
stat(funmeta, &st) >= 0 &&
!S_ISDIR(st.st_mode)) {
if (namep)
*namep = buf;
return funmeta;
}
}
return NULL;
}
/* get a symlink-free pathname for s relative to PWD */
/**/
char *
findpwd(char *s)
{
char *t;
if (*s == '/')
return xsymlink(s, 0);
s = tricat((pwd[1]) ? pwd : "", "/", s);
t = xsymlink(s, 0);
zsfree(s);
return t;
}
/* Check whether a string contains the *
* name of the present directory. */
/**/
int
ispwd(char *s)
{
struct stat sbuf, tbuf;
/* POSIX: environment PWD must be absolute */
if (*s != '/')
return 0;
if (stat((s = unmeta(s)), &sbuf) == 0 && stat(".", &tbuf) == 0)
if (sbuf.st_dev == tbuf.st_dev && sbuf.st_ino == tbuf.st_ino) {
/* POSIX: No element of $PWD may be "." or ".." */
while (*s) {
if (s[0] == '.' &&
(!s[1] || s[1] == '/' ||
(s[1] == '.' && (!s[2] || s[2] == '/'))))
break;
while (*s++ != '/' && *s)
continue;
}
return !*s;
}
return 0;
}
static char xbuf[PATH_MAX*2+1];
/**/
static char **
slashsplit(char *s)
{
char *t, **r, **q;
int t0;
if (!*s)
return (char **) zshcalloc(sizeof(char *));
for (t = s, t0 = 0; *t; t++)
if (*t == '/')
t0++;
q = r = (char **) zalloc(sizeof(char *) * (t0 + 2));
while ((t = strchr(s, '/'))) {
*q++ = ztrduppfx(s, t - s);
while (*t == '/')
t++;
if (!*t) {
*q = NULL;
return r;
}
s = t;
}
*q++ = ztrdup(s);
*q = NULL;
return r;
}
/* expands symlinks and .. or . expressions */
/**/
static int
xsymlinks(char *s, int full)
{
char **pp, **opp;
char xbuf2[PATH_MAX*3+1], xbuf3[PATH_MAX*2+1];
int t0, ret = 0;
zulong xbuflen = strlen(xbuf), pplen;
opp = pp = slashsplit(s);
for (; xbuflen < sizeof(xbuf) && *pp && ret >= 0; pp++) {
if (!strcmp(*pp, "."))
continue;
if (!strcmp(*pp, "..")) {
char *p;
if (!strcmp(xbuf, "/"))
continue;
if (!*xbuf)
continue;
p = xbuf + xbuflen;
while (*--p != '/')
xbuflen--;
*p = '\0';
/* The \0 isn't included in the length */
xbuflen--;
continue;
}
/* Includes null byte. */
pplen = strlen(*pp) + 1;
if (xbuflen + pplen + 1 > sizeof(xbuf2)) {
*xbuf = 0;
ret = -1;
break;
}
memcpy(xbuf2, xbuf, xbuflen);
xbuf2[xbuflen] = '/';
memcpy(xbuf2 + xbuflen + 1, *pp, pplen);
t0 = readlink(unmeta(xbuf2), xbuf3, PATH_MAX);
if (t0 == -1) {
if ((xbuflen += pplen) < sizeof(xbuf)) {
strcat(xbuf, "/");
strcat(xbuf, *pp);
} else {
*xbuf = 0;
ret = -1;
break;
}
} else {
ret = 1;
metafy(xbuf3, t0, META_NOALLOC);
if (!full) {
/*
* If only one expansion requested, ensure the
* full path is in xbuf.
*/
zulong len = xbuflen;
if (*xbuf3 == '/')
strcpy(xbuf, xbuf3);
else if ((len += strlen(xbuf3) + 1) < sizeof(xbuf)) {
strcpy(xbuf + xbuflen, "/");
strcpy(xbuf + xbuflen + 1, xbuf3);
} else {
*xbuf = 0;
ret = -1;
break;
}
while (*++pp) {
zulong newlen = len + strlen(*pp) + 1;
if (newlen < sizeof(xbuf)) {
strcpy(xbuf + len, "/");
strcpy(xbuf + len + 1, *pp);
len = newlen;
} else {
*xbuf = 0;
ret = -1;
break;
}
}
/*
* No need to update xbuflen, we're finished
* the expansion (for now).
*/
break;
}
if (*xbuf3 == '/') {
strcpy(xbuf, "");
if (xsymlinks(xbuf3 + 1, 1) < 0)
ret = -1;
else
xbuflen = strlen(xbuf);
} else
if (xsymlinks(xbuf3, 1) < 0)
ret = -1;
else
xbuflen = strlen(xbuf);
}
}
freearray(opp);
return ret;
}
/*
* expand symlinks in s, and remove other weird things:
* note that this always expands symlinks.
*
* 'heap' indicates whether to malloc() or allocate on the heap.
*/
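/* Example (illustrative): if /tmp is a symlink to /private/tmp, then
 * xsymlink("/tmp/foo", 0) should return "/private/tmp/foo" in malloc()ed
 * storage, while passing heap = 1 returns the same string on the heap. */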
/**/
char *
xsymlink(char *s, int heap)
{
if (*s != '/')
return NULL;
*xbuf = '\0';
if (xsymlinks(s + 1, 1) < 0)
zwarn("path expansion failed, using root directory");
if (!*xbuf)
return heap ? dupstring("/") : ztrdup("/");
return heap ? dupstring(xbuf) : ztrdup(xbuf);
}
/**/
void
print_if_link(char *s, int all)
{
if (*s == '/') {
*xbuf = '\0';
if (all) {
char *start = s + 1;
char xbuflink[PATH_MAX+1];
for (;;) {
if (xsymlinks(start, 0) > 0) {
printf(" -> ");
zputs(*xbuf ? xbuf : "/", stdout);
if (!*xbuf)
break;
strcpy(xbuflink, xbuf);
start = xbuflink + 1;
*xbuf = '\0';
} else {
break;
}
}
} else {
if (xsymlinks(s + 1, 1) > 0)
printf(" -> "), zputs(*xbuf ? xbuf : "/", stdout);
}
}
}
/* print a directory */
/**/
void
fprintdir(char *s, FILE *f)
{
Nameddir d = finddir(s);
if (!d)
fputs(unmeta(s), f);
else {
putc('~', f);
fputs(unmeta(d->node.nam), f);
fputs(unmeta(s + strlen(d->dir)), f);
}
}
/*
* Substitute a directory using a name.
* If there is none, return the original argument.
*
* At this level all strings involved are metafied.
*/
/**/
char *
substnamedir(char *s)
{
Nameddir d = finddir(s);
if (!d)
return quotestring(s, QT_BACKSLASH);
return zhtricat("~", d->node.nam, quotestring(s + strlen(d->dir),
QT_BACKSLASH));
}
/* Returns the current username. It caches the username *
* and uid to try to avoid requerying the password files *
* or NIS/NIS+ database. */
/**/
uid_t cached_uid;
/**/
char *cached_username;
/**/
char *
get_username(void)
{
#ifdef HAVE_GETPWUID
struct passwd *pswd;
uid_t current_uid;
current_uid = getuid();
if (current_uid != cached_uid) {
cached_uid = current_uid;
zsfree(cached_username);
if ((pswd = getpwuid(current_uid)))
cached_username = ztrdup(pswd->pw_name);
else
cached_username = ztrdup("");
}
#else /* !HAVE_GETPWUID */
cached_uid = getuid();
#endif /* !HAVE_GETPWUID */
return cached_username;
}
/* static variables needed by finddir(). */
static char *finddir_full;
static Nameddir finddir_last;
static int finddir_best;
/* ScanFunc used by finddir(). */
/**/
static void
finddir_scan(HashNode hn, UNUSED(int flags))
{
Nameddir nd = (Nameddir) hn;
if(nd->diff > finddir_best && !dircmp(nd->dir, finddir_full)
&& !(nd->node.flags & ND_NOABBREV)) {
finddir_last=nd;
finddir_best=nd->diff;
}
}
/*
* See if a path has a named directory as its prefix.
* If passed a NULL argument, it will invalidate any
* cached information.
*
* s here is metafied.
*/
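/* Example (illustrative): after `hash -d src=/home/user/src', a call such as
 * finddir("/home/user/src/zsh") should return the entry for `src', which
 * fprintdir() below then renders as `~src/zsh'. */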
/**/
Nameddir
finddir(char *s)
{
static struct nameddir homenode = { {NULL, "", 0}, NULL, 0 };
static int ffsz;
char **ares;
int len;
/* Invalidate directory cache if argument is NULL. This is called *
* whenever a node is added to or removed from the hash table, and *
* whenever the value of $HOME changes. (On startup, too.) */
if (!s) {
homenode.dir = home ? home : "";
homenode.diff = home ? strlen(home) : 0;
if(homenode.diff==1)
homenode.diff = 0;
if(!finddir_full)
finddir_full = zalloc(ffsz = PATH_MAX+1);
finddir_full[0] = 0;
return finddir_last = NULL;
}
#if 0
/*
* It's not safe to use the cache while we have function
* transformations, and it's not clear it's worth the
* complexity of guessing here whether subst_string_by_hook
* is going to turn up the goods.
*/
if (!strcmp(s, finddir_full) && *finddir_full)
return finddir_last;
#endif
if ((int)strlen(s) >= ffsz) {
free(finddir_full);
finddir_full = zalloc(ffsz = strlen(s) * 2);
}
strcpy(finddir_full, s);
finddir_best=0;
finddir_last=NULL;
finddir_scan(&homenode.node, 0);
scanhashtable(nameddirtab, 0, 0, 0, finddir_scan, 0);
ares = subst_string_by_hook("zsh_directory_name", "d", finddir_full);
if (ares && arrlen_ge(ares, 2) &&
(len = (int)zstrtol(ares[1], NULL, 10)) > finddir_best) {
/* better duplicate this string since it's come from REPLY */
finddir_last = (Nameddir)hcalloc(sizeof(struct nameddir));
finddir_last->node.nam = zhtricat("[", dupstring(ares[0]), "]");
finddir_last->dir = dupstrpfx(finddir_full, len);
finddir_last->diff = len - strlen(finddir_last->node.nam);
finddir_best = len;
}
return finddir_last;
}
/* add a named directory */
/**/
mod_export void
adduserdir(char *s, char *t, int flags, int always)
{
Nameddir nd;
char *eptr;
/* We don't maintain a hash table in non-interactive shells. */
if (!interact)
return;
/* The ND_USERNAME flag means that this possible hash table *
* entry is derived from a passwd entry. Such entries are *
* subordinate to explicitly generated entries. */
if ((flags & ND_USERNAME) && nameddirtab->getnode2(nameddirtab, s))
return;
/* Normal parameter assignments generate calls to this function, *
* with always==0. Unless the AUTO_NAME_DIRS option is set, we *
* don't let such assignments actually create directory names. *
* Instead, a reference to the parameter as a directory name can *
* cause the actual creation of the hash table entry. */
if (!always && unset(AUTONAMEDIRS) &&
!nameddirtab->getnode2(nameddirtab, s))
return;
if (!t || *t != '/' || strlen(t) >= PATH_MAX) {
/* We can't use this value as a directory, so simply remove *
* the corresponding entry in the hash table, if any. */
HashNode hn = nameddirtab->removenode(nameddirtab, s);
if(hn)
nameddirtab->freenode(hn);
return;
}
/* add the name */
nd = (Nameddir) zshcalloc(sizeof *nd);
nd->node.flags = flags;
eptr = t + strlen(t);
while (eptr > t && eptr[-1] == '/')
eptr--;
if (eptr == t) {
/*
* Don't abbreviate multiple slashes at the start of a
* named directory, since these are sometimes used for
* special purposes.
*/
nd->dir = metafy(t, -1, META_DUP);
} else
nd->dir = metafy(t, eptr - t, META_DUP);
/* The variables PWD and OLDPWD are not to be displayed as ~PWD etc. */
if (!strcmp(s, "PWD") || !strcmp(s, "OLDPWD"))
nd->node.flags |= ND_NOABBREV;
nameddirtab->addnode(nameddirtab, metafy(s, -1, META_DUP), nd);
}
/* Get a named directory: this function can cause a directory name *
* to be added to the hash table, if it isn't there already. */
/**/
char *
getnameddir(char *name)
{
Param pm;
char *str;
Nameddir nd;
/* Check if it is already in the named directory table */
if ((nd = (Nameddir) nameddirtab->getnode(nameddirtab, name)))
return dupstring(nd->dir);
/* Check if there is a scalar parameter with this name whose value *
* begins with a `/'. If there is, add it to the hash table and *
* return the new value. */
if ((pm = (Param) paramtab->getnode(paramtab, name)) &&
(PM_TYPE(pm->node.flags) == PM_SCALAR) &&
(str = getsparam(name)) && *str == '/') {
pm->node.flags |= PM_NAMEDDIR;
adduserdir(name, str, 0, 1);
return str;
}
#ifdef HAVE_GETPWNAM
{
/* Retrieve an entry from the password table/database for this user. */
struct passwd *pw;
if ((pw = getpwnam(name))) {
char *dir = isset(CHASELINKS) ? xsymlink(pw->pw_dir, 0)
: ztrdup(pw->pw_dir);
if (dir) {
adduserdir(name, dir, ND_USERNAME, 1);
str = dupstring(dir);
zsfree(dir);
return str;
} else
return dupstring(pw->pw_dir);
}
}
#endif /* HAVE_GETPWNAM */
/* There are no more possible sources of directory names, so give up. */
return NULL;
}
/*
* Compare directories. Both are metafied.
*/
/**/
static int
dircmp(char *s, char *t)
{
if (s) {
for (; *s == *t; s++, t++)
if (!*s)
return 0;
if (!*s && *t == '/')
return 0;
}
return 1;
}
/*
* Extra functions to call before displaying the prompt.
* The data is a Prepromptfn.
*/
static LinkList prepromptfns;
/* Add a function to the list of pre-prompt functions. */
/**/
mod_export void
addprepromptfn(voidvoidfnptr_t func)
{
Prepromptfn ppdat = (Prepromptfn)zalloc(sizeof(struct prepromptfn));
ppdat->func = func;
if (!prepromptfns)
prepromptfns = znewlinklist();
zaddlinknode(prepromptfns, ppdat);
}
/* Remove a function from the list of pre-prompt functions. */
/**/
mod_export void
delprepromptfn(voidvoidfnptr_t func)
{
LinkNode ln;
for (ln = firstnode(prepromptfns); ln; ln = nextnode(ln)) {
Prepromptfn ppdat = (Prepromptfn)getdata(ln);
if (ppdat->func == func) {
(void)remnode(prepromptfns, ln);
zfree(ppdat, sizeof(struct prepromptfn));
return;
}
}
#ifdef DEBUG
dputs("BUG: failed to delete node from prepromptfns");
#endif
}
/*
* Functions to call at a particular time even if not at
* the prompt. This is handled by zle. The data is a
* Timedfn. The functions must be in time order, but this
* is enforced by addtimedfn().
*
* Note on debugging: the code in sched.c currently assumes it's
* the only user of timedfns for the purposes of checking whether
* there's a function on the list. If this becomes no longer the case,
* the DPUTS() tests in sched.c need rewriting.
*/
/**/
mod_export LinkList timedfns;
/* Add a function to the list of timed functions. */
/**/
mod_export void
addtimedfn(voidvoidfnptr_t func, time_t when)
{
Timedfn tfdat = (Timedfn)zalloc(sizeof(struct timedfn));
tfdat->func = func;
tfdat->when = when;
if (!timedfns) {
timedfns = znewlinklist();
zaddlinknode(timedfns, tfdat);
} else {
LinkNode ln = firstnode(timedfns);
/*
* Insert the new element in the linked list. We do
* rather too much work here since the standard
* functions insert after a given node, whereas we
* want to insert the new data before the first element
* with a greater time.
*
* In practice, the only use of timed functions is
* sched, which only adds the one function; so this
* whole branch isn't used beyond the following block.
*/
if (!ln) {
zaddlinknode(timedfns, tfdat);
return;
}
for (;;) {
Timedfn tfdat2;
LinkNode next = nextnode(ln);
if (!next) {
zaddlinknode(timedfns, tfdat);
return;
}
tfdat2 = (Timedfn)getdata(next);
if (when < tfdat2->when) {
zinsertlinknode(timedfns, ln, tfdat);
return;
}
ln = next;
}
}
}
/*
* Delete a function from the list of timed functions.
* Note that if the function appears multiple times only
* the first occurrence will be removed.
*
* Note also that when zle calls the function it does *not*
* automatically delete the entry from the list. That must
* be done by the function called. This is recommended as otherwise
* the function will keep being called immediately. (It just so
* happens this "feature" fits in well with the only current use
* of timed functions.)
*/
/**/
mod_export void
deltimedfn(voidvoidfnptr_t func)
{
LinkNode ln;
for (ln = firstnode(timedfns); ln; ln = nextnode(ln)) {
Timedfn ppdat = (Timedfn)getdata(ln);
if (ppdat->func == func) {
(void)remnode(timedfns, ln);
zfree(ppdat, sizeof(struct timedfn));
return;
}
}
#ifdef DEBUG
dputs("BUG: failed to delete node from timedfns");
#endif
}
/* the last time we checked mail */
/**/
time_t lastmailcheck;
/* the last time we checked the people in the WATCH variable */
/**/
time_t lastwatch;
/*
* Call a function given by "name" with optional arguments
* "lnklist". If these are present the first argument is the function name.
*
* If "arrayp" is not zero, we also look through
* the array "name"_functions and execute functions found there.
*
* If "retval" is not NULL, the return value of the first hook function to
* return non-zero is stored in *"retval". The return value is not otherwise
* available as the calling context is restored.
*
* Returns 0 if at least one function was called (regardless of that function's
* exit status), and 1 otherwise.
*/
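/* Illustrative call (this mirrors how precmd and periodic are run below):
 *
 *     callhookfunc("chpwd", NULL, 1, NULL);
 *
 * runs a `chpwd' function if one is defined and then every function named
 * in the chpwd_functions array, returning 0 if anything was called. */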
/**/
mod_export int
callhookfunc(char *name, LinkList lnklst, int arrayp, int *retval)
{
Shfunc shfunc;
/*
* Save stopmsg, since user doesn't get a chance to respond
* to a list of jobs generated in a hook.
*/
int osc = sfcontext, osm = stopmsg, stat = 1, ret = 0;
int old_incompfunc = incompfunc;
sfcontext = SFC_HOOK;
incompfunc = 0;
if ((shfunc = getshfunc(name))) {
ret = doshfunc(shfunc, lnklst, 1);
stat = 0;
}
if (arrayp) {
char **arrptr;
int namlen = strlen(name);
VARARR(char, arrnam, namlen + HOOK_SUFFIX_LEN);
memcpy(arrnam, name, namlen);
memcpy(arrnam + namlen, HOOK_SUFFIX, HOOK_SUFFIX_LEN);
if ((arrptr = getaparam(arrnam))) {
arrptr = arrdup(arrptr);
for (; *arrptr; arrptr++) {
if ((shfunc = getshfunc(*arrptr))) {
int newret = doshfunc(shfunc, lnklst, 1);
if (!ret)
ret = newret;
stat = 0;
}
}
}
}
sfcontext = osc;
stopmsg = osm;
incompfunc = old_incompfunc;
if (retval)
*retval = ret;
return stat;
}
/* do pre-prompt stuff */
/**/
void
preprompt(void)
{
static time_t lastperiodic;
time_t currentmailcheck;
LinkNode ln;
zlong period = getiparam("PERIOD");
zlong mailcheck = getiparam("MAILCHECK");
/*
* Handle any pending window size changes before we compute prompts,
* then block them again to avoid interrupts during prompt display.
*/
winch_unblock();
winch_block();
if (isset(PROMPTSP) && isset(PROMPTCR) && !use_exit_printed && shout) {
/* The PROMPT_SP heuristic will move the prompt down to a new line
* if there was any dangling output on the line (assuming the terminal
* has automatic margins, but we try even if hasam isn't set).
* Unfortunately it interacts badly with ZLE displaying message
* when ^D has been pressed. So just disable PROMPT_SP logic in
* this case */
char *eolmark = getsparam("PROMPT_EOL_MARK");
char *str;
int percents = opts[PROMPTPERCENT], w = 0;
if (!eolmark)
eolmark = "%B%S%#%s%b";
opts[PROMPTPERCENT] = 1;
str = promptexpand(eolmark, 1, NULL, NULL, NULL);
countprompt(str, &w, 0, -1);
opts[PROMPTPERCENT] = percents;
zputs(str, shout);
fprintf(shout, "%*s\r%*s\r", (int)zterm_columns - w - !hasxn,
"", w, "");
fflush(shout);
free(str);
}
/* If NOTIFY is not set, then check for completed *
* jobs before we print the prompt. */
if (unset(NOTIFY))
scanjobs();
if (errflag)
return;
/* If a shell function named "precmd" exists, *
* then execute it. */
callhookfunc("precmd", NULL, 1, NULL);
if (errflag)
return;
/* If 1) the parameter PERIOD exists, 2) a hook function for *
* "periodic" exists, 3) it's been greater than PERIOD since we *
* executed any such hook, then execute it now. */
if (period && ((zlong)time(NULL) > (zlong)lastperiodic + period) &&
!callhookfunc("periodic", NULL, 1, NULL))
lastperiodic = time(NULL);
if (errflag)
return;
/* If WATCH is set, then check for the *
* specified login/logout events. */
if (watch) {
if ((int) difftime(time(NULL), lastwatch) > getiparam("LOGCHECK")) {
dowatch();
lastwatch = time(NULL);
}
}
if (errflag)
return;
/* Check mail */
currentmailcheck = time(NULL);
if (mailcheck &&
(zlong) difftime(currentmailcheck, lastmailcheck) > mailcheck) {
char *mailfile;
if (mailpath && *mailpath && **mailpath)
checkmailpath(mailpath);
else {
queue_signals();
if ((mailfile = getsparam("MAIL")) && *mailfile) {
char *x[2];
x[0] = mailfile;
x[1] = NULL;
checkmailpath(x);
}
unqueue_signals();
}
lastmailcheck = currentmailcheck;
}
if (prepromptfns) {
for(ln = firstnode(prepromptfns); ln; ln = nextnode(ln)) {
Prepromptfn ppnode = (Prepromptfn)getdata(ln);
ppnode->func();
}
}
}
/**/
static void
checkmailpath(char **s)
{
struct stat st;
char *v, *u, c;
while (*s) {
for (v = *s; *v && *v != '?'; v++);
c = *v;
*v = '\0';
if (c != '?')
u = NULL;
else
u = v + 1;
if (**s == 0) {
*v = c;
zerr("empty MAILPATH component: %s", *s);
} else if (mailstat(unmeta(*s), &st) == -1) {
if (errno != ENOENT)
zerr("%e: %s", errno, *s);
} else if (S_ISDIR(st.st_mode)) {
LinkList l;
DIR *lock = opendir(unmeta(*s));
char buf[PATH_MAX * 2 + 1], **arr, **ap;
int buflen, ct = 1;
if (lock) {
char *fn;
pushheap();
l = newlinklist();
while ((fn = zreaddir(lock, 1)) && !errflag) {
if (u)
buflen = snprintf(buf, sizeof(buf), "%s/%s?%s", *s, fn, u);
else
buflen = snprintf(buf, sizeof(buf), "%s/%s", *s, fn);
if (buflen < 0 || buflen >= (int)sizeof(buf))
continue;
addlinknode(l, dupstring(buf));
ct++;
}
closedir(lock);
ap = arr = (char **) zhalloc(ct * sizeof(char *));
while ((*ap++ = (char *)ugetnode(l)));
checkmailpath(arr);
popheap();
}
} else if (shout) {
if (st.st_size && st.st_atime <= st.st_mtime &&
st.st_mtime >= lastmailcheck) {
if (!u) {
fprintf(shout, "You have new mail.\n");
fflush(shout);
} else {
char *usav;
int uusav = underscoreused;
usav = zalloc(underscoreused);
if (usav)
memcpy(usav, zunderscore, underscoreused);
setunderscore(*s);
u = dupstring(u);
if (!parsestr(&u)) {
singsub(&u);
zputs(u, shout);
fputc('\n', shout);
fflush(shout);
}
if (usav) {
setunderscore(usav);
zfree(usav, uusav);
}
}
}
if (isset(MAILWARNING) && st.st_atime > st.st_mtime &&
st.st_atime > lastmailcheck && st.st_size) {
fprintf(shout, "The mail in %s has been read.\n", unmeta(*s));
fflush(shout);
}
}
*v = c;
s++;
}
}
/* This prints the XTRACE prompt. */
/**/
FILE *xtrerr = 0;
/**/
void
printprompt4(void)
{
if (!xtrerr)
xtrerr = stderr;
if (prompt4) {
int l, t = opts[XTRACE];
char *s = dupstring(prompt4);
opts[XTRACE] = 0;
unmetafy(s, &l);
s = unmetafy(promptexpand(metafy(s, l, META_NOALLOC),
0, NULL, NULL, NULL), &l);
opts[XTRACE] = t;
fprintf(xtrerr, "%s", s);
free(s);
}
}
/**/
mod_export void
freestr(void *a)
{
zsfree(a);
}
/**/
mod_export void
gettyinfo(struct ttyinfo *ti)
{
if (SHTTY != -1) {
#ifdef HAVE_TERMIOS_H
# ifdef HAVE_TCGETATTR
if (tcgetattr(SHTTY, &ti->tio) == -1)
# else
if (ioctl(SHTTY, TCGETS, &ti->tio) == -1)
# endif
zerr("bad tcgets: %e", errno);
#else
# ifdef HAVE_TERMIO_H
ioctl(SHTTY, TCGETA, &ti->tio);
# else
ioctl(SHTTY, TIOCGETP, &ti->sgttyb);
ioctl(SHTTY, TIOCLGET, &ti->lmodes);
ioctl(SHTTY, TIOCGETC, &ti->tchars);
ioctl(SHTTY, TIOCGLTC, &ti->ltchars);
# endif
#endif
}
}
/**/
mod_export void
settyinfo(struct ttyinfo *ti)
{
if (SHTTY != -1) {
#ifdef HAVE_TERMIOS_H
# ifdef HAVE_TCGETATTR
# ifndef TCSADRAIN
# define TCSADRAIN 1 /* XXX Princeton's include files are screwed up */
# endif
while (tcsetattr(SHTTY, TCSADRAIN, &ti->tio) == -1 && errno == EINTR)
;
# else
while (ioctl(SHTTY, TCSETS, &ti->tio) == -1 && errno == EINTR)
;
# endif
/* zerr("settyinfo: %e",errno);*/
#else
# ifdef HAVE_TERMIO_H
ioctl(SHTTY, TCSETA, &ti->tio);
# else
ioctl(SHTTY, TIOCSETN, &ti->sgttyb);
ioctl(SHTTY, TIOCLSET, &ti->lmodes);
ioctl(SHTTY, TIOCSETC, &ti->tchars);
ioctl(SHTTY, TIOCSLTC, &ti->ltchars);
# endif
#endif
}
}
/* the default tty state */
/**/
mod_export struct ttyinfo shttyinfo;
/* != 0 if we need to call resetvideo() */
/**/
mod_export int resetneeded;
#ifdef TIOCGWINSZ
/* window size changed */
/**/
mod_export int winchanged;
#endif
static int
adjustlines(int signalled)
{
int oldlines = zterm_lines;
#ifdef TIOCGWINSZ
if (signalled || zterm_lines <= 0)
zterm_lines = shttyinfo.winsize.ws_row;
else
shttyinfo.winsize.ws_row = zterm_lines;
#endif /* TIOCGWINSZ */
if (zterm_lines <= 0) {
DPUTS(signalled && zterm_lines < 0,
"BUG: Impossible TIOCGWINSZ rows");
zterm_lines = tclines > 0 ? tclines : 24;
}
if (zterm_lines > 2)
termflags &= ~TERM_SHORT;
else
termflags |= TERM_SHORT;
return (zterm_lines != oldlines);
}
static int
adjustcolumns(int signalled)
{
int oldcolumns = zterm_columns;
#ifdef TIOCGWINSZ
if (signalled || zterm_columns <= 0)
zterm_columns = shttyinfo.winsize.ws_col;
else
shttyinfo.winsize.ws_col = zterm_columns;
#endif /* TIOCGWINSZ */
if (zterm_columns <= 0) {
DPUTS(signalled && zterm_columns < 0,
"BUG: Impossible TIOCGWINSZ cols");
zterm_columns = tccolumns > 0 ? tccolumns : 80;
}
if (zterm_columns > 2)
termflags &= ~TERM_NARROW;
else
termflags |= TERM_NARROW;
return (zterm_columns != oldcolumns);
}
/* check the size of the window and adjust if necessary. *
* The value of from: *
* 0: called from update_job or setupvals *
* 1: called from the SIGWINCH handler *
* 2: called from the LINES parameter callback *
* 3: called from the COLUMNS parameter callback */
/**/
void
adjustwinsize(int from)
{
static int getwinsz = 1;
#ifdef TIOCGWINSZ
int ttyrows = shttyinfo.winsize.ws_row;
int ttycols = shttyinfo.winsize.ws_col;
#endif
int resetzle = 0;
if (getwinsz || from == 1) {
#ifdef TIOCGWINSZ
if (SHTTY == -1)
return;
if (ioctl(SHTTY, TIOCGWINSZ, (char *)&shttyinfo.winsize) == 0) {
resetzle = (ttyrows != shttyinfo.winsize.ws_row ||
ttycols != shttyinfo.winsize.ws_col);
if (from == 0 && resetzle && ttyrows && ttycols)
from = 1; /* Signal missed while a job owned the tty? */
ttyrows = shttyinfo.winsize.ws_row;
ttycols = shttyinfo.winsize.ws_col;
} else {
/* Set to value from environment on failure */
shttyinfo.winsize.ws_row = zterm_lines;
shttyinfo.winsize.ws_col = zterm_columns;
resetzle = (from == 1);
}
#else
resetzle = from == 1;
#endif /* TIOCGWINSZ */
} /* else
return; */
switch (from) {
case 0:
case 1:
getwinsz = 0;
/* Calling setiparam() here calls this function recursively, but *
* because we've already called adjustlines() and adjustcolumns() *
* here, recursive calls are no-ops unless a signal intervenes. *
* The commented "else return;" above might be a safe shortcut, *
* but I'm concerned about what happens on race conditions; e.g., *
* suppose the user resizes his xterm during `eval $(resize)'? */
if (adjustlines(from) && zgetenv("LINES"))
setiparam("LINES", zterm_lines);
if (adjustcolumns(from) && zgetenv("COLUMNS"))
setiparam("COLUMNS", zterm_columns);
getwinsz = 1;
break;
case 2:
resetzle = adjustlines(0);
break;
case 3:
resetzle = adjustcolumns(0);
break;
}
#ifdef TIOCGWINSZ
if (interact && from >= 2 &&
(shttyinfo.winsize.ws_row != ttyrows ||
shttyinfo.winsize.ws_col != ttycols)) {
/* shttyinfo.winsize is already set up correctly */
/* ioctl(SHTTY, TIOCSWINSZ, (char *)&shttyinfo.winsize); */
}
#endif /* TIOCGWINSZ */
if (zleactive && resetzle) {
#ifdef TIOCGWINSZ
winchanged =
#endif /* TIOCGWINSZ */
resetneeded = 1;
zleentry(ZLE_CMD_RESET_PROMPT);
zleentry(ZLE_CMD_REFRESH);
}
}
/*
* Ensure the fdtable is large enough for fd, and that the
* maximum fd is set appropriately.
*/
static void
check_fd_table(int fd)
{
if (fd <= max_zsh_fd)
return;
if (fd >= fdtable_size) {
int old_size = fdtable_size;
while (fd >= fdtable_size)
fdtable = zrealloc(fdtable,
(fdtable_size *= 2)*sizeof(*fdtable));
memset(fdtable + old_size, 0,
(fdtable_size - old_size) * sizeof(*fdtable));
}
max_zsh_fd = fd;
}
/* Move a fd to a place >= 10 and mark the new fd in fdtable. If the fd *
* is already >= 10, it is not moved. If it is invalid, -1 is returned. */
/**/
mod_export int
movefd(int fd)
{
if(fd != -1 && fd < 10) {
#ifdef F_DUPFD
int fe = fcntl(fd, F_DUPFD, 10);
#else
int fe = movefd(dup(fd));
#endif
/*
* To close or not to close if fe is -1?
* If it is -1, we haven't moved the fd, so if we close
* it we lose it; but we're probably not going to be able
* to use it in situ anyway. So probably better to avoid a leak.
*/
zclose(fd);
fd = fe;
}
if(fd != -1) {
check_fd_table(fd);
fdtable[fd] = FDT_INTERNAL;
}
return fd;
}
/*
* Move fd x to y. If x == -1, fd y is closed.
* Returns y for success, -1 for failure.
*/
/**/
mod_export int
redup(int x, int y)
{
int ret = y;
if(x < 0)
zclose(y);
else if (x != y) {
if (dup2(x, y) == -1) {
ret = -1;
} else {
check_fd_table(y);
fdtable[y] = fdtable[x];
if (fdtable[y] == FDT_FLOCK || fdtable[y] == FDT_FLOCK_EXEC)
fdtable[y] = FDT_INTERNAL;
}
/*
* Closing any fd to the locked file releases the lock.
* This isn't expected to happen, it's here for completeness.
*/
if (fdtable[x] == FDT_FLOCK)
fdtable_flocks--;
zclose(x);
}
return ret;
}
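/*
 * Illustrative sketch, not part of the original source: a typical use of the
 * fd helpers above.  movefd() lifts a descriptor to 10 or above so it cannot
 * collide with user-visible redirections, redup() is a tracked dup2() that
 * also closes its source, and zclose() keeps fdtable consistent.  The helper
 * name and the idea of saving stderr are hypothetical.
 */
#if 0
static int
example_save_and_restore_stderr(void)
{
    int saved = movefd(dup(2));     /* private copy of stderr, now >= 10 */
    if (saved == -1)
        return -1;
    /* ... point fd 2 somewhere else temporarily ... */
    redup(saved, 2);                /* restore stderr; "saved" is closed for us */
    return 0;
}
#endif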
/*
* Add an fd opened within a module.
*
* fdt is the type of the fd; see the FDT_ definitions in zsh.h.
* The most likely uses are:
*
* FDT_EXTERNAL: the fd can be used within the shell for normal I/O but
* it will not be closed automatically or by normal shell syntax.
*
* FDT_MODULE: as FDT_EXTERNAL, but it can only be closed by the module
* (which should include zclose() as part of the sequence), not by
* the standard shell syntax for closing file descriptors.
*
* FDT_INTERNAL: fd is treated like others created by the shell for
* internal use; it can be closed and will be closed by the shell if it
* exec's or performs an exec with a fork optimised out.
*
* Safe if fd is -1 to indicate failure.
*/
/**/
mod_export void
addmodulefd(int fd, int fdt)
{
if (fd >= 0) {
check_fd_table(fd);
fdtable[fd] = fdt;
}
}
/**/
/*
* Indicate that an fd has a file lock; if cloexec is 1 it will be closed
* on exec.
* The fd should already be known to fdtable (e.g. by movefd).
* Note the fdtable code doesn't care what sort of lock
* is used; this simply prevents the main shell exiting prematurely
* when it holds a lock.
*/
/**/
mod_export void
addlockfd(int fd, int cloexec)
{
if (cloexec) {
if (fdtable[fd] != FDT_FLOCK)
fdtable_flocks++;
fdtable[fd] = FDT_FLOCK;
} else {
fdtable[fd] = FDT_FLOCK_EXEC;
}
}
/* Close the given fd, and clear it from fdtable. */
/**/
mod_export int
zclose(int fd)
{
if (fd >= 0) {
/*
* Careful: we allow closing of arbitrary fd's, beyond
* max_zsh_fd. In that case we don't try anything clever.
*/
if (fd <= max_zsh_fd) {
if (fdtable[fd] == FDT_FLOCK)
fdtable_flocks--;
fdtable[fd] = FDT_UNUSED;
while (max_zsh_fd > 0 && fdtable[max_zsh_fd] == FDT_UNUSED)
max_zsh_fd--;
if (fd == coprocin)
coprocin = -1;
if (fd == coprocout)
coprocout = -1;
}
return close(fd);
}
return -1;
}
/*
* Close an fd returning 0 if used for locking; return -1 if it isn't.
*/
/**/
mod_export int
zcloselockfd(int fd)
{
if (fd > max_zsh_fd)
return -1;
if (fdtable[fd] != FDT_FLOCK && fdtable[fd] != FDT_FLOCK_EXEC)
return -1;
zclose(fd);
return 0;
}
#ifdef HAVE__MKTEMP
extern char *_mktemp(char *);
#endif
/* Get a unique filename for use as a temporary file. If "prefix" is
* NULL, the name is relative to $TMPPREFIX; If it is non-NULL, the
* unique suffix includes a prefixed '.' for improved readability. If
* "use_heap" is true, we allocate the returned name on the heap.
* The string passed as "prefix" is expected to be metafied. */
/**/
mod_export char *
gettempname(const char *prefix, int use_heap)
{
char *ret, *suffix = prefix ? ".XXXXXX" : "XXXXXX";
queue_signals();
if (!prefix && !(prefix = getsparam("TMPPREFIX")))
prefix = DEFAULT_TMPPREFIX;
if (use_heap)
ret = dyncat(unmeta(prefix), suffix);
else
ret = bicat(unmeta(prefix), suffix);
#ifdef HAVE__MKTEMP
/* Zsh uses mktemp() safely, so silence the warnings */
ret = (char *) _mktemp(ret);
#else
ret = (char *) mktemp(ret);
#endif
unqueue_signals();
return ret;
}
/* The gettempfile() "prefix" is expected to be metafied, see hist.c
* and gettempname(). */
/**/
mod_export int
gettempfile(const char *prefix, int use_heap, char **tempname)
{
char *fn;
int fd;
mode_t old_umask;
#if HAVE_MKSTEMP
char *suffix = prefix ? ".XXXXXX" : "XXXXXX";
queue_signals();
old_umask = umask(0177);
if (!prefix && !(prefix = getsparam("TMPPREFIX")))
prefix = DEFAULT_TMPPREFIX;
if (use_heap)
fn = dyncat(unmeta(prefix), suffix);
else
fn = bicat(unmeta(prefix), suffix);
fd = mkstemp(fn);
if (fd < 0) {
if (!use_heap)
free(fn);
fn = NULL;
}
#else
int failures = 0;
queue_signals();
old_umask = umask(0177);
do {
if (!(fn = gettempname(prefix, use_heap))) {
fd = -1;
break;
}
if ((fd = open(fn, O_RDWR | O_CREAT | O_EXCL, 0600)) >= 0)
break;
if (!use_heap)
free(fn);
fn = NULL;
} while (errno == EEXIST && ++failures < 16);
#endif
*tempname = fn;
umask(old_umask);
unqueue_signals();
return fd;
}
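/*
 * Illustrative sketch, not part of the original source: the expected calling
 * convention for gettempfile().  A NULL prefix means "use $TMPPREFIX"; with
 * use_heap = 0 the name comes back in permanently allocated memory that the
 * caller must free.  The helper name and the clean-up policy shown here are
 * hypothetical.
 */
#if 0
static int
example_scratch_file(const char *data, size_t len)
{
    char *name = NULL;
    int fd = gettempfile(NULL, 0, &name);   /* opened 0600, exclusively */
    if (fd < 0)
        return -1;
    write_loop(fd, data, len);
    zclose(fd);
    unlink(name);       /* hypothetical: this caller doesn't keep the file */
    free(name);
    return 0;
}
#endif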
/* Check if a string contains a token */
/**/
mod_export int
has_token(const char *s)
{
while(*s)
if(itok(*s++))
return 1;
return 0;
}
/* Delete a character in a string */
/**/
mod_export void
chuck(char *str)
{
while ((str[0] = str[1]))
str++;
}
/**/
mod_export int
tulower(int c)
{
c &= 0xff;
return (isupper(c) ? tolower(c) : c);
}
/**/
mod_export int
tuupper(int c)
{
c &= 0xff;
return (islower(c) ? toupper(c) : c);
}
/* copy len chars from t into s, and null terminate */
/**/
void
ztrncpy(char *s, char *t, int len)
{
while (len--)
*s++ = *t++;
*s = '\0';
}
/* copy t into *s and update s */
/**/
mod_export void
strucpy(char **s, char *t)
{
char *u = *s;
while ((*u++ = *t++));
*s = u - 1;
}
/**/
mod_export void
struncpy(char **s, char *t, int n)
{
char *u = *s;
while (n-- && (*u = *t++))
u++;
*s = u;
if (n > 0) /* just one null-byte will do, unlike strncpy(3) */
*u = '\0';
}
/* Return the number of elements in an array of pointers. *
* It doesn't count the NULL pointer at the end. */
/**/
mod_export int
arrlen(char **s)
{
int count;
for (count = 0; *s; s++, count++);
return count;
}
/* Return TRUE iff arrlen(s) >= lower_bound, but more efficiently. */
/**/
mod_export char
arrlen_ge(char **s, unsigned lower_bound)
{
while (lower_bound--)
if (!*s++)
return 0 /* FALSE */;
return 1 /* TRUE */;
}
/* Return TRUE iff arrlen(s) > lower_bound, but more efficiently. */
/**/
mod_export char
arrlen_gt(char **s, unsigned lower_bound)
{
return arrlen_ge(s, 1+lower_bound);
}
/* Return TRUE iff arrlen(s) <= upper_bound, but more efficiently. */
/**/
mod_export char
arrlen_le(char **s, unsigned upper_bound)
{
return arrlen_lt(s, 1+upper_bound);
}
/* Return TRUE iff arrlen(s) < upper_bound, but more efficiently. */
/**/
mod_export char
arrlen_lt(char **s, unsigned upper_bound)
{
return !arrlen_ge(s, upper_bound);
}
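/*
 * Illustrative sketch, not part of the original source: why the arrlen_*()
 * predicates exist.  Checking "at least two elements" this way inspects at
 * most two pointers, whereas arrlen() always walks to the terminating NULL.
 */
#if 0
static int
example_has_two_elements(char **arr)
{
    return arrlen_ge(arr, 2);   /* same truth value as (arrlen(arr) >= 2) */
}
#endif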
/* Skip over a balanced pair of parenthesis. */
/**/
mod_export int
skipparens(char inpar, char outpar, char **s)
{
int level;
if (**s != inpar)
return -1;
for (level = 1; *++*s && level;)
if (**s == inpar)
++level;
else if (**s == outpar)
--level;
return level;
}
/**/
mod_export zlong
zstrtol(const char *s, char **t, int base)
{
return zstrtol_underscore(s, t, base, 0);
}
/* Convert string to zlong (see zsh.h). This function (without the z) *
* is contained in the ANSI standard C library, but a lot of them seem *
* to be broken. */
/**/
mod_export zlong
zstrtol_underscore(const char *s, char **t, int base, int underscore)
{
const char *inp, *trunc = NULL;
zulong calc = 0, newcalc = 0;
int neg;
while (inblank(*s))
s++;
if ((neg = IS_DASH(*s)))
s++;
else if (*s == '+')
s++;
if (!base) {
if (*s != '0')
base = 10;
else if (*++s == 'x' || *s == 'X')
base = 16, s++;
else if (*s == 'b' || *s == 'B')
base = 2, s++;
else
base = 8;
}
inp = s;
if (base < 2 || base > 36) {
zerr("invalid base (must be 2 to 36 inclusive): %d", base);
return (zlong)0;
} else if (base <= 10) {
for (; (*s >= '0' && *s < ('0' + base)) ||
(underscore && *s == '_'); s++) {
if (trunc || *s == '_')
continue;
newcalc = calc * base + *s - '0';
if (newcalc < calc)
{
trunc = s;
continue;
}
calc = newcalc;
}
} else {
for (; idigit(*s) || (*s >= 'a' && *s < ('a' + base - 10))
|| (*s >= 'A' && *s < ('A' + base - 10))
|| (underscore && *s == '_'); s++) {
if (trunc || *s == '_')
continue;
newcalc = calc*base + (idigit(*s) ? (*s - '0') : (*s & 0x1f) + 9);
if (newcalc < calc)
{
trunc = s;
continue;
}
calc = newcalc;
}
}
/*
* Special case: check for a number that was just too long for
* signed notation.
* Extra special case: the lowest negative number would trigger
* the first test, but is actually representable correctly.
* This is a 1 in the top bit, all others zero, so test for
* that explicitly.
*/
if (!trunc && (zlong)calc < 0 &&
(!neg || calc & ~((zulong)1 << (8*sizeof(zulong)-1))))
{
trunc = s - 1;
calc /= base;
}
if (trunc)
zwarn("number truncated after %d digits: %s", (int)(trunc - inp), inp);
if (t)
*t = (char *)s;
return neg ? -(zlong)calc : (zlong)calc;
}
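/*
 * Illustrative sketch, not part of the original source: what zstrtol() does
 * with base 0.  The radix is inferred from the prefix exactly as in the code
 * above (0x/0X hex, 0b/0B binary, leading 0 octal, otherwise decimal), and
 * overflow produces a "number truncated" warning instead of silently
 * wrapping.
 */
#if 0
static void
example_zstrtol(void)
{
    char *rest;
    zlong a = zstrtol("0x1f", &rest, 0);    /* 31, rest -> "" */
    zlong b = zstrtol("0b101", &rest, 0);   /* 5 */
    zlong c = zstrtol("17", &rest, 8);      /* 15 */
    (void)a; (void)b; (void)c;
}
#endif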
/*
* If s represents a complete unsigned integer (and nothing else)
* return 1 and set retval to the value. Otherwise return 0.
*
* Underscores are always allowed.
*
* Sensitive to OCTAL_ZEROES.
*/
/**/
mod_export int
zstrtoul_underscore(const char *s, zulong *retval)
{
zulong calc = 0, newcalc = 0, base;
if (*s == '+')
s++;
if (*s != '0')
base = 10;
else if (*++s == 'x' || *s == 'X')
base = 16, s++;
else if (*s == 'b' || *s == 'B')
base = 2, s++;
else
base = isset(OCTALZEROES) ? 8 : 10;
if (base <= 10) {
for (; (*s >= '0' && *s < ('0' + base)) ||
*s == '_'; s++) {
if (*s == '_')
continue;
newcalc = calc * base + *s - '0';
if (newcalc < calc)
{
return 0;
}
calc = newcalc;
}
} else {
for (; idigit(*s) || (*s >= 'a' && *s < ('a' + base - 10))
|| (*s >= 'A' && *s < ('A' + base - 10))
|| *s == '_'; s++) {
if (*s == '_')
continue;
newcalc = calc*base + (idigit(*s) ? (*s - '0') : (*s & 0x1f) + 9);
if (newcalc < calc)
{
return 0;
}
calc = newcalc;
}
}
if (*s)
return 0;
*retval = calc;
return 1;
}
/**/
mod_export int
setblock_fd(int turnonblocking, int fd, long *modep)
{
#ifdef O_NDELAY
# ifdef O_NONBLOCK
# define NONBLOCK (O_NDELAY|O_NONBLOCK)
# else /* !O_NONBLOCK */
# define NONBLOCK O_NDELAY
# endif /* !O_NONBLOCK */
#else /* !O_NDELAY */
# ifdef O_NONBLOCK
# define NONBLOCK O_NONBLOCK
# else /* !O_NONBLOCK */
# define NONBLOCK 0
# endif /* !O_NONBLOCK */
#endif /* !O_NDELAY */
#if NONBLOCK
struct stat st;
if (!fstat(fd, &st) && !S_ISREG(st.st_mode)) {
*modep = fcntl(fd, F_GETFL, 0);
if (*modep != -1) {
if (!turnonblocking) {
/* We want to know if blocking was off */
if ((*modep & NONBLOCK) ||
!fcntl(fd, F_SETFL, *modep | NONBLOCK))
return 1;
} else if ((*modep & NONBLOCK) &&
!fcntl(fd, F_SETFL, *modep & ~NONBLOCK)) {
/* Here we want to know if the state changed */
return 1;
}
}
} else
#endif /* NONBLOCK */
*modep = -1;
return 0;
#undef NONBLOCK
}
/**/
int
setblock_stdin(void)
{
long mode;
return setblock_fd(1, 0, &mode);
}
/*
* Check for pending input on fd. If polltty is set, we may need to
* use termio to look for input. As a final resort, go to non-blocking
* input and try to read a character, which in this case will be
* returned in *readchar.
*
* Note that apart from setting (and restoring) non-blocking input,
* this function does not change the input mode. The calling function
* should have set cbreak mode if necessary.
*
* fd may be -1 to sleep until the timeout in microseconds. This is a
* fallback for old systems that don't have nanosleep(). Some very old
* systems might not have select: get with it, daddy-o.
*/
/**/
mod_export int
read_poll(int fd, int *readchar, int polltty, zlong microseconds)
{
int ret = -1;
long mode = -1;
char c;
#ifdef HAVE_SELECT
fd_set foofd;
struct timeval expire_tv;
#else
#ifdef FIONREAD
int val;
#endif
#endif
#ifdef HAS_TIO
struct ttyinfo ti;
#endif
if (fd < 0 || (polltty && !isatty(fd)))
polltty = 0; /* no tty to poll */
#if defined(HAS_TIO) && !defined(__CYGWIN__)
/*
* Under Solaris, at least, reading from the terminal in non-canonical
* mode requires that we use the VMIN mechanism to poll. Any attempt
* to check any other way, or to set the terminal to non-blocking mode
* and poll that way, fails; it will just wait for canonical mode input.
* We should probably use this mechanism if the user has set non-canonical
* mode, in which case testing here for isatty() and ~ICANON would be
* better than testing whether bin_read() set it, but for now we've got
* enough problems.
*
* Under Cygwin, you won't be surprised to hear, this mechanism,
* although present, doesn't work, and we *have* to use ordinary
* non-blocking reads to find out if there is a character present
* in non-canonical mode.
*
* I am assuming Solaris is nearer the UNIX norm. This is not necessarily
* as plausible as it sounds, but it seems the right way to guess.
* pws 2000/06/26
*/
if (polltty && fd >= 0) {
gettyinfo(&ti);
if ((polltty = ti.tio.c_cc[VMIN])) {
ti.tio.c_cc[VMIN] = 0;
/* termios timeout is 10ths of a second */
ti.tio.c_cc[VTIME] = (int) (microseconds / (zlong)100000);
settyinfo(&ti);
}
}
#else
polltty = 0;
#endif
#ifdef HAVE_SELECT
expire_tv.tv_sec = (int) (microseconds / (zlong)1000000);
expire_tv.tv_usec = microseconds % (zlong)1000000;
FD_ZERO(&foofd);
if (fd > -1) {
FD_SET(fd, &foofd);
ret = select(fd+1, (SELECT_ARG_2_T) &foofd, NULL, NULL, &expire_tv);
} else
ret = select(0, NULL, NULL, NULL, &expire_tv);
#else
if (fd < 0) {
/* OK, can't do that. Just quietly sleep for a second. */
sleep(1);
return 1;
}
#ifdef FIONREAD
if (ioctl(fd, FIONREAD, (char *) &val) == 0)
ret = (val > 0);
#endif
#endif
if (fd >= 0 && ret < 0 && !errflag) {
/*
* Final attempt: set non-blocking read and try to read a character.
* Praise Bill, this works under Cygwin (nothing else seems to).
*/
if ((polltty || setblock_fd(0, fd, &mode)) && read(fd, &c, 1) > 0) {
*readchar = c;
ret = 1;
}
if (mode != -1)
fcntl(fd, F_SETFL, mode);
}
#ifdef HAS_TIO
if (polltty) {
ti.tio.c_cc[VMIN] = 1;
ti.tio.c_cc[VTIME] = 0;
settyinfo(&ti);
}
#endif
return (ret > 0);
}
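/*
 * Illustrative sketch, not part of the original source: polling the tty for
 * up to half a second.  Note that when read_poll() has to fall back to a
 * non-blocking read(), the byte it consumed is handed back in readchar and
 * must not be thrown away by the caller.
 */
#if 0
static int
example_key_waiting(void)
{
    int readchar = -1;
    int avail = read_poll(SHTTY, &readchar, 1, (zlong)500000);
    if (avail && readchar >= 0) {
        /* a byte was already consumed by the fallback read() */
    }
    return avail;
}
#endif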
/*
* Sleep for the given number of microseconds --- must be within
* range of a long at the moment, but this is only used for
* limited internal purposes.
*/
/**/
int
zsleep(long us)
{
#ifdef HAVE_NANOSLEEP
struct timespec sleeptime;
sleeptime.tv_sec = (time_t)us / (time_t)1000000;
sleeptime.tv_nsec = (us % 1000000L) * 1000L;
for (;;) {
struct timespec rem;
int ret = nanosleep(&sleeptime, &rem);
if (ret == 0)
return 1;
else if (errno != EINTR)
return 0;
sleeptime = rem;
}
#else
int dummy;
return read_poll(-1, &dummy, 0, us);
#endif
}
/**
* Sleep for time (fairly) randomly up to max_us microseconds.
* Don't let the wallclock time extend beyond end_time.
* Return 1 if that seemed to work, else 0.
*
* For best results max_us should be a multiple of 2**16 or large
* enough that it doesn't matter.
*/
/**/
int
zsleep_random(long max_us, time_t end_time)
{
long r;
time_t now = time(NULL);
/*
* Randomish backoff. Doesn't need to be fundamentally
* unpredictable, just probably unlike the value another
* exiting shell is using. On some systems the bottom 16
* bits aren't that random but the use here doesn't
* really care.
*/
r = (long)(rand() & 0xFFFF);
/*
* Turn this into a fraction of max_us. Again, this
* doesn't need to be particularly accurate and the base time
* is sufficient that we can do the division first and not
* worry about the range.
*/
r = (max_us >> 16) * r;
/*
* Don't sleep beyond timeout.
* Not that important as timeout is ridiculously long, but
* if there's an interface, interface to it...
*/
while (r && now + (time_t)(r / 1000000) > end_time)
r >>= 1;
if (r) /* pedantry */
return zsleep(r);
return 0;
}
/**/
int
checkrmall(char *s)
{
DIR *rmd;
int count = 0;
if (!shout)
return 1;
if (*s != '/') {
if (pwd[1])
s = zhtricat(pwd, "/", s);
else
s = dyncat("/", s);
}
const int max_count = 100;
if ((rmd = opendir(unmeta(s)))) {
int ignoredots = !isset(GLOBDOTS);
char *fname;
while ((fname = zreaddir(rmd, 1))) {
if (ignoredots && *fname == '.')
continue;
count++;
if (count > max_count)
break;
}
closedir(rmd);
}
if (count > max_count)
fprintf(shout, "zsh: sure you want to delete more than %d files in ",
max_count);
else if (count == 1)
fprintf(shout, "zsh: sure you want to delete the only file in ");
else if (count > 0)
fprintf(shout, "zsh: sure you want to delete all %d files in ",
count);
else {
/* We don't know how many files the glob will expand to; see 41707. */
fprintf(shout, "zsh: sure you want to delete all the files in ");
}
nicezputs(s, shout);
if(isset(RMSTARWAIT)) {
fputs("? (waiting ten seconds)", shout);
fflush(shout);
zbeep();
sleep(10);
fputc('\n', shout);
}
if (errflag)
return 0;
fputs(" [yn]? ", shout);
fflush(shout);
zbeep();
return (getquery("ny", 1) == 'y');
}
/**/
mod_export ssize_t
read_loop(int fd, char *buf, size_t len)
{
ssize_t got = len;
while (1) {
ssize_t ret = read(fd, buf, len);
if (ret == len)
break;
if (ret <= 0) {
if (ret < 0) {
if (errno == EINTR)
continue;
if (fd != SHTTY)
zwarn("read failed: %e", errno);
}
return ret;
}
buf += ret;
len -= ret;
}
return got;
}
/**/
mod_export ssize_t
write_loop(int fd, const char *buf, size_t len)
{
ssize_t wrote = len;
while (1) {
ssize_t ret = write(fd, buf, len);
if (ret == len)
break;
if (ret < 0) {
if (errno == EINTR)
continue;
if (fd != SHTTY)
zwarn("write failed: %e", errno);
return -1;
}
buf += ret;
len -= ret;
}
return wrote;
}
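/*
 * Illustrative sketch, not part of the original source: read_loop() returns
 * the requested length on success and the offending 0 or -1 from read()
 * otherwise, so reading a fixed-size record needs only one comparison.  The
 * record structure is hypothetical.
 */
#if 0
struct example_record { char bytes[64]; };
static int
example_read_record(int fd, struct example_record *rec)
{
    ssize_t r = read_loop(fd, (char *)rec, sizeof(*rec));
    return r == (ssize_t)sizeof(*rec);  /* 0 on EOF, error or short read */
}
#endif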
static int
read1char(int echo)
{
char c;
int q = queue_signal_level();
dont_queue_signals();
while (read(SHTTY, &c, 1) != 1) {
if (errno != EINTR || errflag || retflag || breaks || contflag) {
restore_queue_signals(q);
return -1;
}
}
restore_queue_signals(q);
if (echo)
write_loop(SHTTY, &c, 1);
return STOUC(c);
}
/**/
mod_export int
noquery(int purge)
{
int val = 0;
#ifdef FIONREAD
char c;
ioctl(SHTTY, FIONREAD, (char *)&val);
if (purge) {
for (; val; val--) {
if (read(SHTTY, &c, 1) != 1) {
/* Do nothing... */
}
}
}
#endif
return val;
}
/**/
int
getquery(char *valid_chars, int purge)
{
int c, d, nl = 0;
int isem = !strcmp(term, "emacs");
struct ttyinfo ti;
attachtty(mypgrp);
gettyinfo(&ti);
#ifdef HAS_TIO
ti.tio.c_lflag &= ~ECHO;
if (!isem) {
ti.tio.c_lflag &= ~ICANON;
ti.tio.c_cc[VMIN] = 1;
ti.tio.c_cc[VTIME] = 0;
}
#else
ti.sgttyb.sg_flags &= ~ECHO;
if (!isem)
ti.sgttyb.sg_flags |= CBREAK;
#endif
settyinfo(&ti);
if (noquery(purge)) {
if (!isem)
settyinfo(&shttyinfo);
write_loop(SHTTY, "n\n", 2);
return 'n';
}
while ((c = read1char(0)) >= 0) {
if (c == 'Y')
c = 'y';
else if (c == 'N')
c = 'n';
if (!valid_chars)
break;
if (c == '\n') {
c = *valid_chars;
nl = 1;
break;
}
if (strchr(valid_chars, c)) {
nl = 1;
break;
}
zbeep();
}
if (c >= 0) {
char buf = (char)c;
write_loop(SHTTY, &buf, 1);
}
if (nl)
write_loop(SHTTY, "\n", 1);
if (isem) {
if (c != '\n')
while ((d = read1char(1)) >= 0 && d != '\n');
} else {
if (c != '\n' && !valid_chars) {
#ifdef MULTIBYTE_SUPPORT
if (isset(MULTIBYTE) && c >= 0) {
/*
* No waiting for a valid character, and no draining;
* we should ensure we haven't stopped in the middle
* of a multibyte character.
*/
mbstate_t mbs;
char cc = (char)c;
memset(&mbs, 0, sizeof(mbs));
for (;;) {
size_t ret = mbrlen(&cc, 1, &mbs);
if (ret != MB_INCOMPLETE)
break;
c = read1char(1);
if (c < 0)
break;
cc = (char)c;
}
}
#endif
write_loop(SHTTY, "\n", 1);
}
}
settyinfo(&shttyinfo);
return c;
}
static int d;
static char *guess, *best;
static Patprog spckpat, spnamepat;
/**/
static void
spscan(HashNode hn, UNUSED(int scanflags))
{
int nd;
if (spckpat && pattry(spckpat, hn->nam))
return;
nd = spdist(hn->nam, guess, (int) strlen(guess) / 4 + 1);
if (nd <= d) {
best = hn->nam;
d = nd;
}
}
/* spellcheck a word */
/* fix s ; if hist is nonzero, fix the history list too */
/**/
mod_export void
spckword(char **s, int hist, int cmd, int ask)
{
char *t, *correct_ignore;
char ic = '\0';
int preflen = 0;
int autocd = cmd && isset(AUTOCD) && strcmp(*s, ".") && strcmp(*s, "..");
if ((histdone & HISTFLAG_NOEXEC) || **s == '-' || **s == '%')
return;
if (!strcmp(*s, "in"))
return;
if (!(*s)[0] || !(*s)[1])
return;
if (cmd) {
if (shfunctab->getnode(shfunctab, *s) ||
builtintab->getnode(builtintab, *s) ||
cmdnamtab->getnode(cmdnamtab, *s) ||
aliastab->getnode(aliastab, *s) ||
reswdtab->getnode(reswdtab, *s))
return;
else if (isset(HASHLISTALL)) {
cmdnamtab->filltable(cmdnamtab);
if (cmdnamtab->getnode(cmdnamtab, *s))
return;
}
}
t = *s;
if (*t == Tilde || *t == Equals || *t == String)
t++;
for (; *t; t++)
if (itok(*t))
return;
best = NULL;
for (t = *s; *t; t++)
if (*t == '/')
break;
if (**s == Tilde && !*t)
return;
if ((correct_ignore = getsparam("CORRECT_IGNORE")) != NULL) {
tokenize(correct_ignore = dupstring(correct_ignore));
remnulargs(correct_ignore);
spckpat = patcompile(correct_ignore, 0, NULL);
} else
spckpat = NULL;
if ((correct_ignore = getsparam("CORRECT_IGNORE_FILE")) != NULL) {
tokenize(correct_ignore = dupstring(correct_ignore));
remnulargs(correct_ignore);
spnamepat = patcompile(correct_ignore, 0, NULL);
} else
spnamepat = NULL;
if (**s == String && !*t) {
guess = *s + 1;
if (itype_end(guess, IIDENT, 1) == guess)
return;
ic = String;
d = 100;
scanhashtable(paramtab, 1, 0, 0, spscan, 0);
} else if (**s == Equals) {
if (*t)
return;
if (hashcmd(guess = *s + 1, pathchecked))
return;
d = 100;
ic = Equals;
scanhashtable(aliastab, 1, 0, 0, spscan, 0);
scanhashtable(cmdnamtab, 1, 0, 0, spscan, 0);
} else {
guess = *s;
if (*guess == Tilde || *guess == String) {
int ne;
ic = *guess;
if (!*++t)
return;
guess = dupstring(guess);
ne = noerrs;
noerrs = 2;
singsub(&guess);
noerrs = ne;
if (!guess)
return;
preflen = strlen(guess) - strlen(t);
}
if (access(unmeta(guess), F_OK) == 0)
return;
best = spname(guess);
if (!*t && cmd) {
if (hashcmd(guess, pathchecked))
return;
d = 100;
scanhashtable(reswdtab, 1, 0, 0, spscan, 0);
scanhashtable(aliastab, 1, 0, 0, spscan, 0);
scanhashtable(shfunctab, 1, 0, 0, spscan, 0);
scanhashtable(builtintab, 1, 0, 0, spscan, 0);
scanhashtable(cmdnamtab, 1, 0, 0, spscan, 0);
if (autocd) {
char **pp;
for (pp = cdpath; *pp; pp++) {
char bestcd[PATH_MAX + 1];
int thisdist;
/* Less than d here, instead of less than or equal *
* as used in spscan(), so that an autocd is chosen *
* only when it is better than anything so far, and *
* so we prefer directories earlier in the cdpath. */
if ((thisdist = mindist(*pp, *s, bestcd, 1)) < d) {
best = dupstring(bestcd);
d = thisdist;
}
}
}
}
}
if (errflag)
return;
if (best && (int)strlen(best) > 1 && strcmp(best, guess)) {
int x;
if (ic) {
char *u;
if (preflen) {
/* do not correct the result of an expansion */
if (strncmp(guess, best, preflen))
return;
/* replace the temporarily expanded prefix with the original */
u = (char *) zhalloc(t - *s + strlen(best + preflen) + 1);
strncpy(u, *s, t - *s);
strcpy(u + (t - *s), best + preflen);
} else {
u = (char *) zhalloc(strlen(best) + 2);
*u = '\0';
strcpy(u + 1, best);
}
best = u;
guess = *s;
*guess = *best = ztokens[ic - Pound];
}
if (ask) {
if (noquery(0)) {
x = 'n';
} else if (shout) {
char *pptbuf;
pptbuf = promptexpand(sprompt, 0, best, guess, NULL);
zputs(pptbuf, shout);
free(pptbuf);
fflush(shout);
zbeep();
x = getquery("nyae", 0);
if (cmd && x == 'n')
pathchecked = path;
} else
x = 'n';
} else
x = 'y';
if (x == 'y') {
*s = dupstring(best);
if (hist)
hwrep(best);
} else if (x == 'a') {
histdone |= HISTFLAG_NOEXEC;
} else if (x == 'e') {
histdone |= HISTFLAG_NOEXEC | HISTFLAG_RECALL;
}
if (ic)
**s = ic;
}
}
/*
* Helper for ztrftime. Called with a pointer to the length left
* in the buffer, and a new string length to decrement from that.
* Returns 0 if the new length fits, 1 otherwise. We assume a terminating
* NUL and return 1 if that doesn't fit.
*/
static int
ztrftimebuf(int *bufsizeptr, int decr)
{
if (*bufsizeptr <= decr)
return 1;
*bufsizeptr -= decr;
return 0;
}
/*
* Like the system function, this returns the number of characters
* copied, not including the terminating NUL. This may be zero
* if the string didn't fit.
*
* As an extension, try to detect an error in strftime --- typically
* not enough memory --- and return -1. Not guaranteed to be portable,
* since the strftime() interface doesn't make any guarantees about
* the state of the buffer if it returns zero.
*
* fmt is metafied, but we need to unmetafy it on the fly to
* pass into strftime / combine with the output from strftime.
* The return value in buf is not metafied.
*/
/**/
mod_export int
ztrftime(char *buf, int bufsize, char *fmt, struct tm *tm, long nsec)
{
int hr12;
#ifdef HAVE_STRFTIME
int decr;
char *fmtstart;
#else
static char *astr[] =
{"Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"};
static char *estr[] =
{"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul",
"Aug", "Sep", "Oct", "Nov", "Dec"};
#endif
char *origbuf = buf;
while (*fmt) {
if (*fmt == Meta) {
int chr = fmt[1] ^ 32;
if (ztrftimebuf(&bufsize, 1))
return -1;
*buf++ = chr;
fmt += 2;
} else if (*fmt == '%') {
int strip;
int digs = 3;
#ifdef HAVE_STRFTIME
fmtstart =
#endif
fmt++;
if (*fmt == '-') {
strip = 1;
fmt++;
} else
strip = 0;
if (idigit(*fmt)) {
/* Digit --- only useful with . */
char *dstart = fmt;
char *dend = fmt+1;
while (idigit(*dend))
dend++;
if (*dend == '.') {
fmt = dend;
digs = atoi(dstart);
}
}
/*
* Assume this format will take up at least two
* characters. Not always true, but if that matters
* we are so close to the edge it's not a big deal.
* Fix up some longer cases specially when we get to them.
*/
if (ztrftimebuf(&bufsize, 2))
return -1;
#ifdef HAVE_STRFTIME
/* Our internal handling doesn't handle padding and other gnu extensions,
* so here we detect them and pass over to strftime(). We don't want
* to do this unconditionally though, as we have some extensions that
* strftime() doesn't have (%., %f, %L and %K) */
morefmt:
if (!((fmt - fmtstart == 1) || (fmt - fmtstart == 2 && strip) || *fmt == '.')) {
while (*fmt && strchr("OE^#_-0123456789", *fmt))
fmt++;
if (*fmt) {
fmt++;
goto strftimehandling;
}
}
#endif
switch (*fmt++) {
case '.':
if (ztrftimebuf(&bufsize, digs))
return -1;
if (digs > 9)
digs = 9;
if (digs < 9) {
int trunc;
for (trunc = 8 - digs; trunc; trunc--)
nsec /= 10;
nsec = (nsec + 8) / 10;
}
sprintf(buf, "%0*ld", digs, nsec);
buf += digs;
break;
case '\0':
/* Guard against premature end of string */
*buf++ = '%';
fmt--;
break;
case 'f':
strip = 1;
/* FALLTHROUGH */
case 'e':
if (tm->tm_mday > 9)
*buf++ = '0' + tm->tm_mday / 10;
else if (!strip)
*buf++ = ' ';
*buf++ = '0' + tm->tm_mday % 10;
break;
case 'K':
strip = 1;
/* FALLTHROUGH */
case 'H':
case 'k':
if (tm->tm_hour > 9)
*buf++ = '0' + tm->tm_hour / 10;
else if (!strip) {
if (fmt[-1] == 'H')
*buf++ = '0';
else
*buf++ = ' ';
}
*buf++ = '0' + tm->tm_hour % 10;
break;
case 'L':
strip = 1;
/* FALLTHROUGH */
case 'l':
hr12 = tm->tm_hour % 12;
if (hr12 == 0)
hr12 = 12;
if (hr12 > 9)
*buf++ = '1';
else if (!strip)
*buf++ = ' ';
*buf++ = '0' + (hr12 % 10);
break;
case 'd':
if (tm->tm_mday > 9 || !strip)
*buf++ = '0' + tm->tm_mday / 10;
*buf++ = '0' + tm->tm_mday % 10;
break;
case 'm':
if (tm->tm_mon > 8 || !strip)
*buf++ = '0' + (tm->tm_mon + 1) / 10;
*buf++ = '0' + (tm->tm_mon + 1) % 10;
break;
case 'M':
if (tm->tm_min > 9 || !strip)
*buf++ = '0' + tm->tm_min / 10;
*buf++ = '0' + tm->tm_min % 10;
break;
case 'N':
if (ztrftimebuf(&bufsize, 9))
return -1;
sprintf(buf, "%09ld", nsec);
buf += 9;
break;
case 'S':
if (tm->tm_sec > 9 || !strip)
*buf++ = '0' + tm->tm_sec / 10;
*buf++ = '0' + tm->tm_sec % 10;
break;
case 'y':
if (tm->tm_year > 9 || !strip)
*buf++ = '0' + (tm->tm_year / 10) % 10;
*buf++ = '0' + tm->tm_year % 10;
break;
#ifndef HAVE_STRFTIME
case 'Y':
{
int year, digits, testyear;
year = tm->tm_year + 1900;
digits = 1;
testyear = year;
while (testyear > 9) {
digits++;
testyear /= 10;
}
if (ztrftimebuf(&bufsize, digits))
return -1;
sprintf(buf, "%d", year);
buf += digits;
break;
}
case 'a':
if (ztrftimebuf(&bufsize, strlen(astr[tm->tm_wday]) - 2))
return -1;
strucpy(&buf, astr[tm->tm_wday]);
break;
case 'b':
if (ztrftimebuf(&bufsize, strlen(estr[tm->tm_mon]) - 2))
return -1;
strucpy(&buf, estr[tm->tm_mon]);
break;
case 'p':
*buf++ = (tm->tm_hour > 11) ? 'p' : 'a';
*buf++ = 'm';
break;
default:
*buf++ = '%';
if (fmt[-1] != '%')
*buf++ = fmt[-1];
#else
case 'E':
case 'O':
case '^':
case '#':
case '_':
case '-':
case '0': case '1': case '2': case '3': case '4':
case '5': case '6': case '7': case '8': case '9':
goto morefmt;
strftimehandling:
default:
/*
* Remember we've already allowed for two characters
* in the accounting in bufsize (but nowhere else).
*/
{
char origchar = fmt[-1];
int size = fmt - fmtstart;
char *tmp, *last;
tmp = zhalloc(size + 1);
strncpy(tmp, fmtstart, size);
last = fmt-1;
if (*last == Meta) {
/*
* This is for consistency in counting:
* a metafiable character isn't actually
* a valid strftime descriptor.
*
* Previous characters were explicitly checked,
* so can't be metafied.
*/
*last = *++fmt ^ 32;
}
tmp[size] = '\0';
*buf = '\1';
if (!strftime(buf, bufsize + 2, tmp, tm))
{
/*
* Some locales don't have strings for
* AM/PM, so empty output is valid.
*/
if (*buf || (origchar != 'p' && origchar != 'P')) {
if (*buf) {
buf[0] = '\0';
return -1;
}
return 0;
}
}
decr = strlen(buf);
buf += decr;
bufsize -= decr - 2;
}
#endif
break;
}
} else {
if (ztrftimebuf(&bufsize, 1))
return -1;
*buf++ = *fmt++;
}
}
*buf = '\0';
return buf - origbuf;
}
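/*
 * Illustrative sketch, not part of the original source: ztrftime() behaves
 * like strftime() but adds %N (nanoseconds), %. (fractional seconds with an
 * optional digit count), the space-stripped %f/%K/%L variants, and returns
 * -1 when the buffer is too small.  The value shown in the comment assumes
 * nsec == 123000000.
 */
#if 0
static void
example_ztrftime(time_t now, long nsec)
{
    char buf[64];
    struct tm *tm = localtime(&now);
    /* e.g. "14:05:09.123": literal '.', then three fractional digits */
    if (ztrftime(buf, sizeof(buf), "%H:%M:%S.%3.", tm, nsec) < 0)
        buf[0] = '\0';
}
#endif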
/**/
mod_export char *
zjoin(char **arr, int delim, int heap)
{
int len = 0;
char **s, *ret, *ptr;
for (s = arr; *s; s++)
len += strlen(*s) + 1 + (imeta(delim) ? 1 : 0);
if (!len)
return heap? "" : ztrdup("");
ptr = ret = (char *) (heap ? zhalloc(len) : zalloc(len));
for (s = arr; *s; s++) {
strucpy(&ptr, *s);
if (imeta(delim)) {
*ptr++ = Meta;
*ptr++ = delim ^ 32;
}
else
*ptr++ = delim;
}
ptr[-1 - (imeta(delim) ? 1 : 0)] = '\0';
return ret;
}
/* Split a string containing a colon separated list *
* of items into an array of strings. */
/**/
mod_export char **
colonsplit(char *s, int uniq)
{
int ct;
char *t, **ret, **ptr, **p;
for (t = s, ct = 0; *t; t++) /* count number of colons */
if (*t == ':')
ct++;
ptr = ret = (char **) zalloc(sizeof(char *) * (ct + 2));
t = s;
do {
s = t;
/* move t to point at next colon */
for (; *t && *t != ':'; t++);
if (uniq)
for (p = ret; p < ptr; p++)
if ((int)strlen(*p) == t - s && ! strncmp(*p, s, t - s))
goto cont;
*ptr = (char *) zalloc((t - s) + 1);
ztrncpy(*ptr++, s, t - s);
cont: ;
}
while (*t++);
*ptr = NULL;
return ret;
}
/**/
static int
skipwsep(char **s)
{
char *t = *s;
int i = 0;
/*
* Don't need to handle multibyte characters, they can't
* be IWSEP. Do need to check for metafication.
*/
while (*t && iwsep(*t == Meta ? t[1] ^ 32 : *t)) {
if (*t == Meta)
t++;
t++;
i++;
}
*s = t;
return i;
}
/*
* haven't worked out what allownull does; it's passed down from
* sepsplit but all the cases it's used are either 0 or 1 without
* a comment. it seems to be something to do with the `nulstring'
* which i think is some kind of a metafication thing, so probably
* allownull's value is associated with whether we are using
* metafied strings.
* see findsep() below for handling of `quote' argument
*/
/**/
mod_export char **
spacesplit(char *s, int allownull, int heap, int quote)
{
char *t, **ret, **ptr;
int l = sizeof(*ret) * (wordcount(s, NULL, -!allownull) + 1);
char *(*dup)(const char *) = (heap ? dupstring : ztrdup);
/* ### TODO: s/calloc/alloc/ */
ptr = ret = (char **) (heap ? hcalloc(l) : zshcalloc(l));
if (quote) {
/*
* we will be stripping quoted separators by hacking string,
* so make sure it's hackable.
*/
s = dupstring(s);
}
t = s;
skipwsep(&s);
MB_METACHARINIT();
if (*s && itype_end(s, ISEP, 1) != s)
*ptr++ = dup(allownull ? "" : nulstring);
else if (!allownull && t != s)
*ptr++ = dup("");
while (*s) {
char *iend = itype_end(s, ISEP, 1);
if (iend != s) {
s = iend;
skipwsep(&s);
}
else if (quote && *s == '\\') {
s++;
skipwsep(&s);
}
t = s;
(void)findsep(&s, NULL, quote);
if (s > t || allownull) {
*ptr = (char *) (heap ? zhalloc((s - t) + 1) :
zalloc((s - t) + 1));
ztrncpy(*ptr++, t, s - t);
} else
*ptr++ = dup(nulstring);
t = s;
skipwsep(&s);
}
if (!allownull && t != s)
*ptr++ = dup("");
*ptr = NULL;
return ret;
}
/*
* Find a separator. Return 0 if already at separator, 1 if separator
* found later, else -1. (Historical note: used to return length into
* string but this is all that is necessary and is less ambiguous with
* multibyte characters around.)
*
* *s is the string we are looking along, which will be updated
* to the point we have got to.
*
* sep is a possibly multicharacter separator to look for. If NULL,
* use normal separator characters. If *sep is NULL, split on individual
* characters.
*
* quote is a flag that '\<sep>' should not be treated as a separator.
* in this case we need to be able to strip the backslash directly
* in the string, so the calling function must have sent us something
* modifiable. currently this only works for sep == NULL. also,
* in this case only, we need to turn \\ into \.
*/
/**/
static int
findsep(char **s, char *sep, int quote)
{
int i, ilen;
char *t, *tt;
convchar_t c;
MB_METACHARINIT();
if (!sep) {
for (t = *s; *t; t += ilen) {
if (quote && *t == '\\') {
if (t[1] == '\\') {
chuck(t);
ilen = 1;
continue;
} else {
ilen = MB_METACHARLENCONV(t+1, &c);
if (WC_ZISTYPE(c, ISEP)) {
chuck(t);
/* then advance over new character, length ilen */
} else {
/* treat *t (backslash) as normal byte */
if (isep(*t))
break;
ilen = 1;
}
}
} else {
ilen = MB_METACHARLENCONV(t, &c);
if (WC_ZISTYPE(c, ISEP))
break;
}
}
i = (t > *s);
*s = t;
return i;
}
if (!sep[0]) {
/*
* NULL separator just means advance past first character,
* if any.
*/
if (**s) {
*s += MB_METACHARLEN(*s);
return 1;
}
return -1;
}
for (i = 0; **s; i++) {
/*
* The following works for multibyte characters by virtue of
* the fact that sep may be a string (and we don't care how
* it divides up, we need to match all of it).
*/
for (t = sep, tt = *s; *t && *tt && *t == *tt; t++, tt++);
if (!*t)
return (i > 0);
*s += MB_METACHARLEN(*s);
}
return -1;
}
/**/
char *
findword(char **s, char *sep)
{
char *r, *t;
int sl;
if (!**s)
return NULL;
if (sep) {
sl = strlen(sep);
r = *s;
while (! findsep(s, sep, 0)) {
r = *s += sl;
}
return r;
}
MB_METACHARINIT();
for (t = *s; *t; t += sl) {
convchar_t c;
sl = MB_METACHARLENCONV(t, &c);
if (!WC_ZISTYPE(c, ISEP))
break;
}
*s = t;
(void)findsep(s, sep, 0);
return t;
}
/**/
int
wordcount(char *s, char *sep, int mul)
{
int r, sl, c;
if (sep) {
r = 1;
sl = strlen(sep);
for (; (c = findsep(&s, sep, 0)) >= 0; s += sl)
if ((c || mul) && (sl || *(s + sl)))
r++;
} else {
char *t = s;
r = 0;
if (mul <= 0)
skipwsep(&s);
if ((*s && itype_end(s, ISEP, 1) != s) ||
(mul < 0 && t != s))
r++;
for (; *s; r++) {
char *ie = itype_end(s, ISEP, 1);
if (ie != s) {
s = ie;
if (mul <= 0)
skipwsep(&s);
}
(void)findsep(&s, NULL, 0);
t = s;
if (mul <= 0)
skipwsep(&s);
}
if (mul < 0 && t != s)
r++;
}
return r;
}
/**/
mod_export char *
sepjoin(char **s, char *sep, int heap)
{
char *r, *p, **t;
int l, sl;
char sepbuf[2];
if (!*s)
return heap ? "" : ztrdup("");
if (!sep) {
/* optimise common case that ifs[0] is space */
if (ifs && *ifs != ' ') {
MB_METACHARINIT();
sep = dupstrpfx(ifs, MB_METACHARLEN(ifs));
} else {
p = sep = sepbuf;
*p++ = ' ';
*p = '\0';
}
}
sl = strlen(sep);
for (t = s, l = 1 - sl; *t; l += strlen(*t) + sl, t++);
r = p = (char *) (heap ? zhalloc(l) : zalloc(l));
t = s;
while (*t) {
strucpy(&p, *t);
if (*++t)
strucpy(&p, sep);
}
*p = '\0';
return r;
}
/**/
char **
sepsplit(char *s, char *sep, int allownull, int heap)
{
int n, sl;
char *t, *tt, **r, **p;
/* Null string? Treat as empty string. */
if (s[0] == Nularg && !s[1])
s++;
if (!sep)
return spacesplit(s, allownull, heap, 0);
sl = strlen(sep);
n = wordcount(s, sep, 1);
r = p = (char **) (heap ? zhalloc((n + 1) * sizeof(char *)) :
zalloc((n + 1) * sizeof(char *)));
for (t = s; n--;) {
tt = t;
(void)findsep(&t, sep, 0);
*p = (char *) (heap ? zhalloc(t - tt + 1) :
zalloc(t - tt + 1));
strncpy(*p, tt, t - tt);
(*p)[t - tt] = '\0';
p++;
t += sl;
}
*p = NULL;
return r;
}
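/*
 * Illustrative sketch, not part of the original source: sepsplit() and
 * zjoin() are inverses for simple inputs.  With heap = 1 everything below is
 * allocated on the zsh heap, so nothing needs explicit freeing; the strings
 * are hypothetical.
 */
#if 0
static void
example_split_and_join(void)
{
    char **parts = sepsplit(dupstring("a:b:c"), ":", 0, 1);  /* "a", "b", "c" */
    char *joined = zjoin(parts, ':', 1);                     /* "a:b:c" */
    (void)joined;
}
#endif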
/* Get the definition of a shell function */
/**/
mod_export Shfunc
getshfunc(char *nam)
{
return (Shfunc) shfunctab->getnode(shfunctab, nam);
}
/*
* Call the function func to substitute string orig by setting
* the parameter reply.
* Return the array from reply, or NULL if the function returned
* non-zero status.
* The returned value comes directly from the parameter and
* so should be used before there is any chance of that
* being changed or unset.
* If arg1 is not NULL, it is used as an initial argument to
* the function, with the original string as the second argument.
*/
/**/
char **
subst_string_by_func(Shfunc func, char *arg1, char *orig)
{
int osc = sfcontext, osm = stopmsg, old_incompfunc = incompfunc;
LinkList l = newlinklist();
char **ret;
addlinknode(l, func->node.nam);
if (arg1)
addlinknode(l, arg1);
addlinknode(l, orig);
sfcontext = SFC_SUBST;
incompfunc = 0;
if (doshfunc(func, l, 1))
ret = NULL;
else
ret = getaparam("reply");
sfcontext = osc;
stopmsg = osm;
incompfunc = old_incompfunc;
return ret;
}
/**
* Front end to subst_string_by_func to use hook-like logic.
* name can refer to a function, and name + "_hook" can refer
* to an array containing a list of functions. The functions
* are tried in order until one returns success.
*/
/**/
char **
subst_string_by_hook(char *name, char *arg1, char *orig)
{
Shfunc func;
char **ret = NULL;
if ((func = getshfunc(name))) {
ret = subst_string_by_func(func, arg1, orig);
}
if (!ret) {
char **arrptr;
int namlen = strlen(name);
VARARR(char, arrnam, namlen + HOOK_SUFFIX_LEN);
memcpy(arrnam, name, namlen);
memcpy(arrnam + namlen, HOOK_SUFFIX, HOOK_SUFFIX_LEN);
if ((arrptr = getaparam(arrnam))) {
/* Guard against internal modification of the array */
arrptr = arrdup(arrptr);
for (; *arrptr; arrptr++) {
if ((func = getshfunc(*arrptr))) {
ret = subst_string_by_func(func, arg1, orig);
if (ret)
break;
}
}
}
}
return ret;
}
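/*
 * Illustrative sketch, not part of the original source: driving the hook
 * helper above for a hypothetical hook point "foo".  It tries a function
 * named foo first, then each element of the associated hook array (the name
 * with HOOK_SUFFIX appended), and yields the first successful reply; the
 * result is copied immediately because the reply parameter may change again.
 */
#if 0
static char **
example_run_foo_hook(char *arg)
{
    char **reply = subst_string_by_hook("foo", "ctx", arg);
    return reply ? arrdup(reply) : NULL;    /* heap copy of the reply array */
}
#endif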
/**/
mod_export char **
mkarray(char *s)
{
char **t = (char **) zalloc((s) ? (2 * sizeof s) : (sizeof s));
if ((*t = s))
t[1] = NULL;
return t;
}
/**/
mod_export char **
hmkarray(char *s)
{
char **t = (char **) zhalloc((s) ? (2 * sizeof s) : (sizeof s));
if ((*t = s))
t[1] = NULL;
return t;
}
/**/
mod_export void
zbeep(void)
{
char *vb;
queue_signals();
if ((vb = getsparam_u("ZBEEP"))) {
int len;
vb = getkeystring(vb, &len, GETKEYS_BINDKEY, NULL);
write_loop(SHTTY, vb, len);
} else if (isset(BEEP))
write_loop(SHTTY, "\07", 1);
unqueue_signals();
}
/**/
mod_export void
freearray(char **s)
{
char **t = s;
DPUTS(!s, "freearray() with zero argument");
while (*s)
zsfree(*s++);
free(t);
}
/**/
int
equalsplit(char *s, char **t)
{
for (; *s && *s != '='; s++);
if (*s == '=') {
*s++ = '\0';
*t = s;
return 1;
}
return 0;
}
/* the ztypes table */
/**/
mod_export short int typtab[256];
static int typtab_flags = 0;
/* initialize the ztypes table */
/**/
void
inittyptab(void)
{
int t0;
char *s;
if (!(typtab_flags & ZTF_INIT)) {
typtab_flags = ZTF_INIT;
if (interact && isset(SHINSTDIN))
typtab_flags |= ZTF_INTERACT;
}
queue_signals();
memset(typtab, 0, sizeof(typtab));
for (t0 = 0; t0 != 32; t0++)
typtab[t0] = typtab[t0 + 128] = ICNTRL;
typtab[127] = ICNTRL;
for (t0 = '0'; t0 <= '9'; t0++)
typtab[t0] = IDIGIT | IALNUM | IWORD | IIDENT | IUSER;
for (t0 = 'a'; t0 <= 'z'; t0++)
typtab[t0] = typtab[t0 - 'a' + 'A'] = IALPHA | IALNUM | IIDENT | IUSER | IWORD;
#ifndef MULTIBYTE_SUPPORT
/*
* This really doesn't seem to me the right thing to do when
* we have multibyte character support... it was a hack to assume
* eight bit characters `worked' for some values of work before
* we could test for them properly. I'm not 100% convinced
* having IIDENT here is a good idea at all, but this code
* should disappear into history...
*/
for (t0 = 0240; t0 != 0400; t0++)
typtab[t0] = IALPHA | IALNUM | IIDENT | IUSER | IWORD;
#endif
/* typtab['.'] |= IIDENT; */ /* Allow '.' in variable names - broken */
typtab['_'] = IIDENT | IUSER;
typtab['-'] = typtab['.'] = typtab[STOUC(Dash)] = IUSER;
typtab[' '] |= IBLANK | INBLANK;
typtab['\t'] |= IBLANK | INBLANK;
typtab['\n'] |= INBLANK;
typtab['\0'] |= IMETA;
typtab[STOUC(Meta) ] |= IMETA;
typtab[STOUC(Marker)] |= IMETA;
for (t0 = (int)STOUC(Pound); t0 <= (int)STOUC(LAST_NORMAL_TOK); t0++)
typtab[t0] |= ITOK | IMETA;
for (t0 = (int)STOUC(Snull); t0 <= (int)STOUC(Nularg); t0++)
typtab[t0] |= ITOK | IMETA | INULL;
for (s = ifs ? ifs : EMULATION(EMULATE_KSH|EMULATE_SH) ?
DEFAULT_IFS_SH : DEFAULT_IFS; *s; s++) {
int c = STOUC(*s == Meta ? *++s ^ 32 : *s);
#ifdef MULTIBYTE_SUPPORT
if (!isascii(c)) {
/* see comment for wordchars below */
continue;
}
#endif
if (inblank(c)) {
if (s[1] == c)
s++;
else
typtab[c] |= IWSEP;
}
typtab[c] |= ISEP;
}
for (s = wordchars ? wordchars : DEFAULT_WORDCHARS; *s; s++) {
int c = STOUC(*s == Meta ? *++s ^ 32 : *s);
#ifdef MULTIBYTE_SUPPORT
if (!isascii(c)) {
/*
* If we have support for multibyte characters, we don't
* handle non-ASCII characters here; instead, we turn
* wordchars into a wide character array.
* (We may actually have a single-byte 8-bit character set,
* but it works the same way.)
*/
continue;
}
#endif
typtab[c] |= IWORD;
}
#ifdef MULTIBYTE_SUPPORT
set_widearray(wordchars, &wordchars_wide);
set_widearray(ifs ? ifs : EMULATION(EMULATE_KSH|EMULATE_SH) ?
DEFAULT_IFS_SH : DEFAULT_IFS, &ifs_wide);
#endif
for (s = SPECCHARS; *s; s++)
typtab[STOUC(*s)] |= ISPECIAL;
if (typtab_flags & ZTF_SP_COMMA)
typtab[STOUC(',')] |= ISPECIAL;
if (isset(BANGHIST) && bangchar && (typtab_flags & ZTF_INTERACT)) {
typtab_flags |= ZTF_BANGCHAR;
typtab[bangchar] |= ISPECIAL;
} else
typtab_flags &= ~ZTF_BANGCHAR;
for (s = PATCHARS; *s; s++)
typtab[STOUC(*s)] |= IPATTERN;
unqueue_signals();
}
/**/
mod_export void
makecommaspecial(int yesno)
{
if (yesno != 0) {
typtab_flags |= ZTF_SP_COMMA;
typtab[STOUC(',')] |= ISPECIAL;
} else {
typtab_flags &= ~ZTF_SP_COMMA;
typtab[STOUC(',')] &= ~ISPECIAL;
}
}
/**/
mod_export void
makebangspecial(int yesno)
{
/* Name and call signature for congruence with makecommaspecial(),
* but in this case when yesno is nonzero we defer to the state
* saved by inittyptab().
*/
if (yesno == 0) {
typtab[bangchar] &= ~ISPECIAL;
} else if (typtab_flags & ZTF_BANGCHAR) {
typtab[bangchar] |= ISPECIAL;
}
}
/**/
#ifdef MULTIBYTE_SUPPORT
/* A wide-character version of the iblank() macro. */
/**/
mod_export int
wcsiblank(wint_t wc)
{
if (iswspace(wc) && wc != L'\n')
return 1;
return 0;
}
/*
* zistype macro extended to support wide characters.
* Works for IIDENT, IWORD, IALNUM, ISEP.
* We don't need this for IWSEP because that only applies to
* a fixed set of ASCII characters.
* Note here that use of multibyte mode is not tested:
* that's because for ZLE this is unconditional,
* not dependent on the option. The caller must decide.
*/
/**/
mod_export int
wcsitype(wchar_t c, int itype)
{
int len;
mbstate_t mbs;
VARARR(char, outstr, MB_CUR_MAX);
if (!isset(MULTIBYTE))
return zistype(c, itype);
/*
* Strategy: the shell requires that the multibyte representation
* be an extension of ASCII. So see if converting the character
* produces an ASCII character. If it does, use zistype on that.
* If it doesn't, use iswalnum on the original character.
* If that fails, resort to the appropriate wide character array.
*/
memset(&mbs, 0, sizeof(mbs));
len = wcrtomb(outstr, c, &mbs);
if (len == 0) {
/* NULL is special */
return zistype(0, itype);
} else if (len == 1 && isascii(outstr[0])) {
return zistype(outstr[0], itype);
} else {
switch (itype) {
case IIDENT:
if (!isset(POSIXIDENTIFIERS))
return 0;
return iswalnum(c);
case IWORD:
if (iswalnum(c))
return 1;
/*
* If we are handling combining characters, any punctuation
* characters with zero width needs to be considered part of
* a word. If we are not handling combining characters then
* logically they are still part of the word, even if they
* don't get displayed properly, so always do this.
*/
if (IS_COMBINING(c))
return 1;
return !!wmemchr(wordchars_wide.chars, c, wordchars_wide.len);
case ISEP:
return !!wmemchr(ifs_wide.chars, c, ifs_wide.len);
default:
return iswalnum(c);
}
}
}
/**/
#endif
/*
* Find the end of a set of characters in the set specified by itype;
* one of IALNUM, IIDENT, IWORD or IUSER. For non-ASCII characters, we assume
* alphanumerics are part of the set, with the exception that
* identifiers are not treated that way if POSIXIDENTIFIERS is set.
*
* See notes above for identifiers.
* Returns the same pointer as passed if not on an identifier character.
* If "once" is set, just test the first character, i.e. (outptr !=
* inptr) tests whether the first character is valid in an identifier.
*
* Currently this is only called with itype IIDENT, IUSER or ISEP.
*/
/**/
mod_export char *
itype_end(const char *ptr, int itype, int once)
{
#ifdef MULTIBYTE_SUPPORT
if (isset(MULTIBYTE) &&
(itype != IIDENT || !isset(POSIXIDENTIFIERS))) {
mb_charinit();
while (*ptr) {
int len;
if (itok(*ptr)) {
/* Not untokenised yet --- can happen in raw command line */
len = 1;
if (!zistype(*ptr,itype))
break;
} else {
wint_t wc;
len = mb_metacharlenconv(ptr, &wc);
if (!len)
break;
if (wc == WEOF) {
/* invalid, treat as single character */
int chr = STOUC(*ptr == Meta ? ptr[1] ^ 32 : *ptr);
/* in this case non-ASCII characters can't match */
if (chr > 127 || !zistype(chr,itype))
break;
} else if (len == 1 && isascii(*ptr)) {
/* ASCII: can't be metafied, use standard test */
if (!zistype(*ptr,itype))
break;
} else {
/*
* Valid non-ASCII character.
*/
switch (itype) {
case IWORD:
if (!iswalnum(wc) &&
!wmemchr(wordchars_wide.chars, wc,
wordchars_wide.len))
return (char *)ptr;
break;
case ISEP:
if (!wmemchr(ifs_wide.chars, wc, ifs_wide.len))
return (char *)ptr;
break;
default:
if (!iswalnum(wc))
return (char *)ptr;
}
}
}
ptr += len;
if (once)
break;
}
} else
#endif
for (;;) {
int chr = STOUC(*ptr == Meta ? ptr[1] ^ 32 : *ptr);
if (!zistype(chr,itype))
break;
ptr += (*ptr == Meta) ? 2 : 1;
if (once)
break;
}
/*
* Nasty. The first argument is const char * because we
* don't modify it here. However, we really want to pass
* back the same type as was passed down, to allow idioms like
* p = itype_end(p, IIDENT, 0);
* So returning a const char * isn't really the right thing to do.
* Without having two different functions the following seems
* to be the best we can do.
*/
return (char *)ptr;
}
/**/
mod_export char **
arrdup(char **s)
{
char **x, **y;
y = x = (char **) zhalloc(sizeof(char *) * (arrlen(s) + 1));
while ((*x++ = dupstring(*s++)));
return y;
}
/* Duplicate at most max elements of the array s with heap memory */
/**/
mod_export char **
arrdup_max(char **s, unsigned max)
{
char **x, **y, **send;
int len = 0;
if (max)
len = arrlen(s);
/* The limit is only meaningful if it is smaller than len */
if (max > len)
max = len;
y = x = (char **) zhalloc(sizeof(char *) * (max + 1));
send = s + max;
while (s < send)
*x++ = dupstring(*s++);
*x = NULL;
return y;
}
/**/
mod_export char **
zarrdup(char **s)
{
char **x, **y;
y = x = (char **) zalloc(sizeof(char *) * (arrlen(s) + 1));
while ((*x++ = ztrdup(*s++)));
return y;
}
/**/
#ifdef MULTIBYTE_SUPPORT
/**/
mod_export wchar_t **
wcs_zarrdup(wchar_t **s)
{
wchar_t **x, **y;
y = x = (wchar_t **) zalloc(sizeof(wchar_t *) * (arrlen((char **)s) + 1));
while ((*x++ = wcs_ztrdup(*s++)));
return y;
}
/**/
#endif /* MULTIBYTE_SUPPORT */
/**/
static char *
spname(char *oldname)
{
char *p, spnameguess[PATH_MAX + 1], spnamebest[PATH_MAX + 1];
static char newname[PATH_MAX + 1];
char *new = newname, *old = oldname;
int bestdist = 0, thisdist, thresh, maxthresh = 0;
/* This loop corrects each directory component of the path, stopping *
* when any correction distance would exceed the distance threshold. *
* NULL is returned only if the first component cannot be corrected; *
* otherwise a copy of oldname with a corrected prefix is returned. *
* Rationale for this, if there ever was any, has been forgotten. */
for (;;) {
while (*old == '/') {
if (new >= newname + sizeof(newname) - 1)
return NULL;
*new++ = *old++;
}
*new = '\0';
if (*old == '\0')
return newname;
p = spnameguess;
for (; *old != '/' && *old != '\0'; old++)
if (p < spnameguess + PATH_MAX)
*p++ = *old;
*p = '\0';
/* Every component is allowed a single distance 2 correction or two *
* distance 1 corrections. Longer ones get additional corrections. */
thresh = (int)(p - spnameguess) / 4 + 1;
if (thresh < 3)
thresh = 3;
else if (thresh > 100)
thresh = 100;
thisdist = mindist(newname, spnameguess, spnamebest, *old == '/');
if (thisdist >= thresh) {
/* The next test is always true, except for the first path *
* component. We could initialize bestdist to some large *
* constant instead, and then compare to that constant here, *
* because an invariant is that we've never exceeded the *
* threshold for any component so far; but I think that looks *
* odd to the human reader, and we may make use of the total *
* distance for all corrections at some point in the future. */
if (bestdist < maxthresh) {
struncpy(&new, spnameguess, sizeof(newname) - (new - newname));
struncpy(&new, old, sizeof(newname) - (new - newname));
return (new >= newname + sizeof(newname) -1) ? NULL : newname;
} else
return NULL;
} else {
maxthresh = bestdist + thresh;
bestdist += thisdist;
}
for (p = spnamebest; (*new = *p++);) {
if (new >= newname + sizeof(newname) - 1)
return NULL;
new++;
}
}
}
/**/
static int
mindist(char *dir, char *mindistguess, char *mindistbest, int wantdir)
{
int mindistd, nd;
DIR *dd;
char *fn;
char *buf;
struct stat st;
size_t dirlen;
if (dir[0] == '\0')
dir = ".";
mindistd = 100;
if (!(buf = zalloc((dirlen = strlen(dir)) + strlen(mindistguess) + 2)))
return 0;
sprintf(buf, "%s/%s", dir, mindistguess);
if (stat(unmeta(buf), &st) == 0 && (!wantdir || S_ISDIR(st.st_mode))) {
strcpy(mindistbest, mindistguess);
free(buf);
return 0;
}
if ((dd = opendir(unmeta(dir)))) {
while ((fn = zreaddir(dd, 0))) {
if (spnamepat && pattry(spnamepat, fn))
continue;
nd = spdist(fn, mindistguess,
(int)strlen(mindistguess) / 4 + 1);
if (nd <= mindistd) {
if (wantdir) {
if (!(buf = zrealloc(buf, dirlen + strlen(fn) + 2)))
continue;
sprintf(buf, "%s/%s", dir, fn);
if (stat(unmeta(buf), &st) != 0 || !S_ISDIR(st.st_mode))
continue;
}
strcpy(mindistbest, fn);
mindistd = nd;
if (mindistd == 0)
break;
}
}
closedir(dd);
}
free(buf);
return mindistd;
}
/**/
static int
spdist(char *s, char *t, int thresh)
{
/* TODO: Correction for non-ASCII and multibyte-input keyboards. */
char *p, *q;
const char qwertykeymap[] =
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\
\t1234567890-=\t\
\tqwertyuiop[]\t\
\tasdfghjkl;'\n\t\
\tzxcvbnm,./\t\t\t\
\n\n\n\n\n\n\n\n\n\n\n\n\n\n\
\t!@#$%^&*()_+\t\
\tQWERTYUIOP{}\t\
\tASDFGHJKL:\"\n\t\
\tZXCVBNM<>?\n\n\t\
\n\n\n\n\n\n\n\n\n\n\n\n\n\n";
const char dvorakkeymap[] =
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\
\t1234567890[]\t\
\t',.pyfgcrl/=\t\
\taoeuidhtns-\n\t\
\t;qjkxbmwvz\t\t\t\
\n\n\n\n\n\n\n\n\n\n\n\n\n\n\
\t!@#$%^&*(){}\t\
\t\"<>PYFGCRL?+\t\
\tAOEUIDHTNS_\n\t\
\t:QJKXBMWVZ\n\n\t\
\n\n\n\n\n\n\n\n\n\n\n\n\n\n";
const char *keymap;
if ( isset( DVORAK ) )
keymap = dvorakkeymap;
else
keymap = qwertykeymap;
if (!strcmp(s, t))
return 0;
/* any number of upper/lower mistakes allowed (dist = 1) */
for (p = s, q = t; *p && tulower(*p) == tulower(*q); p++, q++);
if (!*p && !*q)
return 1;
if (!thresh)
return 200;
for (p = s, q = t; *p && *q; p++, q++)
if (*p == *q)
continue; /* don't consider "aa" transposed, ash */
else if (p[1] == q[0] && q[1] == p[0]) /* transpositions */
return spdist(p + 2, q + 2, thresh - 1) + 1;
else if (p[1] == q[0]) /* missing letter */
return spdist(p + 1, q + 0, thresh - 1) + 2;
else if (p[0] == q[1]) /* missing letter */
return spdist(p + 0, q + 1, thresh - 1) + 2;
else if (*p != *q)
break;
if ((!*p && strlen(q) == 1) || (!*q && strlen(p) == 1))
return 2;
for (p = s, q = t; *p && *q; p++, q++)
if (p[0] != q[0] && p[1] == q[1]) {
int t0;
char *z;
/* mistyped letter */
if (!(z = strchr(keymap, p[0])) || *z == '\n' || *z == '\t')
return spdist(p + 1, q + 1, thresh - 1) + 1;
t0 = z - keymap;
if (*q == keymap[t0 - 15] || *q == keymap[t0 - 14] ||
*q == keymap[t0 - 13] ||
*q == keymap[t0 - 1] || *q == keymap[t0 + 1] ||
*q == keymap[t0 + 13] || *q == keymap[t0 + 14] ||
*q == keymap[t0 + 15])
return spdist(p + 1, q + 1, thresh - 1) + 2;
return 200;
} else if (*p != *q)
break;
return 200;
}
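/*
 * Illustrative sketch, not part of the original source: what the scoring in
 * spdist() means in practice.  Case differences alone cost 1 in total, a
 * transposition costs 1, a missing or extra letter costs 2, and substitutions
 * cost 1 or 2 depending on keyboard adjacency; anything worse degenerates to
 * 200.
 */
#if 0
static void
example_spdist(void)
{
    int d1 = spdist("grep", "Grep", 1);  /* 1: case difference only */
    int d2 = spdist("gerp", "grep", 1);  /* 1: "er" transposed */
    int d3 = spdist("gre", "grep", 1);   /* 2: one letter missing at the end */
    (void)d1; (void)d2; (void)d3;
}
#endif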
/* set cbreak mode, or the equivalent */
/**/
void
setcbreak(void)
{
struct ttyinfo ti;
ti = shttyinfo;
#ifdef HAS_TIO
ti.tio.c_lflag &= ~ICANON;
ti.tio.c_cc[VMIN] = 1;
ti.tio.c_cc[VTIME] = 0;
#else
ti.sgttyb.sg_flags |= CBREAK;
#endif
settyinfo(&ti);
}
/* give the tty to some process */
/**/
mod_export void
attachtty(pid_t pgrp)
{
static int ep = 0;
if (jobbing && interact) {
#ifdef HAVE_TCSETPGRP
if (SHTTY != -1 && tcsetpgrp(SHTTY, pgrp) == -1 && !ep)
#else
# if ardent
if (SHTTY != -1 && setpgrp() == -1 && !ep)
# else
int arg = pgrp;
if (SHTTY != -1 && ioctl(SHTTY, TIOCSPGRP, &arg) == -1 && !ep)
# endif
#endif
{
if (pgrp != mypgrp && kill(-pgrp, 0) == -1)
attachtty(mypgrp);
else {
if (errno != ENOTTY)
{
zwarn("can't set tty pgrp: %e", errno);
fflush(stderr);
}
opts[MONITOR] = 0;
ep = 1;
}
}
}
}
/* get the process group associated with the tty */
/**/
pid_t
gettygrp(void)
{
pid_t arg;
if (SHTTY == -1)
return -1;
#ifdef HAVE_TCSETPGRP
arg = tcgetpgrp(SHTTY);
#else
ioctl(SHTTY, TIOCGPGRP, &arg);
#endif
return arg;
}
/* Escape tokens and null characters. Buf is the string which should be *
* escaped. len is the length of the string. If len is -1, buf should be *
* null terminated. If len is non-negative and the third parameter is not *
* META_DUP, buf should point to an at least len+1 long memory area. The *
* return value points to the quoted string. If the given string does not *
* contain any special character which should be quoted and the third *
* parameter is not META_(HEAP|)DUP, buf is returned unchanged (a *
* terminating null character is appended to buf if necessary). Otherwise *
* the third `heap' argument determines the method used to allocate space *
* for the result. It can have the following values: *
* META_REALLOC: use zrealloc on buf *
* META_HREALLOC: use hrealloc on buf *
* META_USEHEAP: get memory from the heap. This leaves buf unchanged. *
* META_NOALLOC: buf points to a memory area which is long enough to hold *
* the quoted form, just quote it and return buf. *
* META_STATIC: store the quoted string in a static area. The original *
* string should be at most PATH_MAX long. *
* META_ALLOC: allocate memory for the new string with zalloc(). *
* META_DUP: leave buf unchanged and allocate space for the return *
 * value even if buf does not contain special characters *
* META_HEAPDUP: same as META_DUP, but uses the heap */
/**/
mod_export char *
metafy(char *buf, int len, int heap)
{
int meta = 0;
char *t, *p, *e;
static char mbuf[PATH_MAX*2+1];
if (len == -1) {
for (e = buf, len = 0; *e; len++)
if (imeta(*e++))
meta++;
} else
for (e = buf; e < buf + len;)
if (imeta(*e++))
meta++;
if (meta || heap == META_DUP || heap == META_HEAPDUP) {
switch (heap) {
case META_REALLOC:
buf = zrealloc(buf, len + meta + 1);
break;
case META_HREALLOC:
buf = hrealloc(buf, len, len + meta + 1);
break;
case META_ALLOC:
case META_DUP:
buf = memcpy(zalloc(len + meta + 1), buf, len);
break;
case META_USEHEAP:
case META_HEAPDUP:
buf = memcpy(zhalloc(len + meta + 1), buf, len);
break;
case META_STATIC:
#ifdef DEBUG
if (len > PATH_MAX) {
fprintf(stderr, "BUG: len = %d > PATH_MAX in metafy\n", len);
fflush(stderr);
}
#endif
buf = memcpy(mbuf, buf, len);
break;
#ifdef DEBUG
case META_NOALLOC:
break;
default:
fprintf(stderr, "BUG: metafy called with invalid heap value\n");
fflush(stderr);
break;
#endif
}
p = buf + len;
e = t = buf + len + meta;
while (meta) {
if (imeta(*--t = *--p)) {
*t-- ^= 32;
*t = Meta;
meta--;
}
}
}
*e = '\0';
return buf;
}
/*
* Duplicate a string, metafying it as we go.
*
* Typically, this is used only for strings imported from outside
* zsh, as strings internally are either already metafied or passed
* around with an associated length.
*/
/**/
mod_export char *
ztrdup_metafy(const char *s)
{
/* To mimic ztrdup() behaviour */
if (!s)
return NULL;
/*
* metafy() does lots of different things, so the pointer
* isn't const. Using it with META_DUP should be safe.
*/
return metafy((char *)s, -1, META_DUP);
}
/*
* Take a null-terminated, metafied string in s into a literal
* representation by converting in place. The length is in *len
 * if len is non-NULL; if len is NULL, you don't know the length of
* the final string, but if it's to be supplied to some system
* routine that always uses NULL termination, such as a filename
* interpreter, that doesn't matter. Note the NULL termination
* is always copied for purposes of that kind.
*/
/**/
mod_export char *
unmetafy(char *s, int *len)
{
char *p, *t;
for (p = s; *p && *p != Meta; p++);
for (t = p; (*t = *p++);)
if (*t++ == Meta && *p)
t[-1] = *p++ ^ 32;
if (len)
*len = t - s;
return s;
}
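/*
 * Illustrative sketch (an addition for exposition, not part of the
 * original source; it relies on the zsh definitions above): a round trip
 * through metafy()/unmetafy() under the rules documented above.  The
 * buffer contents are made up.
 */
#if 0	/* example only, never compiled */
static void
example_metafy_roundtrip(void)
{
    char raw[] = { 'a', '\0', 'b' };		/* embedded NUL needs escaping */
    char *m = metafy(raw, 3, META_DUP);		/* allocated copy; raw untouched */
    int ulen;

    /* m is now NUL-terminated and safe to pass around as a C string */
    unmetafy(m, &ulen);				/* back to literal bytes in place */
    /* ulen == 3 again; free m with the allocator implied by META_DUP */
}
#endif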
/* Return the character length of a metafied substring, given the *
* unmetafied substring length. */
/**/
mod_export int
metalen(const char *s, int len)
{
int mlen = len;
while (len--) {
if (*s++ == Meta) {
mlen++;
s++;
}
}
return mlen;
}
/*
* This function converts a zsh internal string to a form which can be
* passed to a system call as a filename. The result is stored in a
* single static area, sized to fit. If there is no Meta character
* the original string is returned.
*/
/**/
mod_export char *
unmeta(const char *file_name)
{
static char *fn;
static int sz;
char *p;
const char *t;
int newsz, meta;
if (!file_name)
return NULL;
meta = 0;
for (t = file_name; *t; t++) {
if (*t == Meta)
meta = 1;
}
if (!meta) {
/*
* don't need allocation... free if it's long, see below
*/
if (sz > 4 * PATH_MAX) {
zfree(fn, sz);
fn = NULL;
sz = 0;
}
return (char *) file_name;
}
newsz = (t - file_name) + 1;
/*
* Optimisation: don't resize if we don't have to.
* We need a new allocation if
* - nothing was allocated before
* - the new string is larger than the old one
* - the old string was larger than an arbitrary limit but the
* new string isn't so that we free up significant space by resizing.
*/
if (!fn || newsz > sz || (sz > 4 * PATH_MAX && newsz <= 4 * PATH_MAX))
{
if (fn)
zfree(fn, sz);
sz = newsz;
fn = (char *)zalloc(sz);
if (!fn) {
sz = 0;
/*
* will quite likely crash in the caller anyway...
*/
return NULL;
}
}
for (t = file_name, p = fn; *t; p++)
if ((*p = *t++) == Meta && *t)
*p = *t++ ^ 32;
*p = '\0';
return fn;
}
/*
* Unmetafy just one character and store the number of bytes it occupied.
*/
/**/
mod_export convchar_t
unmeta_one(const char *in, int *sz)
{
convchar_t wc;
int newsz;
#ifdef MULTIBYTE_SUPPORT
mbstate_t wstate;
#endif
if (!sz)
sz = &newsz;
*sz = 0;
if (!in || !*in)
return 0;
#ifdef MULTIBYTE_SUPPORT
memset(&wstate, 0, sizeof(wstate));
*sz = mb_metacharlenconv_r(in, &wc, &wstate);
#else
if (in[0] == Meta) {
*sz = 2;
wc = STOUC(in[1] ^ 32);
} else {
*sz = 1;
wc = STOUC(in[0]);
}
#endif
return wc;
}
/*
* Unmetafy and compare two strings, comparing unsigned character values.
* "a\0" sorts after "a".
*
* Currently this is only used in hash table sorting, where the
* keys are names of hash nodes and where we don't use strcoll();
* it's not clear if that's right but it does guarantee the ordering
* of shell structures on output.
*
* As we don't use strcoll(), it seems overkill to convert multibyte
* characters to wide characters for comparison every time. In the case
* of UTF-8, Unicode ordering is preserved when sorted raw, and for
* other character sets we rely on an extension of ASCII so the result,
* while it may not be correct, is at least rational.
*/
/**/
int
ztrcmp(char const *s1, char const *s2)
{
int c1, c2;
while(*s1 && *s1 == *s2) {
s1++;
s2++;
}
if(!(c1 = *s1))
c1 = -1;
else if(c1 == STOUC(Meta))
c1 = *++s1 ^ 32;
if(!(c2 = *s2))
c2 = -1;
else if(c2 == STOUC(Meta))
c2 = *++s2 ^ 32;
if(c1 == c2)
return 0;
else if(c1 < c2)
return -1;
else
return 1;
}
/* Return the unmetafied length of a metafied string. */
/**/
mod_export int
ztrlen(char const *s)
{
int l;
for (l = 0; *s; l++) {
if (*s++ == Meta) {
#ifdef DEBUG
if (! *s) {
fprintf(stderr, "BUG: unexpected end of string in ztrlen()\n");
break;
} else
#endif
s++;
}
}
return l;
}
#ifndef MULTIBYTE_SUPPORT
/*
* ztrlen() but with explicit end point for non-null-terminated
* segments. eptr may not be NULL.
*/
/**/
mod_export int
ztrlenend(char const *s, char const *eptr)
{
int l;
for (l = 0; s < eptr; l++) {
if (*s++ == Meta) {
#ifdef DEBUG
if (! *s) {
fprintf(stderr,
"BUG: unexpected end of string in ztrlenend()\n");
break;
} else
#endif
s++;
}
}
return l;
}
#endif /* MULTIBYTE_SUPPORT */
/* Subtract two pointers in a metafied string. */
/**/
mod_export int
ztrsub(char const *t, char const *s)
{
int l = t - s;
while (s != t) {
if (*s++ == Meta) {
#ifdef DEBUG
if (! *s || s == t)
fprintf(stderr, "BUG: substring ends in the middle of a metachar in ztrsub()\n");
else
#endif
s++;
l--;
}
}
return l;
}
/*
* Wrapper for readdir().
*
* If ignoredots is true, skip the "." and ".." entries.
*
* When __APPLE__ is defined, recode dirent names from UTF-8-MAC to UTF-8.
*
* Return the dirent's name, metafied.
*/
/**/
mod_export char *
zreaddir(DIR *dir, int ignoredots)
{
struct dirent *de;
#if defined(HAVE_ICONV) && defined(__APPLE__)
static iconv_t conv_ds = (iconv_t)0;
static char *conv_name = 0;
char *conv_name_ptr, *orig_name_ptr;
size_t conv_name_len, orig_name_len;
#endif
do {
de = readdir(dir);
if(!de)
return NULL;
} while(ignoredots && de->d_name[0] == '.' &&
(!de->d_name[1] || (de->d_name[1] == '.' && !de->d_name[2])));
#if defined(HAVE_ICONV) && defined(__APPLE__)
if (!conv_ds)
conv_ds = iconv_open("UTF-8", "UTF-8-MAC");
if (conv_ds != (iconv_t)(-1)) {
/* Force initial state in case re-using conv_ds */
(void) iconv(conv_ds, 0, &orig_name_len, 0, &conv_name_len);
orig_name_ptr = de->d_name;
orig_name_len = strlen(de->d_name);
conv_name = zrealloc(conv_name, orig_name_len+1);
conv_name_ptr = conv_name;
conv_name_len = orig_name_len;
if (iconv(conv_ds,
&orig_name_ptr, &orig_name_len,
&conv_name_ptr, &conv_name_len) != (size_t)(-1) &&
orig_name_len == 0) {
/* Completely converted, metafy and return */
*conv_name_ptr = '\0';
return metafy(conv_name, -1, META_STATIC);
}
/* Error, or conversion incomplete, keep the original name */
}
#endif
return metafy(de->d_name, -1, META_STATIC);
}
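/*
 * Usage sketch (added for illustration): iterate over a directory,
 * skipping "." and "..".  The returned name is metafied and lives in
 * transient storage, so copy it (e.g. with ztrdup()) before the next
 * call if it needs to be kept.
 */
#if 0	/* example only, never compiled */
static void
example_zreaddir(DIR *d)
{
    char *name;

    while ((name = zreaddir(d, 1))) {
	/* use or copy "name" here */
    }
}
#endif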
/* Unmetafy and output a string. Tokens are skipped. */
/**/
mod_export int
zputs(char const *s, FILE *stream)
{
int c;
while (*s) {
if (*s == Meta)
c = *++s ^ 32;
else if(itok(*s)) {
s++;
continue;
} else
c = *s;
s++;
if (fputc(c, stream) < 0)
return EOF;
}
return 0;
}
#ifndef MULTIBYTE_SUPPORT
/* Create a visibly-represented duplicate of a string. */
/**/
mod_export char *
nicedup(char const *s, int heap)
{
int c, len = strlen(s) * 5 + 1;
VARARR(char, buf, len);
char *p = buf, *n;
while ((c = *s++)) {
if (itok(c)) {
if (c <= Comma)
c = ztokens[c - Pound];
else
continue;
}
if (c == Meta)
c = *s++ ^ 32;
/* The result here is metafied */
n = nicechar(c);
while(*n)
*p++ = *n++;
}
*p = '\0';
return heap ? dupstring(buf) : ztrdup(buf);
}
#endif
/**/
mod_export char *
nicedupstring(char const *s)
{
return nicedup(s, 1);
}
#ifndef MULTIBYTE_SUPPORT
/* Unmetafy and output a string, displaying special characters readably. */
/**/
mod_export int
nicezputs(char const *s, FILE *stream)
{
int c;
while ((c = *s++)) {
if (itok(c)) {
if (c <= Comma)
c = ztokens[c - Pound];
else
continue;
}
if (c == Meta)
c = *s++ ^ 32;
if(zputs(nicechar(c), stream) < 0)
return EOF;
}
return 0;
}
/* Return the length of the visible representation of a metafied string. */
/**/
mod_export size_t
niceztrlen(char const *s)
{
size_t l = 0;
int c;
while ((c = *s++)) {
if (itok(c)) {
if (c <= Comma)
c = ztokens[c - Pound];
else
continue;
}
if (c == Meta)
c = *s++ ^ 32;
l += strlen(nicechar(c));
}
return l;
}
#endif
/**/
#ifdef MULTIBYTE_SUPPORT
/*
* Version of both nicezputs() and niceztrlen() for use with multibyte
* characters. Input is a metafied string; output is the screen width of
* the string.
*
* If the FILE * is not NULL, output to that, too.
*
* If outstrp is not NULL, set *outstrp to a zalloc'd version of
* the output (still metafied).
*
* If flags contains NICEFLAG_HEAP, use the heap for *outstrp, else
* zalloc.
 * If flags contains NICEFLAG_QUOTE, the output is going to be within
* $'...', so quote "'" and "\" with a backslash.
*/
/**/
mod_export size_t
mb_niceformat(const char *s, FILE *stream, char **outstrp, int flags)
{
size_t l = 0, newl;
int umlen, outalloc, outleft, eol = 0;
wchar_t c;
char *ums, *ptr, *fmt, *outstr, *outptr;
mbstate_t mbs;
if (outstrp) {
outleft = outalloc = 5 * strlen(s);
outptr = outstr = zalloc(outalloc);
} else {
outleft = outalloc = 0;
outptr = outstr = NULL;
}
ums = ztrdup(s);
/*
* is this necessary at this point? niceztrlen does this
* but it's used in lots of places. however, one day this may
* be, too.
*/
untokenize(ums);
    ptr = unmetafy(ums, &umlen);
memset(&mbs, 0, sizeof mbs);
while (umlen > 0) {
size_t cnt = eol ? MB_INVALID : mbrtowc(&c, ptr, umlen, &mbs);
switch (cnt) {
case MB_INCOMPLETE:
eol = 1;
/* FALL THROUGH */
case MB_INVALID:
/* The byte didn't convert, so output it as a \M-... sequence. */
fmt = nicechar_sel(*ptr, flags & NICEFLAG_QUOTE);
newl = strlen(fmt);
cnt = 1;
/* Get mbs out of its undefined state. */
memset(&mbs, 0, sizeof mbs);
break;
case 0:
/* Careful: converting '\0' returns 0, but a '\0' is a
* real character for us, so we should consume 1 byte. */
cnt = 1;
/* FALL THROUGH */
default:
if (c == L'\'' && (flags & NICEFLAG_QUOTE)) {
fmt = "\\'";
newl = 2;
}
else if (c == L'\\' && (flags & NICEFLAG_QUOTE)) {
fmt = "\\\\";
newl = 2;
}
else
fmt = wcs_nicechar_sel(c, &newl, NULL, flags & NICEFLAG_QUOTE);
break;
}
umlen -= cnt;
ptr += cnt;
l += newl;
if (stream)
zputs(fmt, stream);
if (outstr) {
/* Append to output string */
int outlen = strlen(fmt);
if (outlen >= outleft) {
/* Reallocate to twice the length */
int outoffset = outptr - outstr;
outleft += outalloc;
outalloc *= 2;
outstr = zrealloc(outstr, outalloc);
outptr = outstr + outoffset;
}
memcpy(outptr, fmt, outlen);
/* Update start position */
outptr += outlen;
/* Update available bytes */
outleft -= outlen;
}
}
free(ums);
if (outstrp) {
*outptr = '\0';
/* Use more efficient storage for returned string */
if (flags & NICEFLAG_NODUP)
*outstrp = outstr;
else {
*outstrp = (flags & NICEFLAG_HEAP) ? dupstring(outstr) :
ztrdup(outstr);
free(outstr);
}
}
return l;
}
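/*
 * Minimal usage sketch (an illustration, not from the original source):
 * obtain both the display width and a printable, metafied copy of an
 * arbitrary metafied string.
 */
#if 0	/* example only, never compiled */
static void
example_mb_niceformat(const char *metafied)
{
    char *nice;
    size_t width = mb_niceformat(metafied, NULL, &nice, NICEFLAG_HEAP);

    /* "nice" is heap-allocated via dupstring(); "width" is the number of
     * screen columns its visible form occupies */
    (void)width;
    (void)nice;
}
#endif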
/*
* Return 1 if mb_niceformat() would reformat this string, else 0.
*/
/**/
mod_export int
is_mb_niceformat(const char *s)
{
int umlen, eol = 0, ret = 0;
wchar_t c;
char *ums, *ptr;
mbstate_t mbs;
ums = ztrdup(s);
untokenize(ums);
    ptr = unmetafy(ums, &umlen);
memset(&mbs, 0, sizeof mbs);
while (umlen > 0) {
size_t cnt = eol ? MB_INVALID : mbrtowc(&c, ptr, umlen, &mbs);
switch (cnt) {
case MB_INCOMPLETE:
eol = 1;
/* FALL THROUGH */
case MB_INVALID:
/* The byte didn't convert, so output it as a \M-... sequence. */
if (is_nicechar(*ptr)) {
ret = 1;
break;
}
cnt = 1;
/* Get mbs out of its undefined state. */
memset(&mbs, 0, sizeof mbs);
break;
case 0:
/* Careful: converting '\0' returns 0, but a '\0' is a
* real character for us, so we should consume 1 byte. */
cnt = 1;
/* FALL THROUGH */
default:
if (is_wcs_nicechar(c))
ret = 1;
break;
}
if (ret)
break;
umlen -= cnt;
ptr += cnt;
}
free(ums);
return ret;
}
/* ztrdup multibyte string with nice formatting */
/**/
mod_export char *
nicedup(const char *s, int heap)
{
char *retstr;
(void)mb_niceformat(s, NULL, &retstr, heap ? NICEFLAG_HEAP : 0);
return retstr;
}
/*
* The guts of mb_metacharlenconv(). This version assumes we are
* processing a true multibyte character string without tokens, and
* takes the shift state as an argument.
*/
/**/
mod_export int
mb_metacharlenconv_r(const char *s, wint_t *wcp, mbstate_t *mbsp)
{
size_t ret = MB_INVALID;
char inchar;
const char *ptr;
wchar_t wc;
if (STOUC(*s) <= 0x7f) {
if (wcp)
*wcp = (wint_t)*s;
return 1;
}
for (ptr = s; *ptr; ) {
if (*ptr == Meta) {
inchar = *++ptr ^ 32;
DPUTS(!*ptr,
"BUG: unexpected end of string in mb_metacharlen()\n");
} else if (imeta(*ptr)) {
/*
* As this is metafied input, this is a token --- this
* can't be a part of the string. It might be
* something on the end of an unbracketed parameter
* reference, for example.
*/
break;
} else
inchar = *ptr;
ptr++;
ret = mbrtowc(&wc, &inchar, 1, mbsp);
if (ret == MB_INVALID)
break;
if (ret == MB_INCOMPLETE)
continue;
if (wcp)
*wcp = wc;
return ptr - s;
}
if (wcp)
*wcp = WEOF;
/* No valid multibyte sequence */
memset(mbsp, 0, sizeof(*mbsp));
if (ptr > s) {
return 1 + (*s == Meta); /* Treat as single byte character */
} else
return 0; /* Probably shouldn't happen */
}
/*
* Length of metafied string s which contains the next multibyte
* character; single (possibly metafied) character if string is not null
* but character is not valid (e.g. possibly incomplete at end of string).
* Returned value is guaranteed not to reach beyond the end of the
* string (assuming correct metafication).
*
* If wcp is not NULL, the converted wide character is stored there.
* If no conversion could be done WEOF is used.
*/
/**/
mod_export int
mb_metacharlenconv(const char *s, wint_t *wcp)
{
if (!isset(MULTIBYTE) || STOUC(*s) <= 0x7f) {
/* treat as single byte, possibly metafied */
if (wcp)
*wcp = (wint_t)(*s == Meta ? s[1] ^ 32 : *s);
return 1 + (*s == Meta);
}
/*
* We have to handle tokens here, since we may be looking
* through a tokenized input. Obviously this isn't
* a valid multibyte character, so just return WEOF
* and let the caller handle it as a single character.
*
* TODO: I've a sneaking suspicion we could do more here
* to prevent the caller always needing to handle invalid
* characters specially, but sometimes it may need to know.
*/
if (itok(*s)) {
if (wcp)
*wcp = WEOF;
return 1;
}
return mb_metacharlenconv_r(s, wcp, &mb_shiftstate);
}
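/*
 * Illustrative sketch (not part of the original source): the usual idiom
 * for walking a metafied string one (possibly multibyte) character at a
 * time with mb_metacharlenconv().
 */
#if 0	/* example only, never compiled */
static void
example_metafied_walk(const char *s)
{
    wint_t wc;

    while (*s) {
	int clen = mb_metacharlenconv(s, &wc);

	/* wc is the converted character, or WEOF if conversion failed;
	 * clen is at least 1 for well-formed input, so we make progress */
	s += clen;
    }
}
#endif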
/*
* Total number of multibyte characters in metafied string s.
* Same answer as iterating mb_metacharlen() and counting calls
* until end of string.
*
* If width is 1, return total character width rather than number.
* If width is greater than 1, return 1 if character has non-zero width,
* else 0.
*
* Ends if either *ptr is '\0', the normal case (eptr may be NULL for
* this), or ptr is eptr (i.e. *eptr is where the null would be if null
* terminated) for strings not delimited by nulls --- note these are
* still metafied.
*/
/**/
mod_export int
mb_metastrlenend(char *ptr, int width, char *eptr)
{
char inchar, *laststart;
size_t ret;
wchar_t wc;
int num, num_in_char, complete;
if (!isset(MULTIBYTE) || MB_CUR_MAX == 1)
return eptr ? (int)(eptr - ptr) : ztrlen(ptr);
laststart = ptr;
ret = MB_INVALID;
num = num_in_char = 0;
complete = 1;
memset(&mb_shiftstate, 0, sizeof(mb_shiftstate));
while (*ptr && !(eptr && ptr >= eptr)) {
if (*ptr == Meta)
inchar = *++ptr ^ 32;
else
inchar = *ptr;
ptr++;
if (complete && STOUC(inchar) <= STOUC(0x7f)) {
/*
* We rely on 7-bit US-ASCII as a subset, so skip
* multibyte handling if we have such a character.
*/
num++;
laststart = ptr;
num_in_char = 0;
continue;
}
ret = mbrtowc(&wc, &inchar, 1, &mb_shiftstate);
if (ret == MB_INCOMPLETE) {
/*
* "num_in_char" is only used for incomplete characters.
* The assumption is that we will output all trailing octets
* that form part of an incomplete character as a single
* character (of single width) if we don't get a complete
* character. This is purely pragmatic --- I'm not aware
* of a standard way of dealing with incomplete characters.
*
* If we do get a complete character, num_in_char
* becomes irrelevant and is set to zero
*
* This is in contrast to "num" which counts the characters
* or widths in complete characters. The two are summed,
* so we don't count characters twice.
*/
num_in_char++;
complete = 0;
} else {
if (ret == MB_INVALID) {
/* Reset, treat as single character */
memset(&mb_shiftstate, 0, sizeof(mb_shiftstate));
ptr = laststart + (*laststart == Meta) + 1;
num++;
} else if (width) {
/*
* Returns -1 if not a printable character. We
* turn this into 0.
*/
int wcw = WCWIDTH(wc);
if (wcw > 0) {
if (width == 1)
num += wcw;
else
num++;
}
} else
num++;
laststart = ptr;
num_in_char = 0;
complete = 1;
}
}
/* If incomplete, treat remainder as trailing single character */
return num + (num_in_char ? 1 : 0);
}
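/*
 * Sketch (added for exposition): counting characters versus screen
 * columns in a NUL-terminated metafied string.
 */
#if 0	/* example only, never compiled */
static void
example_metastrlen(char *s)
{
    int nchars = mb_metastrlenend(s, 0, NULL);	/* number of characters */
    int ncols  = mb_metastrlenend(s, 1, NULL);	/* total display width  */

    (void)nchars;
    (void)ncols;
}
#endif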
/*
* The equivalent of mb_metacharlenconv_r() for
* strings that aren't metafied and hence have
* explicit lengths.
*/
/**/
mod_export int
mb_charlenconv_r(const char *s, int slen, wint_t *wcp, mbstate_t *mbsp)
{
size_t ret = MB_INVALID;
char inchar;
const char *ptr;
wchar_t wc;
if (slen && STOUC(*s) <= 0x7f) {
if (wcp)
*wcp = (wint_t)*s;
return 1;
}
for (ptr = s; slen; ) {
inchar = *ptr;
ptr++;
slen--;
ret = mbrtowc(&wc, &inchar, 1, mbsp);
if (ret == MB_INVALID)
break;
if (ret == MB_INCOMPLETE)
continue;
if (wcp)
*wcp = wc;
return ptr - s;
}
if (wcp)
*wcp = WEOF;
/* No valid multibyte sequence */
memset(mbsp, 0, sizeof(*mbsp));
if (ptr > s) {
return 1; /* Treat as single byte character */
} else
return 0; /* Probably shouldn't happen */
}
/*
* The equivalent of mb_metacharlenconv() for
* strings that aren't metafied and hence have
* explicit lengths;
*/
/**/
mod_export int
mb_charlenconv(const char *s, int slen, wint_t *wcp)
{
if (!isset(MULTIBYTE) || STOUC(*s) <= 0x7f) {
if (wcp)
*wcp = (wint_t)*s;
return 1;
}
return mb_charlenconv_r(s, slen, wcp, &mb_shiftstate);
}
/**/
#else
/* Simple replacement for mb_metacharlenconv */
/**/
mod_export int
metacharlenconv(const char *x, int *c)
{
/*
* Here we don't use STOUC() on the chars since they
* may be compared against other chars and this will fail
* if chars are signed and the high bit is set.
*/
if (*x == Meta) {
if (c)
*c = x[1] ^ 32;
return 2;
}
if (c)
*c = (char)*x;
return 1;
}
/* Simple replacement for mb_charlenconv */
/**/
mod_export int
charlenconv(const char *x, int len, int *c)
{
if (!len) {
if (c)
*c = '\0';
return 0;
}
if (c)
*c = (char)*x;
return 1;
}
/**/
#endif /* MULTIBYTE_SUPPORT */
/*
* Expand tabs to given width, with given starting position on line.
* len is length of unmetafied string in bytes.
* Output to fout.
* Return the end position on the line, i.e. if this is 0 modulo width
* the next character is aligned with a tab stop.
*
* If all is set, all tabs are expanded, else only leading tabs.
*/
/**/
mod_export int
zexpandtabs(const char *s, int len, int width, int startpos, FILE *fout,
int all)
{
int at_start = 1;
#ifdef MULTIBYTE_SUPPORT
mbstate_t mbs;
size_t ret;
wchar_t wc;
memset(&mbs, 0, sizeof(mbs));
#endif
while (len) {
if (*s == '\t') {
if (all || at_start) {
s++;
len--;
if (width <= 0 || !(startpos % width)) {
/* always output at least one space */
fputc(' ', fout);
startpos++;
}
if (width <= 0)
continue; /* paranoia */
while (startpos % width) {
fputc(' ', fout);
startpos++;
}
} else {
/*
* Leave tab alone.
* Guess width to apply... we might get this wrong.
* This is only needed if there's a following string
* that needs tabs expanding, which is unusual.
*/
startpos += width - startpos % width;
s++;
len--;
fputc('\t', fout);
}
continue;
} else if (*s == '\n' || *s == '\r') {
fputc(*s, fout);
s++;
len--;
startpos = 0;
at_start = 1;
continue;
}
at_start = 0;
#ifdef MULTIBYTE_SUPPORT
if (isset(MULTIBYTE)) {
const char *sstart = s;
ret = mbrtowc(&wc, s, len, &mbs);
if (ret == MB_INVALID) {
/* Assume single character per character */
memset(&mbs, 0, sizeof(mbs));
s++;
len--;
} else if (ret == MB_INCOMPLETE) {
/* incomplete at end --- assume likewise, best we've got */
s++;
len--;
} else {
s += ret;
len -= (int)ret;
}
if (ret == MB_INVALID || ret == MB_INCOMPLETE) {
startpos++;
} else {
int wcw = WCWIDTH(wc);
if (wcw > 0) /* paranoia */
startpos += wcw;
}
fwrite(sstart, s - sstart, 1, fout);
continue;
}
#endif /* MULTIBYTE_SUPPORT */
fputc(*s, fout);
s++;
len--;
startpos++;
}
return startpos;
}
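/*
 * Usage sketch (an illustration, not from the original source): copy an
 * unmetafied buffer to a stdio stream, expanding every tab to 8-column
 * tab stops starting from column 0.
 */
#if 0	/* example only, never compiled */
static void
example_expandtabs(const char *buf, int buflen, FILE *fout)
{
    int endpos = zexpandtabs(buf, buflen, 8, 0, fout, 1);

    /* endpos is the column the output finished on */
    (void)endpos;
}
#endif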
/* check for special characters in the string */
/**/
mod_export int
hasspecial(char const *s)
{
for (; *s; s++) {
if (ispecial(*s == Meta ? *++s ^ 32 : *s))
return 1;
}
return 0;
}
static char *
addunprintable(char *v, const char *u, const char *uend)
{
for (; u < uend; u++) {
/*
* Just do this byte by byte; there's no great
* advantage in being clever with multibyte
* characters if we don't think they're printable.
*/
int c;
if (*u == Meta)
c = STOUC(*++u ^ 32);
else
c = STOUC(*u);
switch (c) {
case '\0':
*v++ = '\\';
*v++ = '0';
if ('0' <= u[1] && u[1] <= '7') {
*v++ = '0';
*v++ = '0';
}
break;
case '\007': *v++ = '\\'; *v++ = 'a'; break;
case '\b': *v++ = '\\'; *v++ = 'b'; break;
case '\f': *v++ = '\\'; *v++ = 'f'; break;
case '\n': *v++ = '\\'; *v++ = 'n'; break;
case '\r': *v++ = '\\'; *v++ = 'r'; break;
case '\t': *v++ = '\\'; *v++ = 't'; break;
case '\v': *v++ = '\\'; *v++ = 'v'; break;
default:
*v++ = '\\';
*v++ = '0' + ((c >> 6) & 7);
*v++ = '0' + ((c >> 3) & 7);
*v++ = '0' + (c & 7);
break;
}
}
return v;
}
/*
* Quote the string s and return the result as a string from the heap.
*
* The last argument is a QT_ value defined in zsh.h other than QT_NONE.
*
* Most quote styles other than backslash assume the quotes are to
* be added outside quotestring(). QT_SINGLE_OPTIONAL is different:
* the single quotes are only added where necessary, so the
* whole expression is handled here.
*
* The string may be metafied and contain tokens.
*/
/**/
mod_export char *
quotestring(const char *s, int instring)
{
const char *u;
char *v;
int alloclen;
char *buf;
int shownull = 0;
/*
* quotesub is used with QT_SINGLE_OPTIONAL.
* quotesub = 0: mechanism not active
* quotesub = 1: mechanism pending, no "'" yet;
* needs adding at quotestart.
* quotesub = 2: mechanism active, added opening "'"; need
* closing "'".
*/
int quotesub = 0, slen;
char *quotestart;
convchar_t cc;
const char *uend;
slen = strlen(s);
switch (instring)
{
case QT_BACKSLASH_SHOWNULL:
shownull = 1;
instring = QT_BACKSLASH;
/*FALLTHROUGH*/
case QT_BACKSLASH:
/*
* With QT_BACKSLASH we may need to use $'\300' stuff.
* Keep memory usage within limits by allocating temporary
* storage and using heap for correct size at end.
*/
alloclen = slen * 7 + 1;
break;
case QT_BACKSLASH_PATTERN:
alloclen = slen * 2 + 1;
break;
case QT_SINGLE_OPTIONAL:
/*
* Here, we may need to add single quotes.
* Always show empty strings.
*/
alloclen = slen * 4 + 3;
quotesub = shownull = 1;
break;
default:
alloclen = slen * 4 + 1;
break;
}
if (!*s && shownull)
alloclen += 2; /* for '' */
quotestart = v = buf = zshcalloc(alloclen);
DPUTS(instring < QT_BACKSLASH || instring == QT_BACKTICK ||
instring > QT_BACKSLASH_PATTERN,
"BUG: bad quote type in quotestring");
u = s;
if (instring == QT_DOLLARS) {
/*
* The only way to get Nularg here is when
* it is placeholding for the empty string?
*/
if (inull(*u))
u++;
/*
* As we test for printability here we need to be able
* to look for multibyte characters.
*/
MB_METACHARINIT();
while (*u) {
uend = u + MB_METACHARLENCONV(u, &cc);
if (
#ifdef MULTIBYTE_SUPPORT
cc != WEOF &&
#endif
WC_ISPRINT(cc)) {
switch (cc) {
case ZWC('\\'):
case ZWC('\''):
*v++ = '\\';
break;
default:
if (isset(BANGHIST) && cc == (wchar_t)bangchar)
*v++ = '\\';
break;
}
while (u < uend)
*v++ = *u++;
} else {
/* Not printable */
v = addunprintable(v, u, uend);
u = uend;
}
}
} else if (instring == QT_BACKSLASH_PATTERN) {
while (*u) {
if (ipattern(*u))
*v++ = '\\';
*v++ = *u++;
}
} else {
if (shownull) {
/* We can't show an empty string with just backslash quoting. */
if (!*u) {
*v++ = '\'';
*v++ = '\'';
}
}
/*
* Here there are syntactic special characters, so
* we start by going through bytewise.
*/
while (*u) {
int dobackslash = 0;
if (*u == Tick || *u == Qtick) {
char c = *u++;
*v++ = c;
while (*u && *u != c)
*v++ = *u++;
*v++ = c;
if (*u)
u++;
continue;
} else if ((*u == Qstring || *u == '$') && u[1] == '\'' &&
instring == QT_DOUBLE) {
/*
* We don't need to quote $'...' inside a double-quoted
* string. This is largely cosmetic; it looks neater
* if we don't but it doesn't do any harm since the
* \ is stripped.
*/
*v++ = *u++;
} else if ((*u == String || *u == Qstring) &&
(u[1] == Inpar || u[1] == Inbrack || u[1] == Inbrace)) {
char c = (u[1] == Inpar ? Outpar : (u[1] == Inbrace ?
Outbrace : Outbrack));
char beg = *u;
int level = 0;
*v++ = *u++;
*v++ = *u++;
while (*u && (*u != c || level)) {
if (*u == beg)
level++;
else if (*u == c)
level--;
*v++ = *u++;
}
if (*u)
*v++ = *u++;
continue;
}
else if (ispecial(*u) &&
((*u != '=' && *u != '~') ||
u == s ||
(isset(MAGICEQUALSUBST) &&
(u[-1] == '=' || u[-1] == ':')) ||
(*u == '~' && isset(EXTENDEDGLOB))) &&
(instring == QT_BACKSLASH ||
instring == QT_SINGLE_OPTIONAL ||
(isset(BANGHIST) && *u == (char)bangchar &&
instring != QT_SINGLE) ||
(instring == QT_DOUBLE &&
(*u == '$' || *u == '`' || *u == '\"' || *u == '\\')) ||
(instring == QT_SINGLE && *u == '\''))) {
if (instring == QT_SINGLE_OPTIONAL) {
if (quotesub == 1) {
/*
* We haven't yet had to quote at the start.
*/
if (*u == '\'') {
/*
* We don't need to.
*/
*v++ = '\\';
} else {
/*
* It's now time to add quotes.
*/
if (v > quotestart)
{
char *addq;
for (addq = v; addq > quotestart; addq--)
*addq = addq[-1];
}
*quotestart = '\'';
v++;
quotesub = 2;
}
*v++ = *u++;
/*
* Next place to start quotes is here.
*/
quotestart = v;
} else if (*u == '\'') {
if (unset(RCQUOTES)) {
*v++ = '\'';
*v++ = '\\';
*v++ = '\'';
/* Don't restart quotes unless we need them */
quotesub = 1;
quotestart = v;
} else {
/* simplest just to use '' always */
*v++ = '\'';
*v++ = '\'';
}
/* dealt with */
u++;
} else {
/* else already quoting, just add */
*v++ = *u++;
}
continue;
} else if (*u == '\n' ||
(instring == QT_SINGLE && *u == '\'')) {
if (*u == '\n') {
*v++ = '$';
*v++ = '\'';
*v++ = '\\';
*v++ = 'n';
*v++ = '\'';
} else if (unset(RCQUOTES)) {
*v++ = '\'';
if (*u == '\'')
*v++ = '\\';
*v++ = *u;
*v++ = '\'';
} else
*v++ = '\'', *v++ = '\'';
u++;
continue;
} else {
/*
* We'll need a backslash, but don't add it
* yet since if the character isn't printable
* we'll have to upgrade it to $'...'.
*/
dobackslash = 1;
}
}
if (itok(*u) || instring != QT_BACKSLASH) {
/* Needs to be passed straight through. */
if (dobackslash)
*v++ = '\\';
if (*u == Inparmath) {
/*
* Already syntactically quoted: don't
* add more.
*/
int inmath = 1;
*v++ = *u++;
for (;;) {
char uc = *u;
*v++ = *u++;
if (uc == '\0')
break;
else if (uc == Outparmath && !--inmath)
break;
else if (uc == Inparmath)
++inmath;
}
} else
*v++ = *u++;
continue;
}
/*
* Now check if the output is unprintable in the
* current character set.
*/
uend = u + MB_METACHARLENCONV(u, &cc);
if (
#ifdef MULTIBYTE_SUPPORT
cc != WEOF &&
#endif
WC_ISPRINT(cc)) {
if (dobackslash)
*v++ = '\\';
while (u < uend) {
if (*u == Meta)
*v++ = *u++;
*v++ = *u++;
}
} else {
/* Not printable */
*v++ = '$';
*v++ = '\'';
v = addunprintable(v, u, uend);
*v++ = '\'';
u = uend;
}
}
}
if (quotesub == 2)
*v++ = '\'';
*v = '\0';
v = dupstring(buf);
zfree(buf, alloclen);
return v;
}
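/*
 * Minimal sketch (added for illustration): backslash-quote a metafied
 * word so the shell would read it back as a single literal word.
 * QT_BACKSLASH is one of the QT_ values referred to above; the result
 * lives on the heap.
 */
#if 0	/* example only, never compiled */
static void
example_quotestring(const char *word)
{
    char *quoted = quotestring(word, QT_BACKSLASH);

    /* special characters in "word" come back preceded by a backslash */
    (void)quoted;
}
#endif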
/*
* Unmetafy and output a string, quoted if it contains special
* characters.
*
* If stream is NULL, return the same output with any allocation on the
* heap.
*/
/**/
mod_export char *
quotedzputs(char const *s, FILE *stream)
{
int inquote = 0, c;
char *outstr, *ptr;
/* check for empty string */
if(!*s) {
if (!stream)
return dupstring("''");
fputs("''", stream);
return NULL;
}
#ifdef MULTIBYTE_SUPPORT
if (is_mb_niceformat(s)) {
if (stream) {
fputs("$'", stream);
mb_niceformat(s, stream, NULL, NICEFLAG_QUOTE);
fputc('\'', stream);
return NULL;
} else {
char *substr;
mb_niceformat(s, NULL, &substr, NICEFLAG_QUOTE|NICEFLAG_NODUP);
outstr = (char *)zhalloc(4 + strlen(substr));
sprintf(outstr, "$'%s'", substr);
free(substr);
return outstr;
}
}
#endif /* MULTIBYTE_SUPPORT */
if (!hasspecial(s)) {
if (stream) {
zputs(s, stream);
return NULL;
} else {
return dupstring(s);
}
}
if (!stream) {
const char *cptr;
int l = strlen(s) + 2;
for (cptr = s; *cptr; cptr++) {
if (*cptr == Meta)
cptr++;
else if (*cptr == '\'')
l += isset(RCQUOTES) ? 1 : 3;
}
ptr = outstr = zhalloc(l + 1);
} else {
ptr = outstr = NULL;
}
if (isset(RCQUOTES)) {
/* use rc-style quotes-within-quotes for the whole string */
if (stream) {
if (fputc('\'', stream) < 0)
return NULL;
} else
*ptr++ = '\'';
while(*s) {
if (*s == Dash)
c = '-';
else if (*s == Meta)
c = *++s ^ 32;
else
c = *s;
s++;
if (c == '\'') {
if (stream) {
if (fputc('\'', stream) < 0)
return NULL;
} else
*ptr++ = '\'';
} else if (c == '\n' && isset(CSHJUNKIEQUOTES)) {
if (stream) {
if (fputc('\\', stream) < 0)
return NULL;
} else
*ptr++ = '\\';
}
if (stream) {
if (fputc(c, stream) < 0)
return NULL;
} else {
if (imeta(c)) {
*ptr++ = Meta;
*ptr++ = c ^ 32;
} else
*ptr++ = c;
}
}
if (stream) {
if (fputc('\'', stream) < 0)
return NULL;
} else
*ptr++ = '\'';
} else {
/* use Bourne-style quoting, avoiding empty quoted strings */
while (*s) {
if (*s == Dash)
c = '-';
else if (*s == Meta)
c = *++s ^ 32;
else
c = *s;
s++;
if (c == '\'') {
if (inquote) {
if (stream) {
if (putc('\'', stream) < 0)
return NULL;
} else
*ptr++ = '\'';
inquote=0;
}
if (stream) {
if (fputs("\\'", stream) < 0)
return NULL;
} else {
*ptr++ = '\\';
*ptr++ = '\'';
}
} else {
if (!inquote) {
if (stream) {
if (fputc('\'', stream) < 0)
return NULL;
} else
*ptr++ = '\'';
inquote=1;
}
if (c == '\n' && isset(CSHJUNKIEQUOTES)) {
if (stream) {
if (fputc('\\', stream) < 0)
return NULL;
} else
*ptr++ = '\\';
}
if (stream) {
if (fputc(c, stream) < 0)
return NULL;
} else {
if (imeta(c)) {
*ptr++ = Meta;
*ptr++ = c ^ 32;
} else
*ptr++ = c;
}
}
}
if (inquote) {
if (stream) {
if (fputc('\'', stream) < 0)
return NULL;
} else
*ptr++ = '\'';
}
}
if (!stream)
*ptr++ = '\0';
return outstr;
}
/* Double-quote a metafied string. */
/**/
mod_export char *
dquotedztrdup(char const *s)
{
int len = strlen(s) * 4 + 2;
char *buf = zalloc(len);
char *p = buf, *ret;
if(isset(CSHJUNKIEQUOTES)) {
int inquote = 0;
while(*s) {
int c = *s++;
if (c == Meta)
c = *s++ ^ 32;
switch(c) {
case '"':
case '$':
case '`':
if(inquote) {
*p++ = '"';
inquote = 0;
}
*p++ = '\\';
*p++ = c;
break;
default:
if(!inquote) {
*p++ = '"';
inquote = 1;
}
if(c == '\n')
*p++ = '\\';
*p++ = c;
break;
}
}
if (inquote)
*p++ = '"';
} else {
int pending = 0;
*p++ = '"';
while(*s) {
int c = *s++;
if (c == Meta)
c = *s++ ^ 32;
switch(c) {
case '\\':
if(pending)
*p++ = '\\';
*p++ = '\\';
pending = 1;
break;
case '"':
case '$':
case '`':
if(pending)
*p++ = '\\';
*p++ = '\\';
/* FALL THROUGH */
default:
*p++ = c;
pending = 0;
break;
}
}
if(pending)
*p++ = '\\';
*p++ = '"';
}
ret = metafy(buf, p - buf, META_DUP);
zfree(buf, len);
return ret;
}
/* Unmetafy and output a string, double quoting it in its entirety. */
#if 0 /**/
int
dquotedzputs(char const *s, FILE *stream)
{
char *d = dquotedztrdup(s);
int ret = zputs(d, stream);
zsfree(d);
return ret;
}
#endif
# if defined(HAVE_NL_LANGINFO) && defined(CODESET) && !defined(__STDC_ISO_10646__)
/* Convert a character from UCS4 encoding to UTF-8 */
static size_t
ucs4toutf8(char *dest, unsigned int wval)
{
size_t len;
if (wval < 0x80)
len = 1;
else if (wval < 0x800)
len = 2;
else if (wval < 0x10000)
len = 3;
else if (wval < 0x200000)
len = 4;
else if (wval < 0x4000000)
len = 5;
else
len = 6;
switch (len) { /* falls through except to the last case */
case 6: dest[5] = (wval & 0x3f) | 0x80; wval >>= 6;
case 5: dest[4] = (wval & 0x3f) | 0x80; wval >>= 6;
case 4: dest[3] = (wval & 0x3f) | 0x80; wval >>= 6;
case 3: dest[2] = (wval & 0x3f) | 0x80; wval >>= 6;
case 2: dest[1] = (wval & 0x3f) | 0x80; wval >>= 6;
*dest = wval | ((0xfc << (6 - len)) & 0xfc);
break;
case 1: *dest = wval;
}
return len;
}
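/*
 * Worked example (added for exposition): U+00E9 is below 0x800, so
 * ucs4toutf8() emits the two-byte UTF-8 sequence 0xC3 0xA9.
 */
#if 0	/* example only, never compiled */
static void
example_ucs4toutf8(void)
{
    char buf[6];
    size_t n = ucs4toutf8(buf, 0x00E9);

    /* n == 2, buf[0] == (char)0xC3, buf[1] == (char)0xA9 */
    (void)n;
}
#endif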
#endif
/*
* The following only occurs once or twice in the code, but in different
* places depending how character set conversion is implemented.
*/
#define CHARSET_FAILED() \
if (how & GETKEY_DOLLAR_QUOTE) { \
while ((*tdest++ = *++s)) { \
if (how & GETKEY_UPDATE_OFFSET) { \
if (s - sstart > *misc) \
(*misc)++; \
} \
if (*s == Snull) { \
*len = (s - sstart) + 1; \
*tdest = '\0'; \
return buf; \
} \
} \
*len = tdest - buf; \
return buf; \
} \
*t = '\0'; \
*len = t - buf; \
return buf
/*
* Decode a key string, turning it into the literal characters.
* The value returned is a newly allocated string from the heap.
*
* The length is returned in *len. This is usually the length of
* the final unmetafied string. The exception is the case of
* a complete GETKEY_DOLLAR_QUOTE conversion where *len is the
* length of the input string which has been used (up to and including
* the terminating single quote); as the final string is metafied and
* NULL-terminated its length is not required. If both GETKEY_DOLLAR_QUOTE
* and GETKEY_UPDATE_OFFSET are present in "how", the string is not
* expected to be terminated (this is used in completion to parse
* a partial $'...'-quoted string) and the length passed back is
* that of the converted string. Note in both cases that this is a length
* in bytes (i.e. the same as given by a raw pointer difference), not
* characters, which may occupy multiple bytes.
*
* how is a set of bits from the GETKEY_ values defined in zsh.h;
* not all combinations of bits are useful. Callers will typically
* use one of the GETKEYS_ values which define sets of bits.
* Note, for example that:
* - GETKEY_SINGLE_CHAR must not be combined with GETKEY_DOLLAR_QUOTE.
* - GETKEY_UPDATE_OFFSET is only allowed if GETKEY_DOLLAR_QUOTE is
* also present.
*
* *misc is used for various purposes:
* - If GETKEY_BACKSLASH_MINUS is set, it indicates the presence
* of \- in the input.
* - If GETKEY_BACKSLASH_C is set, it indicates the presence
* of \c in the input.
* - If GETKEY_UPDATE_OFFSET is set, it is set on input to some
* mystical completion offset and is updated to a new offset based
* on the converted characters. All Hail the Completion System
* [makes the mystic completion system runic sign in the air].
*
* The return value is unmetafied unless GETKEY_DOLLAR_QUOTE is
* in use.
*/
/**/
mod_export char *
getkeystring(char *s, int *len, int how, int *misc)
{
char *buf, tmp[1];
char *t, *tdest = NULL, *u = NULL, *sstart = s, *tbuf = NULL;
char svchar = '\0';
int meta = 0, control = 0, ignoring = 0;
int i;
#if defined(HAVE_WCHAR_H) && defined(HAVE_WCTOMB) && defined(__STDC_ISO_10646__)
wint_t wval;
int count;
#else
unsigned int wval;
# if defined(HAVE_NL_LANGINFO) && defined(CODESET)
# if defined(HAVE_ICONV)
iconv_t cd;
char inbuf[4];
size_t inbytes, outbytes;
# endif
size_t count;
# endif
#endif
DPUTS((how & GETKEY_UPDATE_OFFSET) &&
(how & ~(GETKEYS_DOLLARS_QUOTE|GETKEY_UPDATE_OFFSET)),
"BUG: offset updating in getkeystring only supported with $'.");
DPUTS((how & (GETKEY_DOLLAR_QUOTE|GETKEY_SINGLE_CHAR)) ==
(GETKEY_DOLLAR_QUOTE|GETKEY_SINGLE_CHAR),
"BUG: incompatible options in getkeystring");
if (how & GETKEY_SINGLE_CHAR)
t = buf = tmp;
else {
/* Length including terminating NULL */
int maxlen = 1;
/*
* We're not necessarily guaranteed the output string will
* be no longer than the input with \u and \U when output
* characters need to be metafied. As this is the only
* case where the string can get longer (?I think),
* include it in the allocation length here but don't
* bother taking account of other factors.
*/
for (t = s; *t; t++) {
if (*t == '\\') {
if (!t[1]) {
maxlen++;
break;
}
if (t[1] == 'u' || t[1] == 'U')
maxlen += MB_CUR_MAX * 2;
else
maxlen += 2;
/* skip the backslash and the following character */
t++;
} else
maxlen++;
}
if (how & GETKEY_DOLLAR_QUOTE) {
/*
* We're going to unmetafy into a new string, but
* to get a proper metafied input we're going to metafy
* into an intermediate buffer. This is necessary if we have
* \u and \U's with multiple metafied bytes. We can't
* simply remetafy the entire string because there may
* be tokens (indeed, we know there are lexical nulls floating
* around), so we have to be aware character by character
* what we are converting.
*
* In this case, buf is the final buffer (as usual),
* but t points into a temporary buffer that just has
* to be long enough to hold the result of one escape
 * code transformation. We count this as a full multibyte
* character (MB_CUR_MAX) with every character metafied
* (*2) plus a little bit of fuzz (for e.g. the odd backslash).
*/
buf = tdest = zhalloc(maxlen);
t = tbuf = zhalloc(MB_CUR_MAX * 3 + 1);
} else {
t = buf = zhalloc(maxlen);
}
}
for (; *s; s++) {
if (*s == '\\' && s[1]) {
int miscadded;
if ((how & GETKEY_UPDATE_OFFSET) && s - sstart < *misc) {
(*misc)--;
miscadded = 1;
} else
miscadded = 0;
switch (*++s) {
case 'a':
#ifdef __STDC__
*t++ = '\a';
#else
*t++ = '\07';
#endif
break;
case 'n':
*t++ = '\n';
break;
case 'b':
*t++ = '\b';
break;
case 't':
*t++ = '\t';
break;
case 'v':
*t++ = '\v';
break;
case 'f':
*t++ = '\f';
break;
case 'r':
*t++ = '\r';
break;
case 'E':
if (!(how & GETKEY_EMACS)) {
*t++ = '\\', s--;
if (miscadded)
(*misc)++;
continue;
}
/* FALL THROUGH */
case 'e':
*t++ = '\033';
break;
case 'M':
/* HERE: GETKEY_UPDATE_OFFSET */
if (how & GETKEY_EMACS) {
if (s[1] == '-')
s++;
meta = 1 + control; /* preserve the order of ^ and meta */
} else {
if (miscadded)
(*misc)++;
*t++ = '\\', s--;
}
continue;
case 'C':
/* HERE: GETKEY_UPDATE_OFFSET */
if (how & GETKEY_EMACS) {
if (s[1] == '-')
s++;
control = 1;
} else {
if (miscadded)
(*misc)++;
*t++ = '\\', s--;
}
continue;
case Meta:
if (miscadded)
(*misc)++;
*t++ = '\\', s--;
break;
case '-':
if (how & GETKEY_BACKSLASH_MINUS) {
*misc = 1;
break;
}
goto def;
case 'c':
if (how & GETKEY_BACKSLASH_C) {
*misc = 1;
*t = '\0';
*len = t - buf;
return buf;
}
goto def;
case 'U':
if ((how & GETKEY_UPDATE_OFFSET) && s - sstart < *misc)
(*misc) -= 4;
/* FALLTHROUGH */
case 'u':
if ((how & GETKEY_UPDATE_OFFSET) && s - sstart < *misc) {
(*misc) -= 6; /* HERE don't really believe this */
/*
* We've now adjusted the offset for all the input
* characters, so we need to add for each
* byte of output below.
*/
}
wval = 0;
for (i=(*s == 'u' ? 4 : 8); i>0; i--) {
if (*++s && idigit(*s))
wval = wval * 16 + (*s - '0');
else if (*s && ((*s >= 'a' && *s <= 'f') ||
(*s >= 'A' && *s <= 'F')))
wval = wval * 16 + (*s & 0x1f) + 9;
else {
s--;
break;
}
}
if (how & GETKEY_SINGLE_CHAR) {
*misc = wval;
return s+1;
}
#if defined(HAVE_WCHAR_H) && defined(HAVE_WCTOMB) && defined(__STDC_ISO_10646__)
count = wctomb(t, (wchar_t)wval);
if (count == -1) {
zerr("character not in range");
CHARSET_FAILED();
}
if ((how & GETKEY_UPDATE_OFFSET) && s - sstart < *misc)
(*misc) += count;
t += count;
# else
# if defined(HAVE_NL_LANGINFO) && defined(CODESET)
if (!strcmp(nl_langinfo(CODESET), "UTF-8")) {
count = ucs4toutf8(t, wval);
t += count;
if ((how & GETKEY_UPDATE_OFFSET) && s - sstart < *misc)
(*misc) += count;
} else {
# ifdef HAVE_ICONV
ICONV_CONST char *inptr = inbuf;
const char *codesetstr = nl_langinfo(CODESET);
inbytes = 4;
outbytes = 6;
/* store value in big endian form */
for (i=3;i>=0;i--) {
inbuf[i] = wval & 0xff;
wval >>= 8;
}
/*
* If the code set isn't handled, we'd better
* assume it's US-ASCII rather than just failing
* hopelessly. Solaris has a weird habit of
* returning 646. This is handled by the
* native iconv(), but not by GNU iconv; what's
* more, some versions of the native iconv don't
* handle standard names like ASCII.
*
* This should only be a problem if there's a
* mismatch between the NLS and the iconv in use,
* which probably only means if libiconv is in use.
* We checked at configure time if our libraries
* pulled in _libiconv_version, which should be
* a good test.
*
* It shouldn't ever be NULL, but while we're
* being paranoid...
*/
#ifdef ICONV_FROM_LIBICONV
if (!codesetstr || !*codesetstr)
codesetstr = "US-ASCII";
#endif
cd = iconv_open(codesetstr, "UCS-4BE");
#ifdef ICONV_FROM_LIBICONV
if (cd == (iconv_t)-1 && !strcmp(codesetstr, "646")) {
codesetstr = "US-ASCII";
cd = iconv_open(codesetstr, "UCS-4BE");
}
#endif
if (cd == (iconv_t)-1) {
zerr("cannot do charset conversion (iconv failed)");
CHARSET_FAILED();
}
count = iconv(cd, &inptr, &inbytes, &t, &outbytes);
iconv_close(cd);
if (count == (size_t)-1) {
zerr("character not in range");
CHARSET_FAILED();
}
if ((how & GETKEY_UPDATE_OFFSET) && s - sstart < *misc)
(*misc) += count;
# else
zerr("cannot do charset conversion (iconv not available)");
CHARSET_FAILED();
# endif
}
# else
zerr("cannot do charset conversion (NLS not supported)");
CHARSET_FAILED();
# endif
# endif
if (how & GETKEY_DOLLAR_QUOTE) {
char *t2;
for (t2 = tbuf; t2 < t; t2++) {
if (imeta(*t2)) {
*tdest++ = Meta;
*tdest++ = *t2 ^ 32;
} else
*tdest++ = *t2;
}
/* reset temporary buffer after handling */
t = tbuf;
}
continue;
case '\'':
case '\\':
if (how & GETKEY_DOLLAR_QUOTE) {
/*
* Usually \' and \\ will have the initial
* \ turned into a Bnull, however that's not
* necessarily the case when called from
* completion.
*/
*t++ = *s;
break;
}
/* FALLTHROUGH */
default:
def:
/* HERE: GETKEY_UPDATE_OFFSET? */
if ((idigit(*s) && *s < '8') || *s == 'x') {
if (!(how & GETKEY_OCTAL_ESC)) {
if (*s == '0')
s++;
else if (*s != 'x') {
*t++ = '\\', s--;
continue;
}
}
if (s[1] && s[2] && s[3]) {
svchar = s[3];
s[3] = '\0';
u = s;
}
*t++ = zstrtol(s + (*s == 'x'), &s,
(*s == 'x') ? 16 : 8);
if ((how & GETKEY_PRINTF_PERCENT) && t[-1] == '%')
*t++ = '%';
if (svchar) {
u[3] = svchar;
svchar = '\0';
}
s--;
} else {
if (!(how & GETKEY_EMACS) && *s != '\\') {
if (miscadded)
(*misc)++;
*t++ = '\\';
}
*t++ = *s;
}
break;
}
} else if ((how & GETKEY_DOLLAR_QUOTE) && *s == Snull) {
/* return length to following character */
*len = (s - sstart) + 1;
*tdest = '\0';
return buf;
} else if (*s == '^' && !control && (how & GETKEY_CTRL) && s[1]) {
control = 1;
continue;
#ifdef MULTIBYTE_SUPPORT
} else if ((how & GETKEY_SINGLE_CHAR) &&
isset(MULTIBYTE) && STOUC(*s) > 127) {
wint_t wc;
int len;
len = mb_metacharlenconv(s, &wc);
if (wc != WEOF) {
*misc = (int)wc;
return s + len;
}
#endif
} else if (*s == Meta)
*t++ = *++s ^ 32;
else {
if (itok(*s)) {
/*
* We need to be quite careful here. We haven't
* necessarily got an input stream with all tokens
* removed, so the majority of tokens need passing
* through untouched and without Meta handling.
 * However, we may need to handle tokenized
* backslashes.
*/
if (meta || control) {
/*
* Presumably we should be using meta or control
* on the character representing the token.
*
* Special case: $'\M-\\' where the token is a Bnull.
* This time we dump the Bnull since we're
* replacing the whole thing. The lexer
* doesn't know about the meta or control modifiers.
*/
if ((how & GETKEY_DOLLAR_QUOTE) && *s == Bnull)
*t++ = *++s;
else
*t++ = ztokens[*s - Pound];
} else if (how & GETKEY_DOLLAR_QUOTE) {
/*
* We don't want to metafy this, it's a real
* token.
*/
*tdest++ = *s;
if (*s == Bnull) {
/*
* Bnull is a backslash which quotes a couple
* of special characters that always appear
* literally next. See strquote handling
* in gettokstr() in lex.c. We need
* to retain the Bnull (as above) so that quote
* handling in completion can tell where the
* backslash was.
*/
*tdest++ = *++s;
}
/* reset temporary buffer, now handled */
t = tbuf;
continue;
} else
*t++ = *s;
} else
*t++ = *s;
}
if (meta == 2) {
t[-1] |= 0x80;
meta = 0;
}
if (control) {
if (t[-1] == '?')
t[-1] = 0x7f;
else
t[-1] &= 0x9f;
control = 0;
}
if (meta) {
t[-1] |= 0x80;
meta = 0;
}
if (how & GETKEY_DOLLAR_QUOTE) {
char *t2;
for (t2 = tbuf; t2 < t; t2++) {
/*
* In POSIX mode, an embedded NULL is discarded and
* terminates processing. It just does, that's why.
*/
if (isset(POSIXSTRINGS)) {
if (*t2 == '\0')
ignoring = 1;
if (ignoring)
break;
}
if (imeta(*t2)) {
*tdest++ = Meta;
*tdest++ = *t2 ^ 32;
} else {
*tdest++ = *t2;
}
}
/*
* Reset use of temporary buffer.
*/
t = tbuf;
}
if ((how & GETKEY_SINGLE_CHAR) && t != tmp) {
*misc = STOUC(tmp[0]);
return s + 1;
}
}
/*
* When called from completion, where we use GETKEY_UPDATE_OFFSET to
* update the index into the metafied editor line, we don't necessarily
* have the end of a $'...' quotation, else we should do.
*/
DPUTS((how & (GETKEY_DOLLAR_QUOTE|GETKEY_UPDATE_OFFSET)) ==
GETKEY_DOLLAR_QUOTE, "BUG: unterminated $' substitution");
*t = '\0';
if (how & GETKEY_DOLLAR_QUOTE)
*tdest = '\0';
if (how & GETKEY_SINGLE_CHAR)
*misc = 0;
else
*len = ((how & GETKEY_DOLLAR_QUOTE) ? tdest : t) - buf;
return buf;
}
/* Return non-zero if s is a prefix of t. */
/**/
mod_export int
strpfx(const char *s, const char *t)
{
while (*s && *s == *t)
s++, t++;
return !*s;
}
/* Return non-zero if s is a suffix of t. */
/**/
mod_export int
strsfx(char *s, char *t)
{
int ls = strlen(s), lt = strlen(t);
if (ls <= lt)
return !strcmp(t + lt - ls, s);
return 0;
}
/**/
static int
upchdir(int n)
{
char buf[PATH_MAX+1];
char *s;
int err = -1;
while (n > 0) {
for (s = buf; s < buf + PATH_MAX - 4 && n--; )
*s++ = '.', *s++ = '.', *s++ = '/';
s[-1] = '\0';
if (chdir(buf))
return err;
err = -2;
}
return 0;
}
/*
* Initialize a "struct dirsav".
* The structure will be set to the directory we want to save
* the first time we change to a different directory.
*/
/**/
mod_export void
init_dirsav(Dirsav d)
{
d->ino = d->dev = 0;
d->dirname = NULL;
d->dirfd = d->level = -1;
}
/*
* Change directory, without following symlinks. Returns 0 on success, -1
* on failure. Sets errno to ENOTDIR if any symlinks are encountered. If
* fchdir() fails, or the current directory is unreadable, we might end up
* in an unwanted directory in case of failure.
*
* path is an unmetafied but null-terminated string, as needed by system
* calls.
*/
/**/
mod_export int
lchdir(char const *path, struct dirsav *d, int hard)
{
char const *pptr;
int level;
struct stat st1;
struct dirsav ds;
#ifdef HAVE_LSTAT
char buf[PATH_MAX + 1], *ptr;
int err;
struct stat st2;
#endif
#ifdef HAVE_FCHDIR
int close_dir = 0;
#endif
if (!d) {
init_dirsav(&ds);
d = &ds;
}
#ifdef HAVE_LSTAT
if ((*path == '/' || !hard) &&
(d != &ds || hard)){
#else
if (*path == '/') {
#endif
level = -1;
#ifndef HAVE_FCHDIR
if (!d->dirname)
zgetdir(d);
#endif
} else {
level = 0;
if (!d->dev && !d->ino) {
stat(".", &st1);
d->dev = st1.st_dev;
d->ino = st1.st_ino;
}
}
#ifdef HAVE_LSTAT
if (!hard)
#endif
{
if (d != &ds) {
for (pptr = path; *pptr; level++) {
while (*pptr && *pptr++ != '/');
while (*pptr == '/')
pptr++;
}
d->level = level;
}
return zchdir((char *) path);
}
#ifdef HAVE_LSTAT
#ifdef HAVE_FCHDIR
if (d->dirfd < 0) {
close_dir = 1;
if ((d->dirfd = open(".", O_RDONLY | O_NOCTTY)) < 0 &&
zgetdir(d) && *d->dirname != '/')
d->dirfd = open("..", O_RDONLY | O_NOCTTY);
}
#endif
if (*path == '/')
if (chdir("/") < 0)
zwarn("failed to chdir(/): %e", errno);
for(;;) {
while(*path == '/')
path++;
if(!*path) {
if (d == &ds)
zsfree(ds.dirname);
else
d->level = level;
#ifdef HAVE_FCHDIR
if (d->dirfd >=0 && close_dir) {
close(d->dirfd);
d->dirfd = -1;
}
#endif
return 0;
}
for(pptr = path; *++pptr && *pptr != '/'; ) ;
if(pptr - path > PATH_MAX) {
err = ENAMETOOLONG;
break;
}
for(ptr = buf; path != pptr; )
*ptr++ = *path++;
*ptr = 0;
if(lstat(buf, &st1)) {
err = errno;
break;
}
if(!S_ISDIR(st1.st_mode)) {
err = ENOTDIR;
break;
}
if(chdir(buf)) {
err = errno;
break;
}
if (level >= 0)
level++;
if(lstat(".", &st2)) {
err = errno;
break;
}
if(st1.st_dev != st2.st_dev || st1.st_ino != st2.st_ino) {
err = ENOTDIR;
break;
}
}
if (restoredir(d)) {
int restoreerr = errno;
int i;
/*
* Failed to restore the directory.
* Just be definite, cd to root and report the result.
*/
for (i = 0; i < 2; i++) {
const char *cdest;
if (i)
cdest = "/";
else {
if (!home)
continue;
cdest = home;
}
zsfree(pwd);
pwd = ztrdup(cdest);
if (chdir(pwd) == 0)
break;
}
if (i == 2)
zerr("lost current directory, failed to cd to /: %e", errno);
else
zerr("lost current directory: %e: changed to `%s'", restoreerr,
pwd);
if (d == &ds)
zsfree(ds.dirname);
#ifdef HAVE_FCHDIR
if (d->dirfd >=0 && close_dir) {
close(d->dirfd);
d->dirfd = -1;
}
#endif
errno = err;
return -2;
}
if (d == &ds)
zsfree(ds.dirname);
#ifdef HAVE_FCHDIR
if (d->dirfd >=0 && close_dir) {
close(d->dirfd);
d->dirfd = -1;
}
#endif
errno = err;
return -1;
#endif /* HAVE_LSTAT */
}
/**/
mod_export int
restoredir(struct dirsav *d)
{
int err = 0;
struct stat sbuf;
if (d->dirname && *d->dirname == '/')
return chdir(d->dirname);
#ifdef HAVE_FCHDIR
if (d->dirfd >= 0) {
if (!fchdir(d->dirfd)) {
if (!d->dirname) {
return 0;
} else if (chdir(d->dirname)) {
close(d->dirfd);
d->dirfd = -1;
err = -2;
}
} else {
close(d->dirfd);
d->dirfd = err = -1;
}
} else
#endif
if (d->level > 0)
err = upchdir(d->level);
else if (d->level < 0)
err = -1;
if (d->dev || d->ino) {
stat(".", &sbuf);
if (sbuf.st_ino != d->ino || sbuf.st_dev != d->dev)
err = -2;
}
return err;
}
/* Check whether the shell is running with privileges in effect. *
* This is the case if EITHER the euid is zero, OR (if the system *
* supports POSIX.1e (POSIX.6) capability sets) the process' *
* Effective or Inheritable capability sets are non-empty. */
/**/
int
privasserted(void)
{
if(!geteuid())
return 1;
#ifdef HAVE_CAP_GET_PROC
{
cap_t caps = cap_get_proc();
if(caps) {
/* POSIX doesn't define a way to test whether a capability set *
* is empty or not. Typical. I hope this is conforming... */
cap_flag_value_t val;
cap_value_t n;
for(n = 0; !cap_get_flag(caps, n, CAP_EFFECTIVE, &val); n++)
if(val) {
cap_free(caps);
return 1;
}
}
cap_free(caps);
}
#endif /* HAVE_CAP_GET_PROC */
return 0;
}
/**/
mod_export int
mode_to_octal(mode_t mode)
{
int m = 0;
if(mode & S_ISUID)
m |= 04000;
if(mode & S_ISGID)
m |= 02000;
if(mode & S_ISVTX)
m |= 01000;
if(mode & S_IRUSR)
m |= 00400;
if(mode & S_IWUSR)
m |= 00200;
if(mode & S_IXUSR)
m |= 00100;
if(mode & S_IRGRP)
m |= 00040;
if(mode & S_IWGRP)
m |= 00020;
if(mode & S_IXGRP)
m |= 00010;
if(mode & S_IROTH)
m |= 00004;
if(mode & S_IWOTH)
m |= 00002;
if(mode & S_IXOTH)
m |= 00001;
return m;
}
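/*
 * Quick example (added for exposition): a typical rwxr-xr-x permission
 * set maps to the familiar octal value.
 */
#if 0	/* example only, never compiled */
static void
example_mode_to_octal(void)
{
    int m = mode_to_octal(S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH);

    /* m == 0755 */
    (void)m;
}
#endif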
#ifdef MAILDIR_SUPPORT
/*
* Stat a file. If it's a maildir, check all messages
* in the maildir and present the grand total as a file.
* The fields in the 'struct stat' are from the mail directory.
* The following fields are emulated:
*
* st_nlink always 1
* st_size total number of bytes in all files
* st_blocks total number of messages
* st_atime access time of newest file in maildir
* st_mtime modify time of newest file in maildir
* st_mode S_IFDIR changed to S_IFREG
*
* This is good enough for most mail-checking applications.
*/
/**/
int
mailstat(char *path, struct stat *st)
{
DIR *dd;
struct dirent *fn;
struct stat st_ret, st_tmp;
static struct stat st_ret_last;
char *dir, *file = 0;
int i;
time_t atime = 0, mtime = 0;
size_t plen = strlen(path), dlen;
/* First see if it's a directory. */
if ((i = stat(path, st)) != 0 || !S_ISDIR(st->st_mode))
return i;
st_ret = *st;
st_ret.st_nlink = 1;
st_ret.st_size = 0;
st_ret.st_blocks = 0;
st_ret.st_mode &= ~S_IFDIR;
st_ret.st_mode |= S_IFREG;
/* See if cur/ is present */
dir = appstr(ztrdup(path), "/cur");
if (stat(dir, &st_tmp) || !S_ISDIR(st_tmp.st_mode)) return 0;
st_ret.st_atime = st_tmp.st_atime;
/* See if tmp/ is present */
dir[plen] = 0;
dir = appstr(dir, "/tmp");
if (stat(dir, &st_tmp) || !S_ISDIR(st_tmp.st_mode)) return 0;
st_ret.st_mtime = st_tmp.st_mtime;
/* And new/ */
dir[plen] = 0;
dir = appstr(dir, "/new");
if (stat(dir, &st_tmp) || !S_ISDIR(st_tmp.st_mode)) return 0;
st_ret.st_mtime = st_tmp.st_mtime;
#if THERE_IS_EXACTLY_ONE_MAILDIR_IN_MAILPATH
{
static struct stat st_new_last;
/* Optimization - if new/ didn't change, nothing else did. */
if (st_tmp.st_dev == st_new_last.st_dev &&
st_tmp.st_ino == st_new_last.st_ino &&
st_tmp.st_atime == st_new_last.st_atime &&
st_tmp.st_mtime == st_new_last.st_mtime) {
*st = st_ret_last;
return 0;
}
st_new_last = st_tmp;
}
#endif
/* Loop over new/ and cur/ */
for (i = 0; i < 2; i++) {
dir[plen] = 0;
dir = appstr(dir, i ? "/cur" : "/new");
if ((dd = opendir(dir)) == NULL) {
zsfree(file);
zsfree(dir);
return 0;
}
dlen = strlen(dir) + 1; /* include the "/" */
while ((fn = readdir(dd)) != NULL) {
if (fn->d_name[0] == '.')
continue;
if (file) {
file[dlen] = 0;
file = appstr(file, fn->d_name);
} else {
file = tricat(dir, "/", fn->d_name);
}
if (stat(file, &st_tmp) != 0)
continue;
st_ret.st_size += st_tmp.st_size;
st_ret.st_blocks++;
if (st_tmp.st_atime != st_tmp.st_mtime &&
st_tmp.st_atime > atime)
atime = st_tmp.st_atime;
if (st_tmp.st_mtime > mtime)
mtime = st_tmp.st_mtime;
}
closedir(dd);
}
zsfree(file);
zsfree(dir);
if (atime) st_ret.st_atime = atime;
if (mtime) st_ret.st_mtime = mtime;
*st = st_ret_last = st_ret;
return 0;
}
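/*
 * Usage sketch (added for illustration; the maildir path is made up):
 * mailstat() behaves like stat() but summarises a maildir as if it were
 * a single mailbox file.
 */
#if 0	/* example only, never compiled */
static void
example_mailstat(void)
{
    char mypath[] = "/home/user/Maildir";
    struct stat st;

    if (mailstat(mypath, &st) == 0) {
	/* st.st_blocks holds the message count, st.st_size the total
	 * size in bytes and st.st_mtime the newest message's mtime */
    }
}
#endif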
#endif
| z-shell/zinit |
<|start_filename|>src/navigation/defaultOptions.js<|end_filename|>
import { gStyle } from '../constants';
export default () => ({
headerStyle: gStyle.navHeaderStyle
});
| sushmali1230/TVRemote |
<|start_filename|>examples/managed-react/.storybook/config.js<|end_filename|>
import { configure, addDecorator } from '@storybook/react';
import { withScreenshot } from 'zisui';
function loadStories() {
require('../src/stories');
}
addDecorator(withScreenshot({
viewport: {
width: 1200,
height: 800,
}
}));
configure(loadStories, module);
| bokuweb/zisui |
<|start_filename|>application/static/javascript/victory.js<|end_filename|>
(function() {
var a, crashDocumentController, documentsController, groupedDocumentsController, indexController, settingsApplicationsController, settingsMenuController, settingsProfileController, settingsUsersController;
a = angular.module('victory.controller', ['victory.provider']);
indexController = function($scope) {
/*
/
*/
if ($scope.victory.user.isLogin) {
return location.href = '#/crashes/grouped';
} else {
return location.href = '#/login';
}
};
indexController.$inject = ['$scope'];
a.controller('IndexCtrl', indexController);
settingsMenuController = function($scope, $state) {
/*
The controller of the settings menu
*/
return $scope.active = $state.current.name;
};
settingsMenuController.$inject = ['$scope', '$state'];
a.controller('SettingsMenuCtrl', settingsMenuController);
a.controller('SettingsCtrl', function() {
/*
/settings
*/
return location.href = '#/settings/applications';
});
settingsApplicationsController = function($scope, $victory, applications) {
/*
/settings/applications
:scope name: new application name
:scope description: new application description
:scope items: [{id, name, newName, description, newDescription
app_key, create_time, is_owner, members:[{id, name, email, is_owner}]
}]
*/
var item, _i, _len;
for (_i = 0, _len = applications.length; _i < _len; _i++) {
item = applications[_i];
item.newName = item.name;
item.newDescription = item.description;
}
$scope.items = applications;
$scope.getApplications = function() {
/*
Get applications.
*/
return $victory.setting.getApplications({
success: function(data) {
var _j, _len1, _ref;
_ref = data.items;
for (_j = 0, _len1 = _ref.length; _j < _len1; _j++) {
item = _ref[_j];
item.newName = item.name;
item.newDescription = item.description;
}
return $scope.items = data.items;
}
});
};
$scope.addApplication = function() {
/*
Add an application.
*/
return $victory.setting.addApplication({
data: {
name: $scope.name,
description: $scope.description
},
error: function(data, status) {
if (status === 400 && data) {
return $scope.errors = data;
}
},
success: function() {
$scope.name = '';
$scope.description = '';
$('.modal.in').modal('hide');
return $scope.getApplications();
}
});
};
$scope.updateApplication = function(id) {
/*
Update the application.
*/
var updateItem, x;
updateItem = ((function() {
var _j, _len1, _ref, _results;
_ref = $scope.items;
_results = [];
for (_j = 0, _len1 = _ref.length; _j < _len1; _j++) {
x = _ref[_j];
if (x.id === id) {
_results.push(x);
}
}
return _results;
})())[0];
return $victory.setting.updateApplication({
id: id,
data: {
name: updateItem.newName,
description: updateItem.newDescription
},
error: function(data, status) {
if (status === 400 && data) {
return updateItem.errors = data;
}
},
success: function() {
$('.modal.in').modal('hide');
return $scope.getApplications();
}
});
};
$scope.deleteApplication = function(id) {
/*
Delete the application.
*/
return $victory.setting.deleteApplication({
id: id,
success: function() {
$('.modal.in').modal('hide');
return $scope.getApplications();
}
});
};
$scope.inviteUser = function(id, email) {
/*
Invite a user into the application.
*/
return $victory.setting.inviteUser({
applicationId: id,
email: email,
success: function() {
$('.modal.in').modal('hide');
return $scope.getApplications();
}
});
};
return $scope.deleteMenter = function(applicationId, memberId) {
/*
Delete the member from the application.
*/
return $victory.setting.deleteMember({
applicationId: applicationId,
memberId: memberId,
success: function() {
var application, x;
application = ((function() {
var _j, _len1, _ref, _results;
_ref = $scope.items;
_results = [];
for (_j = 0, _len1 = _ref.length; _j < _len1; _j++) {
x = _ref[_j];
if (x.id === applicationId) {
_results.push(x);
}
}
return _results;
})())[0];
return application.members = (function() {
var _j, _len1, _ref, _results;
_ref = application.members;
_results = [];
for (_j = 0, _len1 = _ref.length; _j < _len1; _j++) {
x = _ref[_j];
if (x.id !== memberId) {
_results.push(x);
}
}
return _results;
})();
}
});
};
};
settingsApplicationsController.$inject = ['$scope', '$victory', 'applications'];
a.controller('SettingsApplicationsCtrl', settingsApplicationsController);
settingsUsersController = function($scope, $victory, users) {
/*
/settings/users
*/
$scope.items = users;
$scope.getUsers = function() {
/*
Get users.
*/
return $victory.setting.getUsers({
success: function(data) {
return $scope.items = data.items;
}
});
};
$scope.addUser = function() {
/*
Add a user.
*/
return $victory.setting.addUser({
email: $scope.email,
success: function() {
$scope.email = '';
return $scope.getUsers();
}
});
};
return $scope.deleteUser = function(id) {
/*
Delete the user.
*/
return $victory.setting.deleteUser({
id: id,
success: function() {
var x;
return $scope.items = (function() {
var _i, _len, _ref, _results;
_ref = $scope.items;
_results = [];
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
x = _ref[_i];
if (x.id !== id) {
_results.push(x);
}
}
return _results;
})();
}
});
};
};
settingsUsersController.$inject = ['$scope', '$victory', 'users'];
a.controller('SettingsUsersCtrl', settingsUsersController);
settingsProfileController = function($scope, $injector, profile) {
/*
/settings/profile
*/
var $rootScope, $victory;
$victory = $injector.get('$victory');
$rootScope = $injector.get('$rootScope');
$scope.profile = profile;
$scope.getProfile = function() {
return $victory.setting.getProfile({
success: function(data) {
$rootScope.victory.user.name = data.name;
return $scope.profile = data;
}
});
};
return $scope.updateProfile = function() {
return $victory.setting.updateProfile({
name: $scope.profile.name,
error: function(data, status) {
if (status === 400 && data) {
return $scope.errors = data;
}
},
success: function() {
return $scope.getProfile();
}
});
};
};
settingsProfileController.$inject = ['$scope', '$injector', 'profile'];
a.controller('SettingsProfileCtrl', settingsProfileController);
groupedDocumentsController = function($scope, $stateParams, documentMode, groupedDocumentsAndApplications) {
/*
:scope documentMode: <crashes/exceptions/logs>
:scope keyword: search keywords
:scope applications: [{id, name, description,
app_key, create_time, is_owner}]
:scope groupedDocuments: [{group_tag, create_time, name, email, title, description, times}]
:scope page: {total, index, max, hasPrevious, hasNext}
*/
$scope.documentMode = documentMode;
$scope.keyword = $stateParams.keyword ? $stateParams.keyword : '';
$scope.applications = groupedDocumentsAndApplications.applications;
$scope.groupedDocuments = groupedDocumentsAndApplications.groupedDocuments;
$scope.page = groupedDocumentsAndApplications.page;
$scope.getGroupedDocumentsUrl = function(keyword, index) {
if (index == null) {
index = 0;
}
/*
Get the url of grouped documents.
*/
return "#/applications/" + $scope.selectedApplication.id + "/" + $scope.documentMode + "/grouped/" + keyword + "/" + index;
};
$scope.gotoSearchPage = function(keyword, index) {
if (index == null) {
index = 0;
}
/*
Go to the search page of grouped documents.
*/
return location.href = $scope.getGroupedDocumentsUrl(keyword, index);
};
$scope.clickGroupedDocument = function(groupedDocument) {
/*
Clicked the grouped document row in the table.
*/
if (groupedDocument.times > 1 || $scope.documentMode === 'crashes') {
return location.href = "#/applications/" + $scope.selectedApplication.id + "/" + $scope.documentMode + "/" + groupedDocument.group_tag;
}
};
return $scope.modal = function(groupedDocument) {
/*
Check whether the grouped document should show the bootstrap modal window.
:param groupedDocument: grouped document
:return: "modal" / ""
*/
if (groupedDocument.times > 1) {
return "";
} else {
return "modal";
}
};
};
groupedDocumentsController.$inject = ['$scope', '$stateParams', 'documentMode', 'groupedDocumentsAndApplications'];
a.controller('GroupedDocumentsCtrl', groupedDocumentsController);
documentsController = function($scope, $victory, documentMode, documents) {
/*
/applications/<applicationId>/<documentMode>/<groupTag>
*/
$scope.documentMode = documentMode;
$scope.documents = documents;
$victory.application.getApplications({
success: function(data) {
return $scope.applications = data.items;
}
});
return $scope.renderDescription = function(document) {
/*
Render the description of the document.
*/
if (document.description) {
return document.description;
} else if (document.parameters) {
return "Parameters: " + document.parameters;
} else if (document.url) {
return "URL: " + document.url;
}
return "";
};
};
documentsController.$inject = ['$scope', '$victory', 'documentMode', 'documents'];
a.controller('DocumentsCtrl', documentsController);
crashDocumentController = function($scope, $victory, documentMode, crash) {
/*
/applications/<applicationId>/<documentMode>/<groupTag>
*/
$scope.documentMode = documentMode;
$scope.crash = crash;
return $victory.application.getApplications({
success: function(data) {
return $scope.applications = data.items;
}
});
};
crashDocumentController.$inject = ['$scope', '$victory', 'documentMode', 'crash'];
a.controller('CrashDocumentCtrl', crashDocumentController);
}).call(this);
(function() {
var a, vNavigation;
a = angular.module('victory.directive', []);
a.directive('vTooltip', function() {
return {
/*
Show the bootstrap tooltip.
*/
restrict: 'A',
link: function(scope, element, attrs) {
return attrs.$observe('vTooltip', function(value) {
if (value) {
$(element).attr('title', scope.$eval(value));
}
return $(element).tooltip();
});
}
};
});
a.directive('vFocus', function() {
return {
/*
Focus this element.
*/
restrict: 'A',
link: function(scope, element) {
return $(element).select();
}
};
});
a.directive('vModal', function() {
return {
/*
Find the first text input in the bootstrap modal window and focus it when the modal is shown.
*/
restrict: 'A',
link: function(scope, element) {
return $(element).on('shown', function() {
return $(this).find('input:first').select();
});
}
};
});
a.directive('vEnter', function() {
return {
/*
Evaluate the AngularJS expression when `Enter` is pressed.
*/
restrict: 'A',
link: function(scope, element, attrs) {
return element.bind("keydown keypress", function(e) {
if (e.which === 13) {
e.preventDefault();
return scope.$apply(function() {
return scope.$eval(attrs.vEnter);
});
}
});
}
};
});
vNavigation = function($injector) {
return {
/*
Set up the navigation effect.
*/
restrict: 'A',
link: function(scope, element) {
var $selected, $victory, index, match, noop;
$victory = $injector.get('$victory');
scope.$on('$stateChangeStart', function(event, toState, toParams, fromState) {
if (fromState.name !== "") {
$victory.common.loading.on();
}
scope.select = toState.name;
$('.modal.in').modal('hide');
return setTimeout(function() {
return $('#js_navigation li.select').mouseover();
}, 0);
});
scope.$on('$stateChangeSuccess', function() {
return $victory.common.loading.off();
});
scope.$on('$stateChangeError', function() {
return $victory.common.loading.off();
});
if ($(element).find('li.select').length > 0) {
$selected = $(element).find('li.select');
} else {
match = location.href.match(/\w\/([/#\w]*)/);
index = match[1] === '' ? 0 : $(element).find("li a[href*='" + match[1] + "']").parent().index();
$selected = $(element).find('li').eq(index);
}
$(element).find('li:first').parent().prepend($('<li class="cs_top"></li>'));
$(element).find('li.cs_top').css({
width: $selected.css('width'),
left: $selected.position().left,
top: $selected.position().top
});
noop = function() {};
$(element).find('li[class!=cs_top]').hover(function() {
return $(element).find('li.cs_top').each(function() {
return $(this).dequeue();
}).animate({
width: this.offsetWidth,
left: this.offsetLeft
}, 420, "easeInOutCubic");
}, noop());
$(element).hover(noop(), function() {
return $(element).find('li.cs_top').each(function() {
return $(this).dequeue();
}).animate({
width: $(element).find('li.select').css('width'),
left: $(element).find('li.select').position().left
}, 420, "easeInOutCubic");
});
}
};
};
vNavigation.$inject = ['$injector'];
a.directive('vNavigation', vNavigation);
}).call(this);
(function() {
angular.module('victory', ['victory.router', 'victory.directive']);
}).call(this);
(function() {
var a,
__indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; };
a = angular.module('victory.provider', []);
a.provider('$victory', function() {
var $http, $injector, $rootScope, pageSize,
_this = this;
pageSize = 20;
$injector = null;
$http = null;
$rootScope = null;
this.setupProviders = function(injector) {
$injector = injector;
$http = $injector.get('$http');
return $rootScope = $injector.get('$rootScope');
};
this.setup = function() {
NProgress.configure({
showSpinner: false
});
if (sessionStorage.selectedApplication) {
$rootScope.selectedApplication = JSON.parse(sessionStorage.selectedApplication);
}
return $rootScope.victory = window.victory;
};
this.common = {
ajax: function(args) {
var h,
_this = this;
if (args == null) {
args = {};
}
/*
victory ajax function
:param args: {method, cache, data, error(), success(), beforeSend(), hideLoadingAfterDown}
*/
if (args.method == null) {
args.method = 'get';
}
if (args.cache == null) {
args.cache = false;
}
if (args.data == null) {
args.data = '';
}
if (args.error == null) {
args.error = function() {};
}
if (args.success == null) {
args.success = function() {};
}
if (args.beforeSend) {
args.beforeSend();
}
h = $http({
url: args.url,
method: args.method,
cache: args.cache,
data: args.data
});
h.error(function(data, status, headers, config) {
_this.message.error(status);
return args.error(data, status, headers, config);
});
return h.success(function(data, status, headers, config) {
if (data.__status__ === 302 && data.location) {
location.href = data.location;
return;
}
return args.success(data, status, headers, config);
});
},
message: {
error: function(status) {
/*
Pop an error message for the given HTTP status.
*/
switch (status) {
case 400:
return $.av.pop({
title: 'Input Failed',
message: 'Please check input values.',
template: 'error'
});
case 403:
return $.av.pop({
title: 'Permission denied',
message: 'Please check your permission.',
template: 'error'
});
default:
return $.av.pop({
title: 'Error',
message: 'Loading failed, please try again later.',
template: 'error'
});
}
}
},
loading: {
/*
Show/Hide loading effect.
*/
on: function() {
return NProgress.start();
},
off: function() {
return NProgress.done();
}
}
};
this.setting = {
getApplications: function(args) {
var ajax;
if (args == null) {
args = {};
}
/*
Get applications of the settings.
:param args: {success()}
*/
ajax = _this.common.ajax({
url: '/settings/applications',
success: args.success
});
return ajax.then(function(data) {
return data.data.items;
});
},
addApplication: function(args) {
if (args == null) {
args = {};
}
/*
Add the application.
:param args: {data:{name, description}, error(), success()}
*/
return _this.common.ajax({
method: 'post',
url: '/settings/applications',
data: args.data,
error: args.error,
success: args.success
});
},
updateApplication: function(args) {
if (args == null) {
args = {};
}
/*
Update the application.
:param args: {id, data:{name, description}, error(), success()}
*/
return _this.common.ajax({
method: 'put',
url: "/settings/applications/" + args.id,
data: args.data,
error: args.error,
success: args.success
});
},
deleteApplication: function(args) {
if (args == null) {
args = {};
}
/*
Delete the application by id.
:param args: {id, success()}
*/
return _this.common.ajax({
method: 'delete',
url: "/settings/applications/" + args.id,
success: args.success
});
},
inviteUser: function(args) {
if (args == null) {
args = {};
}
/*
Invite the user into the application.
:param args: {applicationId, email, success()}
*/
return _this.common.ajax({
method: 'post',
url: "/settings/applications/" + args.applicationId + "/members",
data: {
email: args.email
},
success: args.success
});
},
deleteMember: function(args) {
if (args == null) {
args = {};
}
/*
Delete the member from the application.
:param args: {applicationId, memberId, success()}
*/
return _this.common.ajax({
method: 'delete',
url: "/settings/applications/" + args.applicationId + "/members/" + args.memberId,
success: args.success
});
},
getUsers: function(args) {
var ajax;
if (args == null) {
args = {};
}
/*
Get users of the settings.
:param args: {success()}
*/
ajax = _this.common.ajax({
url: '/settings/users',
success: args.success
});
return ajax.then(function(data) {
return data.data.items;
});
},
addUser: function(args) {
if (args == null) {
args = {};
}
/*
Add a user.
:param args: {email, success()}
*/
return _this.common.ajax({
method: 'post',
url: '/settings/users',
data: {
email: args.email
},
success: args.success
});
},
deleteUser: function(args) {
if (args == null) {
args = {};
}
/*
Delete the user by id.
:param args: {id, success()}
*/
return _this.common.ajax({
method: 'delete',
url: "/settings/users/" + args.id,
success: args.success
});
},
getProfile: function(args) {
var ajax;
if (args == null) {
args = {};
}
/*
Get the profile.
:param args: {success()}
*/
ajax = _this.common.ajax({
url: '/settings/profile',
success: args.success
});
return ajax.then(function(data) {
return data.data;
});
},
updateProfile: function(args) {
if (args == null) {
args = {};
}
/*
Update the profile.
:param args: {name, error(), success()}
*/
return _this.common.ajax({
method: 'put',
url: '/settings/profile',
data: {
name: args.name
},
error: args.error,
success: args.success
});
}
};
this.application = {
getApplications: function(args) {
if (args == null) {
args = {};
}
/*
Get applications.
:param args: {success()}
*/
return _this.common.ajax({
url: "/applications",
success: args.success
});
}
};
this.document = {
getGroupedDocumentsAndApplications: function(args) {
var ajaxApplications, result;
if (args == null) {
args = {};
}
/*
Get grouped documents and applications for GroupedDocumentsCtrl.
:param args: {documentMode, applicationId, keyword, index}
:return: {applications, groupedDocuments, page}
*/
args.applicationId = parseInt(args.applicationId);
if (args.keyword == null) {
args.keyword = '';
}
if (args.index == null) {
args.index = 0;
}
result = {
applications: null,
groupedDocuments: null,
page: {
index: 0
}
};
ajaxApplications = _this.common.ajax({
url: '/applications'
});
return ajaxApplications.then(function(data) {
var ajaxDocuments, x, _ref, _ref1;
result.applications = data.data.items;
if (result.applications.length > 0) {
if (_ref = args.applicationId, __indexOf.call((function() {
var _i, _len, _ref1, _results;
_ref1 = result.applications;
_results = [];
for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
x = _ref1[_i];
_results.push(x.id);
}
return _results;
})(), _ref) >= 0) {
$rootScope.selectedApplication = ((function() {
var _i, _len, _ref1, _results;
_ref1 = result.applications;
_results = [];
for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
x = _ref1[_i];
if (x.id === args.applicationId) {
_results.push(x);
}
}
return _results;
})())[0];
sessionStorage.selectedApplication = JSON.stringify($rootScope.selectedApplication);
} else if (!$rootScope.selectedApplication || (_ref1 = $rootScope.selectedApplication.id, __indexOf.call((function() {
var _i, _len, _ref2, _results;
_ref2 = result.applications;
_results = [];
for (_i = 0, _len = _ref2.length; _i < _len; _i++) {
x = _ref2[_i];
_results.push(x.id);
}
return _results;
})(), _ref1) < 0)) {
$rootScope.selectedApplication = result.applications[0];
sessionStorage.selectedApplication = JSON.stringify($rootScope.selectedApplication);
}
ajaxDocuments = _this.document.getGroupedDocuments({
applicationId: $rootScope.selectedApplication.id,
documentMode: args.documentMode,
keyword: args.keyword,
index: args.index
});
return ajaxDocuments.then(function(data) {
result.groupedDocuments = data.data.items;
result.page = {
total: data.data.total,
index: args.index,
max: (data.data.total - 1) / pageSize,
hasPrevious: args.index > 0,
hasNext: (parseInt(args.index) + 1) * pageSize < data.data.total
};
return result;
});
} else {
return result;
}
});
},
getGroupedDocuments: function(args) {
if (args == null) {
args = {};
}
/*
Get grouped documents
:param args: {applicationId, documentMode, keyword, index, success()}
*/
if (args.keyword == null) {
args.keyword = '';
}
if (args.index == null) {
args.index = 0;
}
return _this.common.ajax({
url: "/applications/" + $rootScope.selectedApplication.id + "/" + args.documentMode + "/grouped?q=" + args.keyword + "&index=" + args.index,
success: args.success
});
},
getDocuments: function(args) {
var ajax;
if (args == null) {
args = {};
}
/*
Get documents by the grouped tag.
:param args: {applicationId, documentMode, groupTag, success()}
*/
ajax = _this.common.ajax({
url: "/applications/" + args.applicationId + "/" + args.documentMode + "/" + args.groupTag,
success: args.success
});
return ajax.then(function(data) {
return data.data.items;
});
},
getCrashDocument: function(args) {
var ajax;
if (args == null) {
args = {};
}
/*
Get the crash document by the grouped tag.
:param args: {applicationId, groupTag, success()}
*/
ajax = _this.common.ajax({
url: "/applications/" + args.applicationId + "/crashes/" + args.groupTag,
success: args.success
});
return ajax.then(function(data) {
var crash, thread, x, _i, _j, _len, _len1, _ref, _ref1;
crash = data.data.crash;
try {
_ref = crash.report.crash.threads;
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
thread = _ref[_i];
if (thread.backtrace) {
_ref1 = thread.backtrace.contents;
for (_j = 0, _len1 = _ref1.length; _j < _len1; _j++) {
x = _ref1[_j];
x.instruction_addr_hex = '0x' + ('00000000' + x.instruction_addr.toString(16)).slice(-8);
}
}
}
} catch (_error) {}
try {
crash.crashedThreads = (function() {
var _k, _len2, _ref2, _results;
_ref2 = crash.report.crash.threads;
_results = [];
for (_k = 0, _len2 = _ref2.length; _k < _len2; _k++) {
x = _ref2[_k];
if (x.crashed) {
_results.push(x);
}
}
return _results;
})();
} catch (_error) {}
try {
crash.threads = (function() {
var _k, _len2, _ref2, _results;
_ref2 = crash.report.crash.threads;
_results = [];
for (_k = 0, _len2 = _ref2.length; _k < _len2; _k++) {
x = _ref2[_k];
if (!x.crashed) {
_results.push(x);
}
}
return _results;
})();
} catch (_error) {}
return crash;
});
}
};
this.get = function($injector) {
this.setupProviders($injector);
this.setup();
return {
common: this.common,
setting: this.setting,
application: this.application,
document: this.document
};
};
this.get.$inject = ['$injector'];
this.$get = this.get;
});
}).call(this);
(function() {
var a, config, run;
a = angular.module('victory.router', ['victory.controller', 'victory.provider', 'ui.router']);
run = function($injector) {
var $rootScope, $state, $stateParams;
$rootScope = $injector.get('$rootScope');
$state = $injector.get('$state');
$stateParams = $injector.get('$stateParams');
$rootScope.$state = $state;
return $rootScope.$stateParams = $stateParams;
};
a.run(['$injector', run]);
config = function($injector) {
var $stateProvider, $urlRouterProvider;
$stateProvider = $injector.get('$stateProvider');
$urlRouterProvider = $injector.get('$urlRouterProvider');
$urlRouterProvider.otherwise('/');
$stateProvider.state('index', {
url: '/',
templateUrl: '/views/empty.html',
controller: 'IndexCtrl'
});
$stateProvider.state('login', {
url: '/login',
resolve: {
title: function() {
return 'Sign In - ';
}
},
views: {
viewContent: {
templateUrl: '/views/login.html'
}
}
});
$stateProvider.state('settings', {
url: '/settings',
templateUrl: '/views/empty.html',
controller: 'SettingsCtrl'
});
$stateProvider.state('settings-applications', {
url: '/settings/applications',
resolve: {
title: function() {
return 'Applications - Settings - ';
},
applications: [
'$victory', function($victory) {
return $victory.setting.getApplications();
}
]
},
views: {
viewContent: {
templateUrl: '/views/settings/applications.html',
controller: 'SettingsApplicationsCtrl'
},
viewMenu: {
templateUrl: '/views/menu/settings.html',
controller: 'SettingsMenuCtrl'
}
}
});
$stateProvider.state('settings-users', {
url: '/settings/users',
resolve: {
title: function() {
return 'Users - Settings - ';
},
users: [
'$victory', function($victory) {
return $victory.setting.getUsers();
}
]
},
views: {
viewContent: {
templateUrl: '/views/settings/users.html',
controller: 'SettingsUsersCtrl'
},
viewMenu: {
templateUrl: '/views/menu/settings.html',
controller: 'SettingsMenuCtrl'
}
}
});
$stateProvider.state('settings-profile', {
url: '/settings/profile',
resolve: {
title: function() {
return 'Profile - Settings - ';
},
profile: [
'$victory', function($victory) {
return $victory.setting.getProfile();
}
]
},
views: {
viewContent: {
templateUrl: '/views/settings/profile.html',
controller: 'SettingsProfileCtrl'
},
viewMenu: {
templateUrl: '/views/menu/settings.html',
controller: 'SettingsMenuCtrl'
}
}
});
$stateProvider.state('grouped-crashes', {
url: '/crashes/grouped',
resolve: {
title: function() {
return 'Crashes - ';
},
documentMode: function() {
return 'crashes';
},
groupedDocumentsAndApplications: [
'$victory', function($victory) {
return $victory.document.getGroupedDocumentsAndApplications({
documentMode: 'crashes'
});
}
]
},
templateUrl: '/views/documents/grouped.html',
controller: 'GroupedDocumentsCtrl'
});
$stateProvider.state('grouped-crashes-search', {
url: '/applications/:applicationId/crashes/grouped/:keyword/:index',
resolve: {
title: function() {
return 'Crashes - ';
},
documentMode: function() {
return 'crashes';
},
groupedDocumentsAndApplications: [
'$victory', '$stateParams', function($victory, $stateParams) {
return $victory.document.getGroupedDocumentsAndApplications({
documentMode: 'crashes',
applicationId: $stateParams.applicationId,
keyword: $stateParams.keyword,
index: $stateParams.index
});
}
]
},
templateUrl: '/views/documents/grouped.html',
controller: 'GroupedDocumentsCtrl'
});
$stateProvider.state('crash', {
url: '/applications/:applicationId/crashes/:groupTag',
resolve: {
title: function() {
return 'Crash - ';
},
documentMode: function() {
return 'crashes';
},
crash: [
'$victory', '$stateParams', function($victory, $stateParams) {
return $victory.document.getCrashDocument({
applicationId: $stateParams.applicationId,
groupTag: $stateParams.groupTag
});
}
]
},
templateUrl: '/views/documents/crash.html',
controller: 'CrashDocumentCtrl'
});
$stateProvider.state('grouped-exceptions', {
url: '/exceptions/grouped',
resolve: {
title: function() {
return 'Exceptions - ';
},
documentMode: function() {
return 'exceptions';
},
groupedDocumentsAndApplications: [
'$victory', function($victory) {
return $victory.document.getGroupedDocumentsAndApplications({
documentMode: 'exceptions'
});
}
]
},
templateUrl: '/views/documents/grouped.html',
controller: 'GroupedDocumentsCtrl'
});
$stateProvider.state('grouped-exceptions-search', {
url: '/applications/:applicationId/exceptions/grouped/:keyword/:index',
resolve: {
title: function() {
return 'Exceptions - ';
},
documentMode: function() {
return 'exceptions';
},
groupedDocumentsAndApplications: [
'$victory', '$stateParams', function($victory, $stateParams) {
return $victory.document.getGroupedDocumentsAndApplications({
documentMode: 'exceptions',
applicationId: $stateParams.applicationId,
keyword: $stateParams.keyword,
index: $stateParams.index
});
}
]
},
templateUrl: '/views/documents/grouped.html',
controller: 'GroupedDocumentsCtrl'
});
$stateProvider.state('exceptions', {
url: '/applications/:applicationId/exceptions/:groupTag',
resolve: {
title: function() {
return 'Exceptions - ';
},
documentMode: function() {
return 'exceptions';
},
documents: [
'$victory', '$stateParams', function($victory, $stateParams) {
return $victory.document.getDocuments({
documentMode: 'exceptions',
applicationId: $stateParams.applicationId,
groupTag: $stateParams.groupTag
});
}
]
},
templateUrl: '/views/documents/list.html',
controller: 'DocumentsCtrl'
});
$stateProvider.state('grouped-logs', {
url: '/logs/grouped',
resolve: {
title: function() {
return 'Logs - ';
},
documentMode: function() {
return 'logs';
},
groupedDocumentsAndApplications: [
'$victory', function($victory) {
return $victory.document.getGroupedDocumentsAndApplications({
documentMode: 'logs'
});
}
]
},
templateUrl: '/views/documents/grouped.html',
controller: 'GroupedDocumentsCtrl'
});
$stateProvider.state('grouped-logs-search', {
url: '/applications/:applicationId/logs/grouped/:keyword/:index',
resolve: {
title: function() {
return 'Logs - ';
},
documentMode: function() {
return 'logs';
},
groupedDocumentsAndApplications: [
'$victory', '$stateParams', function($victory, $stateParams) {
return $victory.document.getGroupedDocumentsAndApplications({
documentMode: 'logs',
applicationId: $stateParams.applicationId,
keyword: $stateParams.keyword,
index: $stateParams.index
});
}
]
},
templateUrl: '/views/documents/grouped.html',
controller: 'GroupedDocumentsCtrl'
});
return $stateProvider.state('logs', {
url: '/applications/:applicationId/logs/:groupTag',
resolve: {
title: function() {
return 'Logs - ';
},
documentMode: function() {
return 'logs';
},
documents: [
'$victory', '$stateParams', function($victory, $stateParams) {
return $victory.document.getDocuments({
documentMode: 'logs',
applicationId: $stateParams.applicationId,
groupTag: $stateParams.groupTag
});
}
]
},
templateUrl: '/views/documents/list.html',
controller: 'DocumentsCtrl'
});
};
a.config(['$injector', config]);
}).call(this);
(function() {
window.victory = {
userLevel: {
root: 0,
normal: 1
},
loginUrl: '',
logoutUrl: '',
user: {
userId: 0,
level: 1,
name: null,
email: null,
isLogin: false,
isRoot: function() {
return victory.user.level === victory.userLevel.root;
}
}
};
}).call(this);
<|start_filename|>application/angular_templates/documents/crash.html<|end_filename|>
<div id="js_content" class="cs_content">
<!-- switch applications -->
<div class="pull-left">
<div class="btn-group">
<a class="btn dropdown-toggle" data-toggle="dropdown" href="">
{{ selectedApplication.name }}
<span ng-if="!selectedApplication">No Application</span>
<span class="caret"></span>
</a>
<ul class="dropdown-menu">
<li ng-repeat="item in applications">
<a tabindex="-1" href="#/applications/{{ item.id }}/{{ documentMode }}/grouped//">{{ item.name }}</a>
</li>
</ul>
</div>
</div>
<!-- breadcrumb -->
<ul class="cs_breadcrumb breadcrumb pull-left">
<li><a href="#/applications/{{ selectedApplication.id }}/{{ documentMode }}/grouped//">Crashes</a> <span class="divider">/</span></li>
<li class="active">{{ crash.name }} - {{ crash.title }}</li>
</ul>
<div class="cs_clear"></div>
<!-- information -->
<legend>Information</legend>
<div class="cs_crash_info">
<dl class="dl-horizontal pull-left span7">
<dt>App Version</dt>
<dd>{{ crash.version }}</dd>
</dl>
<dl class="dl-horizontal pull-left span7">
<dt>OS Version</dt>
<dd>{{ crash.os_version }}</dd>
</dl>
<dl class="dl-horizontal pull-left span7">
<dt>Hardware Model</dt>
<dd>{{ crash.device }}</dd>
</dl>
<dl ng-if="crash.report.system.model" class="dl-horizontal pull-left span7">
<dt>Model</dt>
<dd>{{ crash.report.system.model }}</dd>
</dl>
<dl ng-if="crash.report.system.cpu_arch" class="dl-horizontal pull-left span7">
<dt>CPU arch</dt>
<dd>{{ crash.report.system.cpu_arch }}</dd>
</dl>
<dl class="dl-horizontal pull-left span7">
<dt>User</dt>
<dd>{{ crash.name }} <span ng-if="crash.email">&lt;{{ crash.email }}&gt;</span></dd>
</dl>
<dl class="dl-horizontal pull-left span7">
<dt>App uuid</dt>
<dd>{{ crash.app_uuid }}</dd>
</dl>
<dl ng-if="crash.report.system.device_app_hash" class="dl-horizontal pull-left span7">
<dt>Device App Hash</dt>
<dd>{{ crash.report.system.device_app_hash }}</dd>
</dl>
<dl ng-if="crash.report.system.parent_process_name" class="dl-horizontal pull-left span7">
<dt>Parent Process Name</dt>
<dd>{{ crash.report.system.parent_process_name }}</dd>
</dl>
<dl ng-if="crash.report.system.CFBundleIdentifier" class="dl-horizontal pull-left span7">
<dt>Bundle Identifier</dt>
<dd>{{ crash.report.system.CFBundleIdentifier }}</dd>
</dl>
<dl class="dl-horizontal pull-left span7">
<dt>IP Address</dt>
<dd>{{ crash.ip }}</dd>
</dl>
<dl class="dl-horizontal pull-left span7">
<dt>Datetime</dt>
<dd>{{ crash.create_time|date:'yyyy-MM-dd hh:mm:ss a' }}</dd>
</dl>
<dl class="dl-horizontal pull-left span7">
<dt>User-Agent</dt>
<dd>{{ crash.user_agent }}</dd>
</dl>
</div>
<div class="cs_clear" style="height: 30px;"></div>
<!-- crashed threads -->
<div ng-repeat="thread in crash.crashedThreads">
<legend>Thread {{ thread.index }} Crashed : {{ thread.name }}</legend>
<table ng-if="thread.backtrace" class="table table-striped table-hover table-content">
<thead>
<tr>
<th class="span1"></th>
<th class="span3">object name</th>
<th width="90px">address</th>
<th>symbol name</th></tr>
</thead>
<tbody>
<tr ng-repeat="x in thread.backtrace.contents">
<td><span class="pull-right">{{ $index }}</span></td>
<td>{{ x.object_name }}</td>
<td class="cs_monospaced">{{ x.instruction_addr_hex }}</td>
<td>{{ x.symbol_name }} + {{ x.instruction_addr - x.symbol_addr }}</td>
</tr>
</tbody>
</table>
</div>
<!-- threads -->
<div ng-repeat="thread in crash.threads">
<legend>Thread {{ thread.index }} : {{ thread.name }}</legend>
<table ng-if="thread.backtrace" class="table table-striped table-hover table-content">
<thead>
<tr>
<th class="span1"></th>
<th class="span3">object name</th>
<th width="90px">address</th>
<th>symbol name</th>
</tr>
</thead>
<tbody>
<tr ng-repeat="x in thread.backtrace.contents">
<td><span class="pull-right">{{ $index }}</span></td>
<td>{{ x.object_name }}</td>
<td class="cs_monospaced">{{ x.instruction_addr_hex }}</td>
<td>{{ x.symbol_name }} + {{ x.instruction_addr - x.symbol_addr }}</td>
</tr>
</tbody>
</table>
</div>
</div>
<|start_filename|>application/static/coffeescript/controller.coffee<|end_filename|>
a = angular.module 'victory.controller', ['victory.provider']
# ----------- controllers for ui.router ----------------
indexController = ($scope) ->
###
/
###
if $scope.victory.user.isLogin
location.href = '#/crashes/grouped'
else
location.href = '#/login'
indexController.$inject = ['$scope']
a.controller 'IndexCtrl', indexController
# ----------- settings ------------------
settingsMenuController = ($scope, $state) ->
###
The controller of the settings menu
###
$scope.active = $state.current.name
settingsMenuController.$inject = ['$scope', '$state']
a.controller 'SettingsMenuCtrl', settingsMenuController
a.controller 'SettingsCtrl', ->
###
/settings
###
location.href = '#/settings/applications'
settingsApplicationsController = ($scope, $victory, applications) ->
###
/settings/applications
:scope name: new application name
:scope description: new application description
:scope items: [{id, name, newName, description, newDescription
app_key, create_time, is_owner, members:[{id, name, email, is_owner}]
}]
###
# setup applications
for item in applications
item.newName = item.name
item.newDescription = item.description
$scope.items = applications
$scope.getApplications = ->
###
Get applications.
###
$victory.setting.getApplications
success: (data) ->
for item in data.items
item.newName = item.name
item.newDescription = item.description
$scope.items = data.items
$scope.addApplication = ->
###
Add an application.
###
$victory.setting.addApplication
data:
name: $scope.name
description: $scope.description
error: (data, status) ->
if status == 400 and data
$scope.errors = data
success: ->
$scope.name = ''
$scope.description = ''
$('.modal.in').modal 'hide'
$scope.getApplications()
$scope.updateApplication = (id) ->
###
Update the application.
###
updateItem = (x for x in $scope.items when x.id == id)[0]
$victory.setting.updateApplication
id: id
data:
name: updateItem.newName
description: updateItem.newDescription
error: (data, status) ->
if status == 400 and data
updateItem.errors = data
success: ->
$('.modal.in').modal 'hide'
$scope.getApplications()
$scope.deleteApplication = (id) ->
###
Delete the application.
###
$victory.setting.deleteApplication
id: id
success: ->
$('.modal.in').modal 'hide'
$scope.getApplications()
$scope.inviteUser = (id, email) ->
###
Invite a user into the application.
###
$victory.setting.inviteUser
applicationId: id
email: email
success: ->
$('.modal.in').modal 'hide'
$scope.getApplications()
$scope.deleteMenter = (applicationId, memberId) ->
###
Delete the member from the application.
###
$victory.setting.deleteMember
applicationId: applicationId
memberId: memberId
success: ->
application = (x for x in $scope.items when x.id == applicationId)[0]
application.members = (x for x in application.members when x.id != memberId)
settingsApplicationsController.$inject = ['$scope', '$victory', 'applications']
a.controller 'SettingsApplicationsCtrl', settingsApplicationsController
settingsUsersController = ($scope, $victory, users) ->
###
/settings/users
###
$scope.items = users
$scope.getUsers = ->
###
Get users.
###
$victory.setting.getUsers
success: (data) ->
$scope.items = data.items
$scope.addUser = ->
###
Add a user.
###
$victory.setting.addUser
email: $scope.email
success: ->
$scope.email = ''
$scope.getUsers()
$scope.deleteUser = (id) ->
###
Delete the user.
###
$victory.setting.deleteUser
id: id
success: ->
$scope.items = (x for x in $scope.items when x.id != id)
settingsUsersController.$inject = ['$scope', '$victory', 'users']
a.controller 'SettingsUsersCtrl', settingsUsersController
settingsProfileController = ($scope, $injector, profile) ->
###
/settings/profile
###
# providers
$victory = $injector.get '$victory'
$rootScope = $injector.get '$rootScope'
# scope
$scope.profile = profile
$scope.getProfile = ->
$victory.setting.getProfile
success: (data) ->
$rootScope.victory.user.name = data.name
$scope.profile = data
$scope.updateProfile = ->
$victory.setting.updateProfile
name: $scope.profile.name
error: (data, status) ->
if status == 400 and data
$scope.errors = data
success: ->
$scope.getProfile()
settingsProfileController.$inject = ['$scope', '$injector', 'profile']
a.controller 'SettingsProfileCtrl', settingsProfileController
# ----------- documents ----------------
groupedDocumentsController = ($scope, $stateParams, documentMode, groupedDocumentsAndApplications) ->
###
:scope documentMode: <crashes/exceptions/logs>
:scope keyword: search keywords
:scope applications: [{id, name, description,
app_key, create_time, is_owner}]
:scope groupedDocuments: [{group_tag, create_time, name, email, title, description, times}]
:scope page: {total, index, max, hasPrevious, hasNext}
###
$scope.documentMode = documentMode
$scope.keyword = if $stateParams.keyword then $stateParams.keyword else ''
$scope.applications = groupedDocumentsAndApplications.applications
$scope.groupedDocuments = groupedDocumentsAndApplications.groupedDocuments
$scope.page = groupedDocumentsAndApplications.page
$scope.getGroupedDocumentsUrl = (keyword, index=0) ->
###
Get the url of grouped documents.
###
return "#/applications/#{$scope.selectedApplication.id}/#{$scope.documentMode}/grouped/#{keyword}/#{index}"
$scope.gotoSearchPage = (keyword, index=0) ->
###
Go to the search page of grouped documents.
###
location.href = $scope.getGroupedDocumentsUrl keyword, index
$scope.clickGroupedDocument = (groupedDocument) ->
###
Clicked the grouped document row in the table.
###
if groupedDocument.times > 1 or $scope.documentMode == 'crashes'
location.href = "#/applications/#{$scope.selectedApplication.id}/#{$scope.documentMode}/#{groupedDocument.group_tag}"
$scope.modal = (groupedDocument) ->
###
Check whether the grouped document should show the bootstrap modal window.
:param groupedDocument: grouped document
:return: "modal" / ""
###
if groupedDocument.times > 1
return ""
else
return "modal"
groupedDocumentsController.$inject = ['$scope', '$stateParams', 'documentMode', 'groupedDocumentsAndApplications']
a.controller 'GroupedDocumentsCtrl', groupedDocumentsController
documentsController = ($scope, $victory, documentMode, documents) ->
###
/applications/<applicationId>/<documentMode>/<groupTag>
###
$scope.documentMode = documentMode
$scope.documents = documents
$victory.application.getApplications
success: (data) ->
$scope.applications = data.items
$scope.renderDescription = (document) ->
###
Render the description of the document.
###
if document.description
return document.description
else if document.parameters
return "Parameters: #{document.parameters}"
else if document.url
return "URL: #{document.url}"
""
documentsController.$inject = ['$scope', '$victory', 'documentMode', 'documents']
a.controller 'DocumentsCtrl', documentsController
crashDocumentController = ($scope, $victory, documentMode, crash) ->
###
/applications/<applicationId>/<documentMode>/<groupTag>
###
$scope.documentMode = documentMode
$scope.crash = crash
$victory.application.getApplications
success: (data) ->
$scope.applications = data.items
crashDocumentController.$inject = ['$scope', '$victory', 'documentMode', 'crash']
a.controller 'CrashDocumentCtrl', crashDocumentController
<|start_filename|>application/static/coffeescript/directive.coffee<|end_filename|>
a = angular.module 'victory.directive', []
# ----------------------------------------
# v-tooltip
# ----------------------------------------
a.directive 'vTooltip', ->
###
Show the bootstrap tooltip.
###
restrict: 'A'
link: (scope, element, attrs) ->
attrs.$observe 'vTooltip', (value) ->
if value
$(element).attr 'title', scope.$eval(value)
$(element).tooltip()
# ----------------------------------------
# v-focus
# ----------------------------------------
a.directive 'vFocus', ->
###
Focus this element.
###
restrict: 'A'
link: (scope, element) ->
$(element).select()
# ----------------------------------------
# v-modal
# ----------------------------------------
a.directive 'vModal', ->
###
Find the first text input in the bootstrap modal window and focus it when the modal is shown.
###
restrict: 'A'
link: (scope, element) ->
$(element).on 'shown', ->
$(@).find('input:first').select()
# ----------------------------------------
# v-enter
# ----------------------------------------
a.directive 'vEnter', ->
###
Evaluate the AngularJS expression when `Enter` is pressed.
###
restrict: 'A'
link: (scope, element, attrs) ->
element.bind "keydown keypress", (e) ->
if e.which is 13
e.preventDefault()
scope.$apply ->
scope.$eval attrs.vEnter
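# Illustrative template usage (hypothetical markup, not taken from this project's
# templates): the expression in the v-enter attribute is evaluated when Enter is
# pressed inside the element, e.g.
#   <input type="text" ng-model="email" v-enter="addUser()">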
# ----------------------------------------
# v-navigation
# ----------------------------------------
vNavigation = ($injector) ->
###
Set up the navigation effect.
###
restrict: 'A'
link: (scope, element) ->
# ----------------------------------------
# providers
# ----------------------------------------
$victory = $injector.get '$victory'
# ----------------------------------------
# scope
# ----------------------------------------
# ui.router state change event
scope.$on '$stateChangeStart', (event, toState, toParams, fromState) ->
# (event, toState, toParams, fromState, fromParams) ->
if fromState.name != ""
$victory.common.loading.on()
scope.select = toState.name
$('.modal.in').modal 'hide'
setTimeout ->
$('#js_navigation li.select').mouseover()
, 0
scope.$on '$stateChangeSuccess', ->
# (event, toState, toParams, fromState, fromParams) ->
$victory.common.loading.off()
scope.$on '$stateChangeError', ->
# (event, toState, toParams, fromState, fromParams, error) ->
$victory.common.loading.off()
# ----------------------------------------
# navigation effect
# ----------------------------------------
if $(element).find('li.select').length > 0
$selected = $(element).find('li.select')
else
match = location.href.match /\w\/([/#\w]*)/
index = if match[1] is '' then 0 else $(element).find("li a[href*='#{match[1]}']").parent().index()
$selected = $(element).find('li').eq(index)
$(element).find('li:first').parent().prepend $('<li class="cs_top"></li>')
$(element).find('li.cs_top').css
width: $selected.css('width')
left: $selected.position().left
top: $selected.position().top
# mouse hover
noop = -> return
$(element).find('li[class!=cs_top]').hover ->
$(element).find('li.cs_top').each( -> $(@).dequeue()).animate(
width: @offsetWidth
left: @offsetLeft
, 420, "easeInOutCubic")
, noop()
$(element).hover noop(), ->
$(element).find('li.cs_top').each( -> $(@).dequeue()).animate(
width: $(element).find('li.select').css 'width'
left: $(element).find('li.select').position().left
, 420, "easeInOutCubic")
return
vNavigation.$inject = ['$injector']
a.directive 'vNavigation', vNavigation
<|start_filename|>application/static/coffeescript/router.coffee<|end_filename|>
a = angular.module 'victory.router', ['victory.controller', 'victory.provider', 'ui.router']
# ----------------------------------------
# run
# ----------------------------------------
run = ($injector) ->
$rootScope = $injector.get '$rootScope'
$state = $injector.get '$state'
$stateParams = $injector.get '$stateParams'
$rootScope.$state = $state
$rootScope.$stateParams = $stateParams
a.run ['$injector', run]
# ----------------------------------------
# config
# ----------------------------------------
config = ($injector) ->
# ----------------------------------------
# providers
# ----------------------------------------
$stateProvider = $injector.get '$stateProvider'
$urlRouterProvider = $injector.get '$urlRouterProvider'
# ----------------------------------------
# redirect to '/' on 404
# ----------------------------------------
$urlRouterProvider.otherwise '/'
# ----------------------------------------
# /
# ----------------------------------------
$stateProvider.state 'index',
url: '/'
templateUrl: '/views/empty.html'
controller: 'IndexCtrl'
# ----------------------------------------
# /login
# ----------------------------------------
$stateProvider.state 'login',
url: '/login'
resolve: title: -> 'Sign In - '
views:
viewContent:
templateUrl: '/views/login.html'
# ----------------------------------------
# /settings
# ----------------------------------------
$stateProvider.state 'settings',
url: '/settings'
templateUrl: '/views/empty.html'
controller: 'SettingsCtrl'
$stateProvider.state 'settings-applications',
url: '/settings/applications'
resolve:
title: -> 'Applications - Settings - '
applications: ['$victory', ($victory) ->
$victory.setting.getApplications()
]
views:
viewContent:
templateUrl: '/views/settings/applications.html'
controller: 'SettingsApplicationsCtrl'
viewMenu:
templateUrl: '/views/menu/settings.html'
controller: 'SettingsMenuCtrl'
$stateProvider.state 'settings-users',
url: '/settings/users'
resolve:
title: -> 'Users - Settings - '
users: ['$victory', ($victory) ->
$victory.setting.getUsers()
]
views:
viewContent:
templateUrl: '/views/settings/users.html'
controller: 'SettingsUsersCtrl'
viewMenu:
templateUrl: '/views/menu/settings.html'
controller: 'SettingsMenuCtrl'
$stateProvider.state 'settings-profile',
url: '/settings/profile'
resolve:
title: -> 'Profile - Settings - '
profile: ['$victory', ($victory) ->
$victory.setting.getProfile()
]
views:
viewContent:
templateUrl: '/views/settings/profile.html'
controller: 'SettingsProfileCtrl'
viewMenu:
templateUrl: '/views/menu/settings.html'
controller: 'SettingsMenuCtrl'
# ----------------------------------------
# /crashes
# /exceptions
# /logs
# ----------------------------------------
$stateProvider.state 'grouped-crashes',
url: '/crashes/grouped'
resolve:
title: -> 'Crashes - '
documentMode: -> 'crashes'
groupedDocumentsAndApplications: ['$victory', ($victory) ->
$victory.document.getGroupedDocumentsAndApplications
documentMode: 'crashes'
]
templateUrl: '/views/documents/grouped.html'
controller: 'GroupedDocumentsCtrl'
$stateProvider.state 'grouped-crashes-search',
url: '/applications/:applicationId/crashes/grouped/:keyword/:index'
resolve:
title: -> 'Crashes - '
documentMode: -> 'crashes'
groupedDocumentsAndApplications: ['$victory', '$stateParams', ($victory, $stateParams) ->
$victory.document.getGroupedDocumentsAndApplications
documentMode: 'crashes'
applicationId: $stateParams.applicationId
keyword: $stateParams.keyword
index: $stateParams.index
]
templateUrl: '/views/documents/grouped.html'
controller: 'GroupedDocumentsCtrl'
$stateProvider.state 'crash',
url: '/applications/:applicationId/crashes/:groupTag'
resolve:
title: -> 'Crash - '
documentMode: -> 'crashes'
crash: ['$victory', '$stateParams', ($victory, $stateParams) ->
$victory.document.getCrashDocument
applicationId: $stateParams.applicationId
groupTag: $stateParams.groupTag
]
templateUrl: '/views/documents/crash.html'
controller: 'CrashDocumentCtrl'
$stateProvider.state 'grouped-exceptions',
url: '/exceptions/grouped'
resolve:
title: -> 'Exceptions - '
documentMode: -> 'exceptions'
groupedDocumentsAndApplications: ['$victory', ($victory) ->
$victory.document.getGroupedDocumentsAndApplications
documentMode: 'exceptions'
]
templateUrl: '/views/documents/grouped.html'
controller: 'GroupedDocumentsCtrl'
$stateProvider.state 'grouped-exceptions-search',
url: '/applications/:applicationId/exceptions/grouped/:keyword/:index'
resolve:
title: -> 'Exceptions - '
documentMode: -> 'exceptions'
groupedDocumentsAndApplications: ['$victory', '$stateParams', ($victory, $stateParams) ->
$victory.document.getGroupedDocumentsAndApplications
documentMode: 'exceptions'
applicationId: $stateParams.applicationId
keyword: $stateParams.keyword
index: $stateParams.index
]
templateUrl: '/views/documents/grouped.html'
controller: 'GroupedDocumentsCtrl'
$stateProvider.state 'exceptions',
url: '/applications/:applicationId/exceptions/:groupTag'
resolve:
title: -> 'Exceptions - '
documentMode: -> 'exceptions'
documents: ['$victory', '$stateParams', ($victory, $stateParams) ->
$victory.document.getDocuments
documentMode: 'exceptions'
applicationId: $stateParams.applicationId
groupTag: $stateParams.groupTag
]
templateUrl: '/views/documents/list.html'
controller: 'DocumentsCtrl'
$stateProvider.state 'grouped-logs',
url: '/logs/grouped'
resolve:
title: -> 'Logs - '
documentMode: -> 'logs'
groupedDocumentsAndApplications: ['$victory', ($victory) ->
$victory.document.getGroupedDocumentsAndApplications
documentMode: 'logs'
]
templateUrl: '/views/documents/grouped.html'
controller: 'GroupedDocumentsCtrl'
$stateProvider.state 'grouped-logs-search',
url: '/applications/:applicationId/logs/grouped/:keyword/:index'
resolve:
title: -> 'Logs - '
documentMode: -> 'logs'
groupedDocumentsAndApplications: ['$victory', '$stateParams', ($victory, $stateParams) ->
$victory.document.getGroupedDocumentsAndApplications
documentMode: 'logs'
applicationId: $stateParams.applicationId
keyword: $stateParams.keyword
index: $stateParams.index
]
templateUrl: '/views/documents/grouped.html'
controller: 'GroupedDocumentsCtrl'
$stateProvider.state 'logs',
url: '/applications/:applicationId/logs/:groupTag'
resolve:
title: -> 'Logs - '
documentMode: -> 'logs'
documents: ['$victory', '$stateParams', ($victory, $stateParams) ->
$victory.document.getDocuments
documentMode: 'logs'
applicationId: $stateParams.applicationId
groupTag: $stateParams.groupTag
]
templateUrl: '/views/documents/list.html'
controller: 'DocumentsCtrl'
a.config ['$injector', config]
<|start_filename|>application/angular_templates/documents/list.html<|end_filename|>
<div id="js_content" class="cs_content">
<!-- switch applications -->
<div class="pull-left">
<div class="btn-group">
<a class="btn dropdown-toggle" data-toggle="dropdown" href="">
{{ selectedApplication.name }}
<span ng-if="!selectedApplication">No Application</span>
<span class="caret"></span>
</a>
<ul class="dropdown-menu">
<li ng-repeat="item in applications">
<a tabindex="-1" href="#/applications/{{ item.id }}/{{ documentMode }}/grouped//">{{ item.name }}</a>
</li>
</ul>
</div>
</div>
<!-- breadcrumb -->
<ul class="cs_breadcrumb breadcrumb pull-left">
<li ng-if="documentMode == 'exceptions'"><a href="#/applications/{{ selectedApplication.id }}/{{ documentMode }}/grouped//">Handled Exceptions</a> <span class="divider">/</span></li>
<li ng-if="documentMode == 'logs'"><a href="#/applications/{{ selectedApplication.id }}/{{ documentMode }}/grouped//">Logs</a> <span class="divider">/</span></li>
<li class="active">{{ documents[0].name }} - {{ documents[0].title }}</li>
</ul>
<div class="cs_clear"></div>
<!-- content table -->
<table class="table table-bordered table-hover table-content table-pointer">
<thead>
<tr>
<th class="span1">Status</th>
<th>Description</th>
<th class="span2">Datetime</th>
</tr>
</thead>
<tbody>
<tr ng-repeat="item in documents" href="#document_{{ item.id }}" data-toggle="modal">
<td>{{ item.status }}</td>
<td>{{ renderDescription(item) }}</td>
<td>{{ item.create_time|date:'yyyy-MM-dd hh:mm a' }}</td>
</tr>
</tbody>
</table>
<!-- modal -->
<div ng-repeat="item in documents" id="document_{{ item.id }}" class="modal hide fade" tabindex="-1" role="dialog" aria-hidden="true">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">×</button>
<h3 ng-if="documentMode == 'exceptions'">Handled Exception</h3>
<h3 ng-if="documentMode == 'logs'" class="active">Log</h3>
</div>
<div class="modal-body">
<table class="table table-bordered">
<tbody>
<tr><td class="span2">Application</td><td>{{ selectedApplication.name }}</td></tr>
<tr ng-if="item.version"><td>Version</td><td>{{ item.version }}</td></tr>
<tr><td>Title</td><td>{{ item.title }}</td></tr>
<tr ng-if="item.description"><td>Description</td><td><pre>{{ item.description }}</pre></td></tr>
<tr><td>Datetime</td><td>{{ item.create_time|date:'yyyy-MM-dd hh:mm:ss a' }}</td></tr>
<tr ng-if="item.status"><td>Status</td><td>{{ item.status }}</td></tr>
<tr ng-if="item.method"><td>Method</td><td>{{ item.method }}</td></tr>
<tr ng-if="item.url"><td>URL</td><td>{{ item.url }}</td></tr>
<tr ng-if="item.timeout"><td>Timeout</td><td>{{ item.timeout }}</td></tr>
<tr ng-if="item.parameters"><td>Parameters</td><td>{{ item.parameters }}</td></tr>
<tr ng-if="item.device"><td>Device</td><td>{{ item.device }}</td></tr>
<tr ng-if="item.os_version"><td>OS Version</td><td>{{ item.os_version }}</td></tr>
<tr ng-if="item.name"><td>Name</td><td>{{ item.name }}</td></tr>
<tr ng-if="item.email"><td>Email</td><td>{{ item.email }}</td></tr>
<tr ng-if="item.access_token"><td>Access Token</td><td>{{ item.access_token }}</td></tr>
<tr><td>IP Address</td><td>{{ item.ip }}</td></tr>
<tr><td>User-Agent</td><td>{{ item.user_agent }}</td></tr>
</tbody>
</table>
</div>
<div class="modal-footer">
<a class="btn" data-dismiss="modal">Close</a>
</div>
</div>
</div>
<|start_filename|>application/static/coffeescript/module.coffee<|end_filename|>
angular.module 'victory', ['victory.router', 'victory.directive']
<|start_filename|>application/static/coffeescript/victory.coffee<|end_filename|>
window.victory =
userLevel:
root: 0
normal: 1
loginUrl: ''
logoutUrl: ''
user:
userId: 0
level: 1
name: null
email: null
isLogin: false
isRoot: ->
victory.user.level == victory.userLevel.root
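# Illustrative check (assumption: the real call sites are in templates and
# server-rendered pages that are not part of this file):
#   showAdminSection() if victory.user.isRoot()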
<|start_filename|>Gruntfile.coffee<|end_filename|>
module.exports = (grunt) ->
# -----------------------------------
# Options
# -----------------------------------
grunt.config.init
compass:
site:
options:
sassDir: './application/static/css'
cssDir: './application/static/css'
outputStyle: 'compressed'
coffee:
source:
files:
'./application/static/javascript/victory.js': ['./application/static/coffeescript/*.coffee']
concat:
css:
src: ['./application/static/css/lib/*.css', './application/static/css/main.css']
dest: './application/static/dist/site.min.css'
js:
src: ['./application/static/javascript/*.min.js']
dest: './application/static/dist/site.min.js'
uglify:
options:
mangle: no
compress: no
data:
files:
'./application/static/javascript/victory.min.js': './application/static/javascript/victory.js'
watch:
compass:
files: ['./application/static/css/**/*.scss']
tasks: ['compass', 'concat:css']
options:
spawn: no
coffee:
files: ['./application/static/coffeescript/*.coffee']
tasks: ['coffee', 'uglify', 'concat:js']
options:
spawn: no
karma:
victory:
configFile: './tests/frontend/karma.config.coffee'
# -----------------------------------
# register task
# -----------------------------------
grunt.registerTask 'dev', ['watch']
grunt.registerTask 'test', ['karma']
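# Typical invocations (assumed workflow, inferred only from the tasks above):
#   grunt dev  -> watch the Sass and CoffeeScript sources and rebuild the bundles
#   grunt test -> run the Karma front-end tests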
# -----------------------------------
# Plugins
# -----------------------------------
grunt.loadNpmTasks 'grunt-contrib-compass'
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-contrib-concat'
grunt.loadNpmTasks 'grunt-contrib-uglify'
grunt.loadNpmTasks 'grunt-karma'
<|start_filename|>application/static/coffeescript/provider.coffee<|end_filename|>
a = angular.module 'victory.provider', []
a.provider '$victory', ->
# -------------------------------------------
# const
# -------------------------------------------
# default page size
pageSize = 20
# -------------------------------------------
# providers
# -------------------------------------------
$injector = null
$http = null
$rootScope = null
# -------------------------------------------
# private methods
# -------------------------------------------
@setupProviders = (injector) ->
$injector = injector
$http = $injector.get '$http'
$rootScope = $injector.get '$rootScope'
@setup = ->
NProgress.configure
showSpinner: no
# setup the selected application
if sessionStorage.selectedApplication
# application {id, name, description, app_key, create_time, is_owner}
$rootScope.selectedApplication = JSON.parse sessionStorage.selectedApplication
# load window.victory
$rootScope.victory = window.victory
# -------------------------------------------
# public methods
# -------------------------------------------
@common =
ajax: (args={}) ->
###
victory ajax function
:param args: {method, cache, data, error(), success(), beforeSend(), hideLoadingAfterDown}
###
args.method ?= 'get'
args.cache ?= false
args.data ?= ''
args.error ?= ->
args.success ?= ->
args.beforeSend() if args.beforeSend
h = $http
url: args.url, method: args.method, cache: args.cache, data: args.data
h.error (data, status, headers, config) =>
@message.error status
args.error(data, status, headers, config)
h.success (data, status, headers, config) =>
if data.__status__ == 302 and data.location
# redirect
location.href = data.location
return
args.success(data, status, headers, config)
message:
error: (status) ->
###
Pop an error message for the given HTTP status.
###
switch status
when 400
$.av.pop
title: 'Input Failed'
message: 'Please check input values.'
template: 'error'
when 403
$.av.pop
title: 'Permission denied'
message: 'Please check your permission.'
template: 'error'
else
$.av.pop
title: 'Error'
message: 'Loading failed, please try again later.'
template: 'error'
loading:
###
Show/Hide loading effect.
###
on: ->
NProgress.start()
off: ->
NProgress.done()
@setting =
# -------------- application ----------------
getApplications: (args={}) =>
###
Get applications of the settings.
:param args: {success()}
###
ajax = @common.ajax
url: '/settings/applications'
success: args.success
ajax.then (data) ->
# for resolve
data.data.items
addApplication: (args={}) =>
###
Add the application.
:param args: {data:{name, description}, error(), success()}
###
@common.ajax
method: 'post'
url: '/settings/applications'
data: args.data
error: args.error
success: args.success
updateApplication: (args={}) =>
###
Update the application.
:param args: {id, data:{name, description}, error(), success()}
###
@common.ajax
method: 'put'
url: "/settings/applications/#{args.id}"
data: args.data
error: args.error
success: args.success
deleteApplication: (args={}) =>
###
Delete the application by id.
:param args: {id, success()}
###
@common.ajax
method: 'delete'
url: "/settings/applications/#{args.id}"
success: args.success
inviteUser: (args={}) =>
###
Invite the user into the application.
:param args: {applicationId, email, success()}
###
@common.ajax
method: 'post'
url: "/settings/applications/#{args.applicationId}/members"
data:
email: args.email
success: args.success
deleteMember: (args={}) =>
###
Delete the member from the application.
:param args: {applicationId, memberId, success()}
###
@common.ajax
method: 'delete'
url: "/settings/applications/#{args.applicationId}/members/#{args.memberId}"
success: args.success
# -------------- user ----------------
getUsers: (args={}) =>
###
Get users of the settings.
:param args: {success()}
###
ajax = @common.ajax
url: '/settings/users'
success: args.success
ajax.then (data) ->
# for resolve
data.data.items
addUser: (args={}) =>
###
Add a user.
:param args: {email, success()}
###
@common.ajax
method: 'post'
url: '/settings/users'
data:
email: args.email
success: args.success
deleteUser: (args={}) =>
###
Delete the user by id.
:param args: {id, success()}
###
@common.ajax
method: 'delete'
url: "/settings/users/#{args.id}"
success: args.success
# -------------- profile ----------------
getProfile: (args={}) =>
###
Get the profile.
:param args: {success()}
###
ajax = @common.ajax
url: '/settings/profile'
success: args.success
ajax.then (data) ->
# for resolve
data.data
updateProfile: (args={}) =>
###
Update the profile.
:param args: {name, error(), success()}
###
@common.ajax
method: 'put'
url: '/settings/profile'
data:
name: args.name
error: args.error
success: args.success
@application =
getApplications: (args={}) =>
###
Get applications.
:param args: {success()}
###
@common.ajax
url: "/applications"
success: args.success
# -------------- document ----------------
@document =
getGroupedDocumentsAndApplications: (args={}) =>
###
Get grouped documents and applications for GroupedDocumentsCtrl.
:param args: {documentMode, applicationId, keyword, index}
:return: {applications, groupedDocuments, page}
###
# cleanup input value
args.applicationId = parseInt(args.applicationId)
args.keyword ?= ''
args.index ?= 0
# result object
result =
applications: null
groupedDocuments: null
page: index: 0
ajaxApplications = @common.ajax
url: '/applications'
ajaxApplications.then (data) =>
result.applications = data.data.items
if result.applications.length > 0
if args.applicationId in (x.id for x in result.applications)
# select the application
$rootScope.selectedApplication = (x for x in result.applications when x.id == args.applicationId)[0]
sessionStorage.selectedApplication = JSON.stringify $rootScope.selectedApplication
else if not $rootScope.selectedApplication or $rootScope.selectedApplication.id not in (x.id for x in result.applications)
# select the first application
$rootScope.selectedApplication = result.applications[0]
sessionStorage.selectedApplication = JSON.stringify $rootScope.selectedApplication
# load grouped documents by application id
ajaxDocuments = @document.getGroupedDocuments
applicationId: $rootScope.selectedApplication.id
documentMode: args.documentMode
keyword: args.keyword
index: args.index
ajaxDocuments.then (data) ->
result.groupedDocuments = data.data.items
result.page =
total: data.data.total
index: args.index
max: (data.data.total - 1) / pageSize
hasPrevious: args.index > 0
hasNext: (parseInt(args.index) + 1) * pageSize < data.data.total
result
else
result
getGroupedDocuments: (args={}) =>
###
Get grouped documents
:param args: {applicationId, documentMode, keyword, index, success()}
###
args.keyword ?= ''
args.index ?= 0
@common.ajax
url: "/applications/#{$rootScope.selectedApplication.id}/#{args.documentMode}/grouped?q=#{args.keyword}&index=#{args.index}"
success: args.success
getDocuments: (args={}) =>
###
Get documents by the grouped tag.
:param args: {applicationId, documentMode, groupTag, success()}
###
ajax = @common.ajax
url: "/applications/#{args.applicationId}/#{args.documentMode}/#{args.groupTag}"
success: args.success
ajax.then (data) ->
data.data.items
getCrashDocument: (args={}) =>
###
Get the crash document by the grouped tag.
:param args: {applicationId, groupTag, success()}
###
ajax = @common.ajax
url: "/applications/#{args.applicationId}/crashes/#{args.groupTag}"
success: args.success
ajax.then (data) ->
crash = data.data.crash
try
# append instruction_addr_hex
for thread in crash.report.crash.threads when thread.backtrace
for x in thread.backtrace.contents
x.instruction_addr_hex = '0x' + ('00000000' + x.instruction_addr.toString(16)).slice(-8)
try
# append crashed threads
crash.crashedThreads = (x for x in crash.report.crash.threads when x.crashed)
try
# append threads without crashed
crash.threads = (x for x in crash.report.crash.threads when not x.crashed)
crash
# -------------------------------------------
# $get
# -------------------------------------------
@get = ($injector) ->
@setupProviders $injector
@setup()
common: @common
setting: @setting
application: @application
document: @document
@get.$inject = ['$injector']
@$get = @get
return
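# ------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the provider itself):
# a controller could call the provider's public methods roughly like
# the snippet below. The controller name and scope fields are
# hypothetical, not defined anywhere in this file.
#
#   a.controller 'SettingsCtrl', ($scope, $victory) ->
#       $victory.common.loading.on()
#       $victory.setting.getApplications
#           success: (data) ->
#               $scope.applications = data.items
#               $victory.common.loading.off()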
<|start_filename|>tests/frontend/specs/providerSpec.coffee<|end_filename|>
describe 'victory.provider', ->
fakeModule = null
victoryProvider = null
beforeEach module('victory')
beforeEach ->
# mock NProgress
window.NProgress =
configure: ->
fakeModule = angular.module 'fakeModule', ['victory.provider']
fakeModule.config ($victoryProvider) ->
victoryProvider = $victoryProvider
beforeEach module('fakeModule')
describe '$victory.$get', ->
it '$victory.common should equal to $victoryProvider.common', inject ($victory) ->
expect($victory.common).toBe victoryProvider.common
it '$victory.setting should equal to $victoryProvider.setting', inject ($victory) ->
expect($victory.setting).toBe victoryProvider.setting
it '$victory.application should equal to $victoryProvider.application', inject ($victory) ->
expect($victory.application).toBe victoryProvider.application
it '$victory.document should equal to $victoryProvider.document', inject ($victory) ->
expect($victory.document).toBe victoryProvider.document
| kelp404/Victory |
<|start_filename|>inertia.go<|end_filename|>
package inertia
import (
"context"
"encoding/json"
"html/template"
"io/fs"
"net/http"
"path/filepath"
"strings"
)
// Inertia type.
type Inertia struct {
url string
rootTemplate string
version string
sharedProps map[string]interface{}
sharedFuncMap template.FuncMap
templateFS fs.FS
}
// New function.
func New(url, rootTemplate, version string) *Inertia {
i := new(Inertia)
i.url = url
i.rootTemplate = rootTemplate
i.version = version
i.sharedProps = make(map[string]interface{})
i.sharedFuncMap = template.FuncMap{"marshal": marshal}
return i
}
// NewWithFS function.
func NewWithFS(url, rootTemplate, version string, templateFS fs.FS) *Inertia {
i := New(url, rootTemplate, version)
i.templateFS = templateFS
return i
}
// Share function.
func (i *Inertia) Share(key string, value interface{}) {
i.sharedProps[key] = value
}
// ShareFunc function.
func (i *Inertia) ShareFunc(key string, value interface{}) {
i.sharedFuncMap[key] = value
}
// WithProp function.
func (i *Inertia) WithProp(ctx context.Context, key string, value interface{}) context.Context {
contextProps := ctx.Value(ContextKeyProps)
if contextProps != nil {
contextProps, ok := contextProps.(map[string]interface{})
if ok {
contextProps[key] = value
return context.WithValue(ctx, ContextKeyProps, contextProps)
}
}
return context.WithValue(ctx, ContextKeyProps, map[string]interface{}{
key: value,
})
}
// WithViewData function.
func (i *Inertia) WithViewData(ctx context.Context, key string, value interface{}) context.Context {
contextViewData := ctx.Value(ContextKeyViewData)
if contextViewData != nil {
contextViewData, ok := contextViewData.(map[string]interface{})
if ok {
contextViewData[key] = value
return context.WithValue(ctx, ContextKeyViewData, contextViewData)
}
}
return context.WithValue(ctx, ContextKeyViewData, map[string]interface{}{
key: value,
})
}
// Render function.
func (i *Inertia) Render(w http.ResponseWriter, r *http.Request, component string, props map[string]interface{}) error {
only := make(map[string]string)
partial := r.Header.Get("X-Inertia-Partial-Data")
if partial != "" && r.Header.Get("X-Inertia-Partial-Component") == component {
for _, value := range strings.Split(partial, ",") {
only[value] = value
}
}
page := &Page{
Component: component,
Props: make(map[string]interface{}),
URL: r.RequestURI,
Version: i.version,
}
for key, value := range i.sharedProps {
if _, ok := only[key]; len(only) == 0 || ok {
page.Props[key] = value
}
}
contextProps := r.Context().Value(ContextKeyProps)
if contextProps != nil {
contextProps, ok := contextProps.(map[string]interface{})
if !ok {
return ErrInvalidContextProps
}
for key, value := range contextProps {
if _, ok := only[key]; len(only) == 0 || ok {
page.Props[key] = value
}
}
}
for key, value := range props {
if _, ok := only[key]; len(only) == 0 || ok {
page.Props[key] = value
}
}
if r.Header.Get("X-Inertia") != "" {
js, err := json.Marshal(page)
if err != nil {
return err
}
w.Header().Set("Vary", "Accept")
w.Header().Set("X-Inertia", "true")
w.Header().Set("Content-Type", "application/json")
_, err = w.Write(js)
if err != nil {
return err
}
return nil
}
viewData := make(map[string]interface{})
contextViewData := r.Context().Value(ContextKeyViewData)
if contextViewData != nil {
contextViewData, ok := contextViewData.(map[string]interface{})
if !ok {
return ErrInvalidContextViewData
}
for key, value := range contextViewData {
viewData[key] = value
}
}
viewData["page"] = page
ts, err := i.createRootTemplate()
if err != nil {
return err
}
w.Header().Set("Content-Type", "text/html")
err = ts.Execute(w, viewData)
if err != nil {
return err
}
return nil
}
// Location function.
func (i *Inertia) Location(w http.ResponseWriter, location string) {
w.Header().Set("X-Inertia-Location", location)
w.WriteHeader(http.StatusConflict)
}
func (i *Inertia) createRootTemplate() (*template.Template, error) {
ts := template.New(filepath.Base(i.rootTemplate)).Funcs(i.sharedFuncMap)
if i.templateFS != nil {
return ts.ParseFS(i.templateFS, i.rootTemplate)
}
return ts.ParseFiles(i.rootTemplate)
}
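// Usage sketch (illustrative, written from a consuming package's point of
// view): the root template path, component name, and asset version below are
// assumptions, not values defined by this package.
//
//   i := inertia.New("https://example.test", "resources/views/root.gohtml", "1")
//   i.Share("app_name", "Demo")
//
//   http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
//       if err := i.Render(w, r, "Home/Index", map[string]interface{}{
//           "message": "Hello from inertia-go",
//       }); err != nil {
//           http.Error(w, err.Error(), http.StatusInternalServerError)
//       }
//   })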
| petaki/inertia-go |
<|start_filename|>src/codegen.js<|end_filename|>
const _state = 'state';
const _node = 'node';
const _match = 'x';
function js(/* arguments */) {
let body = arguments[0][0];
for (let i = 1; i < arguments.length; i++)
body = body + arguments[i] + arguments[0][i];
return body.trim();
}
const copy = (prev) => {
const next = {};
for (const key in prev) next[key] = prev[key];
return next;
};
const assignIndex = (depth) => js`
var y${depth} = ${_state}.y,
x${depth} = ${_state}.x;
`;
const restoreIndex = (depth) => js`
${_state}.y = y${depth};
${_state}.x = x${depth};
`;
const astExpression = (ast, depth, opts) => {
const capture = !!opts.capture && !ast.capture;
const restoreLength =
(opts.length && opts.abort && js`${_node}.length = ln${opts.length};`) ||
'';
const condition = `(${_match} = ${ast.expression.id}(${_state})) ${
capture ? '!=' : '=='
} null`;
return js`
if (${condition}) ${
capture
? js`{
${_node}.push(${_match});
} else `
: ''
}{
${restoreIndex(opts.index)}
${restoreLength}
${opts.abort}
}
`;
};
const astGroup = (ast, depth, opts) => {
const capture = !!opts.capture && !ast.capture;
opts = copy(opts);
opts.capture = capture;
if (!opts.length && capture) {
opts.length = depth;
return js`
${js`var ln${depth} = ${_node}.length;`}
${astSequence(ast.sequence, depth + 1, opts)}
`;
}
return astSequence(ast.sequence, depth + 1, opts);
};
const astChild = (ast, depth, opts) =>
ast.expression ? astExpression(ast, depth, opts) : astGroup(ast, depth, opts);
const astQuantifier = (ast, depth, opts) => {
const { index, abort } = opts;
const invert = `inv_${depth}`;
const group = `group_${depth}`;
opts = copy(opts);
if (ast.capture === '!') {
opts.index = depth;
opts.abort = js`break ${invert}`;
}
let child;
if (ast.quantifier === '+') {
const starAst = copy(ast);
starAst.quantifier = '*';
child = js`
${astChild(ast, depth, opts)}
${astQuantifier(starAst, depth, opts)}
`;
} else if (ast.quantifier === '*') {
opts.length = 0;
opts.index = depth;
opts.abort = js`break ${group};`;
child = js`
${group}: for (;;) {
${assignIndex(depth)}
${astChild(ast, depth, opts)}
}
`;
} else if (ast.quantifier === '?' && ast.expression) {
opts.index = depth;
opts.abort = '';
child = js`
${assignIndex(depth)}
${astChild(ast, depth, opts)}
`;
} else if (ast.quantifier === '?') {
opts.index = depth;
opts.abort = js`break ${group}`;
child = js`
${group}: {
${assignIndex(depth)}
${astChild(ast, depth, opts)}
}
`;
} else {
child = astChild(ast, depth, opts);
}
if (ast.capture === '!') {
return js`
${invert}: {
${assignIndex(depth)}
${child}
${restoreIndex(index)}
${abort}
}
`;
} else if (ast.capture === '=') {
return js`
${assignIndex(depth)}
${child}
${restoreIndex(depth)}
`;
} else {
return child;
}
};
const astSequence = (ast, depth, opts) => {
const alternation = ast.alternation ? `alt_${depth}` : '';
let body = '';
for (; ast; ast = ast.alternation) {
const block = `block_${depth}`;
let childOpts = opts;
if (ast.alternation) {
childOpts = copy(opts);
childOpts.index = depth;
childOpts.abort = js`break ${block};`;
}
let sequence = '';
for (let i = 0; i < ast.length; i++)
sequence += astQuantifier(ast[i], depth, childOpts);
if (!ast.alternation) {
body += sequence;
} else {
body += js`
${block}: {
${assignIndex(depth)}
${sequence}
break ${alternation};
}
`;
}
}
if (!alternation) return body;
return js`
${alternation}: {
${body}
}
`;
};
const astRoot = (ast, name, transform) => {
return js`
(function (${_state}) {
${assignIndex(1)}
var ${_node} = [];
var ${_match};
${astSequence(ast, 2, {
index: 1,
length: 0,
abort: js`return;`,
capture: true,
})}
if (${name}) ${_node}.tag = ${name};
return ${transform ? js`(${transform})(${_node})` : _node};
})
`;
};
export { astRoot };
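// Rough shape of the generated matcher (illustrative only): for a simple
// pattern such as `${expr}+`, astRoot emits source text along the lines of
// the sketch below, which core.js later evaluates via `new Function`.
//
//   (function (state) {
//     var y1 = state.y, x1 = state.x;
//     var node = [];
//     var x;
//     /* ...expression/quantifier blocks emitted by astSequence... */
//     if (name) node.tag = name;
//     return node;
//   })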
<|start_filename|>src/babel/plugin.js<|end_filename|>
import { makeHelpers } from './transform';
export default function reghexPlugin(babel, opts = {}) {
let helpers;
return {
name: 'reghex',
visitor: {
Program() {
helpers = makeHelpers(babel);
},
ImportDeclaration(path) {
if (opts.codegen === false) return;
helpers.updateImport(path);
},
TaggedTemplateExpression(path) {
if (helpers.isMatch(path) && helpers.getMatchImport(path)) {
if (opts.codegen === false) {
helpers.minifyMatch(path);
} else {
helpers.transformMatch(path);
}
}
},
},
};
}
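// Usage sketch (illustrative): enabling the plugin in a Babel config. The
// 'reghex/babel' entry point is assumed from the babel.js re-export in this
// repository; `codegen: false` only minifies matchers instead of precompiling.
//
//   // babel.config.js
//   module.exports = {
//     plugins: [['reghex/babel', { codegen: false }]],
//   };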
<|start_filename|>src/babel/transform.js<|end_filename|>
import { astRoot } from '../codegen';
import { parse } from '../parser';
export function makeHelpers({ types: t, template }) {
const regexPatternsRe = /^[()\[\]|.+?*]|[^\\][()\[\]|.+?*$^]|\\[wdsWDS]/;
const importSourceRe = /reghex$|^reghex\/macro/;
const importName = 'reghex';
let _hasUpdatedImport = false;
let _matchId = t.identifier('match');
let _patternId = t.identifier('__pattern');
const _hoistedExpressions = new Map();
return {
/** Adds the reghex import declaration to the Program scope */
updateImport(path) {
if (_hasUpdatedImport) return;
if (!importSourceRe.test(path.node.source.value)) return;
_hasUpdatedImport = true;
if (path.node.source.value !== importName) {
path.node.source = t.stringLiteral(importName);
}
_patternId = path.scope.generateUidIdentifier('_pattern');
path.node.specifiers.push(
t.importSpecifier(_patternId, t.identifier('__pattern'))
);
const tagImport = path.node.specifiers.find((node) => {
return t.isImportSpecifier(node) && node.imported.name === 'match';
});
if (!tagImport) {
path.node.specifiers.push(
t.importSpecifier(
(_matchId = path.scope.generateUidIdentifier('match')),
t.identifier('match')
)
);
} else {
_matchId = tagImport.imported;
}
},
/** Determines whether the given tagged template expression is a reghex match */
isMatch(path) {
if (
t.isTaggedTemplateExpression(path.node) &&
t.isCallExpression(path.node.tag) &&
t.isIdentifier(path.node.tag.callee) &&
path.scope.hasBinding(path.node.tag.callee.name)
) {
if (t.isVariableDeclarator(path.parentPath))
path.parentPath._isMatch = true;
return true;
}
return (
t.isVariableDeclarator(path.parentPath) && path.parentPath._isMatch
);
},
/** Given a reghex match, returns the path to reghex's match import declaration */
getMatchImport(path) {
t.assertTaggedTemplateExpression(path.node);
const binding = path.scope.getBinding(path.node.tag.callee.name);
if (
binding.kind !== 'module' ||
!t.isImportDeclaration(binding.path.parent) ||
!importSourceRe.test(binding.path.parent.source.value) ||
!t.isImportSpecifier(binding.path.node)
) {
return null;
}
return binding.path.parentPath;
},
/** Given a match, returns an evaluated name or a best guess */
getMatchName(path) {
t.assertTaggedTemplateExpression(path.node);
const nameArgumentPath = path.get('tag.arguments.0');
if (nameArgumentPath) {
const { confident, value } = nameArgumentPath.evaluate();
if (!confident && t.isIdentifier(nameArgumentPath.node)) {
return nameArgumentPath.node.name;
} else if (confident && typeof value === 'string') {
return value;
}
}
return path.scope.generateUidIdentifierBasedOnNode(path.node);
},
/** Given a match, hoists its expressions in front of the match's statement */
_prepareExpressions(path) {
t.assertTaggedTemplateExpression(path.node);
const variableDeclarators = [];
const matchName = this.getMatchName(path);
const hoistedExpressions = path.node.quasi.expressions.map(
(expression, i) => {
if (
t.isArrowFunctionExpression(expression) &&
t.isIdentifier(expression.body)
) {
expression = expression.body;
} else if (
(t.isFunctionExpression(expression) ||
t.isArrowFunctionExpression(expression)) &&
t.isBlockStatement(expression.body) &&
expression.body.body.length === 1 &&
t.isReturnStatement(expression.body.body[0]) &&
t.isIdentifier(expression.body.body[0].argument)
) {
expression = expression.body.body[0].argument;
}
const isBindingExpression =
t.isIdentifier(expression) &&
path.scope.hasBinding(expression.name);
if (isBindingExpression) {
const binding = path.scope.getBinding(expression.name);
if (t.isVariableDeclarator(binding.path.node)) {
const matchPath = binding.path.get('init');
if (this.isMatch(matchPath)) {
return expression;
} else if (_hoistedExpressions.has(expression.name)) {
return t.identifier(_hoistedExpressions.get(expression.name));
}
}
}
const id = path.scope.generateUidIdentifier(
isBindingExpression
? `${expression.name}_expression`
: `${matchName}_expression`
);
variableDeclarators.push(
t.variableDeclarator(
id,
t.callExpression(t.identifier(_patternId.name), [expression])
)
);
if (t.isIdentifier(expression)) {
_hoistedExpressions.set(expression.name, id.name);
}
return id;
}
);
if (variableDeclarators.length) {
path
.getStatementParent()
.insertBefore(t.variableDeclaration('var', variableDeclarators));
}
return hoistedExpressions.map((id) => {
const binding = path.scope.getBinding(id.name);
if (binding && t.isVariableDeclarator(binding.path.node)) {
const matchPath = binding.path.get('init');
if (this.isMatch(matchPath)) {
return { fn: true, id: id.name };
}
}
const input = t.isStringLiteral(id)
? JSON.stringify(id.value)
: id.name;
return { fn: false, id: input };
});
},
_prepareTransform(path) {
const transformNode = path.node.tag.arguments[1];
if (!transformNode) return null;
if (t.isIdentifier(transformNode)) return transformNode.name;
const matchName = this.getMatchName(path);
const id = path.scope.generateUidIdentifier(`${matchName}_transform`);
const declarator = t.variableDeclarator(id, transformNode);
path
.getStatementParent()
.insertBefore(t.variableDeclaration('var', [declarator]));
return id.name;
},
minifyMatch(path) {
const quasis = path.node.quasi.quasis.map((x) =>
t.stringLiteral(x.value.cooked.replace(/\s*/g, ''))
);
const expressions = path.node.quasi.expressions;
const transform = this._prepareTransform(path);
path.replaceWith(
t.callExpression(path.node.tag, [
t.arrayExpression(quasis),
...expressions,
])
);
},
transformMatch(path) {
let name = path.node.tag.arguments[0];
if (!name) {
name = t.nullLiteral();
}
const quasis = path.node.quasi.quasis.map((x) => x.value.cooked);
const expressions = this._prepareExpressions(path);
const transform = this._prepareTransform(path);
let ast;
try {
ast = parse(quasis, expressions);
} catch (error) {
if (error.name !== 'SyntaxError') throw error;
throw path.get('quasi').buildCodeFrameError(error.message);
}
const code = astRoot(ast, '%%name%%', transform && '%%transform%%');
path.replaceWith(
template.expression(code)(transform ? { name, transform } : { name })
);
},
};
}
<|start_filename|>babel.js<|end_filename|>
module.exports = require('./dist/reghex-babel.js');
<|start_filename|>src/core.js<|end_filename|>
import { astRoot } from './codegen';
import { parse as parseDSL } from './parser';
const isStickySupported = typeof /./g.sticky === 'boolean';
const execLambda = (pattern) => {
if (pattern.length) return pattern;
return (state) => pattern()(state);
};
const execString = (pattern) => {
return (state) => {
if (state.x < state.quasis.length) {
const input = state.quasis[state.x];
for (let i = 0, l = pattern.length; i < l; i++)
if (input.charCodeAt(state.y + i) !== pattern.charCodeAt(i))
return null;
state.y += pattern.length;
return pattern;
}
};
};
const execRegex = (pattern) => {
pattern = isStickySupported
? new RegExp(pattern.source, 'y')
: new RegExp(pattern.source + '|()', 'g');
return (state) => {
if (state.x < state.quasis.length) {
const input = state.quasis[state.x];
pattern.lastIndex = state.y;
let match;
if (isStickySupported) {
if (pattern.test(input))
match = input.slice(state.y, pattern.lastIndex);
} else {
const x = pattern.exec(input);
if (x[1] == null) match = x[0];
}
state.y = pattern.lastIndex;
return match;
}
};
};
export const __pattern = (input) => {
if (typeof input === 'function') {
return execLambda(input);
} else if (typeof input === 'string') {
return execString(input);
} else {
return execRegex(input);
}
};
export const interpolation = (predicate) => (state) => {
let match;
if (
state.x < state.expressions.length &&
state.y >= state.quasis[state.x].length
) {
state.y = 0;
match = state.expressions[state.x++];
if (predicate && match) match = predicate(match);
}
return match;
};
export const parse = (matcher) => (quasis, ...expressions) => {
if (typeof quasis === 'string') quasis = [quasis];
const state = { quasis, expressions, x: 0, y: 0 };
return matcher(state);
};
export const match = (name, transform) => (quasis, ...expressions) => {
const ast = parseDSL(
quasis,
expressions.map((_, i) => ({ id: `_${i}` }))
);
return new Function(
'_n,_t,' + expressions.map((_expression, i) => `_${i}`).join(','),
'return ' + astRoot(ast, '_n', transform ? '_t' : null)
)(name, transform, ...expressions.map(__pattern));
};
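// Usage sketch (illustrative): `match` compiles the tagged-template DSL into a
// matcher and `parse` runs it against an input. The grammar below is a made-up
// example, not part of this module.
//
//   import { match, parse } from 'reghex';
//
//   const identifier = match('identifier')`${/[a-z]+/}`;
//   const assignment = match('assignment')`${identifier} ${/=/} ${/\d+/}`;
//
//   parse(assignment)('abc=42');
//   // -> roughly [['abc'], '=', '42'], with the inner array tagged
//   //    'identifier' and the outer array tagged 'assignment'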
<|start_filename|>src/core.test.js<|end_filename|>
import { parse, match, interpolation } from './core';
const expectToParse = (node, input, result, lastIndex = 0) => {
const state = { quasis: [input], expressions: [], x: 0, y: 0 };
if (result) result.tag = 'node';
expect(node(state)).toEqual(result);
// NOTE: After parsing we expect the current index to exactly match the
// total number of matched characters
if (result === undefined) {
expect(state.y).toBe(0);
} else {
const index = lastIndex || result.reduce((acc, x) => acc + x.length, 0);
expect(state.y).toBe(index);
}
};
describe('can create nameless matchers', () => {
it('matches without tagging', () => {
const state = { quasis: ['1'], expressions: [], x: 0, y: 0 };
const node = match(null)`${/1/}`;
expect(node(state)).toEqual(['1']);
});
});
describe('required matcher', () => {
const node = match('node')`${/1/}`;
it.each`
input | result
${'1'} | ${['1']}
${''} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
it('matches empty regex patterns', () => {
const node = match('node')`${/[ ]*/}`;
expectToParse(node, '', ['']);
});
});
describe('optional matcher', () => {
const node = match('node')`${/1/}?`;
it.each`
input | result
${'1'} | ${['1']}
${'_'} | ${[]}
${''} | ${[]}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('star matcher', () => {
const node = match('node')`${/1/}*`;
it.each`
input | result
${'1'} | ${['1']}
${'11'} | ${['1', '1']}
${'111'} | ${['1', '1', '1']}
${'_'} | ${[]}
${''} | ${[]}
`('should return $result when "$input" is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('plus matcher', () => {
const node = match('node')`${/1/}+`;
it.each`
input | result
${'1'} | ${['1']}
${'11'} | ${['1', '1']}
${'111'} | ${['1', '1', '1']}
${'_'} | ${undefined}
${''} | ${undefined}
`('should return $result when "$input" is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('optional then required matcher', () => {
const node = match('node')`${/1/}? ${/2/}`;
it.each`
input | result
${'12'} | ${['1', '2']}
${'2'} | ${['2']}
${''} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('star then required matcher', () => {
const node = match('node')`${/1/}* ${/2/}`;
it.each`
input | result
${'12'} | ${['1', '2']}
${'112'} | ${['1', '1', '2']}
${'2'} | ${['2']}
${''} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('plus then required matcher', () => {
const node = match('node')`${/1/}+ ${/2/}`;
it.each`
input | result
${'12'} | ${['1', '2']}
${'112'} | ${['1', '1', '2']}
${'2'} | ${undefined}
${''} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('optional group then required matcher', () => {
const node = match('node')`(${/1/} ${/2/})? ${/3/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'3'} | ${['3']}
${'23'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('star group then required matcher', () => {
const node = match('node')`(${/1/} ${/2/})* ${/3/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'12123'} | ${['1', '2', '1', '2', '3']}
${'3'} | ${['3']}
${'23'} | ${undefined}
${'13'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('plus group then required matcher', () => {
const node = match('node')`(${/1/} ${/2/})+ ${/3/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'12123'} | ${['1', '2', '1', '2', '3']}
${'23'} | ${undefined}
${'3'} | ${undefined}
${'13'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('optional group with nested optional matcher, then required matcher', () => {
const node = match('node')`(${/1/}? ${/2/})? ${/3/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'23'} | ${['2', '3']}
${'3'} | ${['3']}
${'13'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('star group with nested optional matcher, then required matcher', () => {
const node = match('node')`(${/1/}? ${/2/})* ${/3/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'23'} | ${['2', '3']}
${'223'} | ${['2', '2', '3']}
${'2123'} | ${['2', '1', '2', '3']}
${'3'} | ${['3']}
${'13'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('plus group with nested optional matcher, then required matcher', () => {
const node = match('node')`(${/1/}? ${/2/})+ ${/3/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'23'} | ${['2', '3']}
${'223'} | ${['2', '2', '3']}
${'2123'} | ${['2', '1', '2', '3']}
${'3'} | ${undefined}
${'13'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('plus group with nested plus matcher, then required matcher', () => {
const node = match('node')`(${/1/}+ ${/2/})+ ${/3/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'1123'} | ${['1', '1', '2', '3']}
${'12123'} | ${['1', '2', '1', '2', '3']}
${'121123'} | ${['1', '2', '1', '1', '2', '3']}
${'3'} | ${undefined}
${'23'} | ${undefined}
${'13'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('plus group with nested required and plus matcher, then required matcher', () => {
const node = match('node')`(${/1/} ${/2/}+)+ ${/3/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'1223'} | ${['1', '2', '2', '3']}
${'122123'} | ${['1', '2', '2', '1', '2', '3']}
${'13'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('nested plus group with nested required and plus matcher, then required matcher or alternate', () => {
const node = match('node')`(${/1/} ${/2/}+)+ ${/3/} | ${/1/}`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'1223'} | ${['1', '2', '2', '3']}
${'122123'} | ${['1', '2', '2', '1', '2', '3']}
${'1'} | ${['1']}
${'13'} | ${['1']}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('nested plus group with nested required and plus matcher, then alternate', () => {
const node = match('node')`(${/1/} ${/2/}+)+ (${/3/} | ${/4/})`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'124'} | ${['1', '2', '4']}
${'1223'} | ${['1', '2', '2', '3']}
${'1224'} | ${['1', '2', '2', '4']}
${'1'} | ${undefined}
${'13'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('regular alternate', () => {
const node = match('node')`${/1/} | ${/2/} | ${/3/} | ${/4/}`;
it.each`
input | result
${'1'} | ${['1']}
${'2'} | ${['2']}
${'3'} | ${['3']}
${'4'} | ${['4']}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('nested alternate in nested alternate in alternate', () => {
const node = match('node')`((${/1/} | ${/2/}) | ${/3/}) | ${/4/}`;
it.each`
input | result
${'1'} | ${['1']}
${'2'} | ${['2']}
${'3'} | ${['3']}
${'4'} | ${['4']}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('alternate after required matcher', () => {
const node = match('node')`${/1/} (${/2/} | ${/3/})`;
it.each`
input | result
${'12'} | ${['1', '2']}
${'13'} | ${['1', '3']}
${'14'} | ${undefined}
${'3'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('alternate with star group and required matcher after required matcher', () => {
const node = match('node')`${/1/} (${/2/}* ${/3/} | ${/4/})`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'1223'} | ${['1', '2', '2', '3']}
${'13'} | ${['1', '3']}
${'14'} | ${['1', '4']}
${'12'} | ${undefined}
${'15'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('alternate with plus group and required matcher after required matcher', () => {
const node = match('node')`${/1/} (${/2/}+ ${/3/} | ${/4/})`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'1223'} | ${['1', '2', '2', '3']}
${'14'} | ${['1', '4']}
${'13'} | ${undefined}
${'12'} | ${undefined}
${'15'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('alternate with optional and required matcher after required matcher', () => {
const node = match('node')`${/1/} (${/2/}? ${/3/} | ${/4/})`;
it.each`
input | result
${'123'} | ${['1', '2', '3']}
${'13'} | ${['1', '3']}
${'14'} | ${['1', '4']}
${'12'} | ${undefined}
${'15'} | ${undefined}
${'_'} | ${undefined}
`('should return $result when $input is passed', ({ input, result }) => {
expectToParse(node, input, result);
});
});
describe('non-capturing group', () => {
const node = match('node')`${/1/} (?: ${/2/}+)`;
it.each`
input | result | lastIndex
${'12'} | ${['1']} | ${2}
${'122'} | ${['1']} | ${3}
${'13'} | ${undefined} | ${0}
${'1'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('non-capturing shorthand', () => {
const node = match('node')`${/1/} :${/2/}+`;
it.each`
input | result | lastIndex
${'12'} | ${['1']} | ${2}
${'122'} | ${['1']} | ${3}
${'13'} | ${undefined} | ${0}
${'1'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('non-capturing group with plus matcher, then required matcher', () => {
const node = match('node')`(?: ${/1/}+) ${/2/}`;
it.each`
input | result | lastIndex
${'12'} | ${['2']} | ${2}
${'112'} | ${['2']} | ${3}
${'1'} | ${undefined} | ${0}
${'13'} | ${undefined} | ${0}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('non-capturing group with star group and required matcher, then required matcher', () => {
const node = match('node')`(?: ${/1/}* ${/2/}) ${/3/}`;
it.each`
input | result | lastIndex
${'123'} | ${['3']} | ${3}
${'1123'} | ${['3']} | ${4}
${'23'} | ${['3']} | ${2}
${'13'} | ${undefined} | ${0}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('non-capturing group with plus group and required matcher, then required matcher', () => {
const node = match('node')`(?: ${/1/}+ ${/2/}) ${/3/}`;
it.each`
input | result | lastIndex
${'123'} | ${['3']} | ${3}
${'1123'} | ${['3']} | ${4}
${'23'} | ${undefined} | ${0}
${'13'} | ${undefined} | ${0}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('non-capturing group with optional and required matcher, then required matcher', () => {
const node = match('node')`(?: ${/1/}? ${/2/}) ${/3/}`;
it.each`
input | result | lastIndex
${'123'} | ${['3']} | ${3}
${'23'} | ${['3']} | ${2}
${'13'} | ${undefined} | ${0}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('positive lookahead group', () => {
const node = match('node')`(?= ${/1/}) ${/\d/}`;
it.each`
input | result | lastIndex
${'1'} | ${['1']} | ${1}
${'13'} | ${['1']} | ${1}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('positive lookahead shorthand', () => {
const node = match('node')`=${/1/} ${/\d/}`;
it.each`
input | result | lastIndex
${'1'} | ${['1']} | ${1}
${'13'} | ${['1']} | ${1}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('positive lookahead group with plus matcher', () => {
const node = match('node')`(?= ${/1/}+) ${/\d/}`;
it.each`
input | result | lastIndex
${'1'} | ${['1']} | ${1}
${'11'} | ${['1']} | ${1}
${'12'} | ${['1']} | ${1}
${'22'} | ${undefined} | ${0}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('positive lookahead group with plus group and required matcher', () => {
const node = match('node')`(?= ${/1/}+ ${/2/}) ${/\d/}`;
it.each`
input | result | lastIndex
${'12'} | ${['1']} | ${1}
${'112'} | ${['1']} | ${1}
${'1123'} | ${['1']} | ${1}
${'2'} | ${undefined} | ${0}
${'1'} | ${undefined} | ${0}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('negative lookahead group', () => {
const node = match('node')`(?! ${/1/}) ${/\d/}`;
it.each`
input | result | lastIndex
${'2'} | ${['2']} | ${1}
${'23'} | ${['2']} | ${1}
${'1'} | ${undefined} | ${0}
${'1'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('negative lookahead shorthand', () => {
const node = match('node')`!${/1/} ${/\d/}`;
it.each`
input | result | lastIndex
${'2'} | ${['2']} | ${1}
${'23'} | ${['2']} | ${1}
${'1'} | ${undefined} | ${0}
${'1'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('longer negative lookahead group', () => {
const node = match('node')`${/1/} (?! ${/2/} ${/3/}) ${/\d/} ${/\d/}`;
it.each`
input | result | lastIndex
${'145'} | ${['1', '4', '5']} | ${3}
${'124'} | ${['1', '2', '4']} | ${3}
${'123'} | ${undefined} | ${0}
${'2'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('negative lookahead group with plus matcher', () => {
const node = match('node')`(?! ${/1/}+) ${/\d/}`;
it.each`
input | result | lastIndex
${'2'} | ${['2']} | ${1}
${'21'} | ${['2']} | ${1}
${'22'} | ${['2']} | ${1}
${'11'} | ${undefined} | ${0}
${'1'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('negative lookahead group with plus group and required matcher', () => {
const node = match('node')`(?! ${/1/}+ ${/2/}) ${/\d/}`;
it.each`
input | result | lastIndex
${'21'} | ${['2']} | ${1}
${'211'} | ${['2']} | ${1}
${'113'} | ${['1']} | ${1}
${'1'} | ${['1']} | ${1}
${'112'} | ${undefined} | ${0}
${'12'} | ${undefined} | ${0}
${'_'} | ${undefined} | ${0}
`(
'should return $result when $input is passed',
({ input, result, lastIndex }) => {
expectToParse(node, input, result, lastIndex);
}
);
});
describe('interpolation parsing', () => {
const node = match('node')`
${/1/}
${interpolation((x) => (x > 1 ? x : null))}
${/3/}
`;
it('matches interpolations', () => {
const expected = ['1', 2, '3'];
expected.tag = 'node';
expect(parse(node)`1${2}3`).toEqual(expected);
});
it('does not match invalid inputs', () => {
expect(parse(node)`13`).toBe(undefined);
expect(parse(node)`13${2}`).toBe(undefined);
expect(parse(node)`${2}13`).toBe(undefined);
expect(parse(node)`1${1}3`).toBe(undefined);
});
});
describe('string matching', () => {
const node = match('node')`
${'1'}
${'2'}
`;
it('matches strings', () => {
const expected = ['1', '2'];
expected.tag = 'node';
expect(parse(node)('12')).toEqual(expected);
expect(parse(node)('13')).toBe(undefined);
});
});
<|start_filename|>scripts/simplify-jstags-plugin.js<|end_filename|>
import { transformSync as transform } from '@babel/core';
import { createFilter } from '@rollup/pluginutils';
import transformTemplateLiterals from '@babel/plugin-transform-template-literals';
import eliminateClosures from 'babel-plugin-closure-elimination';
const simplifyJSTags = ({ types: t }) => ({
visitor: {
TaggedTemplateExpression(path) {
if (path.node.tag.name !== 'js') return;
const expressions = path.node.quasi.expressions;
const quasis = path.node.quasi.quasis.map((x) =>
x.value.cooked
.replace(/\s*[=(){},;:!]\s*/g, (x) => x.trim())
.replace(/\s+/g, ' ')
.replace(/^\s+$/g, '')
);
const concat = expressions.reduceRight(
(prev, node, i) =>
t.binaryExpression(
'+',
t.stringLiteral(quasis[i]),
t.binaryExpression('+', node, prev)
),
t.stringLiteral(quasis[quasis.length - 1])
);
path.replaceWith(concat);
},
},
});
function simplifyJSTagsPlugin(opts = {}) {
const filter = createFilter(opts.include, opts.exclude, {
resolve: false,
});
return {
name: 'cleanup',
renderChunk(code, chunk) {
if (!filter(chunk.fileName)) {
return null;
}
return transform(code, {
plugins: [
simplifyJSTags,
[transformTemplateLiterals, { loose: true }],
eliminateClosures,
],
babelrc: false,
});
},
};
}
export default simplifyJSTagsPlugin;
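// Usage sketch (illustrative): the plugin runs during rollup's renderChunk
// phase; the input path and include filter below are assumptions.
//
//   // rollup.config.js
//   import simplifyJSTagsPlugin from './scripts/simplify-jstags-plugin';
//
//   export default {
//     input: 'src/core.js',
//     plugins: [simplifyJSTagsPlugin({ include: ['**/*.js'] })],
//     output: { file: 'dist/reghex-core.js', format: 'esm' },
//   };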
<|start_filename|>src/babel/plugin.test.js<|end_filename|>
import { transform } from '@babel/core';
import reghexPlugin from './plugin';
it('works with standard features', () => {
const code = `
import { match } from 'reghex/macro';
const node = match('node')\`
\${1}+ | \${2}+ (\${3} ( \${4}? \${5} ) )*
\`;
`;
expect(
transform(code, { babelrc: false, presets: [], plugins: [reghexPlugin] })
.code
).toMatchSnapshot();
});
it('works with nameless matchers', () => {
const code = `
import { match } from 'reghex/macro';
const node = match()\`
\${1}+ | \${2}+ (\${3} ( \${4}? \${5} ) )*
\`;
`;
expect(
transform(code, { babelrc: false, presets: [], plugins: [reghexPlugin] })
.code
).toMatchSnapshot();
});
it('works while only minifying', () => {
const code = `
import { match } from 'reghex/macro';
const node = match('node')\`
\${1}+ | \${2}+ (\${3} ( \${4}? \${5} ) )*
\`;
`;
expect(
transform(code, {
babelrc: false,
presets: [],
plugins: [[reghexPlugin, { codegen: false }]],
}).code
).toMatchSnapshot();
});
it('deduplicates hoisted expressions', () => {
const code = `
import { match } from 'reghex/macro';
const re = /1/;
const str = '1';
const a = match('a')\`
\${re}
\${str}
\`;
const b = match('b')\`
\${re}
\${'2'}
\`;
`;
expect(
transform(code, { babelrc: false, presets: [], plugins: [reghexPlugin] })
.code
).toMatchSnapshot();
});
it('works with local recursion', () => {
// NOTE: A different default name is allowed
const code = `
import { match as m, tag } from 'reghex';
const inner = m('inner')\`
\${/inner/}
\`;
const node = m('node')\`
\${inner}
\`;
`;
expect(
transform(code, { babelrc: false, presets: [], plugins: [reghexPlugin] })
.code
).toMatchSnapshot();
});
it('works with self-referential thunks', () => {
const code = `
import { match, tag } from 'reghex';
const inner = match('inner')\`
\${() => node}
\`;
const node = match('node')\`
\${inner}
\`;
`;
expect(
transform(code, { babelrc: false, presets: [], plugins: [reghexPlugin] })
.code
).toMatchSnapshot();
});
it('works with transform functions', () => {
const code = `
import { match } from 'reghex';
const first = match('inner', x => x)\`\`;
const transform = x => x;
const second = match('node', transform)\`\`;
`;
expect(
transform(code, { babelrc: false, presets: [], plugins: [reghexPlugin] })
.code
).toMatchSnapshot();
});
it('works with non-capturing groups', () => {
const code = `
import { match } from 'reghex';
const node = match('node')\`
\${1} (\${2} | (?: \${3})+)
\`;
`;
expect(
transform(code, { babelrc: false, presets: [], plugins: [reghexPlugin] })
.code
).toMatchSnapshot();
});
it('works together with @babel/plugin-transform-modules-commonjs', () => {
const code = `
import { match } from 'reghex';
const node = match('node')\`
\${1} \${2}
\`;
`;
expect(
transform(code, {
babelrc: false,
presets: [],
plugins: [
reghexPlugin,
[
'@babel/plugin-transform-modules-commonjs',
{
noInterop: true,
loose: true,
},
],
],
}).code
).toMatchSnapshot();
});
<|start_filename|>src/parser.test.js<|end_filename|>
import { parse } from './parser';
const parseTag = (quasis, ...expressions) => parse(quasis, expressions);
it('supports parsing expressions with quantifiers', () => {
let ast;
ast = parseTag`${1}?`;
expect(ast).toHaveProperty('0.quantifier', '?');
ast = parseTag`${1}+`;
expect(ast).toHaveProperty('0.quantifier', '+');
ast = parseTag`${1}*`;
expect(ast).toHaveProperty('0.quantifier', '*');
});
it('supports top-level alternations', () => {
let ast;
ast = parseTag`${1} | ${2}`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.expression', 1);
expect(ast).toHaveProperty('alternation.0.expression', 2);
ast = parseTag`${1}? | ${2}?`;
expect(ast).toHaveProperty('0.quantifier', '?');
});
it('supports groups with quantifiers', () => {
let ast;
ast = parseTag`(${1} ${2})`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.sequence.length', 2);
expect(ast).toHaveProperty('0.sequence.0.expression', 1);
expect(ast).toHaveProperty('0.sequence.1.expression', 2);
ast = parseTag`(${1} ${2}?)?`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.quantifier', '?');
expect(ast).toHaveProperty('0.sequence.0.quantifier', undefined);
});
describe('non-capturing syntax', () => {
it('supports regex-like syntax', () => {
const ast = parseTag`(?: ${1})`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', ':');
expect(ast).toHaveProperty('0.sequence.length', 1);
});
it('supports shorthand', () => {
let ast = parseTag`:${1}`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', ':');
expect(ast).toHaveProperty('0.expression', 1);
ast = parseTag`:(${1})`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', ':');
expect(ast).toHaveProperty('0.sequence.length', 1);
});
it('fails on invalid usage', () => {
expect(() => parseTag`${1} : ${2}`).toThrow();
expect(() => parseTag`${1} :|${2}`).toThrow();
});
});
describe('positive lookaheads syntax', () => {
it('supports regex-like syntax', () => {
const ast = parseTag`(?= ${1})`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', '=');
expect(ast).toHaveProperty('0.sequence.length', 1);
});
it('supports shorthand', () => {
let ast = parseTag`=${1}`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', '=');
expect(ast).toHaveProperty('0.expression', 1);
ast = parseTag`=(${1})`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', '=');
expect(ast).toHaveProperty('0.sequence.length', 1);
});
});
describe('negative lookaheads syntax', () => {
it('supports regex-like syntax', () => {
const ast = parseTag`(?! ${1})`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', '!');
expect(ast).toHaveProperty('0.sequence.length', 1);
});
it('supports shorthand', () => {
let ast = parseTag`!${1}`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', '!');
expect(ast).toHaveProperty('0.expression', 1);
ast = parseTag`!(${1})`;
expect(ast).toHaveProperty('length', 1);
expect(ast).toHaveProperty('0.capture', '!');
expect(ast).toHaveProperty('0.sequence.length', 1);
});
});
it('supports groups with alternates', () => {
expect(parseTag`(${1} | ${2}) ${3}`).toMatchInlineSnapshot(`
Array [
Object {
"capture": undefined,
"sequence": Array [
Object {
"capture": undefined,
"expression": 1,
},
],
},
Object {
"capture": undefined,
"expression": 3,
},
]
`);
});
<|start_filename|>src/parser.js<|end_filename|>
const syntaxError = (char) => {
throw new SyntaxError('Unexpected token "' + char + '"');
};
export const parse = (quasis, expressions) => {
let quasiIndex = 0;
let stackIndex = 0;
const sequenceStack = [];
const rootSequence = [];
let currentGroup = null;
let lastMatch;
let currentSequence = rootSequence;
let capture;
for (
let quasiIndex = 0, stackIndex = 0;
stackIndex < quasis.length + expressions.length;
stackIndex++
) {
if (stackIndex % 2 !== 0) {
const expression = expressions[stackIndex++ >> 1];
currentSequence.push({ expression, capture });
capture = undefined;
}
const quasi = quasis[stackIndex >> 1];
for (quasiIndex = 0; quasiIndex < quasi.length; ) {
const char = quasi[quasiIndex++];
if (char === ' ' || char === '\t' || char === '\r' || char === '\n') {
} else if (char === '|' && currentSequence.length) {
currentSequence = currentSequence.alternation = [];
} else if (char === ')' && currentSequence.length) {
currentGroup = null;
currentSequence = sequenceStack.pop();
if (!currentSequence) syntaxError(char);
} else if (char === '(') {
sequenceStack.push(currentSequence);
currentSequence.push((currentGroup = { sequence: [], capture }));
currentSequence = currentGroup.sequence;
capture = undefined;
} else if (char === ':' || char === '=' || char === '!') {
capture = char;
const nextChar = quasi[quasiIndex];
if (nextChar && nextChar !== '(') syntaxError(char);
} else if (char === '?' && !currentSequence.length && currentGroup) {
capture = quasi[quasiIndex++];
if (capture === ':' || capture === '=' || capture === '!') {
currentGroup.capture = capture;
capture = undefined;
} else {
syntaxError(char);
}
} else if (
(char === '?' || char === '+' || char === '*') &&
(lastMatch = currentSequence[currentSequence.length - 1])
) {
lastMatch.quantifier = char;
} else {
syntaxError(char);
}
}
}
return rootSequence;
};
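// Shape sketch (illustrative): the parser returns an array of entries of the
// form { expression, capture, quantifier } for leaf matchers and
// { sequence, capture, quantifier } for groups. Alternations are attached to
// the sequence array itself as an `alternation` property, so for `${a} | ${b}`
// the result is roughly:
//
//   result = [{ expression: a, capture: undefined }];
//   result.alternation = [{ expression: b, capture: undefined }];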
<|start_filename|>src/babel/macro.js<|end_filename|>
import { createMacro } from 'babel-plugin-macros';
import { makeHelpers } from './transform';
function reghexMacro({ references, babel }) {
const { types: t } = babel;
const helpers = makeHelpers(babel);
const defaultRefs = references.default || [];
defaultRefs.forEach((ref) => {
if (!t.isCallExpression(ref.parentPath.node)) return;
const path = ref.parentPath.parentPath;
if (!helpers.isMatch(path)) return;
const importPath = helpers.getMatchImport(path);
if (!importPath) return;
helpers.updateImport(importPath);
helpers.transformMatch(path);
});
return {
keepImports: true,
};
}
export default createMacro(reghexMacro);
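// Usage sketch (illustrative): with babel-plugin-macros configured, importing
// from 'reghex/macro' triggers this macro, mirroring the plugin-based
// transforms exercised in plugin.test.js.
//
//   import { match } from 'reghex/macro';
//   const node = match('node')`${/\w+/}`;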
| kitten/reghex |
<|start_filename|>smart-referee/screens/SignUp/index.js<|end_filename|>
import SignUpContainer from "./SignUpContainer";
export default SignUpContainer;
| bts-break-through-sw-maestro/SMART-Refree-App |
<|start_filename|>examples/gorice/main.go<|end_filename|>
/*
* Copyright 2018 Foolin. All rights reserved.
*
* Use of this source code is governed by a MIT style
* license that can be found in the LICENSE file.
*
*/
package main
import (
"net/http"
"github.com/GeertJohan/go.rice"
"github.com/foolin/gin-template/supports/gorice"
"github.com/gin-gonic/gin"
)
func main() {
router := gin.Default()
// serves other static files
staticBox := rice.MustFindBox("static")
router.StaticFS("/static", staticBox.HTTPBox())
//new template engine
router.HTMLRender = gorice.New(rice.MustFindBox("views"))
// Routes
router.GET("/", func(c *gin.Context) {
//render with master
c.HTML(http.StatusOK, "index", gin.H{
"title": "Index title!",
"add": func(a int, b int) int {
return a + b
},
})
})
router.GET("/page", func(c *gin.Context) {
//render only file, must full name with extension
c.HTML(http.StatusOK, "page.html", gin.H{"title": "Page file title!!"})
})
// Start server
router.Run(":9090")
}
<|start_filename|>examples/block/views/layouts/master.html<|end_filename|>
<!-- /views/layouts/master.html -->
<!doctype html>
<html>
<head>
<title>{{.title}}</title>
</head>
<body>
{{block "head" .}}
<!-- default head value -->
Default head content! (master.html)
{{end}}
<hr>
{{template "content" .}}
<hr>
{{block "foot" .}}Default foot content! (master.html){{end}}
</body>
</html>
<|start_filename|>examples/block/views/index.html<|end_filename|>
{{define "content"}}
<h1 class="hello">This is content!!!!</h1>
<p>This page does not define:
<pre> {{`{{define "head"}}{{end}}`}}</pre>
<pre> {{`{{define "foot"}}{{end}}`}}</pre>
So it outputs the default content!</p>
<hr>
<p><a href="/block">block page!</a></p>
{{end}}
<|start_filename|>supports/gorice/gorice.go<|end_filename|>
package gorice
import (
"github.com/foolin/gin-template"
"github.com/GeertJohan/go.rice"
)
/**
New gin template engine, default views root.
*/
func New(viewsRootBox *rice.Box) *gintemplate.TemplateEngine {
return NewWithConfig(viewsRootBox, gintemplate.DefaultConfig)
}
/**
New gin template engine
Important!!! The viewsRootBox's name and config.Root must be consistent.
*/
func NewWithConfig(viewsRootBox *rice.Box, config gintemplate.TemplateConfig) *gintemplate.TemplateEngine {
config.Root = viewsRootBox.Name()
engine := gintemplate.New(config)
engine.SetFileHandler(FileHandler(viewsRootBox))
return engine
}
/**
Support go.rice file handler
*/
func FileHandler(viewsRootBox *rice.Box) gintemplate.FileHandler {
return func(config gintemplate.TemplateConfig, tplFile string) (content string, err error) {
// get file contents as string
return viewsRootBox.String(tplFile + config.Extension)
}
}
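// Usage sketch (illustrative): embedding the views directory with go.rice and
// overriding part of the default configuration before building the render
// engine. The Extension override is an assumption about
// gintemplate.TemplateConfig; check that package for the authoritative fields.
//
//   router := gin.Default()
//   config := gintemplate.DefaultConfig
//   config.Extension = ".html"
//   router.HTMLRender = gorice.NewWithConfig(rice.MustFindBox("views"), config)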
<|start_filename|>examples/block/views/block.html<|end_filename|>
<!-- /views/block.html -->
{{define "head"}}
I'm block head!
{{end}}
{{define "content"}}
<a href="/"><- Back home!</a>
<hr>
<p>This page defines:
<pre> {{`{{define "head"}} I'm block head!{{end}}`}}</pre>
<pre> {{`{{define "foot"}} I'm block foot!{{end}}`}}</pre>
So it outputs the defined content!</p>
{{end}}
{{define "foot"}}
I'm block foot!
{{end}} | WinPooh32/gin-template |
<|start_filename|>lib/csdl2markdown.js<|end_filename|>
/**
* Converts OData CSDL JSON to Github Flavored Markdown
*
* Latest version: https://github.com/oasis-tcs/odata-vocabularies/blob/main/lib/csdl2markdown.js
*/
//TODO
// - See //TODO comments below
// - use something like https://github.com/epoberezkin/json-source-map to get source linenumbers for JSON vocabulary input
/**
* Create Markdown documentation from a CSDL document
* @param {string} filename filename of CSDL document in XML format
* @param {object} csdl CSDL document
* @return {Array} Array of strings containing Markdown lines
*/
module.exports.csdl2markdown = function (filename, csdl, referenced = {}) {
const lines = [];
const index = {
alias: {},
namespace: { Edm: "Edm" },
namespaceUrl: {},
schema: {},
schemas: {},
terms: [],
actions: [],
functions: [],
types: [],
derivedTypes: {},
};
preProcess(index, csdl, referenced);
const voc = vocabularies(index.alias);
const sourceLine = "@parser.line";
lines.push(...header(index.schema, filename));
if (index.terms.length > 0) {
lines.push(...termSection(index.terms));
}
if (index.actions.length > 0) {
lines.push(...operationSection(index.actions));
}
if (index.functions.length > 0) {
lines.push(...operationSection(index.functions));
}
index.types.forEach((type) => {
lines.push(...typeSection(type));
});
lines.push("");
return lines;
/**
* Collect model info for easier lookup
* @param {object} index Map of model elements
* @param {object} csdl main CSDL document
* @param {object} referenced more CSDL documents
*/
function preProcess(index, csdl, referenced) {
Object.keys(csdl.$Reference || {}).forEach((url) => {
const reference = csdl.$Reference[url];
(reference.$Include || []).forEach((include) => {
const qualifier = include.$Alias || include.$Namespace;
index.alias[include.$Namespace] = qualifier;
index.alias[qualifier] = qualifier;
index.namespace[qualifier] = include.$Namespace;
index.namespace[include.$Namespace] = include.$Namespace;
index.namespaceUrl[qualifier] = url;
index.namespaceUrl[include.$Namespace] = url;
});
});
// implicit assumption: exactly one schema in a vocabulary
Object.keys(csdl)
.filter((name) => isIdentifier(name))
.forEach((name) => {
index.schemas[name] = csdl[name];
index.schema = csdl[name];
index.schema.$$namespace = name;
index.qualifier = index.schema.$Alias || name;
index.alias[name] = index.qualifier;
index.alias[index.qualifier] = index.qualifier;
index.namespace[index.qualifier] = name;
index.namespace[name] = name;
Object.keys(index.schema)
.filter((name) => isIdentifier(name))
.forEach((name) => {
const element = index.schema[name];
if (Array.isArray(element)) {
element.forEach((overload) => {
overload.$$name = name;
if (overload.$Kind == "Action") index.actions.push(overload);
else index.functions.push(overload);
});
} else {
element.$$name = name;
switch (element.$Kind) {
case "Term":
index.terms.push(element);
break;
case "ComplexType":
case "EntityType":
case "EnumType":
case "TypeDefinition":
index.types.push(element);
break;
}
if (element.$BaseType) {
if (!index.derivedTypes[element.$BaseType])
index.derivedTypes[element.$BaseType] = [];
index.derivedTypes[element.$BaseType].push(
index.qualifier + "." + name
);
}
}
});
});
for (const [filename, referencedCsdl] of Object.entries(referenced)) {
Object.keys(referencedCsdl)
.filter((name) => isIdentifier(name))
.forEach((namespace) => {
if (index.schema.$$namespace !== namespace) {
index.schemas[namespace] = referencedCsdl[namespace];
index.schemas[namespace].$$filename = filename;
}
});
}
}
/**
* Document header
* @param {object} schema Schema
* @param {string} filename Name of source file
* @return {Array} Array of strings containing Markdown lines
*/
function header(schema, filename) {
const lines = [];
const temp = schema.$$namespace.substring(
0,
schema.$$namespace.lastIndexOf(".")
);
const name = temp.substring(temp.lastIndexOf(".") + 1);
lines.push("# " + name + " Vocabulary");
lines.push("**Namespace: [" + schema.$$namespace + "](" + filename + ")**");
lines.push("");
lines.push(schema[voc.Core.Description] || "");
const longDescription = schema[voc.Core.LongDescription];
if (longDescription) {
lines.push("");
lines.push(...longDescription.split("\n"));
}
return lines;
}
/**
* Section with table of terms
* @param {Array} terms List of terms
* @return {Array} Array of strings containing Markdown lines
*/
function termSection(terms) {
const lines = [];
lines.push("");
lines.push("");
lines.push("## Terms");
lines.push("");
lines.push("Term|Type|Description");
lines.push(":---|:---|:----------");
terms.forEach((t) => {
lines.push(
sourceLink(t) +
experimentalOrDeprecated(t) +
"|" +
typeLink(t) +
"|" +
'<a name="' +
t.$$name +
'"></a>' +
descriptionInTable(t)
);
});
return lines;
}
/**
 * Section with table of action or function overloads
 * @param {Array} overloads List of action or function overloads
* @return {Array} Array of strings containing Markdown lines
*/
function operationSection(overloads) {
const lines = [];
const kind = overloads[0].$Kind;
lines.push("");
lines.push("");
lines.push("## " + kind + "s");
const over = {};
overloads.forEach((o) => {
if (over[o.$$name]) o.$$parent = "overload";
over[o.$$name] = true;
lines.push("");
lines.push(
'### <a name="' +
o.$$name +
'"></a>' +
sourceLink(o) +
experimentalOrDeprecated(o)
);
const depr = deprecated(o);
if (depr) {
lines.push(...depr.Description.split("\n"));
return lines;
}
lines.push("");
lines.push(escape(o[voc.Core.Description]));
const longDescription = o[voc.Core.LongDescription];
if (longDescription) {
lines.push("");
lines.push(...longDescription.split("\n"));
}
if (o.$Parameter || o.$ReturnType) {
lines.push("");
lines.push("Parameter|Type|Description");
lines.push(":--------|:---|:----------");
}
(o.$Parameter || []).forEach(function (p, i) {
let desc;
p.$$name = p.$Name;
p.$$parent = o.$$parent || o;
const depr = deprecated(p);
if (depr) desc = depr.Description;
else desc = descriptionInTable(p);
const emph =
o.$IsBound && i === 0
? "**"
: p[voc.Core.OptionalParameter]
? "*"
: "";
lines.push(
emph +
sourceLink(p) +
emph +
experimentalOrDeprecated(p) +
"|" +
typeLink(p) +
"|" +
emph +
(emph === "**"
? "Binding parameter"
: emph === "*"
? "Optional parameter"
: "") +
(desc && emph ? ":" + emph + " " : emph) +
desc
);
});
if (o.$ReturnType) {
o.$ReturnType.$$name = "→";
o.$ReturnType.$$parent = o.$$parent || o;
lines.push(
sourceLink(o.$ReturnType) +
"|" +
typeLink(o.$ReturnType) +
"|" +
descriptionInTable(o.$ReturnType)
);
}
if (o.$Parameter || o.$ReturnType) lines.push("");
});
return lines;
}
/**
* Section for a single type
* @param {object} type Type model element
* @return {Array} Array of strings containing Markdown lines
*/
function typeSection(type) {
const lines = [];
lines.push("");
lines.push(
'## <a name="' +
type.$$name +
'"></a>' +
sourceLink(type) +
(type.$BaseType ? ": " + typeLink({ $Type: type.$BaseType }) : "") +
experimentalOrDeprecated(type)
);
const depr = deprecated(type);
if (depr) {
lines.push(...depr.Description.split("\n"));
return lines;
}
if (type.$Kind == "TypeDefinition") {
lines.push("**Type:** " + typeLink({ $Type: type.$UnderlyingType }));
lines.push("");
}
lines.push(escape(type[voc.Core.Description]));
const longDescription = type[voc.Core.LongDescription];
if (longDescription) {
lines.push("");
lines.push(...longDescription.split("\n"));
}
const derivedTypes =
index.derivedTypes[index.qualifier + "." + type.$$name];
if (derivedTypes) {
lines.push("");
lines.push("**Derived Types:**");
lines.push(...derivedTypesList(derivedTypes));
}
if (["ComplexType", "EntityType"].includes(type.$Kind)) {
lines.push(...propertyTable(type));
lines.push(...applicableTermsTable(type));
} else if (type.$Kind == "EnumType") {
lines.push(...memberTable(type));
}
if (type.$Kind == "TypeDefinition") {
lines.push(...allowedValues(type));
}
return lines;
}
/**
* Allowed values for type definitions
* @param {object} typeDefinition Type Definition
* @return {Array} Array of strings containing Markdown lines
*/
function allowedValues(typeDefinition) {
const lines = [];
const values = typeDefinition[voc.Validation.AllowedValues];
if (values) {
lines.push("");
lines.push("Allowed Value|Description");
lines.push(":------------|:----------");
values.forEach((v) => {
v.$$name = v.Value;
v.$$parent = typeDefinition;
lines.push(
sourceLink(v) +
experimentalOrDeprecated(v) +
"|" +
descriptionInTable(v)
);
});
}
return lines;
}
/**
* List of derived types
* @param {Array} derivedTypes Array of derived type names
* @return {Array} Array of strings containing Markdown lines
*/
function derivedTypesList(derivedTypes, indent = "") {
const lines = [];
derivedTypes.forEach((t) => {
const type = modelElement(t);
lines.push(
indent +
"- " +
(type.$Abstract ? "*" : "") +
typeLink({ $Type: t }) +
(type.$Abstract ? "*" : "")
);
const derivedTypes = index.derivedTypes[t];
if (derivedTypes) {
lines.push(...derivedTypesList(derivedTypes, indent + " "));
}
});
return lines;
}
/**
* Table of properties of structured type
* @param {object} type Structured type
* @return {Array} Array of strings containing Markdown lines
*/
function propertyTable(type) {
const lines = [];
const pLines = propertyLines(type);
if (pLines.length > 0) {
lines.push("");
lines.push("Property|Type|Description");
lines.push(":-------|:---|:----------");
lines.push(...pLines);
}
return lines;
}
/**
* Table lines of properties of structured type
* @param {object} type Structured type
* @param {boolean} parent Current type is base type of "original" type
* @return {Array} Array of strings containing Markdown lines
*/
function propertyLines(type, parent = false) {
const lines = [];
if (!type) return lines;
if (type.$BaseType) {
lines.push(...propertyLines(modelElement(type.$BaseType), true));
}
Object.keys(type)
.filter((name) => isIdentifier(name))
.forEach((name) => {
const p = type[name];
p.$$name = name;
p.$$filename = type.$$filename;
p.$$parent = type;
lines.push(
sourceLink(p, parent) +
experimentalOrDeprecated(p) +
"|" +
typeLink(p) +
"|" +
descriptionInTable(p)
);
});
return lines;
}
/**
* Table of applicable terms of structured type
* @param {object} type Structured type
* @return {Array} Array of strings containing Markdown lines
*/
function applicableTermsTable(type) {
const lines = [];
const tLines = applicableTermLines(type);
if (tLines.length > 0) {
lines.push("");
lines.push("**Applicable Annotation Terms:**");
lines.push("");
lines.push(...tLines);
}
return lines;
}
/**
* Table lines of applicable terms of structured type
* @param {object} type Structured type
* @return {Array} Array of strings containing Markdown lines
*/
function applicableTermLines(type) {
const lines = [];
if (!type) return lines;
if (type.$BaseType) {
lines.push(...applicableTermLines(modelElement(type.$BaseType), true));
}
const terms = type[voc.Validation.ApplicableTerms] || [];
terms.forEach((term) => {
lines.push(`- ${typeLink({ $Type: term })}`);
});
return lines;
}
/**
* Table of enumeration type members
* @param {object} type Enumeration type
* @return {Array} Array of strings containing Markdown lines
*/
function memberTable(type) {
const lines = [];
const members = [];
Object.keys(type)
.filter((name) => isIdentifier(name))
.forEach((name) => {
const member = { $$value: type[name] };
member.$$name = name;
member[sourceLine] = type[name + sourceLine];
member[voc.Core.Description] = type[name + voc.Core.Description];
member[voc.Core.LongDescription] =
type[name + voc.Core.LongDescription];
member[voc.Core.Deprecated] = type[name + voc.Core.Deprecated];
member[voc.Common.Experimental] = type[name + voc.Common.Experimental];
members.push(member);
});
if (members.length > 0) {
lines.push("");
lines.push((type.$IsFlags ? "Flag " : "") + "Member|Value|Description");
lines.push(":-----|----:|:----------");
}
members.forEach((m) => {
m.$$parent = type;
lines.push(
sourceLink(m) +
experimentalOrDeprecated(m) +
"|" +
m.$$value +
"|" +
descriptionInTable(m)
);
});
return lines;
}
/**
* Mark as experimental or deprecated
* @param {object} modelElement Model element
* @return {string} Marker for experimental or deprecated
*/
function experimentalOrDeprecated(modelElement) {
return (
(modelElement[voc.Common.Experimental]
? " *(" +
typeLink({ $Type: "com.sap.vocabularies.Common.v1.Experimental" }) +
")*"
: "") + (deprecated(modelElement) ? " *(Deprecated)*" : "")
);
}
/**
 * Check whether a model element is deprecated
 * @param {object} modelElement Model element
 * @return {object} "Deprecated" revision record if present, otherwise undefined
*/
function deprecated(modelElement) {
return (modelElement[voc.Core.Revisions] || []).find(
(r) => r.Kind == "Deprecated"
);
}
/**
* Description of model element escaped for use in a Markdown table
* @param {object} modelElement Model element to describe
* @return {string} description
*/
function descriptionInTable(modelElement) {
const depr = deprecated(modelElement);
const text = modelElement[voc.Core.Description];
const long = modelElement[voc.Core.LongDescription];
const example = modelElement[voc.Core.Example];
if (example) {
example.$$name = "Example";
// the record value of the annotation has the source line
if (example[sourceLine]) example[sourceLine] -= 1;
}
return depr
? escape(depr.Description)
: escape(text) +
(example ? " (" + sourceLink(example) + ")" : "") +
(long ? "<br>" + escape(long) : "") +
applicableTermsList(
modelElement[voc.Validation.ApplicableTerms] || []
) +
allowedTermsList(modelElement[voc.Validation.AllowedTerms] || []);
}
/**
* List of applicable terms
* @param {array} applicableTerms Array of applicable terms
* @return {string} Text
*/
function applicableTermsList(applicableTerms) {
const text = [];
if (applicableTerms.length > 0) text.push("<br>Can be annotated with:");
applicableTerms.forEach((term) => {
text.push(`<br>- ${typeLink({ $Type: term })}`);
});
return text.join("");
}
/**
* List of allowed terms
* @param {array} allowedTerms Array of allowed terms
* @return {string} Text
*/
function allowedTermsList(allowedTerms) {
const text = [];
if (allowedTerms.length > 0) text.push("<br>Allowed terms:");
allowedTerms.forEach((term) => {
text.push(`<br>- ${typeLink({ $Type: term })}`);
});
return text.join("");
}
/**
* Escape text for use in Markdown
* @param {string} text Text to escape
* @return {string} Escaped text
*/
function escape(text) {
return (text || "")
.trim()
.replace(/\n\n/g, "<br/>")
.replace(/\n/g, " ")
.replace(/[ \t]+/g, " ")
.replace(/\|/g, "\\|");
}
/**
* Construct link to definition of model element in source file
* @param {object} modelElement Model element to link
 * @param {boolean} parent true if modelElement is inherited from a base type
* @return {string} link
*/
function sourceLink(modelElement, parent = false) {
const line = modelElement[sourceLine];
let textFragment;
if (modelElement.$$parent !== "overload")
textFragment = modelElement.$Kind ? modelElement : modelElement.$$parent;
return (
(line ? "[" : "") +
(modelElement.$Abstract || parent ? "*" : "") +
modelElement.$$name +
(modelElement.$Abstract || parent ? "*" : "") +
(line
? "](./" +
(modelElement.$$filename || filename) +
"#L" +
line +
(textFragment
? `:~:text=<${textFragment.$Kind}%20Name="-,${textFragment.$$name},-"`
: "") +
")"
: "")
);
}
/**
* Construct link to documentation of a model element's type in markdown
* @param {object} modelElement Type to link
* @return {string} link
*/
function typeLink(modelElement) {
const np = nameParts(modelElement.$Type || "Edm.String");
const customType = index.namespace[np.qualifier] != "Edm";
const url = index.namespaceUrl[np.qualifier];
let customFile = "";
if (url) {
// guess file name from reference URL
const lastSegment = url.substring(url.lastIndexOf("/") + 1);
if (lastSegment.startsWith(index.namespace[np.qualifier] + "."))
customFile = index.namespace[np.qualifier] + ".md";
else if (lastSegment.startsWith(index.alias[np.qualifier] + ".")) {
customFile = index.alias[np.qualifier] + ".md";
}
if (
customFile.startsWith("Org.OData.") &&
!index.schema.$$namespace.startsWith("Org.OData.")
) {
customFile =
"https://github.com/oasis-tcs/odata-vocabularies/blob/main/vocabularies/" +
customFile;
}
}
//TODO: not so :-)
if (modelElement.$Type == "com.sap.vocabularies.Common.v1.Experimental")
customFile = "Common.md";
return (
(modelElement.$Collection ? "\\[" : "") +
(customType ? "[" : "") +
(customType
? np.name
: modelElement[voc.Core.IsURL]
? "URL"
: modelElement[voc.Core.IsMediaType]
? "MediaType"
: np.name) +
(modelElement.$Nullable ? "?" : "") +
(customType ? "](" + customFile + "#" + np.name + ")" : "") +
(modelElement.$Collection ? "\\]" : "")
);
}
/**
* a qualified name consists of a namespace or alias, a dot, and a simple name
* @param {string} qualifiedName
* @return {object} with components qualifier and name
*/
function nameParts(qualifiedName) {
const pos = qualifiedName.lastIndexOf(".");
console.assert(pos > 0, "Invalid qualified name " + qualifiedName);
return {
qualifier: qualifiedName.substring(0, pos),
name: qualifiedName.substring(pos + 1),
};
}
/**
* an identifier does not start with $ and does not contain @
* @param {string} name
* @return {boolean} name is an identifier
*/
function isIdentifier(name) {
return !name.startsWith("$") && !name.includes("@");
}
/**
* Construct map of qualified term names
* @param {object} alias Map of namespace or alias to alias
 * @return {object} Vocabulary term name map
*/
function vocabularies(alias) {
const terms = {
Common: ["Experimental"],
Core: [
"Description",
"Example",
"IsMediaType",
"IsURL",
"LongDescription",
"Revisions",
"OptionalParameter",
],
Validation: [
"AllowedValues",
"ApplicableTerms",
"AllowedTerms",
"Exclusive",
"Maximum",
"Minimum",
"Pattern",
],
};
alias.Common = alias.Common || "com.sap.vocabularies.Common.v1";
alias.Core = alias.Core || "Org.OData.Core.V1";
alias.Validation = alias.Validation || "Org.OData.Validation.V1";
const v = {};
Object.keys(terms).forEach((vocab) => {
v[vocab] = {};
terms[vocab].forEach((term) => {
v[vocab][term] = "@" + alias[vocab] + "." + term;
});
});
return v;
}
/**
* Find model element by qualified name
* @param {string} qname Qualified name of model element
* @return {object} Model element
*/
function modelElement(qname) {
const q = nameParts(qname);
const schema =
index.schemas[q.qualifier] || index.schemas[index.namespace[q.qualifier]];
const element = schema ? schema[q.name] : null;
if (!element) console.warn(`- Cannot find '${qname}'`);
if (element && schema.$$filename) element.$$filename = schema.$$filename;
return element;
}
};
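// Usage sketch (illustrative only, not part of the original module; the
// vocabulary file paths below are hypothetical):
//
//   const { csdl2markdown } = require("./csdl2markdown");
//   const csdl = require("../vocabularies/Org.OData.Core.V1.json");
//   const lines = csdl2markdown("Org.OData.Core.V1.xml", csdl);
//   console.log(lines.join("\n"));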
| oasis-tcs/odata-vocabularies |
<|start_filename|>my-other-site.com/php/Dockerfile<|end_filename|>
FROM php:7.0-fpm
MAINTAINER <NAME> <<EMAIL>>
<|start_filename|>konradcerny.cz/nginx/Dockerfile<|end_filename|>
FROM nginx:latest
MAINTAINER <NAME> <<EMAIL>>
COPY ./default.conf /etc/nginx/conf.d/default.conf
<|start_filename|>konradcerny.cz/php/Dockerfile<|end_filename|>
FROM php:5.5-fpm
MAINTAINER <NAME> <<EMAIL>>
| rokerkony/dockerize-vps |
<|start_filename|>js/data.js<|end_filename|>
function handleMsg(msg) {
if (VISIBLE) {
addData(msg.pub, msg.subs);
}
}
var pubnub = PUBNUB.init({
publish_key : "demo",
subscribe_key : "<KEY>",
ssl : true
});
var timeStamps = [];
pubnub.subscribe({
channel : "rts-xNjiKP4Bg4jgElhhn9v9-geo-map",
callback : function(msg){
timeStamps = timeStamps.concat(msg.geo_map);
}
});
var k;
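// Every 3 seconds, hand the accumulated timestamps to exPubSub (not defined in
// this file) and replay up to 30 of the resulting messages through handleMsg
// at 100ms intervals.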
var z = setInterval(function() {
var x = exPubSub(timeStamps);
timeStamps = [];
var count = 0;
clearInterval(k);
k = setInterval(function() {
if (count >= 30) {
clearInterval(k);
}
if (typeof(x[count]) === "undefined") {
clearInterval(k);
}
else {
handleMsg(x[count]);
count++;
}
}, 100);
}, 3000);
| isabella232/webgl-visualization |
<|start_filename|>style/style.css<|end_filename|>
html, body {
margin: 0;
font-family: arial, "Microsoft YaHei";
background-color: #272822;
color: #FEFEFE;
}
#fileWrapper{
transition:all 0.5s ease;
}
#fileWrapper:hover{
opacity: 1!important;
}
#visualizer_wrapper{
text-align: center;
}
footer{
position: fixed;
bottom: 2px;
color:#aaa;
} | DeepBlue27/Learn2Git |
<|start_filename|>test/integration/test.apis.natural-language-understanding.js<|end_filename|>
/**
* Copyright 2015 IBM Corp. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
require('dotenv').config({ silent: true });
var naturalLanguage = require('../../lib/api/natural-language-understanding');
describe('natural-language-understanding.js', function () {
this.timeout(3000);
this.slow(1000);
  it('should return Miami if a city is detected', function (done) {
var params = {
text: 'I live in Miami'
};
naturalLanguage.extractCity(params, function (err, city) {
if (city.name === 'Miami') {
done();
} else {
done(JSON.stringify(city));
}
});
});
it('should return empty if no city is mentioned', function (done) {
var params = {
text: 'We don\'t have cities here'
};
naturalLanguage.extractCity(params, function (err, city) {
if (!city) {
done();
} else {
done(JSON.stringify(city));
}
});
});
});
| tigartar/f4bot |
<|start_filename|>src/RPC/Library.fs<|end_filename|>
namespace Elmish.Bridge
[<AutoOpen>]
module RPC =
type IReplyChannel<'T> = {
ValueId : System.Guid
ExceptionId : System.Guid
}
<|start_filename|>src/Client/Library.fs<|end_filename|>
namespace Elmish.Bridge
open Browser
open Browser.Types
open Elmish
open Fable.Core
open Fable.SimpleJson
open Fable.Core.JsInterop
//Configures the transport of the custom serializer
type SerializerResult =
| Text of string
| Binary of byte []
//Internal use only
[<RequireQualifiedAccess>]
[<System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)>]
module Helpers =
let getBaseUrl() =
let url =
Dom.window.location.href
|> Url.URL.Create
url.protocol <- url.protocol.Replace("http", "ws")
url.hash <- ""
url
let mappings : Map<string option, Map<string, obj -> SerializerResult> * (WebSocket option * bool) ref * (string -> (unit -> unit) -> unit)> option ref =
match Dom.window?Elmish_Bridge_Helpers with
| None ->
let cell = ref (Some Map.empty)
Dom.window?Elmish_Bridge_Helpers <- cell
cell
| Some m -> m
let rpcmappings : Map<System.Guid, (string -> unit) * System.Guid> option ref =
match Dom.window?Elmish_Bridge_RpcHelpers with
| None ->
let cell = ref (Some Map.empty)
Dom.window?Elmish_Bridge_RpcHelpers <- cell
cell
| Some m -> m
/// Configures how the socket URL is calculated from the endpoint
type UrlMode =
| Append
| Replace
| Raw
| Calculated of (string -> string -> string)
/// Creates the bridge. Takes the endpoint and an optional message to be dispatched when the connection is closed.
/// It exposes a method `Send` that can be used to send messages to the server
type BridgeConfig<'Msg,'ElmishMsg> =
{ path : string
whenDown : 'ElmishMsg option
mapping : 'Msg -> 'ElmishMsg
customSerializers: Map<string, obj -> SerializerResult>
retryTime : int
name : string option
urlMode : UrlMode}
/// Internal use only
[<System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)>]
member this.AddSerializer(serializer: 'a -> SerializerResult, [<Inject>] ?resolver: ITypeResolver<'a>) =
let typeOrigin = resolver.Value.ResolveType()
let typeOriginName = typeOrigin.FullName.Replace("+",".")
{
whenDown = this.whenDown
path = this.path
mapping = this.mapping
customSerializers =
this.customSerializers
|> Map.add typeOriginName (fun e -> serializer (e :?> 'a))
retryTime = this.retryTime
name = this.name
urlMode = this.urlMode
}
interface System.IDisposable with
member t.Dispose() =
!Helpers.mappings
|> Option.defaultValue Map.empty
|> Map.tryFind t.name
|> Option.iter (fun (_, socket, _) ->
let (skt,_) = !socket
socket := (None, true)
skt |> Option.iter (fun e -> e.close())
)
/// Internal use only
[<System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)>]
member inline this.Attach dispatch =
let url =
match this.urlMode with
| Replace ->
let url = Helpers.getBaseUrl()
url.pathname <- this.path
url
| Append ->
let url = Helpers.getBaseUrl()
url.pathname <- url.pathname + this.path
url
| Calculated f ->
let url = Helpers.getBaseUrl()
f url.href this.path |> Url.URL.Create
| Raw ->
let url = Browser.Url.URL.Create this.path
url.protocol <- url.protocol.Replace("http", "ws")
url
let wsref : (WebSocket option * bool) ref =
!Helpers.mappings
|> Option.defaultValue Map.empty
|> Map.tryFind this.name
|> Option.bind (fun (_, socket, _) ->
match !socket with
| None, true -> None
| _ -> Some socket)
|> Option.defaultValue (ref (None, false))
let rec websocket timeout server =
match !wsref with
| Some _, _ | None, true -> ()
| None, false ->
let socket = WebSocket.Create server
wsref := Some socket, false
socket.onclose <- fun _ ->
let (_,closed) = !wsref
wsref := None, closed
this.whenDown |> Option.iter dispatch
if not closed then
Dom.window.setTimeout
((fun () -> websocket timeout server), timeout, ()) |> ignore
socket.onmessage <- fun e ->
let message = string e.data
if message.StartsWith "R" then
let guid = (System.Guid.Parse message.[1..36])
let json = message.[37..]
!Helpers.rpcmappings
|> Option.defaultValue Map.empty
|> Map.tryFind guid
|> Option.iter(fun (f,og) ->
f json
Helpers.rpcmappings :=
!Helpers.rpcmappings
|> Option.map( fun m ->
m
|> Map.remove guid
|> Map.remove og)
)
elif message.StartsWith "E" then
let guid = (System.Guid.Parse message.[1..])
!Helpers.rpcmappings
|> Option.defaultValue Map.empty
|> Map.tryFind guid
|> Option.iter(fun (f,og) ->
f (Json.serialize (exn("Server couldn't process your message")))
Helpers.rpcmappings :=
!Helpers.rpcmappings
|> Option.map( fun m ->
m
|> Map.remove guid
|> Map.remove og)
)
else
Json.tryParseNativeAs(string e.data)
|> function
| Ok msg -> msg |> this.mapping |> dispatch
| _ -> ()
websocket (this.retryTime * 1000) (url.href.TrimEnd '#')
Helpers.mappings :=
!Helpers.mappings
|> Option.defaultValue Map.empty
|> Map.add this.name
(this.customSerializers,
wsref,
(fun e callback ->
match !wsref with
| Some socket, _ -> socket.send e
| None, _ -> callback ()))
|> Some
type Bridge private() =
static member private stringTuple = (TypeInfo.Tuple(fun () -> [|TypeInfo.String;TypeInfo.String|]))
static member private Sender(server : 'Server, bridgeName, callback, sentType: System.Type) =
let sentTypeName = sentType.FullName.Replace('+','.')
!Helpers.mappings
|> Option.defaultValue Map.empty
|> Map.tryFind bridgeName
|> function
| None -> callback ()
| Some (m,_,s) ->
let serializer =
m
|> Map.tryFind sentTypeName
|> Option.defaultValue
(fun o -> Convert.serialize o (createTypeInfo sentType) |> Text)
let serialized =
match serializer server with
| Text e -> e
| Binary b -> System.Convert.ToBase64String b
s (Convert.serialize (sentTypeName, serialized) Bridge.stringTuple) callback
static member private RPCSender(guid, bridgeName, value, sentType: System.Type) =
!Helpers.mappings
|> Option.defaultValue Map.empty
|> Map.tryFind bridgeName
|> Option.iter
(fun (_,_,s) ->
let typeInfo = createTypeInfo sentType
let serialized = Convert.serialize value typeInfo
s (Convert.serialize (sprintf "RC|%O" guid, serialized) Bridge.stringTuple) ignore)
static member RPCSend(guid: System.Guid, value: 'a, ?name, [<Inject>] ?resolver: ITypeResolver<'a>) =
Bridge.RPCSender(guid, name, value, resolver.Value.ResolveType())
/// Send the message to the server
static member Send(server : 'Server,?callback, [<Inject>] ?resolver: ITypeResolver<'Server>) =
Bridge.Sender(server, None, defaultArg callback ignore, resolver.Value.ResolveType())
/// Send the message to the server using a named bridge
static member NamedSend(name:string, server : 'Server,?callback, [<Inject>] ?resolver: ITypeResolver<'Server>) =
Bridge.Sender(server, Some name, defaultArg callback ignore, resolver.Value.ResolveType())
static member AskServer(f: IReplyChannel<'T> -> 'Server, [<Inject>] ?resolverT: ITypeResolver<'T>, [<Inject>] ?resolverServer: ITypeResolver<'Server> ) : Async<'T> =
Bridge.Asker(f, None, resolverServer.Value.ResolveType(), resolverT.Value.ResolveType() )
static member AskNamedServer(f: IReplyChannel<'T> -> 'Server, name, [<Inject>] ?resolverT: ITypeResolver<'T>, [<Inject>] ?resolverServer: ITypeResolver<'Server> ) : Async<'T> =
Bridge.Asker(f, Some name, resolverServer.Value.ResolveType(), resolverT.Value.ResolveType() )
static member private Asker(f, bridgeName, sentType, ttype ) =
Async.FromContinuations(fun (cont, econt, _) ->
let guidValue = System.Guid.NewGuid()
let guidExn = System.Guid.NewGuid()
let typeInfoT = createTypeInfo ttype
let typeInfoExn = createTypeInfo typeof<exn>
let reply typeInfo cont s =
let json = SimpleJson.parse s
Convert.fromJsonAs json typeInfo |> unbox |> cont
Helpers.rpcmappings :=
!Helpers.rpcmappings
|> Option.defaultValue Map.empty
|> Map.add guidExn ((fun s -> reply typeInfoExn econt s), guidValue)
|> Map.add guidValue ((fun s -> reply typeInfoT cont s), guidExn)
|> Some
let sentTypeName = sentType.FullName.Replace('+','.')
!Helpers.mappings
|> Option.defaultValue Map.empty
|> Map.tryFind bridgeName
|> function
| None -> econt (exn("Bridge does not exist"))
| Some (_,_,s) ->
let serialized = Convert.serialize (f {ValueId = guidValue; ExceptionId = guidExn}) (createTypeInfo sentType)
s (Convert.serialize (sprintf "RS|%s" sentTypeName, serialized) Bridge.stringTuple) (fun () -> econt (exn("Socket is closed")))
)
[<RequireQualifiedAccess>]
module Bridge =
/// Create a new `BridgeConfig` with the set endpoint
let inline endpoint endpoint =
{
path = endpoint
whenDown = None
mapping = id
customSerializers = Map.empty
retryTime = 1
name = None
urlMode = Replace
}
/// Set a message to be sent when connection is lost.
let inline withWhenDown msg this =
{ this with whenDown = Some msg }
/// Sets the mode of how the url is calculated
/// `Replace` : sets the path to the endpoint defined
/// `Append` : adds the endpoint to the current path
/// `Raw`: uses the given endpoint as a complete URL
/// `Calculated` : takes a function that given the current URL and the endpoint, calculates the complete url to the socket
let inline withUrlMode mode this =
{ this with urlMode = mode }
/// Set a name for this bridge if you want to have a secondary one.
let inline withName name this =
{ this with name = Some name }
/// Register a custom serializer
let inline withCustomSerializer (serializer: 'a -> SerializerResult) (this:BridgeConfig<'Msg,'ElmishMsg>) =
this.AddSerializer serializer
    /// Configure how many seconds to wait before reconnecting when the connection is lost.
/// Values below 1 are ignored
let inline withRetryTime sec this =
if sec < 1 then
this
else
{ this with retryTime = sec}
    /// Configure a mapping to the top-level message type so the server can send an inner message.
    /// This enables using just a subset of the messages defined in the shared project.
let inline withMapping map this =
{
whenDown = this.whenDown
path = this.path
mapping = map
customSerializers = this.customSerializers
retryTime = this.retryTime
name = this.name
urlMode = this.urlMode
}
/// Creates a subscription to be used with `Cmd.OfSub`. That enables starting Bridge with
/// a configuration obtained after the `Program` has already started
let inline asSubscription (this:BridgeConfig<_,_>) dispatch =
this.Attach dispatch
[<RequireQualifiedAccess>]
module Program =
/// Apply the `Bridge` to be used with the program.
/// Preferably use it before any other operation that can change the type of the message passed to the `Program`.
let inline withBridge endpoint (program : Program<_, _, _, _>) =
program |> Program.withSubscription (fun _ -> [Bridge.endpoint(endpoint).Attach])
/// Apply the `Bridge` to be used with the program.
/// Preferably use it before any other operation that can change the type of the message passed to the `Program`.
let inline withBridgeConfig (config:BridgeConfig<_,_>) (program : Program<_, _, _, _>) =
program |> Program.withSubscription (fun _ -> [config.Attach])
[<RequireQualifiedAccess>]
module Cmd =
/// Creates a `Cmd` from a server message.
let inline bridgeSend (msg:'server) : Cmd<'client> = [ fun _ -> Bridge.Send msg ]
/// Creates a `Cmd` from a server message. Dispatches the client message if the bridge is broken.
let inline bridgeSendOr (msg:'server) (fallback:'client) : Cmd<'client> = [ fun dispatch -> Bridge.Send(msg, fun () -> dispatch fallback) ]
/// Creates a `Cmd` from a server message using a named bridge.
let inline namedBridgeSend name (msg:'server) : Cmd<'client> = [ fun _ -> Bridge.NamedSend(name, msg) ]
/// Creates a `Cmd` from a server message using a named bridge. Dispatches the client message if the bridge is broken.
let inline namedBridgeSendOr name (msg:'server) (fallback:'client) : Cmd<'client> = [ fun dispatch -> Bridge.NamedSend(name, msg, fun () -> dispatch fallback) ]
[<AutoOpen>]
module RPC =
type RPC.IReplyChannel<'T> with
member inline t.Reply(v:'T) =
Bridge.RPCSend(t.ValueId, v)
member inline t.ReplyNamed(name, v:'T) =
Bridge.RPCSend(t.ValueId, v, name)
member inline t.ReplyException(v:exn) =
Bridge.RPCSend(t.ExceptionId, v)
member inline t.ReplyExceptionNamed(name, v:'T) =
Bridge.RPCSend(t.ExceptionId, v, name)
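// Usage sketch (illustrative only, not part of the original file): attaching a
// bridge to an Elmish program. The "/socket" endpoint, retry time and the
// ServerDisconnected message are assumptions for this example.
//
//   Program.mkProgram init update view
//   |> Program.withBridgeConfig (
//        Bridge.endpoint "/socket"
//        |> Bridge.withRetryTime 5
//        |> Bridge.withWhenDown ServerDisconnected)
//   |> Program.run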
| Nhowka/Elmish.Bridge |
<|start_filename|>docker-compose/xebialabs/xl-release/Dockerfile<|end_filename|>
FROM xebialabs/xl-release:9.5.1
COPY default-conf/* /opt/xebialabs/xl-release-server/default-conf/
<|start_filename|>store/src/main/java/com/xebialabs/store/config/audit/package-info.java<|end_filename|>
/**
* Audit specific code.
*/
package com.xebialabs.store.config.audit;
<|start_filename|>invoice/src/main/java/com/xebialabs/invoice/security/package-info.java<|end_filename|>
/**
* Spring Security configuration.
*/
package com.xebialabs.invoice.security;
<|start_filename|>invoice/src/main/java/com/xebialabs/invoice/domain/Shipment.java<|end_filename|>
package com.xebialabs.invoice.domain;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.hibernate.annotations.Cache;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import javax.persistence.*;
import javax.validation.constraints.*;
import java.io.Serializable;
import java.time.Instant;
import java.util.Objects;
/**
* A Shipment.
*/
@Entity
@Table(name = "shipment")
@Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
public class Shipment implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
@Column(name = "tracking_code")
private String trackingCode;
@NotNull
@Column(name = "jhi_date", nullable = false)
private Instant date;
@Column(name = "details")
private String details;
@ManyToOne(optional = false)
@NotNull
@JsonIgnoreProperties("shipments")
private Invoice invoice;
// jhipster-needle-entity-add-field - JHipster will add fields here, do not remove
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getTrackingCode() {
return trackingCode;
}
public Shipment trackingCode(String trackingCode) {
this.trackingCode = trackingCode;
return this;
}
public void setTrackingCode(String trackingCode) {
this.trackingCode = trackingCode;
}
public Instant getDate() {
return date;
}
public Shipment date(Instant date) {
this.date = date;
return this;
}
public void setDate(Instant date) {
this.date = date;
}
public String getDetails() {
return details;
}
public Shipment details(String details) {
this.details = details;
return this;
}
public void setDetails(String details) {
this.details = details;
}
public Invoice getInvoice() {
return invoice;
}
public Shipment invoice(Invoice invoice) {
this.invoice = invoice;
return this;
}
public void setInvoice(Invoice invoice) {
this.invoice = invoice;
}
// jhipster-needle-entity-add-getters-setters - JHipster will add getters and setters here, do not remove
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Shipment shipment = (Shipment) o;
if (shipment.getId() == null || getId() == null) {
return false;
}
return Objects.equals(getId(), shipment.getId());
}
@Override
public int hashCode() {
return Objects.hashCode(getId());
}
@Override
public String toString() {
return "Shipment{" +
"id=" + getId() +
", trackingCode='" + getTrackingCode() + "'" +
", date='" + getDate() + "'" +
", details='" + getDetails() + "'" +
"}";
}
}
<|start_filename|>notification/src/main/java/com/xebialabs/notification/repository/package-info.java<|end_filename|>
/**
* Spring Data JPA repositories.
*/
package com.xebialabs.notification.repository;
<|start_filename|>notification/src/main/java/com/xebialabs/notification/config/dbmigrations/package-info.java<|end_filename|>
/**
* MongoDB database migrations using MongoBee.
*/
package com.xebialabs.notification.config.dbmigrations;
<|start_filename|>store/src/main/java/com/xebialabs/store/repository/package-info.java<|end_filename|>
/**
* Spring Data JPA repositories.
*/
package com.xebialabs.store.repository;
<|start_filename|>notification/src/main/java/com/xebialabs/notification/config/audit/package-info.java<|end_filename|>
/**
* Audit specific code.
*/
package com.xebialabs.notification.config.audit;
<|start_filename|>store/src/main/java/com/xebialabs/store/domain/enumeration/OrderItemStatus.java<|end_filename|>
package com.xebialabs.store.domain.enumeration;
/**
* The OrderItemStatus enumeration.
*/
public enum OrderItemStatus {
AVAILABLE, OUT_OF_STOCK, BACK_ORDER
}
<|start_filename|>notification/src/main/java/com/xebialabs/notification/repository/NotificationRepository.java<|end_filename|>
package com.xebialabs.notification.repository;
import com.xebialabs.notification.domain.Notification;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
/**
* Spring Data MongoDB repository for the Notification entity.
*/
@SuppressWarnings("unused")
@Repository
public interface NotificationRepository extends MongoRepository<Notification, String> {
}
<|start_filename|>store/src/main/java/com/xebialabs/store/security/package-info.java<|end_filename|>
/**
* Spring Security configuration.
*/
package com.xebialabs.store.security;
<|start_filename|>invoice/src/main/java/com/xebialabs/invoice/repository/package-info.java<|end_filename|>
/**
* Spring Data JPA repositories.
*/
package com.xebialabs.invoice.repository;
<|start_filename|>notification/src/main/java/com/xebialabs/notification/config/MetricsConfiguration.java<|end_filename|>
package com.xebialabs.notification.config;
import io.github.jhipster.config.JHipsterProperties;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.JvmAttributeGaugeSet;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Slf4jReporter;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.codahale.metrics.jvm.*;
import com.ryantenney.metrics.spring.config.annotation.EnableMetrics;
import com.ryantenney.metrics.spring.config.annotation.MetricsConfigurerAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.Marker;
import org.slf4j.MarkerFactory;
import org.springframework.context.annotation.*;
import javax.annotation.PostConstruct;
import java.lang.management.ManagementFactory;
import java.util.concurrent.TimeUnit;
@Configuration
@EnableMetrics(proxyTargetClass = true)
public class MetricsConfiguration extends MetricsConfigurerAdapter {
private static final String PROP_METRIC_REG_JVM_MEMORY = "jvm.memory";
private static final String PROP_METRIC_REG_JVM_GARBAGE = "jvm.garbage";
private static final String PROP_METRIC_REG_JVM_THREADS = "jvm.threads";
private static final String PROP_METRIC_REG_JVM_FILES = "jvm.files";
private static final String PROP_METRIC_REG_JVM_BUFFERS = "jvm.buffers";
private static final String PROP_METRIC_REG_JVM_ATTRIBUTE_SET = "jvm.attributes";
private final Logger log = LoggerFactory.getLogger(MetricsConfiguration.class);
private MetricRegistry metricRegistry = new MetricRegistry();
private HealthCheckRegistry healthCheckRegistry = new HealthCheckRegistry();
private final JHipsterProperties jHipsterProperties;
public MetricsConfiguration(JHipsterProperties jHipsterProperties) {
this.jHipsterProperties = jHipsterProperties;
}
@Override
@Bean
public MetricRegistry getMetricRegistry() {
return metricRegistry;
}
@Override
@Bean
public HealthCheckRegistry getHealthCheckRegistry() {
return healthCheckRegistry;
}
@PostConstruct
public void init() {
log.debug("Registering JVM gauges");
metricRegistry.register(PROP_METRIC_REG_JVM_MEMORY, new MemoryUsageGaugeSet());
metricRegistry.register(PROP_METRIC_REG_JVM_GARBAGE, new GarbageCollectorMetricSet());
metricRegistry.register(PROP_METRIC_REG_JVM_THREADS, new ThreadStatesGaugeSet());
metricRegistry.register(PROP_METRIC_REG_JVM_FILES, new FileDescriptorRatioGauge());
metricRegistry.register(PROP_METRIC_REG_JVM_BUFFERS, new BufferPoolMetricSet(ManagementFactory.getPlatformMBeanServer()));
metricRegistry.register(PROP_METRIC_REG_JVM_ATTRIBUTE_SET, new JvmAttributeGaugeSet());
if (jHipsterProperties.getMetrics().getJmx().isEnabled()) {
log.debug("Initializing Metrics JMX reporting");
JmxReporter jmxReporter = JmxReporter.forRegistry(metricRegistry).build();
jmxReporter.start();
}
if (jHipsterProperties.getMetrics().getLogs().isEnabled()) {
log.info("Initializing Metrics Log reporting");
Marker metricsMarker = MarkerFactory.getMarker("metrics");
final Slf4jReporter reporter = Slf4jReporter.forRegistry(metricRegistry)
.outputTo(LoggerFactory.getLogger("metrics"))
.markWith(metricsMarker)
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.build();
reporter.start(jHipsterProperties.getMetrics().getLogs().getReportFrequency(), TimeUnit.SECONDS);
}
}
}
<|start_filename|>store/src/main/java/com/xebialabs/store/web/rest/package-info.java<|end_filename|>
/**
* Spring MVC REST controllers.
*/
package com.xebialabs.store.web.rest;
<|start_filename|>docker-compose/xebialabs/xl-deploy/Dockerfile<|end_filename|>
FROM xebialabs/xl-deploy:9.5.1
COPY default-conf/* /opt/xebialabs/xl-deploy-server/default-conf/
<|start_filename|>invoice/src/main/java/com/xebialabs/invoice/domain/enumeration/InvoiceStatus.java<|end_filename|>
package com.xebialabs.invoice.domain.enumeration;
/**
* The InvoiceStatus enumeration.
*/
public enum InvoiceStatus {
PAID, ISSUED, CANCELLED
}
<|start_filename|>docker-compose/xebialabs/jenkins/Dockerfile<|end_filename|>
FROM jenkins/jenkins:2.138.1
COPY init.groovy /var/jenkins_home/init.groovy.d/
COPY plugins.txt /usr/share/jenkins/ref/plugins.txt
RUN /usr/local/bin/install-plugins.sh < /usr/share/jenkins/ref/plugins.txt
# Install the Docker CE client as per https://docs.docker.com/install/linux/docker-ce/debian/#install-docker-ce
USER root
RUN apt-get update && \
apt-get install -y \
apt-transport-https \
ca-certificates \
curl \
gnupg2 \
software-properties-common
RUN curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor > /etc/apt/trusted.gpg.d/docker.gpg
RUN add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/debian \
$(lsb_release -cs) \
stable" && \
apt-get update && \
apt-get install -y docker-ce && \
usermod -a -G root jenkins
#USER jenkins
ENV JENKINS_USER admin
ENV JENKINS_PASS <PASSWORD>
ENV JAVA_OPTS -Djenkins.install.runSetupWizard=false -Djenkins.model.Jenkins.slaveAgentPort=55888
<|start_filename|>store/src/main/java/com/xebialabs/store/domain/enumeration/OrderStatus.java<|end_filename|>
package com.xebialabs.store.domain.enumeration;
/**
* The OrderStatus enumeration.
*/
public enum OrderStatus {
COMPLETED, PENDING, CANCELLED
}
<|start_filename|>invoice/src/main/java/com/xebialabs/invoice/config/audit/package-info.java<|end_filename|>
/**
* Audit specific code.
*/
package com.xebialabs.invoice.config.audit;
<|start_filename|>invoice/src/main/java/com/xebialabs/invoice/web/rest/package-info.java<|end_filename|>
/**
* Spring MVC REST controllers.
*/
package com.xebialabs.invoice.web.rest;
<|start_filename|>store/src/main/java/com/xebialabs/store/config/package-info.java<|end_filename|>
/**
* Spring Framework configuration files.
*/
package com.xebialabs.store.config;
<|start_filename|>notification/src/main/java/com/xebialabs/notification/security/package-info.java<|end_filename|>
/**
* Spring Security configuration.
*/
package com.xebialabs.notification.security;
<|start_filename|>xlw.bat<|end_filename|>
@echo off
@rem ##############################################################################
@rem ##
@rem ## XL Cli wrapper script for Windows
@rem ##
@rem ##############################################################################
if "%OS%"=="Windows_NT" setlocal
set PROG_DIR=%~dp0
FOR /F "tokens=1,2 delims==" %%a IN (%PROG_DIR%\.xebialabs\wrapper.conf) DO (set %%a=%%~b)
set XL_WRAPPER_HOME=%LOCALAPPDATA%\.xebialabs\wrapper\%CLI_VERSION%
IF not exist %XL_WRAPPER_HOME% (mkdir %XL_WRAPPER_HOME%)
set BINARY_NAME=xl.exe
set WRAPPER_BINARY=%XL_WRAPPER_HOME%\%BINARY_NAME%
set TEMP_BINARY_FILE=%WRAPPER_BINARY%.%RANDOM%
if not exist %WRAPPER_BINARY% (
echo Downloading XL binary v%CLI_VERSION%
powershell -Command "(New-Object Net.WebClient).DownloadFile('%CLI_BASE_URL%/%CLI_VERSION%/windows-amd64/%BINARY_NAME%', '%TEMP_BINARY_FILE%')" || goto :error
call ren "%TEMP_BINARY_FILE%" %BINARY_NAME% || goto :error
)
call %WRAPPER_BINARY% %* || goto :error
if "%OS%"=="Windows_NT" endlocal
:error
exit /b %errorlevel% | dllz/e-commerce-microservice |
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/ConnectionManager.java<|end_filename|>
package com.turn.ttorrent.network;
import com.turn.ttorrent.common.LoggerUtils;
import com.turn.ttorrent.common.TimeService;
import com.turn.ttorrent.common.TorrentLoggerFactory;
import com.turn.ttorrent.network.keyProcessors.*;
import org.slf4j.Logger;
import java.io.IOException;
import java.nio.channels.Channel;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
import java.util.Arrays;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import static com.turn.ttorrent.Constants.DEFAULT_CLEANUP_RUN_TIMEOUT_MILLIS;
import static com.turn.ttorrent.Constants.DEFAULT_SELECTOR_SELECT_TIMEOUT_MILLIS;
public class ConnectionManager {
private static final Logger logger = TorrentLoggerFactory.getLogger(ConnectionManager.class);
private final Selector selector;
private final TimeService myTimeService;
private volatile ConnectionWorker myConnectionWorker;
private int myBindPort;
private final ConnectionManagerContext myContext;
private volatile ServerSocketChannel myServerSocketChannel;
private volatile Future<?> myWorkerFuture;
private final NewConnectionAllower myIncomingConnectionAllower;
private final NewConnectionAllower myOutgoingConnectionAllower;
private final TimeoutStorage socketTimeoutStorage = new TimeoutStorageImpl();
private final AtomicBoolean alreadyInit = new AtomicBoolean(false);
private final AtomicInteger mySendBufferSize;
private final AtomicInteger myReceiveBufferSize;
public ConnectionManager(ConnectionManagerContext context,
TimeService timeService,
NewConnectionAllower newIncomingConnectionAllower,
NewConnectionAllower newOutgoingConnectionAllower,
SelectorFactory selectorFactory,
AtomicInteger mySendBufferSize,
AtomicInteger myReceiveBufferSize) throws IOException {
this.mySendBufferSize = mySendBufferSize;
this.myReceiveBufferSize = myReceiveBufferSize;
this.selector = selectorFactory.newSelector();
this.myTimeService = timeService;
myContext = context;
this.myIncomingConnectionAllower = newIncomingConnectionAllower;
this.myOutgoingConnectionAllower = newOutgoingConnectionAllower;
}
public void initAndRunWorker(ServerChannelRegister serverChannelRegister) throws IOException {
boolean wasInit = alreadyInit.getAndSet(true);
if (wasInit) {
throw new IllegalStateException("connection manager was already initialized");
}
myServerSocketChannel = serverChannelRegister.channelFor(selector);
myServerSocketChannel.register(selector, SelectionKey.OP_ACCEPT, new AcceptAttachmentImpl(myContext));
myBindPort = myServerSocketChannel.socket().getLocalPort();
String serverName = myServerSocketChannel.socket().toString();
myConnectionWorker = new ConnectionWorker(selector, Arrays.asList(
new InvalidKeyProcessor(),
new AcceptableKeyProcessor(selector, serverName, myTimeService, myIncomingConnectionAllower, socketTimeoutStorage,
mySendBufferSize, myReceiveBufferSize),
new ConnectableKeyProcessor(selector, myTimeService, socketTimeoutStorage,
mySendBufferSize, myReceiveBufferSize),
new ReadableKeyProcessor(serverName),
new WritableKeyProcessor()), DEFAULT_SELECTOR_SELECT_TIMEOUT_MILLIS, DEFAULT_CLEANUP_RUN_TIMEOUT_MILLIS,
myTimeService,
new CleanupKeyProcessor(myTimeService),
myOutgoingConnectionAllower);
myWorkerFuture = myContext.getExecutor().submit(myConnectionWorker);
}
public void setSelectorSelectTimeout(int timeout) {
ConnectionWorker workerLocal = myConnectionWorker;
checkThatWorkerIsInit(workerLocal);
workerLocal.setSelectorSelectTimeout(timeout);
}
private void checkThatWorkerIsInit(ConnectionWorker worker) {
if (worker == null) throw new IllegalStateException("Connection manager is not initialized!");
}
public boolean offerConnect(ConnectTask connectTask, int timeout, TimeUnit timeUnit) {
if (myConnectionWorker == null) {
return false;
}
return myConnectionWorker.offerConnect(connectTask, timeout, timeUnit);
}
public boolean offerWrite(WriteTask writeTask, int timeout, TimeUnit timeUnit) {
if (myConnectionWorker == null) {
return false;
}
return myConnectionWorker.offerWrite(writeTask, timeout, timeUnit);
}
public int getBindPort() {
return myBindPort;
}
public void close(int timeout, TimeUnit timeUnit) {
logger.debug("try close connection manager...");
boolean successfullyClosed = true;
if (myConnectionWorker != null) {
myWorkerFuture.cancel(true);
try {
boolean shutdownCorrectly = myConnectionWorker.stop(timeout, timeUnit);
if (!shutdownCorrectly) {
successfullyClosed = false;
logger.warn("unable to terminate worker in {} {}", timeout, timeUnit);
}
} catch (InterruptedException e) {
successfullyClosed = false;
LoggerUtils.warnAndDebugDetails(logger, "unable to await termination worker, thread was interrupted", e);
}
}
try {
this.myServerSocketChannel.close();
} catch (Throwable e) {
LoggerUtils.errorAndDebugDetails(logger, "unable to close server socket channel", e);
successfullyClosed = false;
}
for (SelectionKey key : this.selector.keys()) {
try {
if (key.isValid()) {
key.channel().close();
}
} catch (Throwable e) {
logger.error("unable to close socket channel {}", key.channel());
successfullyClosed = false;
logger.debug("", e);
}
}
try {
this.selector.close();
} catch (Throwable e) {
LoggerUtils.errorAndDebugDetails(logger, "unable to close selector channel", e);
successfullyClosed = false;
}
if (successfullyClosed) {
logger.debug("connection manager is successfully closed");
} else {
logger.error("connection manager wasn't closed successfully");
}
}
public void close() {
close(1, TimeUnit.MINUTES);
}
public void setCleanupTimeout(long timeoutMillis) {
ConnectionWorker workerLocal = myConnectionWorker;
checkThatWorkerIsInit(workerLocal);
workerLocal.setCleanupTimeout(timeoutMillis);
}
public void setSocketConnectionTimeout(long timeoutMillis) {
socketTimeoutStorage.setTimeout(timeoutMillis);
}
public void closeChannel(Channel channel) throws IOException {
channel.close();
}
}
<|start_filename|>common/src/test/java/com/turn/ttorrent/common/TorrentUtilsTest.java<|end_filename|>
/*
Copyright (C) 2016 <NAME>
<p>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<p>
http://www.apache.org/licenses/LICENSE-2.0
<p>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.turn.ttorrent.common;
import org.testng.annotations.Test;
import static org.testng.Assert.assertEquals;
public class TorrentUtilsTest {
@Test(expectedExceptions = NullPointerException.class)
public void testBytesToHexWithNull() {
//noinspection ResultOfMethodCallIgnored,ConstantConditions
TorrentUtils.byteArrayToHexString(null);
}
@Test
public void testBytesToHexWithEmptyByteArray() {
assertEquals("", TorrentUtils.byteArrayToHexString(new byte[0]));
}
@Test
public void testBytesToHexWithSingleByte() {
assertEquals("BC", TorrentUtils.byteArrayToHexString(new byte[]{
(byte) 0xBC
}));
}
@Test
public void testBytesToHexWithZeroByte() {
assertEquals("00", TorrentUtils.byteArrayToHexString(new byte[1]));
}
@Test
public void testBytesToHexWithLeadingZero() {
assertEquals("0053FF", TorrentUtils.byteArrayToHexString(new byte[]{
(byte) 0x00, (byte) 0x53, (byte) 0xFF
}));
}
@Test
public void testBytesToHexTrailingZero() {
assertEquals("AA004500", TorrentUtils.byteArrayToHexString(new byte[]{
(byte) 0xAA, (byte) 0x00, (byte) 0x45, (byte) 0x00
}));
}
@Test
public void testBytesToHexAllSymbols() {
assertEquals("0123456789ABCDEF", TorrentUtils.byteArrayToHexString(new byte[]{
(byte) 0x01, (byte) 0x23, (byte) 0x45, (byte) 0x67,
(byte) 0x89, (byte) 0xAB, (byte) 0xCD, (byte) 0xEF
}));
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/strategy/EndGameStrategyImpl.java<|end_filename|>
/*
* Copyright 2000-2018 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.client.strategy;
import com.turn.ttorrent.client.Piece;
import com.turn.ttorrent.client.peer.SharingPeer;
import java.util.*;
public class EndGameStrategyImpl implements EndGameStrategy {
private static final Random RANDOM = new Random();
private final int peersPerPiece;
public EndGameStrategyImpl(int peersPerPiece) {
this.peersPerPiece = peersPerPiece;
}
@Override
public RequestsCollection collectRequests(Piece[] allPieces, List<SharingPeer> connectedPeers) {
List<SharingPeer> sorted = new ArrayList<SharingPeer>(connectedPeers);
Map<Piece, List<SharingPeer>> selectedPieces = new HashMap<Piece, List<SharingPeer>>();
Collections.sort(sorted, new Comparator<SharingPeer>() {
@Override
public int compare(SharingPeer o1, SharingPeer o2) {
return Integer.valueOf(o1.getDownloadedPiecesCount()).compareTo(o2.getDownloadedPiecesCount());
}
});
for (Piece piece : allPieces) {
if (piece.isValid()) continue;
      //if we don't have the piece, request it from several random peers
      //(peers are selected by rank; a peer with a better rank is selected more often than a peer with a bad rank)
List<SharingPeer> selectedPeers = selectGoodPeers(piece, peersPerPiece, sorted);
selectedPieces.put(piece, selectedPeers);
}
return new RequestsCollectionImpl(selectedPieces);
}
private List<SharingPeer> selectGoodPeers(Piece piece, int count, List<SharingPeer> sortedPeers) {
List<SharingPeer> notSelected = new ArrayList<SharingPeer>(sortedPeers);
Iterator<SharingPeer> iterator = notSelected.iterator();
while (iterator.hasNext()) {
SharingPeer peer = iterator.next();
boolean peerHasCurrentPiece = peer.getAvailablePieces().get(piece.getIndex());
boolean alreadyRequested = peer.getRequestedPieces().contains(piece);
if (!peerHasCurrentPiece || alreadyRequested) iterator.remove();
}
if (notSelected.size() <= count) return notSelected;
List<SharingPeer> selected = new ArrayList<SharingPeer>();
for (int i = 0; i < count; i++) {
SharingPeer sharingPeer = selectPeer(notSelected);
if (sharingPeer == null) continue;
notSelected.remove(sharingPeer);
selected.add(sharingPeer);
}
return selected;
}
private SharingPeer selectPeer(List<SharingPeer> notSelected) {
for (SharingPeer sharingPeer : notSelected) {
if (RANDOM.nextDouble() < 0.8) {
return sharingPeer;
}
}
return notSelected.get(RANDOM.nextInt(notSelected.size()));
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/Handshake.java<|end_filename|>
/**
* Copyright (C) 2011-2012 Turn, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.client;
import com.turn.ttorrent.Constants;
import com.turn.ttorrent.common.TorrentHash;
import com.turn.ttorrent.common.TorrentUtils;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.text.ParseException;
/**
* Peer handshake handler.
*
* @author mpetazzoni
*/
public class Handshake implements TorrentHash {
public static final String BITTORRENT_PROTOCOL_IDENTIFIER = "BitTorrent protocol";
public static final int BASE_HANDSHAKE_LENGTH = 49;
private ByteBuffer data;
private ByteBuffer infoHash;
private ByteBuffer peerId;
private String torrentIdentifier;
private int myPstrlen;
private Handshake(ByteBuffer data, ByteBuffer infoHash,
ByteBuffer peerId) {
this.data = data;
this.data.rewind();
this.infoHash = infoHash;
this.peerId = peerId;
}
public ByteBuffer getData() {
return this.data;
}
public byte[] getInfoHash() {
return this.infoHash.array();
}
public String getHexInfoHash() {
return TorrentUtils.byteArrayToHexString(getInfoHash());
}
public byte[] getPeerId() {
return this.peerId.array();
}
public static Handshake parse(ByteBuffer buffer)
throws ParseException, UnsupportedEncodingException {
int pstrlen = Byte.valueOf(buffer.get()).intValue();
if (pstrlen < 0 ||
buffer.remaining() != BASE_HANDSHAKE_LENGTH + pstrlen - 1) {
throw new ParseException("Incorrect handshake message length " +
"(pstrlen=" + pstrlen + ") !", 0);
}
// Check the protocol identification string
byte[] pstr = new byte[pstrlen];
buffer.get(pstr);
if (!Handshake.BITTORRENT_PROTOCOL_IDENTIFIER.equals(
new String(pstr, Constants.BYTE_ENCODING))) {
throw new ParseException("Invalid protocol identifier!", 1);
}
// Ignore reserved bytes
byte[] reserved = new byte[8];
buffer.get(reserved);
byte[] infoHash = new byte[20];
buffer.get(infoHash);
byte[] peerId = new byte[20];
buffer.get(peerId);
return new Handshake(buffer, ByteBuffer.wrap(infoHash),
ByteBuffer.wrap(peerId));
}
public static Handshake parse(ByteBuffer buffer, String torrentIdentifier) throws UnsupportedEncodingException, ParseException {
Handshake hs = Handshake.parse(buffer);
hs.setTorrentIdentifier(torrentIdentifier);
return hs;
}
public static Handshake parse(ByteBuffer buffer, int pstrlen) throws UnsupportedEncodingException, ParseException {
Handshake hs = Handshake.parse(buffer);
hs.myPstrlen = pstrlen;
return hs;
}
public static Handshake craft(byte[] torrentInfoHash, byte[] clientPeerId) {
try {
ByteBuffer buffer = ByteBuffer.allocate(
Handshake.BASE_HANDSHAKE_LENGTH +
Handshake.BITTORRENT_PROTOCOL_IDENTIFIER.length());
byte[] reserved = new byte[8];
ByteBuffer infoHash = ByteBuffer.wrap(torrentInfoHash);
ByteBuffer peerId = ByteBuffer.wrap(clientPeerId);
buffer.put((byte) Handshake
.BITTORRENT_PROTOCOL_IDENTIFIER.length());
buffer.put(Handshake
.BITTORRENT_PROTOCOL_IDENTIFIER.getBytes(Constants.BYTE_ENCODING));
buffer.put(reserved);
buffer.put(infoHash);
buffer.put(peerId);
return new Handshake(buffer, infoHash, peerId);
} catch (UnsupportedEncodingException uee) {
return null;
}
}
public static Handshake parse(byte[] torrentInfoHash, byte[] clientPeerId, String torrentIdentifier) throws UnsupportedEncodingException, ParseException {
Handshake hs = Handshake.craft(torrentInfoHash, clientPeerId);
hs.setTorrentIdentifier(torrentIdentifier);
return hs;
}
public void setTorrentIdentifier(String torrentIdentifier) {
this.torrentIdentifier = torrentIdentifier;
}
public int getPstrlen() {
return myPstrlen;
}
public String getTorrentIdentifier() {
return torrentIdentifier;
}
}
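/*
 * Illustrative sketch only, not part of the library: crafts a handshake for a dummy
 * 20-byte info hash and peer id, then parses the resulting 68-byte buffer back.
 * The wire layout is <pstrlen><pstr><8 reserved bytes><20-byte info hash><20-byte peer id>.
 */
class HandshakeRoundTripExample {
    public static void main(String[] args) throws Exception {
        byte[] infoHash = new byte[20]; // placeholder; a real value is the SHA-1 of the info dictionary
        byte[] peerId = new byte[20];   // placeholder peer id
        java.util.Arrays.fill(infoHash, (byte) 0x11);
        java.util.Arrays.fill(peerId, (byte) 0x22);
        Handshake crafted = Handshake.craft(infoHash, peerId);
        Handshake parsed = Handshake.parse(crafted.getData());
        // both sides expose the same info hash after the round trip
        System.out.println(crafted.getHexInfoHash().equals(parsed.getHexInfoHash()));
    }
}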
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/TorrentParser.java<|end_filename|>
package com.turn.ttorrent.common;
import com.turn.ttorrent.Constants;
import com.turn.ttorrent.bcodec.BDecoder;
import com.turn.ttorrent.bcodec.BEValue;
import com.turn.ttorrent.bcodec.BEncoder;
import com.turn.ttorrent.bcodec.InvalidBEncodingException;
import org.apache.commons.io.FileUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.*;
import static com.turn.ttorrent.common.TorrentMetadataKeys.*;
public class TorrentParser {
public TorrentMetadata parseFromFile(File torrentFile) throws IOException {
byte[] fileContent = FileUtils.readFileToByteArray(torrentFile);
return parse(fileContent);
}
/**
* @param metadata binary .torrent content
* @return parsed metadata object. A single-file torrent is exposed as a multi-file torrent containing one file
* @throws InvalidBEncodingException if the metadata is not valid bencoding or is missing required fields
* @throws RuntimeException wraps an IOException thrown by the bencode decoder; in practice this should never
* happen, because reading from a byte array input stream cannot
* raise an I/O error
*/
public TorrentMetadata parse(byte[] metadata) throws InvalidBEncodingException, RuntimeException {
final Map<String, BEValue> dictionaryMetadata;
try {
dictionaryMetadata = BDecoder.bdecode(new ByteArrayInputStream(metadata)).getMap();
} catch (InvalidBEncodingException e) {
throw e;
} catch (IOException e) {
throw new RuntimeException(e);
}
final Map<String, BEValue> infoTable = getRequiredValueOrThrowException(dictionaryMetadata, INFO_TABLE).getMap();
final BEValue creationDateValue = dictionaryMetadata.get(CREATION_DATE_SEC);
final long creationDate = creationDateValue == null ? -1 : creationDateValue.getLong();
final String comment = getStringOrNull(dictionaryMetadata, COMMENT);
final String createdBy = getStringOrNull(dictionaryMetadata, CREATED_BY);
final String announceUrl = getStringOrNull(dictionaryMetadata, ANNOUNCE);
final List<List<String>> trackers = getTrackers(dictionaryMetadata);
final int pieceLength = getRequiredValueOrThrowException(infoTable, PIECE_LENGTH).getInt();
final byte[] piecesHashes = getRequiredValueOrThrowException(infoTable, PIECES).getBytes();
final boolean torrentContainsManyFiles = infoTable.get(FILES) != null;
final String dirName = getRequiredValueOrThrowException(infoTable, NAME).getString();
final List<TorrentFile> files = parseFiles(infoTable, torrentContainsManyFiles, dirName);
if (piecesHashes.length % Constants.PIECE_HASH_SIZE != 0)
throw new InvalidBEncodingException("Incorrect size of pieces hashes");
final int piecesCount = piecesHashes.length / Constants.PIECE_HASH_SIZE;
byte[] infoTableBytes;
try {
infoTableBytes = BEncoder.bencode(infoTable).array();
} catch (IOException e) {
throw new RuntimeException(e);
}
return new TorrentMetadataImpl(
TorrentUtils.calculateSha1Hash(infoTableBytes),
trackers,
announceUrl,
creationDate,
comment,
createdBy,
dirName,
files,
piecesCount,
pieceLength,
piecesHashes
);
}
private List<TorrentFile> parseFiles(Map<String, BEValue> infoTable, boolean torrentContainsManyFiles, String name) throws InvalidBEncodingException {
if (!torrentContainsManyFiles) {
final BEValue md5Sum = infoTable.get(MD5_SUM);
return Collections.singletonList(new TorrentFile(
Collections.singletonList(name),
getRequiredValueOrThrowException(infoTable, FILE_LENGTH).getLong(),
md5Sum == null ? null : md5Sum.getString()
));
}
List<TorrentFile> result = new ArrayList<TorrentFile>();
for (BEValue file : infoTable.get(FILES).getList()) {
Map<String, BEValue> fileInfo = file.getMap();
List<String> path = new ArrayList<String>();
BEValue filePathList = fileInfo.get(FILE_PATH_UTF8);
if (filePathList == null) {
filePathList = fileInfo.get(FILE_PATH);
}
for (BEValue pathElement : filePathList.getList()) {
path.add(pathElement.getString());
}
final BEValue md5Sum = fileInfo.get(MD5_SUM); // per-file md5sum lives in each file dictionary, not in the info dictionary
result.add(new TorrentFile(
path,
fileInfo.get(FILE_LENGTH).getLong(),
md5Sum == null ? null : md5Sum.getString()));
}
return result;
}
@Nullable
private String getStringOrNull(Map<String, BEValue> dictionaryMetadata, String key) throws InvalidBEncodingException {
final BEValue value = dictionaryMetadata.get(key);
if (value == null) return null;
return value.getString();
}
@Nullable
private List<List<String>> getTrackers(Map<String, BEValue> dictionaryMetadata) throws InvalidBEncodingException {
final BEValue announceListValue = dictionaryMetadata.get(ANNOUNCE_LIST);
if (announceListValue == null) return null;
List<BEValue> announceList = announceListValue.getList();
List<List<String>> result = new ArrayList<List<String>>();
Set<String> allTrackers = new HashSet<String>();
for (BEValue tv : announceList) {
List<BEValue> trackers = tv.getList();
if (trackers.isEmpty()) {
continue;
}
List<String> tier = new ArrayList<String>();
for (BEValue tracker : trackers) {
final String url = tracker.getString();
if (!allTrackers.contains(url)) {
tier.add(url);
allTrackers.add(url);
}
}
if (!tier.isEmpty()) {
result.add(tier);
}
}
return result;
}
@NotNull
private BEValue getRequiredValueOrThrowException(Map<String, BEValue> map, String key) throws InvalidBEncodingException {
final BEValue value = map.get(key);
if (value == null)
throw new InvalidBEncodingException("Invalid metadata format. Map doesn't contain required field " + key);
return value;
}
}
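/*
 * Illustrative sketch only, not part of the library: parses a .torrent file (the path
 * below is a placeholder) and prints a few of the extracted fields.
 */
class TorrentParserUsageExample {
    public static void main(String[] args) throws java.io.IOException {
        TorrentMetadata metadata = new TorrentParser().parseFromFile(new java.io.File("example.torrent"));
        System.out.println("info hash: " + metadata.getHexInfoHash());
        System.out.println("directory: " + metadata.getDirectoryName());
        System.out.println("pieces: " + metadata.getPiecesCount() + " x " + metadata.getPieceLength() + " bytes");
        for (TorrentFile file : metadata.getFiles()) {
            System.out.println(" " + file.getRelativePathAsString() + " (" + file.size + " bytes)");
        }
    }
}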
<|start_filename|>test-api/src/main/java/com/turn/ttorrent/MockTimeService.java<|end_filename|>
package com.turn.ttorrent;
import com.turn.ttorrent.common.TimeService;
public class MockTimeService implements TimeService {
private volatile long time = 0;
@Override
public long now() {
return time;
}
public void setTime(long time) {
this.time = time;
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/FileMetadataProvider.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.common.TorrentMetadata;
import com.turn.ttorrent.common.TorrentParser;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
public class FileMetadataProvider implements TorrentMetadataProvider {
private final String filePath;
public FileMetadataProvider(String filePath) {
this.filePath = filePath;
}
@NotNull
@Override
public TorrentMetadata getTorrentMetadata() throws IOException {
File file = new File(filePath);
return new TorrentParser().parseFromFile(file);
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/AcceptAttachmentImpl.java<|end_filename|>
package com.turn.ttorrent.network;
import java.io.IOException;
import java.nio.channels.SocketChannel;
public class AcceptAttachmentImpl implements AcceptAttachment, TimeoutAttachment {
private final ChannelListenerFactory myChannelListenerFactory;
public AcceptAttachmentImpl(ChannelListenerFactory channelListenerFactory) {
this.myChannelListenerFactory = channelListenerFactory;
}
@Override
public ChannelListenerFactory getChannelListenerFactory() {
return myChannelListenerFactory;
}
@Override
public boolean isTimeoutElapsed(long currentTimeMillis) {
return false; // accept attachments are never closed by timeout
}
@Override
public void communicatedNow(long currentTimeMillis) {
}
@Override
public void onTimeoutElapsed(SocketChannel channel) throws IOException {
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/TimeoutAttachment.java<|end_filename|>
package com.turn.ttorrent.network;
import java.io.IOException;
import java.nio.channels.SocketChannel;
public interface TimeoutAttachment {
/**
* @param currentTimeMillis current time for timeout calculation
* @return true if and only if the timeout has elapsed
*/
boolean isTimeoutElapsed(long currentTimeMillis);
/**
* set last communication time to current time
*
* @param currentTimeMillis current time in milliseconds
*/
void communicatedNow(long currentTimeMillis);
/**
* must be invoked when the timeout has elapsed
*
* @param channel the channel of the selection key associated with this attachment
* @throws IOException if an I/O error occurs
*/
void onTimeoutElapsed(SocketChannel channel) throws IOException;
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/network/CountLimitConnectionAllower.java<|end_filename|>
package com.turn.ttorrent.client.network;
import com.turn.ttorrent.client.PeersStorage;
import com.turn.ttorrent.network.NewConnectionAllower;
import java.util.concurrent.atomic.AtomicInteger;
import static com.turn.ttorrent.Constants.DEFAULT_MAX_CONNECTION_COUNT;
/**
* this implementation allows only a fixed number of simultaneously open connections
*/
public class CountLimitConnectionAllower implements NewConnectionAllower {
private final PeersStorage myPeersStorage;
private final AtomicInteger myMaxConnectionCount = new AtomicInteger();
public CountLimitConnectionAllower(PeersStorage peersStorage) {
this.myPeersStorage = peersStorage;
myMaxConnectionCount.set(DEFAULT_MAX_CONNECTION_COUNT);
}
public void setMyMaxConnectionCount(int newMaxCount) {
myMaxConnectionCount.set(newMaxCount);
}
@Override
public boolean isNewConnectionAllowed() {
return myPeersStorage.getSharingPeers().size() < myMaxConnectionCount.get();
}
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/AnnounceableInformation.java<|end_filename|>
package com.turn.ttorrent.common;
import java.util.List;
public interface AnnounceableInformation extends TorrentHash {
/**
* @return number of bytes uploaded by the client for this torrent
*/
long getUploaded();
/**
* @return number of bytes downloaded by the client for this torrent
*/
long getDownloaded();
/**
* @return number of bytes left to download by the client for this torrent
*/
long getLeft();
/**
* @return all trackers to announce to, grouped in tiers
* @see <a href="http://bittorrent.org/beps/bep_0012.html"></a>
*/
List<List<String>> getAnnounceList();
/**
* @return the main announce URL of the tracker
*/
String getAnnounce();
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/protocol/TrackerMessage.java<|end_filename|>
/**
* Copyright (C) 2012 Turn, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.common.protocol;
import com.turn.ttorrent.common.Peer;
import java.nio.ByteBuffer;
import java.util.List;
/**
* BitTorrent tracker protocol messages representations.
*
* <p>
* This class and its <em>*TrackerMessage</em> subclasses provide POJO
* representations of the tracker protocol messages, for at least HTTP and UDP
* trackers' protocols, along with easy parsing from an input ByteBuffer to
* quickly get a usable representation of an incoming message.
* </p>
*
* @author mpetazzoni
*/
public abstract class TrackerMessage {
/**
* Message type.
*/
public enum Type {
UNKNOWN(-1),
CONNECT_REQUEST(0),
CONNECT_RESPONSE(0),
ANNOUNCE_REQUEST(1),
ANNOUNCE_RESPONSE(1),
SCRAPE_REQUEST(2),
SCRAPE_RESPONSE(2),
ERROR(3);
private final int id;
Type(int id) {
this.id = id;
}
public int getId() {
return this.id;
}
}
private final Type type;
private final ByteBuffer data;
/**
* Constructor for the base tracker message type.
*
* @param type The message type.
* @param data A byte buffer containing the binary data of the message (a
* B-encoded map, a UDP packet data, etc.).
*/
protected TrackerMessage(Type type, ByteBuffer data) {
this.type = type;
this.data = data;
if (this.data != null) {
this.data.rewind();
}
}
/**
* Returns the type of this tracker message.
*/
public Type getType() {
return this.type;
}
/**
* Returns the encoded binary data for this message.
*/
public ByteBuffer getData() {
return this.data;
}
/**
* Generic exception for message format and message validation exceptions.
*/
public static class MessageValidationException extends Exception {
static final long serialVersionUID = -1;
public MessageValidationException(String s) {
super(s);
}
public MessageValidationException(String s, Throwable cause) {
super(s, cause);
}
}
/**
* Base interface for connection request messages.
*
* <p>
* This interface must be implemented by all subtypes of connection request
* messages for the various tracker protocols.
* </p>
*
* @author mpetazzoni
*/
public interface ConnectionRequestMessage {
}
/**
* Base interface for connection response messages.
*
* <p>
* This interface must be implemented by all subtypes of connection
* response messages for the various tracker protocols.
* </p>
*
* @author mpetazzoni
*/
public interface ConnectionResponseMessage {
}
/**
* Base interface for tracker error messages.
*
* <p>
* This interface must be implemented by all subtypes of tracker error
* messages for the various tracker protocols.
* </p>
*
* @author mpetazzoni
*/
public interface ErrorMessage {
/**
* The various tracker error states.
*
* <p>
* These errors are reported by the tracker to a client when expected
* parameters or conditions are not present while processing an
* announce request from a BitTorrent client.
* </p>
*/
enum FailureReason {
UNKNOWN_TORRENT("The requested torrent does not exist on this tracker"),
MISSING_HASH("Missing info hash"),
MISSING_PEER_ID("Missing peer ID"),
MISSING_PORT("Missing port"),
INVALID_EVENT("Unexpected event for peer state"),
NOT_IMPLEMENTED("Feature not implemented");
private String message;
FailureReason(String message) {
this.message = message;
}
public String getMessage() {
return this.message;
}
}
String getReason();
}
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/TorrentMetadata.java<|end_filename|>
package com.turn.ttorrent.common;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* Provides access to all information stored in a .torrent file
*
* @see <a href="https://wiki.theory.org/index.php/BitTorrentSpecification#Metainfo_File_Structure"></a>
*/
public interface TorrentMetadata extends TorrentHash {
/**
* @return all trackers to announce to, grouped in tiers
* @see <a href="http://bittorrent.org/beps/bep_0012.html"></a>
*/
@Nullable
List<List<String>> getAnnounceList();
/**
* @return main announce url for tracker or <code>null</code> if main announce is not specified
*/
@Nullable
String getAnnounce();
/**
* @return creation date of the torrent in unix format
*/
Optional<Long> getCreationDate();
/**
* @return free-form text comment of the author
*/
Optional<String> getComment();
/**
* @return name and version of the program used to create .torrent
*/
Optional<String> getCreatedBy();
/**
* @return number of bytes in each piece
*/
int getPieceLength();
/**
* @return concatenation of all 20-byte SHA1 hash values, one per piece.
* So the length of this array must be a multiple of 20
*/
byte[] getPiecesHashes();
/**
* @return true if this is a private torrent. In that case the client must obtain peers only from the tracker and
* must initiate connections only to peers returned by the tracker.
* @see <a href="http://bittorrent.org/beps/bep_0027.html"></a>
*/
boolean isPrivate();
/**
* @return count of pieces in torrent
*/
int getPiecesCount();
/**
* @return The filename of the directory in which to store all the files
*/
String getDirectoryName();
/**
* @return list of files, stored in this torrent
*/
List<TorrentFile> getFiles();
}
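/*
 * Illustrative sketch only, not part of the library: derives the total payload size from
 * the file list and checks it against the piece layout described above (getPieceLength()
 * bytes per piece, a possibly shorter last piece, and 20 bytes of SHA-1 hash per piece).
 */
class TorrentMetadataSizeExample {
    static long totalSize(TorrentMetadata metadata) {
        long total = 0;
        for (TorrentFile file : metadata.getFiles()) {
            total += file.size;
        }
        return total;
    }

    static boolean pieceLayoutIsConsistent(TorrentMetadata metadata) {
        long expectedPieces = (totalSize(metadata) + metadata.getPieceLength() - 1) / metadata.getPieceLength();
        // one 20-byte SHA-1 hash per piece
        return expectedPieces == metadata.getPiecesCount()
                && metadata.getPiecesHashes().length == expectedPieces * 20;
    }
}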
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/NewConnectionAllower.java<|end_filename|>
package com.turn.ttorrent.network;
public interface NewConnectionAllower {
/**
* @return true if we can accept a new incoming connection or open a new outgoing connection to another peer
*/
boolean isNewConnectionAllowed();
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/AnnounceableInformationImpl.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.common.AnnounceableInformation;
import com.turn.ttorrent.common.TorrentHash;
import java.util.List;
class AnnounceableInformationImpl implements AnnounceableInformation {
private final long uploaded;
private final long downloaded;
private final long left;
private final TorrentHash torrentHash;
private final List<List<String>> announceUrls;
private final String announce;
public AnnounceableInformationImpl(long uploaded,
long downloaded,
long left,
TorrentHash torrentHash,
List<List<String>> announceUrls,
String announce) {
this.uploaded = uploaded;
this.downloaded = downloaded;
this.left = left;
this.torrentHash = torrentHash;
this.announceUrls = announceUrls;
this.announce = announce;
}
@Override
public long getUploaded() {
return uploaded;
}
@Override
public long getDownloaded() {
return downloaded;
}
@Override
public long getLeft() {
return left;
}
@Override
public List<List<String>> getAnnounceList() {
return announceUrls;
}
@Override
public String getAnnounce() {
return announce;
}
@Override
public byte[] getInfoHash() {
return torrentHash.getInfoHash();
}
@Override
public String getHexInfoHash() {
return torrentHash.getHexInfoHash();
}
@Override
public String toString() {
return "announceable torrent " + torrentHash.getHexInfoHash() + " for trackers " + announceUrls;
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/ServerChannelRegister.java<|end_filename|>
package com.turn.ttorrent.network;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
public interface ServerChannelRegister {
/**
* Create a new server socket channel for use with the specified selector
*
* @param selector specified selector
* @return the newly created server channel
* @throws IOException if the channel cannot be created or bound
*/
@NotNull
ServerSocketChannel channelFor(Selector selector) throws IOException;
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/SharingPeerFactory.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.client.peer.SharingPeer;
import java.nio.ByteBuffer;
import java.nio.channels.ByteChannel;
public interface SharingPeerFactory {
SharingPeer createSharingPeer(String host,
int port,
ByteBuffer peerId,
SharedTorrent torrent,
ByteChannel channel,
String clientIdentifier,
int clientVersion);
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/TimeoutStorageImpl.java<|end_filename|>
package com.turn.ttorrent.network;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
public class TimeoutStorageImpl implements TimeoutStorage {
private final AtomicLong timeoutMillis = new AtomicLong();
@Override
public void setTimeout(long millis) {
timeoutMillis.set(millis);
}
@Override
public void setTimeout(int timeout, TimeUnit timeUnit) {
setTimeout(timeUnit.toMillis(timeout));
}
@Override
public long getTimeoutMillis() {
return timeoutMillis.get();
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/ConnectionListener.java<|end_filename|>
package com.turn.ttorrent.network;
import java.io.IOException;
import java.nio.channels.SocketChannel;
public interface ConnectionListener {
/**
* invoked when specified socket channel contains any data
*
* @param socketChannel specified socket channel with data
* @throws IOException if an I/O error occurs
*/
void onNewDataAvailable(SocketChannel socketChannel) throws IOException;
/**
* invoked when a new connection is established
*
* @param socketChannel specified socket channel
* @throws IOException if an I/O error occurs
*/
void onConnectionEstablished(SocketChannel socketChannel) throws IOException;
/**
* invoked when an error occurs
*
* @param socketChannel the channel on which the error occurred
* @param ex specified exception
* @throws IOException if an I/O error occurs
*/
void onError(SocketChannel socketChannel, Throwable ex) throws IOException;
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/TorrentManagerImpl.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.common.TorrentHash;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
class TorrentManagerImpl implements TorrentManager {
private final EventDispatcher eventDispatcher;
private final TorrentHash hash;
TorrentManagerImpl(EventDispatcher eventDispatcher, TorrentHash hash) {
this.eventDispatcher = eventDispatcher;
this.hash = hash;
}
@Override
public void addListener(TorrentListener listener) {
eventDispatcher.addListener(listener);
}
@Override
public boolean removeListener(TorrentListener listener) {
return eventDispatcher.removeListener(listener);
}
@Override
public byte[] getInfoHash() {
return hash.getInfoHash();
}
@Override
public String getHexInfoHash() {
return hash.getHexInfoHash();
}
@Override
public void awaitDownloadComplete(int timeout, TimeUnit timeUnit) throws InterruptedException, TimeoutException {
final Semaphore semaphore = new Semaphore(0);
TorrentListenerWrapper listener = new TorrentListenerWrapper() {
@Override
public void downloadComplete() {
semaphore.release();
}
};
try {
addListener(listener);
if (!semaphore.tryAcquire(timeout, timeUnit)) {
throw new TimeoutException("Unable to download torrent in specified timeout");
}
} finally {
removeListener(listener);
}
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/peer/Rate.java<|end_filename|>
/**
* Copyright (C) 2011-2012 Turn, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.client.peer;
import java.io.Serializable;
import java.util.Comparator;
/**
* A data exchange rate representation.
*
* <p>
* This is a utility class to keep track, and compare, of the data exchange
* rate (either download or upload) with a peer.
* </p>
*
* @author mpetazzoni
*/
public class Rate implements Comparable<Rate> {
public static final Comparator<Rate> RATE_COMPARATOR =
new RateComparator();
private long bytes = 0;
private long reset = 0;
private long last = 0;
/**
* Add a byte count to the current measurement.
*
* @param count The number of bytes exchanged since the last reset.
*/
public synchronized void add(long count) {
this.bytes += count;
if (this.reset == 0) {
this.reset = System.currentTimeMillis();
}
this.last = System.currentTimeMillis();
}
/**
* Get the current rate.
*
* <p>
* The exchange rate is the number of bytes exchanged since the last
* reset and the last input.
* </p>
*/
public synchronized float get() {
if (this.last - this.reset == 0) {
return 0;
}
return this.bytes / ((this.last - this.reset) / 1000.0f);
}
/**
* Reset the measurement.
*/
public synchronized void reset() {
this.bytes = 0;
this.reset = System.currentTimeMillis();
this.last = this.reset;
}
@Override
public int compareTo(Rate other) {
return RATE_COMPARATOR.compare(this, other);
}
/**
* A rate comparator.
*
* <p>
* This class provides a comparator to sort peers by an exchange rate,
* comparing two rates and returning an ascending ordering.
* </p>
*
* <p>
* <b>Note:</b> we need to make sure here that we don't return 0, which
* would provide an ordering that is inconsistent with
* <code>equals()</code>'s behavior, and result in unpredictable behavior
* for sorted collections using this comparator.
* </p>
*
* @author mpetazzoni
*/
private static class RateComparator
implements Comparator<Rate>, Serializable {
private static final long serialVersionUID = 72460233003600L;
/**
* Compare two rates together.
*
* <p>
* This method compares float, but we don't care too much about
* rounding errors. It's just to order peers so super-strict rate based
* order is not required.
* </p>
*
* @param a
* @param b
*/
@Override
public int compare(Rate a, Rate b) {
if (a.get() > b.get()) {
return 1;
}
return -1;
}
}
}
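/*
 * Illustrative sketch only, not part of the library: records two exchanged blocks roughly
 * one second apart; get() then reports bytes divided by the time elapsed between the first
 * and the last add() since the previous reset.
 */
class RateUsageExample {
    public static void main(String[] args) throws InterruptedException {
        Rate downloadRate = new Rate();
        downloadRate.add(16384);   // first block opens the measurement window
        Thread.sleep(1000);
        downloadRate.add(16384);   // second block one second later
        System.out.printf("~%.0f bytes/s%n", downloadRate.get()); // roughly 32768 bytes/s
    }
}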
<|start_filename|>tests/src/test/java/com/turn/ttorrent/common/TorrentTest.java<|end_filename|>
package com.turn.ttorrent.common;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.io.FileUtils;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import static org.testng.Assert.*;
@Test
public class TorrentTest {
public void test_create_torrent() throws URISyntaxException, IOException, InterruptedException {
URI announceURI = new URI("http://localhost:6969/announce");
String createdBy = "Test";
TorrentMetadata t = TorrentCreator.create(new File("src/test/resources/parentFiles/file1.jar"), announceURI, createdBy);
assertEquals(createdBy, t.getCreatedBy().get());
assertEquals(announceURI.toString(), t.getAnnounce());
}
public void load_torrent_created_by_utorrent() throws IOException {
TorrentMetadata t = new TorrentParser().parseFromFile(new File("src/test/resources/torrents/file1.jar.torrent"));
assertEquals("http://localhost:6969/announce", t.getAnnounce());
assertEquals("B92D38046C76D73948E14C42DF992CAF25489D08", t.getHexInfoHash());
assertEquals("uTorrent/3130", t.getCreatedBy().get());
}
public void torrent_from_multiple_files() throws URISyntaxException, InterruptedException, IOException {
URI announceURI = new URI("http://localhost:6969/announce");
String createdBy = "Test2";
final File parentDir = new File("src/test/resources/parentFiles/parentDir");
final long creationTimeSecs = 1376051000;
final String[] fileNames = new String[]
{"AccuRevCommon.jar",
"commons-io-cio2.5_3.jar",
"commons-io-cio2.5_3.jar.link",
"inDir/application.wadl",
"storage.version"};
final List<File> files = new ArrayList<File>();
for (String fileName : fileNames) {
files.add(new File(parentDir, fileName));
}
TorrentMetadata createdTorrent = TorrentCreator.create(parentDir, files, announceURI, null, createdBy, creationTimeSecs, TorrentCreator.DEFAULT_PIECE_LENGTH);
File torrentFileWin = new File("src/test/resources/torrents/parentDir.win.torrent");
File torrentFileLinux = new File("src/test/resources/torrents/parentDir.linux.torrent");
final byte[] expectedBytesWin = FileUtils.readFileToByteArray(torrentFileWin);
final byte[] expectedBytesLinux = FileUtils.readFileToByteArray(torrentFileLinux);
final byte[] actualBytes = new TorrentSerializer().serialize(createdTorrent);
assertTrue(Hex.encodeHexString(expectedBytesWin).equals(Hex.encodeHexString(actualBytes)) || Hex.encodeHexString(expectedBytesLinux).equals(Hex.encodeHexString(actualBytes)));
}
public void testFilenames() throws IOException {
File torrentFile = new File("src/test/resources/torrents/parentDir.win.torrent");
TorrentMetadata t2 = new TorrentParser().parseFromFile(torrentFile);
final List<TorrentFile> tmpFileNames = t2.getFiles();
final List<String> normalizedFilenames = new ArrayList<String>(tmpFileNames.size());
for (TorrentFile torrentFileInfo : tmpFileNames) {
normalizedFilenames.add(t2.getDirectoryName() + "/" + torrentFileInfo.getRelativePathAsString().replaceAll("\\\\", "/"));
}
String[] expectedFilenames = new String[]
{"parentDir/AccuRevCommon.jar",
"parentDir/commons-io-cio2.5_3.jar",
"parentDir/commons-io-cio2.5_3.jar.link",
"parentDir/inDir/application.wadl",
"parentDir/storage.version"};
assertEqualsNoOrder(normalizedFilenames.toArray(new String[normalizedFilenames.size()]), expectedFilenames);
System.out.println();
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/keyProcessors/WritableKeyProcessor.java<|end_filename|>
package com.turn.ttorrent.network.keyProcessors;
import com.turn.ttorrent.common.TorrentLoggerFactory;
import com.turn.ttorrent.network.ConnectionClosedException;
import com.turn.ttorrent.network.WriteAttachment;
import com.turn.ttorrent.network.WriteTask;
import org.slf4j.Logger;
import java.io.EOFException;
import java.io.IOException;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
public class WritableKeyProcessor implements KeyProcessor {
private static final Logger logger = TorrentLoggerFactory.getLogger(WritableKeyProcessor.class);
@Override
public void process(SelectionKey key) throws IOException {
SelectableChannel channel = key.channel();
if (!(channel instanceof SocketChannel)) {
logger.warn("incorrect instance of channel. The key is cancelled");
key.cancel();
return;
}
SocketChannel socketChannel = (SocketChannel) channel;
Object attachment = key.attachment();
if (!(attachment instanceof WriteAttachment)) {
logger.error("incorrect instance of attachment for channel {}", channel);
key.cancel();
return;
}
WriteAttachment keyAttachment = (WriteAttachment) attachment;
if (keyAttachment.getWriteTasks().isEmpty()) {
key.interestOps(SelectionKey.OP_READ);
return;
}
WriteTask processedTask = keyAttachment.getWriteTasks().peek();
try {
int writeCount = socketChannel.write(processedTask.getByteBuffer());
if (writeCount < 0) {
processedTask.getListener().onWriteFailed("Reached end of stream while writing", null);
throw new EOFException("Reached end of stream while writing");
}
if (!processedTask.getByteBuffer().hasRemaining()) {
processedTask.getListener().onWriteDone();
keyAttachment.getWriteTasks().remove();
}
} catch (IOException e) {
processedTask.getListener().onWriteFailed("I/O error occurs on write to channel " + socketChannel, new ConnectionClosedException(e));
keyAttachment.getWriteTasks().clear();
key.cancel();
}
}
@Override
public boolean accept(SelectionKey key) {
return key.isValid() && key.isWritable();
}
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/creation/DataSourceHolder.java<|end_filename|>
/*
* Copyright 2000-2018 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.common.creation;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
public interface DataSourceHolder extends Closeable {
/**
* provides the {@link InputStream} associated with this holder. A holder may simply keep a reference to an
* existing stream, or lazily create a new stream from some source (e.g. a {@link java.io.FileInputStream}
* from a {@link java.io.File}) on the first invocation.
*
* @return {@link InputStream} associated with the holder.
* @throws IOException if an I/O error occurs while creating the new stream from its source;
* this can only happen on the first invocation
*/
InputStream getStream() throws IOException;
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/creation/StringUtils.java<|end_filename|>
/*
* Copyright 2000-2018 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.common.creation;
import java.util.Iterator;
public final class StringUtils {
public static String join(String delimiter, Iterable<? extends CharSequence> iterable) {
Iterator<? extends CharSequence> iterator = iterable.iterator();
StringBuilder sb = new StringBuilder();
if (iterator.hasNext()) {
sb.append(iterator.next());
}
while (iterator.hasNext()) {
sb.append(delimiter).append(iterator.next());
}
return sb.toString();
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/TorrentManager.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.common.TorrentHash;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
public interface TorrentManager extends TorrentHash {
/**
* add specified listener which will be notified on new events
*
* @param listener specified listener
*/
void addListener(TorrentListener listener);
/**
* remove the specified listener previously added via the {@link TorrentManager#addListener} method.
* The listener may still receive events after this method returns if a notification was already in progress when it was invoked
*
* @param listener specified listener
* @return true if the listener was removed, false otherwise (e.g. the listener was not found)
*/
boolean removeListener(TorrentListener listener);
/**
* wait until the download is finished
*
* @param timeout the maximum time to wait
* @param timeUnit the time unit of the timeout argument
* @throws InterruptedException if this thread was interrupted
* @throws TimeoutException if the timeout elapsed before the download completed
*/
void awaitDownloadComplete(int timeout, TimeUnit timeUnit) throws InterruptedException, TimeoutException;
}
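/*
 * Illustrative sketch only, not part of the library: typical use of a TorrentManager obtained
 * from the client, assuming TorrentListenerWrapper is the no-op listener adapter used elsewhere
 * in this module (see TorrentManagerImpl). The manager instance itself is a placeholder argument.
 */
class TorrentManagerUsageExample {
    static void waitForDownload(final TorrentManager manager) throws Exception {
        TorrentListener listener = new TorrentListenerWrapper() {
            @Override
            public void downloadComplete() {
                System.out.println("download of " + manager.getHexInfoHash() + " finished");
            }
        };
        manager.addListener(listener);
        try {
            manager.awaitDownloadComplete(10, java.util.concurrent.TimeUnit.MINUTES);
        } finally {
            manager.removeListener(listener);
        }
    }
}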
<|start_filename|>common/src/test/java/com/turn/ttorrent/common/protocol/http/HTTPAnnounceResponseMessageTest.java<|end_filename|>
package com.turn.ttorrent.common.protocol.http;
import com.turn.ttorrent.bcodec.BEValue;
import com.turn.ttorrent.bcodec.BEncoder;
import com.turn.ttorrent.common.Peer;
import com.turn.ttorrent.common.protocol.TrackerMessage;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.testng.Assert.assertEquals;
public class HTTPAnnounceResponseMessageTest {
@Test
public void parseTest() throws IOException, TrackerMessage.MessageValidationException {
Map<String, BEValue> trackerResponse = new HashMap<String, BEValue>();
trackerResponse.put("interval", new BEValue(5));
trackerResponse.put("complete", new BEValue(1));
trackerResponse.put("incomplete", new BEValue(0));
String ip = "192.168.1.1";
int port = 6881;
InetSocketAddress peerAddress = new InetSocketAddress(ip, port);
ByteBuffer binaryPeerAddress = ByteBuffer.allocate(6);
binaryPeerAddress.put(peerAddress.getAddress().getAddress());
binaryPeerAddress.putShort((short) port);
trackerResponse.put("peers", new BEValue(binaryPeerAddress.array()));
HTTPAnnounceResponseMessage parsedResponse = (HTTPAnnounceResponseMessage) HTTPAnnounceResponseMessage.parse(
new ByteArrayInputStream(BEncoder.bencode(trackerResponse).array()));
assertEquals(parsedResponse.getInterval(), 5);
assertEquals(parsedResponse.getComplete(), 1);
assertEquals(parsedResponse.getIncomplete(), 0);
List<Peer> peers = parsedResponse.getPeers();
assertEquals(peers.size(), 1);
Peer peer = peers.get(0);
assertEquals(peer.getIp(), ip);
assertEquals(peer.getPort(), port);
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/ConnectionManagerContext.java<|end_filename|>
package com.turn.ttorrent.network;
import java.util.concurrent.ExecutorService;
public interface ConnectionManagerContext extends ChannelListenerFactory {
ExecutorService getExecutor();
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/SystemTimeService.java<|end_filename|>
package com.turn.ttorrent.common;
public class SystemTimeService implements TimeService {
@Override
public long now() {
return System.currentTimeMillis();
}
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/TorrentHash.java<|end_filename|>
package com.turn.ttorrent.common;
public interface TorrentHash {
/**
* Return the hash of the B-encoded meta-info structure of a torrent.
*/
byte[] getInfoHash();
/**
* Get torrent's info hash (as an hexadecimal-coded string).
*/
String getHexInfoHash();
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/TorrentStatistic.java<|end_filename|>
package com.turn.ttorrent.common;
import java.util.concurrent.atomic.AtomicLong;
/**
* Stores statistics for downloaded, uploaded and remaining ("left") byte counts.
*/
public class TorrentStatistic {
private final AtomicLong myUploadedBytes;
private final AtomicLong myDownloadedBytes;
private final AtomicLong myLeftBytes;
public TorrentStatistic() {
myDownloadedBytes = new AtomicLong();
myUploadedBytes = new AtomicLong();
myLeftBytes = new AtomicLong();
}
public long getUploadedBytes() {
return myUploadedBytes.get();
}
public long getDownloadedBytes() {
return myDownloadedBytes.get();
}
public long getLeftBytes() {
return myLeftBytes.get();
}
public void addUploaded(long delta) {
myUploadedBytes.addAndGet(delta);
}
public void addDownloaded(long delta) {
myDownloadedBytes.addAndGet(delta);
}
public void addLeft(long delta) {
myLeftBytes.addAndGet(delta);
}
public void setLeft(long value) {
myLeftBytes.set(value);
}
public void setUploaded(long value) {
myUploadedBytes.set(value);
}
public void setDownloaded(long value) {
myDownloadedBytes.set(value);
}
}
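/*
 * Illustrative sketch only, not part of the library: typical bookkeeping around a
 * TorrentStatistic while pieces are downloaded. The sizes are placeholder values.
 */
class TorrentStatisticUsageExample {
    public static void main(String[] args) {
        long totalSize = 4L * 16384;          // placeholder torrent size
        TorrentStatistic stats = new TorrentStatistic();
        stats.setLeft(totalSize);             // nothing downloaded yet
        stats.addDownloaded(16384);           // one verified piece arrived
        stats.addLeft(-16384);                // the remaining amount shrinks accordingly
        System.out.println(stats.getDownloadedBytes() + " bytes down, " + stats.getLeftBytes() + " bytes left");
    }
}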
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/FirstAvailableChannel.java<|end_filename|>
package com.turn.ttorrent.network;
import com.turn.ttorrent.common.TorrentLoggerFactory;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.channels.Selector;
import java.nio.channels.ServerSocketChannel;
public class FirstAvailableChannel implements ServerChannelRegister {
private static final Logger logger = TorrentLoggerFactory.getLogger(FirstAvailableChannel.class);
private final int firstTryPort;
private final int lastTryPort;
public FirstAvailableChannel(int firstTryPort, int lastTryPort) {
this.firstTryPort = firstTryPort;
this.lastTryPort = lastTryPort;
}
@NotNull
@Override
public ServerSocketChannel channelFor(Selector selector) throws IOException {
ServerSocketChannel myServerSocketChannel = selector.provider().openServerSocketChannel();
myServerSocketChannel.configureBlocking(false);
int bindPort = -1;
for (int port = firstTryPort; port <= lastTryPort; port++) {
try {
InetSocketAddress tryAddress = new InetSocketAddress(port);
myServerSocketChannel.socket().bind(tryAddress);
bindPort = tryAddress.getPort();
break;
} catch (IOException e) {
//try next port
logger.debug("Could not bind to port {}, trying next port...", port);
}
}
if (bindPort == -1) {
logger.error(String.format(
"No available ports in range [%d, %d] for the BitTorrent client!", firstTryPort, lastTryPort
));
throw new IOException("No available port for the BitTorrent client!");
}
return myServerSocketChannel;
}
}
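/*
 * Illustrative sketch only, not part of the library: binds a server channel to the first
 * free port in the 6881-6889 range and prints the chosen port.
 */
class FirstAvailableChannelExample {
    public static void main(String[] args) throws java.io.IOException {
        java.nio.channels.Selector selector = java.nio.channels.Selector.open();
        java.nio.channels.ServerSocketChannel serverChannel =
                new FirstAvailableChannel(6881, 6889).channelFor(selector);
        System.out.println("bound to port " + serverChannel.socket().getLocalPort());
        serverChannel.close();
        selector.close();
    }
}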
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/creation/PiecesHashesCalculator.java<|end_filename|>
/*
* Copyright 2000-2018 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.common.creation;
import java.io.IOException;
import java.util.List;
public interface PiecesHashesCalculator {
/**
* calculates the SHA-1 hash of each piece of the specified size and returns the list of hashes
* together with the sizes of the sources. If a stream ends before the piece size threshold is reached,
* the implementation must continue reading bytes from the next stream.
* For example, if the source list contains three streams with the following bytes:
* first stream: [1,2,3]
* second stream: [4,5,6,7]
* third stream: [8,9]
* and pieceSize = 4,
* the result must contain the source sizes [3,4,2] and the hashes [sha1(1,2,3,4), sha1(5,6,7,8), sha1(9)]
*
* @param sources list of input stream providers
* @param pieceSize size of one piece in bytes
* @return the hashes and source sizes described above
* @throws IOException if an I/O error occurs while reading from the streams
*/
HashingResult calculateHashes(List<DataSourceHolder> sources, int pieceSize) throws IOException;
}
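/*
 * Illustrative sketch only, not part of the library: demonstrates the chunking rule from the
 * javadoc above on plain byte arrays. Bytes from consecutive sources are concatenated, hashed
 * in pieceSize chunks with SHA-1, and a trailing partial chunk is hashed as well. For the
 * sources [1,2,3], [4,5,6,7], [8,9] and pieceSize = 4 this yields sha1(1,2,3,4), sha1(5,6,7,8), sha1(9).
 */
class PieceHashingSketch {
    static java.util.List<byte[]> hashPieces(java.util.List<byte[]> sources, int pieceSize)
            throws java.security.NoSuchAlgorithmException {
        java.security.MessageDigest sha1 = java.security.MessageDigest.getInstance("SHA-1");
        java.util.List<byte[]> hashes = new java.util.ArrayList<byte[]>();
        int filled = 0; // bytes accumulated in the digest for the currently open piece
        for (byte[] source : sources) {
            for (byte b : source) {
                sha1.update(b);
                filled++;
                if (filled == pieceSize) {     // piece boundary reached: emit a hash
                    hashes.add(sha1.digest()); // digest() also resets the digest
                    filled = 0;
                }
            }
        }
        if (filled > 0) { // last, possibly shorter piece
            hashes.add(sha1.digest());
        }
        return hashes;
    }
}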
<|start_filename|>ttorrent-client/src/test/java/com/turn/ttorrent/client/storage/FileCollectionStorageTest.java<|end_filename|>
package com.turn.ttorrent.client.storage;
import com.turn.ttorrent.TempFiles;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
/**
* User: loyd
* Date: 11/24/13
*/
public class FileCollectionStorageTest {
private TempFiles tempFiles;
@BeforeMethod
public void setUp() {
tempFiles = new TempFiles();
}
@AfterMethod
public void tearDown() {
tempFiles.cleanup();
}
@Test
public void testSelect() throws Exception {
final File file1 = tempFiles.createTempFile();
final File file2 = tempFiles.createTempFile();
final List<FileStorage> files = new ArrayList<FileStorage>();
files.add(new FileStorage(file1, 0, 2));
files.add(new FileStorage(file2, 2, 2));
final FileCollectionStorage storage = new FileCollectionStorage(files, 4);
storage.open(false);
try {
// since all of these files already exist, we are considered finished
assertTrue(storage.isFinished());
// write to first file works
write(new byte[]{1, 2}, 0, storage);
check(new byte[]{1, 2}, file1);
// write to second file works
write(new byte[]{5, 6}, 2, storage);
check(new byte[]{5, 6}, file2);
// write to two files works
write(new byte[]{8, 9, 10, 11}, 0, storage);
check(new byte[]{8, 9}, file1);
check(new byte[]{10, 11}, file2);
// make sure partial write into next file works
write(new byte[]{100, 101, 102}, 0, storage);
check(new byte[]{102, 11}, file2);
} finally {
storage.close();
}
}
private void write(byte[] bytes, int offset, FileCollectionStorage storage) throws IOException {
storage.write(ByteBuffer.wrap(bytes), offset);
storage.finish();
}
private void check(byte[] bytes, File f) throws IOException {
final byte[] temp = new byte[bytes.length];
FileInputStream fileInputStream = new FileInputStream(f);
final int totalRead;
try {
totalRead = fileInputStream.read(temp);
} finally {
fileInputStream.close();
}
assertEquals(totalRead, temp.length);
assertEquals(temp, bytes);
}
}
<|start_filename|>ttorrent-client/src/test/java/com/turn/ttorrent/client/ByteArrayStorage.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.client.storage.TorrentByteStorage;
import java.nio.ByteBuffer;
import java.util.Arrays;
public class ByteArrayStorage implements TorrentByteStorage {
private final byte[] array;
private boolean finished = false;
public ByteArrayStorage(int maxSize) {
array = new byte[maxSize];
}
@Override
public void open(boolean seeder) {
}
private int intPosition(long position) {
if (position > Integer.MAX_VALUE || position < 0) {
throw new IllegalArgumentException("Position is too large");
}
return (int) position;
}
@Override
public int read(ByteBuffer buffer, long position) {
int pos = intPosition(position);
int bytesCount = buffer.remaining();
buffer.put(Arrays.copyOfRange(array, pos, pos + bytesCount));
return bytesCount;
}
@Override
public int write(ByteBuffer block, long position) {
int pos = intPosition(position);
int bytesCount = block.remaining();
byte[] toWrite = new byte[bytesCount];
block.get(toWrite);
System.arraycopy(toWrite, 0, array, pos, toWrite.length);
return bytesCount;
}
@Override
public void finish() {
finished = true;
}
@Override
public boolean isFinished() {
return finished;
}
@Override
public void delete() {
}
@Override
public void close() {
}
}
<|start_filename|>network/src/test/java/com/turn/ttorrent/network/ConnectionManagerTest.java<|end_filename|>
package com.turn.ttorrent.network;
import com.turn.ttorrent.MockTimeService;
import org.apache.log4j.*;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.IOException;
import java.io.OutputStream;
import java.net.ServerSocket;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.testng.Assert.*;
public class ConnectionManagerTest {
private ConnectionManager myConnectionManager;
private ExecutorService myExecutorService;
private ConnectionListener connectionListener;
private ConnectionManagerContext myContext;
public ConnectionManagerTest() {
if (Logger.getRootLogger().getAllAppenders().hasMoreElements())
return;
BasicConfigurator.configure(new ConsoleAppender(new PatternLayout("[%d{MMdd HH:mm:ss,SSS} %t] %6p - %20.20c - %m %n")));
Logger.getRootLogger().setLevel(Level.ALL);
}
@BeforeMethod
public void setUp() throws Exception {
Logger.getRootLogger().setLevel(Level.INFO);
myContext = mock(ConnectionManagerContext.class);
myExecutorService = Executors.newSingleThreadExecutor();
when(myContext.getExecutor()).thenReturn(myExecutorService);
final SelectorFactory selectorFactory = mock(SelectorFactory.class);
when(selectorFactory.newSelector()).thenReturn(Selector.open());
NewConnectionAllower newConnectionAllower = mock(NewConnectionAllower.class);
when(newConnectionAllower.isNewConnectionAllowed()).thenReturn(true);
myConnectionManager = new ConnectionManager(
myContext,
new MockTimeService(),
newConnectionAllower,
newConnectionAllower,
selectorFactory,
new AtomicInteger(),
new AtomicInteger());
}
@Test(expectedExceptions = IllegalStateException.class)
public void testThatDoubleInitThrowException() {
try {
myConnectionManager.initAndRunWorker(new FirstAvailableChannel(6881, 6889));
} catch (IOException e) {
fail("unable to init and run worker", e);
}
try {
myConnectionManager.initAndRunWorker(new FirstAvailableChannel(6881, 6889));
} catch (IOException e) {
fail("unable to init and run worker", e);
}
}
@Test
public void canAcceptAndReadData() throws IOException, InterruptedException {
final AtomicInteger acceptCount = new AtomicInteger();
final AtomicInteger readCount = new AtomicInteger();
final AtomicInteger connectCount = new AtomicInteger();
final AtomicInteger lastReadBytesCount = new AtomicInteger();
final ByteBuffer byteBuffer = ByteBuffer.allocate(10);
final Semaphore semaphore = new Semaphore(0);
this.connectionListener = new ConnectionListener() {
@Override
public void onNewDataAvailable(SocketChannel socketChannel) throws IOException {
readCount.incrementAndGet();
lastReadBytesCount.set(socketChannel.read(byteBuffer));
if (lastReadBytesCount.get() == -1) {
socketChannel.close();
}
semaphore.release();
}
@Override
public void onConnectionEstablished(SocketChannel socketChannel) throws IOException {
acceptCount.incrementAndGet();
semaphore.release();
}
@Override
public void onError(SocketChannel socketChannel, Throwable ex) {
}
};
when(myContext.newChannelListener()).thenReturn(connectionListener);
myConnectionManager.initAndRunWorker(new FirstAvailableChannel(6881, 6889));
assertEquals(acceptCount.get(), 0);
assertEquals(readCount.get(), 0);
int serverPort = myConnectionManager.getBindPort();
Socket socket = new Socket("127.0.0.1", serverPort);
tryAcquireOrFail(semaphore);//wait until connection is accepted
assertTrue(socket.isConnected());
assertEquals(acceptCount.get(), 1);
assertEquals(readCount.get(), 0);
Socket socketSecond = new Socket("127.0.0.1", serverPort);
tryAcquireOrFail(semaphore);//wait until connection is accepted
assertTrue(socketSecond.isConnected());
assertEquals(acceptCount.get(), 2);
assertEquals(readCount.get(), 0);
socketSecond.close();
tryAcquireOrFail(semaphore);//wait read that connection is closed
assertEquals(readCount.get(), 1);
assertEquals(acceptCount.get(), 2);
assertEquals(lastReadBytesCount.get(), -1);
byteBuffer.rewind();
assertEquals(byteBuffer.get(), 0);
byteBuffer.rewind();
String writeStr = "abc";
OutputStream outputStream = socket.getOutputStream();
outputStream.write(writeStr.getBytes());
tryAcquireOrFail(semaphore);//wait until read bytes
assertEquals(readCount.get(), 2);
assertEquals(lastReadBytesCount.get(), 3);
byte[] expected = new byte[byteBuffer.capacity()];
System.arraycopy(writeStr.getBytes(), 0, expected, 0, writeStr.length());
assertEquals(byteBuffer.array(), expected);
outputStream.close();
socket.close();
tryAcquireOrFail(semaphore);//wait read that connection is closed
assertEquals(readCount.get(), 3);
int otherPeerPort = 7575;
ServerSocket ss = new ServerSocket(otherPeerPort);
assertEquals(connectCount.get(), 0);
myConnectionManager.offerConnect(new ConnectTask("127.0.0.1", otherPeerPort, new ConnectionListener() {
@Override
public void onNewDataAvailable(SocketChannel socketChannel) throws IOException {
}
@Override
public void onConnectionEstablished(SocketChannel socketChannel) throws IOException {
connectCount.incrementAndGet();
semaphore.release();
}
@Override
public void onError(SocketChannel socketChannel, Throwable ex) {
}
}, 0, 100), 1, TimeUnit.SECONDS);
ss.accept();
tryAcquireOrFail(semaphore);
assertEquals(connectCount.get(), 1);
}
@AfterMethod
public void tearDown() throws Exception {
this.myConnectionManager.close();
myExecutorService.shutdown();
assertTrue(myExecutorService.awaitTermination(10, TimeUnit.SECONDS));
}
private void tryAcquireOrFail(Semaphore semaphore) throws InterruptedException {
if (!semaphore.tryAcquire(500, TimeUnit.MILLISECONDS)) {
fail("don't get signal from connection receiver that connection selected");
}
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/keyProcessors/ConnectableKeyProcessor.java<|end_filename|>
package com.turn.ttorrent.network.keyProcessors;
import com.turn.ttorrent.common.TimeService;
import com.turn.ttorrent.common.TorrentLoggerFactory;
import com.turn.ttorrent.network.ConnectTask;
import com.turn.ttorrent.network.ConnectionListener;
import com.turn.ttorrent.network.ReadWriteAttachment;
import com.turn.ttorrent.network.TimeoutStorage;
import org.slf4j.Logger;
import java.io.IOException;
import java.net.ConnectException;
import java.net.NoRouteToHostException;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.concurrent.atomic.AtomicInteger;
public class ConnectableKeyProcessor implements KeyProcessor {
private static final Logger logger = TorrentLoggerFactory.getLogger(ConnectableKeyProcessor.class);
private final Selector mySelector;
private final TimeService myTimeService;
private final TimeoutStorage myTimeoutStorage;
private final AtomicInteger mySendBufferSize;
private final AtomicInteger myReceiveBufferSize;
public ConnectableKeyProcessor(Selector selector,
TimeService timeService,
TimeoutStorage timeoutStorage,
AtomicInteger sendBufferSize,
AtomicInteger receiveBufferSize) {
this.mySelector = selector;
this.myTimeService = timeService;
this.myTimeoutStorage = timeoutStorage;
this.mySendBufferSize = sendBufferSize;
this.myReceiveBufferSize = receiveBufferSize;
}
@Override
public void process(SelectionKey key) throws IOException {
SelectableChannel channel = key.channel();
if (!(channel instanceof SocketChannel)) {
logger.warn("incorrect instance of channel. The key is cancelled");
key.cancel();
return;
}
SocketChannel socketChannel = (SocketChannel) channel;
Object attachment = key.attachment();
if (!(attachment instanceof ConnectTask)) {
logger.warn("incorrect instance of attachment for channel {}. The key for the channel is cancelled", socketChannel);
key.cancel();
return;
}
final ConnectTask connectTask = (ConnectTask) attachment;
final ConnectionListener connectionListener = connectTask.getConnectionListener();
final boolean isConnectFinished;
try {
isConnectFinished = socketChannel.finishConnect();
} catch (NoRouteToHostException e) {
logger.info("Could not connect to {}:{}, received NoRouteToHostException", connectTask.getHost(), connectTask.getPort());
connectionListener.onError(socketChannel, e);
return;
} catch (ConnectException e) {
logger.info("Could not connect to {}:{}, received ConnectException", connectTask.getHost(), connectTask.getPort());
connectionListener.onError(socketChannel, e);
return;
}
if (!isConnectFinished) {
logger.info("Could not connect to {}:{}", connectTask.getHost(), connectTask.getPort());
connectionListener.onError(socketChannel, null);
return;
}
socketChannel.configureBlocking(false);
KeyProcessorUtil.setBuffersSizeIfNecessary(socketChannel, mySendBufferSize.get(), myReceiveBufferSize.get());
ReadWriteAttachment keyAttachment = new ReadWriteAttachment(connectionListener, myTimeService.now(), myTimeoutStorage.getTimeoutMillis());
socketChannel.register(mySelector, SelectionKey.OP_READ, keyAttachment);
logger.debug("setup new TCP connection with {}", socketChannel);
connectionListener.onConnectionEstablished(socketChannel);
}
@Override
public boolean accept(SelectionKey key) {
return key.isValid() && key.isConnectable();
}
}
<|start_filename|>ttorrent-tracker/src/main/java/com/turn/ttorrent/tracker/PeerCollectorThread.java<|end_filename|>
package com.turn.ttorrent.tracker;
/**
* The stale peer collector thread.
*
* <p>
* Every {@link #COLLECTION_FREQUENCY} seconds, this thread collects
* stale peers from all announced torrents.
* </p>
*/
public class PeerCollectorThread extends Thread {
public static final int COLLECTION_FREQUENCY = 10;
private final TorrentsRepository myTorrentsRepository;
private volatile int myTorrentExpireTimeoutSec = 20 * 60;
public PeerCollectorThread(TorrentsRepository torrentsRepository) {
myTorrentsRepository = torrentsRepository;
}
public void setTorrentExpireTimeoutSec(int torrentExpireTimeoutSec) {
myTorrentExpireTimeoutSec = torrentExpireTimeoutSec;
}
@Override
public void run() {
while (!isInterrupted()) {
myTorrentsRepository.cleanup(myTorrentExpireTimeoutSec);
try {
Thread.sleep(COLLECTION_FREQUENCY * 1000);
} catch (InterruptedException ie) {
break;
}
}
}
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/TorrentSerializer.java<|end_filename|>
package com.turn.ttorrent.common;
import com.turn.ttorrent.bcodec.BEValue;
import com.turn.ttorrent.bcodec.BEncoder;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.turn.ttorrent.common.TorrentMetadataKeys.*;
public class TorrentSerializer {
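  /**
   * Bencodes the given metadata into the standard .torrent layout: the top-level entries
   * (announce, comment, created by, creation date, announce-list) plus the info dictionary,
   * using single-file mode when exactly one file is present and multi-file mode otherwise.
   * A minimal usage sketch (the {@code metadata} variable stands for an already loaded
   * {@link TorrentMetadata}):
   * <pre>{@code
   * byte[] torrentBytes = new TorrentSerializer().serialize(metadata);
   * }</pre>
   */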
public byte[] serialize(TorrentMetadata metadata) throws IOException {
Map<String, BEValue> mapMetadata = new HashMap<String, BEValue>();
Map<String, BEValue> infoTable = new HashMap<String, BEValue>();
String announce = metadata.getAnnounce();
if (announce != null) mapMetadata.put(ANNOUNCE, new BEValue(announce));
putOptionalIfPresent(mapMetadata, COMMENT, metadata.getComment());
putOptionalIfPresent(mapMetadata, CREATED_BY, metadata.getCreatedBy());
if (metadata.getCreationDate().isPresent())
mapMetadata.put(CREATION_DATE_SEC, new BEValue(metadata.getCreationDate().get()));
List<BEValue> announceList = getAnnounceListAsBEValues(metadata.getAnnounceList());
if (announceList != null) {
mapMetadata.put(ANNOUNCE_LIST, new BEValue(announceList));
}
infoTable.put(PIECE_LENGTH, new BEValue(metadata.getPieceLength()));
infoTable.put(PIECES, new BEValue(metadata.getPiecesHashes()));
if (metadata.isPrivate()) {
infoTable.put(PRIVATE, new BEValue(1));
}
infoTable.put(NAME, new BEValue(metadata.getDirectoryName()));
if (metadata.getFiles().size() == 1) {
final TorrentFile torrentFile = metadata.getFiles().get(0);
infoTable.put(FILE_LENGTH, new BEValue(torrentFile.size));
putOptionalIfPresent(infoTable, MD5_SUM, torrentFile.md5Hash);
} else {
List<BEValue> files = new ArrayList<BEValue>();
for (TorrentFile torrentFile : metadata.getFiles()) {
Map<String, BEValue> entry = new HashMap<String, BEValue>();
entry.put(FILE_LENGTH, new BEValue(torrentFile.size));
putOptionalIfPresent(entry, MD5_SUM, torrentFile.md5Hash);
entry.put(FILE_PATH, new BEValue(mapStringListToBEValueList(torrentFile.relativePath)));
files.add(new BEValue(entry));
}
infoTable.put(FILES, new BEValue(files));
}
mapMetadata.put(INFO_TABLE, new BEValue(infoTable));
final ByteBuffer buffer = BEncoder.bencode(mapMetadata);
return buffer.array();
}
@Nullable
private List<BEValue> getAnnounceListAsBEValues(@Nullable List<List<String>> announceList) throws UnsupportedEncodingException {
if (announceList == null) return null;
List<BEValue> result = new ArrayList<BEValue>();
for (List<String> announceTier : announceList) {
List<BEValue> tier = mapStringListToBEValueList(announceTier);
if (!tier.isEmpty()) result.add(new BEValue(tier));
}
if (result.isEmpty()) return null;
return result;
}
private List<BEValue> mapStringListToBEValueList(List<String> list) throws UnsupportedEncodingException {
List<BEValue> result = new ArrayList<BEValue>();
for (String s : list) {
result.add(new BEValue(s));
}
return result;
}
private void putOptionalIfPresent(Map<String, BEValue> map, String key, Optional<String> optional) throws UnsupportedEncodingException {
if (!optional.isPresent()) return;
map.put(key, new BEValue(optional.get()));
}
}
<|start_filename|>test-api/src/main/java/com/turn/ttorrent/WaitFor.java<|end_filename|>
package com.turn.ttorrent;
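/**
 * Polls {@link #condition()} every {@link #POLL_INTERVAL} ms until it holds or the timeout
 * elapses; the outcome is then available via {@link #isMyResult()}. A minimal usage sketch
 * (the {@code tracker.isRunning()} call is purely illustrative):
 * <pre>{@code
 * boolean started = new WaitFor(5000) {
 *   protected boolean condition() {
 *     return tracker.isRunning();
 *   }
 * }.isMyResult();
 * }</pre>
 */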
public abstract class WaitFor {
public static final long POLL_INTERVAL = 500;
private boolean myResult = false;
protected WaitFor() {
this(60 * 1000);
}
protected WaitFor(long timeout) {
long maxTime = System.currentTimeMillis() + timeout;
try {
while (System.currentTimeMillis() < maxTime && !condition()) {
Thread.sleep(POLL_INTERVAL);
}
if (condition()) {
myResult = true;
}
} catch (InterruptedException e) {
}
}
public boolean isMyResult() {
return myResult;
}
protected abstract boolean condition();
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/TorrentConnectionListener.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.common.TorrentHash;
import java.nio.channels.SocketChannel;
/**
* @author Sergey.Pak
* Date: 9/9/13
* Time: 7:46 PM
*/
public interface TorrentConnectionListener {
boolean hasTorrent(TorrentHash torrentHash);
void handleNewPeerConnection(SocketChannel s, byte[] peerId, String hexInfoHash);
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/TorrentMetadataProvider.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.bcodec.InvalidBEncodingException;
import com.turn.ttorrent.common.TorrentMetadata;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
public interface TorrentMetadataProvider {
/**
 * Loads and returns a new {@link TorrentMetadata} instance from the underlying source
 *
 * @return new torrent metadata instance
 * @throws IOException if any IO error occurs
 * @throws InvalidBEncodingException if the source contains invalid bencoded data or required fields are missing
*/
@NotNull
TorrentMetadata getTorrentMetadata() throws IOException;
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/TorrentFile.java<|end_filename|>
package com.turn.ttorrent.common;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* @author dgiffin
* @author mpetazzoni
*/
public class TorrentFile {
@NotNull
public final List<String> relativePath;
public final long size;
@NotNull
public final Optional<String> md5Hash;
public TorrentFile(@NotNull List<String> relativePath, long size, @Nullable String md5Hash) {
this.relativePath = new ArrayList<String>(relativePath);
this.size = size;
this.md5Hash = Optional.of(md5Hash);
}
public String getRelativePathAsString() {
String delimiter = File.separator;
final Iterator<String> iterator = relativePath.iterator();
StringBuilder sb = new StringBuilder();
if (iterator.hasNext()) {
sb.append(iterator.next());
while (iterator.hasNext()) {
sb.append(delimiter).append(iterator.next());
}
}
return sb.toString();
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/TorrentLoaderImpl.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.client.strategy.RequestStrategyImplAnyInteresting;
import com.turn.ttorrent.common.TorrentMetadata;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
public class TorrentLoaderImpl implements TorrentLoader {
@NotNull
private final TorrentsStorage myTorrentsStorage;
public TorrentLoaderImpl(@NotNull TorrentsStorage torrentsStorage) {
myTorrentsStorage = torrentsStorage;
}
@Override
@NotNull
public SharedTorrent loadTorrent(@NotNull LoadedTorrent loadedTorrent) throws IOException {
final String hexInfoHash = loadedTorrent.getTorrentHash().getHexInfoHash();
SharedTorrent old = myTorrentsStorage.getTorrent(hexInfoHash);
if (old != null) {
return old;
}
TorrentMetadata torrentMetadata;
try {
torrentMetadata = loadedTorrent.getMetadata();
} catch (IllegalStateException e) {
myTorrentsStorage.remove(hexInfoHash);
throw e;
}
final SharedTorrent sharedTorrent = new SharedTorrent(torrentMetadata, loadedTorrent.getPieceStorage(),
new RequestStrategyImplAnyInteresting(),
loadedTorrent.getTorrentStatistic(), loadedTorrent.getEventDispatcher());
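    // putIfAbsent guards against a concurrent load of the same torrent: if another thread
    // registered a SharedTorrent for this info hash first, reuse that instance instead.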
old = myTorrentsStorage.putIfAbsentActiveTorrent(hexInfoHash, sharedTorrent);
if (old != null) {
return old;
}
return sharedTorrent;
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/PieceInformation.java<|end_filename|>
package com.turn.ttorrent.client;
public interface PieceInformation {
/**
* @return piece index. Indexing starts from zero
*/
int getIndex();
/**
   * @return piece size. This value must equal the piece size specified by the metadata, except possibly for the last piece
*/
int getSize();
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/WriteTask.java<|end_filename|>
package com.turn.ttorrent.network;
import java.nio.ByteBuffer;
import java.nio.channels.ByteChannel;
public class WriteTask {
private final ByteChannel socketChannel;
private final ByteBuffer byteBuffer;
private final WriteListener listener;
public WriteTask(ByteChannel socketChannel, ByteBuffer byteBuffer, WriteListener listener) {
this.socketChannel = socketChannel;
this.byteBuffer = byteBuffer;
this.listener = listener;
}
public ByteChannel getSocketChannel() {
return socketChannel;
}
public ByteBuffer getByteBuffer() {
return byteBuffer;
}
public WriteListener getListener() {
return listener;
}
@Override
public String toString() {
return "WriteTask{" +
"socketChannel=" + socketChannel +
", byteBuffer=" + byteBuffer +
", listener=" + listener +
'}';
}
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/protocol/AnnounceResponseMessage.java<|end_filename|>
package com.turn.ttorrent.common.protocol;
import com.turn.ttorrent.common.Peer;
import java.util.List;
/**
* Base interface for announce response messages.
*
* <p>
* This interface must be implemented by all subtypes of announce response
* messages for the various tracker protocols.
* </p>
*
* @author mpetazzoni
*/
public interface AnnounceResponseMessage {
int getInterval();
int getComplete();
int getIncomplete();
List<Peer> getPeers();
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/WriteAttachment.java<|end_filename|>
package com.turn.ttorrent.network;
import java.util.concurrent.BlockingQueue;
public interface WriteAttachment {
/**
* @return queue for offer/peek write tasks
*/
BlockingQueue<WriteTask> getWriteTasks();
}
<|start_filename|>ttorrent-tracker/src/test/java/com/turn/ttorrent/tracker/TorrentsRepositoryTest.java<|end_filename|>
package com.turn.ttorrent.tracker;
import com.turn.ttorrent.MockTimeService;
import com.turn.ttorrent.common.protocol.AnnounceRequestMessage;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.testng.Assert.*;
@Test
public class TorrentsRepositoryTest {
private TorrentsRepository myTorrentsRepository;
@BeforeMethod
public void setUp() throws Exception {
myTorrentsRepository = new TorrentsRepository(10);
}
@AfterMethod
public void tearDown() throws Exception {
}
public void testThatTorrentsStoredInRepository() {
assertEquals(myTorrentsRepository.getTorrents().size(), 0);
final TrackedTorrent torrent = new TrackedTorrent(new byte[]{1, 2, 3});
myTorrentsRepository.putIfAbsent(torrent.getHexInfoHash(), torrent);
assertTrue(myTorrentsRepository.getTorrent(torrent.getHexInfoHash()) == torrent);
final TrackedTorrent torrentCopy = new TrackedTorrent(new byte[]{1, 2, 3});
myTorrentsRepository.putIfAbsent(torrentCopy.getHexInfoHash(), torrentCopy);
assertTrue(myTorrentsRepository.getTorrent(torrent.getHexInfoHash()) == torrent);
assertEquals(myTorrentsRepository.getTorrents().size(), 1);
final TrackedTorrent secondTorrent = new TrackedTorrent(new byte[]{3, 2, 1});
myTorrentsRepository.putIfAbsent(secondTorrent.getHexInfoHash(), secondTorrent);
assertEquals(myTorrentsRepository.getTorrents().size(), 2);
}
public void testPutIfAbsentAndUpdate() throws UnsupportedEncodingException {
final AtomicBoolean updateInvoked = new AtomicBoolean();
TrackedTorrent torrent = new TrackedTorrent(new byte[]{1, 2, 3}) {
@Override
public TrackedPeer update(AnnounceRequestMessage.RequestEvent event, ByteBuffer peerId, String hexPeerId, String ip, int port, long uploaded, long downloaded, long left) throws UnsupportedEncodingException {
updateInvoked.set(true);
return super.update(event, peerId, hexPeerId, ip, port, uploaded, downloaded, left);
}
};
myTorrentsRepository.putIfAbsentAndUpdate(torrent.getHexInfoHash(), torrent,
AnnounceRequestMessage.RequestEvent.STARTED, ByteBuffer.allocate(5), "0",
"127.0.0.1", 6881, 5, 10, 12);
assertTrue(updateInvoked.get());
assertEquals(torrent.getPeers().size(), 1);
final TrackedPeer trackedPeer = torrent.getPeers().values().iterator().next();
assertEquals(trackedPeer.getIp(), "127.0.0.1");
assertEquals(trackedPeer.getPort(), 6881);
assertEquals(trackedPeer.getLeft(), 12);
assertEquals(trackedPeer.getDownloaded(), 10);
assertEquals(trackedPeer.getUploaded(), 5);
}
public void testThatCleanupDontLockAllTorrentsAndStorage() throws UnsupportedEncodingException {
final Semaphore cleanFinishLock = new Semaphore(0);
final Semaphore cleanStartLock = new Semaphore(0);
final TrackedTorrent torrent = new TrackedTorrent(new byte[]{1, 2, 3}) {
@Override
public void collectUnfreshPeers(int expireTimeoutSec) {
cleanStartLock.release();
try {
if (!cleanFinishLock.tryAcquire(1, TimeUnit.SECONDS)) {
fail("can not acquire semaphore");
}
} catch (InterruptedException e) {
fail("can not finish cleanup", e);
}
}
};
myTorrentsRepository.putIfAbsent(torrent.getHexInfoHash(), torrent);
torrent.addPeer(new TrackedPeer(torrent, "127.0.0.1", 6881, ByteBuffer.allocate(10)));
assertEquals(myTorrentsRepository.getTorrents().size(), 1);
final ExecutorService executorService = Executors.newSingleThreadExecutor();
try {
final Future<Integer> cleanupFuture = executorService.submit(new Callable<Integer>() {
@Override
public Integer call() throws Exception {
myTorrentsRepository.cleanup(1);
return 0;
}
});
try {
if (!cleanStartLock.tryAcquire(1, TimeUnit.SECONDS)) {
fail("cannot acquire semaphore");
}
} catch (InterruptedException e) {
fail("don't received that cleanup is started", e);
}
final TrackedTorrent secondTorrent = new TrackedTorrent(new byte[]{3, 1, 1});
myTorrentsRepository.putIfAbsentAndUpdate(secondTorrent.getHexInfoHash(), secondTorrent,
AnnounceRequestMessage.RequestEvent.STARTED, ByteBuffer.allocate(5), "0",
"127.0.0.1", 6881, 0, 0, 1);
cleanFinishLock.release();
try {
cleanupFuture.get(1, TimeUnit.SECONDS);
} catch (InterruptedException e) {
fail("cleanup was interrupted", e);
} catch (ExecutionException e) {
fail("cleanup was failed with execution exception", e);
} catch (TimeoutException e) {
fail("cannot get result from future", e);
}
} finally {
executorService.shutdown();
}
}
public void testThatTorrentsCanRemovedFromStorage() throws UnsupportedEncodingException {
TrackedTorrent torrent = new TrackedTorrent(new byte[]{1, 2, 3});
MockTimeService timeService = new MockTimeService();
timeService.setTime(10000);
final TrackedPeer peer = new TrackedPeer(torrent, "127.0.0.1", 6881, ByteBuffer.allocate(5), timeService);
torrent.addPeer(peer);
timeService.setTime(15000);
final TrackedPeer secondPeer = new TrackedPeer(torrent, "127.0.0.1", 6882, ByteBuffer.allocate(5), timeService);
torrent.addPeer(secondPeer);
myTorrentsRepository.putIfAbsent(torrent.getHexInfoHash(), torrent);
assertEquals(myTorrentsRepository.getTorrents().size(), 1);
assertEquals(torrent.getPeers().size(), 2);
timeService.setTime(17000);
myTorrentsRepository.cleanup(10);
assertEquals(myTorrentsRepository.getTorrents().size(), 1);
assertEquals(torrent.getPeers().size(), 2);
timeService.setTime(23000);
myTorrentsRepository.cleanup(10);
assertEquals(myTorrentsRepository.getTorrents().size(), 1);
assertEquals(torrent.getPeers().size(), 1);
timeService.setTime(40000);
myTorrentsRepository.cleanup(10);
assertEquals(myTorrentsRepository.getTorrents().size(), 0);
assertEquals(torrent.getPeers().size(), 0);
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/storage/PieceStorageFactory.java<|end_filename|>
package com.turn.ttorrent.client.storage;
import com.turn.ttorrent.common.TorrentMetadata;
import java.io.IOException;
public interface PieceStorageFactory {
/**
   * Creates a new {@link PieceStorage} for the specified torrent, backed by the specified byte storage
   *
   * @param metadata    torrent metadata
   * @param byteStorage byte storage in which the pieces will be stored
   * @return new {@link PieceStorage}
*/
PieceStorage createStorage(TorrentMetadata metadata, TorrentByteStorage byteStorage) throws IOException;
}
<|start_filename|>ttorrent-client/src/test/java/com/turn/ttorrent/client/PeersStorageTest.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.client.peer.PeerActivityListener;
import com.turn.ttorrent.client.peer.SharingPeer;
import com.turn.ttorrent.common.Peer;
import com.turn.ttorrent.common.PeerUID;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.net.InetSocketAddress;
import java.nio.channels.ByteChannel;
import java.util.Collection;
import static org.mockito.Mockito.mock;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
@Test
public class PeersStorageTest {
private PeersStorage myPeersStorage;
@BeforeMethod
public void setUp() throws Exception {
myPeersStorage = new PeersStorage();
}
public void getSetSelfTest() {
assertNull(myPeersStorage.getSelf());
Peer self = new Peer("", 1);
myPeersStorage.setSelf(self);
assertEquals(myPeersStorage.getSelf(), self);
}
public void testThatPeersStorageReturnNewCollection() {
SharingPeer sharingPeer = getMockSharingPeer();
myPeersStorage.putIfAbsent(new PeerUID(new InetSocketAddress("127.0.0.1", 6881), ""), sharingPeer);
Collection<SharingPeer> sharingPeers = myPeersStorage.getSharingPeers();
assertEquals(1, myPeersStorage.getSharingPeers().size());
assertEquals(1, sharingPeers.size());
sharingPeers.add(sharingPeer);
assertEquals(1, myPeersStorage.getSharingPeers().size());
assertEquals(2, sharingPeers.size());
}
private SharingPeer getMockSharingPeer() {
return new SharingPeer("1",
1,
null,
mock(SharedTorrent.class),
null,
mock(PeerActivityListener.class),
mock(ByteChannel.class), "TO", 1234);
}
public void getAndRemoveSharingPeersTest() {
SharingPeer sharingPeer = getMockSharingPeer();
PeerUID peerUid = new PeerUID(new InetSocketAddress("127.0.0.1", 6881), "");
SharingPeer oldPeer = myPeersStorage.putIfAbsent(peerUid, sharingPeer);
assertNull(oldPeer);
assertEquals(myPeersStorage.getSharingPeer(peerUid), sharingPeer);
assertEquals(myPeersStorage.removeSharingPeer(peerUid), sharingPeer);
assertNull(myPeersStorage.removeSharingPeer(peerUid));
}
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/ImmutableTorrentHash.java<|end_filename|>
package com.turn.ttorrent.common;
import java.util.Arrays;
public class ImmutableTorrentHash implements TorrentHash {
private final byte[] hash;
private final String hexHash;
public ImmutableTorrentHash(byte[] hash) {
this.hash = hash;
this.hexHash = TorrentUtils.byteArrayToHexString(hash);
}
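  // Return a defensive copy so callers cannot mutate the cached hash bytes.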
@Override
public byte[] getInfoHash() {
return Arrays.copyOf(hash, hash.length);
}
@Override
public String getHexInfoHash() {
return hexHash;
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/peer/SharingPeerInfo.java<|end_filename|>
package com.turn.ttorrent.client.peer;
import com.turn.ttorrent.common.TorrentHash;
import java.nio.ByteBuffer;
/**
* @author Sergey.Pak
* Date: 8/9/13
* Time: 6:40 PM
*/
public interface SharingPeerInfo {
String getIp();
int getPort();
TorrentHash getTorrentHash();
ByteBuffer getPeerId();
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/TorrentMetadataKeys.java<|end_filename|>
package com.turn.ttorrent.common;
@SuppressWarnings("WeakerAccess")
public final class TorrentMetadataKeys {
public final static String MD5_SUM = "md5sum";
public final static String FILE_LENGTH = "length";
public final static String FILES = "files";
public final static String FILE_PATH = "path";
public final static String FILE_PATH_UTF8 = "path.utf-8";
public final static String COMMENT = "comment";
public final static String CREATED_BY = "created by";
public final static String ANNOUNCE = "announce";
public final static String PIECE_LENGTH = "piece length";
public final static String PIECES = "pieces";
public final static String CREATION_DATE_SEC = "creation date";
public final static String PRIVATE = "private";
public final static String NAME = "name";
public final static String INFO_TABLE = "info";
public final static String ANNOUNCE_LIST = "announce-list";
}
<|start_filename|>common/src/main/java/com/turn/ttorrent/common/Optional.java<|end_filename|>
package com.turn.ttorrent.common;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.NoSuchElementException;
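/**
 * Minimal Optional-style value holder (presumably kept so the code can run on Java versions
 * without {@code java.util.Optional}). A short usage sketch, where {@code maybeMd5} stands in
 * for any possibly-null string:
 * <pre>{@code
 * Optional<String> md5 = Optional.of(maybeMd5); // of(null) yields the shared empty instance
 * String value = md5.orElse("none");            // get() throws NoSuchElementException when empty
 * }</pre>
 */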
public final class Optional<T> {
private static final Optional<?> EMPTY = new Optional();
@Nullable
private final T value;
public Optional(@NotNull T value) {
this.value = value;
}
private Optional() {
this.value = null;
}
@NotNull
@SuppressWarnings("unchecked")
public static <T> Optional<T> of(@Nullable T value) {
return value == null ? (Optional<T>) EMPTY : new Optional<T>(value);
}
@NotNull
public T get() throws NoSuchElementException {
if (value == null) {
throw new NoSuchElementException("No value present");
}
return value;
}
public boolean isPresent() {
return value != null;
}
@NotNull
public T orElse(@NotNull T defaultValue) {
return value != null ? value : defaultValue;
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/keyProcessors/ReadableKeyProcessor.java<|end_filename|>
package com.turn.ttorrent.network.keyProcessors;
import com.turn.ttorrent.common.TorrentLoggerFactory;
import com.turn.ttorrent.network.ConnectionListener;
import com.turn.ttorrent.network.ReadAttachment;
import org.slf4j.Logger;
import java.io.IOException;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
public class ReadableKeyProcessor implements KeyProcessor {
private static final Logger logger = TorrentLoggerFactory.getLogger(ReadableKeyProcessor.class);
private final String myServerSocketLocalAddress;
public ReadableKeyProcessor(String serverSocketLocalAddress) {
this.myServerSocketLocalAddress = serverSocketLocalAddress;
}
@Override
public void process(SelectionKey key) throws IOException {
SelectableChannel channel = key.channel();
if (!(channel instanceof SocketChannel)) {
logger.warn("incorrect instance of channel. The key is cancelled");
key.cancel();
return;
}
SocketChannel socketChannel = (SocketChannel) channel;
logger.trace("server {} get new data from {}", myServerSocketLocalAddress, socketChannel);
Object attachment = key.attachment();
if (!(attachment instanceof ReadAttachment)) {
logger.warn("incorrect instance of attachment for channel {}", new Object[]{socketChannel.socket()});
socketChannel.close();
return;
}
ConnectionListener connectionListener = ((ReadAttachment) attachment).getConnectionListener();
connectionListener.onNewDataAvailable(socketChannel);
}
@Override
public boolean accept(SelectionKey key) {
return key.isValid() && key.isReadable();
}
}
<|start_filename|>network/src/main/java/com/turn/ttorrent/network/ReadWriteAttachment.java<|end_filename|>
package com.turn.ttorrent.network;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.nio.channels.SocketChannel;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
public class ReadWriteAttachment implements ReadAttachment, WriteAttachment, TimeoutAttachment {
private final static int WRITE_TASK_QUEUE_SIZE = 150;
private long lastCommunicationTime;
private final ConnectionListener connectionListener;
private final long myTimeoutMillis;
private final BlockingQueue<WriteTask> writeTasks;
public ReadWriteAttachment(ConnectionListener connectionListener, long lastCommunicationTime, long timeoutMillis) {
this.connectionListener = connectionListener;
this.writeTasks = new LinkedBlockingQueue<WriteTask>(WRITE_TASK_QUEUE_SIZE);
this.lastCommunicationTime = lastCommunicationTime;
this.myTimeoutMillis = timeoutMillis;
}
@Override
public ConnectionListener getConnectionListener() {
return connectionListener;
}
@Override
public BlockingQueue<WriteTask> getWriteTasks() {
return writeTasks;
}
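  // The attachment is considered timed out when the last recorded communication happened
  // before (currentTimeMillis - myTimeoutMillis).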
@Override
public boolean isTimeoutElapsed(long currentTimeMillis) {
long minTimeForKeepAlive = currentTimeMillis - myTimeoutMillis;
return minTimeForKeepAlive > lastCommunicationTime;
}
@Override
public void communicatedNow(long currentTimeMillis) {
lastCommunicationTime = currentTimeMillis;
}
@Override
public void onTimeoutElapsed(SocketChannel channel) throws IOException {
connectionListener.onError(channel, new SocketTimeoutException());
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/ClientState.java<|end_filename|>
package com.turn.ttorrent.client;
public enum ClientState {
WAITING,
VALIDATING,
SHARING,
SEEDING,
ERROR,
DONE
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/announce/TrackerClientFactoryImpl.java<|end_filename|>
/*
* Copyright 2000-2018 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.client.announce;
import com.turn.ttorrent.common.Peer;
import java.net.URI;
import java.net.UnknownHostException;
import java.net.UnknownServiceException;
import java.util.List;
public class TrackerClientFactoryImpl implements TrackerClientFactory {
@Override
public TrackerClient createTrackerClient(List<Peer> peers, URI tracker) throws UnknownHostException, UnknownServiceException {
String scheme = tracker.getScheme();
if ("http".equals(scheme) || "https".equals(scheme)) {
return new HTTPTrackerClient(peers, tracker);
} else if ("udp".equals(scheme)) {
return new UDPTrackerClient(peers, tracker);
}
throw new UnknownServiceException(
"Unsupported announce scheme: " + scheme + "!");
}
}
<|start_filename|>ttorrent-client/src/test/java/com/turn/ttorrent/client/strategy/RequestStrategyImplAnyInterestingTest.java<|end_filename|>
package com.turn.ttorrent.client.strategy;
import com.turn.ttorrent.client.Piece;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.BitSet;
import java.util.SortedSet;
public class RequestStrategyImplAnyInterestingTest {
  private final SortedSet<Piece> myRarest = null; // myRarest is not needed for this strategy
private final int myPiecesTotal = 10;
private final Piece[] myPieces = new Piece[myPiecesTotal];
private final RequestStrategy myRequestStrategy = new RequestStrategyImplAnyInteresting();
@BeforeClass
public void init() {
for (int i = 0; i < myPieces.length; i++) {
myPieces[i] = new Piece(null, i, 0, new byte[0]);
}
}
@Test
public void choosePieceNoInterestingTest() {
Piece actual = myRequestStrategy.choosePiece(new BitSet(), myPieces);
Assert.assertNull(actual);
}
@Test
public void choosePieceOneInterestingTest() {
BitSet interesting = new BitSet();
for (int i = 0; i < myPieces.length; i++) {
interesting.clear();
interesting.set(i);
Piece expected = myPieces[i];
Piece actual = myRequestStrategy.choosePiece(interesting, myPieces);
Assert.assertEquals(expected, actual);
}
}
@Test
public void choosePieceTest() {
BitSet interesting = new BitSet();
int interestingFrom = 1;
int interestingTo = 5;
interesting.set(interestingFrom, interestingTo);
Piece actual = myRequestStrategy.choosePiece(interesting, myPieces);
Assert.assertTrue(actual.getIndex() >= interestingFrom && actual.getIndex() <= interestingTo);
}
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/LoadedTorrent.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.client.storage.PieceStorage;
import com.turn.ttorrent.common.AnnounceableInformation;
import com.turn.ttorrent.common.TorrentHash;
import com.turn.ttorrent.common.TorrentMetadata;
import com.turn.ttorrent.common.TorrentStatistic;
import org.jetbrains.annotations.NotNull;
public interface LoadedTorrent {
/**
   * @return the {@link PieceStorage} in which the available pieces are stored
*/
PieceStorage getPieceStorage();
/**
* @return {@link TorrentMetadata} instance
   * @throws IllegalStateException if the metadata cannot be fetched from its source
   *                               (e.g. the source is a .torrent file that was deleted manually)
*/
TorrentMetadata getMetadata() throws IllegalStateException;
/**
   * @return a new {@link AnnounceableInformation} instance for announcing this torrent to the tracker
*/
@NotNull
AnnounceableInformation createAnnounceableInformation();
/**
   * @return the {@link TorrentStatistic} instance associated with this torrent
*/
TorrentStatistic getTorrentStatistic();
/**
* @return hash of this torrent
*/
TorrentHash getTorrentHash();
/**
   * @return the {@link EventDispatcher} associated with this torrent
*/
EventDispatcher getEventDispatcher();
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/announce/AnnounceResponseListener.java<|end_filename|>
/**
* Copyright (C) 2011-2012 Turn, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.client.announce;
import com.turn.ttorrent.common.Peer;
import java.util.EventListener;
import java.util.List;
/**
* EventListener interface for objects that want to receive tracker responses.
*
* @author mpetazzoni
*/
public interface AnnounceResponseListener extends EventListener {
/**
* Handle an announce response event.
*
* @param interval The announce interval requested by the tracker.
* @param complete The number of seeders on this torrent.
* @param incomplete The number of leechers on this torrent.
*/
void handleAnnounceResponse(int interval, int complete, int incomplete, String hexInfoHash);
/**
* Handle the discovery of new peers.
*
* @param peers The list of peers discovered (from the announce response or
* any other means like DHT/PEX, etc.).
*/
void handleDiscoveredPeers(List<Peer> peers, String hexInfoHash);
}
<|start_filename|>ttorrent-client/src/main/java/com/turn/ttorrent/client/PeersStorage.java<|end_filename|>
package com.turn.ttorrent.client;
import com.turn.ttorrent.client.peer.SharingPeer;
import com.turn.ttorrent.common.Peer;
import com.turn.ttorrent.common.PeerUID;
import java.util.ArrayList;
import java.util.Collection;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
public class PeersStorage {
private volatile Peer self = null;
private final ConcurrentHashMap<PeerUID, SharingPeer> connectedSharingPeers;
public PeersStorage() {
this.connectedSharingPeers = new ConcurrentHashMap<PeerUID, SharingPeer>();
}
public Peer getSelf() {
return self;
}
public void setSelf(Peer self) {
this.self = self;
}
public SharingPeer putIfAbsent(PeerUID peerId, SharingPeer sharingPeer) {
return connectedSharingPeers.putIfAbsent(peerId, sharingPeer);
}
public SharingPeer removeSharingPeer(PeerUID peerId) {
return connectedSharingPeers.remove(peerId);
}
public SharingPeer getSharingPeer(PeerUID peerId) {
return connectedSharingPeers.get(peerId);
}
public void removeSharingPeer(SharingPeer peer) {
connectedSharingPeers.values().remove(peer);
}
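  // Returns a snapshot copy: mutating the returned collection does not affect the storage
  // (see PeersStorageTest#testThatPeersStorageReturnNewCollection).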
public Collection<SharingPeer> getSharingPeers() {
return new ArrayList<SharingPeer>(connectedSharingPeers.values());
}
}
<|start_filename|>ttorrent-client/src/test/java/com/turn/ttorrent/client/EventDispatcherTest.java<|end_filename|>
/*
* Copyright 2000-2018 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.turn.ttorrent.client;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.concurrent.atomic.AtomicInteger;
import static org.mockito.Mockito.mock;
import static org.testng.Assert.assertEquals;
@Test
public class EventDispatcherTest {
private EventDispatcher eventDispatcher;
private PeerInformation peerInfo;
private PieceInformation pieceInfo;
@BeforeMethod
public void setUp() {
eventDispatcher = new EventDispatcher();
peerInfo = mock(PeerInformation.class);
pieceInfo = mock(PieceInformation.class);
}
public void testWithoutListeners() {
eventDispatcher.multicaster().downloadFailed(new RuntimeException());
eventDispatcher.multicaster().peerConnected(peerInfo);
eventDispatcher.multicaster().validationComplete(1, 4);
eventDispatcher.multicaster().pieceDownloaded(pieceInfo, peerInfo);
eventDispatcher.multicaster().downloadComplete();
eventDispatcher.multicaster().pieceReceived(pieceInfo, peerInfo);
eventDispatcher.multicaster().peerDisconnected(peerInfo);
}
public void testInvocation() {
final AtomicInteger invocationCount = new AtomicInteger();
int count = 5;
for (int i = 0; i < count; i++) {
eventDispatcher.addListener(new TorrentListenerWrapper() {
@Override
public void downloadComplete() {
invocationCount.incrementAndGet();
}
});
}
eventDispatcher.multicaster().peerConnected(peerInfo);
assertEquals(invocationCount.get(), 0);
eventDispatcher.multicaster().downloadComplete();
assertEquals(invocationCount.get(), count);
}
}
| sideeffffect/ttorrent |
<|start_filename|>sublime-text-3/Packages/ESLint/linter.js<|end_filename|>
'use strict';
var fs = require('fs');
var path = require('path');
var args = process.argv.slice(2);
var minNodeVersion = ["8","9","0"];
var targetPath = args[0];
var targetDir = path.dirname(targetPath);
var nodeModulesPath = args[1];
if (nodeModulesPath) {
module.paths.push(nodeModulesPath);
}
var configFile = args[2];
var isNodeMinVersion = checkNodeMinVersion(process.version);
var eslintPath = (isNodeMinVersion)
? require.resolve('eslint', {paths: [targetDir, nodeModulesPath]})
: require.resolve('eslint');
var eslint = require(eslintPath);
var CLIEngine = eslint.CLIEngine;
var options = {};
if (configFile) {
options.configFile = configFile;
}
var cli = new CLIEngine(options);
var report = cli.executeOnFiles([targetPath]);
// eslint-disable-next-line no-console
console.log(format(report.results));
function format(results) {
var lines = [];
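  // numberWang computes the padding needed so the "line,column:" locations line up in the output.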
function numberWang(wangaNumb) {
var thatsNumberWang = 7 - wangaNumb;
var stayNumberWang = '';
var i;
for (i = 0; i < thatsNumberWang; i++) {
stayNumberWang += ' ';
}
return stayNumberWang;
}
lines.push('[ESLint: ' + results[0].filePath + ']');
lines.push('');
var messages = results[0].messages;
var errorCount = results[0].errorCount || 0;
var warningCount = results[0].warningCount || 0;
if (errorCount || warningCount) {
messages.forEach(function(error) {
var ruleId = error.ruleId ? ' (' + error.ruleId + ')' : '';
var severity = (error.severity === 1 ? 'Warn ' : 'Error');
var hasPosition = (error.line !== undefined && error.column !== undefined);
var messageParts = ['\t', severity];
if (hasPosition) {
messageParts.push(numberWang((error.line + error.column.toString()).length));
messageParts.push(error.line + ',' + error.column + ':');
}
messageParts.push(error.message + ruleId);
lines.push(messageParts.join(' '));
});
lines.push('');
lines.push('✗ ' +
errorCount + ' ' + (errorCount === 1 ? 'error' : 'errors') + ', ' +
warningCount + ' ' + (warningCount === 1 ? 'warning' : 'warnings'));
lines.push('');
lines.push('Double-click on lines to jump to location, [F4] for next, [shift-F4] for previous.'
);
} else {
lines.push('✓ 0 errors and warnings, [esc] to hide.');
}
lines.push('');
return lines.join('\n');
}
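// Compares the running Node.js version string (e.g. "v8.11.3") against minNodeVersion,
// component by component, and returns true when the running version is at least the
// minimum. The result gates the use of require.resolve() with the `paths` option above.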
function checkNodeMinVersion(version) {
var isNodeMinVersion = false;
var nodeVersion = (version + "").replace(/v/gi, "").split(".");
if(nodeVersion.length===3){
minNodeVersion.every(function(itm, idx) {
var isGreater = (nodeVersion[idx]*1 > itm*1)?true:false;
var isEqual = (itm*1 == nodeVersion[idx]*1)?true:false;
isNodeMinVersion = (isGreater || isEqual);
return (!isGreater && isEqual);
});
}
return isNodeMinVersion;
}
| EnTeQuAk/dotfiles |
<|start_filename|>drools/drools-core/src/test/java/com/zandili/demo/drools/test/BaseTest.java<|end_filename|>
package com.zandili.demo.drools.test;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit38.AbstractJUnit38SpringContextTests;
@SuppressWarnings("deprecation")
@ContextConfiguration(locations = { "classpath*:applicationContext.xml" })
public class BaseTest extends AbstractJUnit38SpringContextTests {
public void testBase() {
System.out.println("--spring test success--");
}
}
<|start_filename|>drools/drools-core/src/test/java/com/zandili/demo/drools/golfer/test/GolferTest.java<|end_filename|>
package com.zandili.demo.drools.golfer.test;
import org.drools.runtime.StatefulKnowledgeSession;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import com.zandili.demo.drools.pojo.Golfer;
import com.zandili.demo.drools.test.BaseTest;
/**
 * Demonstrates the golfer logic puzzle.
*
* @author airfey
*
*/
public class GolferTest extends BaseTest {
@Autowired
private StatefulKnowledgeSession ksession;
/**
   * Demonstrates the golfer logic puzzle:<br>
   * 1. Four golfers stand in a row from left to right under a tree. Each golfer wears pants of a different color.<br>
   * One of them wears red.<br>
   * The golfer directly to Fred's right wears blue.
   *
   * 2. Joe is second in the row.
   *
   * 3. Bob wears plaid pants.
   *
   * 4. Tom is neither first nor fourth, and he does not wear those awful orange pants.
   *
   * The question: in what order do the four golfers stand, and what color pants does each wear?
*/
@Test
public void testGolfer() {
String[] names = new String[] { "Fred", "Joe", "Bob", "Tom" };
String[] colors = new String[] { "red", "blue", "plaid", "orange" };
int[] positions = new int[] { 1, 2, 3, 4 };
for (int n = 0; n < names.length; n++) {
for (int c = 0; c < colors.length; c++) {
for (int p = 0; p < positions.length; p++) {
ksession.insert(new Golfer(names[n], colors[c],
positions[p]));
}
}
}
ksession.fireAllRules();
ksession.dispose();
}
}
| SeaDragon769/droolsdemo |
<|start_filename|>sample/src/main/java/com/maksim88/passwordedittext/sample/MainActivity.java<|end_filename|>
package com.maksim88.passwordedittext.sample;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.Button;
import com.maksim88.passwordedittext.PasswordEditText;
import uk.co.chrisjenx.calligraphy.CalligraphyContextWrapper;
public class MainActivity extends AppCompatActivity {
Button submitButton;
PasswordEditText pwText;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
submitButton = (Button)findViewById(R.id.submit_button);
pwText = (PasswordEditText)findViewById(R.id.input_password);
}
@Override
protected void attachBaseContext(Context newBase) {
super.attachBaseContext(CalligraphyContextWrapper.wrap(newBase));
}
}
| AppSecAI-TEST/PasswordEditText |
<|start_filename|>lib/erl_interface/src/encode/encode_string.c<|end_filename|>
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1998-2011. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
#include <string.h>
#include <limits.h>
#include "eidef.h"
#include "eiext.h"
#include "putget.h"
int ei_encode_string(char *buf, int *index, const char *p)
{
size_t len = strlen(p);
if (len >= INT_MAX) return -1;
return ei_encode_string_len(buf, index, p, len);
}
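/* Encodes the first len bytes of p: an empty string becomes ERL_NIL_EXT, strings of up
 * to 0xffff bytes become ERL_STRING_EXT with a 16-bit length, and longer strings are
 * encoded as an ERL_LIST_EXT of small integers terminated by ERL_NIL_EXT. When buf is
 * NULL, only the required size is added to *index. */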
int ei_encode_string_len(char *buf, int *index, const char *p, int len)
{
char *s = buf + *index;
char *s0 = s;
int i;
if (len == 0) {
if (!buf) {
s += 1;
} else {
put8(s,ERL_NIL_EXT);
}
} else if (len <= 0xffff) {
if (!buf) {
s += 3;
} else {
put8(s,ERL_STRING_EXT);
put16be(s,len);
memmove(s,p,len); /* unterminated string */
}
s += len;
} else {
if (!buf) {
s += 5 + (2*len) + 1;
} else {
/* strings longer than 65535 are encoded as lists */
put8(s,ERL_LIST_EXT);
put32be(s,len);
for (i=0; i<len; i++) {
put8(s,ERL_SMALL_INTEGER_EXT);
put8(s,p[i]);
}
put8(s,ERL_NIL_EXT);
}
}
*index += s-s0;
return 0;
}
<|start_filename|>lib/diameter/test/diameter_pool_SUITE.erl<|end_filename|>
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%% Test of the pool_size option in connecting nodes with multiple
%% connections.
%%
-module(diameter_pool_SUITE).
-export([suite/0,
all/0,
init_per_testcase/2,
end_per_testcase/2,
init_per_suite/1,
end_per_suite/1]).
%% testcases
-export([tcp_connect/1,
sctp_connect/1,
any_connect/1]).
%% ===========================================================================
-define(util, diameter_util).
%% Config for diameter:start_service/2.
-define(SERVICE(Host),
[{'Origin-Host', Host ++ ".ericsson.com"},
{'Origin-Realm', "ericsson.com"},
{'Host-IP-Address', [{127,0,0,1}]},
{'Vendor-Id', 12345},
{'Product-Name', "OTP/diameter"},
{'Auth-Application-Id', [0]}, %% common
{'Acct-Application-Id', [3]}, %% accounting
{restrict_connections, false},
{application, [{alias, common},
{dictionary, diameter_gen_base_rfc6733},
{module, diameter_callback}]},
{application, [{alias, accounting},
{dictionary, diameter_gen_acct_rfc6733},
{module, diameter_callback}]}]).
%% ===========================================================================
suite() ->
[{timetrap, {seconds, 30}}].
all() ->
[tcp_connect,
sctp_connect,
any_connect].
init_per_testcase(_Name, Config) ->
Config.
end_per_testcase(_Name, _Config) ->
diameter:stop().
init_per_suite(Config) ->
[{sctp, ?util:have_sctp()} | Config].
end_per_suite(_Config) ->
ok.
%% ===========================================================================
tcp_connect(_Config) ->
connect(tcp, tcp).
sctp_connect(Config) ->
case lists:member({sctp, true}, Config) of
true -> connect(sctp, sctp);
false -> {skip, no_sctp}
end.
any_connect(_Config) ->
connect(any, tcp).
%% connect/2
%% Establish multiple connections between a client and server.
connect(ClientProt, ServerProt) ->
ok = diameter:start(),
[] = [{S,T} || S <- ["server", "client"],
T <- [diameter:start_service(S, ?SERVICE(S))],
T /= ok],
%% Listen with a single transport with pool_size = 4. Ensure the
%% expected number of transport processes are started.
LRef = ?util:listen("server", ServerProt, [{pool_size, 4}]),
{4,0} = count("server", LRef, accept), %% 4 transports, no connections
%% Establish 5 connections.
Ref = ?util:connect("client", ClientProt, LRef, [{pool_size, 5}]),
{5,5} = count("client", Ref, pool), %% 5 connections
%% Ensure the server has started replacement transports within a
    %% reasonable time. Sleep since there's no guarantee the
%% replacements have been started before the client has received
%% 'up' events. (Although it's likely.)
sleep(),
{9,5} = count("server", LRef, accept), %% 5 connections + 4 accepting
    %% Ensure there are still the expected number of accepting transports
%% after stopping the client service.
ok = diameter:stop_service("client"),
sleep(),
{4,0} = count("server", LRef, accept), %% 4 transports, no connections
%% Done.
ok = diameter:stop_service("server").
count(Name, Ref, Key) ->
[{transport, [[{ref, Ref} | T]]},
{connections, Cs}]
= diameter:service_info(Name, [transport, connections]),
{Key, Ps} = lists:keyfind(Key, 1, T),
{length(Ps), length(Cs)}. %% number of processes, connections
sleep() ->
receive after 1000 -> ok end.
<|start_filename|>lib/kernel/test/code_SUITE_data/upgradee.erl<|end_filename|>
-module(upgradee).
-export([dispatch_loop/0]).
-ifdef(VERSION_1).
-define(VERSION,1).
-export([exp1/0]). % only exported in v1
-export([exp1loc2/0]). % exported in v1, local in v2
-export([exp1exp2/0]). % exported in v1 and v2
exp1() -> ?VERSION.
loc1() -> ?VERSION.
-endif. % VERSION_1
-ifdef(VERSION_2).
-define(VERSION,2).
-export([exp2/0]).
-export([loc1exp2/0]).
-export([exp1exp2/0]).
exp2() -> ?VERSION.
loc2() -> ?VERSION.
-endif. % VERSION_2
exp1exp2() -> ?VERSION.
exp1loc2() -> ?VERSION.
loc1exp2() -> ?VERSION.
loc1loc2() -> ?VERSION.
dispatch_loop() ->
receive
upgrade_order ->
%%erlang:display({"upgradee version", ?VERSION, "got upgrade_order"}),
?MODULE:dispatch_loop();
Msg ->
%%erlang:display({"upgradee version", ?VERSION, "got msg", Msg}),
{Func,Ret} = case Msg of
%% Local calls
{Pid, local, F=exp1} ->
{F, local_exp1()};
{Pid, local, F=loc1} ->
{F, local_loc1()};
{Pid, local, F=exp1exp2} ->
{F, catch exp1exp2()};
{Pid, local, F=exp1loc2} ->
{F, catch exp1loc2()};
{Pid, local, F=loc1exp2} ->
{F, catch loc1exp2()};
{Pid, local, F=loc1loc2} ->
{F, catch loc1loc2()};
{Pid, local, F=exp2} ->
{F, local_exp2()};
{Pid, local, F=loc2} ->
{F, local_loc2()};
%% Extern calls to own module
{Pid, external, F=exp1} ->
{F, catch ?MODULE:exp1()};
{Pid, external, F=loc1} ->
{F, catch ?MODULE:loc1()};
{Pid, external, F=exp1exp2} ->
{F, catch ?MODULE:exp1exp2()};
{Pid, external, F=exp1loc2} ->
{F, catch ?MODULE:exp1loc2()};
{Pid, external, F=loc1exp2} ->
{F, catch ?MODULE:loc1exp2()};
{Pid, external, F=loc1loc2} ->
{F, catch ?MODULE:loc1loc2()};
{Pid, external, F=exp2} ->
{F, catch ?MODULE:exp2()};
{Pid, external, F=loc2} ->
{F, catch ?MODULE:loc2()};
%% External calls to other module
{Pid, other, F=exp1} ->
{F, catch other:exp1()};
{Pid, other, F=loc1} ->
{F, catch other:loc1()};
{Pid, other, F=exp1exp2} ->
{F, catch other:exp1exp2()};
{Pid, other, F=exp1loc2} ->
{F, catch other:exp1loc2()};
{Pid, other, F=loc1exp2} ->
{F, catch other:loc1exp2()};
{Pid, other, F=loc1loc2} ->
{F, catch other:loc1loc2()};
{Pid, other, F=exp2} ->
{F, catch other:exp2()};
{Pid, other, F=loc2} ->
{F, catch other:loc2()}
end,
Pid ! {self(), call_result, Func, Ret},
dispatch_loop() % A local call, we don't want to upgrade the dispatcher
end.
-ifdef(VERSION_1).
local_exp1() -> catch exp1().
local_loc1() -> catch loc1().
-else.
local_exp1() ->
%%erlang:display({"upgradee:local_exp1 in version", ?VERSION}),
{cannot_compile,?VERSION}.
local_loc1() -> {cannot_compile,?VERSION}.
-endif.
-ifdef(VERSION_2).
local_exp2() -> catch exp2().
local_loc2() -> catch loc2().
-else.
local_exp2() ->
%%erlang:display({"upgradee:local_exp2 in version", ?VERSION}),
{cannot_compile,?VERSION}.
local_loc2() ->
{cannot_compile,?VERSION}.
-endif.
<|start_filename|>lib/diameter/test/diameter_capx_SUITE.erl<|end_filename|>
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%% Tests of capabilities exchange between Diameter nodes. In
%% particular, of error and event handling.
%%
-module(diameter_capx_SUITE).
-export([suite/0,
all/0,
groups/0,
init_per_suite/1,
end_per_suite/1,
init_per_group/2,
end_per_group/2,
init_per_testcase/2,
end_per_testcase/2]).
%% testcases
-export([start/1,
vendor_id/1,
start_services/1,
add_listeners/1,
s_no_common_application/1,
c_no_common_application/1,
s_no_common_security/1,
c_no_common_security/1,
s_unknown_peer/1,
c_unknown_peer/1,
s_unable/1,
c_unable/1,
s_client_reject/1,
c_client_reject/1,
remove_listeners/1,
stop_services/1,
stop/1]).
%% diameter callbacks
-export([peer_up/4,
peer_down/4]).
-include("diameter.hrl").
-include("diameter_gen_base_rfc3588.hrl").
%% Use only the Vendor-Specific-Application-Id record from the base
%% include, to test the independence of capabilities configuration
%% from the different definitions of Vendor-Id in RFC's 3588 and RFC
%% 6733.
%% ===========================================================================
-define(util, diameter_util).
-define(CLIENT, client).
-define(SERVER, server).
-define(ADDR, {127,0,0,1}).
-define(REALM, "erlang.org").
-define(HOST(Name), Name ++ "." ++ ?REALM).
%% Application id's that are never agreed upon at capabilities
%% exchange. Testcase no_common_application references them in order
%% to exercise Vendor-Specific-Application-Id handling.
-define(NOAPPS, [1111, 2222, 3333, 4444]).
%% Config for diameter:start_service/2.
-define(SERVICE,
[{'Origin-Realm', ?REALM},
{'Host-IP-Address', [?ADDR]},
{'Vendor-Id', 12345},
{'Product-Name', "OTP/diameter"},
{'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]},
{'Acct-Application-Id', [?DIAMETER_APP_ID_ACCOUNTING]}
| [{application, [{alias, A},
{dictionary, D},
{module, [?MODULE, A]}]}
|| {A,D} <- [{base3588, diameter_gen_base_rfc3588},
{acct3588, diameter_gen_base_accounting},
{base6733, diameter_gen_base_rfc6733},
{acct6733, diameter_gen_acct_rfc6733}]]]
++ [{application, [{dictionary, dict(N)},
{module, not_really}]}
|| N <- ?NOAPPS]).
-define(A, list_to_atom).
-define(L, atom_to_list).
-define(event, #diameter_event).
-define(caps, #diameter_caps).
-define(packet, #diameter_packet).
-define(fail(T), erlang:error({T, process_info(self(), messages)})).
-define(TIMEOUT, 10000).
-define(DICTS, [rfc3588, rfc6733]).
%% ===========================================================================
suite() ->
[{timetrap, {seconds, 60}}].
all() -> [start,
vendor_id,
start_services,
add_listeners]
++ [{group, D, P} || D <- ?DICTS, P <- [[], [parallel]]]
++ [remove_listeners,
stop_services,
stop].
groups() ->
Tc = lists:flatmap(fun tc/1, tc()),
[{D, [], Tc} || D <- ?DICTS].
init_per_suite(Config) ->
lists:foreach(fun load_dict/1, ?NOAPPS),
Config.
end_per_suite(_Config) ->
[] = [Mod || N <- ?NOAPPS,
Mod <- [dict(N)],
false <- [code:delete(Mod)]],
ok.
%% Generate a unique hostname for each testcase so that watchdogs
%% don't prevent a connection from being brought up immediately.
init_per_testcase(Name, Config) ->
[{host, ?L(Name) ++ "." ++ diameter_util:unique_string()}
| Config].
init_per_group(Name, Config) ->
[{rfc, Name} | Config].
end_per_group(_, _) ->
ok.
end_per_testcase(N, _)
when N == start;
N == vendor_id;
N == start_services;
N == add_listeners;
N == remove_listeners;
N == stop_services;
N == stop ->
ok;
end_per_testcase(Name, Config) ->
CRef = ?util:read_priv(Config, Name),
ok = diameter:remove_transport(?CLIENT, CRef).
%% Testcases all come in two flavours, client and server.
tc(Name) ->
[?A([C,$_|?L(Name)]) || C <- "cs"].
tc() ->
[no_common_application,
no_common_security,
unknown_peer,
unable,
client_reject].
%% ===========================================================================
%% start/stop testcases
start(_Config) ->
ok = diameter:start().
%% Ensure that both integer and list-valued vendor id's can be
%% configured in a Vendor-Specific-Application-Id, the arity having
%% changed between RFC 3588 and RFC 6733.
vendor_id(_Config) ->
[] = ?util:run([[fun vid/1, V] || V <- [1, [1], [1,2], x]]).
vid(V) ->
RC = diameter:start_service(make_ref(),
[{'Vendor-Specific-Application-Id',
[[{'Vendor-Id', V}]]}
| ?SERVICE]),
vid(V, RC).
vid(x, {error, _}) ->
ok;
vid(_, ok) ->
ok.
start_services(_Config) ->
ok = diameter:start_service(?SERVER, ?SERVICE),
ok = diameter:start_service(?CLIENT, ?SERVICE).
%% One server that responds only to base accounting, one that responds
%% to both this and the common application. Share a common service just
%% to simplify config, and because we can.
add_listeners(Config) ->
Acct = [listen(?SERVER,
[{capabilities, [{'Origin-Host', ?HOST(H)},
{'Auth-Application-Id', []}]},
{applications, [A | noapps()]},
{capabilities_cb, [fun server_capx/3, acct]}])
|| {A,H} <- [{acct3588, "acct3588-srv"},
{acct6733, "acct6733-srv"}]],
Base = [listen(?SERVER,
[{capabilities, [{'Origin-Host', ?HOST(H)}]},
{applications, A ++ noapps()},
{capabilities_cb, [fun server_capx/3, base]}])
|| {A,H} <- [{[base3588, acct3588], "base3588-srv"},
{[base6733, acct6733], "base6733-srv"}]],
?util:write_priv(Config, ?MODULE, {Base, Acct}). %% lref/2 reads
remove_listeners(_Config) ->
ok = diameter:remove_transport(?SERVER, true).
stop_services(_Config) ->
ok = diameter:stop_service(?CLIENT),
ok = diameter:stop_service(?SERVER).
stop(_Config) ->
ok = diameter:stop().
%% ===========================================================================
%% All the testcases come in pairs, one for receiving an event on the
%% client side, one on the server side. Note that testcases will
%% receive events resulting from other testcases when running in
%% parallel since the events are per service. The unique client
%% Origin-Host for each testcase plus transport references are used to
%% ensure that only the relevant event is extracted from the mailbox.
%% Don't bother extracting events that aren't relevant.
%% ====================
%% Ask the accounting server to speak the common application and expect
%% DIAMETER_NO_COMMON_APPLICATION = 5010.
s_no_common_application(Config) ->
Vs = [[{'Vendor-Id', 111},
{'Auth-Application-Id', [1111]}],
#'diameter_base_Vendor-Specific-Application-Id'
{'Vendor-Id' = [222],
'Acct-Application-Id' = [2222]}],
server_closed(Config,
fun(C) -> no_common_application(C,Vs) end,
5010).
c_no_common_application(Config) ->
Vs = [#'diameter_base_Vendor-Specific-Application-Id'
{'Vendor-Id' = 333,
'Auth-Application-Id' = [3333]},
[{'Vendor-Id', [444]},
{'Acct-Application-Id', [4444]}]],
client_closed(Config,
"acct-srv",
fun(C) -> no_common_application(C,Vs) end,
5010).
no_common_application(Config, Vs) ->
[Common, _Acct] = apps(Config),
connect(Config,
acct,
[{capabilities, [{'Acct-Application-Id', []},
{'Vendor-Specific-Application-Id', Vs}]},
{applications, [Common | noapps()]}]).
%% ====================
%% Ask the base server to speak accounting with an unknown security
%% method and expect DIAMETER_NO_COMMON_SECURITY = 5017.
s_no_common_security(Config) ->
server_closed(Config, fun no_common_security/1, 5017).
c_no_common_security(Config) ->
client_closed(Config, "base-srv", fun no_common_security/1, 5017).
no_common_security(Config) ->
[Common, _Acct] = apps(Config),
connect(Config, base, [{capabilities, [{'Acct-Application-Id', []},
{'Inband-Security-Id', [17, 18]}]},
{applications, [Common]}]).
%% ====================
%% Have the base server reject a decent CER with the protocol error
%% DIAMETER_UNKNOWN_PEER = 3010.
s_unknown_peer(Config) ->
server_reject(Config, fun base/1, 3010).
c_unknown_peer(Config) ->
Dict0 = dict0(Config),
true = diameter:subscribe(?CLIENT),
OH = host(Config, "base-srv"),
{CRef, _} = base(Config),
{'CEA', ?caps{}, ?packet{msg = Msg}} = client_recv(CRef),
['diameter_base_answer-message' | _] = Dict0:'#get-'(Msg),
[OH, 3010] = Dict0:'#get-'(['Origin-Host', 'Result-Code'], Msg).
base(Config) ->
connect(Config, base, [{applications, apps(Config)}]).
%% ====================
%% Have the base server reject a decent CER with the non-protocol
%% error DIAMETER_UNABLE_TO_COMPLY = 5012.
s_unable(Config) ->
server_reject(Config, fun base/1, 5012).
c_unable(Config) ->
client_closed(Config, "base-srv", fun base/1, 5012).
%% ====================
%% Have the client reject a decent CEA.
s_client_reject(Config) ->
true = diameter:subscribe(?SERVER),
OH = host(Config),
{_, LRef} = client_reject(Config),
receive
?event{service = ?SERVER,
info = {up, LRef,
{_, ?caps{origin_host = {_, OH}}},
{listen, _},
?packet{}}}
= Info ->
Info
after ?TIMEOUT ->
?fail({LRef, OH})
end.
c_client_reject(Config) ->
Dict0 = dict0(Config),
true = diameter:subscribe(?CLIENT),
OH = host(Config, "acct-srv"),
{CRef, _} = client_reject(Config),
{'CEA', {capabilities_cb, _, discard},
?caps{origin_host = {_, OH}},
?packet{msg = CEA}}
= client_recv(CRef),
[diameter_base_CEA | _] = Dict0:'#get-'(CEA),
[2001] = Dict0:'#get-'(['Result-Code'], CEA).
client_reject(Config) ->
connect(Config, acct, [{capabilities_cb, fun client_capx/2},
{applications, apps(Config)}]).
%% ===========================================================================
noapps() ->
lists:map(fun dict/1, ?NOAPPS).
dict(N) ->
?A(?L(?MODULE) ++ "_" ++ integer_to_list(N)).
%% Compile and load minimal dictionary modules. These actually have to
%% exist since diameter will call their id/0 to extract application
%% ids, failing with app_not_configured if it can't.
load_dict(N) ->
Mod = dict(N),
A1 = erl_anno:new(1),
A2 = erl_anno:new(2),
A3 = erl_anno:new(3),
A4 = erl_anno:new(4),
Forms = [{attribute, A1, module, Mod},
{attribute, A2, compile, [export_all]},
{function, A3, id, 0,
[{clause, A4, [], [], [{integer, A4, N}]}]}],
{ok, Mod, Bin, []} = compile:forms(Forms, [return]),
{module, Mod} = code:load_binary(Mod, Mod, Bin),
N = Mod:id().
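%% For illustration, load_dict(7) compiles and loads a module roughly
%% equivalent to
%%
%%   -module(<this module>_7).   %% name produced by dict/1 above
%%   -compile(export_all).
%%   id() -> 7.
%%
%% which is all that diameter's id/0 lookup needs here.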
%% server_closed/3
server_closed(Config, F, RC) ->
true = diameter:subscribe(?SERVER),
OH = host(Config),
{_, LRef} = F(Config),
receive
?event{service = ?SERVER,
info = {closed, LRef,
{'CER', RC,
?caps{origin_host = {_, OH}},
?packet{}}
= Reason,
{listen, _}}} ->
Reason
after ?TIMEOUT ->
?fail({LRef, OH})
end.
%% server_reject/3
server_reject(Config, F, RC) ->
true = diameter:subscribe(?SERVER),
OH = host(Config),
{_, LRef} = F(Config),
receive
?event{service = ?SERVER,
info = {closed, LRef,
{'CER', {capabilities_cb, _, RC},
?caps{origin_host = {_, OH}},
?packet{}}
= Reason,
{listen, _}}} ->
Reason
after ?TIMEOUT ->
?fail({LRef, OH})
end.
%% client_closed/4
client_closed(Config, Host, F, RC) ->
true = diameter:subscribe(?CLIENT),
OH = host(Config, Host),
{CRef, _} = F(Config),
{'CEA', RC, ?caps{origin_host = {_, OH}}, ?packet{}}
= client_recv(CRef).
srv(Config, Host) ->
"rfc" ++ N = atom_to_list(proplists:get_value(rfc, Config)),
[H, "srv" = S] = string:tokens(Host, "-"),
H ++ N ++ "-" ++ S.
host(Config, Name) ->
?HOST(srv(Config, Name)).
%% client_recv/1
client_recv(CRef) ->
receive
?event{service = ?CLIENT,
info = {closed, CRef, Reason, {connect, _}}} ->
Reason
after ?TIMEOUT ->
?fail(CRef)
end.
%% server_capx/3
server_capx(_, ?caps{origin_host = {_, [_,$_|"unknown_peer." ++ _]}}, _) ->
unknown;
server_capx(_, ?caps{origin_host = {_, [_,$_|"unable." ++ _]}}, _) ->
5012; %% DIAMETER_UNABLE_TO_COMPLY
server_capx(_, ?caps{origin_host = {OH,DH}}, _) ->
io:format("connection: ~p -> ~p~n", [DH,OH]),
ok.
%% client_capx/2
client_capx(_, ?caps{origin_host = {[_,$_|"client_reject." ++ _], _}}) ->
discard.
%% ===========================================================================
dict0(Config) ->
case proplists:get_value(rfc, Config) of
rfc3588 -> diameter_gen_base_rfc3588;
rfc6733 -> diameter_gen_base_rfc6733
end.
apps(Config) ->
case proplists:get_value(rfc, Config) of
rfc3588 -> [base3588, acct3588];
rfc6733 -> [base6733, acct6733]
end.
host(Config) ->
{_, H} = lists:keyfind(host, 1, Config),
?HOST(H).
listen(Name, Opts) ->
?util:listen(Name, tcp, Opts).
connect(Config, T, Opts) ->
{_, H} = lists:keyfind(host, 1, Config),
LRef = lref(Config, T),
CRef = connect(LRef, [{capabilities, [{'Origin-Host', ?HOST(H)}]}
| Opts]),
Name = lists:takewhile(fun(C) -> C /= $. end, H),
?util:write_priv(Config, Name, CRef), %% end_per_testcase reads
{CRef, LRef}.
connect(LRef, Opts) ->
[PortNr] = ?util:lport(tcp, LRef),
{ok, CRef} = diameter:add_transport(?CLIENT,
{connect, opts(PortNr, Opts)}),
CRef.
opts(PortNr, Opts) ->
[{transport_module, diameter_tcp},
{transport_config, [{raddr, ?ADDR},
{rport, PortNr},
{ip, ?ADDR},
{port, 0}]}
| Opts].
lref(rfc3588, [LRef, _]) ->
LRef;
lref(rfc6733, [_, LRef]) ->
LRef;
lref(Config, T) ->
lref(proplists:get_value(rfc, Config),
case ?util:read_priv(Config, ?MODULE) of
{R, _} when T == base ->
R;
{_, R} when T == acct ->
R
end).
%% ===========================================================================
%% diameter callbacks
peer_up(?SERVER,
{_, ?caps{origin_host = {"acct" ++ _,
[_,$_|"client_reject." ++ _]}}},
State,
_) ->
State.
peer_down(?SERVER,
{_, ?caps{origin_host = {"acct" ++ _,
[_,$_|"client_reject." ++ _]}}},
State,
_) ->
State.
<|start_filename|>lib/diameter/include/diameter_gen.hrl<|end_filename|>
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%% This file contains code that's included by encode/decode modules
%% generated by diameter_codegen.erl. This code does most of the work, the
%% generated code being kept simple.
%%
-define(THROW(T), throw({?MODULE, T})).
%% Tag common to generated dictionaries.
-define(TAG, diameter_gen).
%% Key to a value in the process dictionary that determines whether or
%% not an unrecognized AVP setting the M-bit should be regarded as an
%% error. See is_strict/0. This is only used to relax M-bit
%% interpretation inside Grouped AVPs not setting the M-bit. The
%% service_opt() strict_mbit can be used to disable the check
%% globally.
-define(STRICT_KEY, strict).
%% Key that says whether or not we should do a best-effort decode
%% within Failed-AVP.
-define(FAILED_KEY, failed).
-type parent_name() :: atom(). %% parent = Message or AVP
-type parent_record() :: tuple(). %%
-type avp_name() :: atom().
-type avp_record() :: tuple().
-type avp_values() :: [{avp_name(), term()}].
-type non_grouped_avp() :: #diameter_avp{}.
-type grouped_avp() :: nonempty_improper_list(#diameter_avp{}, [avp()]).
-type avp() :: non_grouped_avp() | grouped_avp().
%% Use a (hopefully) unique key when manipulating the process
%% dictionary.
putr(K,V) ->
put({?TAG, K}, V).
getr(K) ->
case get({?TAG, K}) of
undefined ->
V = erase({?MODULE, K}), %% written in old code
V == undefined orelse putr(K,V),
V;
V ->
V
end.
eraser(K) ->
erase({?TAG, K}).
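%% For example, putr(?STRICT_KEY, false) stores the value under the
%% key {diameter_gen, strict}, keeping these entries separate from any
%% other use of the process dictionary.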
%% ---------------------------------------------------------------------------
%% # encode_avps/2
%% ---------------------------------------------------------------------------
-spec encode_avps(parent_name(), parent_record() | avp_values())
-> binary()
| no_return().
encode_avps(Name, Vals)
when is_list(Vals) ->
encode_avps(Name, '#set-'(Vals, newrec(Name)));
encode_avps(Name, Rec) ->
try
list_to_binary(encode(Name, Rec))
catch
throw: {?MODULE, Reason} ->
diameter_lib:log({encode, error},
?MODULE,
?LINE,
{Reason, Name, Rec}),
erlang:error(list_to_tuple(Reason ++ [Name]));
error: Reason ->
Stack = erlang:get_stacktrace(),
diameter_lib:log({encode, failure},
?MODULE,
?LINE,
{Reason, Name, Rec, Stack}),
erlang:error({encode_failure, Reason, Name, Stack})
end.
%% encode/2
encode(Name, Rec) ->
lists:flatmap(fun(A) -> encode(Name, A, '#get-'(A, Rec)) end,
'#info-'(element(1, Rec), fields)).
%% encode/3
encode(Name, AvpName, Values) ->
e(Name, AvpName, avp_arity(Name, AvpName), Values).
%% e/4
e(_, AvpName, 1, undefined) ->
?THROW([mandatory_avp_missing, AvpName]);
e(Name, AvpName, 1, Value) ->
e(Name, AvpName, [Value]);
e(_, _, {0,_}, []) ->
[];
e(_, AvpName, _, T)
when not is_list(T) ->
?THROW([repeated_avp_as_non_list, AvpName, T]);
e(_, AvpName, {Min, _}, L)
when length(L) < Min ->
?THROW([repeated_avp_insufficient_arity, AvpName, Min, L]);
e(_, AvpName, {_, Max}, L)
when Max < length(L) ->
?THROW([repeated_avp_excessive_arity, AvpName, Max, L]);
e(Name, AvpName, _, Values) ->
e(Name, AvpName, Values).
%% e/3
e(Name, 'AVP', Values) ->
[pack_AVP(Name, A) || A <- Values];
e(_, AvpName, Values) ->
e(AvpName, Values).
%% e/2
e(AvpName, Values) ->
H = avp_header(AvpName),
[diameter_codec:pack_avp(H, avp(encode, V, AvpName)) || V <- Values].
%% pack_AVP/2
%% No value: assume AVP data is already encoded. The normal case will
%% be when this is passed back from #diameter_packet.errors as a
%% consequence of a failed decode. Any AVP can be encoded this way
%% however, which side-steps any arity checks for known AVP's and
%% could potentially encode something unfortunate.
pack_AVP(_, #diameter_avp{value = undefined} = A) ->
diameter_codec:pack_avp(A);
%% Missing name for value encode.
pack_AVP(_, #diameter_avp{name = N, value = V})
when N == undefined;
N == 'AVP' ->
?THROW([value_with_nameless_avp, N, V]);
%% Or not. Ensure that 'AVP' is the appropriate field. Note that if we
%% don't know this AVP at all then the encode will fail.
pack_AVP(Name, #diameter_avp{name = AvpName,
value = Data}) ->
0 == avp_arity(Name, AvpName)
orelse ?THROW([known_avp_as_AVP, Name, AvpName, Data]),
e(AvpName, [Data]).
%% ---------------------------------------------------------------------------
%% # decode_avps/2
%% ---------------------------------------------------------------------------
-spec decode_avps(parent_name(), [#diameter_avp{}])
-> {parent_record(), [avp()], Failed}
when Failed :: [{5000..5999, #diameter_avp{}}].
decode_avps(Name, Recs) ->
{Avps, {Rec, Failed}}
= lists:foldl(fun(T,A) -> decode(Name, T, A) end,
{[], {newrec(Name), []}},
Recs),
{Rec, Avps, Failed ++ missing(Rec, Name, Failed)}.
%% Append 5005 errors so that errors are reported in the order
%% encountered. Failed-AVP should typically contain the first
%% encountered error according to the RFC.
newrec(Name) ->
'#new-'(name2rec(Name)).
%% 3588:
%%
%% DIAMETER_MISSING_AVP 5005
%% The request did not contain an AVP that is required by the Command
%% Code definition. If this value is sent in the Result-Code AVP, a
%% Failed-AVP AVP SHOULD be included in the message. The Failed-AVP
%% AVP MUST contain an example of the missing AVP complete with the
%% Vendor-Id if applicable. The value field of the missing AVP
%% should be of correct minimum length and contain zeros.
missing(Rec, Name, Failed) ->
Avps = lists:foldl(fun({_, #diameter_avp{code = C, vendor_id = V}}, A) ->
sets:add_element({C,V}, A)
end,
sets:new(),
Failed),
[{5005, A} || F <- '#info-'(element(1, Rec), fields),
not has_arity(avp_arity(Name, F), '#get-'(F, Rec)),
#diameter_avp{code = C, vendor_id = V}
= A <- [empty_avp(F)],
not sets:is_element({C,V}, Avps)].
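%% In other words: one 5005 error is generated for each field whose
%% minimum arity isn't satisfied by the decoded record, unless an
%% error for the same {Code, Vendor-Id} is already present in Failed.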
%% Maximum arities have already been checked in building the record.
has_arity({Min, _}, L) ->
has_prefix(Min, L);
has_arity(N, V) ->
N /= 1 orelse V /= undefined.
%% Compare a non-negative integer and the length of a list without
%% computing the length.
has_prefix(0, _) ->
true;
has_prefix(_, []) ->
false;
has_prefix(N, L) ->
has_prefix(N-1, tl(L)).
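%% For example, has_prefix(2, [a,b,c]) is true and has_prefix(2, [a])
%% is false; at most N elements of the list are traversed.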
%% empty_avp/1
empty_avp(Name) ->
{Code, Flags, VId} = avp_header(Name),
{Name, Type} = avp_name(Code, VId),
#diameter_avp{name = Name,
code = Code,
vendor_id = VId,
is_mandatory = 0 /= (Flags band 2#01000000),
need_encryption = 0 /= (Flags band 2#00100000),
data = empty_value(Name),
type = Type}.
%% 3588, ch 7:
%%
%% The Result-Code AVP describes the error that the Diameter node
%% encountered in its processing. In case there are multiple errors,
%% the Diameter node MUST report only the first error it encountered
%% (detected possibly in some implementation dependent order). The
%% specific errors that can be described by this AVP are described in
%% the following section.
%% decode/3
decode(Name, #diameter_avp{code = Code, vendor_id = Vid} = Avp, Acc) ->
decode(Name, avp_name(Code, Vid), Avp, Acc).
%% decode/4
%% AVP is defined in the dictionary ...
decode(Name, {AvpName, Type}, Avp, Acc) ->
d(Name, Avp#diameter_avp{name = AvpName, type = Type}, Acc);
%% ... or not.
decode(Name, 'AVP', Avp, Acc) ->
decode_AVP(Name, Avp, Acc).
%% 6733, 4.4:
%%
%% Receivers of a Grouped AVP that does not have the 'M' (mandatory)
%% bit set and one or more of the encapsulated AVPs within the group
%% has the 'M' (mandatory) bit set MAY simply be ignored if the
%% Grouped AVP itself is unrecognized. The rule applies even if the
%% encapsulated AVP with its 'M' (mandatory) bit set is further
%% encapsulated within other sub-groups, i.e., other Grouped AVPs
%% embedded within the Grouped AVP.
%%
%% The first sentence is slightly mangled, but take it to mean this:
%%
%% An unrecognized AVP of type Grouped that does not set the 'M' bit
%% MAY be ignored even if one of its encapsulated AVPs sets the 'M'
%% bit.
%%
%% The text above is a change from RFC 3588, which instead says this:
%%
%% Further, if any of the AVPs encapsulated within a Grouped AVP has
%% the 'M' (mandatory) bit set, the Grouped AVP itself MUST also
%% include the 'M' bit set.
%%
%% Both of these texts have problems. If the AVP is unknown then its
%% type is unknown since the type isn't sent over the wire, so the
%% 6733 text becomes a non-statement: don't know that the AVP not
%% setting the M-bit is of type Grouped, therefore can't know that its
%% data consists of encapsulated AVPs, therefore can't but ignore that
%% one of these might set the M-bit. It should be no worse if we know
%% the AVP to have type Grouped.
%%
%% Similarly, for the 3588 text: if we receive an AVP that doesn't set
%% the M-bit and don't know that the AVP has type Grouped then we
%% can't realize that its data contains an AVP that sets the M-bit, so
%% can't regard the AVP as erroneous on this account. Again, it should
%% be no worse if the type is known to be Grouped, but in this case
%% the RFC forces us to regard the AVP as erroneous. This is
%% inconsistent, and the 3588 text has never been enforced.
%%
%% So, if an AVP doesn't set the M-bit then we're free to ignore it,
%% regardless of the AVP's type. If we know the type to be Grouped
%% then we must ignore the M-bit on an encapsulated AVP. That means
%% packing such an encapsulated AVP into an 'AVP' field if need be,
%% not regarding the lack of a specific field as an error as is
%% otherwise the case. (The lack of an AVP-specific field being how we
%% defined the RFC's "unrecognized", which is slightly stronger than
%% "not defined".)
%% d/3
d(Name, Avp, Acc) ->
#diameter_avp{name = AvpName,
data = Data,
type = Type,
is_mandatory = M}
= Avp,
%% Use the process dictionary to keep track of whether or not
%% to ignore an M-bit on an encapsulated AVP. Not ideal, but the
%% alternative requires widespread changes to be able to pass the
%% value around through the entire decode. The solution here is
%% simple in comparison, both to implement and to understand.
Strict = relax(Type, M),
%% Use the process dictionary again to keep track of whether we're
%% decoding within Failed-AVP and should ignore decode errors
%% altogether.
Failed = relax(Name), %% Not AvpName or else a failed Failed-AVP
%% decode is packed into 'AVP'.
Mod = dict(Failed), %% Dictionary to decode in.
%% On decode, a Grouped AVP is represented as a #diameter_avp{}
%% list with AVP as head and component AVPs as tail. On encode,
%% data can be a list of component AVPs.
try Mod:avp(decode, Data, AvpName) of
V ->
{Avps, T} = Acc,
{H, A} = ungroup(V, Avp),
{[H | Avps], pack_avp(Name, A, T)}
catch
throw: {?TAG, {grouped, Error, ComponentAvps}} ->
g(is_failed(), Error, Name, trim(Avp), Acc, ComponentAvps);
error: Reason ->
d(is_failed(), Reason, Name, trim(Avp), Acc)
after
reset(?STRICT_KEY, Strict),
reset(?FAILED_KEY, Failed)
end.
%% trim/1
%%
%% Remove any extra bit that was added in diameter_codec to induce a
%% 5014 error.
trim(#diameter_avp{data = <<0:1, Bin/binary>>} = Avp) ->
Avp#diameter_avp{data = Bin};
trim(Avps)
when is_list(Avps) ->
lists:map(fun trim/1, Avps);
trim(Avp) ->
Avp.
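%% For example, an AVP whose data was prefixed as <<0:1, "payload">> by
%% diameter_codec comes back from trim/1 with data <<"payload">>.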
%% dict/1
%%
%% Retrieve the dictionary for the best-effort decode of Failed-AVP,
%% as put by diameter_codec:decode/2. See that function for the
%% explanation.
dict(true) ->
case get({diameter_codec, dictionary}) of
undefined ->
?MODULE;
Mod ->
Mod
end;
dict(_) ->
?MODULE.
%% g/5
%% Ignore decode errors within Failed-AVP (best-effort) ...
g(true, [_Error | Rec], Name, Avp, Acc, _ComponentAvps) ->
decode_AVP(Name, Avp#diameter_avp{value = Rec}, Acc);
g(true, _Error, Name, Avp, Acc, _ComponentAvps) ->
decode_AVP(Name, Avp, Acc);
%% ... or not.
g(false, [Error | _Rec], _Name, Avp, Acc, ComponentAvps) ->
g(Error, Avp, Acc, ComponentAvps);
g(false, Error, _Name, Avp, Acc, ComponentAvps) ->
g(Error, Avp, Acc, ComponentAvps).
%% g/4
g({RC, ErrorData}, Avp, Acc, ComponentAvps) ->
{Avps, {Rec, Errors}} = Acc,
E = Avp#diameter_avp{data = [ErrorData]},
{[[Avp | trim(ComponentAvps)] | Avps], {Rec, [{RC, E} | Errors]}}.
%% d/5
%% Ignore a decode error within Failed-AVP ...
d(true, _, Name, Avp, Acc) ->
decode_AVP(Name, Avp, Acc);
%% ... or not. Failures here won't be visible since they're a "normal"
%% occurrence if the peer sends a faulty AVP that we need to respond
%% sensibly to. Log the occurrence for traceability, but the peer will
%% also receive info in the resulting answer message.
d(false, Reason, Name, Avp, {Avps, Acc}) ->
Stack = diameter_lib:get_stacktrace(),
diameter_lib:log(decode_error,
?MODULE,
?LINE,
{Name, Avp#diameter_avp.name, Stack}),
{Rec, Failed} = Acc,
{[Avp|Avps], {Rec, [rc(Reason, Avp) | Failed]}}.
%% relax/2
%% Set false in the process dictionary as soon as we see a Grouped AVP
%% that doesn't set the M-bit, so that is_strict() can say whether or
%% not to ignore the M-bit on an encapsulated AVP.
relax('Grouped', M) ->
case getr(?STRICT_KEY) of
undefined when not M ->
putr(?STRICT_KEY, M);
_ ->
false
end;
relax(_, _) ->
false.
is_strict() ->
diameter_codec:getopt(strict_mbit)
andalso false /= getr(?STRICT_KEY).
%% relax/1
%%
%% Set true in the process dictionary as soon as we see Failed-AVP.
%% Matching on 'Failed-AVP' assumes that this is the RFC AVP.
%% Strictly, this doesn't need to be the case.
relax('Failed-AVP') ->
putr(?FAILED_KEY, true);
relax(_) ->
is_failed().
%% is_failed/0
%%
%% Is the AVP currently being decoded nested within Failed-AVP? Note
%% that this is only true when Failed-AVP is the parent. In
%% particular, it's not true when Failed-AVP itself is being decoded
%% (unless nested).
is_failed() ->
true == getr(?FAILED_KEY).
%% is_failed/1
is_failed(Name) ->
'Failed-AVP' == Name orelse is_failed().
%% reset/2
reset(Key, undefined) ->
eraser(Key);
reset(_, _) ->
ok.
%% decode_AVP/3
%%
%% Don't know this AVP: see if it can be packed in an 'AVP' field
%% undecoded. Note that the type field is 'undefined' in this case.
decode_AVP(Name, Avp, {Avps, Acc}) ->
{[trim(Avp) | Avps], pack_AVP(Name, Avp, Acc)}.
%% rc/1
%% diameter_types will raise an error of this form to communicate
%% DIAMETER_INVALID_AVP_LENGTH (5014). A module specified to a
%% @custom_types tag in a dictionary file can also raise an error of
%% this form.
rc({'DIAMETER', 5014 = RC, _}, #diameter_avp{name = AvpName} = Avp) ->
{RC, Avp#diameter_avp{data = empty_value(AvpName)}};
%% 3588:
%%
%% DIAMETER_INVALID_AVP_VALUE 5004
%% The request contained an AVP with an invalid value in its data
%% portion. A Diameter message indicating this error MUST include
%% the offending AVPs within a Failed-AVP AVP.
rc(_, Avp) ->
{5004, Avp}.
%% ungroup/2
-spec ungroup(term(), #diameter_avp{})
-> {avp(), #diameter_avp{}}.
%% The decoded value in the Grouped case is as returned by grouped_avp/3:
%% a record and a list of component AVP's.
ungroup(V, #diameter_avp{type = 'Grouped'} = Avp) ->
{Rec, As} = V,
A = Avp#diameter_avp{value = Rec},
{[A|As], A};
%% Otherwise it's just a plain value.
ungroup(V, #diameter_avp{} = Avp) ->
A = Avp#diameter_avp{value = V},
{A, A}.
%% pack_avp/3
pack_avp(Name, #diameter_avp{name = AvpName} = Avp, Acc) ->
pack_avp(Name, avp_arity(Name, AvpName), Avp, Acc).
%% pack_avp/4
pack_avp(Name, 0, Avp, Acc) ->
pack_AVP(Name, Avp, Acc);
pack_avp(_, Arity, Avp, Acc) ->
pack(Arity, Avp#diameter_avp.name, Avp, Acc).
%% pack_AVP/3
%% Length failure was induced because of a header/payload length
%% mismatch. The AVP Length is reset to match the received data if
%% this AVP is encoded in an answer message, since the length is
%% computed.
%%
%% Data is a truncated header if command_code = undefined, otherwise
%% payload bytes. The former is padded to the length of a header if
%% the AVP reaches an outgoing encode in diameter_codec.
%%
%% RFC 6733 says that an AVP returned with 5014 can contain a minimal
%% payload for the AVP's type, but in this case we don't know the
%% type.
pack_AVP(_, #diameter_avp{data = <<0:1, Data/binary>>} = Avp, Acc) ->
{Rec, Failed} = Acc,
{Rec, [{5014, Avp#diameter_avp{data = Data}} | Failed]};
pack_AVP(Name, #diameter_avp{is_mandatory = M, name = AvpName} = Avp, Acc) ->
case pack_arity(Name, AvpName, M) of
0 ->
{Rec, Failed} = Acc,
{Rec, [{if M -> 5001; true -> 5008 end, Avp} | Failed]};
Arity ->
pack(Arity, 'AVP', Avp, Acc)
end.
%% Give Failed-AVP special treatment since (1) it'll contain any
%% unrecognized mandatory AVP's and (2) the RFC 3588 grammar failed to
%% allow for Failed-AVP in an answer-message.
pack_arity(Name, AvpName, M) ->
%% Not testing just Name /= 'Failed-AVP' means we're changing the
%% packing of AVPs nested within Failed-AVP, but the point of
%% ignoring errors within Failed-AVP is to decode as much as
%% possible, and failing because a mandatory AVP couldn't be
%% packed into a dedicated field defeats that point. Note
%% is_failed/1 since is_failed/0 will return false when packing
%% 'AVP' within Failed-AVP.
pack_arity(is_failed(Name)
orelse {Name, AvpName} == {'answer-message', 'Failed-AVP'}
orelse not M
orelse not is_strict(),
Name).
pack_arity(true, Name) ->
avp_arity(Name, 'AVP');
pack_arity(false, _) ->
0.
%% 3588:
%%
%% DIAMETER_AVP_UNSUPPORTED 5001
%% The peer received a message that contained an AVP that is not
%% recognized or supported and was marked with the Mandatory bit. A
%% Diameter message with this error MUST contain one or more Failed-
%% AVP AVP containing the AVPs that caused the failure.
%%
%% DIAMETER_AVP_NOT_ALLOWED 5008
%% A message was received with an AVP that MUST NOT be present. The
%% Failed-AVP AVP MUST be included and contain a copy of the
%% offending AVP.
%% pack/4
pack(Arity, FieldName, Avp, {Rec, _} = Acc) ->
pack('#get-'(FieldName, Rec), Arity, FieldName, Avp, Acc).
%% pack/5
pack(undefined, 1, FieldName, Avp, Acc) ->
p(FieldName, fun(V) -> V end, Avp, Acc);
%% 3588:
%%
%% DIAMETER_AVP_OCCURS_TOO_MANY_TIMES 5009
%% A message was received that included an AVP that appeared more
%% often than permitted in the message definition. The Failed-AVP
%% AVP MUST be included and contain a copy of the first instance of
%% the offending AVP that exceeded the maximum number of occurrences
%%
pack(_, 1, _, Avp, {Rec, Failed}) ->
{Rec, [{5009, Avp} | Failed]};
pack(L, {_, Max}, FieldName, Avp, Acc) ->
case '*' /= Max andalso has_prefix(Max, L) of
true ->
{Rec, Failed} = Acc,
{Rec, [{5009, Avp} | Failed]};
false ->
p(FieldName, fun(V) -> [V|L] end, Avp, Acc)
end.
%% p/4
p(F, Fun, Avp, {Rec, Failed}) ->
{'#set-'({F, Fun(value(F, Avp))}, Rec), Failed}.
value('AVP', Avp) ->
Avp;
value(_, Avp) ->
Avp#diameter_avp.value.
%% ---------------------------------------------------------------------------
%% # grouped_avp/3
%% ---------------------------------------------------------------------------
-spec grouped_avp(decode, avp_name(), bitstring())
-> {avp_record(), [avp()]};
(encode, avp_name(), avp_record() | avp_values())
-> binary()
| no_return().
%% Length error induced by diameter_codec:collect_avps/1: the AVP
%% length in the header was too short (insufficient for the extracted
%% header) or too long (past the end of the message). An empty payload
%% is sufficient according to the RFC text for 5014.
grouped_avp(decode, _Name, <<0:1, _/binary>>) ->
throw({?TAG, {grouped, {5014, []}, []}});
grouped_avp(decode, Name, Data) ->
grouped_decode(Name, diameter_codec:collect_avps(Data));
grouped_avp(encode, Name, Data) ->
encode_avps(Name, Data).
%% grouped_decode/2
%%
%% Note that Grouped is the only AVP type that doesn't just return a
%% decoded value but also returns the list of component diameter_avp
%% records.
%% Length error in trailing component AVP.
grouped_decode(_Name, {Error, Acc}) ->
{5014, Avp} = Error,
throw({?TAG, {grouped, Error, [Avp | Acc]}});
%% 7.5. Failed-AVP AVP
%% In the case where the offending AVP is embedded within a Grouped AVP,
%% the Failed-AVP MAY contain the grouped AVP, which in turn contains
%% the single offending AVP. The same method MAY be employed if the
%% grouped AVP itself is embedded in yet another grouped AVP and so on.
%% In this case, the Failed-AVP MAY contain the grouped AVP hierarchy up
%% to the single offending AVP. This enables the recipient to detect
%% the location of the offending AVP when embedded in a group.
%% An error in decoding a component AVP throws the first faulty
%% component, which the catch in d/3 wraps in the Grouped AVP in
%% question. A partially decoded record is only used when ignoring
%% errors in Failed-AVP.
grouped_decode(Name, ComponentAvps) ->
{Rec, Avps, Es} = decode_avps(Name, ComponentAvps),
[] == Es orelse throw({?TAG, {grouped, [{_,_} = hd(Es) | Rec], Avps}}),
{Rec, Avps}.
%% ---------------------------------------------------------------------------
%% # empty_group/1
%% ---------------------------------------------------------------------------
empty_group(Name) ->
list_to_binary(empty_body(Name)).
empty_body(Name) ->
[z(F, avp_arity(Name, F)) || F <- '#info-'(name2rec(Name), fields)].
z(Name, 1) ->
z(Name);
z(_, {0,_}) ->
[];
z(Name, {Min, _}) ->
lists:duplicate(Min, z(Name)).
z('AVP') ->
<<0:64/integer>>; %% minimal header
z(Name) ->
Bin = diameter_codec:pack_avp(avp_header(Name), empty_value(Name)),
<< <<0>> || <<_>> <= Bin >>.
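%% The comprehension above just zero-fills: for a 12-byte encoded AVP
%% it yields <<0:96>>, a binary of the same size containing only zeros.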
%% ---------------------------------------------------------------------------
%% # empty/1
%% ---------------------------------------------------------------------------
empty(AvpName) ->
avp(encode, zero, AvpName).
<|start_filename|>lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl<|end_filename|>
%% ``Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%%
%% $Id: mod_responsecontrol.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
%%
-module(mod_responsecontrol).
-export([do/1]).
-include("httpd.hrl").
do(Info) ->
?DEBUG("do -> response_control",[]),
case httpd_util:key1search(Info#mod.data,status) of
%% A status code has been generated!
{StatusCode,PhraseArgs,Reason} ->
{proceed,Info#mod.data};
%% No status code has been generated!
undefined ->
case httpd_util:key1search(Info#mod.data,response) of
%% No response has been generated!
undefined ->
case do_responsecontrol(Info) of
continue ->
{proceed,Info#mod.data};
Response ->
{proceed,[Response|Info#mod.data]}
end;
%% A response has been generated or sent!
Response ->
{proceed,Info#mod.data}
end
end.
%%----------------------------------------------------------------------
%%Control that the request header did not contain any limitations
%%on whether a response shall be created or not
%%----------------------------------------------------------------------
do_responsecontrol(Info) ->
?DEBUG("do_response_control -> Request URI: ~p",[Info#mod.request_uri]),
Path = mod_alias:path(Info#mod.data, Info#mod.config_db,
Info#mod.request_uri),
case file:read_file_info(Path) of
{ok, FileInfo} ->
control(Path,Info,FileInfo);
_ ->
%% The requested asset is not a plain file, so it must
%% be generated every time it is requested
continue
end.
%%----------------------------------------------------------------------
%%Control the If-Match, If-None-Match, and If-Modified-Since
%%----------------------------------------------------------------------
%% If a client sends more than one of the If-XXXX fields in a request,
%% the standard does not specify the behaviour, so I specified it :-)
%% The priority between the fields is
%% 1. If-Modified-Since
%% 2. If-Unmodified-Since
%% 3. If-Match
%% 4. If-None-Match
%% This means that if more than one of the fields is present in the
%% request, the field with the highest priority will be used.
%% If the request is a range request, the If-Range field will be the winner.
control(Path,Info,FileInfo)->
case control_range(Path,Info,FileInfo) of
undefined ->
case control_Etag(Path,Info,FileInfo) of
undefined ->
case control_modification(Path,Info,FileInfo) of
continue ->
continue;
ReturnValue ->
send_return_value(ReturnValue,FileInfo)
end;
continue ->
continue;
ReturnValue ->
send_return_value(ReturnValue,FileInfo)
end;
Response->
Response
end.
%%----------------------------------------------------------------------
%%If there are both a Range and an If-Range field, control whether
%%the If-Range condition is fulfilled
%%----------------------------------------------------------------------
control_range(Path,Info,FileInfo) ->
case httpd_util:key1search(Info#mod.parsed_header,"range") of
undefined->
undefined;
_Range ->
case httpd_util:key1search(Info#mod.parsed_header,"if-range") of
undefined ->
undefined;
EtagOrDate ->
control_if_range(Path,Info,FileInfo,EtagOrDate)
end
end.
control_if_range(Path,Info,FileInfo,EtagOrDate) ->
case httpd_util:convert_request_date(strip_date(EtagOrDate)) of
bad_date ->
FileEtag=httpd_util:create_etag(FileInfo),
case FileEtag of
EtagOrDate ->
continue;
_ ->
{if_range,send_file}
end;
ErlDate ->
%%The If-Range value is a date; control whether the file has been
%%modified since that date
case control_modification_data(Info,FileInfo#file_info.mtime,"if-range") of
modified ->
{if_range,send_file};
_UnmodifiedOrUndefined->
continue
end
end.
%%----------------------------------------------------------------------
%%Controls the values of the If-Match and If-None-Match header fields
%%----------------------------------------------------------------------
control_Etag(Path,Info,FileInfo)->
FileEtag=httpd_util:create_etag(FileInfo),
%%Control if the Etag for the resource matches one of the Etags in
%%the If-Match header field
case control_match(Info,FileInfo,"if-match",FileEtag) of
nomatch ->
%%None of the Etags in the If-Match field matched the current
%%Etag for the resource; return a 412
{412,Info,Path};
match ->
continue;
undefined ->
case control_match(Info,FileInfo,"if-none-match",FileEtag) of
nomatch ->
continue;
match ->
case Info#mod.method of
"GET" ->
{304,Info,Path};
"HEAD" ->
{304,Info,Path};
_OtherrequestMethod ->
{412,Info,Path}
end;
undefined ->
undefined
end
end.
%%----------------------------------------------------------------------
%%Control if there are any Etags for HeaderField in the request; if so,
%%control if they match the Etag of the requested file
%%----------------------------------------------------------------------
control_match(Info,FileInfo,HeaderField,FileEtag)->
case split_etags(httpd_util:key1search(Info#mod.parsed_header,HeaderField)) of
undefined->
undefined;
Etags->
%%Control whether the match-anything star ("*") is present
case lists:member("*",Etags) of
true->
match;
false->
compare_etags(FileEtag,Etags)
end
end.
%%----------------------------------------------------------------------
%%Split the etags from the request
%%----------------------------------------------------------------------
split_etags(undefined)->
undefined;
split_etags(Tags) ->
string:tokens(Tags,", ").
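%% For example, split_etags("\"tag1\", \"tag2\"") gives
%% ["\"tag1\"", "\"tag2\""]; the header value is split on commas and
%% spaces, and any quotes are left in place.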
%%----------------------------------------------------------------------
%%Control if the etag for the file is in the list
%%----------------------------------------------------------------------
compare_etags(Tag,Etags) ->
case lists:member(Tag,Etags) of
true ->
match;
_ ->
nomatch
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% %%
%%Control if the file has been modified %%
%% %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%----------------------------------------------------------------------
%%Control the If-Modified-Since and If-Unmodified-Since header fields
%%----------------------------------------------------------------------
control_modification(Path,Info,FileInfo)->
?DEBUG("control_modification() -> entry",[]),
case control_modification_data(Info,FileInfo#file_info.mtime,"if-modified-since") of
modified->
continue;
unmodified->
{304,Info,Path};
undefined ->
case control_modification_data(Info,FileInfo#file_info.mtime,"if-unmodified-since") of
modified ->
{412,Info,Path};
_ContinueUndefined ->
continue
end
end.
%%----------------------------------------------------------------------
%%Controls the date from the http-request if-modified-since or
%%if-unmodified-since header against the modification date of the
%%file
%%----------------------------------------------------------------------
%%Info is the record about the request
%%ModificationTime is the time the file was last modified
%%HeaderField is the name of the header field to control
control_modification_data(Info,ModificationTime,HeaderField)->
case strip_date(httpd_util:key1search(Info#mod.parsed_header,HeaderField)) of
undefined->
undefined;
LastModified0 ->
LastModified=httpd_util:convert_request_date(LastModified0),
?DEBUG("control_modification_data() -> "
"~n Request-Field: ~s"
"~n FileLastModified: ~p"
"~n FieldValue: ~p",
[HeaderField,ModificationTime,LastModified]),
case LastModified of
bad_date ->
undefined;
_ ->
FileTime=calendar:datetime_to_gregorian_seconds(ModificationTime),
FieldTime=calendar:datetime_to_gregorian_seconds(LastModified),
if
FileTime=<FieldTime ->
?DEBUG("File unmodified~n", []),
unmodified;
FileTime>=FieldTime ->
?DEBUG("File modified~n", []),
modified
end
end
end.
%%----------------------------------------------------------------------
%%Compare two dates of the form {{YYYY,MM,DD},{HH,MIN,SS}}.
%%If the first date is the bigger one, bigger1 is returned (read: biggerFirst);
%%if the first date is the smaller one, smaller1 is returned.
% compare_date(Date,bad_date)->
% bad_date;
% compare_date({D1,T1},{D2,T2})->
% case compare_date1(D1,D2) of
% equal ->
% compare_date1(T1,T2);
% GTorLT->
% GTorLT
% end.
% compare_date1({T1,T2,T3},{T12,T22,T32}) when T1>T12 ->
% bigger1;
% compare_date1({T1,T2,T3},{T1,T22,T32}) when T2>T22 ->
% bigger1;
% compare_date1({T1,T2,T3},{T1,T2,T32}) when T3>T32 ->
% bigger1;
% compare_date1({T1,T2,T3},{T1,T2,T3})->
% equal;
% compare_date1(_D1,_D2)->
% smaller1.
%% IE4 & NS4 send an extra '; length=xxxx' string at the end of the If-Modified-Since
%% header; we detect this and ignore it (the RFCs do not mention this).
strip_date(undefined) ->
undefined;
strip_date([]) ->
[];
strip_date([$;,$ |Rest]) ->
[];
strip_date([C|Rest]) ->
[C|strip_date(Rest)].
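%% For example, strip_date("Mon, 01 Jan 1996 12:00:00 GMT; length=42")
%% gives "Mon, 01 Jan 1996 12:00:00 GMT".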
send_return_value({412,_,_},FileInfo)->
{status,{412,none,"Precondition Failed"}};
send_return_value({304,Info,Path},FileInfo)->
Suffix=httpd_util:suffix(Path),
MimeType = httpd_util:lookup_mime_default(Info#mod.config_db,Suffix,"text/plain"),
Header = [{code,304},
{etag,httpd_util:create_etag(FileInfo)},
{content_length,0},
{last_modified,httpd_util:rfc1123_date(FileInfo#file_info.mtime)}],
{response,{response,Header,nobody}}.
<|start_filename|>lib/cosProperty/src/CosPropertyService_PropertySetDef_impl.erl<|end_filename|>
%%--------------------------------------------------------------------
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%
%%-----------------------------------------------------------------
%% File: CosPropertyService_PropertySetDef_impl.erl
%% Modified:
%%
%%-----------------------------------------------------------------
%% README:
%% (1) The OMG specification states that a property name may not
%% be an empty string (""). We may restrict this further
%% but there is no reason for that.
%%-----------------------------------------------------------------
-module('CosPropertyService_PropertySetDef_impl').
%%----------------------------------------------------------------------
%% Include files
%%----------------------------------------------------------------------
-include_lib("orber/include/corba.hrl").
-include_lib("orber/src/orber_iiop.hrl").
-include_lib("cosProperty/include/CosPropertyService.hrl").
-include("cosProperty.hrl").
%%----------------------------------------------------------------------
%% External exports
%%----------------------------------------------------------------------
%% Mandatory callbacks
-export([init/1,
terminate/2,
code_change/3]).
%% Inherit from CosPropertyService::PropertySet
-export([define_property/4,
define_properties/3,
get_number_of_properties/2,
get_all_property_names/3,
get_property_value/3,
get_properties/3,
get_all_properties/3,
delete_property/3,
delete_properties/3,
delete_all_properties/2,
is_property_defined/3]).
%% CosPropertyService::PropertySetDef
-export([get_allowed_property_types/2,
get_allowed_properties/2,
define_property_with_mode/5,
define_properties_with_modes/3,
get_property_mode/3,
get_property_modes/3,
set_property_mode/4,
set_property_modes/3]).
%%----------------------------------------------------------------------
%% Internal exports
%%----------------------------------------------------------------------
-export([dump/0]).
%%----------------------------------------------------------------------
%% Records
%%----------------------------------------------------------------------
-record(state, {dbKey, defaultMode, okTypes, okProperties, myType}).
%%----------------------------------------------------------------------
%% Macros
%%----------------------------------------------------------------------
-define(create_InitState(K, D, AT, AP, MT), #state{dbKey = K, defaultMode = D,
okTypes = AT, okProperties = AP,
myType = MT}).
%% Selectors
-define(get_DBKey(S), S#state.dbKey).
-define(get_DefaultMode(S), S#state.defaultMode).
-define(get_okTypes(S), S#state.okTypes).
-define(get_okProperties(S), S#state.okProperties).
%% MISC
-define(is_NotSetDef(S), S#state.myType =/= ?PropertySetDef).
-define(no_PropertyLimits(S), S#state.okProperties == []).
-define(no_TypeLimits(S), S#state.okTypes == []).
-define(is_NotStatic(S), is_binary(S#state.dbKey)).
%% Fun:s
-define(Local2Property, fun({N,V,_M}) ->
#'CosPropertyService_Property'{property_name = N,
property_value = V}
end).
-define(Local2Names, fun({N,_V,_M}) ->
N
end).
-define(MemberName(N), fun(R) ->
case R of
Property when is_record(R, 'CosPropertyService_Property') ->
Property#'CosPropertyService_Property'.property_name == N;
PropertyDef when is_record(R, 'CosPropertyService_PropertyDef') ->
PropertyDef#'CosPropertyService_PropertyDef'.property_name == N;
_->
false
end
end).
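%% ?MemberName(N) expands to a predicate fun, so it can be used with
%% the lists module, e.g. lists:any(?MemberName("size"), Properties)
%% to test whether a Property or PropertyDef named "size" is present.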
%%======================================================================
%% External functions
%%======================================================================
%%----------------------------------------------------------------------
%% Function : init/1
%% Description: Initiates the server
%% Returns : {ok, State} |
%% {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%%----------------------------------------------------------------------
init({DefMode, AllowedTypes, AllowedProperties, InitProperties, MyType}) ->
Key = term_to_binary({{erlang:system_time(),
erlang:unique_integer()},
node()}),
_F = ?write_function(#oe_CosPropertyService{key=Key,
properties=InitProperties}),
write_result(mnesia:transaction(_F)),
{ok, ?create_InitState(Key, DefMode, AllowedTypes, AllowedProperties, MyType)};
init({static, DefMode, AllowedTypes, AllowedProperties, InitProperties, MyType}) ->
{ok, ?create_InitState(InitProperties, DefMode, AllowedTypes,
AllowedProperties, MyType)}.
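%% Note that in the static case the property list itself is stored in
%% dbKey; the ?is_NotStatic(S) guard (an is_binary/1 test on dbKey) is
%% what later distinguishes the mnesia-backed variant from the static one.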
%%---------------------------------------------------------------------%
%% Function : terminate
%% Description: Shutdown the server
%% Returns : any (ignored by gen_server)
%%----------------------------------------------------------------------
terminate(_Reason, State) when ?is_NotStatic(State) ->
_DF = ?delete_function({oe_CosPropertyService, ?get_DBKey(State)}),
catch write_result(mnesia:transaction(_DF)),
ok;
terminate(_Reason, _State) ->
ok.
%%---------------------------------------------------------------------%
%% Function : code_change
%% Description: Convert process state when code is changed
%% Returns : {ok, State}
%%----------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%----------------------------------------------------------------------
%% Interface CosPropertyService::PropertySet
%%----------------------------------------------------------------------
%%---------------------------------------------------------------------%
%% Function : define_property
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
define_property(_, _, "", _) ->
corba:raise(#'CosPropertyService_InvalidPropertyName'{});
define_property(_OE_This, State, Name, Value) when ?is_NotStatic(State) ->
evaluate_property_data(State, Value, Name),
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch update_property(X, Name, value, Value,
?get_DefaultMode(State)) of
{'EXCEPTION', E} when
is_record(E, 'CosPropertyService_PropertyNotFound') ->
mnesia_write(State, [{Name, Value, ?get_DefaultMode(State)}|X]);
{'EXCEPTION', E} ->
{'EXCEPTION', E};
NewProperties ->
mnesia_write(State, NewProperties)
end
end
end,
{reply, mnesia_transaction(_DF), State};
define_property(_OE_This, State, Name, Value) ->
evaluate_property_data(State, Value, Name),
X = ?get_DBKey(State),
case catch update_property(X, Name, value, Value, ?get_DefaultMode(State)) of
{'EXCEPTION', E} when is_record(E, 'CosPropertyService_PropertyNotFound') ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO});
{'EXCEPTION', E} ->
corba:raise(E);
_NewProperties ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end.
%%---------------------------------------------------------------------%
%% Function : get_property_value
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
get_property_value(_, _, "") ->
corba:raise(#'CosPropertyService_InvalidPropertyName'{});
get_property_value(_OE_THIS, State, Name) ->
X = lookup_table(?get_DBKey(State)),
{reply, find_property(X, Name, value), State}.
%%---------------------------------------------------------------------%
%% Function : delete_property
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
delete_property(_, _, "") ->
corba:raise(#'CosPropertyService_InvalidPropertyName'{});
delete_property(_OE_THIS, State, Name) when ?is_NotStatic(State) ->
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch remove_property(X, Name) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
NewProperties ->
mnesia_write(State, NewProperties)
end
end
end,
{reply, mnesia_transaction(_DF), State};
delete_property(_OE_THIS, State, Name) ->
X = lookup_table(?get_DBKey(State)),
%% Check the properties; must raise an exception.
remove_property(X, Name),
%% Something is not correct.
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO}).
%%---------------------------------------------------------------------%
%% Function : define_properties
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
define_properties(_OE_THIS, State, PropertySeq) when ?is_NotStatic(State) ->
{OKProperties, Exc} = evaluate_properties_data(State, PropertySeq),
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch define_properties_helper(State,
OKProperties, X, Exc) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
NewProperties ->
mnesia_write(State, NewProperties)
end
end
end,
{reply, mnesia_transaction(_DF), State};
define_properties(_OE_THIS, State, PropertySeq) ->
{OKProperties, Exc} = evaluate_properties_data(State, PropertySeq),
X = lookup_table(?get_DBKey(State)),
case define_properties_helper(State, OKProperties, X, Exc) of
{'EXCEPTION', E} ->
corba:raise(E);
_ ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end.
define_properties_helper(_State, [], NewProperties, []) ->
%% No exceptions, insert the properties.
NewProperties;
define_properties_helper(_State, [], _, MultipleExceptions) ->
{'EXCEPTION', #'CosPropertyService_MultipleExceptions'{exceptions = MultipleExceptions}};
define_properties_helper(State, [#'CosPropertyService_Property'
{property_name = Name,
property_value = Value}|T], Properties, Exc) ->
case catch update_property(Properties, Name, value, Value, ?get_DefaultMode(State)) of
{'EXCEPTION', E} when is_record(E, 'CosPropertyService_PropertyNotFound') ->
define_properties_helper(State, T, [{Name, Value, ?get_DefaultMode(State)}|Properties], Exc);
{'EXCEPTION', E} ->
define_properties_helper(State, T, Properties,
[#'CosPropertyService_PropertyException'
{reason = remap_exception(E),
failing_property_name = Name}|Exc]);
NewProperties ->
define_properties_helper(State, T, NewProperties, Exc)
end.
%%---------------------------------------------------------------------%
%% Function : get_number_of_properties
%% Arguments : -
%% Description: Returns the number of properties currently associated
%% with this object.
%% Returns : {ok, ulong(), State}
%%----------------------------------------------------------------------
get_number_of_properties(_OE_THIS, State) ->
X = lookup_table(?get_DBKey(State)),
{reply, length(X), State}.
%%---------------------------------------------------------------------%
%% Function : get_all_property_names
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
get_all_property_names(_OE_THIS, State, Max) ->
X = lookup_table(?get_DBKey(State)),
{reply, get_all_property_names_helper(X, [], Max), State}.
get_all_property_names_helper([], Acc, _) ->
%% There are no more properties; return a nil object reference.
{ok, Acc, corba:create_nil_objref()};
get_all_property_names_helper(Left, Acc, 0) ->
%% There are more properties; create a Names Iterator.
PropertyNames = lists:map(?Local2Names, Left),
{ok, Acc, cosProperty:start_PropertyNamesIterator(PropertyNames)};
get_all_property_names_helper([{Name, _, _}|T], Acc, No) ->
get_all_property_names_helper(T, [Name|Acc], No-1).
%%---------------------------------------------------------------------%
%% Function : get_properties
%% Arguments : A list of property names, i.e., string()
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
get_properties(_OE_THIS, State, PropertyNames) ->
X = lookup_table(?get_DBKey(State)),
{reply, locate_names(PropertyNames, X, true, []), State}.
locate_names([], _, AllOK, Acc) ->
{AllOK, Acc};
locate_names([""|T], X, _AllOK, Acc) ->
locate_names(T, X, false, [#'CosPropertyService_Property'
{property_name = "",
property_value =
any:create(tk_void, ok)}|Acc]);
locate_names([H|T], X, AllOK, Acc) ->
case catch find_property(X, H, value) of
{'EXCEPTION', _} ->
locate_names(T, X, false, [#'CosPropertyService_Property'
{property_name = H,
property_value =
any:create(tk_void, ok)}|Acc]);
Val ->
locate_names(T, X, AllOK, [#'CosPropertyService_Property'
{property_name = H,
property_value = Val}|Acc])
end.
%%---------------------------------------------------------------------%
%% Function : get_all_properties
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
get_all_properties(_OE_THIS, State, Max) ->
X = lookup_table(?get_DBKey(State)),
{reply, get_all_properties_helper(X, [], Max), State}.
get_all_properties_helper([], Acc, _) ->
%% There are no more properties; return a nil object reference.
{ok, Acc, corba:create_nil_objref()};
get_all_properties_helper(Left, Acc, 0) ->
%% There are more properties; create an Iterator.
Properties = lists:map(?Local2Property, Left),
{ok, Acc, cosProperty:start_PropertiesIterator(Properties)};
get_all_properties_helper([{Name, Val, _}|T], Acc, No) ->
get_all_properties_helper(T, [#'CosPropertyService_Property'
{property_name = Name,
property_value = Val}|Acc], No-1).
%%---------------------------------------------------------------------%
%% Function : delete_properties
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
delete_properties(_OE_THIS, State, []) ->
{reply, ok, State};
delete_properties(_OE_THIS, State, PropertyNames) when ?is_NotStatic(State) ->
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch delete_properties_helper(X, [], [],
PropertyNames, State,
length(X)) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
{{'EXCEPTION', E}, NotDeleted} ->
ok = mnesia_write(State, NotDeleted),
{'EXCEPTION', E};
{ok, NotDeleted} ->
mnesia_write(State, NotDeleted)
end
end
end,
{reply, mnesia_transaction(_DF), State};
delete_properties(_OE_THIS, State, PropertyNames) ->
X = lookup_table(?get_DBKey(State)),
case delete_properties_helper(X, [], [], PropertyNames, State, length(X)) of
{'EXCEPTION', E} ->
corba:raise(E);
_->
%% Not acceptable if it was possible to delete one or more Properties.
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end.
delete_properties_helper([], [], NotDeleted, [], _State, _Len) ->
%% Since there are no exceptions we have been able to delete all
%% properties.
{ok, NotDeleted};
delete_properties_helper([], MultipleExc, NotDeleted, Names, _State, Len) ->
%% Write remaining properties to DB.
case length(NotDeleted) of
Len ->
{'EXCEPTION', #'CosPropertyService_MultipleExceptions'
{exceptions = add_not_found(Names, MultipleExc)}};
_->
{{'EXCEPTION', #'CosPropertyService_MultipleExceptions'
{exceptions = add_not_found(Names, MultipleExc)}},
NotDeleted}
end;
delete_properties_helper([{Name, Val, Mode}|T], MultipleExc, NotDeleted,
Names, State, Len) ->
case lists:member(Name, Names) of
true when Mode =/= fixed_normal, Mode =/= fixed_readonly ->
delete_properties_helper(T, MultipleExc, NotDeleted,
lists:delete(Name, Names), State, Len);
true ->
delete_properties_helper(T, [#'CosPropertyService_PropertyException'
{reason = fixed_property,
failing_property_name = Name}|MultipleExc],
[{Name, Val, Mode}|NotDeleted],
lists:delete(Name, Names), State, Len);
false ->
delete_properties_helper(T, MultipleExc, [{Name, Val, Mode}|NotDeleted],
Names, State, Len)
end.
add_not_found([], MultipleExc) ->
MultipleExc;
add_not_found([Name|T], MultipleExc) ->
add_not_found(T, [#'CosPropertyService_PropertyException'
{reason = property_not_found,
failing_property_name = Name}|MultipleExc]).
%%---------------------------------------------------------------------%
%% Function : delete_all_properties
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
delete_all_properties(_OE_THIS, State) when ?is_NotStatic(State) ->
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch delete_all_properties_helper(X, [], State,
length(X)) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
true ->
ok = mnesia_write(State, []),
true;
false ->
false;
{false, NotDeleted} ->
ok = mnesia_write(State, NotDeleted),
false
end
end
end,
{reply, mnesia_transaction(_DF), State};
delete_all_properties(_OE_THIS, State) ->
X = lookup_table(?get_DBKey(State)),
case delete_all_properties_helper(X, [], State, length(X)) of
false ->
{reply, false, State};
_->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end.
delete_all_properties_helper([], [], _State, _) ->
%% Was able to delete all properties.
true;
delete_all_properties_helper([], NotDeleted, _State, Len) ->
%% Write remaining properties to DB.
case length(NotDeleted) of
Len ->
false;
_->
{false, NotDeleted}
end;
delete_all_properties_helper([{Name, Val, fixed_normal}|T], NotDeleted, State, Len) ->
delete_all_properties_helper(T, [{Name, Val, fixed_normal}|NotDeleted], State, Len);
delete_all_properties_helper([{Name, Val, fixed_readonly}|T], NotDeleted, State, Len) ->
delete_all_properties_helper(T, [{Name, Val, fixed_readonly}|NotDeleted], State, Len);
delete_all_properties_helper([_|T], NotDeleted, State, Len) ->
delete_all_properties_helper(T, NotDeleted, State, Len).
%%---------------------------------------------------------------------%
%% Function : is_property_defined
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
is_property_defined(_, _, "") ->
corba:raise(#'CosPropertyService_InvalidPropertyName'{});
is_property_defined(_OE_THIS, State, Name) ->
X = lookup_table(?get_DBKey(State)),
{reply, lists:keymember(Name, 1, X), State}.
%%----------------------------------------------------------------------
%% Interface CosPropertyService::PropertySetDef
%%----------------------------------------------------------------------
%%---------------------------------------------------------------------%
%% Function : get_allowed_property_types
%% Arguments : -
%% Description: Returns the initially supplied restrictions. An empty
%% list means no restrictions.
%% Returns : {ok, TypeCodeList,State}
%%----------------------------------------------------------------------
get_allowed_property_types(_OE_THIS, State) when ?is_NotSetDef(State) ->
corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO});
get_allowed_property_types(_OE_THIS, State) ->
{reply, {ok, ?get_okTypes(State)}, State}.
%%---------------------------------------------------------------------%
%% Function : get_allowed_properties
%% Arguments :
%% Description: Returns the initially supplied restrictions. An empty
%% list means no restrictions.
%% Returns : {ok, PropertyDefList, State}
%%----------------------------------------------------------------------
get_allowed_properties(_OE_THIS, State) when ?is_NotSetDef(State) ->
corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO});
get_allowed_properties(_OE_THIS, State) ->
{reply, {ok, ?get_okProperties(State)}, State}.
%%---------------------------------------------------------------------%
%% Function : define_property_with_mode
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
define_property_with_mode(_OE_THIS, State, _, _, _) when ?is_NotSetDef(State) ->
corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO});
define_property_with_mode(_, _, "", _, _) ->
corba:raise(#'CosPropertyService_InvalidPropertyName'{});
define_property_with_mode(_OE_THIS, State, Name, Value, Mode)
when ?is_NotStatic(State) ->
evaluate_property_data(State, Value, Name),
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch update_property(X, Name, both, Value, Mode) of
{'EXCEPTION', E}
when is_record(E, 'CosPropertyService_PropertyNotFound') ->
mnesia_write(State, [{Name, Value, Mode}|X]);
{'EXCEPTION', E} ->
{'EXCEPTION', E};
NewProperties ->
mnesia_write(State, NewProperties)
end
end
end,
{reply, mnesia_transaction(_DF), State};
define_property_with_mode(_OE_THIS, State, Name, Value, Mode) ->
evaluate_property_data(State, Value, Name),
X = lookup_table(?get_DBKey(State)),
case catch update_property(X, Name, both, Value, Mode) of
{'EXCEPTION', E} when is_record(E, 'CosPropertyService_PropertyNotFound') ->
%% Should get not allowed exception.
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO});
{'EXCEPTION', E} ->
corba:raise(E);
_ ->
%% Should be impossible.
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end.
%%---------------------------------------------------------------------%
%% Function : define_properties_with_modes
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
define_properties_with_modes(_OE_THIS, State, _) when ?is_NotSetDef(State) ->
corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO});
define_properties_with_modes(_OE_THIS, State, PropertyDefSeq)
when ?is_NotStatic(State)->
{OKProperteDefs, Exc} = evaluate_properties_data(State, PropertyDefSeq),
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch define_properties_with_modes_helper(OKProperteDefs,
X, Exc, State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
NewProperties ->
mnesia_write(State, NewProperties)
end
end
end,
{reply, mnesia_transaction(_DF), State};
define_properties_with_modes(_OE_THIS, State, PropertyDefSeq) ->
{OKProperteDefs, Exc} = evaluate_properties_data(State, PropertyDefSeq),
X = lookup_table(?get_DBKey(State)),
case define_properties_with_modes_helper(OKProperteDefs, X, Exc, State) of
{'EXCEPTION', E} ->
corba:raise(E);
_ ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end.
define_properties_with_modes_helper([], NewPropertyDefs, [], _State) ->
%% No exceptions found.
NewPropertyDefs;
define_properties_with_modes_helper([], _, Exc, _) ->
{'EXCEPTION', #'CosPropertyService_MultipleExceptions'{exceptions = Exc}};
define_properties_with_modes_helper([#'CosPropertyService_PropertyDef'
{property_name = Name,
property_value = Value,
property_mode = Mode}|T], X, Exc, State) ->
case catch update_property(X, Name, both, Value, Mode) of
{'EXCEPTION', E} when is_record(E, 'CosPropertyService_PropertyNotFound') ->
define_properties_with_modes_helper(T, [{Name, Value, Mode}|X], Exc, State);
{'EXCEPTION', E} ->
define_properties_with_modes_helper(T, X,
[#'CosPropertyService_PropertyException'
{reason = remap_exception(E),
failing_property_name = Name}|Exc],
State);
NewX ->
define_properties_with_modes_helper(T, NewX, Exc, State)
end.
%%---------------------------------------------------------------------%
%% Function : get_property_mode
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
get_property_mode(_OE_THIS, State, _) when ?is_NotSetDef(State) ->
corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO});
get_property_mode(_, _, "") ->
corba:raise(#'CosPropertyService_InvalidPropertyName'{});
get_property_mode(_OE_THIS, State, Name) ->
X = lookup_table(?get_DBKey(State)),
{reply, find_property(X, Name, mode), State}.
%%---------------------------------------------------------------------%
%% Function : get_property_modes
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
get_property_modes(_OE_THIS, State, _) when ?is_NotSetDef(State) ->
corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO});
get_property_modes(_OE_THIS, State, PropertyNames) ->
X = lookup_table(?get_DBKey(State)),
{reply, get_property_modes_helper(PropertyNames, X, [], true), State}.
get_property_modes_helper([], _, Acc, Bool) ->
{Bool, Acc};
get_property_modes_helper([""|T], Properties, Acc, _) ->
get_property_modes_helper(T, Properties,
[#'CosPropertyService_PropertyMode'
{property_name = "",
property_mode = undefined}|Acc], false);
get_property_modes_helper([Name|T], Properties, Acc, Bool) ->
case lists:keysearch(Name, 1, Properties) of
{value, {Name, _, Mode}} ->
get_property_modes_helper(T, Properties,
[#'CosPropertyService_PropertyMode'
{property_name = Name,
property_mode = Mode}|Acc], Bool);
false ->
get_property_modes_helper(T, Properties,
[#'CosPropertyService_PropertyMode'
{property_name = Name,
property_mode = undefined}|Acc], false)
end.
%%---------------------------------------------------------------------%
%% Function : set_property_mode
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
set_property_mode(_OE_THIS, State, _, _) when ?is_NotSetDef(State) ->
corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO});
set_property_mode(_, _, "", _) ->
corba:raise(#'CosPropertyService_InvalidPropertyName'{});
set_property_mode(_OE_THIS, State, Name, Mode) when ?is_NotStatic(State) ->
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch update_property(X, Name, mode, undefined, Mode) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
NewProperties ->
mnesia_write(State, NewProperties)
end
end
end,
{reply, mnesia_transaction(_DF), State};
set_property_mode(_OE_THIS, State, Name, Mode) ->
X = lookup_table(?get_DBKey(State)),
update_property(X, Name, mode, undefined, Mode),
%% Something is not correct, shouldn't be allowed to update a property when
%% static.
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO}).
%%---------------------------------------------------------------------%
%% Function : set_property_modes
%% Arguments :
%% Description:
%% Returns : {ok, State}
%%----------------------------------------------------------------------
set_property_modes(_OE_THIS, State, _) when ?is_NotSetDef(State) ->
corba:raise(#'NO_IMPLEMENT'{completion_status=?COMPLETED_NO});
set_property_modes(_OE_THIS, State, PropertyModes) when ?is_NotStatic(State) ->
_DF =
fun() ->
case mnesia_read(State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
X ->
case catch set_property_modes_helper(PropertyModes, X, [],
State) of
{'EXCEPTION', E} ->
{'EXCEPTION', E};
NewProperties ->
mnesia_write(State, NewProperties)
end
end
end,
{reply, mnesia_transaction(_DF), State};
set_property_modes(_OE_THIS, State, PropertyModes) ->
X = lookup_table(?get_DBKey(State)),
case set_property_modes_helper(PropertyModes, X, [], State) of
{'EXCEPTION', E} ->
corba:raise(E);
_ ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end.
set_property_modes_helper([], NewProperties, [], _State) ->
%% No exceptions, write to DB.
NewProperties;
set_property_modes_helper([], _, Exc, _) ->
{'EXCEPTION', #'CosPropertyService_MultipleExceptions'{exceptions = Exc}};
set_property_modes_helper([#'CosPropertyService_PropertyMode'
{property_name = Name,
property_mode = Mode}|T], X, Exc, State) ->
case catch update_property(X, Name, mode, undefined, Mode) of
{'EXCEPTION', E} ->
set_property_modes_helper(T, X,
[#'CosPropertyService_PropertyException'
{reason = remap_exception(E),
failing_property_name = Name}|Exc],
State);
NewX ->
set_property_modes_helper(T, NewX, Exc, State)
end.
%%======================================================================
%% Internal functions
%%======================================================================
remap_exception(#'CosPropertyService_ConflictingProperty'{}) -> conflicting_property;
remap_exception(#'CosPropertyService_FixedProperty'{}) -> fixed_property;
remap_exception(#'CosPropertyService_InvalidPropertyName'{}) -> invalid_property_name;
remap_exception(#'CosPropertyService_PropertyNotFound'{}) -> property_not_found;
remap_exception(#'CosPropertyService_UnsupportedTypeCode'{}) -> unsupported_type_code;
remap_exception(#'CosPropertyService_UnsupportedProperty'{}) -> unsupported_property;
remap_exception(#'CosPropertyService_ReadOnlyProperty'{}) -> read_only_property;
remap_exception(#'CosPropertyService_UnsupportedMode'{}) -> unsupported_mode.
find_property([], _, _) ->
corba:raise(#'CosPropertyService_PropertyNotFound'{});
find_property([{Name, Value, _}|_], Name, value) ->
Value;
find_property([{Name, _, Mode}|_], Name, mode) ->
Mode;
% Left out for now to avoid dialyzer warning.
%find_property([{Name, Value, Mode}|_], Name, all) ->
% {Name, Value, Mode};
find_property([_|T], Name, Which) ->
find_property(T, Name, Which).
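%% Illustrative sketch (added for clarity, not part of the original module):
%% find_property/3 walks the property list and returns the requested field,
%% raising CosPropertyService_PropertyNotFound when the name is absent.
%% The property name "size" and the value V below are hypothetical.
%%   find_property([{"size", V, normal}], "size", value) -> V
%%   find_property([{"size", V, normal}], "size", mode)  -> normal
%%   find_property([], "size", value) -> raises 'CosPropertyService_PropertyNotFound'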
remove_property(PropertList, Name) ->
remove_property(PropertList, Name, []).
remove_property([], _, _) ->
corba:raise(#'CosPropertyService_PropertyNotFound'{});
remove_property([{Name, _, fixed_normal}|_T], Name, _) ->
corba:raise(#'CosPropertyService_FixedProperty'{});
remove_property([{Name, _, fixed_readonly}|_T], Name, _) ->
corba:raise(#'CosPropertyService_FixedProperty'{});
remove_property([{Name, _, _}|T], Name, Acc) ->
T++Acc;
remove_property([H|T], Name, Acc) ->
remove_property(T, Name, [H|Acc]).
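%% Illustrative sketch (hypothetical property names; not part of the original
%% module): removing a non-fixed property returns the remaining list, while a
%% fixed_normal or fixed_readonly property raises CosPropertyService_FixedProperty.
%%   remove_property([{"a", V1, normal}, {"b", V2, normal}], "a") -> [{"b", V2, normal}]
%%   remove_property([{"a", V1, fixed_normal}], "a") -> raises 'CosPropertyService_FixedProperty'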
update_property(_, "", _, _, _) ->
corba:raise(#'CosPropertyService_InvalidPropertyName'{});
update_property(PropertyList, Name, Which, Value, Mode) ->
update_property(PropertyList, Name, Which, Value, Mode, []).
update_property([], _, _, _, _, _) ->
corba:raise(#'CosPropertyService_PropertyNotFound'{});
update_property([{Name, _, fixed_readonly}|_], Name, value, _, _, _) ->
corba:raise(#'CosPropertyService_FixedProperty'{});
update_property([{Name, _, fixed_normal}|_], Name, both, _, _, _) ->
corba:raise(#'CosPropertyService_FixedProperty'{});
update_property([{Name, _, fixed_readonly}|_], Name, both, _, _, _) ->
corba:raise(#'CosPropertyService_FixedProperty'{});
update_property([{Name, #any{typecode = TC}, Mode}|T], Name,
value, #any{typecode = TC, value = Value}, _Mod, Acc) ->
[{Name, #any{typecode = TC, value = Value}, Mode}|T]++Acc;
update_property([{Name, #any{typecode = TC}, _Mode}|T], Name,
both, #any{typecode = TC, value = Value}, Mod, Acc) ->
[{Name, #any{typecode = TC, value = Value}, Mod}|T]++Acc;
update_property([{Name, _, _}|_], Name, value, _, _, _) ->
corba:raise(#'CosPropertyService_ConflictingProperty'{});
update_property([{Name, _, _}|_], Name, both, _, _, _) ->
corba:raise(#'CosPropertyService_ConflictingProperty'{});
%% Normally we don't need to raise an exception for the two following cases but
%% to be able to manage static Properties we must raise an exception. Well,
%% on the other hand, why should a user try to change a mode to the same value?!
%% But we have no other option.
update_property([{Name, _Value, fixed_normal}|_T], Name, mode, _, fixed_normal, _Acc) ->
corba:raise(#'CosPropertyService_FixedProperty'{});
update_property([{Name, _Value, fixed_readonly}|_T], Name, mode, _, fixed_readonly, _Acc) ->
corba:raise(#'CosPropertyService_FixedProperty'{});
update_property([{Name, _Value, fixed_normal}|_T], Name, mode, _, _Mode, _Acc) ->
corba:raise(#'CosPropertyService_UnsupportedMode'{});
update_property([{Name, _Value, fixed_readonly}|_T], Name, mode, _, _Mode, _Acc) ->
corba:raise(#'CosPropertyService_UnsupportedMode'{});
update_property([{Name, Value, _}|T], Name, mode, _, Mode, Acc) ->
[{Name, Value, Mode}|T]++Acc;
update_property([H|T], Name, Which, Value, Mode, Acc) ->
update_property(T, Name, Which, Value, Mode, [H|Acc]).
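%% Illustrative sketch (hypothetical property "age"; not part of the original
%% module): updating the mode of a non-fixed property rewrites its mode field,
%% while changing the value requires the typecode of the new #any{} to match
%% the stored one (otherwise ConflictingProperty is raised).
%%   update_property([{"age", #any{typecode=tk_short, value=5}, normal}],
%%                   "age", mode, undefined, read_only)
%%     -> [{"age", #any{typecode=tk_short, value=5}, read_only}]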
lookup_table(Key) when is_binary(Key) ->
_RF = ?read_function({oe_CosPropertyService, Key}),
case mnesia:transaction(_RF) of
{atomic, [#oe_CosPropertyService{properties=Properties}]} ->
Properties;
{atomic, []} ->
corba:raise(#'OBJECT_NOT_EXIST'{completion_status=?COMPLETED_NO});
_Other ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end;
lookup_table(Key) when is_list(Key) ->
Key;
lookup_table(_) ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO}).
mnesia_transaction(Fun) ->
case mnesia:transaction(Fun) of
{atomic, {'EXCEPTION', E}} ->
corba:raise(E);
{atomic, ok} ->
ok;
{atomic, Reply} ->
Reply;
_Other ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO})
end.
mnesia_read(State) ->
case mnesia:wread({oe_CosPropertyService, ?get_DBKey(State)}) of
[#oe_CosPropertyService{properties = X}] ->
X;
{atomic, []} ->
{'EXCEPTION', #'OBJECT_NOT_EXIST'{completion_status=?COMPLETED_NO}};
_Other ->
{'EXCEPTION', #'INTERNAL'{completion_status=?COMPLETED_NO}}
end.
mnesia_write(State, X) ->
mnesia:write(#oe_CosPropertyService{key = ?get_DBKey(State), properties = X}).
%% Check a write transaction
write_result({atomic,ok}) -> ok;
write_result(_Foo) ->
corba:raise(#'INTERNAL'{completion_status=?COMPLETED_NO}).
evaluate_properties_data(State, PropertySeq) ->
evaluate_properties_data(State, PropertySeq, [], []).
evaluate_properties_data(_State, [], OKProperties, Exc) ->
{OKProperties, Exc};
evaluate_properties_data(State, [#'CosPropertyService_Property'
{property_name = Name,
property_value = Value}|T], Acc, Exc) ->
case catch evaluate_property_data(State, Value, Name) of
ok ->
evaluate_properties_data(State, T, [#'CosPropertyService_Property'
{property_name = Name,
property_value = Value}|Acc], Exc);
{'EXCEPTION', E} when is_record(E, 'CosPropertyService_UnsupportedTypeCode') ->
evaluate_properties_data(State, T, Acc,
[#'CosPropertyService_PropertyException'
{reason = unsupported_type_code,
failing_property_name = Name}|Exc]);
{'EXCEPTION', E} when is_record(E, 'CosPropertyService_UnsupportedProperty') ->
evaluate_properties_data(State, T, Acc,
[#'CosPropertyService_PropertyException'
{reason = unsupported_property,
failing_property_name = Name}|Exc])
end;
evaluate_properties_data(State, [#'CosPropertyService_PropertyDef'
{property_name = Name,
property_value = Value,
property_mode = Mode}|T], Acc, Exc) ->
case catch evaluate_property_data(State, Value, Name) of
ok ->
evaluate_properties_data(State, T, [#'CosPropertyService_PropertyDef'
{property_name = Name,
property_value = Value,
property_mode = Mode}|Acc], Exc);
{'EXCEPTION', E} when is_record(E, 'CosPropertyService_UnsupportedTypeCode') ->
evaluate_properties_data(State, T, Acc,
[#'CosPropertyService_PropertyException'
{reason = unsupported_type_code,
failing_property_name = Name}|Exc]);
{'EXCEPTION', E} when is_record(E, 'CosPropertyService_UnsupportedProperty') ->
evaluate_properties_data(State, T, Acc,
[#'CosPropertyService_PropertyException'
{reason = unsupported_property,
failing_property_name = Name}|Exc])
end;
evaluate_properties_data(_, _, _, _) ->
corba:raise(#'BAD_PARAM'{completion_status=?COMPLETED_NO}).
evaluate_property_data(State, _, _) when ?no_PropertyLimits(State),
?no_TypeLimits(State) ->
ok;
evaluate_property_data(State, Value, _Name) when ?no_PropertyLimits(State) ->
case lists:member(any:get_typecode(Value), ?get_okTypes(State)) of
true ->
ok;
_ ->
corba:raise(#'CosPropertyService_UnsupportedTypeCode'{})
end;
evaluate_property_data(State, _Value, Name) when ?no_TypeLimits(State) ->
case lists:any(?MemberName(Name), ?get_okProperties(State)) of
true ->
ok;
_ ->
corba:raise(#'CosPropertyService_UnsupportedProperty'{})
end;
evaluate_property_data(State, Value, Name) ->
case lists:any(?MemberName(Name), ?get_okProperties(State)) of
true ->
case lists:member(any:get_typecode(Value), ?get_okTypes(State)) of
true ->
ok;
_ ->
corba:raise(#'CosPropertyService_UnsupportedTypeCode'{})
end;
_ ->
corba:raise(#'CosPropertyService_UnsupportedProperty'{})
end.
%%----------------------------------------------------------------------
%% Debugging functions
%%----------------------------------------------------------------------
dump() ->
case catch mnesia:dirty_first('oe_CosPropertyService') of
{'EXIT', R} ->
io:format("Exited with ~p\n",[R]);
Key ->
dump_print(Key),
dump_loop(Key)
end.
dump_loop(PreviousKey) ->
case catch mnesia:dirty_next('oe_CosPropertyService', PreviousKey) of
{'EXIT', R} ->
io:format("Exited with ~p\n",[R]);
'$end_of_table' ->
ok;
Key ->
dump_print(Key),
dump_loop(Key)
end.
dump_print(Key) ->
case catch mnesia:dirty_read({'oe_CosPropertyService', Key}) of
{'EXIT', R} ->
io:format("Exited with ~p\n",[R]);
[{_,_,X}] ->
io:format("Property: ~p~n", [X]);
_ ->
ok
end.
%%-------------------------- END OF MODULE -----------------------------
<|start_filename|>lib/erl_interface/src/decode/decode_trace.c<|end_filename|>
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1998-2013. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
#include "eidef.h"
#include "putget.h"
int ei_decode_trace(const char *buf, int *index, erlang_trace *p)
{
int arity = 0;
int tindex = *index; /* use a temporary index if any function should fail */
long *p_flags, *p_label, *p_serial, *p_prev;
erlang_pid *p_from;
if (p != NULL) {
p_flags = &p->flags;
p_label = &p->label;
p_serial = &p->serial;
p_prev = &p->prev;
p_from = &p->from;
}
else {
p_flags = p_label = p_serial = p_prev = NULL;
p_from = NULL;
}
/* { Flags, Label, Serial, FromPid, Prev } */
if (ei_decode_tuple_header(buf, &tindex, &arity)
|| (arity != 5)
|| ei_decode_long(buf, &tindex, p_flags)
|| ei_decode_long(buf, &tindex, p_label)
|| ei_decode_long(buf, &tindex, p_serial)
|| ei_decode_pid( buf, &tindex, p_from)
|| ei_decode_long(buf, &tindex, p_prev)) return -1;
/* index is updated by the functions we called */
*index = tindex;
return 0;
}
<|start_filename|>lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl<|end_filename|>
%% ``Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%%
%% $Id: mod_htaccess.erl,v 1.1 2008/12/17 09:53:35 mikpe Exp $
-module(mod_htaccess).
-export([do/1, load/2]).
-export([debug/0]).
-include("httpd.hrl").
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Public methods that interface the eswapi %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%----------------------------------------------------------------------
% Public method called by the webserver to insert the data about
% the names of access files
%----------------------------------------------------------------------
load([$A,$c,$c,$e,$s,$s,$F,$i,$l,$e,$N,$a,$m,$e|FileNames],Context)->
CleanFileNames=httpd_conf:clean(FileNames),
%%io:format("\n The filenames is:" ++ FileNames ++ "\n"),
{ok,[],{access_files,string:tokens(CleanFileNames," ")}}.
%----------------------------------------------------------------------
% Public method that the webserver calls to control the page
%----------------------------------------------------------------------
do(Info)->
case httpd_util:key1search(Info#mod.data,status) of
{Status_code,PhraseArgs,Reason}->
{proceed,Info#mod.data};
undefined ->
control_path(Info)
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% %%
%% The functions that start the control if there is an accessfile %%
%% and if so control whether the dir is allowed or not %%
%% %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%----------------------------------------------------------------------
%Info = record mod as specified in httpd.hrl
%returns either {proceed,Info#mod.data}
%{proceed,[{status,403....}|Info#mod.data]}
%{proceed,[{status,401....}|Info#mod.data]}
%{proceed,[{status,500....}|Info#mod.data]}
%----------------------------------------------------------------------
control_path(Info) ->
Path = mod_alias:path(Info#mod.data,
Info#mod.config_db,
Info#mod.request_uri),
case isErlScriptOrNotAccessibleFile(Path,Info) of
true->
{proceed,Info#mod.data};
false->
case getHtAccessData(Path,Info)of
{ok,public}->
%%There was no restrictions on the page continue
{proceed,Info#mod.data};
{error,Reason} ->
%Something went wrong; continue or quit?
{proceed,Info#mod.data};
{accessData,AccessData}->
controlAllowedMethod(Info,AccessData)
end
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% %%
%% These methods controls that the method the client used in the %%
%% request is one of the limited %%
%% %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%----------------------------------------------------------------------
%Control whether the access method used is in the list of methods to challenge
%
%Info is the mod record as specified in httpd.hrl
%AccessData is an ets table with the data in the .htaccessfiles
%----------------------------------------------------------------------
controlAllowedMethod(Info,AccessData)->
case allowedRequestMethod(Info,AccessData) of
allow->
%%The request didn't use one of the limited methods
ets:delete(AccessData),
{proceed,Info#mod.data};
challenge->
authenticateUser(Info,AccessData)
end.
%----------------------------------------------------------------------
%Check the specified access method in the .htaccessfile
%----------------------------------------------------------------------
allowedRequestMethod(Info,AccessData)->
case ets:lookup(AccessData,limit) of
[{limit,all}]->
challenge;
[{limit,Methods}]->
isLimitedRequestMethod(Info,Methods)
end.
%----------------------------------------------------------------------
%Check the specified access methods in the .htaccessfile against the user's
%access method
%
%Info is the record from the do call
%Methods is a list of the methods specified in the .htaccessfile
%----------------------------------------------------------------------
isLimitedRequestMethod(Info,Methods)->
case lists:member(Info#mod.method,Methods) of
true->
challenge;
false ->
allow
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% %%
%% These methods control that the user comes from an allowed net %%
%% and if so whether it's a valid user or a challenge shall be %%
%% generated %%
%% %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%----------------------------------------------------------------------
%The first thing to control is that the user is from a network
%that has access to the page
%----------------------------------------------------------------------
authenticateUser(Info,AccessData)->
case controlNet(Info,AccessData) of
allow->
%the network is ok control that it is an allowed user
authenticateUser2(Info,AccessData);
deny->
%The user isn't allowed to access the pages from that network
ets:delete(AccessData),
{proceed,[{status,{403,Info#mod.request_uri,
"Restricted area not allowed from your network"}}|Info#mod.data]}
end.
%----------------------------------------------------------------------
%The network the user comes from is allowed to view the resources
%control whether the user needs to supply a password or not
%----------------------------------------------------------------------
authenticateUser2(Info,AccessData)->
case ets:lookup(AccessData,require) of
[{require,AllowedUsers}]->
case ets:lookup(AccessData,auth_name) of
[{auth_name,Realm}]->
authenticateUser2(Info,AccessData,Realm,AllowedUsers);
_NoAuthName->
ets:delete(AccessData),
{break,[{status,{500,none,
?NICE("mod_htaccess:AuthName directive not specified")}}]}
end;
[] ->
%%No special user is required the network is ok so let
%%the user in
ets:delete(AccessData),
{proceed,Info#mod.data}
end.
%----------------------------------------------------------------------
%The user must send a userId and a password to get the resource
%Control if it's already in the http-request
%if the file with users is bad, send a 500 response
%----------------------------------------------------------------------
authenticateUser2(Info,AccessData,Realm,AllowedUsers)->
case authenticateUser(Info,AccessData,AllowedUsers) of
allow ->
ets:delete(AccessData),
{user,Name,Pwd}=getAuthenticatingDataFromHeader(Info),
{proceed, [{remote_user_name,Name}|Info#mod.data]};
challenge->
ets:delete(AccessData),
ReasonPhrase = httpd_util:reason_phrase(401),
Message = httpd_util:message(401,none,Info#mod.config_db),
{proceed,
[{response,
{401,
["WWW-Authenticate: Basic realm=\"",Realm,
"\"\r\n\r\n","<HTML>\n<HEAD>\n<TITLE>",
ReasonPhrase,"</TITLE>\n",
"</HEAD>\n<BODY>\n<H1>",ReasonPhrase,
"</H1>\n",Message,"\n</BODY>\n</HTML>\n"]}}|
Info#mod.data]};
deny->
ets:delete(AccessData),
{break,[{status,{500,none,
?NICE("mod_htaccess:Bad path to user or group file")}}]}
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% %%
%% Methods that validate the network the user comes from %%
%% according to the allowed networks %%
%% %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%---------------------------------------------------------------------
%Controls the user's network address against the specified networks to
%allow or deny
%
%returns either allow or deny
%----------------------------------------------------------------------
controlNet(Info,AccessData)->
UserNetwork=getUserNetworkAddress(Info),
case getAllowDenyOrder(AccessData) of
{_deny,[],_allow,[]}->
allow;
{deny,[],allow,AllowedNetworks}->
controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny);
{allow,AllowedNetworks,deny,[]}->
controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny);
{deny,DeniedNetworks,allow,[]}->
controlIfAllowed(DeniedNetworks,UserNetwork,allow,deny);
{allow,[],deny,DeniedNetworks}->
controlIfAllowed(DeniedNetworks,UserNetwork,allow,deny);
{deny,DeniedNetworks,allow,AllowedNetworks}->
controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork);
{allow,AllowedNetworks,deny,DeniedNetworks}->
controlAllowDeny(AllowedNetworks,DeniedNetworks,UserNetwork)
end.
%----------------------------------------------------------------------
%Returns the user's IP number
%----------------------------------------------------------------------
getUserNetworkAddress(Info)->
{_Socket,Address}=(Info#mod.init_data)#init_data.peername,
Address.
%----------------------------------------------------------------------
%Control the user's IP number against the IP numbers in the .htaccessfile
%----------------------------------------------------------------------
controlIfAllowed(AllowedNetworks,UserNetwork,IfAllowed,IfDenied)->
case AllowedNetworks of
[{allow,all}]->
IfAllowed;
[{deny,all}]->
IfDenied;
[{deny,Networks}]->
memberNetwork(Networks,UserNetwork,IfDenied,IfAllowed);
[{allow,Networks}]->
memberNetwork(Networks,UserNetwork,IfAllowed,IfDenied);
_Error->
IfDenied
end.
%---------------------------------------------------------------------%
%The deny control isn't necessary to perform since the allow control %
%overrides the deny control %
%---------------------------------------------------------------------%
controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork)->
case AllowedNetworks of
[{allow,all}]->
allow;
[{allow,Networks}]->
case memberNetwork(Networks,UserNetwork) of
true->
allow;
false->
deny
end
end.
%----------------------------------------------------------------------%
%Control that the user is in the allowed list if so control that the %
%network is in the denied list
%----------------------------------------------------------------------%
controlAllowDeny(AllowedNetworks,DeniedNetworks,UserNetwork)->
case controlIfAllowed(AllowedNetworks,UserNetwork,allow,deny) of
allow->
controlIfAllowed(DeniedNetworks,UserNetwork,deny,allow);
deny ->
deny
end.
%----------------------------------------------------------------------
%Controls if the user's IP number is in the list of either denied or
%allowed networks
%----------------------------------------------------------------------
memberNetwork(Networks,UserNetwork,IfTrue,IfFalse)->
case memberNetwork(Networks,UserNetwork) of
true->
IfTrue;
false->
IfFalse
end.
%----------------------------------------------------------------------
%regexp match the user's IP address against the networks in the list of
%IP addresses or subnet addresses.
memberNetwork(Networks,UserNetwork)->
case lists:filter(fun(Net)->
case regexp:match(UserNetwork,
formatRegexp(Net)) of
{match,1,_}->
true;
_NotSubNet ->
false
end
end,Networks) of
[]->
false;
MemberNetWork ->
true
end.
%----------------------------------------------------------------------
%Creates a regexp from an IP number, e.g. "127.0.0" -> "^127[.]0[.]0.*"
%and "127.0.0." -> "^127[.]0[.]0[.].*"
%----------------------------------------------------------------------
formatRegexp(Net)->
[SubNet1|SubNets]=string:tokens(Net,"."),
NetRegexp=lists:foldl(fun(SubNet,Newnet)->
Newnet ++ "[.]" ++SubNet
end,"^"++SubNet1,SubNets),
case string:len(Net)-string:rchr(Net,$.) of
0->
NetRegexp++"[.].*";
_->
NetRegexp++".*"
end.
%----------------------------------------------------------------------
%If the user has specified whether the allow or deny check shall be performed
%first, get that order; if no order is specified take
%allow - deny since it's harder than deny - allow
%----------------------------------------------------------------------
getAllowDenyOrder(AccessData)->
case ets:lookup(AccessData,order) of
[{order,{deny,allow}}]->
{deny,ets:lookup(AccessData,deny),
allow,ets:lookup(AccessData,allow)};
_DefaultOrder->
{allow,ets:lookup(AccessData,allow),
deny,ets:lookup(AccessData,deny)}
end.
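%% Illustrative sketch (hypothetical ets contents; not part of the original
%% module): with the entries {order,{deny,allow}}, {deny,["123.145"]} and
%% {allow,all} in AccessData, the call returns
%%   getAllowDenyOrder(AccessData) ->
%%       {deny, [{deny,["123.145"]}], allow, [{allow,all}]}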
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% %%
%% The methods that validates the user %%
%% %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%----------------------------------------------------------------------
%Control if there is any authenticating data in the request header
%if so it controls it against the users in the list Allowed Users
%----------------------------------------------------------------------
authenticateUser(Info,AccessData,AllowedUsers)->
case getAuthenticatingDataFromHeader(Info) of
{user,User,PassWord}->
authenticateUser(Info,AccessData,AllowedUsers,
{user,User,PassWord});
{error,nouser}->
challenge;
{error,BadData}->
challenge
end.
%----------------------------------------------------------------------
%Returns the authenticating data in the http-request
%----------------------------------------------------------------------
getAuthenticatingDataFromHeader(Info)->
PrsedHeader=Info#mod.parsed_header,
case httpd_util:key1search(PrsedHeader,"authorization" ) of
undefined->
{error,nouser};
[$B,$a,$s,$i,$c,$\ |EncodedString]->
UnCodedString=httpd_util:decode_base64(EncodedString),
case httpd_util:split(UnCodedString,":",2) of
{ok,[User,PassWord]}->
{user,User,PassWord};
{error,Error}->
{error,Error}
end;
BadCredentials ->
{error,BadCredentials}
end.
%----------------------------------------------------------------------
%Returns a list of all members of the allowed groups
%----------------------------------------------------------------------
getGroupMembers(Groups,AllowedGroups)->
Allowed=lists:foldl(fun({group,Name,Members},AllowedMembers)->
case lists:member(Name,AllowedGroups) of
true->
AllowedMembers++Members;
false ->
AllowedMembers
end
end,[],Groups),
{ok,Allowed}.
authenticateUser(Info,AccessData,{{users,[]},{groups,Groups}},User)->
authenticateUser(Info,AccessData,{groups,Groups},User);
authenticateUser(Info,AccessData,{{users,Users},{groups,[]}},User)->
authenticateUser(Info,AccessData,{users,Users},User);
authenticateUser(Info,AccessData,{{users,Users},{groups,Groups}},User)->
AllowUser=authenticateUser(Info,AccessData,{users,Users},User),
AllowGroup=authenticateUser(Info,AccessData,{groups,Groups},User),
case {AllowGroup,AllowUser} of
{_,allow}->
allow;
{allow,_}->
allow;
{challenge,_}->
challenge;
{_,challenge}->
challenge;
{_deny,_deny}->
deny
end;
%----------------------------------------------------------------------
%Controls that the user is a member in one of the allowed group
%----------------------------------------------------------------------
authenticateUser(Info,AccessData,{groups,AllowedGroups},{user,User,PassWord})->
case getUsers(AccessData,group_file) of
{group_data,Groups}->
case getGroupMembers(Groups,AllowedGroups) of
{ok,Members}->
authenticateUser(Info,AccessData,{users,Members},
{user,User,PassWord});
{error,BadData}->
deny
end;
{error,BadData}->
deny
end;
%----------------------------------------------------------------------
%Control that the user is one of the allowed users and that the passwd is ok
%----------------------------------------------------------------------
authenticateUser(Info,AccessData,{users,AllowedUsers},{user,User,PassWord})->
case lists:member(User,AllowedUsers) of
true->
%Get the usernames and passwords from the file
case getUsers(AccessData,user_file) of
{error,BadData}->
deny;
{user_data,Users}->
%Users is a list of the users in
%the userfile [{user,User,Passwd}]
checkPassWord(Users,{user,User,PassWord})
end;
false ->
challenge
end.
%----------------------------------------------------------------------
%Control that the user User={user,"UserName","PassWd"} is
%member of the list of Users
%----------------------------------------------------------------------
checkPassWord(Users,User)->
case lists:member(User,Users) of
true->
allow;
false->
challenge
end.
%----------------------------------------------------------------------
%Get the users in the specified file
%UserOrGroup is an atom that specifies whether it's a group file or a user file
%i.e. group_file or user_file
%----------------------------------------------------------------------
getUsers({file,FileName},UserOrGroup)->
case file:open(FileName,[read]) of
{ok,AccessFileHandle} ->
getUsers({stream,AccessFileHandle},[],UserOrGroup);
{error,Reason} ->
{error,{Reason,FileName}}
end;
%----------------------------------------------------------------------
%The method that starts looking for user files
%----------------------------------------------------------------------
getUsers(AccessData,UserOrGroup)->
case ets:lookup(AccessData,UserOrGroup) of
[{UserOrGroup,File}]->
getUsers({file,File},UserOrGroup);
_ ->
{error,noUsers}
end.
%----------------------------------------------------------------------
%Reads data from the filehandle File into the list FileData and when it
%reaches the end it returns the list in a tuple {user_file|group_file,FileData}
%----------------------------------------------------------------------
getUsers({stream,File},FileData,UserOrGroup)->
case io:get_line(File,[]) of
eof when UserOrGroup==user_file->
{user_data,FileData};
eof when UserOrGroup ==group_file->
{group_data,FileData};
Line ->
getUsers({stream,File},
formatUser(Line,FileData,UserOrGroup),UserOrGroup)
end.
%----------------------------------------------------------------------
%If the line is a comment remove it
%----------------------------------------------------------------------
formatUser([$#|UserDataComment],FileData,_UserOrgroup)->
FileData;
%----------------------------------------------------------------------
%The user name in the file is Username:Passwd\n
%Remove the newline sign and split the user name in
%UserName and Password
%----------------------------------------------------------------------
formatUser(UserData,FileData,UserOrGroup)->
case string:tokens(UserData," \r\n")of
[User|Whitespace] when UserOrGroup==user_file->
case string:tokens(User,":") of
[Name,PassWord]->
[{user,Name,PassWord}|FileData];
_Error->
FileData
end;
GroupData when UserOrGroup==group_file ->
parseGroupData(GroupData,FileData);
_Error ->
FileData
end.
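%% Illustrative sketch (hypothetical user entry; not part of the original
%% module): a line from a user file is split into name and password, while
%% comment lines starting with # are dropped.
%%   formatUser("anna:secret\n", [], user_file) -> [{user,"anna","secret"}]
%%   formatUser("# a comment\n", [], user_file) -> []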
%----------------------------------------------------------------------
%if everything is right GroupData is on the form
% ["groupName:", "Member1", "Member2", ..., "MemberN"]
%----------------------------------------------------------------------
parseGroupData([GroupName|GroupData],FileData)->
[{group,formatGroupName(GroupName),GroupData}|FileData].
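%% Illustrative sketch (hypothetical group entry; not part of the original
%% module): a tokenized group-file line becomes a {group,Name,Members} tuple.
%%   parseGroupData(["admins:", "anna", "bertil"], [])
%%     -> [{group,"admins",["anna","bertil"]}]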
%----------------------------------------------------------------------
%the line in the file is GroupName: Member1 Member2 .....MemberN
%Remove the : from the group name
%----------------------------------------------------------------------
formatGroupName(GroupName)->
string:strip(GroupName,right,$:).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% %%
%% Functions that parses the accessfiles %%
%% %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%----------------------------------------------------------------------
%Control that the asset is a real file and not a request for a virtual
%asset
%----------------------------------------------------------------------
isErlScriptOrNotAccessibleFile(Path,Info)->
case file:read_file_info(Path) of
{ok,_fileInfo}->
false;
{error,_Reason} ->
true
end.
%----------------------------------------------------------------------
%Path=PathToTheRequestedFile=String
%Info=record#mod
%----------------------------------------------------------------------
getHtAccessData(Path,Info)->
HtAccessFileNames=getHtAccessFileNames(Info),
case getData(Path,Info,HtAccessFileNames) of
{ok,public}->
{ok,public};
{accessData,AccessData}->
{accessData,AccessData};
{error,Reason} ->
{error,Reason}
end.
%----------------------------------------------------------------------
%returns the names of the accessfiles
%----------------------------------------------------------------------
getHtAccessFileNames(Info)->
case httpd_util:lookup(Info#mod.config_db,access_files) of
undefined->
[".htaccess"];
Files->
Files
end.
%----------------------------------------------------------------------
%HtAccessFileNames=["accessfileName1",..."AccessFileName2"]
%----------------------------------------------------------------------
getData(Path,Info,HtAccessFileNames)->
case regexp:split(Path,"/") of
{error,Error}->
{error,Error};
{ok,SplittedPath}->
getData2(HtAccessFileNames,SplittedPath,Info)
end.
%----------------------------------------------------------------------
%Add together the data in the SplittedPath up to the path
%that is the alias or the document root,
%since we do not need to check for any accessfiles before that
%----------------------------------------------------------------------
getData2(HtAccessFileNames,SplittedPath,Info)->
case getRootPath(SplittedPath,Info) of
{error,Path}->
{error,Path};
{ok,StartPath,RestOfSplittedPath} ->
getData2(HtAccessFileNames,StartPath,RestOfSplittedPath,Info)
end.
%----------------------------------------------------------------------
%HtAccessFileNames is a list of the names the accessfiles can have
%Path is the shortest match against all aliases and the document root
%rest of splitted path is a list of the parts of the path
%Info is the mod record from the server
%----------------------------------------------------------------------
getData2(HtAccessFileNames,StartPath,RestOfSplittedPath,Info)->
case getHtAccessFiles(HtAccessFileNames,StartPath,RestOfSplittedPath) of
[]->
%No accessfile; quit, it's a public directory
{ok,public};
Files ->
loadAccessFilesData(Files)
end.
%----------------------------------------------------------------------
%Loads the data in the accessFiles specified by
% AccessFiles=["/home/public/html/accessfile",
% "/home/public/html/priv/accessfile"]
%----------------------------------------------------------------------
loadAccessFilesData(AccessFiles)->
loadAccessFilesData(AccessFiles,ets:new(accessData,[])).
%----------------------------------------------------------------------
%Returns the found data
%----------------------------------------------------------------------
contextToValues(AccessData)->
case ets:lookup(AccessData,context) of
[{context,Values}]->
ets:delete(AccessData,context),
insertContext(AccessData,Values),
{accessData,AccessData};
_Error->
{error,errorInAccessFile}
end.
insertContext(AccessData,[])->
ok;
insertContext(AccessData,[{allow,From}|Values])->
insertDenyAllowContext(AccessData,{allow,From}),
insertContext(AccessData,Values);
insertContext(AccessData,[{deny,From}|Values])->
insertDenyAllowContext(AccessData,{deny,From}),
insertContext(AccessData,Values);
insertContext(AccessData,[{require,{GrpOrUsr,Members}}|Values])->
case ets:lookup(AccessData,require) of
[]when GrpOrUsr==users->
ets:insert(AccessData,{require,{{users,Members},{groups,[]}}});
[{require,{{users,Users},{groups,Groups}}}]when GrpOrUsr==users ->
ets:insert(AccessData,{require,{{users,Users++Members},
{groups,Groups}}});
[]when GrpOrUsr==groups->
ets:insert(AccessData,{require,{{users,[]},{groups,Members}}});
[{require,{{users,Users},{groups,Groups}}}]when GrpOrUsr==groups ->
ets:insert(AccessData,{require,{{users,Users},
{groups,Groups++Members}}})
end,
insertContext(AccessData,Values);
%%limit and order directives need no transforming; they are just inserted
insertContext(AccessData,[Elem|Values])->
ets:insert(AccessData,Elem),
insertContext(AccessData,Values).
insertDenyAllowContext(AccessData,{AllowDeny,From})->
case From of
all->
ets:insert(AccessData,{AllowDeny,all});
AllowedSubnets->
case ets:lookup(AccessData,AllowDeny) of
[]->
ets:insert(AccessData,{AllowDeny,From});
[{AllowDeny,all}]->
ok;
[{AllowDeny,Networks}]->
ets:insert(AccessData,{allow,Networks++From})
end
end.
loadAccessFilesData([],AccessData)->
%transform the collected context into values
contextToValues(AccessData),
{accessData,AccessData};
%----------------------------------------------------------------------
%Takes each file in the list and load the data to the ets table
%AccessData
%----------------------------------------------------------------------
loadAccessFilesData([FileName|FileNames],AccessData)->
case loadAccessFileData({file,FileName},AccessData) of
overRide->
loadAccessFilesData(FileNames,AccessData);
noOverRide ->
{accessData,AccessData};
error->
ets:delete(AccessData),
{error,errorInAccessFile}
end.
%----------------------------------------------------------------------
%opens the filehandle to the specified file
%----------------------------------------------------------------------
loadAccessFileData({file,FileName},AccessData)->
case file:open(FileName,[read]) of
{ok,AccessFileHandle}->
loadAccessFileData({stream,AccessFileHandle},AccessData,[]);
{error,Reason} ->
overRide
end.
%----------------------------------------------------------------------
%%look at each line in the file and add them to the database
%%When end of file is reached, control if override is allowed
%% if so return
%----------------------------------------------------------------------
loadAccessFileData({stream,File},AccessData,FileData)->
case io:get_line(File,[]) of
eof->
insertData(AccessData,FileData),
case ets:match_object(AccessData,{'_',error}) of
[]->
%In case we got no error, control that we can override
%at least some of the values
case ets:match_object(AccessData,
{allow_over_ride,none}) of
[]->
overRide;
_NoOverride->
noOverRide
end;
Errors->
error
end;
Line ->
loadAccessFileData({stream,File},AccessData,
insertLine(string:strip(Line,left),FileData))
end.
%----------------------------------------------------------------------
%AccessData is a ets table where the previous found data is inserted
%FileData is a list of the directives in the last parsed file
%before insertion a control is done that the directive is allowed to
%override
%----------------------------------------------------------------------
insertData(AccessData,{{context,Values},FileData})->
insertData(AccessData,[{context,Values}|FileData]);
insertData(AccessData,FileData)->
case ets:lookup(AccessData,allow_over_ride) of
[{allow_over_ride,all}]->
lists:foreach(fun(Elem)->
ets:insert(AccessData,Elem)
end,FileData);
[]->
lists:foreach(fun(Elem)->
ets:insert(AccessData,Elem)
end,FileData);
[{allow_over_ride,Directives}]when list(Directives)->
lists:foreach(fun({Key,Value})->
case lists:member(Key,Directives) of
true->
ok;
false ->
ets:insert(AccessData,{Key,Value})
end
end,FileData);
[{allow_over_ride,_}]->
%Will never appear if the user
%isn't doing very strange config files
ok
end.
%----------------------------------------------------------------------
%Take a line in the accessfile and transform it into a tuple that
%later can be inserted in to the ets:table
%----------------------------------------------------------------------
%%%Here is the alternatives that resides inside the limit context
insertLine([$o,$r,$d,$e,$r|Order],{{context,Values},FileData})->
{{context,[{order,getOrder(Order)}|Values]},FileData};
%%Let the user place a tab in the beginning
insertLine([$\t,$o,$r,$d,$e,$r|Order],{{context,Values},FileData})->
{{context,[{order,getOrder(Order)}|Values]},FileData};
insertLine([$a,$l,$l,$o,$w|Allow],{{context,Values},FileData})->
{{context,[{allow,getAllowDenyData(Allow)}|Values]},FileData};
insertLine([$\t,$a,$l,$l,$o,$w|Allow],{{context,Values},FileData})->
{{context,[{allow,getAllowDenyData(Allow)}|Values]},FileData};
insertLine([$d,$e,$n,$y|Deny],{{context,Values},FileData})->
{{context,[{deny,getAllowDenyData(Deny)}|Values]},FileData};
insertLine([$\t,$d,$e,$n,$y|Deny],{{context,Values},FileData})->
{{context,[{deny,getAllowDenyData(Deny)}|Values]},FileData};
insertLine([$r,$e,$q,$u,$i,$r,$e|Require],{{context,Values},FileData})->
{{context,[{require,getRequireData(Require)}|Values]},FileData};
insertLine([$\t,$r,$e,$q,$u,$i,$r,$e|Require],{{context,Values},FileData})->
{{context,[{require,getRequireData(Require)}|Values]},FileData};
insertLine([$<,$/,$L,$i,$m,$i,$t|EndLimit],{Context,FileData})->
[Context|FileData];
insertLine([$<,$L,$i,$m,$i,$t|Limit],FileData)->
{{context,[{limit,getLimits(Limit)}]}, FileData};
insertLine([$A,$u,$t,$h,$U,$s,$e,$r,$F,$i,$l,$e,$\ |AuthUserFile],FileData)->
[{user_file,string:strip(AuthUserFile,right,$\n)}|FileData];
insertLine([$A,$u,$t,$h,$G,$r,$o,$u,$p,$F,$i,$l,$e,$\ |AuthGroupFile],
FileData)->
[{group_file,string:strip(AuthGroupFile,right,$\n)}|FileData];
insertLine([$A,$l,$l,$o,$w,$O,$v,$e,$r,$R,$i,$d,$e|AllowOverRide],FileData)->
[{allow_over_ride,getAllowOverRideData(AllowOverRide)}
|FileData];
insertLine([$A,$u,$t,$h,$N,$a,$m,$e,$\ |AuthName],FileData)->
[{auth_name,string:strip(AuthName,right,$\n)}|FileData];
insertLine([$A,$u,$t,$h,$T,$y,$p,$e|AuthType],FileData)->
[{auth_type,getAuthorizationType(AuthType)}|FileData];
insertLine(_BadDirectiveOrComment,FileData)->
FileData.
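%% Illustrative sketch (hypothetical directives and paths; not part of the
%% original module): directives outside a Limit context are turned into
%% key/value tuples that are prepended to FileData.
%%   insertLine("AuthUserFile /var/www/.users\n", []) -> [{user_file,"/var/www/.users"}]
%%   insertLine("AuthName Secret Area\n", [])         -> [{auth_name,"Secret Area"}]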
%----------------------------------------------------------------------
%transform the Data specified about override to a form that is easier
%handled later
%Override data="all"|"none"|"Directive1 .... DirectiveN"
%----------------------------------------------------------------------
getAllowOverRideData(OverRideData)->
case string:tokens(OverRideData," \r\n") of
[[$a,$l,$l]|_]->
all;
[[$n,$o,$n,$e]|_]->
none;
Directives ->
getOverRideDirectives(Directives)
end.
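%% Illustrative sketch (hypothetical directive string; not part of the
%% original module):
%%   getAllowOverRideData(" all\n")                   -> all
%%   getAllowOverRideData(" AuthUserFile AuthName\n") -> [user_file, auth_name]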
getOverRideDirectives(Directives)->
lists:map(fun(Directive)->
transformDirective(Directive)
end,Directives).
transformDirective([$A,$u,$t,$h,$U,$s,$e,$r,$F,$i,$l,$e|_])->
user_file;
transformDirective([$A,$u,$t,$h,$G,$r,$o,$u,$p,$F,$i,$l,$e|_]) ->
group_file;
transformDirective([$A,$u,$t,$h,$N,$a,$m,$e|_])->
auth_name;
transformDirective([$A,$u,$t,$h,$T,$y,$p,$e|_])->
auth_type;
transformDirective(_UnAllowedOverRideDirective) ->
unallowed.
%----------------------------------------------------------------------
%Replace the string that specifies which method to use for authentication
%with the atom for easier matching
%----------------------------------------------------------------------
getAuthorizationType(AuthType)->
[Arg|Crap]=string:tokens(AuthType,"\n\r\ "),
case Arg of
[$B,$a,$s,$i,$c]->
basic;
[$M,$D,$5] ->
md5;
_What ->
error
end.
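%% Illustrative sketch (not part of the original module):
%%   getAuthorizationType(" Basic\n") -> basic
%%   getAuthorizationType(" MD5\n")   -> md5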
%----------------------------------------------------------------------
%Returns a list of the specified methods to limit or the atom all
%----------------------------------------------------------------------
getLimits(Limits)->
case regexp:split(Limits,">")of
{ok,[_NoEndOnLimit]}->
error;
{ok,[Methods|Crap]}->
case regexp:split(Methods," ")of
{ok,[]}->
all;
{ok,SplittedMethods}->
SplittedMethods;
{error,Error}->
error
end;
{error,_Error}->
error
end.
%----------------------------------------------------------------------
% Transform the order to perform deny/allow control into a tuple of atoms
%----------------------------------------------------------------------
getOrder(Order)->
[First|Rest]=lists:map(fun(Part)->
list_to_atom(Part)
end,string:tokens(Order," \n\r")),
case First of
deny->
{deny,allow};
allow->
{allow,deny};
_Error->
error
end.
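%% Illustrative sketch (not part of the original module):
%%   getOrder(" deny allow\n") -> {deny,allow}
%%   getOrder(" allow deny\n") -> {allow,deny}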
%----------------------------------------------------------------------
% The string AllowDeny is "from all" or "from Subnet1 Subnet2...SubnetN"
%----------------------------------------------------------------------
getAllowDenyData(AllowDeny)->
case string:tokens(AllowDeny," \n\r") of
[_From|AllowDenyData] when length(AllowDenyData)>=1->
case lists:nth(1,AllowDenyData) of
[$a,$l,$l]->
all;
Hosts->
AllowDenyData
end;
Error->
error
end.
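%% Illustrative sketch (hypothetical subnets; not part of the original module):
%%   getAllowDenyData(" from all\n")              -> all
%%   getAllowDenyData(" from 123.145 123.15.1\n") -> ["123.145","123.15.1"]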
%----------------------------------------------------------------------
% Fix the string that describes who is allowed to see the page
%----------------------------------------------------------------------
getRequireData(Require)->
[UserOrGroup|UserData]=string:tokens(Require," \n\r"),
case UserOrGroup of
[$u,$s,$e,$r]->
{users,UserData};
[$g,$r,$o,$u,$p] ->
{groups,UserData};
_Whatever ->
error
end.
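%% Illustrative sketch (hypothetical users and groups; not part of the
%% original module):
%%   getRequireData(" user anna bertil\n") -> {users,["anna","bertil"]}
%%   getRequireData(" group admins\n")     -> {groups,["admins"]}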
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% %%
%% Methods that collects the searchways to the accessfiles %%
%% %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%----------------------------------------------------------------------
% Get the whole path to the different accessfiles
%----------------------------------------------------------------------
getHtAccessFiles(HtAccessFileNames,Path,RestOfSplittedPath)->
getHtAccessFiles(HtAccessFileNames,Path,RestOfSplittedPath,[]).
getHtAccessFiles(HtAccessFileNames,Path,[[]],HtAccessFiles)->
HtAccessFiles ++ accessFilesOfPath(HtAccessFileNames,Path++"/");
getHtAccessFiles(HtAccessFileNames,Path,[],HtAccessFiles)->
HtAccessFiles;
getHtAccessFiles(HtAccessFileNames,Path,[NextDir|RestOfSplittedPath],
AccessFiles)->
getHtAccessFiles(HtAccessFileNames,Path++"/"++NextDir,RestOfSplittedPath,
AccessFiles ++
accessFilesOfPath(HtAccessFileNames,Path++"/")).
%----------------------------------------------------------------------
%Control if there are any accessfiles in the path
%----------------------------------------------------------------------
accessFilesOfPath(HtAccessFileNames,Path)->
lists:foldl(fun(HtAccessFileName,Files)->
case file:read_file_info(Path++HtAccessFileName) of
{ok,FileInfo}->
[Path++HtAccessFileName|Files];
{error,_Error} ->
Files
end
end,[],HtAccessFileNames).
%----------------------------------------------------------------------
%Take the splitted path and join it up to the document root or the alias
%that matches first
%----------------------------------------------------------------------
getRootPath(SplittedPath,Info)->
DocRoot=httpd_util:lookup(Info#mod.config_db,document_root,"/"),
PresumtiveRootPath=
[DocRoot|lists:map(fun({Alias,RealPath})->
RealPath
end,
httpd_util:multi_lookup(Info#mod.config_db,alias))],
getRootPath(PresumtiveRootPath,SplittedPath,Info).
getRootPath(PresumtiveRootPath,[[],Splittedpath],Info)->
getRootPath(PresumtiveRootPath,["/",Splittedpath],Info);
getRootPath(PresumtiveRootPath,[Part,NextPart|SplittedPath],Info)->
case lists:member(Part,PresumtiveRootPath)of
true->
{ok,Part,[NextPart|SplittedPath]};
false ->
getRootPath(PresumtiveRootPath,
[Part++"/"++NextPart|SplittedPath],Info)
end;
getRootPath(PresumtiveRootPath,[Part],Info)->
case lists:member(Part,PresumtiveRootPath)of
true->
{ok,Part,[]};
false ->
{error,Part}
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%Debug methods %%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%----------------------------------------------------------------------
% Simulate the webserver by calling do/1 with appropriate parameters
%----------------------------------------------------------------------
debug()->
Conf=getConfigData(),
Uri=getUri(),
{_Proceed,Data}=getDataFromAlias(Conf,Uri),
Init_data=#init_data{peername={socket,"127.0.0.1"}},
ParsedHeader=headerparts(),
do(#mod{init_data=Init_data,
data=Data,
config_db=Conf,
request_uri=Uri,
parsed_header=ParsedHeader,
method="GET"}).
%----------------------------------------------------------------------
%Add authentication data to the fake http-request header
%----------------------------------------------------------------------
headerparts()->
[{"authorization","Basic " ++ httpd_util:encode_base64("lotta:potta")}].
getDataFromAlias(Conf,Uri)->
mod_alias:do(#mod{config_db=Conf,request_uri=Uri}).
getUri()->
"/appmon/test/test.html".
getConfigData()->
Tab=ets:new(test_inets,[bag,public]),
ets:insert(Tab,{server_name,"localhost"}),
ets:insert(Tab,{bind_addresss,{127,0,0,1}}),
ets:insert(Tab,{erl_script_alias,{"/webcover/erl",["webcover"]}}),
ets:insert(Tab,{erl_script_alias,{"/erl",["webappmon"]}}),
ets:insert(Tab,{com_type,ip_comm}),
ets:insert(Tab,{modules,[mod_alias,mod_auth,mod_header]}),
ets:insert(Tab,{default_type,"text/plain"}),
ets:insert(Tab,{server_root,
"/home/gandalf/marting/exjobb/webtool-1.0/priv/root"}),
ets:insert(Tab,{port,8888}),
ets:insert(Tab,{document_root,
"/home/gandalf/marting/exjobb/webtool-1.0/priv/root"}),
ets:insert(Tab,
{alias,
{"/appmon"
,"/home/gandalf/marting/exjobb/webappmon-1.0/priv"}}),
ets:insert(Tab,{alias,
{"/webcover"
,"/home/gandalf/marting/exjobb/webcover-1.0/priv"}}),
ets:insert(Tab,{access_file,[".htaccess","kalle","pelle"]}),
Tab.
<|start_filename|>lib/crypto/src/crypto_ec_curves.erl<|end_filename|>
-module(crypto_ec_curves).
-export([curve/1, curves/0]).
curves() ->
CryptoSupport = crypto:supports(),
PubKeys = proplists:get_value(public_keys, CryptoSupport),
HasEC = proplists:get_bool(ecdh, PubKeys),
HasGF2m = proplists:get_bool(ec_gf2m, PubKeys),
prime_curves(HasEC) ++ characteristic_two_curves(HasGF2m).
prime_curves(true) ->
[secp112r1,secp112r2,secp128r1,secp128r2,secp160k1,secp160r1,secp160r2,
secp192r1,secp192k1,secp224k1,secp224r1,secp256k1,secp256r1,secp384r1,
secp521r1,prime192v1,prime192v2,prime192v3,prime239v1,prime239v2,prime239v3,
prime256v1,wtls6,wtls7,wtls8,wtls9,wtls12,
brainpoolP160r1,brainpoolP160t1,brainpoolP192r1,brainpoolP192t1,
brainpoolP224r1,brainpoolP224t1,brainpoolP256r1,brainpoolP256t1,
brainpoolP320r1,brainpoolP320t1,brainpoolP384r1,brainpoolP384t1,
brainpoolP512r1,brainpoolP512t1];
prime_curves(_) ->
[].
characteristic_two_curves(true) ->
[sect113r1,sect113r2,sect131r1,sect131r2,sect163k1,sect163r1,
sect163r2,sect193r1,sect193r2,sect233k1,sect233r1,sect239k1,sect283k1,
sect283r1,sect409k1,sect409r1,sect571k1,sect571r1,c2pnb163v1,c2pnb163v2,
c2pnb163v3,c2pnb176v1,c2tnb191v1,c2tnb191v2,c2tnb191v3,c2pnb208w1,c2tnb239v1,
c2tnb239v2,c2tnb239v3,c2pnb272w1,c2pnb304w1,c2tnb359v1,c2pnb368w1,c2tnb431r1,
wtls1,wtls3,wtls4,wtls5,wtls10,wtls11,ipsec3,ipsec4];
characteristic_two_curves(_) ->
[].
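%% Usage sketch (illustrative; the exact list depends on what the underlying
%% crypto library reports via crypto:supports/0):
%%   crypto_ec_curves:curves() -> [secp112r1, secp112r2, ...]
%%   crypto_ec_curves:curve(secp112r1) ->
%%       {{prime_field, Prime}, {A, B, Seed}, BasePoint, Order, CoFactor}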
curve(secp112r1) ->
{
{prime_field, <<16#DB7C2ABF62E35E668076BEAD208B:112>>}, %% Prime
{<<16#DB7C2ABF62E35E668076BEAD2088:112>>, %% A
<<16#659EF8BA043916EEDE8911702B22:112>>, %% B
<<16#00F50B028E4D696E676875615175290472783FB1:160>>}, %% Seed
<<16#04:8,
16#09487239995A5EE76B55F9C2F098:112, %% X(p0)
16#A89CE5AF8724C0A23E0E0FF77500:112>>, %% Y(p0)
<<16#DB7C2ABF62E35E7628DFAC6561C5:112>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp112r2) ->
{
{prime_field, <<16#DB7C2ABF62E35E668076BEAD208B:112>>}, %% Prime
{<<16#6127C24C05F38A0AAAF65C0EF02C:112>>, %% A
<<16#51DEF1815DB5ED74FCC34C85D709:112>>, %% B
<<16#002757A1114D696E6768756151755316C05E0BD4:160>>}, %% Seed
<<16#04:8,
16#4BA30AB5E892B4E1649DD0928643:112, %% X(p0)
16#ADCD46F5882E3747DEF36E956E97:112>>, %% Y(p0)
<<16#36DF0AAFD8B8D7597CA10520D04B:112>>, %% Order
<<16#04:8>> %% CoFactor
};
curve(secp128r1) ->
{
{prime_field, <<16#FFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF:128>>}, %% Prime
{<<16#FFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFC:128>>, %% A
<<16#E87579C11079F43DD824993C2CEE5ED3:128>>, %% B
<<16#000E0D4D696E6768756151750CC03A4473D03679:160>>}, %% Seed
<<16#04:8,
16#161FF7528B899B2D0C28607CA52C5B86:128, %% X(p0)
16#CF5AC8395BAFEB13C02DA292DDED7A83:128>>, %% Y(p0)
<<16#FFFFFFFE0000000075A30D1B9038A115:128>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp128r2) ->
{
{prime_field, <<16#FFFFFFFDFFFFFFFFFFFFFFFFFFFFFFFF:128>>}, %% Prime
{<<16#D6031998D1B3BBFEBF59CC9BBFF9AEE1:128>>, %% A
<<16#5EEEFCA380D02919DC2C6558BB6D8A5D:128>>, %% B
<<16#004D696E67687561517512D8F03431FCE63B88F4:160>>}, %% Seed
<<16#04:8,
16#7B6AA5D85E572983E6FB32A7CDEBC140:128, %% X(p0)
16#27B6916A894D3AEE7106FE805FC34B44:128>>, %% Y(p0)
<<16#3FFFFFFF7FFFFFFFBE0024720613B5A3:128>>, %% Order
<<16#04:8>> %% CoFactor
};
curve(secp160k1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73:160>>}, %% Prime
{<<16#00:8>>, %% A
<<16#07:8>>, %% B
none}, %% Seed
<<16#04:8,
16#3B4C382CE37AA192A4019E763036F4F5DD4D7EBB:160, %% X(p0)
16#938CF935318FDCED6BC28286531733C3F03C4FEE:160>>, %% Y(p0)
<<16#0100000000000000000001B8FA16DFAB9ACA16B6B3:168>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp160r1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFF:160>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF7FFFFFFC:160>>, %% A
<<16#1C97BEFC54BD7A8B65ACF89F81D4D4ADC565FA45:160>>, %% B
<<16#1053CDE42C14D696E67687561517533BF3F83345:160>>}, %% Seed
<<16#04:8,
16#4A96B5688EF573284664698968C38BB913CBFC82:160, %% X(p0)
16#23A628553168947D59DCC912042351377AC5FB32:160>>, %% Y(p0)
<<16#0100000000000000000001F4C8F927AED3CA752257:168>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp160r2) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73:160>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC70:160>>, %% A
<<16#B4E134D3FB59EB8BAB57274904664D5AF50388BA:160>>, %% B
<<16#B99B99B099B323E02709A4D696E6768756151751:160>>}, %% Seed
<<16#04:8,
16#52DCB034293A117E1F4FF11B30F7199D3144CE6D:160, %% X(p0)
16#FEAFFEF2E331F296E071FA0DF9982CFEA7D43F2E:160>>, %% Y(p0)
<<16#0100000000000000000000351EE786A818F3A1A16B:168>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp192r1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF:192>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC:192>>, %% A
<<16#64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1:192>>, %% B
<<16#3045AE6FC8422F64ED579528D38120EAE12196D5:160>>}, %% Seed
<<16#04:8,
16#188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012:192, %% X(p0)
16#07192B95FFC8DA78631011ED6B24CDD573F977A11E794811:192>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831:192>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp192k1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFEE37:192>>}, %% Prime
{<<16#00:8>>, %% A
<<16#03:8>>, %% B
none}, %% Seed
<<16#04:8,
16#DB4FF10EC057E9AE26B07D0280B7F4341DA5D1B1EAE06C7D:192, %% X(p0)
16#9B2F2F6D9C5628A7844163D015BE86344082AA88D95E2F9D:192>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFFE26F2FC170F69466A74DEFD8D:192>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp224k1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFE56D:224>>}, %% Prime
{<<16#00:8>>, %% A
<<16#05:8>>, %% B
none}, %% Seed
<<16#04:8,
16#A1455B334DF099DF30FC28A169A467E9E47075A90F7E650EB6B7A45C:224, %% X(p0)
16#7E089FED7FBA344282CAFBD6F7E319F7C0B0BD59E2CA4BDB556D61A5:224>>, %% Y(p0)
<<16#010000000000000000000000000001DCE8D2EC6184CAF0A971769FB1F7:232>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp224r1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000001:224>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFFFFFFFFFE:224>>, %% A
<<16#B4050A850C04B3ABF54132565044B0B7D7BFD8BA270B39432355FFB4:224>>, %% B
<<16#BD71344799D5C7FCDC45B59FA3B9AB8F6A948BC5:160>>}, %% Seed
<<16#04:8,
16#B70E0CBD6BB4BF7F321390B94A03C1D356C21122343280D6115C1D21:224, %% X(p0)
16#BD376388B5F723FB4C22DFE6CD4375A05A07476444D5819985007E34:224>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D:224>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp256k1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F:256>>}, %% Prime
{<<16#00:8>>, %% A
<<16#07:8>>, %% B
none}, %% Seed
<<16#04:8,
16#79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798:256, %% X(p0)
16#483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8:256>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141:256>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp256r1) ->
{
{prime_field, <<16#FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF:256>>}, %% Prime
{<<16#FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFC:256>>, %% A
<<16#5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B:256>>, %% B
<<16#C49D360886E704936A6678E1139D26B7819F7E90:160>>}, %% Seed
<<16#04:8,
16#6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296:256, %% X(p0)
16#4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5:256>>, %% Y(p0)
<<16#FFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551:256>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(secp384r1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE:256, %% Prime
16#FFFFFFFF0000000000000000FFFFFFFF:128>>},
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE:256, %% A
16#FFFFFFFF0000000000000000FFFFFFFC:128>>,
<<16#B3312FA7E23EE7E4988E056BE3F82D19181D9C6EFE8141120314088F5013875A:256, %% B
16#C656398D8A2ED19D2A85C8EDD3EC2AEF:128>>,
<<16#A335926AA319A27A1D00896A6773A4827ACDAC73:160>>}, %% Seed
<<16#04:8,
16#AA87CA22BE8B05378EB1C71EF320AD746E1D3B628BA79B9859F741E082542A38:256, %% X(p0)
16#5502F25DBF55296C3A545E3872760AB7:128,
16#3617DE4A96262C6F5D9E98BF9292DC29F8F41DBD289A147CE9DA3113B5F0B8C0:256, %% Y(p0)
16#0A60B1CE1D7E819D7A431D7C90EA0E5F:128>>,
<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC7634D81F4372DDF:256, %% Order
16#581A0DB248B0A77AECEC196ACCC52973:128>>,
<<16#01:8>> %% CoFactor
};
curve(secp521r1) ->
{
{prime_field, <<16#01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:256, %% Prime
16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:256,
16#FFFF:16>>},
{<<16#01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:256, %% A
16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:256,
16#FFFC:16>>,
<<16#51953EB9618E1C9A1F929A21A0B68540EEA2DA725B99B315F3B8B489918EF109:256, %% B
16#E156193951EC7E937B1652C0BD3BB1BF073573DF883D2C34F1EF451FD46B503F:256,
16#00:8>>,
<<16#D09E8800291CB85396CC6717393284AAA0DA64BA:160>>}, %% Seed
<<16#04:8,
16#00C6858E06B70404E9CD9E3ECB662395B4429C648139053FB521F828AF606B4D:256, %% X(p0)
16#3DBAA14B5E77EFE75928FE1DC127A2FFA8DE3348B3C1856A429BF97E7E31C2E5:256,
16#BD66:16,
16#011839296A789A3BC0045C8A5FB42C7D1BD998F54449579B446817AFBD17273E:256, %% Y(p0)
16#662C97EE72995EF42640C550B9013FAD0761353C7086A272C24088BE94769FD1:256,
16#6650:16>>,
<<16#01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:256, %% Order
16#FFFA51868783BF2F966B7FCC0148F709A5D03BB5C9B8899C47AEBB6FB71E9138:256,
16#6409:16>>,
<<16#01:8>> %% CoFactor
};
curve(prime192v1) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF:192>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC:192>>, %% A
<<16#64210519E59C80E70FA7E9AB72243049FEB8DEECC146B9B1:192>>, %% B
<<16#3045AE6FC8422F64ED579528D38120EAE12196D5:160>>}, %% Seed
<<16#04:8,
16#188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012:192, %% X(p0)
16#07192B95FFC8DA78631011ED6B24CDD573F977A11E794811:192>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFFF99DEF836146BC9B1B4D22831:192>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(prime192v2) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF:192>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC:192>>, %% A
<<16#CC22D6DFB95C6B25E49C0D6364A4E5980C393AA21668D953:192>>, %% B
<<16#31A92EE2029FD10D901B113E990710F0D21AC6B6:160>>}, %% Seed
<<16#04:8,
16#EEA2BAE7E1497842F2DE7769CFE9C989C072AD696F48034A:192, %% X(p0)
16#6574D11D69B6EC7A672BB82A083DF2F2B0847DE970B2DE15:192>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFFE5FB1A724DC80418648D8DD31:192>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(prime192v3) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFF:192>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFC:192>>, %% A
<<16#22123DC2395A05CAA7423DAECCC94760A7D462256BD56916:192>>, %% B
<<16#C469684435DEB378C4B65CA9591E2A5763059A2E:160>>}, %% Seed
<<16#04:8,
16#7D29778100C65A1DA1783716588DCE2B8B4AEE8E228F1896:192, %% X(p0)
16#38A90F22637337334B49DCB66A6DC8F9978ACA7648A943B0:192>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFFF7A62D031C83F4294F640EC13:192>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(prime239v1) ->
{
{prime_field, <<16#7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFF:240>>}, %% Prime
{<<16#7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFC:240>>, %% A
<<16#6B016C3BDCF18941D0D654921475CA71A9DB2FB27D1D37796185C2942C0A:240>>, %% B
<<16#E43BB460F0B80CC0C0B075798E948060F8321B7D:160>>}, %% Seed
<<16#04:8,
16#0FFA963CDCA8816CCC33B8642BEDF905C3D358573D3F27FBBD3B3CB9AAAF:240, %% X(p0)
16#7DEBE8E4E90A5DAE6E4054CA530BA04654B36818CE226B39FCCB7B02F1AE:240>>, %% Y(p0)
<<16#7FFFFFFFFFFFFFFFFFFFFFFF7FFFFF9E5E9A9F5D9071FBD1522688909D0B:240>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(prime239v2) ->
{
{prime_field, <<16#7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFF:240>>}, %% Prime
{<<16#7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFC:240>>, %% A
<<16#617FAB6832576CBBFED50D99F0249C3FEE58B94BA0038C7AE84C8C832F2C:240>>, %% B
<<16#E8B4011604095303CA3B8099982BE09FCB9AE616:160>>}, %% Seed
<<16#04:8,
16#38AF09D98727705120C921BB5E9E26296A3CDCF2F35757A0EAFD87B830E7:240, %% X(p0)
16#5B0125E4DBEA0EC7206DA0FC01D9B081329FB555DE6EF460237DFF8BE4BA:240>>, %% Y(p0)
<<16#7FFFFFFFFFFFFFFFFFFFFFFF800000CFA7E8594377D414C03821BC582063:240>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(prime239v3) ->
{
{prime_field, <<16#7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFF:240>>}, %% Prime
{<<16#7FFFFFFFFFFFFFFFFFFFFFFF7FFFFFFFFFFF8000000000007FFFFFFFFFFC:240>>, %% A
<<16#255705FA2A306654B1F4CB03D6A750A30C250102D4988717D9BA15AB6D3E:240>>, %% B
<<16#7D7374168FFE3471B60A857686A19475D3BFA2FF:160>>}, %% Seed
<<16#04:8,
16#6768AE8E18BB92CFCF005C949AA2C6D94853D0E660BBF854B1C9505FE95A:240, %% X(p0)
16#1607E6898F390C06BC1D552BAD226F3B6FCFE48B6E818499AF18E3ED6CF3:240>>, %% Y(p0)
<<16#7FFFFFFFFFFFFFFFFFFFFFFF7FFFFF975DEB41B3A6057C3C432146526551:240>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(prime256v1) ->
{
{prime_field, <<16#FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF:256>>}, %% Prime
{<<16#FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFC:256>>, %% A
<<16#5AC635D8AA3A93E7B3EBBD55769886BC651D06B0CC53B0F63BCE3C3E27D2604B:256>>, %% B
<<16#C49D360886E704936A6678E1139D26B7819F7E90:160>>}, %% Seed
<<16#04:8,
16#6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296:256, %% X(p0)
16#4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5:256>>, %% Y(p0)
<<16#FFFFFFFF00000000FFFFFFFFFFFFFFFFBCE6FAADA7179E84F3B9CAC2FC632551:256>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(sect113r1) ->
{
{characteristic_two_field, 113, {tpbasis,9}},
{<<16#3088250CA6E7C7FE649CE85820F7:112>>, %% A
<<16#E8BEE4D3E2260744188BE0E9C723:112>>, %% B
<<16#10E723AB14D696E6768756151756FEBF8FCB49A9:160>>}, %% Seed
<<16#04:8,
16#009D73616F35F4AB1407D73562C10F:120, %% X(p0)
16#00A52830277958EE84D1315ED31886:120>>, %% Y(p0)
<<16#0100000000000000D9CCEC8A39E56F:120>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect113r2) ->
{
{characteristic_two_field, 113, {tpbasis,9}},
{<<16#689918DBEC7E5A0DD6DFC0AA55C7:112>>, %% A
<<16#95E9A9EC9B297BD4BF36E059184F:112>>, %% B
<<16#10C0FB15760860DEF1EEF4D696E676875615175D:160>>}, %% Seed
<<16#04:8,
16#01A57A6A7B26CA5EF52FCDB8164797:120, %% X(p0)
16#00B3ADC94ED1FE674C06E695BABA1D:120>>, %% Y(p0)
<<16#010000000000000108789B2496AF93:120>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect131r1) ->
{
{characteristic_two_field, 131, {ppbasis,2,3,8}},
{<<16#07A11B09A76B562144418FF3FF8C2570B8:136>>, %% A
<<16#0217C05610884B63B9C6C7291678F9D341:136>>, %% B
<<16#4D696E676875615175985BD3ADBADA21B43A97E2:160>>}, %% Seed
<<16#04:8,
16#0081BAF91FDF9833C40F9C181343638399:136, %% X(p0)
16#078C6E7EA38C001F73C8134B1B4EF9E150:136>>, %% Y(p0)
<<16#0400000000000000023123953A9464B54D:136>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect131r2) ->
{
{characteristic_two_field, 131, {ppbasis,2,3,8}},
{<<16#03E5A88919D7CAFCBF415F07C2176573B2:136>>, %% A
<<16#04B8266A46C55657AC734CE38F018F2192:136>>, %% B
<<16#985BD3ADBAD4D696E676875615175A21B43A97E3:160>>}, %% Seed
<<16#04:8,
16#0356DCD8F2F95031AD652D23951BB366A8:136, %% X(p0)
16#0648F06D867940A5366D9E265DE9EB240F:136>>, %% Y(p0)
<<16#0400000000000000016954A233049BA98F:136>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect163k1) ->
{
{characteristic_two_field, 163, {ppbasis,3,6,7}},
{<<16#01:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#02FE13C0537BBC11ACAA07D793DE4E6D5E5C94EEE8:168, %% X(p0)
16#0289070FB05D38FF58321F2E800536D538CCDAA3D9:168>>, %% Y(p0)
<<16#04000000000000000000020108A2E0CC0D99F8A5EF:168>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect163r1) ->
{
{characteristic_two_field, 163, {ppbasis,3,6,7}},
{<<16#07B6882CAAEFA84F9554FF8428BD88E246D2782AE2:168>>, %% A
<<16#0713612DCDDCB40AAB946BDA29CA91F73AF958AFD9:168>>, %% B
none}, %% Seed
<<16#04:8,
16#0369979697AB43897789566789567F787A7876A654:168, %% X(p0)
16#00435EDB42EFAFB2989D51FEFCE3C80988F41FF883:168>>, %% Y(p0)
<<16#03FFFFFFFFFFFFFFFFFFFF48AAB689C29CA710279B:168>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect163r2) ->
{
{characteristic_two_field, 163, {ppbasis,3,6,7}},
{<<16#01:8>>, %% A
<<16#020A601907B8C953CA1481EB10512F78744A3205FD:168>>, %% B
none}, %% Seed
<<16#04:8,
16#03F0EBA16286A2D57EA0991168D4994637E8343E36:168, %% X(p0)
16#00D51FBC6C71A0094FA2CDD545B11C5C0C797324F1:168>>, %% Y(p0)
<<16#040000000000000000000292FE77E70C12A4234C33:168>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect193r1) ->
{
{characteristic_two_field, 193, {tpbasis,15}},
{<<16#17858FEB7A98975169E171F77B4087DE098AC8A911DF7B01:192>>, %% A
<<16#FDFB49BFE6C3A89FACADAA7A1E5BBC7CC1C2E5D831478814:192>>, %% B
<<16#103FAEC74D696E676875615175777FC5B191EF30:160>>}, %% Seed
<<16#04:8,
16#01F481BC5F0FF84A74AD6CDF6FDEF4BF6179625372D8C0C5E1:200, %% X(p0)
16#0025E399F2903712CCF3EA9E3A1AD17FB0B3201B6AF7CE1B05:200>>, %% Y(p0)
<<16#01000000000000000000000000C7F34A778F443ACC920EBA49:200>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect193r2) ->
{
{characteristic_two_field, 193, {tpbasis,15}},
{<<16#0163F35A5137C2CE3EA6ED8667190B0BC43ECD69977702709B:200>>, %% A
<<16#C9BB9E8927D4D64C377E2AB2856A5B16E3EFB7F61D4316AE:192>>, %% B
<<16#10B7B4D696E676875615175137C8A16FD0DA2211:160>>}, %% Seed
<<16#04:8,
16#00D9B67D192E0367C803F39E1A7E82CA14A651350AAE617E8F:200, %% X(p0)
16#01CE94335607C304AC29E7DEFBD9CA01F596F927224CDECF6C:200>>, %% Y(p0)
<<16#010000000000000000000000015AAB561B005413CCD4EE99D5:200>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect233k1) ->
{
{characteristic_two_field, 233, {tpbasis,74}},
{<<16#00:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#017232BA853A7E731AF129F22FF4149563A419C26BF50A4C9D6EEFAD6126:240, %% X(p0)
16#01DB537DECE819B7F70F555A67C427A8CD9BF18AEB9B56E0C11056FAE6A3:240>>, %% Y(p0)
<<16#8000000000000000000000000000069D5BB915BCD46EFB1AD5F173ABDF:232>>, %% Order
<<16#04:8>> %% CoFactor
};
curve(sect233r1) ->
{
{characteristic_two_field, 233, {tpbasis,74}},
{<<16#01:8>>, %% A
<<16#66647EDE6C332C7F8C0923BB58213B333B20E9CE4281FE115F7D8F90AD:232>>, %% B
<<16#74D59FF07F6B413D0EA14B344B20A2DB049B50C3:160>>}, %% Seed
<<16#04:8,
16#00FAC9DFCBAC8313BB2139F1BB755FEF65BC391F8B36F8F8EB7371FD558B:240, %% X(p0)
16#01006A08A41903350678E58528BEBF8A0BEFF867A7CA36716F7E01F81052:240>>, %% Y(p0)
<<16#01000000000000000000000000000013E974E72F8A6922031D2603CFE0D7:240>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(sect239k1) ->
{
{characteristic_two_field, 239, {tpbasis,158}},
{<<16#00:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#29A0B6A887A983E9730988A68727A8B2D126C44CC2CC7B2A6555193035DC:240, %% X(p0)
16#76310804F12E549BDB011C103089E73510ACB275FC312A5DC6B76553F0CA:240>>, %% Y(p0)
<<16#2000000000000000000000000000005A79FEC67CB6E91F1C1DA800E478A5:240>>, %% Order
<<16#04:8>> %% CoFactor
};
curve(sect283k1) ->
{
{characteristic_two_field, 283, {ppbasis,5,7,12}},
{<<16#00:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#0503213F78CA44883F1A3B8162F188E553CD265F23C1567A16876913B0C2AC24:256, %% X(p0)
16#58492836:32,
16#01CCDA380F1C9E318D90F95D07E5426FE87E45C0E8184698E45962364E341161:256, %% Y(p0)
16#77DD2259:32>>,
<<16#01FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE9AE2ED07577265DFF7F94451E06:256, %% Order
16#1E163C61:32>>,
<<16#04:8>> %% CoFactor
};
curve(sect283r1) ->
{
{characteristic_two_field, 283, {ppbasis,5,7,12}},
{<<16#01:8>>, %% A
<<16#027B680AC8B8596DA5A4AF8A19A0303FCA97FD7645309FA2A581485AF6263E31:256, %% B
16#3B79A2F5:32>>,
<<16#77E2B07370EB0F832A6DD5B62DFC88CD06BB84BE:160>>}, %% Seed
<<16#04:8,
16#05F939258DB7DD90E1934F8C70B0DFEC2EED25B8557EAC9C80E2E198F8CDBECD:256, %% X(p0)
16#86B12053:32,
16#03676854FE24141CB98FE6D4B20D02B4516FF702350EDDB0826779C813F0DF45:256, %% Y(p0)
16#BE8112F4:32>>,
<<16#03FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEF90399660FC938A90165B042A7C:256, %% Order
16#EFADB307:32>>,
<<16#02:8>> %% CoFactor
};
curve(sect409k1) ->
{
{characteristic_two_field, 409, {tpbasis,87}},
{<<16#00:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#0060F05F658F49C1AD3AB1890F7184210EFD0987E307C84C27ACCFB8F9F67CC2:256, %% X(p0)
16#C460189EB5AAAA62EE222EB1B35540CFE9023746:160,
16#01E369050B7C4E42ACBA1DACBF04299C3460782F918EA427E6325165E9EA10E3:256, %% Y(p0)
16#DA5F6C42E9C55215AA9CA27A5863EC48D8E0286B:160>>,
<<16#7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE5F83B2D4EA20:256, %% Order
16#400EC4557D5ED3E3E7CA5B4B5C83B8E01E5FCF:152>>,
<<16#04:8>> %% CoFactor
};
curve(sect409r1) ->
{
{characteristic_two_field, 409, {tpbasis,87}},
{<<16#01:8>>, %% A
<<16#21A5C2C8EE9FEB5C4B9A753B7B476B7FD6422EF1F3DD674761FA99D6AC27C8A9:256, %% B
16#A197B272822F6CD57A55AA4F50AE317B13545F:152>>,
<<16#4099B5A457F9D69F79213D094C4BCD4D4262210B:160>>}, %% Seed
<<16#04:8,
16#015D4860D088DDB3496B0C6064756260441CDE4AF1771D4DB01FFE5B34E59703:256, %% X(p0)
16#DC255A868A1180515603AEAB60794E54BB7996A7:160,
16#0061B1CFAB6BE5F32BBFA78324ED106A7636B9C5A7BD198D0158AA4F5488D08F:256, %% Y(p0)
16#38514F1FDF4B4F40D2181B3681C364BA0273C706:160>>,
<<16#010000000000000000000000000000000000000000000000000001E2AAD6A612:256, %% Order
16#F33307BE5FA47C3C9E052F838164CD37D9A21173:160>>,
<<16#02:8>> %% CoFactor
};
curve(sect571k1) ->
{
{characteristic_two_field, 571, {ppbasis,2,5,10}},
{<<16#00:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#026EB7A859923FBC82189631F8103FE4AC9CA2970012D5D46024804801841CA4:256, %% X(p0)
16#4370958493B205E647DA304DB4CEB08CBBD1BA39494776FB988B47174DCA88C7:256,
16#E2945283A01C8972:64,
16#0349DC807F4FBF374F4AEADE3BCA95314DD58CEC9F307A54FFC61EFC006D8A2C:256, %% Y(p0)
16#9D4979C0AC44AEA74FBEBBB9F772AEDCB620B01A7BA7AF1B320430C8591984F6:256,
16#01CD4C143EF1C7A3:64>>,
<<16#0200000000000000000000000000000000000000000000000000000000000000:256, %% Order
16#00000000131850E1F19A63E4B391A8DB917F4138B630D84BE5D639381E91DEB4:256,
16#5CFE778F637C1001:64>>,
<<16#04:8>> %% CoFactor
};
curve(sect571r1) ->
{
{characteristic_two_field, 571, {ppbasis,2,5,10}},
{<<16#01:8>>, %% A
<<16#02F40E7E2221F295DE297117B7F3D62F5C6A97FFCB8CEFF1CD6BA8CE4A9A18AD:256, %% B
16#84FFABBD8EFA59332BE7AD6756A66E294AFD185A78FF12AA520E4DE739BACA0C:256,
16#7FFEFF7F2955727A:64>>,
<<16#2AA058F73A0E33AB486B0F610410C53A7F132310:160>>}, %% Seed
<<16#04:8,
16#0303001D34B856296C16C0D40D3CD7750A93D1D2955FA80AA5F40FC8DB7B2ABD:256, %% X(p0)
16#BDE53950F4C0D293CDD711A35B67FB1499AE60038614F1394ABFA3B4C850D927:256,
16#E1E7769C8EEC2D19:64,
16#037BF27342DA639B6DCCFFFEB73D69D78C6C27A6009CBBCA1980F8533921E8A6:256, %% Y(p0)
16#84423E43BAB08A576291AF8F461BB2A8B3531D2F0485C19B16E2F1516E23DD3C:256,
16#1A4827AF1B8AC15B:64>>,
<<16#03FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF:256, %% Order
16#FFFFFFFFE661CE18FF55987308059B186823851EC7DD9CA1161DE93D5174D66E:256,
16#8382E9BB2FE84E47:64>>,
<<16#02:8>> %% CoFactor
};
curve(c2pnb163v1) ->
{
{characteristic_two_field, 163, {ppbasis,1,2,8}},
{<<16#072546B5435234A422E0789675F432C89435DE5242:168>>, %% A
<<16#C9517D06D5240D3CFF38C74B20B6CD4D6F9DD4D9:160>>, %% B
<<16#D2C0FB15760860DEF1EEF4D696E6768756151754:160>>}, %% Seed
<<16#04:8,
16#07AF69989546103D79329FCC3D74880F33BBE803CB:168, %% X(p0)
16#01EC23211B5966ADEA1D3F87F7EA5848AEF0B7CA9F:168>>, %% Y(p0)
<<16#0400000000000000000001E60FC8821CC74DAEAFC1:168>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(c2pnb163v2) ->
{
{characteristic_two_field, 163, {ppbasis,1,2,8}},
{<<16#0108B39E77C4B108BED981ED0E890E117C511CF072:168>>, %% A
<<16#0667ACEB38AF4E488C407433FFAE4F1C811638DF20:168>>, %% B
<<16#53814C050D44D696E67687561517580CA4E29FFD:160>>}, %% Seed
<<16#04:8,
16#0024266E4EB5106D0A964D92C4860E2671DB9B6CC5:168, %% X(p0)
16#079F684DDF6684C5CD258B3890021B2386DFD19FC5:168>>, %% Y(p0)
<<16#03FFFFFFFFFFFFFFFFFFFDF64DE1151ADBB78F10A7:168>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(c2pnb163v3) ->
{
{characteristic_two_field, 163, {ppbasis,1,2,8}},
{<<16#07A526C63D3E25A256A007699F5447E32AE456B50E:168>>, %% A
<<16#03F7061798EB99E238FD6F1BF95B48FEEB4854252B:168>>, %% B
<<16#50CBF1D95CA94D696E676875615175F16A36A3B8:160>>}, %% Seed
<<16#04:8,
16#02F9F87B7C574D0BDECF8A22E6524775F98CDEBDCB:168, %% X(p0)
16#05B935590C155E17EA48EB3FF3718B893DF59A05D0:168>>, %% Y(p0)
<<16#03FFFFFFFFFFFFFFFFFFFE1AEE140F110AFF961309:168>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(c2pnb176v1) ->
{
{characteristic_two_field, 176, {ppbasis,1,2,43}},
{<<16#E4E6DB2995065C407D9D39B8D0967B96704BA8E9C90B:176>>, %% A
<<16#5DDA470ABE6414DE8EC133AE28E9BBD7FCEC0AE0FFF2:176>>, %% B
none}, %% Seed
<<16#04:8,
16#8D16C2866798B600F9F08BB4A8E860F3298CE04A5798:176, %% X(p0)
16#6FA4539C2DADDDD6BAB5167D61B436E1D92BB16A562C:176>>, %% Y(p0)
<<16#010092537397ECA4F6145799D62B0A19CE06FE26AD:168>>, %% Order
<<16#FF6E:16>> %% CoFactor
};
curve(c2tnb191v1) ->
{
{characteristic_two_field, 191, {tpbasis,9}},
{<<16#2866537B676752636A68F56554E12640276B649EF7526267:192>>, %% A
<<16#2E45EF571F00786F67B0081B9495A3D95462F5DE0AA185EC:192>>, %% B
<<16#4E13CA542744D696E67687561517552F279A8C84:160>>}, %% Seed
<<16#04:8,
16#36B3DAF8A23206F9C4F299D7B21A9C369137F2C84AE1AA0D:192, %% X(p0)
16#765BE73433B3F95E332932E70EA245CA2418EA0EF98018FB:192>>, %% Y(p0)
<<16#40000000000000000000000004A20E90C39067C893BBB9A5:192>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(c2tnb191v2) ->
{
{characteristic_two_field, 191, {tpbasis,9}},
{<<16#401028774D7777C7B7666D1366EA432071274F89FF01E718:192>>, %% A
<<16#0620048D28BCBD03B6249C99182B7C8CD19700C362C46A01:192>>, %% B
<<16#0871EF2FEF24D696E6768756151758BEE0D95C15:160>>}, %% Seed
<<16#04:8,
16#3809B2B7CC1B28CC5A87926AAD83FD28789E81E2C9E3BF10:192, %% X(p0)
16#17434386626D14F3DBF01760D9213A3E1CF37AEC437D668A:192>>, %% Y(p0)
<<16#20000000000000000000000050508CB89F652824E06B8173:192>>, %% Order
<<16#04:8>> %% CoFactor
};
curve(c2tnb191v3) ->
{
{characteristic_two_field, 191, {tpbasis,9}},
{<<16#6C01074756099122221056911C77D77E77A777E7E7E77FCB:192>>, %% A
<<16#71FE1AF926CF847989EFEF8DB459F66394D90F32AD3F15E8:192>>, %% B
<<16#E053512DC684D696E676875615175067AE786D1F:160>>}, %% Seed
<<16#04:8,
16#375D4CE24FDE434489DE8746E71786015009E66E38A926DD:192, %% X(p0)
16#545A39176196575D985999366E6AD34CE0A77CD7127B06BE:192>>, %% Y(p0)
<<16#155555555555555555555555610C0B196812BFB6288A3EA3:192>>, %% Order
<<16#06:8>> %% CoFactor
};
curve(c2pnb208w1) ->
{
{characteristic_two_field, 208, {ppbasis,1,2,83}},
{<<16#00:8>>, %% A
<<16#C8619ED45A62E6212E1160349E2BFA844439FAFC2A3FD1638F9E:208>>, %% B
none}, %% Seed
<<16#04:8,
16#89FDFBE4ABE193DF9559ECF07AC0CE78554E2784EB8C1ED1A57A:208, %% X(p0)
16#0F55B51A06E78E9AC38A035FF520D8B01781BEB1A6BB08617DE3:208>>, %% Y(p0)
<<16#0101BAF95C9723C57B6C21DA2EFF2D5ED588BDD5717E212F9D:200>>, %% Order
<<16#FE48:16>> %% CoFactor
};
curve(c2tnb239v1) ->
{
{characteristic_two_field, 239, {tpbasis,36}},
{<<16#32010857077C5431123A46B808906756F543423E8D27877578125778AC76:240>>, %% A
<<16#790408F2EEDAF392B012EDEFB3392F30F4327C0CA3F31FC383C422AA8C16:240>>, %% B
<<16#D34B9A4D696E676875615175CA71B920BFEFB05D:160>>}, %% Seed
<<16#04:8,
16#57927098FA932E7C0A96D3FD5B706EF7E5F5C156E16B7E7C86038552E91D:240, %% X(p0)
16#61D8EE5077C33FECF6F1A16B268DE469C3C7744EA9A971649FC7A9616305:240>>, %% Y(p0)
<<16#2000000000000000000000000000000F4D42FFE1492A4993F1CAD666E447:240>>, %% Order
<<16#04:8>> %% CoFactor
};
curve(c2tnb239v2) ->
{
{characteristic_two_field, 239, {tpbasis,36}},
{<<16#4230017757A767FAE42398569B746325D45313AF0766266479B75654E65F:240>>, %% A
<<16#5037EA654196CFF0CD82B2C14A2FCF2E3FF8775285B545722F03EACDB74B:240>>, %% B
<<16#2AA6982FDFA4D696E676875615175D266727277D:160>>}, %% Seed
<<16#04:8,
16#28F9D04E900069C8DC47A08534FE76D2B900B7D7EF31F5709F200C4CA205:240, %% X(p0)
16#5667334C45AFF3B5A03BAD9DD75E2C71A99362567D5453F7FA6E227EC833:240>>, %% Y(p0)
<<16#1555555555555555555555555555553C6F2885259C31E3FCDF154624522D:240>>, %% Order
<<16#06:8>> %% CoFactor
};
curve(c2tnb239v3) ->
{
{characteristic_two_field, 239, {tpbasis,36}},
{<<16#01238774666A67766D6676F778E676B66999176666E687666D8766C66A9F:240>>, %% A
<<16#6A941977BA9F6A435199ACFC51067ED587F519C5ECB541B8E44111DE1D40:240>>, %% B
<<16#9E076F4D696E676875615175E11E9FDD77F92041:160>>}, %% Seed
<<16#04:8,
16#70F6E9D04D289C4E89913CE3530BFDE903977D42B146D539BF1BDE4E9C92:240, %% X(p0)
16#2E5A0EAF6E5E1305B9004DCE5C0ED7FE59A35608F33837C816D80B79F461:240>>, %% Y(p0)
<<16#0CCCCCCCCCCCCCCCCCCCCCCCCCCCCCAC4912D2D9DF903EF9888B8A0E4CFF:240>>, %% Order
<<16#0A:8>> %% CoFactor
};
curve(c2pnb272w1) ->
{
{characteristic_two_field, 272, {ppbasis,1,3,56}},
{<<16#91A091F03B5FBA4AB2CCF49C4EDD220FB028712D42BE752B2C40094DBACDB586:256, %% A
16#FB20:16>>,
<<16#7167EFC92BB2E3CE7C8AAAFF34E12A9C557003D7C73A6FAF003F99F6CC8482E5:256, %% B
16#40F7:16>>,
none}, %% Seed
<<16#04:8,
16#6108BABB2CEEBCF787058A056CBE0CFE622D7723A289E08A07AE13EF0D10D171:256, %% X(p0)
16#DD8D:16,
16#10C7695716851EEF6BA7F6872E6142FBD241B830FF5EFCACECCAB05E02005DDE:256, %% Y(p0)
16#9D23:16>>,
<<16#0100FAF51354E0E39E4892DF6E319C72C8161603FA45AA7B998A167B8F1E6295:256, %% Order
16#21:8>>,
<<16#FF06:16>> %% CoFactor
};
curve(c2pnb304w1) ->
{
{characteristic_two_field, 304, {ppbasis,1,2,11}},
{<<16#FD0D693149A118F651E6DCE6802085377E5F882D1B510B44160074C128807836:256, %% A
16#5A0396C8E681:48>>,
<<16#BDDB97E555A50A908E43B01C798EA5DAA6788F1EA2794EFCF57166B8C1403960:256, %% B
16#1E55827340BE:48>>,
none}, %% Seed
<<16#04:8,
16#197B07845E9BE2D96ADB0F5F3C7F2CFFBD7A3EB8B6FEC35C7FD67F26DDF6285A:256, %% X(p0)
16#644F740A2614:48,
16#E19FBEB76E0DA171517ECF401B50289BF014103288527A9B416A105E80260B54:256, %% Y(p0)
16#9FDC1B92C03B:48>>,
<<16#0101D556572AABAC800101D556572AABAC8001022D5C91DD173F8FB561DA6899:256, %% Order
16#164443051D:40>>,
<<16#FE2E:16>> %% CoFactor
};
curve(c2tnb359v1) ->
{
{characteristic_two_field, 359, {tpbasis,68}},
{<<16#5667676A654B20754F356EA92017D946567C46675556F19556A04616B567D223:256, %% A
16#A5E05656FB549016A96656A557:104>>,
<<16#2472E2D0197C49363F1FE7F5B6DB075D52B6947D135D8CA445805D39BC345626:256, %% B
16#089687742B6329E70680231988:104>>,
<<16#2B354920B724D696E67687561517585BA1332DC6:160>>}, %% Seed
<<16#04:8,
16#3C258EF3047767E7EDE0F1FDAA79DAEE3841366A132E163ACED4ED2401DF9C6B:256, %% X(p0)
16#DCDE98E8E707C07A2239B1B097:104,
16#53D7E08529547048121E9C95F3791DD804963948F34FAE7BF44EA82365DC7868:256, %% Y(p0)
16#FE57E4AE2DE211305A407104BD:104>>,
<<16#01AF286BCA1AF286BCA1AF286BCA1AF286BCA1AF286BC9FB8F6B85C556892C20:256, %% Order
16#A7EB964FE7719E74F490758D3B:104>>,
<<16#4C:8>> %% CoFactor
};
curve(c2pnb368w1) ->
{
{characteristic_two_field, 368, {ppbasis,1,2,85}},
{<<16#E0D2EE25095206F5E2A4F9ED229F1F256E79A0E2B455970D8D0D865BD94778C5:256, %% A
16#76D62F0AB7519CCD2A1A906AE30D:112>>,
<<16#FC1217D4320A90452C760A58EDCD30C8DD069B3C34453837A34ED50CB54917E1:256, %% B
16#C2112D84D164F444F8F74786046A:112>>,
none}, %% Seed
<<16#04:8,
16#1085E2755381DCCCE3C1557AFA10C2F0C0C2825646C5B34A394CBCFA8BC16B22:256, %% X(p0)
16#E7E789E927BE216F02E1FB136A5F:112,
16#7B3EB1BDDCBA62D5D8B2059B525797FC73822C59059C623A45FF3843CEE8F87C:256, %% Y(p0)
16#D1855ADAA81E2A0750B80FDA2310:112>>,
<<16#010090512DA9AF72B08349D98A5DD4C7B0532ECA51CE03E2D10F3B7AC579BD87:256, %% Order
16#E909AE40A6F131E9CFCE5BD967:104>>,
<<16#FF70:16>> %% CoFactor
};
curve(c2tnb431r1) ->
{
{characteristic_two_field, 431, {tpbasis,120}},
{<<16#1A827EF00DD6FC0E234CAF046C6A5D8A85395B236CC4AD2CF32A0CADBDC9DDF6:256, %% A
16#20B0EB9906D0957F6C6FEACD615468DF104DE296CD8F:176>>,
<<16#10D9B4A3D9047D8B154359ABFB1B7F5485B04CEB868237DDC9DEDA982A679A5A:256, %% B
16#919B626D4E50A8DD731B107A9962381FB5D807BF2618:176>>,
none}, %% Seed
<<16#04:8,
16#120FC05D3C67A99DE161D2F4092622FECA701BE4F50F4758714E8A87BBF2A658:256, %% X(p0)
16#EF8C21E7C5EFE965361F6C2999C0C247B0DBD70CE6B7:176,
16#20D0AF8903A96F8D5FA2C255745D3C451B302C9346D9B7E485E7BCE41F6B591F:256, %% Y(p0)
16#3E8F6ADDCBB0BC4C2F947A7DE1A89B625D6A598B3760:176>>,
<<16#0340340340340340340340340340340340340340340340340340340323C313FA:256, %% Order
16#B50589703B5EC68D3587FEC60D161CC149C1AD4A91:168>>,
<<16#2760:16>> %% CoFactor
};
curve(wtls1) ->
{
{characteristic_two_field, 113, {tpbasis,9}},
{<<16#01:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#01667979A40BA497E5D5C270780617:120, %% X(p0)
16#00F44B4AF1ECC2630E08785CEBCC15:120>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFDBF91AF6DEA73:112>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(wtls3) ->
{
{characteristic_two_field, 163, {ppbasis,3,6,7}},
{<<16#01:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#02FE13C0537BBC11ACAA07D793DE4E6D5E5C94EEE8:168, %% X(p0)
16#0289070FB05D38FF58321F2E800536D538CCDAA3D9:168>>, %% Y(p0)
<<16#04000000000000000000020108A2E0CC0D99F8A5EF:168>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(wtls4) ->
{
{characteristic_two_field, 113, {tpbasis,9}},
{<<16#3088250CA6E7C7FE649CE85820F7:112>>, %% A
<<16#E8BEE4D3E2260744188BE0E9C723:112>>, %% B
<<16#10E723AB14D696E6768756151756FEBF8FCB49A9:160>>}, %% Seed
<<16#04:8,
16#009D73616F35F4AB1407D73562C10F:120, %% X(p0)
16#00A52830277958EE84D1315ED31886:120>>, %% Y(p0)
<<16#0100000000000000D9CCEC8A39E56F:120>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(wtls5) ->
{
{characteristic_two_field, 163, {ppbasis,1,2,8}},
{<<16#072546B5435234A422E0789675F432C89435DE5242:168>>, %% A
<<16#C9517D06D5240D3CFF38C74B20B6CD4D6F9DD4D9:160>>, %% B
<<16#D2C0FB15760860DEF1EEF4D696E6768756151754:160>>}, %% Seed
<<16#04:8,
16#07AF69989546103D79329FCC3D74880F33BBE803CB:168, %% X(p0)
16#01EC23211B5966ADEA1D3F87F7EA5848AEF0B7CA9F:168>>, %% Y(p0)
<<16#0400000000000000000001E60FC8821CC74DAEAFC1:168>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(wtls6) ->
{
{prime_field, <<16#DB7C2ABF62E35E668076BEAD208B:112>>}, %% Prime
{<<16#DB7C2ABF62E35E668076BEAD2088:112>>, %% A
<<16#659EF8BA043916EEDE8911702B22:112>>, %% B
<<16#00F50B028E4D696E676875615175290472783FB1:160>>}, %% Seed
<<16#04:8,
16#09487239995A5EE76B55F9C2F098:112, %% X(p0)
16#A89CE5AF8724C0A23E0E0FF77500:112>>, %% Y(p0)
<<16#DB7C2ABF62E35E7628DFAC6561C5:112>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(wtls7) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC73:160>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFAC70:160>>, %% A
<<16#B4E134D3FB59EB8BAB57274904664D5AF50388BA:160>>, %% B
<<16#B99B99B099B323E02709A4D696E6768756151751:160>>}, %% Seed
<<16#04:8,
16#52DCB034293A117E1F4FF11B30F7199D3144CE6D:160, %% X(p0)
16#FEAFFEF2E331F296E071FA0DF9982CFEA7D43F2E:160>>, %% Y(p0)
<<16#0100000000000000000000351EE786A818F3A1A16B:168>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(wtls8) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFDE7:112>>}, %% Prime
{<<16#00:8>>, %% A
<<16#03:8>>, %% B
none}, %% Seed
<<16#04:8,
16#0000000000000000000000000001:112, %% X(p0)
16#0000000000000000000000000002:112>>, %% Y(p0)
<<16#0100000000000001ECEA551AD837E9:120>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(wtls9) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFC808F:160>>}, %% Prime
{<<16#00:8>>, %% A
<<16#03:8>>, %% B
none}, %% Seed
<<16#04:8,
16#0000000000000000000000000000000000000001:160, %% X(p0)
16#0000000000000000000000000000000000000002:160>>, %% Y(p0)
<<16#0100000000000000000001CDC98AE0E2DE574ABF33:168>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(wtls10) ->
{
{characteristic_two_field, 233, {tpbasis,74}},
{<<16#00:8>>, %% A
<<16#01:8>>, %% B
none}, %% Seed
<<16#04:8,
16#017232BA853A7E731AF129F22FF4149563A419C26BF50A4C9D6EEFAD6126:240, %% X(p0)
16#01DB537DECE819B7F70F555A67C427A8CD9BF18AEB9B56E0C11056FAE6A3:240>>, %% Y(p0)
<<16#8000000000000000000000000000069D5BB915BCD46EFB1AD5F173ABDF:232>>, %% Order
<<16#04:8>> %% CoFactor
};
curve(wtls11) ->
{
{characteristic_two_field, 233, {tpbasis,74}},
{<<16#01:8>>, %% A
<<16#66647EDE6C332C7F8C0923BB58213B333B20E9CE4281FE115F7D8F90AD:232>>, %% B
<<16#74D59FF07F6B413D0EA14B344B20A2DB049B50C3:160>>}, %% Seed
<<16#04:8,
16#00FAC9DFCBAC8313BB2139F1BB755FEF65BC391F8B36F8F8EB7371FD558B:240, %% X(p0)
16#01006A08A41903350678E58528BEBF8A0BEFF867A7CA36716F7E01F81052:240>>, %% Y(p0)
<<16#01000000000000000000000000000013E974E72F8A6922031D2603CFE0D7:240>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(wtls12) ->
{
{prime_field, <<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF000000000000000000000001:224>>}, %% Prime
{<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFFFFFFFFFFFFFFFFFFFE:224>>, %% A
<<16#B4050A850C04B3ABF54132565044B0B7D7BFD8BA270B39432355FFB4:224>>, %% B
none}, %% Seed
<<16#04:8,
16#B70E0CBD6BB4BF7F321390B94A03C1D356C21122343280D6115C1D21:224, %% X(p0)
16#BD376388B5F723FB4C22DFE6CD4375A05A07476444D5819985007E34:224>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFFFFFFF16A2E0B8F03E13DD29455C5C2A3D:224>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(ipsec3) ->
{
{characteristic_two_field, 155, {tpbasis,62}},
{<<16#00:8>>, %% A
<<16#07338F:24>>, %% B
none}, %% Seed
<<16#04:8,
16#000000000000000000000000000000000000007B:160, %% X(p0)
16#00000000000000000000000000000000000001C8:160>>, %% Y(p0)
<<16#02AAAAAAAAAAAAAAAAAAC7F3C7881BD0868FA86C:160>>, %% Order
<<16#03:8>> %% CoFactor
};
curve(ipsec4) ->
{
{characteristic_two_field, 185, {tpbasis,69}},
{<<16#00:8>>, %% A
<<16#1EE9:16>>, %% B
none}, %% Seed
<<16#04:8,
16#000000000000000000000000000000000000000000000018:192, %% X(p0)
16#00000000000000000000000000000000000000000000000D:192>>, %% Y(p0)
<<16#FFFFFFFFFFFFFFFFFFFFFFEDF97C44DB9F2420BAFCA75E:184>>, %% Order
<<16#02:8>> %% CoFactor
};
curve(brainpoolP160r1) ->
{
{prime_field, <<16#E95E4A5F737059DC60DFC7AD95B3D8139515620F:160>>}, %% Prime
{<<16#340E7BE2A280EB74E2BE61BADA745D97E8F7C300:160>>, %% A
<<16#1E589A8595423412134FAA2DBDEC95C8D8675E58:160>>, %% B
none}, %% Seed
<<16#04:8,
16#BED5AF16EA3F6A4F62938C4631EB5AF7BDBCDBC3:160, %% X(p0)
16#1667CB477A1A8EC338F94741669C976316DA6321:160>>, %% Y(p0)
<<16#E95E4A5F737059DC60DF5991D45029409E60FC09:160>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(brainpoolP160t1) ->
{
{prime_field, <<16#E95E4A5F737059DC60DFC7AD95B3D8139515620F:160>>}, %% Prime
{<<16#E95E4A5F737059DC60DFC7AD95B3D8139515620C:160>>, %% A
<<16#7A556B6DAE535B7B51ED2C4D7DAA7A0B5C55F380:160>>, %% B
none}, %% Seed
<<16#04:8,
16#B199B13B9B34EFC1397E64BAEB05ACC265FF2378:160, %% X(p0)
16#ADD6718B7C7C1961F0991B842443772152C9E0AD:160>>, %% Y(p0)
<<16#E95E4A5F737059DC60DF5991D45029409E60FC09:160>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(brainpoolP192r1) ->
{
{prime_field, <<16#C302F41D932A36CDA7A3463093D18DB78FCE476DE1A86297:192>>}, %% Prime
{<<16#6A91174076B1E0E19C39C031FE8685C1CAE040E5C69A28EF:192>>, %% A
<<16#469A28EF7C28CCA3DC721D044F4496BCCA7EF4146FBF25C9:192>>, %% B
none}, %% Seed
<<16#04:8,
16#C0A0647EAAB6A48753B033C56CB0F0900A2F5C4853375FD6:192, %% X(p0)
16#14B690866ABD5BB88B5F4828C1490002E6773FA2FA299B8F:192>>, %% Y(p0)
<<16#C302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1:192>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(brainpoolP192t1) ->
{
{prime_field, <<16#C302F41D932A36CDA7A3463093D18DB78FCE476DE1A86297:192>>}, %% Prime
{<<16#C302F41D932A36CDA7A3463093D18DB78FCE476DE1A86294:192>>, %% A
<<16#13D56FFAEC78681E68F9DEB43B35BEC2FB68542E27897B79:192>>, %% B
none}, %% Seed
<<16#04:8,
16#3AE9E58C82F63C30282E1FE7BBF43FA72C446AF6F4618129:192, %% X(p0)
16#097E2C5667C2223A902AB5CA449D0084B7E5B3DE7CCC01C9:192>>, %% Y(p0)
<<16#C302F41D932A36CDA7A3462F9E9E916B5BE8F1029AC4ACC1:192>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(brainpoolP224r1) ->
{
{prime_field, <<16#D7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF:224>>}, %% Prime
{<<16#68A5E62CA9CE6C1C299803A6C1530B514E182AD8B0042A59CAD29F43:224>>, %% A
<<16#2580F63CCFE44138870713B1A92369E33E2135D266DBB372386C400B:224>>, %% B
none}, %% Seed
<<16#04:8,
16#0D9029AD2C7E5CF4340823B2A87DC68C9E4CE3174C1E6EFDEE12C07D:224, %% X(p0)
16#58AA56F772C0726F24C6B89E4ECDAC24354B9E99CAA3F6D3761402CD:224>>, %% Y(p0)
<<16#D7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F:224>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(brainpoolP224t1) ->
{
{prime_field, <<16#D7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FF:224>>}, %% Prime
{<<16#D7C134AA264366862A18302575D1D787B09F075797DA89F57EC8C0FC:224>>, %% A
<<16#4B337D934104CD7BEF271BF60CED1ED20DA14C08B3BB64F18A60888D:224>>, %% B
none}, %% Seed
<<16#04:8,
16#6AB1E344CE25FF3896424E7FFE14762ECB49F8928AC0C76029B4D580:224, %% X(p0)
16#0374E9F5143E568CD23F3F4D7C0D4B1E41C8CC0D1C6ABD5F1A46DB4C:224>>, %% Y(p0)
<<16#D7C134AA264366862A18302575D0FB98D116BC4B6DDEBCA3A5A7939F:224>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(brainpoolP256r1) ->
{
{prime_field, <<16#A9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377:256>>}, %% Prime
{<<16#7D5A0975FC2C3057EEF67530417AFFE7FB8055C126DC5C6CE94A4B44F330B5D9:256>>, %% A
<<16#26DC5C6CE94A4B44F330B5D9BBD77CBF958416295CF7E1CE6BCCDC18FF8C07B6:256>>, %% B
none}, %% Seed
<<16#04:8,
16#8BD2AEB9CB7E57CB2C4B482FFC81B7AFB9DE27E1E3BD23C23A4453BD9ACE3262:256, %% X(p0)
16#547EF835C3DAC4FD97F8461A14611DC9C27745132DED8E545C1D54C72F046997:256>>, %% Y(p0)
<<16#A9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7:256>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(brainpoolP256t1) ->
{
{prime_field, <<16#A9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5377:256>>}, %% Prime
{<<16#A9FB57DBA1EEA9BC3E660A909D838D726E3BF623D52620282013481D1F6E5374:256>>, %% A
<<16#662C61C430D84EA4FE66A7733D0B76B7BF93EBC4AF2F49256AE58101FEE92B04:256>>, %% B
none}, %% Seed
<<16#04:8,
16#A3E8EB3CC1CFE7B7732213B23A656149AFA142C47AAFBC2B79A191562E1305F4:256, %% X(p0)
16#2D996C823439C56D7F7B22E14644417E69BCB6DE39D027001DABE8F35B25C9BE:256>>, %% Y(p0)
<<16#A9FB57DBA1EEA9BC3E660A909D838D718C397AA3B561A6F7901E0E82974856A7:256>>, %% Order
<<16#01:8>> %% CoFactor
};
curve(brainpoolP320r1) ->
{
{prime_field, <<16#D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC28:256, %% Prime
16#FCD412B1F1B32E27:64>>},
{<<16#3EE30B568FBAB0F883CCEBD46D3F3BB8A2A73513F5EB79DA66190EB085FFA9F4:256, %% A
16#92F375A97D860EB4:64>>,
<<16#520883949DFDBC42D3AD198640688A6FE13F41349554B49ACC31DCCD88453981:256, %% B
16#6F5EB4AC8FB1F1A6:64>>,
none}, %% Seed
<<16#04:8,
16#43BD7E9AFB53D8B85289BCC48EE5BFE6F20137D10A087EB6E7871E2A10A599C7:256, %% X(p0)
16#10AF8D0D39E20611:64,
16#14FDD05545EC1CC8AB4093247F77275E0743FFED117182EAA9C77877AAAC6AC7:256, %% Y(p0)
16#D35245D1692E8EE1:64>>,
<<16#D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658E9:256, %% Order
16#8691555B44C59311:64>>,
<<16#01:8>> %% CoFactor
};
curve(brainpoolP320t1) ->
{
{prime_field, <<16#D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC28:256, %% Prime
16#FCD412B1F1B32E27:64>>},
{<<16#D35E472036BC4FB7E13C785ED201E065F98FCFA6F6F40DEF4F92B9EC7893EC28:256, %% A
16#FCD412B1F1B32E24:64>>,
<<16#A7F561E038EB1ED560B3D147DB782013064C19F27ED27C6780AAF77FB8A547CE:256, %% B
16#B5B4FEF422340353:64>>,
none}, %% Seed
<<16#04:8,
16#925BE9FB01AFC6FB4D3E7D4990010F813408AB106C4F09CB7EE07868CC136FFF:256, %% X(p0)
16#3357F624A21BED52:64,
16#63BA3A7A27483EBF6671DBEF7ABB30EBEE084E58A0B077AD42A5A0989D1EE71B:256, %% Y(p0)
16#1B9BC0455FB0D2C3:64>>,
<<16#D35E472036BC4FB7E13C785ED201E065F98FCFA5B68F12A32D482EC7EE8658E9:256, %% Order
16#8691555B44C59311:64>>,
<<16#01:8>> %% CoFactor
};
curve(brainpoolP384r1) ->
{
{prime_field, <<16#8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB71123:256, %% Prime
16#ACD3A729901D1A71874700133107EC53:128>>},
{<<16#7BC382C63D8C150C3C72080ACE05AFA0C2BEA28E4FB22787139165EFBA91F90F:256, %% A
16#8AA5814A503AD4EB04A8C7DD22CE2826:128>>,
<<16#04A8C7DD22CE28268B39B55416F0447C2FB77DE107DCD2A62E880EA53EEB62D5:256, %% B
16#7CB4390295DBC9943AB78696FA504C11:128>>,
none}, %% Seed
<<16#04:8,
16#1D1C64F068CF45FFA2A63A81B7C13F6B8847A3E77EF14FE3DB7FCAFE0CBD10E8:256, %% X(p0)
16#E826E03436D646AAEF87B2E247D4AF1E:128,
16#8ABE1D7520F9C2A45CB1EB8E95CFD55262B70B29FEEC5864E19C054FF9912928:256, %% Y(p0)
16#0E4646217791811142820341263C5315:128>>,
<<16#8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425A7:256, %% Order
16#CF3AB6AF6B7FC3103B883202E9046565:128>>,
<<16#01:8>> %% CoFactor
};
curve(brainpoolP384t1) ->
{
{prime_field, <<16#8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB71123:256, %% Prime
16#ACD3A729901D1A71874700133107EC53:128>>},
{<<16#8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B412B1DA197FB71123:256, %% A
16#ACD3A729901D1A71874700133107EC50:128>>,
<<16#7F519EADA7BDA81BD826DBA647910F8C4B9346ED8CCDC64E4B1ABD11756DCE1D:256, %% B
16#2074AA263B88805CED70355A33B471EE:128>>,
none}, %% Seed
<<16#04:8,
16#18DE98B02DB9A306F2AFCD7235F72A819B80AB12EBD653172476FECD462AABFF:256, %% X(p0)
16#C4FF191B946A5F54D8D0AA2F418808CC:128,
16#25AB056962D30651A114AFD2755AD336747F93475B7A1FCA3B88F2B6A208CCFE:256, %% Y(p0)
16#469408584DC2B2912675BF5B9E582928:128>>,
<<16#8CB91E82A3386D280F5D6F7E50E641DF152F7109ED5456B31F166E6CAC0425A7:256, %% Order
16#CF3AB6AF6B7FC3103B883202E9046565:128>>,
<<16#01:8>> %% CoFactor
};
curve(brainpoolP512r1) ->
{
{prime_field, <<16#AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330871:256, %% Prime
16#7D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3:256>>},
{<<16#7830A3318B603B89E2327145AC234CC594CBDD8D3DF91610A83441CAEA9863BC:256, %% A
16#2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A72BF2C7B9E7C1AC4D77FC94CA:256>>,
<<16#3DF91610A83441CAEA9863BC2DED5D5AA8253AA10A2EF1C98B9AC8B57F1117A7:256, %% B
16#2BF2C7B9E7C1AC4D77FC94CADC083E67984050B75EBAE5DD2809BD638016F723:256>>,
none}, %% Seed
<<16#04:8,
16#81AEE4BDD82ED9645A21322E9C4C6A9385ED9F70B5D916C1B43B62EEF4D0098E:256, %% X(p0)
16#FF3B1F78E2D0D48D50D1687B93B97D5F7C6D5047406A5E688B352209BCB9F822:256,
16#7DDE385D566332ECC0EABFA9CF7822FDF209F70024A57B1AA000C55B881F8111:256, %% Y(p0)
16#B2DCDE494A5F485E5BCA4BD88A2763AED1CA2B2FA8F0540678CD1E0F3AD80892:256>>,
<<16#AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330870:256, %% Order
16#553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069:256>>,
<<16#01:8>> %% CoFactor
};
curve(brainpoolP512t1) ->
{
{prime_field, <<16#AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330871:256, %% Prime
16#7D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F3:256>>},
{<<16#AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330871:256, %% A
16#7D4D9B009BC66842AECDA12AE6A380E62881FF2F2D82C68528AA6056583A48F0:256>>,
<<16#7CBBBCF9441CFAB76E1890E46884EAE321F70C0BCB4981527897504BEC3E36A6:256, %% B
16#2BCDFA2304976540F6450085F2DAE145C22553B465763689180EA2571867423E:256>>,
none}, %% Seed
<<16#04:8,
16#640ECE5C12788717B9C1BA06CBC2A6FEBA85842458C56DDE9DB1758D39C0313D:256, %% X(p0)
16#82BA51735CDB3EA499AA77A7D6943A64F7A3F25FE26F06B51BAA2696FA9035DA:256,
16#5B534BD595F5AF0FA2C892376C84ACE1BB4E3019B71634C01131159CAE03CEE9:256, %% Y(p0)
16#D9932184BEEF216BD71DF2DADF86A627306ECFF96DBB8BACE198B61E00F8B332:256>>,
<<16#AADD9DB8DBE9C48B3FD4E6AE33C9FC07CB308DB3B3C9D20ED6639CCA70330870:256, %% Order
16#553E5C414CA92619418661197FAC10471DB1D381085DDADDB58796829CA90069:256>>,
<<16#01:8>> %% CoFactor
}.
<|start_filename|>lib/erl_interface/src/decode/decode_bignum.c<|end_filename|>
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 2002-2009. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
#include "eidef.h"
#if defined(HAVE_GMP_H) && defined(HAVE_LIBGMP)
#include <gmp.h>
#include "eidef.h"
#include "eiext.h"
#include "putget.h"
int ei_decode_bignum(const char *buf, int *index, mpz_t obj)
{
const char *s = buf + *index;
const char *s0 = s;
int arity;
int sign;
unsigned long n;
switch (get8(s)) {
case ERL_SMALL_INTEGER_EXT:
n = get8(s);
mpz_set_ui(obj, n);
break;
case ERL_INTEGER_EXT:
n = get32be(s);
mpz_set_ui(obj, n);
break;
case ERL_SMALL_BIG_EXT:
arity = get8(s);
goto decode_bytes;
case ERL_LARGE_BIG_EXT:
arity = get32be(s);
decode_bytes:
sign = get8(s);
mpz_import(obj, arity, -1, 1, 0, 0, s);
s += arity;
if (sign) {
mpz_neg(obj, obj);
}
break;
default:
return -1;
}
*index += s-s0;
return 0;
}
#endif /* HAVE_GMP_H && HAVE_LIBGMP */
<|start_filename|>lib/gs/doc/src/examples/ex17.erl<|end_filename|>
-module(ex17).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/1 $ ').
-export([start/0,init/0]).
start() -> spawn(ex17, init, []).
init() ->
WH = [{width,200},{height,300}],
Win = gs:window(gs:start(),[{map,true},{configure,true},
{title,"Packer Demo"}|WH]),
gs:frame(packer,Win,[{packer_x,[{stretch,1,50},{stretch,2,50},
{stretch,1,50}]},
{packer_y,[{fixed,30},{stretch,1}]}]),
gs:button(packer,[{label,{text,"left"}},{pack_xy,{1,1}}]),
gs:button(packer,[{label,{text,"middle"}},{pack_xy,{2,1}}]),
gs:button(packer,[{label,{text,"right"}},{pack_xy,{3,1}}]),
gs:editor(packer,[{pack_xy,{{1,3},2}},{vscroll,true},{hscroll,true}]),
gs:config(packer,WH), % refresh to initial size
loop().
loop() ->
receive
{gs,_Id,destroy,_Data,_Arg} -> bye;
{gs,_Id,configure,_Data,[W,H|_]} ->
gs:config(packer,[{width,W},{height,H}]), % repack
loop();
Other ->
io:format("loop got: ~p~n",[Other]),
loop()
end.
<|start_filename|>lib/observer/priv/bin/cdv.bat<|end_filename|>
@ECHO OFF
CALL werl -sname cdv -s crashdump_viewer script_start %*
<|start_filename|>lib/gs/doc/src/examples/ex1.erl<|end_filename|>
-module(ex1).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([init/0]).
init() ->
S = gs:start(),
%% the parent of a top-level window is the gs server
Win = gs:create(window,S,[{width,200},{height,100}]),
Butt = gs:create(button,Win,[{label, {text,"Press Me"}}]),
gs:config(Win, {map,true}),
loop(Butt).
loop(Butt) ->
receive
{gs, Butt, click, _Data, _Args} ->
io:format("Hello There~n",[]),
loop(Butt)
end.
<|start_filename|>lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl<|end_filename|>
%% -*- erlang-indent-level: 4 -*-
%%=======================================================================
%% File : beam_disasm.erl
%% Author : <NAME>
%% Description : Disassembles an R5-R10 .beam file into symbolic BEAM code
%%=======================================================================
%% $Id: beam_disasm.erl,v 1.1 2008/12/17 09:53:41 mikpe Exp $
%%=======================================================================
%% Notes:
%% 1. It does NOT work for .beam files of previous BEAM versions.
%% 2. If handling of new BEAM instructions is needed, this should be
%% inserted at the end of function resolve_inst().
%%=======================================================================
-module(beam_disasm).
-export([file/1, format_error/1]).
-author("<NAME>").
-include("beam_opcodes.hrl").
%%-----------------------------------------------------------------------
-define(NO_DEBUG(Str,Xs),ok).
-define(DEBUG(Str,Xs),io:format(Str,Xs)).
-define(exit(Reason),exit({?MODULE,?LINE,Reason})).
%%-----------------------------------------------------------------------
%% Error information
format_error({error, Module, Error}) ->
Module:format_error(Error);
format_error({internal, Error}) ->
io_lib:format("~p: disassembly failed with reason ~P.",
[?MODULE, Error, 25]).
%%-----------------------------------------------------------------------
%% The main exported function
%% File is either a file name or a binary containing the code.
%% Returns `{beam_file, [...]}' or `{error, Module, Reason}'.
%% Call `format_error({error, Module, Reason})' for an error string.
%%-----------------------------------------------------------------------
file(File) ->
case beam_lib:info(File) of
Info when list(Info) ->
{value,{chunks,Chunks}} = lists:keysearch(chunks,1,Info),
case catch process_chunks(File, Chunks) of
{'EXIT', Error} ->
{error, ?MODULE, {internal, Error}};
Result ->
Result
end;
Error ->
Error
end.
%%-----------------------------------------------------------------------
%% Interface might need to be revised -- do not depend on it.
%%-----------------------------------------------------------------------
process_chunks(F,ChunkInfoList) ->
{ok,{_,Chunks}} = beam_lib:chunks(F, ["Atom","Code","StrT","ImpT","ExpT"]),
[{"Atom",AtomBin},{"Code",CodeBin},{"StrT",StrBin},
{"ImpT",ImpBin},{"ExpT",ExpBin}] = Chunks,
LambdaBin = optional_chunk(F, "FunT", ChunkInfoList),
LocBin = optional_chunk(F, "LocT", ChunkInfoList),
AttrBin = optional_chunk(F, "Attr", ChunkInfoList),
CompBin = optional_chunk(F, "CInf", ChunkInfoList),
Atoms = beam_disasm_atoms(AtomBin),
Exports = beam_disasm_exports(ExpBin, Atoms),
Imports = beam_disasm_imports(ImpBin, Atoms),
LocFuns = beam_disasm_exports(LocBin, Atoms),
Lambdas = beam_disasm_lambdas(LambdaBin, Atoms),
Str = beam_disasm_strings(StrBin),
Str1 = binary_to_list(Str), %% for debugging -- use Str as far as poss.
Sym_Code = beam_disasm_code(CodeBin,Atoms,Imports,Str,Lambdas),
Attributes = beam_disasm_attributes(AttrBin),
CompInfo = beam_disasm_compilation_info(CompBin),
All = [{exports,Exports},
{imports,Imports},
{code,Sym_Code},
{atoms,Atoms},
{local_funs,LocFuns},
{strings,Str1},
{attributes,Attributes},
{comp_info,CompInfo}],
{beam_file,[Item || {_Key,Data}=Item <- All, Data =/= none]}.
%%-----------------------------------------------------------------------
%% Retrieve an optional chunk or none if the chunk doesn't exist.
%%-----------------------------------------------------------------------
optional_chunk(F, ChunkTag, ChunkInfo) ->
case lists:keymember(ChunkTag, 1, ChunkInfo) of
true ->
{ok,{_,[{ChunkTag,Chunk}]}} = beam_lib:chunks(F, [ChunkTag]),
Chunk;
false -> none
end.
%%-----------------------------------------------------------------------
%% UTILITIES -- these actually exist in file "beam_lib"
%% -- they should be moved into a common utils file.
%%-----------------------------------------------------------------------
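%% i32/1 combines four bytes into one big-endian 32-bit integer,
%% e.g. i32([0,0,1,0]) =:= 256; get_int/1 does the same for the first
%% four bytes of a binary and also returns the rest of the binary.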
i32([X1,X2,X3,X4]) ->
(X1 bsl 24) bor (X2 bsl 16) bor (X3 bsl 8) bor X4.
get_int(B) ->
{I, B1} = split_binary(B, 4),
{i32(binary_to_list(I)), B1}.
%%-----------------------------------------------------------------------
%% Disassembles the atom table of a BEAM file.
%% - atoms are stored in order 1 ... N (N = Num_atoms, in fact),
%% - each atom name consists of a length byte, followed by that many
%% bytes of name
%%   (NB: atom names are at most 255 characters, since the length is one byte)
%%-----------------------------------------------------------------------
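%% For example, once the leading 32-bit atom count has been stripped,
%% a byte list such as [3,$o,$n,$e,2,$o,$k] disassembles into
%% [{1,one},{2,ok}].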
beam_disasm_atoms(AtomTabBin) ->
{_NumAtoms,B} = get_int(AtomTabBin),
disasm_atoms(B).
disasm_atoms(AtomBin) ->
disasm_atoms(binary_to_list(AtomBin),1).
disasm_atoms([Len|Xs],N) ->
{AtomName,Rest} = get_atom_name(Len,Xs),
[{N,list_to_atom(AtomName)}|disasm_atoms(Rest,N+1)];
disasm_atoms([],_) ->
[].
get_atom_name(Len,Xs) ->
get_atom_name(Len,Xs,[]).
get_atom_name(N,[X|Xs],RevName) when N > 0 ->
get_atom_name(N-1,Xs,[X|RevName]);
get_atom_name(0,Xs,RevName) ->
{ lists:reverse(RevName), Xs }.
%%-----------------------------------------------------------------------
%% Disassembles the export table of a BEAM file.
%%-----------------------------------------------------------------------
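%% Each export entry is 12 bytes long: the function name (an atom-table
%% index), the arity and the entry label, each a 32-bit big-endian
%% integer (see collect_exports/1).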
beam_disasm_exports(none, _) -> none;
beam_disasm_exports(ExpTabBin, Atoms) ->
{_NumAtoms,B} = get_int(ExpTabBin),
disasm_exports(B,Atoms).
disasm_exports(Bin,Atoms) ->
resolve_exports(collect_exports(binary_to_list(Bin)),Atoms).
collect_exports([F3,F2,F1,F0,A3,A2,A1,A0,L3,L2,L1,L0|Exps]) ->
[{i32([F3,F2,F1,F0]), % F = function (atom ID)
i32([A3,A2,A1,A0]), % A = arity (int)
i32([L3,L2,L1,L0])} % L = label (int)
|collect_exports(Exps)];
collect_exports([]) ->
[].
resolve_exports(Exps,Atoms) ->
[ {lookup_key(F,Atoms), A, L} || {F,A,L} <- Exps ].
%%-----------------------------------------------------------------------
%% Disassembles the import table of a BEAM file.
%%-----------------------------------------------------------------------
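%% Import entries use the same 12-byte layout: module and function
%% (both atom-table indices) followed by the arity, each a 32-bit
%% big-endian integer (see collect_imports/1).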
beam_disasm_imports(ExpTabBin,Atoms) ->
{_NumAtoms,B} = get_int(ExpTabBin),
disasm_imports(B,Atoms).
disasm_imports(Bin,Atoms) ->
resolve_imports(collect_imports(binary_to_list(Bin)),Atoms).
collect_imports([M3,M2,M1,M0,F3,F2,F1,F0,A3,A2,A1,A0|Exps]) ->
[{i32([M3,M2,M1,M0]), % M = module (atom ID)
i32([F3,F2,F1,F0]), % F = function (atom ID)
i32([A3,A2,A1,A0])} % A = arity (int)
|collect_imports(Exps)];
collect_imports([]) ->
[].
resolve_imports(Exps,Atoms) ->
[{extfunc,lookup_key(M,Atoms),lookup_key(F,Atoms),A} || {M,F,A} <- Exps ].
%%-----------------------------------------------------------------------
%% Disassembles the lambda (fun) table of a BEAM file.
%%-----------------------------------------------------------------------
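%% Each entry consists of six 32-bit fields: fun name (an atom-table
%% index), arity, entry label, fun index, number of free variables and
%% the old unique value; entries are numbered from 0 in the order they
%% appear (see disasm_lambdas/3).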
beam_disasm_lambdas(none, _) -> none;
beam_disasm_lambdas(<<_:32,Tab/binary>>, Atoms) ->
disasm_lambdas(Tab, Atoms, 0).
disasm_lambdas(<<F:32,A:32,Lbl:32,Index:32,NumFree:32,OldUniq:32,More/binary>>,
Atoms, OldIndex) ->
Info = {lookup_key(F, Atoms),A,Lbl,Index,NumFree,OldUniq},
[{OldIndex,Info}|disasm_lambdas(More, Atoms, OldIndex+1)];
disasm_lambdas(<<>>, _, _) -> [].
%%-----------------------------------------------------------------------
%% Disassembles the code chunk of a BEAM file:
%% - The code is first disassembled into a long list of instructions.
%% - This list is then split into functions and all names are resolved.
%%-----------------------------------------------------------------------
beam_disasm_code(CodeBin,Atoms,Imports,Str,Lambdas) ->
[_SS3,_SS2,_SS1,_SS0, % Sub-Size (length of information before code)
_IS3,_IS2,_IS1,_IS0, % Instruction Set Identifier (always 0)
_OM3,_OM2,_OM1,_OM0, % Opcode Max
_L3,_L2,_L1,_L0,_F3,_F2,_F1,_F0|Code] = binary_to_list(CodeBin),
case catch disasm_code(Code, Atoms) of
{'EXIT',Rsn} ->
?NO_DEBUG('code disasm failed: ~p~n',[Rsn]),
?exit(Rsn);
DisasmCode ->
Functions = get_function_chunks(DisasmCode),
LocLabels = local_labels(Functions),
[resolve_names(F,Imports,Str,LocLabels,Lambdas) || F <- Functions]
end.
%%-----------------------------------------------------------------------
disasm_code([B|Bs], Atoms) ->
{Instr,RestBs} = disasm_instr(B, Bs, Atoms),
[Instr|disasm_code(RestBs, Atoms)];
disasm_code([], _) -> [].
%%-----------------------------------------------------------------------
%% Splits the code stream into chunks representing the code of functions.
%%
%% NOTE: code actually looks like
%% label L1: ... label Ln:
%% func_info ...
%% label entry:
%% ...
%% <on failure, use label Li to show where things died>
%% ...
%% So the labels before each func_info should be included as well.
%% Ideally, only one such label is needed, but the BEAM compiler
%% before R8 didn't care to remove the redundant ones.
%%-----------------------------------------------------------------------
get_function_chunks([I|Code]) ->
{LastI,RestCode,Labs} = split_head_labels(I,Code,[]),
get_funs(LastI,RestCode,Labs,[]);
get_function_chunks([]) ->
?exit(empty_code_segment).
get_funs(PrevI,[I|Is],RevF,RevFs) ->
case I of
{func_info,_Info} ->
[H|T] = RevF,
{Last,Fun,TrailingLabels} = split_head_labels(H,T,[]),
get_funs(I, Is, [PrevI|TrailingLabels], add_funs([Last|Fun],RevFs));
_ ->
get_funs(I, Is, [PrevI|RevF], RevFs)
end;
get_funs(PrevI,[],RevF,RevFs) ->
case PrevI of
{int_code_end,[]} ->
emit_funs(add_fun(RevF,RevFs));
_ ->
?DEBUG('warning: code segment did not end with int_code_end~n',[]),
emit_funs(add_funs([PrevI|RevF],RevFs))
end.
split_head_labels({label,L},[I|Code],Labs) ->
split_head_labels(I,Code,[{label,L}|Labs]);
split_head_labels(I,Code,Labs) ->
{I,Code,Labs}.
add_fun([],Fs) ->
Fs;
add_fun(F,Fs) ->
add_funs(F,Fs).
add_funs(F,Fs) ->
[ lists:reverse(F) | Fs ].
emit_funs(Fs) ->
lists:reverse(Fs).
%%-----------------------------------------------------------------------
%% Collects local labels -- I am not sure this is 100% what is needed.
%%-----------------------------------------------------------------------
local_labels(Funs) ->
[local_label(Fun) || Fun <- Funs].
%% The first clause below attempts to provide some (limited form of)
%% backwards compatibility; it is not needed for .beam files generated
%% by the R8 compiler. The clause should one fine day be taken out.
local_label([{label,_},{label,L}|Code]) ->
local_label([{label,L}|Code]);
local_label([{label,_},
{func_info,[M0,F0,{u,A}]},
{label,[{u,L1}]}|_]) ->
{atom,M} = resolve_arg(M0),
{atom,F} = resolve_arg(F0),
{L1, {M, F, A}};
local_label(Code) ->
io:format('beam_disasm: no label in ~p~n', [Code]),
{-666,{none,none,0}}.
%%-----------------------------------------------------------------------
%% Disassembles a single BEAM instruction; most instructions are handled
%% in a generic way; indexing instructions are handled separately.
%%-----------------------------------------------------------------------
disasm_instr(B, Bs, Atoms) ->
{SymOp,Arity} = beam_opcodes:opname(B),
case SymOp of
select_val ->
disasm_select_inst(select_val, Bs, Atoms);
select_tuple_arity ->
disasm_select_inst(select_tuple_arity, Bs, Atoms);
_ ->
case catch decode_n_args(Arity, Bs, Atoms) of
{'EXIT',Rsn} ->
?NO_DEBUG("decode_n_args(~p,~p) failed~n",[Arity,Bs]),
{{'EXIT',{SymOp,Arity,Rsn}},[]};
{Args,RestBs} ->
?NO_DEBUG("instr ~p~n",[{SymOp,Args}]),
{{SymOp,Args}, RestBs}
end
end.
%%-----------------------------------------------------------------------
%% Disassembles a BEAM select_* instruction used for indexing.
%% Currently handles {select_val,3} and {select_tuple_arity,3} insts.
%%
%% The arguments of a "select"-type instruction look as follows:
%% <reg>, {f,FailLabel}, {list, <num cases>, [<case1> ... <caseN>]}
%% where each case is of the form [symbol,{f,Label}].
%%-----------------------------------------------------------------------
disasm_select_inst(Inst, Bs, Atoms) ->
{X, Bs1} = decode_arg(Bs, Atoms),
{F, Bs2} = decode_arg(Bs1, Atoms),
{Z, Bs3} = decode_arg(Bs2, Atoms),
{U, Bs4} = decode_arg(Bs3, Atoms),
{u,Len} = U,
{List, RestBs} = decode_n_args(Len, Bs4, Atoms),
{{Inst,[X,F,{Z,U,List}]},RestBs}.
%%-----------------------------------------------------------------------
%% decode_arg([Byte]) -> { Arg, [Byte] }
%%
%% - an arg can have variable length, so we must return arg + remaining bytes
%% - decodes an argument into its 'raw' form: { Tag, Value }
%% several types map to a single tag, so the byte code instr must then
%% assign a type to it
%%-----------------------------------------------------------------------
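%% A quick illustration (editor's sketch; assumes the conventional tag
%% numbering where the 'i' tag is 1, cf. beam_asm:encode(1, 5) = [81]
%% in the decode_int comment below):
%%   decode_arg([81]) -> {{i,5},[]}
%%   %% 81 = 2#0101_0001: the low bits 001 select tag i, the high nibble is 5.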
decode_arg([B|Bs]) ->
Tag = decode_tag(B band 2#111),
?NO_DEBUG('Tag = ~p, B = ~p, Bs = ~p~n',[Tag,B,Bs]),
case Tag of
z ->
decode_z_tagged(Tag, B, Bs);
_ ->
%% all other cases are handled as if they were integers
decode_int(Tag, B, Bs)
end.
decode_arg([B|Bs0], Atoms) ->
Tag = decode_tag(B band 2#111),
?NO_DEBUG('Tag = ~p, B = ~p, Bs = ~p~n',[Tag,B,Bs]),
case Tag of
z ->
decode_z_tagged(Tag, B, Bs0);
a ->
%% atom or nil
case decode_int(Tag, B, Bs0) of
{{a,0},Bs} -> {nil,Bs};
{{a,I},Bs} -> {{atom,lookup_key(I, Atoms)},Bs}
end;
_ ->
%% all other cases are handled as if they were integers
decode_int(Tag, B, Bs0)
end.
%%-----------------------------------------------------------------------
%% Decodes an integer value. Handles positives, negatives, and bignums.
%%
%% Tries to do the opposite of:
%% beam_asm:encode(1, 5) = [81]
%% beam_asm:encode(1, 1000) = [105,232]
%% beam_asm:encode(1, 2047) = [233,255]
%% beam_asm:encode(1, 2048) = [25,8,0]
%% beam_asm:encode(1,-1) = [25,255,255]
%% beam_asm:encode(1,-4294967295) = [121,255,0,0,0,1]
%% beam_asm:encode(1, 4294967295) = [121,0,255,255,255,255]
%% beam_asm:encode(1, 429496729501) = [121,99,255,255,255,157]
%%-----------------------------------------------------------------------
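%% Worked example (illustrative only): decoding the bytes produced by
%% beam_asm:encode(1, 1000) = [105,232] above, with Tag = i:
%%   105 = 2#011_01_001 matches the NNN:01:TTT clause below, so
%%   Val0 = 105 band 2#11100000 = 96 and N = (96 bsl 3) bor 232 = 1000,
%%   giving decode_int(i, 105, [232]) -> {{i,1000},[]}.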
decode_int(Tag,B,Bs) when (B band 16#08) == 0 ->
%% N < 16 = 4 bits, NNNN:0:TTT
N = B bsr 4,
{{Tag,N},Bs};
decode_int(Tag,B,Bs) when (B band 16#10) == 0 ->
%% N < 2048 = 11 bits = 3:8 bits, NNN:01:TTT, NNNNNNNN
[B1|Bs1] = Bs,
Val0 = B band 2#11100000,
N = (Val0 bsl 3) bor B1,
?NO_DEBUG('NNN:01:TTT, NNNNNNNN = ~n~p:01:~p, ~p = ~p~n', [Val0,Tag,B,N]),
{{Tag,N},Bs1};
decode_int(Tag,B,Bs) ->
{Len,Bs1} = decode_int_length(B,Bs),
{IntBs,RemBs} = take_bytes(Len,Bs1),
N = build_arg(IntBs),
[F|_] = IntBs,
Num = if F > 127, Tag == i -> decode_negative(N,Len);
true -> N
end,
?NO_DEBUG('Len = ~p, IntBs = ~p, Num = ~p~n', [Len,IntBs,Num]),
{{Tag,Num},RemBs}.
decode_int_length(B,Bs) ->
%% The following imitates get_erlang_integer() in beam_load.c
%% Len is the size of the integer value in bytes
case B bsr 5 of
7 ->
{Arg,ArgBs} = decode_arg(Bs),
case Arg of
{u,L} ->
{L+9,ArgBs}; % 9 stands for 7+2
_ ->
?exit({decode_int,weird_bignum_sublength,Arg})
end;
L ->
{L+2,Bs}
end.
decode_negative(N,Len) ->
N - (1 bsl (Len*8)). % 8 is number of bits in a byte
%%-----------------------------------------------------------------------
%% Decodes lists and floating point numbers.
%%-----------------------------------------------------------------------
decode_z_tagged(Tag,B,Bs) when (B band 16#08) == 0 ->
N = B bsr 4,
case N of
0 -> % float
decode_float(Bs);
1 -> % list
{{Tag,N},Bs};
2 -> % fr
decode_fr(Bs);
3 -> % allocation list
decode_alloc_list(Bs);
_ ->
?exit({decode_z_tagged,{invalid_extended_tag,N}})
end;
decode_z_tagged(_,B,_) ->
?exit({decode_z_tagged,{weird_value,B}}).
decode_float(Bs) ->
{FL,RestBs} = take_bytes(8,Bs),
<<Float:64/float>> = list_to_binary(FL),
{{float,Float},RestBs}.
decode_fr(Bs) ->
{{u,Fr},RestBs} = decode_arg(Bs),
{{fr,Fr},RestBs}.
decode_alloc_list(Bs) ->
{{u,N},RestBs} = decode_arg(Bs),
decode_alloc_list_1(N, RestBs, []).
decode_alloc_list_1(0, RestBs, Acc) ->
{{u,{alloc,lists:reverse(Acc)}},RestBs};
decode_alloc_list_1(N, Bs0, Acc) ->
{{u,Type},Bs1} = decode_arg(Bs0),
{{u,Val},Bs} = decode_arg(Bs1),
case Type of
0 ->
decode_alloc_list_1(N-1, Bs, [{words,Val}|Acc]);
1 ->
decode_alloc_list_1(N-1, Bs, [{floats,Val}|Acc])
end.
%%-----------------------------------------------------------------------
%% take N bytes from a stream, return { Taken_bytes, Remaining_bytes }
%%-----------------------------------------------------------------------
take_bytes(N,Bs) ->
take_bytes(N,Bs,[]).
take_bytes(N,[B|Bs],Acc) when N > 0 ->
take_bytes(N-1,Bs,[B|Acc]);
take_bytes(0,Bs,Acc) ->
{ lists:reverse(Acc), Bs }.
%%-----------------------------------------------------------------------
%% from a list of bytes Bn,Bn-1,...,B1,B0
%% build (Bn << 8*n) bor ... bor B1 << 8 bor B0 << 0
%%-----------------------------------------------------------------------
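%% For example (illustrative): build_arg([1,2]) = (1 bsl 8) bor 2 = 258.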
build_arg(Bs) ->
build_arg(Bs,0).
build_arg([B|Bs],N) ->
build_arg(Bs, (N bsl 8) bor B);
build_arg([],N) ->
N.
%%-----------------------------------------------------------------------
%% Decodes a bunch of arguments and returns them in a list
%%-----------------------------------------------------------------------
decode_n_args(N, Bs, Atoms) when N >= 0 ->
decode_n_args(N, [], Bs, Atoms).
decode_n_args(N, Acc, Bs0, Atoms) when N > 0 ->
{A1,Bs} = decode_arg(Bs0, Atoms),
decode_n_args(N-1, [A1|Acc], Bs, Atoms);
decode_n_args(0, Acc, Bs, _) ->
{lists:reverse(Acc),Bs}.
%%-----------------------------------------------------------------------
%% Convert a numeric tag value into a symbolic one
%%-----------------------------------------------------------------------
decode_tag(?tag_u) -> u;
decode_tag(?tag_i) -> i;
decode_tag(?tag_a) -> a;
decode_tag(?tag_x) -> x;
decode_tag(?tag_y) -> y;
decode_tag(?tag_f) -> f;
decode_tag(?tag_h) -> h;
decode_tag(?tag_z) -> z;
decode_tag(X) -> ?exit({unknown_tag,X}).
%%-----------------------------------------------------------------------
%% - replace all references {a,I} with the atom with index I (or {atom,A})
%% - replace all references to {i,K} in an external call position with
%% the proper MFA (position in list, first elt = 0, yields MFA to use)
%% - resolve strings, represented as <offset, length>, into their
%% actual values by using string table
%% (note: string table should be passed as a BINARY so that we can
%% use binary_to_list/3!)
%% - convert instruction to its readable form ...
%%
%% Currently, only the first three are done (systematically, at least).
%%
%% Note: It MAY be premature to remove the lists of args, since that
%% representation means it is simpler to iterate over all args, etc.
%%-----------------------------------------------------------------------
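%% For illustration, the resolve_arg/1 clauses further below behave as in
%% this sketch (the argument values are made up for the example):
%%   resolve_arg({u,3}) -> 3
%%   resolve_arg({i,42}) -> {integer,42}
%%   resolve_arg({x,0}) -> {x,0} %% registers pass through unchanged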
resolve_names(Fun, Imports, Str, Lbls, Lambdas) ->
[resolve_inst(Instr, Imports, Str, Lbls, Lambdas) || Instr <- Fun].
%%
%% New make_fun2/4 instruction added in August 2001 (R8).
%% We handle it specially here to avoid adding an argument to
%% the clause for every instruction.
%%
resolve_inst({make_fun2,Args},_,_,Lbls,Lambdas) ->
[OldIndex] = resolve_args(Args),
{value,{OldIndex,{F,A,_Lbl,_Index,NumFree,OldUniq}}} =
lists:keysearch(OldIndex, 1, Lambdas),
    [{_,{M,_,_}}|_] = Lbls, % Slightly kludgy.
{make_fun2,{M,F,A},OldIndex,OldUniq,NumFree};
resolve_inst(Instr, Imports, Str, Lbls, _Lambdas) ->
resolve_inst(Instr, Imports, Str, Lbls).
resolve_inst({label,[{u,L}]},_,_,_) ->
{label,L};
resolve_inst({func_info,RawMFA},_,_,_) ->
{func_info,resolve_args(RawMFA)};
% resolve_inst(int_code_end,_,_,_,_) -> % instruction already handled
% int_code_end; % should not really be handled here
resolve_inst({call,[{u,N},{f,L}]},_,_,Lbls) ->
{call,N,catch lookup_key(L,Lbls)};
resolve_inst({call_last,[{u,N},{f,L},{u,U}]},_,_,Lbls) ->
{call_last,N,catch lookup_key(L,Lbls),U};
resolve_inst({call_only,[{u,N},{f,L}]},_,_,Lbls) ->
{call_only,N,catch lookup_key(L,Lbls)};
resolve_inst({call_ext,[{u,N},{u,MFAix}]},Imports,_,_) ->
{call_ext,N,catch lists:nth(MFAix+1,Imports)};
resolve_inst({call_ext_last,[{u,N},{u,MFAix},{u,X}]},Imports,_,_) ->
{call_ext_last,N,catch lists:nth(MFAix+1,Imports),X};
resolve_inst({bif0,Args},Imports,_,_) ->
[Bif,Reg] = resolve_args(Args),
{extfunc,_Mod,BifName,_Arity} = lists:nth(Bif+1,Imports),
%?NO_DEBUG('bif0(~p, ~p)~n',[BifName,Reg]),
{bif,BifName,nofail,[],Reg};
resolve_inst({bif1,Args},Imports,_,_) ->
[F,Bif,A1,Reg] = resolve_args(Args),
{extfunc,_Mod,BifName,_Arity} = lists:nth(Bif+1,Imports),
%?NO_DEBUG('bif1(~p, ~p, ~p, ~p, ~p)~n',[Bif,BifName,F,[A1],Reg]),
{bif,BifName,F,[A1],Reg};
resolve_inst({bif2,Args},Imports,_,_) ->
[F,Bif,A1,A2,Reg] = resolve_args(Args),
{extfunc,_Mod,BifName,_Arity} = lists:nth(Bif+1,Imports),
%?NO_DEBUG('bif2(~p, ~p, ~p, ~p, ~p)~n',[Bif,BifName,F,[A1,A2],Reg]),
{bif,BifName,F,[A1,A2],Reg};
resolve_inst({allocate,[{u,X0},{u,X1}]},_,_,_) ->
{allocate,X0,X1};
resolve_inst({allocate_heap,[{u,X0},{u,X1},{u,X2}]},_,_,_) ->
{allocate_heap,X0,X1,X2};
resolve_inst({allocate_zero,[{u,X0},{u,X1}]},_,_,_) ->
{allocate_zero,X0,X1};
resolve_inst({allocate_heap_zero,[{u,X0},{u,X1},{u,X2}]},_,_,_) ->
{allocate_heap_zero,X0,X1,X2};
resolve_inst({test_heap,[{u,X0},{u,X1}]},_,_,_) ->
{test_heap,X0,X1};
resolve_inst({init,[Dst]},_,_,_) ->
{init,Dst};
resolve_inst({deallocate,[{u,L}]},_,_,_) ->
{deallocate,L};
resolve_inst({return,[]},_,_,_) ->
return;
resolve_inst({send,[]},_,_,_) ->
send;
resolve_inst({remove_message,[]},_,_,_) ->
remove_message;
resolve_inst({timeout,[]},_,_,_) ->
timeout;
resolve_inst({loop_rec,[Lbl,Dst]},_,_,_) ->
{loop_rec,Lbl,Dst};
resolve_inst({loop_rec_end,[Lbl]},_,_,_) ->
{loop_rec_end,Lbl};
resolve_inst({wait,[Lbl]},_,_,_) ->
{wait,Lbl};
resolve_inst({wait_timeout,[Lbl,Int]},_,_,_) ->
{wait_timeout,Lbl,resolve_arg(Int)};
resolve_inst({m_plus,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'+',W,[SrcR1,SrcR2],DstR};
resolve_inst({m_minus,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'-',W,[SrcR1,SrcR2],DstR};
resolve_inst({m_times,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'*',W,[SrcR1,SrcR2],DstR};
resolve_inst({m_div,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'/',W,[SrcR1,SrcR2],DstR};
resolve_inst({int_div,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'div',W,[SrcR1,SrcR2],DstR};
resolve_inst({int_rem,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'rem',W,[SrcR1,SrcR2],DstR};
resolve_inst({int_band,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'band',W,[SrcR1,SrcR2],DstR};
resolve_inst({int_bor,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'bor',W,[SrcR1,SrcR2],DstR};
resolve_inst({int_bxor,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'bxor',W,[SrcR1,SrcR2],DstR};
resolve_inst({int_bsl,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'bsl',W,[SrcR1,SrcR2],DstR};
resolve_inst({int_bsr,Args},_,_,_) ->
[W,SrcR1,SrcR2,DstR] = resolve_args(Args),
{arithbif,'bsr',W,[SrcR1,SrcR2],DstR};
resolve_inst({int_bnot,Args},_,_,_) ->
[W,SrcR,DstR] = resolve_args(Args),
{arithbif,'bnot',W,[SrcR],DstR};
resolve_inst({is_lt=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_ge=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_eq=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_ne=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_eq_exact=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_ne_exact=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_integer=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_float=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_number=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_atom=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_pid=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_reference=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_port=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_nil=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_binary=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_constant=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_list=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_nonempty_list=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({is_tuple=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({test_arity=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({select_val,Args},_,_,_) ->
[Reg,FLbl,{{z,1},{u,_Len},List0}] = Args,
List = resolve_args(List0),
{select_val,Reg,FLbl,{list,List}};
resolve_inst({select_tuple_arity,Args},_,_,_) ->
[Reg,FLbl,{{z,1},{u,_Len},List0}] = Args,
List = resolve_args(List0),
{select_tuple_arity,Reg,FLbl,{list,List}};
resolve_inst({jump,[Lbl]},_,_,_) ->
{jump,Lbl};
resolve_inst({'catch',[Dst,Lbl]},_,_,_) ->
{'catch',Dst,Lbl};
resolve_inst({catch_end,[Dst]},_,_,_) ->
{catch_end,Dst};
resolve_inst({move,[Src,Dst]},_,_,_) ->
{move,resolve_arg(Src),Dst};
resolve_inst({get_list,[Src,Dst1,Dst2]},_,_,_) ->
{get_list,Src,Dst1,Dst2};
resolve_inst({get_tuple_element,[Src,{u,Off},Dst]},_,_,_) ->
{get_tuple_element,resolve_arg(Src),Off,resolve_arg(Dst)};
resolve_inst({set_tuple_element,[Src,Dst,{u,Off}]},_,_,_) ->
{set_tuple_element,resolve_arg(Src),resolve_arg(Dst),Off};
resolve_inst({put_string,[{u,Len},{u,Off},Dst]},_,Strings,_) ->
String = if Len > 0 -> binary_to_list(Strings, Off+1, Off+Len);
true -> ""
end,
?NO_DEBUG('put_string(~p, {string,~p}, ~p)~n',[Len,String,Dst]),
{put_string,Len,{string,String},Dst};
resolve_inst({put_list,[Src1,Src2,Dst]},_,_,_) ->
{put_list,resolve_arg(Src1),resolve_arg(Src2),Dst};
resolve_inst({put_tuple,[{u,Arity},Dst]},_,_,_) ->
{put_tuple,Arity,Dst};
resolve_inst({put,[Src]},_,_,_) ->
{put,resolve_arg(Src)};
resolve_inst({badmatch,[X]},_,_,_) ->
{badmatch,resolve_arg(X)};
resolve_inst({if_end,[]},_,_,_) ->
if_end;
resolve_inst({case_end,[X]},_,_,_) ->
{case_end,resolve_arg(X)};
resolve_inst({call_fun,[{u,N}]},_,_,_) ->
{call_fun,N};
resolve_inst({make_fun,Args},_,_,Lbls) ->
[{f,L},Magic,FreeVars] = resolve_args(Args),
{make_fun,catch lookup_key(L,Lbls),Magic,FreeVars};
resolve_inst({is_function=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
resolve_inst({call_ext_only,[{u,N},{u,MFAix}]},Imports,_,_) ->
{call_ext_only,N,catch lists:nth(MFAix+1,Imports)};
%%
%% Instructions for handling binaries added in R7A & R7B
%%
resolve_inst({bs_start_match,[F,Reg]},_,_,_) ->
{bs_start_match,F,Reg};
resolve_inst({bs_get_integer=I,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
[A2,A5] = resolve_args([Arg2,Arg5]),
{test,I,Lbl,[A2,N,decode_field_flags(U),A5]};
resolve_inst({bs_get_float=I,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
[A2,A5] = resolve_args([Arg2,Arg5]),
{test,I,Lbl,[A2,N,decode_field_flags(U),A5]};
resolve_inst({bs_get_binary=I,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
[A2,A5] = resolve_args([Arg2,Arg5]),
{test,I,Lbl,[A2,N,decode_field_flags(U),A5]};
resolve_inst({bs_skip_bits,[Lbl,Arg2,{u,N},{u,U}]},_,_,_) ->
[A2] = resolve_args([Arg2]),
{test,bs_skip_bits,Lbl,[A2,N,decode_field_flags(U)]};
resolve_inst({bs_test_tail,[F,{u,N}]},_,_,_) ->
{test,bs_test_tail,F,[N]};
resolve_inst({bs_save,[{u,N}]},_,_,_) ->
{bs_save,N};
resolve_inst({bs_restore,[{u,N}]},_,_,_) ->
{bs_restore,N};
resolve_inst({bs_init,[{u,N},{u,U}]},_,_,_) ->
{bs_init,N,decode_field_flags(U)};
resolve_inst({bs_final,[F,X]},_,_,_) ->
{bs_final,F,X};
resolve_inst({bs_put_integer,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
[A2,A5] = resolve_args([Arg2,Arg5]),
{bs_put_integer,Lbl,A2,N,decode_field_flags(U),A5};
resolve_inst({bs_put_binary,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
[A2,A5] = resolve_args([Arg2,Arg5]),
?NO_DEBUG('bs_put_binary(~p,~p,~p,~p,~p})~n',[Lbl,A2,N,U,A5]),
{bs_put_binary,Lbl,A2,N,decode_field_flags(U),A5};
resolve_inst({bs_put_float,[Lbl,Arg2,{u,N},{u,U},Arg5]},_,_,_) ->
[A2,A5] = resolve_args([Arg2,Arg5]),
?NO_DEBUG('bs_put_float(~p,~p,~p,~p,~p})~n',[Lbl,A2,N,U,A5]),
{bs_put_float,Lbl,A2,N,decode_field_flags(U),A5};
resolve_inst({bs_put_string,[{u,Len},{u,Off}]},_,Strings,_) ->
String = if Len > 0 -> binary_to_list(Strings, Off+1, Off+Len);
true -> ""
end,
?NO_DEBUG('bs_put_string(~p, {string,~p})~n',[Len,String]),
{bs_put_string,Len,{string,String}};
resolve_inst({bs_need_buf,[{u,N}]},_,_,_) ->
{bs_need_buf,N};
%%
%% Instructions for handling floating point numbers added in June 2001 (R8).
%%
resolve_inst({fclearerror,[]},_,_,_) ->
fclearerror;
resolve_inst({fcheckerror,Args},_,_,_) ->
[Fail] = resolve_args(Args),
{fcheckerror,Fail};
resolve_inst({fmove,Args},_,_,_) ->
[FR,Reg] = resolve_args(Args),
{fmove,FR,Reg};
resolve_inst({fconv,Args},_,_,_) ->
[Reg,FR] = resolve_args(Args),
{fconv,Reg,FR};
resolve_inst({fadd=I,Args},_,_,_) ->
[F,A1,A2,Reg] = resolve_args(Args),
{arithfbif,I,F,[A1,A2],Reg};
resolve_inst({fsub=I,Args},_,_,_) ->
[F,A1,A2,Reg] = resolve_args(Args),
{arithfbif,I,F,[A1,A2],Reg};
resolve_inst({fmul=I,Args},_,_,_) ->
[F,A1,A2,Reg] = resolve_args(Args),
{arithfbif,I,F,[A1,A2],Reg};
resolve_inst({fdiv=I,Args},_,_,_) ->
[F,A1,A2,Reg] = resolve_args(Args),
{arithfbif,I,F,[A1,A2],Reg};
resolve_inst({fnegate,Args},_,_,_) ->
[F,Arg,Reg] = resolve_args(Args),
{arithfbif,fnegate,F,[Arg],Reg};
%%
%% Instructions for try expressions added in January 2003 (R10).
%%
resolve_inst({'try',[Reg,Lbl]},_,_,_) -> % analogous to 'catch'
{'try',Reg,Lbl};
resolve_inst({try_end,[Reg]},_,_,_) -> % analogous to 'catch_end'
{try_end,Reg};
resolve_inst({try_case,[Reg]},_,_,_) -> % analogous to 'catch_end'
{try_case,Reg};
resolve_inst({try_case_end,[Reg]},_,_,_) ->
{try_case_end,Reg};
resolve_inst({raise,[Reg1,Reg2]},_,_,_) ->
{bif,raise,{f,0},[Reg1,Reg2],{x,0}};
%%
%% New bit syntax instructions added in February 2004 (R10B).
%%
resolve_inst({bs_init2,[Lbl,Arg2,{u,W},{u,R},{u,F},Arg6]},_,_,_) ->
[A2,A6] = resolve_args([Arg2,Arg6]),
{bs_init2,Lbl,A2,W,R,decode_field_flags(F),A6};
resolve_inst({bs_bits_to_bytes,[Lbl,Arg2,Arg3]},_,_,_) ->
[A2,A3] = resolve_args([Arg2,Arg3]),
{bs_bits_to_bytes,Lbl,A2,A3};
resolve_inst({bs_add=I,[Lbl,Arg2,Arg3,Arg4,Arg5]},_,_,_) ->
[A2,A3,A4,A5] = resolve_args([Arg2,Arg3,Arg4,Arg5]),
{I,Lbl,[A2,A3,A4],A5};
%%
%% New apply instructions added in April 2004 (R10B).
%%
resolve_inst({apply,[{u,Arity}]},_,_,_) ->
{apply,Arity};
resolve_inst({apply_last,[{u,Arity},{u,D}]},_,_,_) ->
{apply_last,Arity,D};
%%
%% New test instruction added in April 2004 (R10B).
%%
resolve_inst({is_boolean=I,Args0},_,_,_) ->
[L|Args] = resolve_args(Args0),
{test,I,L,Args};
%%
%% Catches instructions that are not yet handled.
%%
resolve_inst(X,_,_,_) -> ?exit({resolve_inst,X}).
%%-----------------------------------------------------------------------
%% Resolves arguments in a generic way.
%%-----------------------------------------------------------------------
resolve_args(Args) -> [resolve_arg(A) || A <- Args].
resolve_arg({u,N}) -> N;
resolve_arg({i,N}) -> {integer,N};
resolve_arg({atom,Atom}=A) when is_atom(Atom) -> A;
resolve_arg(nil) -> nil;
resolve_arg(Arg) -> Arg.
%%-----------------------------------------------------------------------
%% The purpose of the following is just to add a hook for future changes.
%% Currently, field flags are numbers 1-2-4-8 and only two of these
%% numbers (BSF_LITTLE 2 -- BSF_SIGNED 4) have semantic significance;
%% others are just hints for speeding up the execution; see "erl_bits.h".
%%-----------------------------------------------------------------------
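%% For example (illustrative): decode_field_flags(2) -> {field_flags,2},
%% i.e. a little-endian segment according to BSF_LITTLE above.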
decode_field_flags(FF) ->
{field_flags,FF}.
%%-----------------------------------------------------------------------
%% Each string is denoted in the assembled code by its offset into this
%% binary. This binary contains all strings concatenated together.
%%-----------------------------------------------------------------------
beam_disasm_strings(Bin) ->
Bin.
%%-----------------------------------------------------------------------
%% Disassembles the attributes of a BEAM file.
%%-----------------------------------------------------------------------
beam_disasm_attributes(none) -> none;
beam_disasm_attributes(AttrBin) -> binary_to_term(AttrBin).
%%-----------------------------------------------------------------------
%% Disassembles the compilation information of a BEAM file.
%%-----------------------------------------------------------------------
beam_disasm_compilation_info(none) -> none;
beam_disasm_compilation_info(Bin) -> binary_to_term(Bin).
%%-----------------------------------------------------------------------
%% Private Utilities
%%-----------------------------------------------------------------------
%%-----------------------------------------------------------------------
lookup_key(Key,[{Key,Val}|_]) ->
Val;
lookup_key(Key,[_|KVs]) ->
lookup_key(Key,KVs);
lookup_key(Key,[]) ->
?exit({lookup_key,{key_not_found,Key}}).
%%-----------------------------------------------------------------------
<|start_filename|>lib/percept/doc/src/sorter.erl<|end_filename|>
-module(sorter).
-export([go/3,loop/0,main/4]).
go(I,N,M) ->
spawn(?MODULE, main, [I,N,M,self()]),
receive done -> ok end.
main(I,N,M,Parent) ->
Pids = lists:foldl(
fun(_,Ps) ->
[ spawn(?MODULE,loop, []) | Ps]
end, [], lists:seq(1,M)),
lists:foreach(
fun(_) ->
send_work(N,Pids),
gather(Pids)
end, lists:seq(1,I)),
lists:foreach(
fun(Pid) ->
Pid ! {self(), quit}
end, Pids),
gather(Pids), Parent ! done.
send_work(_,[]) -> ok;
send_work(N,[Pid|Pids]) ->
Pid ! {self(),sort,N},
send_work(round(N*1.2),Pids).
loop() ->
receive
{Pid, sort, N} -> dummy_sort(N),Pid ! {self(), done},loop();
{Pid, quit} -> Pid ! {self(), done}
end.
dummy_sort(N) -> lists:sort([ random:uniform(N) || _ <- lists:seq(1,N)]).
gather([]) -> ok;
gather([Pid|Pids]) -> receive {Pid, done} -> gather(Pids) end.
<|start_filename|>erts/emulator/test/driver_SUITE_data/missing_callback_drv.c<|end_filename|>
/* ``Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The Initial Developer of the Original Code is Ericsson AB. Portions
* created by Ericsson are Copyright 2008, Ericsson Utvecklings AB. All
* Rights Reserved.''
*
* $Id$
*/
#ifndef UNIX
#if !defined(__WIN32__)
#define UNIX 1
#endif
#endif
#ifdef UNIX
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#ifdef HAVE_POLL_H
# include <poll.h>
#endif
#endif /* UNIX */
#include "erl_driver.h"
typedef struct {
int ofd;
int ifd;
int efd;
#ifdef HAVE_POLL_H
struct erl_drv_event_data edata;
#endif
} mcd_data_t;
static ErlDrvData start(ErlDrvPort port, char *command);
static void stop(ErlDrvData data);
static ErlDrvEntry missing_callback_drv_entry = {
NULL /* init */,
start,
stop,
NULL /* output */,
NULL /* ready_input */,
NULL /* ready_output */,
"missing_callback_drv",
NULL /* finish */,
NULL /* handle */,
NULL /* control */,
NULL /* timeout */,
NULL /* outputv */,
NULL /* ready_async */,
NULL /* flush */,
NULL /* call */,
NULL /* event */,
ERL_DRV_EXTENDED_MARKER,
ERL_DRV_EXTENDED_MAJOR_VERSION,
ERL_DRV_EXTENDED_MINOR_VERSION,
ERL_DRV_FLAG_USE_PORT_LOCKING,
NULL, /* handle2 */
NULL /* process_exit */
};
DRIVER_INIT(missing_callback_drv)
{
return &missing_callback_drv_entry;
}
static ErlDrvData
start(ErlDrvPort port, char *command)
{
mcd_data_t *mcd = driver_alloc(sizeof(mcd_data_t));
if (!mcd)
goto error;
mcd->ofd = -1;
mcd->ifd = -1;
mcd->efd = -1;
#ifdef UNIX
mcd->ofd = open("/dev/null", O_WRONLY);
if (mcd->ofd < 0)
goto error;
if (driver_select(port, (ErlDrvEvent) (long) mcd->ofd, DO_WRITE, 1) != 0)
goto error;
mcd->ifd = open("/dev/zero", O_RDONLY);
if (mcd->ifd < 0)
goto error;
if (driver_select(port, (ErlDrvEvent) (long) mcd->ifd, DO_READ, 1) != 0)
goto error;
#ifdef HAVE_POLL_H
mcd->efd = open("/dev/null", O_WRONLY);
if (mcd->efd < 0)
goto error;
mcd->edata.events = POLLOUT;
mcd->edata.revents = 0;
driver_event(port, (ErlDrvEvent) (long) mcd->efd, &mcd->edata);
#endif
#endif
driver_set_timer(port, 0);
return (ErlDrvData) mcd;
error:
stop((ErlDrvData) mcd);
return ERL_DRV_ERROR_GENERAL;
}
static void
stop(ErlDrvData data)
{
mcd_data_t *mcd = (mcd_data_t *) data;
if (mcd) {
#ifdef UNIX
if (mcd->ofd >= 0)
close(mcd->ofd);
if (mcd->ifd >= 0)
close(mcd->ifd);
#ifdef HAVE_POLL_H
if (mcd->efd >= 0)
close(mcd->efd);
#endif
#endif
driver_free(mcd);
}
}
<|start_filename|>lib/stdlib/src/gb_trees.erl<|end_filename|>
%%
%% %CopyrightBegin%
%%
%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%% =====================================================================
%% General Balanced Trees - highly efficient dictionaries.
%%
%% Copyright (C) 1999-2001 <NAME>, <NAME>
%%
%% An efficient implementation of Prof. <NAME>'s General
%% Balanced Trees. These have no storage overhead compared to plain
%% unbalanced binary trees, and their performance is in general better
%% than AVL trees.
%% ---------------------------------------------------------------------
%% Operations:
%%
%% - empty(): returns empty tree.
%%
%% - is_empty(T): returns 'true' if T is an empty tree, and 'false'
%% otherwise.
%%
%% - size(T): returns the number of nodes in the tree as an integer.
%% Returns 0 (zero) if the tree is empty.
%%
%% - lookup(X, T): looks up key X in tree T; returns {value, V}, or
%% `none' if the key is not present.
%%
%% - get(X, T): retrieves the value stored with key X in tree T. Assumes
%% that the key is present in the tree.
%%
%% - insert(X, V, T): inserts key X with value V into tree T; returns
%% the new tree. Assumes that the key is *not* present in the tree.
%%
%% - update(X, V, T): updates key X to value V in tree T; returns the
%% new tree. Assumes that the key is present in the tree.
%%
%% - enter(X, V, T): inserts key X with value V into tree T if the key
%% is not present in the tree, otherwise updates key X to value V in
%% T. Returns the new tree.
%%
%% - delete(X, T): removes key X from tree T; returns new tree. Assumes
%% that the key is present in the tree.
%%
%% - delete_any(X, T): removes key X from tree T if the key is present
%% in the tree, otherwise does nothing; returns new tree.
%%
%% - balance(T): rebalances tree T. Note that this is rarely necessary,
%% but may be motivated when a large number of entries have been
%% deleted from the tree without further insertions. Rebalancing could
%%   then be forced in order to minimise lookup times, since deletion
%%   alone does not rebalance the tree.
%%
%% - is_defined(X, T): returns `true' if key X is present in tree T, and
%% `false' otherwise.
%%
%% - keys(T): returns an ordered list of all keys in tree T.
%%
%% - values(T): returns the list of values for all keys in tree T,
%% sorted by their corresponding keys. Duplicates are not removed.
%%
%% - to_list(T): returns an ordered list of {Key, Value} pairs for all
%% keys in tree T.
%%
%% - from_orddict(L): turns an ordered list L of {Key, Value} pairs into
%% a tree. The list must not contain duplicate keys.
%%
%% - smallest(T): returns {X, V}, where X is the smallest key in tree T,
%% and V is the value associated with X in T. Assumes that the tree T
%% is nonempty.
%%
%% - largest(T): returns {X, V}, where X is the largest key in tree T,
%% and V is the value associated with X in T. Assumes that the tree T
%% is nonempty.
%%
%% - take_smallest(T): returns {X, V, T1}, where X is the smallest key
%% in tree T, V is the value associated with X in T, and T1 is the
%% tree T with key X deleted. Assumes that the tree T is nonempty.
%%
%% - take_largest(T): returns {X, V, T1}, where X is the largest key
%% in tree T, V is the value associated with X in T, and T1 is the
%% tree T with key X deleted. Assumes that the tree T is nonempty.
%%
%% - iterator(T): returns an iterator that can be used for traversing
%% the entries of tree T; see `next'. The implementation of this is
%% very efficient; traversing the whole tree using `next' is only
%% slightly slower than getting the list of all elements using
%% `to_list' and traversing that. The main advantage of the iterator
%% approach is that it does not require the complete list of all
%% elements to be built in memory at one time.
%%
%% - iterator_from(K, T): returns an iterator that can be used for
%% traversing the entries of tree T with key greater than or
%% equal to K; see `next'.
%%
%% - next(S): returns {X, V, S1} where X is the smallest key referred to
%% by the iterator S, and S1 is the new iterator to be used for
%% traversing the remaining entries, or the atom `none' if no entries
%% remain.
%%
%% - map(F, T): maps the function F(K, V) -> V' to all key-value pairs
%% of the tree T and returns a new tree T' with the same set of keys
%% as T and the new set of values V'.
-module(gb_trees).
-export([empty/0, is_empty/1, size/1, lookup/2, get/2, insert/3,
update/3, enter/3, delete/2, delete_any/2, balance/1,
is_defined/2, keys/1, values/1, to_list/1, from_orddict/1,
smallest/1, largest/1, take_smallest/1, take_largest/1,
iterator/1, iterator_from/2, next/1, map/2]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Data structure:
%% - {Size, Tree}, where `Tree' is composed of nodes of the form:
%% - {Key, Value, Smaller, Bigger}, and the "empty tree" node:
%% - nil.
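%%   (For illustration: the empty tree is {0, nil}, and inserting a single
%%   pair, e.g. gb_trees:insert(a, 1, gb_trees:empty()), gives
%%   {1, {a, 1, nil, nil}}.)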
%%
%% I make no attempt to balance trees after deletions. Since deletions
%% don't increase the height of a tree, I figure this is OK.
%%
%% Original balance condition h(T) <= ceil(c * log(|T|)) has been
%% changed to the similar (but not quite equivalent) condition 2 ^ h(T)
%% <= |T| ^ c. I figure this should also be OK.
%%
%% Performance is comparable to the AVL trees in the Erlang book (and
%% faster in general due to less overhead); the difference is that
%% deletion works for my trees, but not for the book's trees. Behaviour
%% is logarithmic (as it should be).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Some macros.
-define(p, 2). % It seems that p = 2 is optimal for sorted keys
-define(pow(A, _), A * A). % correct with exponent as defined above.
-define(div2(X), X bsr 1).
-define(mul2(X), X bsl 1).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Some types.
-export_type([tree/0, tree/2, iter/0, iter/2]).
-type gb_tree_node(K, V) :: 'nil'
| {K, V, gb_tree_node(K, V), gb_tree_node(K, V)}.
-opaque tree(Key, Value) :: {non_neg_integer(), gb_tree_node(Key, Value)}.
-type tree() :: tree(_, _).
-opaque iter(Key, Value) :: [gb_tree_node(Key, Value)].
-type iter() :: iter(_, _).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec empty() -> tree().
empty() ->
{0, nil}.
-spec is_empty(Tree) -> boolean() when
Tree :: tree().
is_empty({0, nil}) ->
true;
is_empty(_) ->
false.
-spec size(Tree) -> non_neg_integer() when
Tree :: tree().
size({Size, _}) when is_integer(Size), Size >= 0 ->
Size.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec lookup(Key, Tree) -> 'none' | {'value', Value} when
Tree :: tree(Key, Value).
lookup(Key, {_, T}) ->
lookup_1(Key, T).
%% The term order is an arithmetic total order, so we should not
%% test exact equality for the keys. (If we do, then it becomes
%% possible that neither `>', `<', nor `=:=' matches.) Testing '<'
%% and '>' first is statistically better than testing for
%% equality, and also allows us to skip the test completely in the
%% remaining case.
lookup_1(Key, {Key1, _, Smaller, _}) when Key < Key1 ->
lookup_1(Key, Smaller);
lookup_1(Key, {Key1, _, _, Bigger}) when Key > Key1 ->
lookup_1(Key, Bigger);
lookup_1(_, {_, Value, _, _}) ->
{value, Value};
lookup_1(_, nil) ->
none.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% This is a specialized version of `lookup'.
-spec is_defined(Key, Tree) -> boolean() when
Tree :: tree(Key, Value :: term()).
is_defined(Key, {_, T}) ->
is_defined_1(Key, T).
is_defined_1(Key, {Key1, _, Smaller, _}) when Key < Key1 ->
is_defined_1(Key, Smaller);
is_defined_1(Key, {Key1, _, _, Bigger}) when Key > Key1 ->
is_defined_1(Key, Bigger);
is_defined_1(_, {_, _, _, _}) ->
true;
is_defined_1(_, nil) ->
false.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% This is a specialized version of `lookup'.
-spec get(Key, Tree) -> Value when
Tree :: tree(Key, Value).
get(Key, {_, T}) ->
get_1(Key, T).
get_1(Key, {Key1, _, Smaller, _}) when Key < Key1 ->
get_1(Key, Smaller);
get_1(Key, {Key1, _, _, Bigger}) when Key > Key1 ->
get_1(Key, Bigger);
get_1(_, {_, Value, _, _}) ->
Value.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec update(Key, Value, Tree1) -> Tree2 when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
update(Key, Val, {S, T}) ->
T1 = update_1(Key, Val, T),
{S, T1}.
%% See `lookup' for notes on the term comparison order.
update_1(Key, Value, {Key1, V, Smaller, Bigger}) when Key < Key1 ->
{Key1, V, update_1(Key, Value, Smaller), Bigger};
update_1(Key, Value, {Key1, V, Smaller, Bigger}) when Key > Key1 ->
{Key1, V, Smaller, update_1(Key, Value, Bigger)};
update_1(Key, Value, {_, _, Smaller, Bigger}) ->
{Key, Value, Smaller, Bigger}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec insert(Key, Value, Tree1) -> Tree2 when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
insert(Key, Val, {S, T}) when is_integer(S) ->
S1 = S+1,
{S1, insert_1(Key, Val, T, ?pow(S1, ?p))}.
insert_1(Key, Value, {Key1, V, Smaller, Bigger}, S) when Key < Key1 ->
case insert_1(Key, Value, Smaller, ?div2(S)) of
{T1, H1, S1} ->
T = {Key1, V, T1, Bigger},
{H2, S2} = count(Bigger),
H = ?mul2(erlang:max(H1, H2)),
SS = S1 + S2 + 1,
P = ?pow(SS, ?p),
if
H > P ->
balance(T, SS);
true ->
{T, H, SS}
end;
T1 ->
{Key1, V, T1, Bigger}
end;
insert_1(Key, Value, {Key1, V, Smaller, Bigger}, S) when Key > Key1 ->
case insert_1(Key, Value, Bigger, ?div2(S)) of
{T1, H1, S1} ->
T = {Key1, V, Smaller, T1},
{H2, S2} = count(Smaller),
H = ?mul2(erlang:max(H1, H2)),
SS = S1 + S2 + 1,
P = ?pow(SS, ?p),
if
H > P ->
balance(T, SS);
true ->
{T, H, SS}
end;
T1 ->
{Key1, V, Smaller, T1}
end;
insert_1(Key, Value, nil, S) when S =:= 0 ->
{{Key, Value, nil, nil}, 1, 1};
insert_1(Key, Value, nil, _S) ->
{Key, Value, nil, nil};
insert_1(Key, _, _, _) ->
erlang:error({key_exists, Key}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec enter(Key, Value, Tree1) -> Tree2 when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
enter(Key, Val, T) ->
case is_defined(Key, T) of
true ->
update(Key, Val, T);
false ->
insert(Key, Val, T)
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
count({_, _, nil, nil}) ->
{1, 1};
count({_, _, Sm, Bi}) ->
{H1, S1} = count(Sm),
{H2, S2} = count(Bi),
{?mul2(erlang:max(H1, H2)), S1 + S2 + 1};
count(nil) ->
{1, 0}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec balance(Tree1) -> Tree2 when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
balance({S, T}) ->
{S, balance(T, S)}.
balance(T, S) ->
balance_list(to_list_1(T), S).
balance_list(L, S) ->
{T, []} = balance_list_1(L, S),
T.
balance_list_1(L, S) when S > 1 ->
Sm = S - 1,
S2 = Sm div 2,
S1 = Sm - S2,
{T1, [{K, V} | L1]} = balance_list_1(L, S1),
{T2, L2} = balance_list_1(L1, S2),
T = {K, V, T1, T2},
{T, L2};
balance_list_1([{Key, Val} | L], 1) ->
{{Key, Val, nil, nil}, L};
balance_list_1(L, 0) ->
{nil, L}.
-spec from_orddict(List) -> Tree when
List :: [{Key, Value}],
Tree :: tree(Key, Value).
from_orddict(L) ->
S = length(L),
{S, balance_list(L, S)}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec delete_any(Key, Tree1) -> Tree2 when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
delete_any(Key, T) ->
case is_defined(Key, T) of
true ->
delete(Key, T);
false ->
T
end.
%%% delete. Assumes that key is present.
-spec delete(Key, Tree1) -> Tree2 when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
delete(Key, {S, T}) when is_integer(S), S >= 0 ->
{S - 1, delete_1(Key, T)}.
%% See `lookup' for notes on the term comparison order.
delete_1(Key, {Key1, Value, Smaller, Larger}) when Key < Key1 ->
Smaller1 = delete_1(Key, Smaller),
{Key1, Value, Smaller1, Larger};
delete_1(Key, {Key1, Value, Smaller, Bigger}) when Key > Key1 ->
Bigger1 = delete_1(Key, Bigger),
{Key1, Value, Smaller, Bigger1};
delete_1(_, {_, _, Smaller, Larger}) ->
merge(Smaller, Larger).
merge(Smaller, nil) ->
Smaller;
merge(nil, Larger) ->
Larger;
merge(Smaller, Larger) ->
{Key, Value, Larger1} = take_smallest1(Larger),
{Key, Value, Smaller, Larger1}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec take_smallest(Tree1) -> {Key, Value, Tree2} when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
take_smallest({Size, Tree}) when is_integer(Size), Size >= 0 ->
{Key, Value, Larger} = take_smallest1(Tree),
{Key, Value, {Size - 1, Larger}}.
take_smallest1({Key, Value, nil, Larger}) ->
{Key, Value, Larger};
take_smallest1({Key, Value, Smaller, Larger}) ->
{Key1, Value1, Smaller1} = take_smallest1(Smaller),
{Key1, Value1, {Key, Value, Smaller1, Larger}}.
-spec smallest(Tree) -> {Key, Value} when
Tree :: tree(Key, Value).
smallest({_, Tree}) ->
smallest_1(Tree).
smallest_1({Key, Value, nil, _Larger}) ->
{Key, Value};
smallest_1({_Key, _Value, Smaller, _Larger}) ->
smallest_1(Smaller).
-spec take_largest(Tree1) -> {Key, Value, Tree2} when
Tree1 :: tree(Key, Value),
Tree2 :: tree(Key, Value).
take_largest({Size, Tree}) when is_integer(Size), Size >= 0 ->
{Key, Value, Smaller} = take_largest1(Tree),
{Key, Value, {Size - 1, Smaller}}.
take_largest1({Key, Value, Smaller, nil}) ->
{Key, Value, Smaller};
take_largest1({Key, Value, Smaller, Larger}) ->
{Key1, Value1, Larger1} = take_largest1(Larger),
{Key1, Value1, {Key, Value, Smaller, Larger1}}.
-spec largest(Tree) -> {Key, Value} when
Tree :: tree(Key, Value).
largest({_, Tree}) ->
largest_1(Tree).
largest_1({Key, Value, _Smaller, nil}) ->
{Key, Value};
largest_1({_Key, _Value, _Smaller, Larger}) ->
largest_1(Larger).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec to_list(Tree) -> [{Key, Value}] when
Tree :: tree(Key, Value).
to_list({_, T}) ->
to_list(T, []).
to_list_1(T) -> to_list(T, []).
to_list({Key, Value, Small, Big}, L) ->
to_list(Small, [{Key, Value} | to_list(Big, L)]);
to_list(nil, L) -> L.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec keys(Tree) -> [Key] when
Tree :: tree(Key, Value :: term()).
keys({_, T}) ->
keys(T, []).
keys({Key, _Value, Small, Big}, L) ->
keys(Small, [Key | keys(Big, L)]);
keys(nil, L) -> L.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec values(Tree) -> [Value] when
Tree :: tree(Key :: term(), Value).
values({_, T}) ->
values(T, []).
values({_Key, Value, Small, Big}, L) ->
values(Small, [Value | values(Big, L)]);
values(nil, L) -> L.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec iterator(Tree) -> Iter when
Tree :: tree(Key, Value),
Iter :: iter(Key, Value).
iterator({_, T}) ->
iterator_1(T).
iterator_1(T) ->
iterator(T, []).
%% The iterator structure is really just a list corresponding to
%% the call stack of an in-order traversal. This is quite fast.
iterator({_, _, nil, _} = T, As) ->
[T | As];
iterator({_, _, L, _} = T, As) ->
iterator(L, [T | As]);
iterator(nil, As) ->
As.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec iterator_from(Key, Tree) -> Iter when
Tree :: tree(Key, Value),
Iter :: iter(Key, Value).
iterator_from(S, {_, T}) ->
iterator_1_from(S, T).
iterator_1_from(S, T) ->
iterator_from(S, T, []).
iterator_from(S, {K, _, _, T}, As) when K < S ->
iterator_from(S, T, As);
iterator_from(_, {_, _, nil, _} = T, As) ->
[T | As];
iterator_from(S, {_, _, L, _} = T, As) ->
iterator_from(S, L, [T | As]);
iterator_from(_, nil, As) ->
As.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec next(Iter1) -> 'none' | {Key, Value, Iter2} when
Iter1 :: iter(Key, Value),
Iter2 :: iter(Key, Value).
next([{X, V, _, T} | As]) ->
{X, V, iterator(T, As)};
next([]) ->
none.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec map(Function, Tree1) -> Tree2 when
Function :: fun((K :: Key, V1 :: Value1) -> V2 :: Value2),
Tree1 :: tree(Key, Value1),
Tree2 :: tree(Key, Value2).
map(F, {Size, Tree}) when is_function(F, 2) ->
{Size, map_1(F, Tree)}.
map_1(_, nil) -> nil;
map_1(F, {K, V, Smaller, Larger}) ->
{K, F(K, V), map_1(F, Smaller), map_1(F, Larger)}.
<|start_filename|>lib/gs/doc/src/examples/ex13.erl<|end_filename|>
-module(ex13).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([start/0,init/0]).
start() -> spawn(ex13, init, []).
init() ->
I=gs:start(),
Win=gs:window(I, [{width,200},{height,100},
{title,"menu"},{map, true}]),
Bar = gs:create(menubar, Win, []),
Fmb = gs:create(menubutton, Bar,
[{label,{text,"File"}}]),
Emb = gs:create(menubutton, Bar,
[{label,{text,"Edit"}}]),
Hmb = gs:create(menubutton, Bar,
[{label,{text,"Help"}},{side,right}]),
Fmnu = gs:create(menu, Fmb, []),
Emnu = gs:create(menu, Emb, []),
Hmnu = gs:create(menu, Hmb, []),
gs:create(menuitem, load, Fmnu,
[{label,{text, "Load"}}]),
gs:create(menuitem, save, Fmnu,
[{label,{text, "Save"}}]),
Exit = gs:create(menuitem, Fmnu,
[{label,{text, "Exit"}}]),
Color = gs:create(menuitem, Emnu,
[{label,{text, "Color"}},
{itemtype, cascade}]),
Cmnu = gs:create(menu, Color, [{disabledfg,gray}]),
gs:create(menuitem, Cmnu, [{label, {text,"Red"}},
{data, {new_color, red}},
{itemtype,radio},{group,gr1}]),
gs:create(menuitem, Cmnu, [{label, {text,"Blue"}},
{data, {new_color, blue}},
{itemtype,radio},{group,gr1}]),
gs:create(menuitem,Cmnu, [{label, {text,"Black"}},
{data, {new_color, black}},
{itemtype,radio},{group,gr1}]),
Y = gs:create(menuitem, Hmnu, [{label, {text,"You"}},
{itemtype, check}]),
M = gs:create(menuitem, me, Hmnu, [{label, {text, "Me"}},
{itemtype, check}]),
gs:create(menuitem, Hmnu, [{itemtype, separator}]),
gs:create(menuitem, Hmnu, [{label, {text, "Other"}},
{itemtype, check},
{enable,false}]),
gs:create(menuitem, doit, Hmnu, [{label, {text, "Doit!"}},
{data, {doit, Y, M}}]),
loop(Exit, Win).
loop(Exit, Win) ->
receive
{gs, save, click, _Data, [Txt, Index | Rest]} ->
io:format("Save~n");
{gs, load, click, _Data, [Txt, Index | Rest]} ->
io:format("Load~n");
{gs, Exit, click, _Data, [Txt, Index | Rest]} ->
io:format("Exit~n"),
exit(normal);
{gs, _MnuItem, click, {new_color, Color}, Args} ->
io:format("Change color to ~w. Args:~p~n",
[Color, Args]),
gs:config(Win, [{bg, Color}]);
{gs, doit, click, {doit, YouId, MeId}, Args} ->
HelpMe = gs:read(MeId, select),
HelpYou = gs:read(YouId, select),
io:format("Doit. HelpMe:~w, HelpYou:~w, Args:~p~n",
[HelpMe, HelpYou, Args]);
Other -> io:format("Other:~p~n",[Other])
end,
loop(Exit, Win).
<|start_filename|>lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl<|end_filename|>
%% ``Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%%
%% $Id: mnesia_locker.erl,v 1.2 2009/07/01 15:45:40 kostis Exp $
-module(mnesia_locker).
-export([
get_held_locks/0,
get_lock_queue/0,
global_lock/5,
ixrlock/5,
init/1,
mnesia_down/2,
release_tid/1,
async_release_tid/2,
send_release_tid/2,
receive_release_tid_acc/2,
rlock/3,
rlock_table/3,
rwlock/3,
sticky_rwlock/3,
start/0,
sticky_wlock/3,
sticky_wlock_table/3,
wlock/3,
wlock_no_exist/4,
wlock_table/3
]).
%% sys callback functions
-export([system_continue/3,
system_terminate/4,
system_code_change/4
]).
-include("mnesia.hrl").
-import(mnesia_lib, [dbg_out/2, error/2, verbose/2]).
-define(dbg(S,V), ok).
%-define(dbg(S,V), dbg_out("~p:~p: " ++ S, [?MODULE, ?LINE] ++ V)).
-define(ALL, '______WHOLETABLE_____').
-define(STICK, '______STICK_____').
-define(GLOBAL, '______GLOBAL_____').
-record(state, {supervisor}).
-record(queue, {oid, tid, op, pid, lucky}).
%% mnesia_held_locks: contain {Oid, Op, Tid} entries (bag)
-define(match_oid_held_locks(Oid), {Oid, '_', '_'}).
%% mnesia_tid_locks: contain {Tid, Oid, Op} entries (bag)
-define(match_oid_tid_locks(Tid), {Tid, '_', '_'}).
%% mnesia_sticky_locks: contain {Oid, Node} entries and {Tab, Node} entries (set)
-define(match_oid_sticky_locks(Oid),{Oid, '_'}).
%% mnesia_lock_queue: contain {queue, Oid, Tid, Op, ReplyTo, WaitForTid} entries (ordered_set)
-define(match_oid_lock_queue(Oid), #queue{oid=Oid, tid='_', op = '_', pid = '_', lucky = '_'}).
%% mnesia_lock_counter: {{write, Tab}, Number} &&
%% {{read, Tab}, Number} entries (set)
start() ->
mnesia_monitor:start_proc(?MODULE, ?MODULE, init, [self()]).
init(Parent) ->
register(?MODULE, self()),
process_flag(trap_exit, true),
proc_lib:init_ack(Parent, {ok, self()}),
loop(#state{supervisor = Parent}).
val(Var) ->
case ?catch_val(Var) of
{'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
_VaLuE_ -> _VaLuE_
end.
reply(From, R) ->
From ! {?MODULE, node(), R}.
l_request(Node, X, Store) ->
{?MODULE, Node} ! {self(), X},
l_req_rec(Node, Store).
l_req_rec(Node, Store) ->
?ets_insert(Store, {nodes, Node}),
receive
{?MODULE, Node, {switch, Node2, Req}} ->
?ets_insert(Store, {nodes, Node2}),
{?MODULE, Node2} ! Req,
{switch, Node2, Req};
{?MODULE, Node, Reply} ->
Reply;
{mnesia_down, Node} ->
{not_granted, {node_not_running, Node}}
end.
release_tid(Tid) ->
?MODULE ! {release_tid, Tid}.
async_release_tid(Nodes, Tid) ->
rpc:abcast(Nodes, ?MODULE, {release_tid, Tid}).
send_release_tid(Nodes, Tid) ->
rpc:abcast(Nodes, ?MODULE, {self(), {sync_release_tid, Tid}}).
receive_release_tid_acc([Node | Nodes], Tid) ->
receive
{?MODULE, Node, {tid_released, Tid}} ->
receive_release_tid_acc(Nodes, Tid);
{mnesia_down, Node} ->
receive_release_tid_acc(Nodes, Tid)
end;
receive_release_tid_acc([], _Tid) ->
ok.
loop(State) ->
receive
{From, {write, Tid, Oid}} ->
try_sticky_lock(Tid, write, From, Oid),
loop(State);
%% If Key == ?ALL it's a request to lock the entire table
%%
{From, {read, Tid, Oid}} ->
try_sticky_lock(Tid, read, From, Oid),
loop(State);
%% Really do a read, but get hold of a write lock
%% used by mnesia:wread(Oid).
{From, {read_write, Tid, Oid}} ->
try_sticky_lock(Tid, read_write, From, Oid),
loop(State);
%% Tid has somehow terminated, clear up everything
%% and pass locks on to queued processes.
%% This is the purpose of the mnesia_tid_locks table
{release_tid, Tid} ->
do_release_tid(Tid),
loop(State);
	%% Sticky lock: this is first tried on the where_to_read node
{From, {test_set_sticky, Tid, {Tab, _} = Oid, Lock}} ->
case ?ets_lookup(mnesia_sticky_locks, Tab) of
[] ->
reply(From, not_stuck),
loop(State);
[{_,Node}] when Node == node() ->
%% Lock is stuck here, see now if we can just set
%% a regular write lock
try_lock(Tid, Lock, From, Oid),
loop(State);
[{_,Node}] ->
reply(From, {stuck_elsewhere, Node}),
loop(State)
end;
%% If test_set_sticky fails, we send this to all nodes
	%% after acquiring a real write lock on Oid
{stick, {Tab, _}, N} ->
?ets_insert(mnesia_sticky_locks, {Tab, N}),
loop(State);
	%% The caller that sends this message must have first
	%% acquired a write lock on the entire table
{unstick, Tab} ->
?ets_delete(mnesia_sticky_locks, Tab),
loop(State);
{From, {ix_read, Tid, Tab, IxKey, Pos}} ->
case catch mnesia_index:get_index_table(Tab, Pos) of
{'EXIT', _} ->
reply(From, {not_granted, {no_exists, Tab, {index, [Pos]}}}),
loop(State);
Index ->
Rk = mnesia_lib:elems(2,mnesia_index:db_get(Index, IxKey)),
%% list of real keys
case ?ets_lookup(mnesia_sticky_locks, Tab) of
[] ->
set_read_lock_on_all_keys(Tid, From,Tab,Rk,Rk,
[]),
loop(State);
[{_,N}] when N == node() ->
set_read_lock_on_all_keys(Tid, From,Tab,Rk,Rk,
[]),
loop(State);
[{_,N}] ->
Req = {From, {ix_read, Tid, Tab, IxKey, Pos}},
From ! {?MODULE, node(), {switch, N, Req}},
loop(State)
end
end;
{From, {sync_release_tid, Tid}} ->
do_release_tid(Tid),
reply(From, {tid_released, Tid}),
loop(State);
{release_remote_non_pending, Node, Pending} ->
release_remote_non_pending(Node, Pending),
mnesia_monitor:mnesia_down(?MODULE, Node),
loop(State);
{'EXIT', Pid, _} when Pid == State#state.supervisor ->
do_stop();
{system, From, Msg} ->
verbose("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
Parent = State#state.supervisor,
sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], State);
Msg ->
error("~p got unexpected message: ~p~n", [?MODULE, Msg]),
loop(State)
end.
set_lock(Tid, Oid, Op) ->
?dbg("Granted ~p ~p ~p~n", [Tid,Oid,Op]),
?ets_insert(mnesia_held_locks, {Oid, Op, Tid}),
?ets_insert(mnesia_tid_locks, {Tid, Oid, Op}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Acquire locks
try_sticky_lock(Tid, Op, Pid, {Tab, _} = Oid) ->
case ?ets_lookup(mnesia_sticky_locks, Tab) of
[] ->
try_lock(Tid, Op, Pid, Oid);
[{_,N}] when N == node() ->
try_lock(Tid, Op, Pid, Oid);
[{_,N}] ->
Req = {Pid, {Op, Tid, Oid}},
Pid ! {?MODULE, node(), {switch, N, Req}}
end.
try_lock(Tid, read_write, Pid, Oid) ->
try_lock(Tid, read_write, read, write, Pid, Oid);
try_lock(Tid, Op, Pid, Oid) ->
try_lock(Tid, Op, Op, Op, Pid, Oid).
try_lock(Tid, Op, SimpleOp, Lock, Pid, Oid) ->
case can_lock(Tid, Lock, Oid, {no, bad_luck}) of
yes ->
Reply = grant_lock(Tid, SimpleOp, Lock, Oid),
reply(Pid, Reply);
{no, Lucky} ->
C = #cyclic{op = SimpleOp, lock = Lock, oid = Oid, lucky = Lucky},
?dbg("Rejected ~p ~p ~p ~p ~n", [Tid, Oid, Lock, Lucky]),
reply(Pid, {not_granted, C});
{queue, Lucky} ->
?dbg("Queued ~p ~p ~p ~p ~n", [Tid, Oid, Lock, Lucky]),
%% Append to queue: Nice place for trace output
?ets_insert(mnesia_lock_queue,
#queue{oid = Oid, tid = Tid, op = Op,
pid = Pid, lucky = Lucky}),
?ets_insert(mnesia_tid_locks, {Tid, Oid, {queued, Op}})
end.
grant_lock(Tid, read, Lock, {Tab, Key})
when Key /= ?ALL, Tab /= ?GLOBAL ->
case node(Tid#tid.pid) == node() of
true ->
set_lock(Tid, {Tab, Key}, Lock),
{granted, lookup_in_client};
false ->
case catch mnesia_lib:db_get(Tab, Key) of %% lookup as well
{'EXIT', _Reason} ->
%% Table has been deleted from this node,
%% restart the transaction.
C = #cyclic{op = read, lock = Lock, oid = {Tab, Key},
lucky = nowhere},
{not_granted, C};
Val ->
set_lock(Tid, {Tab, Key}, Lock),
{granted, Val}
end
end;
grant_lock(Tid, read, Lock, Oid) ->
set_lock(Tid, Oid, Lock),
{granted, ok};
grant_lock(Tid, write, Lock, Oid) ->
set_lock(Tid, Oid, Lock),
granted.
%% 1) Impose an ordering on all transactions that favours old (low tid)
%%    transactions; newer (higher tid) transactions may never wait on older ones.
%% 2) When releasing tids from the queue, always begin with the youngest
%%    (highest tid); because of 1) this avoids deadlocks.
%% 3) Table locks are the problem :-) They should neither starve nor deadlock;
%%    handle table locks in the queue as if they held locks on the unlocked records.
can_lock(Tid, read, {Tab, Key}, AlreadyQ) when Key /= ?ALL ->
%% The key is bound, no need for the other BIF
Oid = {Tab, Key},
ObjLocks = ?ets_match_object(mnesia_held_locks, {Oid, write, '_'}),
TabLocks = ?ets_match_object(mnesia_held_locks, {{Tab, ?ALL}, write, '_'}),
check_lock(Tid, Oid, ObjLocks, TabLocks, yes, AlreadyQ, read);
can_lock(Tid, read, Oid, AlreadyQ) -> % Whole tab
Tab = element(1, Oid),
ObjLocks = ?ets_match_object(mnesia_held_locks, {{Tab, '_'}, write, '_'}),
check_lock(Tid, Oid, ObjLocks, [], yes, AlreadyQ, read);
can_lock(Tid, write, {Tab, Key}, AlreadyQ) when Key /= ?ALL ->
Oid = {Tab, Key},
ObjLocks = ?ets_lookup(mnesia_held_locks, Oid),
TabLocks = ?ets_lookup(mnesia_held_locks, {Tab, ?ALL}),
check_lock(Tid, Oid, ObjLocks, TabLocks, yes, AlreadyQ, write);
can_lock(Tid, write, Oid, AlreadyQ) -> % Whole tab
Tab = element(1, Oid),
ObjLocks = ?ets_match_object(mnesia_held_locks, ?match_oid_held_locks({Tab, '_'})),
check_lock(Tid, Oid, ObjLocks, [], yes, AlreadyQ, write).
%% Check held locks for conflicting locks
check_lock(Tid, Oid, [Lock | Locks], TabLocks, X, AlreadyQ, Type) ->
case element(3, Lock) of
Tid ->
check_lock(Tid, Oid, Locks, TabLocks, X, AlreadyQ, Type);
WaitForTid when WaitForTid > Tid -> % Important order
check_lock(Tid, Oid, Locks, TabLocks, {queue, WaitForTid}, AlreadyQ, Type);
WaitForTid when Tid#tid.pid == WaitForTid#tid.pid ->
dbg_out("Spurious lock conflict ~w ~w: ~w -> ~w~n",
[Oid, Lock, Tid, WaitForTid]),
%% check_lock(Tid, Oid, Locks, TabLocks, {queue, WaitForTid}, AlreadyQ);
%% BUGBUG Fix this if possible
{no, WaitForTid};
WaitForTid ->
{no, WaitForTid}
end;
check_lock(_, _, [], [], X, {queue, bad_luck}, _) ->
X; %% The queue should be correct already no need to check it again
check_lock(_, _, [], [], X = {queue, _Tid}, _AlreadyQ, _) ->
X;
check_lock(Tid, Oid, [], [], X, AlreadyQ, Type) ->
{Tab, Key} = Oid,
if
Type == write ->
check_queue(Tid, Tab, X, AlreadyQ);
Key == ?ALL ->
%% hmm should be solvable by a clever select expr but not today...
check_queue(Tid, Tab, X, AlreadyQ);
true ->
%% If there is a queue on that object, read_lock shouldn't be granted
ObjLocks = ets:lookup(mnesia_lock_queue, Oid),
Greatest = max(ObjLocks),
case Greatest of
empty ->
check_queue(Tid, Tab, X, AlreadyQ);
ObjL when Tid > ObjL ->
{no, ObjL}; %% Starvation Preemption (write waits for read)
ObjL ->
check_queue(Tid, Tab, {queue, ObjL}, AlreadyQ)
end
end;
check_lock(Tid, Oid, [], TabLocks, X, AlreadyQ, Type) ->
check_lock(Tid, Oid, TabLocks, [], X, AlreadyQ, Type).
%% Check queue for conflicting locks
%% Assume that all queued locks belong to other tids
check_queue(Tid, Tab, X, AlreadyQ) ->
TabLocks = ets:lookup(mnesia_lock_queue, {Tab,?ALL}),
Greatest = max(TabLocks),
case Greatest of
empty ->
X;
Tid ->
X;
WaitForTid when WaitForTid#queue.tid > Tid -> % Important order
{queue, WaitForTid};
WaitForTid ->
case AlreadyQ of
{no, bad_luck} -> {no, WaitForTid};
_ ->
erlang:error({mnesia_locker, assert, AlreadyQ})
end
end.
max([]) ->
empty;
max([H|R]) ->
max(R, H#queue.tid).
max([H|R], Tid) when H#queue.tid > Tid ->
max(R, H#queue.tid);
max([_|R], Tid) ->
max(R, Tid);
max([], Tid) ->
Tid.
%% We can't queue the ixlock requests since it
%% becomes too complicated for little me :-)
%% If we encounter an object with a wlock we reject the
%% entire lock request
%%
%% BUGBUG: this is actually a bug since we may starve
set_read_lock_on_all_keys(Tid, From, Tab, [RealKey | Tail], Orig, Ack) ->
Oid = {Tab, RealKey},
case can_lock(Tid, read, Oid, {no, bad_luck}) of
yes ->
{granted, Val} = grant_lock(Tid, read, read, Oid),
case opt_lookup_in_client(Val, Oid, read) of % Ought to be invoked
C when record(C, cyclic) -> % in the client
reply(From, {not_granted, C});
Val2 ->
Ack2 = lists:append(Val2, Ack),
set_read_lock_on_all_keys(Tid, From, Tab, Tail, Orig, Ack2)
end;
{no, Lucky} ->
C = #cyclic{op = read, lock = read, oid = Oid, lucky = Lucky},
reply(From, {not_granted, C});
{queue, Lucky} ->
C = #cyclic{op = read, lock = read, oid = Oid, lucky = Lucky},
reply(From, {not_granted, C})
end;
set_read_lock_on_all_keys(_Tid, From, _Tab, [], Orig, Ack) ->
reply(From, {granted, Ack, Orig}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Release of locks
%% Release remote non-pending nodes
release_remote_non_pending(Node, Pending) ->
%% Clear the mnesia_sticky_locks table first, to avoid
%% unnecessary requests to the failing node
?ets_match_delete(mnesia_sticky_locks, {'_' , Node}),
%% Then we have to release all locks held by processes
%% running at the failed node and also simply remove all
%% queue'd requests back to the failed node
AllTids = ?ets_match(mnesia_tid_locks, {'$1', '_', '_'}),
Tids = [T || [T] <- AllTids, Node == node(T#tid.pid), not lists:member(T, Pending)],
do_release_tids(Tids).
do_release_tids([Tid | Tids]) ->
do_release_tid(Tid),
do_release_tids(Tids);
do_release_tids([]) ->
ok.
do_release_tid(Tid) ->
Locks = ?ets_lookup(mnesia_tid_locks, Tid),
?dbg("Release ~p ~p ~n", [Tid, Locks]),
?ets_delete(mnesia_tid_locks, Tid),
release_locks(Locks),
%% Rearrange the queue for the locks that were just released
UniqueLocks = keyunique(lists:sort(Locks),[]),
rearrange_queue(UniqueLocks).
keyunique([{_Tid, Oid, _Op}|R], Acc = [{_, Oid, _}|_]) ->
keyunique(R, Acc);
keyunique([H|R], Acc) ->
keyunique(R, [H|Acc]);
keyunique([], Acc) ->
Acc.
release_locks([Lock | Locks]) ->
release_lock(Lock),
release_locks(Locks);
release_locks([]) ->
ok.
release_lock({Tid, Oid, {queued, _}}) ->
?ets_match_delete(mnesia_lock_queue,
#queue{oid=Oid, tid = Tid, op = '_',
pid = '_', lucky = '_'});
release_lock({Tid, Oid, Op}) ->
if
Op == write ->
?ets_delete(mnesia_held_locks, Oid);
Op == read ->
?ets_match_delete(mnesia_held_locks, {Oid, Op, Tid})
end.
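%% When a lock is released above, a write lock deletes every held-locks
%% entry stored under its Oid, whereas a read lock only removes this
%% transaction's own {Oid, read, Tid} entry, leaving other readers intact.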
rearrange_queue([{_Tid, {Tab, Key}, _} | Locks]) ->
if
Key /= ?ALL->
Queue =
ets:lookup(mnesia_lock_queue, {Tab, ?ALL}) ++
ets:lookup(mnesia_lock_queue, {Tab, Key}),
case Queue of
[] ->
ok;
_ ->
Sorted = lists:reverse(lists:keysort(#queue.tid, Queue)),
try_waiters_obj(Sorted)
end;
true ->
Pat = ?match_oid_lock_queue({Tab, '_'}),
Queue = ?ets_match_object(mnesia_lock_queue, Pat),
Sorted = lists:reverse(lists:keysort(#queue.tid, Queue)),
try_waiters_tab(Sorted)
end,
?dbg("RearrQ ~p~n", [Queue]),
rearrange_queue(Locks);
rearrange_queue([]) ->
ok.
try_waiters_obj([W | Waiters]) ->
case try_waiter(W) of
queued ->
no;
_ ->
try_waiters_obj(Waiters)
end;
try_waiters_obj([]) ->
ok.
try_waiters_tab([W | Waiters]) ->
case W#queue.oid of
{_Tab, ?ALL} ->
case try_waiter(W) of
queued ->
no;
_ ->
try_waiters_tab(Waiters)
end;
Oid ->
case try_waiter(W) of
queued ->
Rest = key_delete_all(Oid, #queue.oid, Waiters),
try_waiters_tab(Rest);
_ ->
try_waiters_tab(Waiters)
end
end;
try_waiters_tab([]) ->
ok.
try_waiter({queue, Oid, Tid, read_write, ReplyTo, _}) ->
try_waiter(Oid, read_write, read, write, ReplyTo, Tid);
try_waiter({queue, Oid, Tid, Op, ReplyTo, _}) ->
try_waiter(Oid, Op, Op, Op, ReplyTo, Tid).
try_waiter(Oid, Op, SimpleOp, Lock, ReplyTo, Tid) ->
case can_lock(Tid, Lock, Oid, {queue, bad_luck}) of
yes ->
%% Delete from queue: Nice place for trace output
?ets_match_delete(mnesia_lock_queue,
#queue{oid=Oid, tid = Tid, op = Op,
pid = ReplyTo, lucky = '_'}),
Reply = grant_lock(Tid, SimpleOp, Lock, Oid),
ReplyTo ! {?MODULE, node(), Reply},
locked;
{queue, _Why} ->
?dbg("Keep ~p ~p ~p ~p~n", [Tid, Oid, Lock, _Why]),
queued; % Keep waiter in queue
{no, Lucky} ->
C = #cyclic{op = SimpleOp, lock = Lock, oid = Oid, lucky = Lucky},
verbose("** WARNING ** Restarted transaction, possible deadlock in lock queue ~w: cyclic = ~w~n",
[Tid, C]),
?ets_match_delete(mnesia_lock_queue,
#queue{oid=Oid, tid = Tid, op = Op,
pid = ReplyTo, lucky = '_'}),
Reply = {not_granted, C},
ReplyTo ! {?MODULE, node(), Reply},
removed
end.
key_delete_all(Key, Pos, TupleList) ->
key_delete_all(Key, Pos, TupleList, []).
key_delete_all(Key, Pos, [H|T], Ack) when element(Pos, H) == Key ->
key_delete_all(Key, Pos, T, Ack);
key_delete_all(Key, Pos, [H|T], Ack) ->
key_delete_all(Key, Pos, T, [H|Ack]);
key_delete_all(_, _, [], Ack) ->
lists:reverse(Ack).
%% ********************* end server code ********************
%% The following code executes at the client side of a transaction
mnesia_down(N, Pending) ->
case whereis(?MODULE) of
undefined ->
%% Takes care of mnesia_down's in early startup
mnesia_monitor:mnesia_down(?MODULE, N);
Pid ->
%% A synchronous call is needed in order to avoid
%% a race with mnesia_tm's coordinator processes
%% that may restart and acquire new locks.
%% mnesia_monitor ensures the sync.
Pid ! {release_remote_non_pending, N, Pending}
end.
%% Acquire a write lock, but do a read; used by
%% mnesia:wread/1
rwlock(Tid, Store, Oid) ->
{Tab, Key} = Oid,
case val({Tab, where_to_read}) of
nowhere ->
mnesia:abort({no_exists, Tab});
Node ->
Lock = write,
case need_lock(Store, Tab, Key, Lock) of
yes ->
Ns = w_nodes(Tab),
Res = get_rwlocks_on_nodes(Ns, Ns, Node, Store, Tid, Oid),
?ets_insert(Store, {{locks, Tab, Key}, Lock}),
Res;
no ->
if
Key == ?ALL ->
w_nodes(Tab);
Tab == ?GLOBAL ->
w_nodes(Tab);
true ->
dirty_rpc(Node, Tab, Key, Lock)
end
end
end.
get_rwlocks_on_nodes([Node | Tail], Orig, Node, Store, Tid, Oid) ->
Op = {self(), {read_write, Tid, Oid}},
{?MODULE, Node} ! Op,
?ets_insert(Store, {nodes, Node}),
add_debug(Node),
get_rwlocks_on_nodes(Tail, Orig, Node, Store, Tid, Oid);
get_rwlocks_on_nodes([Node | Tail], Orig, OtherNode, Store, Tid, Oid) ->
Op = {self(), {write, Tid, Oid}},
{?MODULE, Node} ! Op,
add_debug(Node),
?ets_insert(Store, {nodes, Node}),
get_rwlocks_on_nodes(Tail, Orig, OtherNode, Store, Tid, Oid);
get_rwlocks_on_nodes([], Orig, _Node, Store, _Tid, Oid) ->
receive_wlocks(Orig, read_write_lock, Store, Oid).
%% Return a list of nodes or abort transaction
%% We also insert any additional where_to_write nodes
%% in the local store under the key == nodes
w_nodes(Tab) ->
Nodes = ?catch_val({Tab, where_to_write}),
case Nodes of
[_ | _] -> Nodes;
_ -> mnesia:abort({no_exists, Tab})
end.
%% Acquire a sticky wlock. A sticky lock is a lock
%% which remains at this node after the termination of the
%% transaction.
sticky_wlock(Tid, Store, Oid) ->
sticky_lock(Tid, Store, Oid, write).
sticky_rwlock(Tid, Store, Oid) ->
sticky_lock(Tid, Store, Oid, read_write).
sticky_lock(Tid, Store, {Tab, Key} = Oid, Lock) ->
N = val({Tab, where_to_read}),
if
node() == N ->
case need_lock(Store, Tab, Key, write) of
yes ->
do_sticky_lock(Tid, Store, Oid, Lock);
no ->
dirty_sticky_lock(Tab, Key, [N], Lock)
end;
true ->
mnesia:abort({not_local, Tab})
end.
do_sticky_lock(Tid, Store, {Tab, Key} = Oid, Lock) ->
?MODULE ! {self(), {test_set_sticky, Tid, Oid, Lock}},
receive
{?MODULE, _N, granted} ->
?ets_insert(Store, {{locks, Tab, Key}, write}),
granted;
{?MODULE, _N, {granted, Val}} -> %% for rwlocks
case opt_lookup_in_client(Val, Oid, write) of
C when record(C, cyclic) ->
exit({aborted, C});
Val2 ->
?ets_insert(Store, {{locks, Tab, Key}, write}),
Val2
end;
{?MODULE, _N, {not_granted, Reason}} ->
exit({aborted, Reason});
{?MODULE, N, not_stuck} ->
not_stuck(Tid, Store, Tab, Key, Oid, Lock, N),
dirty_sticky_lock(Tab, Key, [N], Lock);
{mnesia_down, N} ->
exit({aborted, {node_not_running, N}});
{?MODULE, N, {stuck_elsewhere, _N2}} ->
stuck_elsewhere(Tid, Store, Tab, Key, Oid, Lock),
dirty_sticky_lock(Tab, Key, [N], Lock)
end.
not_stuck(Tid, Store, Tab, _Key, Oid, _Lock, N) ->
rlock(Tid, Store, {Tab, ?ALL}), %% needed?
wlock(Tid, Store, Oid), %% perfect sync
wlock(Tid, Store, {Tab, ?STICK}), %% max one sticker/table
Ns = val({Tab, where_to_write}),
rpc:abcast(Ns, ?MODULE, {stick, Oid, N}).
stuck_elsewhere(Tid, Store, Tab, _Key, Oid, _Lock) ->
rlock(Tid, Store, {Tab, ?ALL}), %% needed?
wlock(Tid, Store, Oid), %% perfect sync
wlock(Tid, Store, {Tab, ?STICK}), %% max one sticker/table
Ns = val({Tab, where_to_write}),
rpc:abcast(Ns, ?MODULE, {unstick, Tab}).
dirty_sticky_lock(Tab, Key, Nodes, Lock) ->
if
Lock == read_write ->
mnesia_lib:db_get(Tab, Key);
Key == ?ALL ->
Nodes;
Tab == ?GLOBAL ->
Nodes;
true ->
ok
end.
sticky_wlock_table(Tid, Store, Tab) ->
sticky_lock(Tid, Store, {Tab, ?ALL}, write).
%% Acquire a wlock on Oid.
%% We store a {Tabname, write, Tid} item in all lock tables
%% on all nodes containing a copy of Tabname.
%% We also store an item {{locks, Tab, Key}, write} in the
%% local store when we have acquired the lock.
%%
wlock(Tid, Store, Oid) ->
{Tab, Key} = Oid,
case need_lock(Store, Tab, Key, write) of
yes ->
Ns = w_nodes(Tab),
Op = {self(), {write, Tid, Oid}},
?ets_insert(Store, {{locks, Tab, Key}, write}),
get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid);
no when Key /= ?ALL, Tab /= ?GLOBAL ->
[];
no ->
w_nodes(Tab)
end.
wlock_table(Tid, Store, Tab) ->
wlock(Tid, Store, {Tab, ?ALL}).
%% Write lock even if the table does not exist
wlock_no_exist(Tid, Store, Tab, Ns) ->
Oid = {Tab, ?ALL},
Op = {self(), {write, Tid, Oid}},
get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid).
need_lock(Store, Tab, Key, LockPattern) ->
TabL = ?ets_match_object(Store, {{locks, Tab, ?ALL}, LockPattern}),
if
TabL == [] ->
KeyL = ?ets_match_object(Store, {{locks, Tab, Key}, LockPattern}),
if
KeyL == [] ->
yes;
true ->
no
end;
true ->
no
end.
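%% In other words, need_lock/4 above answers yes only when neither a
%% table-wide entry {{locks, Tab, ?ALL}, ...} nor a key entry
%% {{locks, Tab, Key}, ...} matching the wanted lock is already present in
%% the transaction's local store, i.e. when this transaction has not
%% already asked for an equivalent lock.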
add_debug(Node) -> % Use process dictionary for debug info
case get(mnesia_wlock_nodes) of
undefined ->
put(mnesia_wlock_nodes, [Node]);
NodeList ->
put(mnesia_wlock_nodes, [Node|NodeList])
end.
del_debug(Node) ->
case get(mnesia_wlock_nodes) of
undefined -> % Shouldn't happen
ignore;
[Node] ->
erase(mnesia_wlock_nodes);
List ->
put(mnesia_wlock_nodes, lists:delete(Node, List))
end.
%% We first send lock requests to the lockmanagers on all
%% nodes holding a copy of the table
get_wlocks_on_nodes([Node | Tail], Orig, Store, Request, Oid) ->
{?MODULE, Node} ! Request,
?ets_insert(Store, {nodes, Node}),
add_debug(Node),
get_wlocks_on_nodes(Tail, Orig, Store, Request, Oid);
get_wlocks_on_nodes([], Orig, Store, _Request, Oid) ->
receive_wlocks(Orig, Orig, Store, Oid).
receive_wlocks([Node | Tail], Res, Store, Oid) ->
receive
{?MODULE, Node, granted} ->
del_debug(Node),
receive_wlocks(Tail, Res, Store, Oid);
{?MODULE, Node, {granted, Val}} -> %% for rwlocks
del_debug(Node),
case opt_lookup_in_client(Val, Oid, write) of
C when record(C, cyclic) ->
flush_remaining(Tail, Node, {aborted, C});
Val2 ->
receive_wlocks(Tail, Val2, Store, Oid)
end;
{?MODULE, Node, {not_granted, Reason}} ->
del_debug(Node),
Reason1 = {aborted, Reason},
flush_remaining(Tail, Node, Reason1);
{mnesia_down, Node} ->
del_debug(Node),
Reason1 = {aborted, {node_not_running, Node}},
flush_remaining(Tail, Node, Reason1);
{?MODULE, Node, {switch, Node2, Req}} -> %% for rwlocks
del_debug(Node),
add_debug(Node2),
?ets_insert(Store, {nodes, Node2}),
{?MODULE, Node2} ! Req,
receive_wlocks([Node2 | Tail], Res, Store, Oid)
end;
receive_wlocks([], Res, _Store, _Oid) ->
Res.
flush_remaining([], _SkipNode, Res) ->
exit(Res);
flush_remaining([SkipNode | Tail ], SkipNode, Res) ->
del_debug(SkipNode),
flush_remaining(Tail, SkipNode, Res);
flush_remaining([Node | Tail], SkipNode, Res) ->
receive
{?MODULE, Node, _} ->
del_debug(Node),
flush_remaining(Tail, SkipNode, Res);
{mnesia_down, Node} ->
del_debug(Node),
flush_remaining(Tail, SkipNode, {aborted, {node_not_running, Node}})
end.
opt_lookup_in_client(lookup_in_client, Oid, Lock) ->
{Tab, Key} = Oid,
case catch mnesia_lib:db_get(Tab, Key) of
{'EXIT', _} ->
%% Table has been deleted from this node,
%% restart the transaction.
#cyclic{op = read, lock = Lock, oid = Oid, lucky = nowhere};
Val ->
Val
end;
opt_lookup_in_client(Val, _Oid, _Lock) ->
Val.
return_granted_or_nodes({_, ?ALL} , Nodes) -> Nodes;
return_granted_or_nodes({?GLOBAL, _}, Nodes) -> Nodes;
return_granted_or_nodes(_ , _Nodes) -> granted.
%% We store a {Tab, read, From} item in the
%% locks table on the node where we actually do pick up the object,
%% and we also store an item {lock, Oid, read} in our local store
%% so that we can release any locks we hold when we commit.
%% This function not only acquires a read lock, but also reads the object.
%% Oids are always {Tab, Key} tuples.
rlock(Tid, Store, Oid) ->
{Tab, Key} = Oid,
case val({Tab, where_to_read}) of
nowhere ->
mnesia:abort({no_exists, Tab});
Node ->
case need_lock(Store, Tab, Key, '_') of
yes ->
R = l_request(Node, {read, Tid, Oid}, Store),
rlock_get_reply(Node, Store, Oid, R);
no ->
if
Key == ?ALL ->
[Node];
Tab == ?GLOBAL ->
[Node];
true ->
dirty_rpc(Node, Tab, Key, read)
end
end
end.
dirty_rpc(nowhere, Tab, Key, _Lock) ->
mnesia:abort({no_exists, {Tab, Key}});
dirty_rpc(Node, _Tab, ?ALL, _Lock) ->
[Node];
dirty_rpc(Node, ?GLOBAL, _Key, _Lock) ->
[Node];
dirty_rpc(Node, Tab, Key, Lock) ->
Args = [Tab, Key],
case rpc:call(Node, mnesia_lib, db_get, Args) of
{badrpc, Reason} ->
case val({Tab, where_to_read}) of
Node ->
ErrorTag = mnesia_lib:dirty_rpc_error_tag(Reason),
mnesia:abort({ErrorTag, Args});
_NewNode ->
%% Table has been deleted from the node,
%% restart the transaction.
C = #cyclic{op = read, lock = Lock, oid = {Tab, Key}, lucky = nowhere},
exit({aborted, C})
end;
Other ->
Other
end.
rlock_get_reply(Node, Store, Oid, {granted, V}) ->
{Tab, Key} = Oid,
?ets_insert(Store, {{locks, Tab, Key}, read}),
?ets_insert(Store, {nodes, Node}),
case opt_lookup_in_client(V, Oid, read) of
C when record(C, cyclic) ->
mnesia:abort(C);
Val ->
Val
end;
rlock_get_reply(Node, Store, Oid, granted) ->
{Tab, Key} = Oid,
?ets_insert(Store, {{locks, Tab, Key}, read}),
?ets_insert(Store, {nodes, Node}),
return_granted_or_nodes(Oid, [Node]);
rlock_get_reply(Node, Store, Tab, {granted, V, RealKeys}) ->
L = fun(K) -> ?ets_insert(Store, {{locks, Tab, K}, read}) end,
lists:foreach(L, RealKeys),
?ets_insert(Store, {nodes, Node}),
V;
rlock_get_reply(_Node, _Store, _Oid, {not_granted , Reason}) ->
exit({aborted, Reason});
rlock_get_reply(_Node, Store, Oid, {switch, N2, Req}) ->
?ets_insert(Store, {nodes, N2}),
{?MODULE, N2} ! Req,
rlock_get_reply(N2, Store, Oid, l_req_rec(N2, Store)).
rlock_table(Tid, Store, Tab) ->
rlock(Tid, Store, {Tab, ?ALL}).
ixrlock(Tid, Store, Tab, IxKey, Pos) ->
case val({Tab, where_to_read}) of
nowhere ->
mnesia:abort({no_exists, Tab});
Node ->
R = l_request(Node, {ix_read, Tid, Tab, IxKey, Pos}, Store),
rlock_get_reply(Node, Store, Tab, R)
end.
%% Grabs the locks or exits
global_lock(Tid, Store, Item, write, Ns) ->
Oid = {?GLOBAL, Item},
Op = {self(), {write, Tid, Oid}},
get_wlocks_on_nodes(Ns, Ns, Store, Op, Oid);
global_lock(Tid, Store, Item, read, Ns) ->
Oid = {?GLOBAL, Item},
send_requests(Ns, {read, Tid, Oid}),
rec_requests(Ns, Oid, Store),
Ns.
send_requests([Node | Nodes], X) ->
{?MODULE, Node} ! {self(), X},
send_requests(Nodes, X);
send_requests([], _X) ->
ok.
rec_requests([Node | Nodes], Oid, Store) ->
Res = l_req_rec(Node, Store),
case catch rlock_get_reply(Node, Store, Oid, Res) of
{'EXIT', Reason} ->
flush_remaining(Nodes, Node, Reason);
_ ->
rec_requests(Nodes, Oid, Store)
end;
rec_requests([], _Oid, _Store) ->
ok.
get_held_locks() ->
?ets_match_object(mnesia_held_locks, '_').
get_lock_queue() ->
Q = ?ets_match_object(mnesia_lock_queue, '_'),
[{Oid, Op, Pid, Tid, WFT} || {queue, Oid, Tid, Op, Pid, WFT} <- Q].
do_stop() ->
exit(shutdown).
%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% System upgrade
system_continue(_Parent, _Debug, State) ->
loop(State).
system_terminate(_Reason, _Parent, _Debug, _State) ->
do_stop().
system_code_change(State, _Module, _OldVsn, _Extra) ->
{ok, State}.
<|start_filename|>lib/gs/doc/src/examples/ex8.erl<|end_filename|>
-module(ex8).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([start/0]).
start() ->
gs:window(win,gs:start(),{map,true}),
gs:radiobutton(rb1,win,[{label,{text,"rb1"}},{value,a},{y,0}]),
gs:radiobutton(rb2,win,[{label,{text,"rb2"}},{value,a},{y,30}]),
gs:radiobutton(rb3,win,[{label,{text,"rb3"}},{value,b},{y,60}]),
rb_loop().
rb_loop() ->
receive
{gs,Any_Rb,click,Data,[Text, Grp, a | Rest]} ->
io:format("either rb1 or rb2 is on.~n",[]),
rb_loop();
{gs,rb3,click,Data,[Text, Grp, b | Rest]} ->
io:format("rb3 is selected.~n",[]),
rb_loop()
end.
<|start_filename|>lib/gs/doc/src/examples/ex14.erl<|end_filename|>
-module(ex14).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([start/0,init/0]).
start() -> spawn(ex14, init, []).
init() ->
Y = [{y,0},{height, 30},{width, 90}],
R=[{window, [{width, 400},{height, 300}, {title,"editor"},{map, true}],
[{editor,editor,[{x,0},{y, 35},{width,300},{height,250},
{insert,{'end',"Edit this text!"}},{vscroll,right}]},
{button, clear, [{label, {text, "Clear"}},{x,0} | Y]},
{checkbutton,enable,[{label,{text,"Enable"}},{select,false},{x,100}|Y]},
{button, time, [{label, {text, "Insert Time"}},{x,200} | Y]},
{button, quit, [{label, {text, "Quit"}},{x,300} | Y]}]}],
gs:create_tree(gs:start(),R),
gs:config(editor,{enable,false}),
loop().
loop() ->
receive
{gs, clear, _, _, _} ->
io:format("clear editor~n"),
Enable = gs:read(editor, enable),
gs:config(editor,{enable, true}),
gs:config(editor,clear),
gs:config(editor,{enable, Enable});
{gs, enable, _, _, [_Txt, _Grp, Enable|_]} ->
io:format("Enable: ~w~n", [Enable]),
gs:config(editor,{enable, Enable});
{gs, time, _, _, _} ->
TimeStr = io_lib:format("Hr:Min:Sec is now ~w:~w:~w~n",
tuple_to_list(time())),
io:format("Insert Time: ~s~n", [TimeStr]),
Enable = gs:read(editor, enable),
gs:config(editor,{enable, true}),
gs:config(editor,{insert, {insert, TimeStr}}),
gs:config(editor,{enable, Enable});
{gs, quit, _, _, _} ->
exit(normal);
Other ->
io:format("Other:~w~n",[Other])
end,
loop().
<|start_filename|>erts/emulator/test/trace_call_time_SUITE_data/trace_nif.c<|end_filename|>
#include "erl_nif.h"
static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
{
return 0;
}
static int reload(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
{
return 0;
}
static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
{
return 0;
}
static void unload(ErlNifEnv* env, void* priv_data)
{
}
static ERL_NIF_TERM nif_dec_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
int x = 0;
enif_get_uint(env, argv[0], &x);
return enif_make_int(env, x - 1);
}
static ErlNifFunc nif_funcs[] =
{
{"nif_dec", 1, nif_dec_1}
};
ERL_NIF_INIT(trace_call_time_SUITE,nif_funcs,load,reload,upgrade,unload)
<|start_filename|>erts/emulator/test/port_SUITE_data/echo_drv.c<|end_filename|>
#include <stdio.h>
#include <string.h>
#include "erl_driver.h"
/* -------------------------------------------------------------------------
** Data types
**/
typedef struct _erl_drv_data EchoDrvData;
/* -------------------------------------------------------------------------
** Entry struct
**/
static EchoDrvData *echo_drv_start(ErlDrvPort port, char *command);
static void echo_drv_stop(EchoDrvData *data_p);
static void echo_drv_output(ErlDrvData drv_data, char *buf,
ErlDrvSizeT len);
static void echo_drv_finish(void);
static ErlDrvEntry echo_drv_entry = {
NULL, /* init */
echo_drv_start,
echo_drv_stop,
echo_drv_output,
NULL, /* ready_input */
NULL, /* ready_output */
"echo_drv",
echo_drv_finish,
NULL, /* handle */
NULL, /* control */
NULL, /* timeout */
NULL, /* outputv */
NULL, /* ready_async */
NULL,
NULL,
NULL,
ERL_DRV_EXTENDED_MARKER,
ERL_DRV_EXTENDED_MAJOR_VERSION,
ERL_DRV_EXTENDED_MINOR_VERSION,
0,
NULL,
NULL,
NULL,
};
/* -------------------------------------------------------------------------
** Entry functions
**/
DRIVER_INIT(echo_drv)
{
return &echo_drv_entry;
}
static EchoDrvData *echo_drv_start(ErlDrvPort port, char *command) {
void *void_ptr;
int res = -4711;
if (command) {
while(*command != '\0' && *command != ' ')
++command;
while(*command != '\0' && *command == ' ')
++command;
if(*command == '-') {
res = driver_output(port, command+1, strlen(command) - 1);
}
}
return void_ptr = port;
}
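/* For reference: the start callback scans the command string given to
 * open_port/2, skipping the driver name and any spaces after it; if the
 * remainder starts with '-', everything after the '-' is echoed straight
 * back with driver_output(). The port handle itself is reused as the
 * driver data. */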
static void echo_drv_stop(EchoDrvData *data_p) {
}
static void echo_drv_output(ErlDrvData drv_data, char *buf, ErlDrvSizeT len) {
EchoDrvData *data_p = (EchoDrvData *) drv_data;
void *void_ptr;
ErlDrvPort port = void_ptr = data_p;
driver_output(port, buf, len);
}
static void echo_drv_finish() {
}
<|start_filename|>lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl<|end_filename|>
%% ``Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%%
%% $Id: sys_pre_expand.erl,v 1.1 2008/12/17 09:53:42 mikpe Exp $
%%
%% Purpose : Expand some source Erlang constructions. This is part of the
%% pre-processing phase.
%% N.B. Although structs (tagged tuples) are not yet allowed in the
%% language there is code included in pattern/2 and expr/3 (commented out)
%% that handles them by transforming them to tuples.
-module(sys_pre_expand).
%% Main entry point.
-export([module/2]).
-import(ordsets, [from_list/1,add_element/2,
union/1,union/2,intersection/1,intersection/2,subtract/2]).
-import(lists, [member/2,map/2,foldl/3,foldr/3,sort/1,reverse/1,duplicate/2]).
-include("../my_include/erl_bits.hrl").
-record(expand, {module=[], %Module name
parameters=undefined, %Module parameters
package="", %Module package
exports=[], %Exports
imports=[], %Imports
mod_imports, %Module Imports
compile=[], %Compile flags
records=dict:new(), %Record definitions
attributes=[], %Attributes
defined=[], %Defined functions
vcount=0, %Variable counter
func=[], %Current function
arity=[], %Arity for current function
fcount=0, %Local fun count
fun_index=0, %Global index for funs
bitdefault,
bittypes
}).
%% module(Forms, CompileOptions)
%% {ModuleName,Exports,TransformedForms,CompileOptions'}
%% Expand the forms in one module. N.B.: the lists of predefined
%% exports and imports are really ordsets!
module(Fs, Opts) ->
%% Set pre-defined exported functions.
PreExp = [{module_info,0},{module_info,1}],
%% Set pre-defined module imports.
PreModImp = [{erlang,erlang},{packages,packages}],
%% Build initial expand record.
St0 = #expand{exports=PreExp,
mod_imports=dict:from_list(PreModImp),
compile=Opts,
defined=PreExp,
bitdefault = erl_bits:system_bitdefault(),
bittypes = erl_bits:system_bittypes()
},
%% Expand the functions.
{Tfs,St1} = forms(Fs, foldl(fun define_function/2, St0, Fs)),
{Efs,St2} = expand_pmod(Tfs, St1),
%% Get the correct list of exported functions.
Exports = case member(export_all, St2#expand.compile) of
true -> St2#expand.defined;
false -> St2#expand.exports
end,
%% Generate all functions from stored info.
{Ats,St3} = module_attrs(St2#expand{exports = Exports}),
{Mfs,St4} = module_predef_funcs(St3),
{St4#expand.module, St4#expand.exports, Ats ++ Efs ++ Mfs,
St4#expand.compile}.
expand_pmod(Fs0, St) ->
case St#expand.parameters of
undefined ->
{Fs0,St};
Ps ->
{Fs1,Xs,Ds} = sys_expand_pmod:forms(Fs0, Ps,
St#expand.exports,
St#expand.defined),
A = length(Ps),
Vs = [{var,0,V} || V <- Ps],
N = {atom,0,St#expand.module},
B = [{tuple,0,[N|Vs]}],
F = {function,0,new,A,[{clause,0,Vs,[],B}]},
As = St#expand.attributes,
{[F|Fs1],St#expand{exports=add_element({new,A}, Xs),
defined=add_element({new,A}, Ds),
attributes = [{abstract, true} | As]}}
end.
%% -type define_function(Form, State) -> State.
%% Add the function to defined if the form is a function.
define_function({function,_,N,A,_Cs}, St) ->
St#expand{defined=add_element({N,A}, St#expand.defined)};
define_function(_, St) -> St.
module_attrs(St) ->
{[{attribute,0,Name,Val} || {Name,Val} <- St#expand.attributes],St}.
module_predef_funcs(St) ->
PreDef = [{module_info,0},{module_info,1}],
PreExp = PreDef,
{[{function,0,module_info,0,
[{clause,0,[],[],
[{call,0,{remote,0,{atom,0,erlang},{atom,0,get_module_info}},
[{atom,0,St#expand.module}]}]}]},
{function,0,module_info,1,
[{clause,0,[{var,0,'X'}],[],
[{call,0,{remote,0,{atom,0,erlang},{atom,0,get_module_info}},
[{atom,0,St#expand.module},{var,0,'X'}]}]}]}],
St#expand{defined=union(from_list(PreDef), St#expand.defined),
exports=union(from_list(PreExp), St#expand.exports)}}.
%% forms(Forms, State) ->
%% {TransformedForms,State'}
%% Process the forms. Attributes are lost and just affect the state.
%% Ignore uninteresting forms like eof and type.
forms([{attribute,_,Name,Val}|Fs0], St0) ->
St1 = attribute(Name, Val, St0),
forms(Fs0, St1);
forms([{function,L,N,A,Cs}|Fs0], St0) ->
{Ff,St1} = function(L, N, A, Cs, St0),
{Fs,St2} = forms(Fs0, St1),
{[Ff|Fs],St2};
forms([_|Fs], St) -> forms(Fs, St);
forms([], St) -> {[],St}.
%% -type attribute(Attribute, Value, State) ->
%% State.
%% Process an attribute; this just affects the state.
attribute(module, {Module, As}, St) ->
true = is_atom(Module),
St#expand{module=Module,
parameters=As};
attribute(module, Module, St) ->
true = is_atom(Module),
St#expand{module=Module};
attribute(export, Es, St) ->
St#expand{exports=union(from_list(Es), St#expand.exports)};
attribute(import, Is, St) ->
import(Is, St);
attribute(compile, C, St) when list(C) ->
St#expand{compile=St#expand.compile ++ C};
attribute(compile, C, St) ->
St#expand{compile=St#expand.compile ++ [C]};
attribute(record, {Name,Defs}, St) ->
St#expand{records=dict:store(Name, normalise_fields(Defs),
St#expand.records)};
attribute(file, _File, St) -> St; %This is ignored
attribute(Name, Val, St) when list(Val) ->
St#expand{attributes=St#expand.attributes ++ [{Name,Val}]};
attribute(Name, Val, St) ->
St#expand{attributes=St#expand.attributes ++ [{Name,[Val]}]}.
function(L, N, A, Cs0, St0) ->
{Cs,St} = clauses(Cs0, St0#expand{func=N,arity=A,fcount=0}),
{{function,L,N,A,Cs},St}.
%% -type clauses([Clause], State) ->
%% {[TransformedClause],State}.
%% Expand function clauses.
clauses([{clause,Line,H0,G0,B0}|Cs0], St0) ->
{H,Hvs,_Hus,St1} = head(H0, St0),
{G,Gvs,_Gus,St2} = guard(G0, Hvs, St1),
{B,_Bvs,_Bus,St3} = exprs(B0, union(Hvs, Gvs), St2),
{Cs,St4} = clauses(Cs0, St3),
{[{clause,Line,H,G,B}|Cs],St4};
clauses([], St) -> {[],St}.
%% head(HeadPatterns, State) ->
%% {TransformedPatterns,Variables,UsedVariables,State'}
head(As, St) -> pattern_list(As, St).
%% pattern(Pattern, State) ->
%% {TransformedPattern,Variables,UsedVariables,State'}
%% BITS: added used variables for bit patterns with variable length
%%
pattern({var,_,'_'}=Var, St) -> %Ignore anonymous variable.
{Var,[],[],St};
pattern({var,_,V}=Var, St) ->
{Var,[V],[],St};
pattern({char,_,_}=Char, St) ->
{Char,[],[],St};
pattern({integer,_,_}=Int, St) ->
{Int,[],[],St};
pattern({float,_,_}=Float, St) ->
{Float,[],[],St};
pattern({atom,_,_}=Atom, St) ->
{Atom,[],[],St};
pattern({string,_,_}=String, St) ->
{String,[],[],St};
pattern({nil,_}=Nil, St) ->
{Nil,[],[],St};
pattern({cons,Line,H,T}, St0) ->
{TH,THvs,Hus,St1} = pattern(H, St0),
{TT,TTvs,Tus,St2} = pattern(T, St1),
{{cons,Line,TH,TT},union(THvs, TTvs),union(Hus,Tus),St2};
pattern({tuple,Line,Ps}, St0) ->
{TPs,TPsvs,Tus,St1} = pattern_list(Ps, St0),
{{tuple,Line,TPs},TPsvs,Tus,St1};
%%pattern({struct,Line,Tag,Ps}, St0) ->
%% {TPs,TPsvs,St1} = pattern_list(Ps, St0),
%% {{tuple,Line,[{atom,Line,Tag}|TPs]},TPsvs,St1};
pattern({record_index,Line,Name,Field}, St) ->
{index_expr(Line, Field, Name, record_fields(Name, St)),[],[],St};
pattern({record,Line,Name,Pfs}, St0) ->
Fs = record_fields(Name, St0),
{TMs,TMsvs,Us,St1} = pattern_list(pattern_fields(Fs, Pfs), St0),
{{tuple,Line,[{atom,Line,Name}|TMs]},TMsvs,Us,St1};
pattern({bin,Line,Es0}, St0) ->
{Es1,Esvs,Esus,St1} = pattern_bin(Es0, St0),
{{bin,Line,Es1},Esvs,Esus,St1};
pattern({op,_,'++',{nil,_},R}, St) ->
pattern(R, St);
pattern({op,_,'++',{cons,Li,H,T},R}, St) ->
pattern({cons,Li,H,{op,Li,'++',T,R}}, St);
pattern({op,_,'++',{string,Li,L},R}, St) ->
pattern(string_to_conses(Li, L, R), St);
pattern({match,Line,Pat1, Pat2}, St0) ->
{TH,Hvt,Hus,St1} = pattern(Pat2, St0),
{TT,Tvt,Tus,St2} = pattern(Pat1, St1),
{{match,Line,TT,TH}, union(Hvt,Tvt), union(Hus,Tus), St2};
%% Compile-time pattern expressions, including unary operators.
pattern({op,Line,Op,A}, St) ->
{ erl_eval:partial_eval({op,Line,Op,A}), [], [], St};
pattern({op,Line,Op,L,R}, St) ->
{ erl_eval:partial_eval({op,Line,Op,L,R}), [], [], St}.
pattern_list([P0|Ps0], St0) ->
{P,Pvs,Pus,St1} = pattern(P0, St0),
{Ps,Psvs,Psus,St2} = pattern_list(Ps0, St1),
{[P|Ps],union(Pvs, Psvs),union(Pus, Psus),St2};
pattern_list([], St) -> {[],[],[],St}.
%% guard(Guard, VisibleVariables, State) ->
%% {TransformedGuard,NewVariables,UsedVariables,State'}
%% Transform a list of guard tests. We KNOW that this has been checked
%% and what the guard tests are. Use expr for transforming the guard
%% expressions.
guard([G0|Gs0], Vs, St0) ->
{G,Hvs,Hus,St1} = guard_tests(G0, Vs, St0),
{Gs,Tvs,Tus,St2} = guard(Gs0, Vs, St1),
{[G|Gs],union(Hvs, Tvs),union(Hus, Tus),St2};
guard([], _, St) -> {[],[],[],St}.
guard_tests([Gt0|Gts0], Vs, St0) ->
{Gt1,Gvs,Gus,St1} = guard_test(Gt0, Vs, St0),
{Gts1,Gsvs,Gsus,St2} = guard_tests(Gts0, union(Gvs, Vs), St1),
{[Gt1|Gts1],union(Gvs, Gsvs),union(Gus, Gsus),St2};
guard_tests([], _, St) -> {[],[],[],St}.
guard_test({call,Line,{atom,_,record},[A,{atom,_,Name}]}, Vs, St) ->
record_test_in_guard(Line, A, Name, Vs, St);
guard_test({call,Line,{atom,Lt,Tname},As}, Vs, St) ->
%% XXX This is ugly. We can remove this workaround if/when
%% we'll allow 'andalso' in guards. For now, we must have
%% different code in guards and in bodies.
Test = {remote,Lt,
{atom,Lt,erlang},
{atom,Lt,normalise_test(Tname, length(As))}},
put(sys_pre_expand_in_guard, yes),
R = expr({call,Line,Test,As}, Vs, St),
erase(sys_pre_expand_in_guard),
R;
guard_test(Test, Vs, St) ->
%% XXX See the previous clause.
put(sys_pre_expand_in_guard, yes),
R = expr(Test, Vs, St),
erase(sys_pre_expand_in_guard),
R.
%% record_test(Line, Term, Name, Vs, St) -> TransformedExpr
%% Generate code for the is_record/2 test.
record_test(Line, Term, Name, Vs, St) ->
case get(sys_pre_expand_in_guard) of
undefined ->
record_test_in_body(Line, Term, Name, Vs, St);
yes ->
record_test_in_guard(Line, Term, Name, Vs, St)
end.
record_test_in_guard(Line, Term, Name, Vs, St) ->
%% Notes: (1) To keep is_record/3 properly atomic (e.g. when inverted
%% using 'not'), we cannot convert it to an instruction
%% sequence here. It must remain a single call.
%% (2) Later passes assume that the last argument (the size)
%% is a literal.
%% (3) We don't want calls to erlang:is_record/3 (in the source code)
%% confused with the internal instruction. (Reason: (2) above +
%% code bloat.)
%% (4) Xref may be run on the abstract code, so the name in the
%% abstract code must be erlang:is_record/3.
%% (5) To achieve both (3) and (4) at the same time, set the name
%% here to erlang:is_record/3, but mark it as compiler-generated.
%% The v3_core pass will change the name to erlang:internal_is_record/3.
Fs = record_fields(Name, St),
expr({call,-Line,{remote,-Line,{atom,-Line,erlang},{atom,-Line,is_record}},
[Term,{atom,Line,Name},{integer,Line,length(Fs)+1}]},
Vs, St).
record_test_in_body(Line, Expr, Name, Vs, St0) ->
%% As Expr may have side effects, we must evaluate it
%% first and bind the value to a new variable.
%% We must also handle the case where Expr does not
%% evaluate to a tuple properly.
Fs = record_fields(Name, St0),
{Var,St} = new_var(Line, St0),
expr({block,Line,
[{match,Line,Var,Expr},
{op,Line,
'andalso',
{call,Line,{atom,Line,is_tuple},[Var]},
{op,Line,'andalso',
{op,Line,'=:=',
{call,Line,{atom,Line,size},[Var]},
{integer,Line,length(Fs)+1}},
{op,Line,'=:=',
{call,Line,{atom,Line,element},[{integer,Line,1},Var]},
{atom,Line,Name}}}}]}, Vs, St).
normalise_test(atom, 1) -> is_atom;
normalise_test(binary, 1) -> is_binary;
normalise_test(constant, 1) -> is_constant;
normalise_test(float, 1) -> is_float;
normalise_test(function, 1) -> is_function;
normalise_test(integer, 1) -> is_integer;
normalise_test(list, 1) -> is_list;
normalise_test(number, 1) -> is_number;
normalise_test(pid, 1) -> is_pid;
normalise_test(port, 1) -> is_port;
normalise_test(reference, 1) -> is_reference;
normalise_test(tuple, 1) -> is_tuple;
normalise_test(Name, _) -> Name.
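%% For example, a guard written with the old-style test integer(X) is
%% rewritten above into a call to erlang:is_integer(X); names that have no
%% new-style counterpart are passed through unchanged.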
%% exprs(Expressions, VisibleVariables, State) ->
%% {TransformedExprs,NewVariables,UsedVariables,State'}
exprs([E0|Es0], Vs, St0) ->
{E,Evs,Eus,St1} = expr(E0, Vs, St0),
{Es,Esvs,Esus,St2} = exprs(Es0, union(Evs, Vs), St1),
{[E|Es],union(Evs, Esvs),union(Eus, Esus),St2};
exprs([], _, St) -> {[],[],[],St}.
%% expr(Expression, VisibleVariables, State) ->
%% {TransformedExpression,NewVariables,UsedVariables,State'}
expr({var,_,V}=Var, _Vs, St) ->
{Var,[],[V],St};
expr({char,_,_}=Char, _Vs, St) ->
{Char,[],[],St};
expr({integer,_,_}=Int, _Vs, St) ->
{Int,[],[],St};
expr({float,_,_}=Float, _Vs, St) ->
{Float,[],[],St};
expr({atom,_,_}=Atom, _Vs, St) ->
{Atom,[],[],St};
expr({string,_,_}=String, _Vs, St) ->
{String,[],[],St};
expr({nil,_}=Nil, _Vs, St) ->
{Nil,[],[],St};
expr({cons,Line,H0,T0}, Vs, St0) ->
{H,Hvs,Hus,St1} = expr(H0, Vs, St0),
{T,Tvs,Tus,St2} = expr(T0, Vs, St1),
{{cons,Line,H,T},union(Hvs, Tvs),union(Hus, Tus),St2};
expr({lc,Line,E0,Qs0}, Vs, St0) ->
{E1,Qs1,_,Lvs,Lus,St1} = lc_tq(Line, E0, Qs0, {nil,Line}, Vs, St0),
{{lc,Line,E1,Qs1},Lvs,Lus,St1};
expr({tuple,Line,Es0}, Vs, St0) ->
{Es1,Esvs,Esus,St1} = expr_list(Es0, Vs, St0),
{{tuple,Line,Es1},Esvs,Esus,St1};
%%expr({struct,Line,Tag,Es0}, Vs, St0) ->
%% {Es1,Esvs,Esus,St1} = expr_list(Es0, Vs, St0),
%% {{tuple,Line,[{atom,Line,Tag}|Es1]},Esvs,Esus,St1};
expr({record_index,Line,Name,F}, Vs, St) ->
I = index_expr(Line, F, Name, record_fields(Name, St)),
expr(I, Vs, St);
expr({record,Line,Name,Is}, Vs, St) ->
expr({tuple,Line,[{atom,Line,Name}|
record_inits(record_fields(Name, St), Is)]},
Vs, St);
expr({record_field,Line,R,Name,F}, Vs, St) ->
I = index_expr(Line, F, Name, record_fields(Name, St)),
expr({call,Line,{atom,Line,element},[I,R]}, Vs, St);
expr({record,_,R,Name,Us}, Vs, St0) ->
{Ue,St1} = record_update(R, Name, record_fields(Name, St0), Us, St0),
expr(Ue, Vs, St1);
expr({bin,Line,Es0}, Vs, St0) ->
{Es1,Esvs,Esus,St1} = expr_bin(Es0, Vs, St0),
{{bin,Line,Es1},Esvs,Esus,St1};
expr({block,Line,Es0}, Vs, St0) ->
{Es,Esvs,Esus,St1} = exprs(Es0, Vs, St0),
{{block,Line,Es},Esvs,Esus,St1};
expr({'if',Line,Cs0}, Vs, St0) ->
{Cs,Csvss,Csuss,St1} = icr_clauses(Cs0, Vs, St0),
All = new_in_all(Vs, Csvss),
{{'if',Line,Cs},All,union(Csuss),St1};
expr({'case',Line,E0,Cs0}, Vs, St0) ->
{E,Evs,Eus,St1} = expr(E0, Vs, St0),
{Cs,Csvss,Csuss,St2} = icr_clauses(Cs0, union(Evs, Vs), St1),
All = new_in_all(Vs, Csvss),
{{'case',Line,E,Cs},union(Evs, All),union([Eus|Csuss]),St2};
expr({'cond',Line,Cs}, Vs, St0) ->
{V,St1} = new_var(Line,St0),
expr(cond_clauses(Cs,V), Vs, St1);
expr({'receive',Line,Cs0}, Vs, St0) ->
{Cs,Csvss,Csuss,St1} = icr_clauses(Cs0, Vs, St0),
All = new_in_all(Vs, Csvss),
{{'receive',Line,Cs},All,union(Csuss),St1};
expr({'receive',Line,Cs0,To0,ToEs0}, Vs, St0) ->
{To,Tovs,Tous,St1} = expr(To0, Vs, St0),
{ToEs,ToEsvs,_ToEsus,St2} = exprs(ToEs0, Vs, St1),
{Cs,Csvss,Csuss,St3} = icr_clauses(Cs0, Vs, St2),
All = new_in_all(Vs, [ToEsvs|Csvss]),
{{'receive',Line,Cs,To,ToEs},union(Tovs, All),union([Tous|Csuss]),St3};
expr({'fun',Line,Body}, Vs, St) ->
fun_tq(Line, Body, Vs, St);
%%% expr({call,_,{atom,La,this_module},[]}, _Vs, St) ->
%%% {{atom,La,St#expand.module}, [], [], St};
%%% expr({call,_,{atom,La,this_package},[]}, _Vs, St) ->
%%% {{atom,La,list_to_atom(St#expand.package)}, [], [], St};
%%% expr({call,_,{atom,La,this_package},[{atom,_,Name}]}, _Vs, St) ->
%%% M = packages:concat(St#expand.package,Name),
%%% {{atom,La,list_to_atom(M)}, [], [], St};
%%% expr({call,Line,{atom,La,this_package},[A]}, Vs, St) ->
%%% M = {call,Line,{remote,La,{atom,La,packages},{atom,La,concat}},
%%% [{string,La,St#expand.package}, A]},
%%% expr({call,Line,{atom,Line,list_to_atom},[M]}, Vs, St);
expr({call,Line,{atom,_,is_record},[A,{atom,_,Name}]}, Vs, St) ->
record_test(Line, A, Name, Vs, St);
expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,is_record}},
[A,{atom,_,Name}]}, Vs, St) ->
record_test(Line, A, Name, Vs, St);
expr({call,Line,{atom,La,N},As0}, Vs, St0) ->
{As,Asvs,Asus,St1} = expr_list(As0, Vs, St0),
Ar = length(As),
case erl_internal:bif(N, Ar) of
true ->
{{call,Line,{remote,La,{atom,La,erlang},{atom,La,N}},As},
Asvs,Asus,St1};
false ->
case imported(N, Ar, St1) of
{yes,Mod} ->
{{call,Line,{remote,La,{atom,La,Mod},{atom,La,N}},As},
Asvs,Asus,St1};
no ->
case {N,Ar} of
{record_info,2} ->
record_info_call(Line, As, St1);
_ ->
{{call,Line,{atom,La,N},As},Asvs,Asus,St1}
end
end
end;
expr({call,Line,{remote,Lr,M1,F},As0}, Vs, St0) ->
{[M2,F1|As1],Asvs,Asus,St1} = expr_list([M1,F|As0], Vs, St0),
{{call,Line,{remote,Lr,M2,F1},As1},Asvs,Asus,St1};
expr({call,Line,{tuple,_,[{atom,_,_}=M,{atom,_,_}=F]},As}, Vs, St) ->
%% Rewrite {Mod,Function}(Args...) to Mod:Function(Args...).
expr({call,Line,{remote,Line,M,F},As}, Vs, St);
expr({call,Line,F,As0}, Vs, St0) ->
{[Fun1|As1],Asvs,Asus,St1} = expr_list([F|As0], Vs, St0),
{{call,Line,Fun1,As1},Asvs,Asus,St1};
expr({'try',Line,Es0,Scs0,Ccs0,As0}, Vs, St0) ->
{Es1,Esvs,Esus,St1} = exprs(Es0, Vs, St0),
Cvs = union(Esvs, Vs),
{Scs1,Scsvss,Scsuss,St2} = icr_clauses(Scs0, Cvs, St1),
{Ccs1,Ccsvss,Ccsuss,St3} = icr_clauses(Ccs0, Cvs, St2),
Csvss = Scsvss ++ Ccsvss,
Csuss = Scsuss ++ Ccsuss,
All = new_in_all(Vs, Csvss),
{As1,Asvs,Asus,St4} = exprs(As0, Cvs, St3),
{{'try',Line,Es1,Scs1,Ccs1,As1}, union([Asvs,Esvs,All]),
union([Esus,Asus|Csuss]), St4};
expr({'catch',Line,E0}, Vs, St0) ->
%% Catch exports no new variables.
{E,_Evs,Eus,St1} = expr(E0, Vs, St0),
{{'catch',Line,E},[],Eus,St1};
expr({match,Line,P0,E0}, Vs, St0) ->
{E,Evs,Eus,St1} = expr(E0, Vs, St0),
{P,Pvs,Pus,St2} = pattern(P0, St1),
{{match,Line,P,E},
union(subtract(Pvs, Vs), Evs),
union(intersection(Pvs, Vs), union(Eus,Pus)),St2};
expr({op,L,'andalso',E1,E2}, Vs, St0) ->
{V,St1} = new_var(L,St0),
E = make_bool_switch(L,E1,V,
make_bool_switch(L,E2,V,{atom,L,true},
{atom,L,false}),
{atom,L,false}),
expr(E, Vs, St1);
expr({op,L,'orelse',E1,E2}, Vs, St0) ->
{V,St1} = new_var(L,St0),
E = make_bool_switch(L,E1,V,{atom,L,true},
make_bool_switch(L,E2,V,{atom,L,true},
{atom,L,false})),
expr(E, Vs, St1);
expr({op,Line,'++',{lc,Ll,E0,Qs0},M0}, Vs, St0) ->
{E1,Qs1,M1,Lvs,Lus,St1} = lc_tq(Ll, E0, Qs0, M0, Vs, St0),
{{op,Line,'++',{lc,Ll,E1,Qs1},M1},Lvs,Lus,St1};
expr({op,_,'++',{string,L1,S1},{string,_,S2}}, _Vs, St) ->
{{string,L1,S1 ++ S2},[],[],St};
expr({op,Ll,'++',{string,L1,S1}=Str,R0}, Vs, St0) ->
{R1,Rvs,Rus,St1} = expr(R0, Vs, St0),
E = case R1 of
{string,_,S2} -> {string,L1,S1 ++ S2};
_Other when length(S1) < 8 -> string_to_conses(L1, S1, R1);
_Other -> {op,Ll,'++',Str,R1}
end,
{E,Rvs,Rus,St1};
expr({op,Ll,'++',{cons,Lc,H,T},L2}, Vs, St) ->
expr({cons,Ll,H,{op,Lc,'++',T,L2}}, Vs, St);
expr({op,_,'++',{nil,_},L2}, Vs, St) ->
expr(L2, Vs, St);
expr({op,Line,Op,A0}, Vs, St0) ->
{A,Avs,Aus,St1} = expr(A0, Vs, St0),
{{op,Line,Op,A},Avs,Aus,St1};
expr({op,Line,Op,L0,R0}, Vs, St0) ->
{L,Lvs,Lus,St1} = expr(L0, Vs, St0),
{R,Rvs,Rus,St2} = expr(R0, Vs, St1),
{{op,Line,Op,L,R},union(Lvs, Rvs),union(Lus, Rus),St2}.
expr_list([E0|Es0], Vs, St0) ->
{E,Evs,Eus,St1} = expr(E0, Vs, St0),
{Es,Esvs,Esus,St2} = expr_list(Es0, Vs, St1),
{[E|Es],union(Evs, Esvs),union(Eus, Esus),St2};
expr_list([], _, St) ->
{[],[],[],St}.
%% icr_clauses([Clause], [VisibleVariable], State) ->
%% {[TransformedClause],[[NewVariable]],[[UsedVariable]],State'}
%% Be very careful here to return the variables that are really used
%% and really new.
icr_clauses([], _, St) ->
{[],[[]],[],St};
icr_clauses(Clauses, Vs, St) ->
icr_clauses2(Clauses, Vs, St).
icr_clauses2([{clause,Line,H0,G0,B0}|Cs0], Vs, St0) ->
{H,Hvs,Hus,St1} = head(H0, St0), %Hvs is really used!
{G,Gvs,Gus,St2} = guard(G0, union(Hvs, Vs), St1),
{B,Bvs,Bus,St3} = exprs(B0, union([Vs,Hvs,Gvs]), St2),
New = subtract(union([Hvs,Gvs,Bvs]), Vs), %Really new
Used = intersection(union([Hvs,Hus,Gus,Bus]), Vs), %Really used
{Cs,Csvs,Csus,St4} = icr_clauses2(Cs0, Vs, St3),
{[{clause,Line,H,G,B}|Cs],[New|Csvs],[Used|Csus],St4};
icr_clauses2([], _, St) ->
{[],[],[],St}.
%% lc_tq(Line, Expr, Qualifiers, More, [VisibleVar], State) ->
%% {TransExpr,[TransQual],TransMore,[NewVar],[UsedVar],State'}
lc_tq(Line, E0, [{generate,Lg,P0,G0}|Qs0], M0, Vs, St0) ->
{G1,Gvs,Gus,St1} = expr(G0, Vs, St0),
{P1,Pvs,Pus,St2} = pattern(P0, St1),
{E1,Qs1,M1,Lvs,Lus,St3} = lc_tq(Line, E0, Qs0, M0, union(Pvs, Vs), St2),
{E1,[{generate,Lg,P1,G1}|Qs1],M1,
union(Gvs, Lvs),union([Gus,Pus,Lus]),St3};
lc_tq(Line, E0, [F0|Qs0], M0, Vs, St0) ->
%% Allow record/2 and expand out as guard test.
case erl_lint:is_guard_test(F0) of
true ->
{F1,Fvs,_Fus,St1} = guard_tests([F0], Vs, St0),
{E1,Qs1,M1,Lvs,Lus,St2} = lc_tq(Line, E0, Qs0, M0, union(Fvs, Vs), St1),
{E1,F1++Qs1,M1,Lvs,Lus,St2};
false ->
{F1,Fvs,_Fus,St1} = expr(F0, Vs, St0),
{E1,Qs1,M1,Lvs,Lus,St2} = lc_tq(Line, E0, Qs0, M0, union(Fvs, Vs), St1),
{E1,[F1|Qs1],M1,Lvs,Lus,St2}
end;
lc_tq(_Line, E0, [], M0, Vs, St0) ->
{E1,Evs,Eus,St1} = expr(E0, Vs, St0),
{M1,Mvs,Mus,St2} = expr(M0, Vs, St1),
{E1,[],M1,union(Evs, Mvs),union(Eus, Mus),St2}.
%% fun_tq(Line, Body, VisibleVariables, State) ->
%% {Fun,NewVariables,UsedVariables,State'}
%% Transform an "explicit" fun {'fun', Line, {clauses, Cs}} into an
%% extended form {'fun', Line, {clauses, Cs}, Info}, unless it is the
%% name of a BIF (erl_lint has checked that it is not an import).
%% Process the body sequence directly to get the new and used variables.
%% "Implicit" funs {'fun', Line, {function, F, A}} are not changed.
fun_tq(Lf, {function,F,A}, Vs, St0) ->
{As,St1} = new_vars(A, Lf, St0),
Cs = [{clause,Lf,As,[],[{call,Lf,{atom,Lf,F},As}]}],
case erl_internal:bif(F, A) of
true ->
fun_tq(Lf, {clauses,Cs}, Vs, St1);
false ->
Index = St0#expand.fun_index,
Uniq = erlang:hash(Cs, (1 bsl 27)-1),
{Fname,St2} = new_fun_name(St1),
{{'fun',Lf,{function,F,A},{Index,Uniq,Fname}},[],[],
St2#expand{fun_index=Index+1}}
end;
fun_tq(Lf, {clauses,Cs0}, Vs, St0) ->
Uniq = erlang:hash(Cs0, (1 bsl 27)-1),
{Cs1,_Hvss,Frees,St1} = fun_clauses(Cs0, Vs, St0),
Ufrees = union(Frees),
Index = St1#expand.fun_index,
{Fname,St2} = new_fun_name(St1),
{{'fun',Lf,{clauses,Cs1},{Index,Uniq,Fname}},[],Ufrees,
St2#expand{fun_index=Index+1}}.
fun_clauses([{clause,L,H0,G0,B0}|Cs0], Vs, St0) ->
{H,Hvs,Hus,St1} = head(H0, St0),
{G,Gvs,Gus,St2} = guard(G0, union(Hvs, Vs), St1),
{B,Bvs,Bus,St3} = exprs(B0, union([Vs,Hvs,Gvs]), St2),
%% Free variables cannot be new anywhere in the clause.
Free = subtract(union([Gus,Hus,Bus]), union([Hvs,Gvs,Bvs])),
%%io:format(" Gus :~p~n Bvs :~p~n Bus :~p~n Free:~p~n" ,[Gus,Bvs,Bus,Free]),
{Cs,Hvss,Frees,St4} = fun_clauses(Cs0, Vs, St3),
{[{clause,L,H,G,B}|Cs],[Hvs|Hvss],[Free|Frees],St4};
fun_clauses([], _, St) -> {[],[],[],St}.
%% new_fun_name(State) -> {FunName,State}.
new_fun_name(#expand{func=F,arity=A,fcount=I}=St) ->
Name = "-" ++ atom_to_list(F) ++ "/" ++ integer_to_list(A)
++ "-fun-" ++ integer_to_list(I) ++ "-",
{list_to_atom(Name),St#expand{fcount=I+1}}.
%% normalise_fields([RecDef]) -> [Field].
%% Normalise the field definitions to always have a default value. If
%% none has been given then use 'undefined'.
normalise_fields(Fs) ->
map(fun ({record_field,Lf,Field}) ->
{record_field,Lf,Field,{atom,Lf,undefined}};
(F) -> F end, Fs).
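%% E.g. a plain field definition {record_field,L,{atom,L,a}} becomes
%% {record_field,L,{atom,L,a},{atom,L,undefined}}, while fields that
%% already carry an init expression are left untouched.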
%% record_fields(RecordName, State)
%% find_field(FieldName, Fields)
record_fields(R, St) -> dict:fetch(R, St#expand.records).
find_field(F, [{record_field,_,{atom,_,F},Val}|_]) -> {ok,Val};
find_field(F, [_|Fs]) -> find_field(F, Fs);
find_field(_, []) -> error.
%% field_names(RecFields) -> [Name].
%% Return a list of the field name structures.
field_names(Fs) ->
map(fun ({record_field,_,Field,_Val}) -> Field end, Fs).
%% index_expr(Line, FieldExpr, Name, Fields) -> IndexExpr.
%% Return an expression which evaluates to the index of a
%% field. Currently only handle the case where the field is an
%% atom. This expansion must be passed through expr again.
index_expr(Line, {atom,_,F}, _Name, Fs) ->
{integer,Line,index_expr(F, Fs, 2)}.
index_expr(F, [{record_field,_,{atom,_,F},_}|_], I) -> I;
index_expr(F, [_|Fs], I) ->
index_expr(F, Fs, I+1).
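%% The index is a 1-based tuple position where position 1 holds the record
%% name, so for -record(r, {a,b}) the index of field a is 2 and of b is 3.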
%% pattern_fields([RecDefField], [Match]) -> [Pattern].
%% Build a list of match patterns for the record tuple elements.
%% This expansion must be passed through pattern again. N.B. We are
%% scanning the record definition field list!
pattern_fields(Fs, Ms) ->
Wildcard = record_wildcard_init(Ms),
map(fun ({record_field,L,{atom,_,F},_}) ->
case find_field(F, Ms) of
{ok,Match} -> Match;
error when Wildcard =:= none -> {var,L,'_'};
error -> Wildcard
end end,
Fs).
%% record_inits([RecDefField], [Init]) -> [InitExpr].
%% Build a list of initialisation expressions for the record tuple
%% elements. This expansion must be passed through expr
%% again. N.B. We are scanning the record definition field list!
record_inits(Fs, Is) ->
WildcardInit = record_wildcard_init(Is),
map(fun ({record_field,_,{atom,_,F},D}) ->
case find_field(F, Is) of
{ok,Init} -> Init;
error when WildcardInit =:= none -> D;
error -> WildcardInit
end end,
Fs).
record_wildcard_init([{record_field,_,{var,_,'_'},D}|_]) -> D;
record_wildcard_init([_|Is]) -> record_wildcard_init(Is);
record_wildcard_init([]) -> none.
%% record_update(Record, RecordName, [RecDefField], [Update], State) ->
%% {Expr,State'}
%% Build an expression to update fields in a record returning a new
%% record. Try to be smart and optimise this. This expansion must be
%% passed through expr again.
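%% In short: if no fields are updated the original expression is returned
%% as is; if only a few fields change, a chain of setelement/3 calls is
%% generated (record_setel/4); otherwise the whole tuple is rebuilt via a
%% 'case' match (record_match/5).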
record_update(R, Name, Fs, Us0, St0) ->
Line = element(2, R),
{Pre,Us,St1} = record_exprs(Us0, St0),
Nf = length(Fs), %# of record fields
Nu = length(Us), %# of update fields
Nc = Nf - Nu, %# of copy fields
%% We need a new variable for the record expression
%% to guarantee that it is only evaluated once.
{Var,St2} = new_var(Line, St1),
%% Try to be intelligent about which method of updating record to use.
{Update,St} =
if
Nu == 0 -> {R,St2}; %No fields updated
Nu =< Nc -> %Few fields updated
{record_setel(Var, Name, Fs, Us), St2};
true -> %The wide area inbetween
record_match(Var, Name, Fs, Us, St2)
end,
{{block,element(2, R),Pre ++ [{match,Line,Var,R},Update]},St}.
%% record_match(Record, RecordName, [RecDefField], [Update], State)
%% Build a 'case' expression to modify record fields.
record_match(R, Name, Fs, Us, St0) ->
{Ps,News,St1} = record_upd_fs(Fs, Us, St0),
Lr = element(2, hd(Us)),
{{'case',Lr,R,
[{clause,Lr,[{tuple,Lr,[{atom,Lr,Name}|Ps]}],[],
[{tuple,Lr,[{atom,Lr,Name}|News]}]},
{clause,Lr,[{var,Lr,'_'}],[],
[call_error(Lr, {tuple,Lr,[{atom,Lr,badrecord},{atom,Lr,Name}]})]}
]},
St1}.
record_upd_fs([{record_field,Lf,{atom,_La,F},_Val}|Fs], Us, St0) ->
{P,St1} = new_var(Lf, St0),
{Ps,News,St2} = record_upd_fs(Fs, Us, St1),
case find_field(F, Us) of
{ok,New} -> {[P|Ps],[New|News],St2};
error -> {[P|Ps],[P|News],St2}
end;
record_upd_fs([], _, St) -> {[],[],St}.
%% record_setel(Record, RecordName, [RecDefField], [Update])
%% Build a nested chain of setelement calls to build the
%% updated record tuple.
record_setel(R, Name, Fs, Us0) ->
Us1 = foldl(fun ({record_field,Lf,Field,Val}, Acc) ->
I = index_expr(Lf, Field, Name, Fs),
[{I,Lf,Val}|Acc]
end, [], Us0),
Us = sort(Us1),
Lr = element(2, hd(Us)),
Wildcards = duplicate(length(Fs), {var,Lr,'_'}),
{'case',Lr,R,
[{clause,Lr,[{tuple,Lr,[{atom,Lr,Name}|Wildcards]}],[],
[foldr(fun ({I,Lf,Val}, Acc) ->
{call,Lf,{atom,Lf,setelement},[I,Acc,Val]} end,
R, Us)]},
{clause,Lr,[{var,Lr,'_'}],[],
[call_error(Lr, {tuple,Lr,[{atom,Lr,badrecord},{atom,Lr,Name}]})]}]}.
%% Expand a call to record_info/2. We have checked that it is not
%% shadowed by an import.
record_info_call(Line, [{atom,_Li,Info},{atom,_Ln,Name}], St) ->
case Info of
size ->
{{integer,Line,1+length(record_fields(Name, St))},[],[],St};
fields ->
{make_list(field_names(record_fields(Name, St)), Line),
[],[],St}
end.
%% Break out expressions from a record update list and bind them to new
%% variables. The idea is that we will evaluate all update expressions
%% before starting to update the record.
record_exprs(Us, St) ->
record_exprs(Us, St, [], []).
record_exprs([{record_field,Lf,{atom,_La,_F}=Name,Val}=Field0|Us], St0, Pre, Fs) ->
case is_simple_val(Val) of
true ->
record_exprs(Us, St0, Pre, [Field0|Fs]);
false ->
{Var,St} = new_var(Lf, St0),
Bind = {match,Lf,Var,Val},
Field = {record_field,Lf,Name,Var},
record_exprs(Us, St, [Bind|Pre], [Field|Fs])
end;
record_exprs([], St, Pre, Fs) ->
{reverse(Pre),Fs,St}.
is_simple_val({var,_,_}) -> true;
is_simple_val({atom,_,_}) -> true;
is_simple_val({integer,_,_}) -> true;
is_simple_val({float,_,_}) -> true;
is_simple_val({nil,_}) -> true;
is_simple_val(_) -> false.
%% pattern_bin([Element], State) -> {[Element],[Variable],[UsedVar],State}.
pattern_bin(Es0, St) ->
Es1 = bin_expand_strings(Es0),
foldr(fun (E, Acc) -> pattern_element(E, Acc) end, {[],[],[],St}, Es1).
pattern_element({bin_element,Line,Expr,Size,Type}, {Es,Esvs,Esus,St0}) ->
{Expr1,Vs1,Us1,St1} = pattern(Expr, St0),
{Size1,Vs2,Us2,St2} = pat_bit_size(Size, St1),
{Size2,Type1} = make_bit_type(Line, Size1,Type),
{[{bin_element,Line,Expr1,Size2,Type1}|Es],
union([Vs1,Vs2,Esvs]),union([Us1,Us2,Esus]),St2}.
pat_bit_size(default, St) -> {default,[],[],St};
pat_bit_size({atom,_La,all}=All, St) -> {All,[],[],St};
pat_bit_size({var,_Lv,V}=Var, St) -> {Var,[],[V],St};
pat_bit_size(Size, St) ->
Line = element(2, Size),
{value,Sz,_} = erl_eval:expr(Size, erl_eval:new_bindings()),
{{integer,Line,Sz},[],[],St}.
make_bit_type(Line, default, Type0) ->
case erl_bits:set_bit_type(default, Type0) of
{ok,all,Bt} -> {{atom,Line,all},erl_bits:as_list(Bt)};
{ok,Size,Bt} -> {{integer,Line,Size},erl_bits:as_list(Bt)}
end;
make_bit_type(_Line, Size, Type0) -> %Integer or 'all'
{ok,Size,Bt} = erl_bits:set_bit_type(Size, Type0),
{Size,erl_bits:as_list(Bt)}.
%% expr_bin([Element], [VisibleVar], State) ->
%% {[Element],[NewVar],[UsedVar],State}.
expr_bin(Es0, Vs, St) ->
Es1 = bin_expand_strings(Es0),
foldr(fun (E, Acc) -> bin_element(E, Vs, Acc) end, {[],[],[],St}, Es1).
bin_element({bin_element,Line,Expr,Size,Type}, Vs, {Es,Esvs,Esus,St0}) ->
{Expr1,Vs1,Us1,St1} = expr(Expr, Vs, St0),
{Size1,Vs2,Us2,St2} = if Size == default -> {default,[],[],St1};
true -> expr(Size, Vs, St1)
end,
{Size2,Type1} = make_bit_type(Line, Size1, Type),
{[{bin_element,Line,Expr1,Size2,Type1}|Es],
union([Vs1,Vs2,Esvs]),union([Us1,Us2,Esus]),St2}.
bin_expand_strings(Es) ->
foldr(fun ({bin_element,Line,{string,_,S},default,default}, Es1) ->
foldr(fun (C, Es2) ->
[{bin_element,Line,{char,Line,C},default,default}|Es2]
end, Es1, S);
(E, Es1) -> [E|Es1]
end, [], Es).
%% new_var_name(State) -> {VarName,State}.
new_var_name(St) ->
C = St#expand.vcount,
{list_to_atom("pre" ++ integer_to_list(C)),St#expand{vcount=C+1}}.
%% new_var(Line, State) -> {Var,State}.
new_var(L, St0) ->
{New,St1} = new_var_name(St0),
{{var,L,New},St1}.
%% new_vars(Count, Line, State) -> {[Var],State}.
%% Make Count new variables.
new_vars(N, L, St) -> new_vars(N, L, St, []).
new_vars(N, L, St0, Vs) when N > 0 ->
{V,St1} = new_var(L, St0),
new_vars(N-1, L, St1, [V|Vs]);
new_vars(0, _L, St, Vs) -> {Vs,St}.
%% make_list(TermList, Line) -> ConsTerm.
make_list(Ts, Line) ->
foldr(fun (H, T) -> {cons,Line,H,T} end, {nil,Line}, Ts).
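%% E.g. make_list([{atom,L,a},{atom,L,b}], L) builds
%% {cons,L,{atom,L,a},{cons,L,{atom,L,b},{nil,L}}}.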
string_to_conses(Line, Cs, Tail) ->
foldr(fun (C, T) -> {cons,Line,{char,Line,C},T} end, Tail, Cs).
%% Create a case-switch on true/false, generating badarg for all other
%% values.
make_bool_switch(L, E, V, T, F) ->
make_bool_switch_1(L, E, V, [T], [F]).
make_bool_switch_1(L, E, V, T, F) ->
case get(sys_pre_expand_in_guard) of
undefined -> make_bool_switch_body(L, E, V, T, F);
yes -> make_bool_switch_guard(L, E, V, T, F)
end.
make_bool_switch_guard(_, E, _, [{atom,_,true}], [{atom,_,false}]) -> E;
make_bool_switch_guard(L, E, V, T, F) ->
NegL = -abs(L),
{'case',NegL,E,
[{clause,NegL,[{atom,NegL,true}],[],T},
{clause,NegL,[{atom,NegL,false}],[],F},
{clause,NegL,[V],[],[V]}
]}.
make_bool_switch_body(L, E, V, T, F) ->
NegL = -abs(L),
{'case',NegL,E,
[{clause,NegL,[{atom,NegL,true}],[],T},
{clause,NegL,[{atom,NegL,false}],[],F},
{clause,NegL,[V],[],
[call_error(NegL,{tuple,NegL,[{atom,NegL,badarg},V]})]}
]}.
%% Expand a list of cond-clauses to a sequence of case-switches.
cond_clauses([{clause,L,[],[[E]],B}],V) ->
make_bool_switch_1(L,E,V,B,[call_error(L,{atom,L,cond_clause})]);
cond_clauses([{clause,L,[],[[E]],B} | Cs],V) ->
make_bool_switch_1(L,E,V,B,[cond_clauses(Cs,V)]).
%% call_error(Line, Reason) -> Expr.
%% Build a call to erlang:error/1 with reason Reason.
call_error(L, R) ->
{call,L,{remote,L,{atom,L,erlang},{atom,L,error}},[R]}.
%% new_in_all(Before, RegionList) -> NewInAll
%% Return the variables new in all clauses.
new_in_all(Before, Region) ->
InAll = intersection(Region),
subtract(InAll, Before).
%% import(Line, Imports, State) ->
%% State'
%% imported(Name, Arity, State) ->
%% {yes,Module} | no
%% Handle import declarations and test for imported functions. No need to
%% check when building imports, as the code is known to be correct.
import({Mod,Fs}, St) ->
true = is_atom(Mod),
Mfs = from_list(Fs),
St#expand{imports=add_imports(Mod, Mfs, St#expand.imports)}.
add_imports(Mod, [F|Fs], Is) ->
add_imports(Mod, Fs, orddict:store(F, Mod, Is));
add_imports(_, [], Is) -> Is.
imported(F, A, St) ->
case orddict:find({F,A}, St#expand.imports) of
{ok,Mod} -> {yes,Mod};
error -> no
end.
<|start_filename|>lib/erl_interface/src/misc/eiext.h<|end_filename|>
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1998-2009. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*
*/
#ifndef _EIEXT_H
#define _EIEXT_H
/* FIXME maybe put into eidef.h */
#define ERL_VERSION_MAGIC 131 /* 130 in erlang 4.2 */
/* from erl_eterm.h */
#define ERL_MAX ((1 << 27)-1)
#define ERL_MIN -(1 << 27)
/* FIXME we removed lots of defines, maybe some C files don't need to include
this header any longer? */
#endif /* _EIEXT_H */
<|start_filename|>lib/gs/doc/src/examples/ex2.erl<|end_filename|>
-module(ex2).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([init/0]).
init() ->
S = gs:start(),
gs:create(window,win1,S,[{width,200},{height,100}]),
gs:create(button,b1,win1,[{label, {text,"Press Me"}}]),
gs:config(win1, {map,true}),
loop().
loop() ->
receive
{gs, b1, click, Data, Args} ->
io:format("Hello World!~n",[]),
loop()
end.
<|start_filename|>lib/dialyzer/test/opaque_SUITE_data/src/dict/dict_use.erl<|end_filename|>
-module(dict_use).
-export([ok1/0, ok2/0, ok3/0, ok4/0, ok5/0, ok6/0]).
-export([middle/0]).
-export([w1/0, w2/0, w3/0, w4/1, w5/0, w6/0, w7/0, w8/1, w9/0]).
-define(DICT, dict).
%%---------------------------------------------------------------------
%% Cases that are OK
%%---------------------------------------------------------------------
ok1() ->
dict:new().
ok2() ->
case dict:new() of X -> X end.
ok3() ->
Dict1 = dict:new(),
Dict2 = dict:new(),
Dict1 =:= Dict2.
ok4() ->
dict:fetch(foo, dict:new()).
ok5() -> % this is OK since some_mod:new/0 might be returning a dict:dict()
dict:fetch(foo, some_mod:new()).
ok6() ->
dict:store(42, elli, dict:new()).
middle() ->
{w1(), w2()}.
%%---------------------------------------------------------------------
%% Cases that are problematic w.r.t. opaqueness of types
%%---------------------------------------------------------------------
w1() ->
gazonk = dict:new().
w2() ->
case dict:new() of
[] -> nil;
42 -> weird
end.
w3() ->
try dict:new() of
[] -> nil;
42 -> weird
catch
_:_ -> exception
end.
w4(Dict) when is_list(Dict) ->
Dict =:= dict:new();
w4(Dict) when is_atom(Dict) ->
Dict =/= dict:new().
w5() ->
case dict:new() of
D when length(D) =/= 42 -> weird;
D when is_atom(D) -> weirder;
D when is_list(D) -> gazonk
end.
w6() ->
is_list(dict:new()).
w7() ->
dict:fetch(foo, [1,2,3]).
w8(Fun) ->
dict:merge(Fun, 42, [1,2]).
w9() ->
dict:store(42, elli,
{dict,0,16,16,8,80,48,
{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},
{{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}}}).
<|start_filename|>lib/diameter/test/modules.mk<|end_filename|>
# %CopyrightBegin%
#
# Copyright Ericsson AB 2010-2015. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# %CopyrightEnd%
TEST_SPEC_FILE = diameter.spec
COVER_SPEC_FILE = diameter.cover
MODULES = \
diameter_ct \
diameter_enum \
diameter_util \
diameter_3xxx_SUITE \
diameter_app_SUITE \
diameter_capx_SUITE \
diameter_codec_SUITE \
diameter_codec_test \
diameter_config_SUITE \
diameter_compiler_SUITE \
diameter_dict_SUITE \
diameter_distribution_SUITE \
diameter_dpr_SUITE \
diameter_event_SUITE \
diameter_examples_SUITE \
diameter_failover_SUITE \
diameter_gen_sctp_SUITE \
diameter_gen_tcp_SUITE \
diameter_length_SUITE \
diameter_pool_SUITE \
diameter_reg_SUITE \
diameter_relay_SUITE \
diameter_stats_SUITE \
diameter_sync_SUITE \
diameter_tls_SUITE \
diameter_traffic_SUITE \
diameter_transport_SUITE \
diameter_watchdog_SUITE
HRL_FILES = \
diameter_ct.hrl
DATA = \
diameter_codec_SUITE_data/avps.dia \
diameter_codec_SUITE_data/send.dia \
diameter_codec_SUITE_data/recv.dia \
diameter_codec_SUITE_data/diameter_test_unknown.erl
<|start_filename|>lib/dialyzer/test/opaque_SUITE_data/src/simple/exact_api.erl<|end_filename|>
-module(exact_api).
-export([new/0, exact_api_test/1, exact_api_new/1,
exact_adt_test/1, exact_adt_new/1]).
-export_type([exact_api/0]).
-record(digraph, {vtab = notable :: ets:tab(),
etab = notable :: ets:tab(),
ntab = notable :: ets:tab(),
cyclic = true :: boolean()}).
-spec new() -> digraph:graph().
new() ->
A = #digraph{},
set_type(A), % does not have an opaque term as 1st argument
A.
-spec set_type(digraph:graph()) -> true.
set_type(G) ->
digraph:delete(G).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% The derived spec of exact_api_new() is
%%% -spec exact_api_new(exact_api:exact_api()) -> exact_api:exact_api().
%%% This won't happen unless dialyzer_typesig uses
%%% t_is_exactly_equal() rather than t_is_equal().
%%% [As of R17B the latter considers two types equal if nothing but
%%% their ?opaque tags differ.]
-record(exact_api, {}).
-opaque exact_api() :: #exact_api{}.
exact_api_test(X) ->
#exact_api{} = exact_api_set_type(X). % OK
exact_api_new(A) ->
A = #exact_api{},
_ = exact_api_set_type(A), % OK (the opaque type is local)
A.
-spec exact_api_set_type(exact_api()) -> exact_api().
exact_api_set_type(#exact_api{}=E) -> E.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-record(exact_adt, {}).
exact_adt_test(X) ->
#exact_adt{} = exact_adt:exact_adt_set_type(X). % breaks the opaqueness
exact_adt_new(A) ->
A = #exact_adt{},
_ = exact_adt:exact_adt_set_type2(A), % does not have an opaque term as 1st argument
A.
<|start_filename|>lib/erl_interface/src/decode/decode_tuple_header.c<|end_filename|>
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1998-2014. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
#include "eidef.h"
#include "eiext.h"
#include "putget.h"
int ei_decode_tuple_header(const char *buf, int *index, int *arity)
{
const char *s = buf + *index;
const char *s0 = s;
int i;
switch ((i=get8(s))) {
case ERL_SMALL_TUPLE_EXT:
if (arity) *arity = get8(s);
else s++;
break;
case ERL_LARGE_TUPLE_EXT:
if (arity) *arity = get32be(s);
else s += 4;
break;
default:
return -1;
}
*index += s-s0;
return 0;
}
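/* Example (sketch): decoding a 2-tuple such as {ok, 42} from a buffer `buf`
 * holding a term in the external format (e.g. as produced by term_to_binary/1).
 * Index bookkeeping is the caller's responsibility.
 *
 *   int index = 0, version, arity;
 *   long value;
 *   char atom[MAXATOMLEN];
 *
 *   if (ei_decode_version(buf, &index, &version) < 0) return -1;
 *   if (ei_decode_tuple_header(buf, &index, &arity) < 0 || arity != 2) return -1;
 *   if (ei_decode_atom(buf, &index, atom) < 0) return -1;
 *   if (ei_decode_long(buf, &index, &value) < 0) return -1;
 */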
int ei_decode_map_header(const char *buf, int *index, int *arity)
{
const char *s = buf + *index;
const char *s0 = s;
int i;
switch ((i=get8(s))) {
case ERL_MAP_EXT:
if (arity) *arity = get32be(s);
else s += 4;
break;
default:
return -1;
}
*index += s-s0;
return 0;
}
<|start_filename|>lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl<|end_filename|>
%% ``Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%%
%% $Id: httpd_parse.erl,v 1.1 2008/12/17 09:53:34 mikpe Exp $
%%
-module(httpd_parse).
-export([
request_header/1,
hsplit/2,
get_request_record/10,
split_lines/1,
tagup_header/1]).
-include("httpd.hrl").
%%----------------------------------------------------------------------
%% request_header
%%
%% Input: The request as sent from the client (list of characters)
%% (may include part of the entity body)
%%
%% Returns:
%% {ok, Info#mod}
%% {not_implemented,Info#mod}
%% {bad_request, Reason}
%%----------------------------------------------------------------------
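%% Example (sketch): for the header
%%   "GET /index.html HTTP/1.1\r\nHost: example.com\r\n"
%% request_header/1 returns roughly
%%   {ok, ["GET", "/index.html", "HTTP/1.1", RequestLine, ParsedHeader]}
%% where ParsedHeader contains {"host","example.com"} (field names lowercased,
%% values stripped of surrounding white space).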
request_header(Header)->
[RequestLine|HeaderFields] = split_lines(Header),
?DEBUG("request ->"
"~n RequestLine: ~p"
"~n Header: ~p",[RequestLine,Header]),
ParsedHeader = tagup_header(HeaderFields),
?DEBUG("request ->"
"~n ParseHeader: ~p",[ParsedHeader]),
case verify_request(string:tokens(RequestLine," ")) of
["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
{ok, ["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
ParsedHeader]};
["GET", RequestURI, "HTTP/0.9"] ->
{ok, ["GET", RequestURI, "HTTP/0.9", RequestLine, ParsedHeader]};
["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
{ok, ["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
ParsedHeader]};
["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
{ok, ["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
ParsedHeader]};
%%HTTP must be 1.1 or higher
["TRACE", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] when N>48->
{ok, ["TRACE", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
ParsedHeader]};
[Method, RequestURI] ->
{not_implemented, RequestLine, Method, RequestURI,ParsedHeader,"HTTP/0.9"};
[Method, RequestURI, HTTPVersion] ->
{not_implemented, RequestLine, Method, RequestURI,ParsedHeader, HTTPVersion};
{bad_request, Reason} ->
{bad_request, Reason};
Reason ->
{bad_request, "Unknown request method"}
end.
%%----------------------------------------------------------------------
%% The request is passed through the server as a record of type mod; build it here
%%----------------------------------------------------------------------
get_request_record(Socket,SocketType,ConfigDB,Method,RequestURI,
HTTPVersion,RequestLine,ParsedHeader,EntityBody,InitData)->
PersistentConn=get_persistens(HTTPVersion,ParsedHeader,ConfigDB),
Info=#mod{init_data=InitData,
data=[],
socket_type=SocketType,
socket=Socket,
config_db=ConfigDB,
method=Method,
absolute_uri=formatAbsoluteURI(RequestURI,ParsedHeader),
request_uri=formatRequestUri(RequestURI),
http_version=HTTPVersion,
request_line=RequestLine,
parsed_header=ParsedHeader,
entity_body=maybe_remove_nl(ParsedHeader,EntityBody),
connection=PersistentConn},
{ok,Info}.
%%----------------------------------------------------------------------
%% Control whether we shall maintain a persistent connection or not
%%----------------------------------------------------------------------
get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
case httpd_util:lookup(ConfigDB,persistent_conn,true) of
true->
case HTTPVersion of
%%If it is a version prior to 1.1, kill the connection
[$H, $T, $T, $P, $\/, $1, $.,N] ->
case httpd_util:key1search(ParsedHeader,"connection","keep-alive")of
%%if the connection isn't ordered to go down, let it live
%%The keep-alive value is from older HTTP; it might be older
%%clients that use it.
"keep-alive" when N >= 49 ->
?DEBUG("CONNECTION MODE: ~p",[true]),
true;
"close" ->
?DEBUG("CONNECTION MODE: ~p",[false]),
false;
Connect ->
?DEBUG("CONNECTION MODE: ~p VALUE: ~p",[false,Connect]),
false
end;
_ ->
?DEBUG("CONNECTION MODE: ~p VERSION: ~p",[false,HTTPVersion]),
false
end;
_ ->
false
end.
%%----------------------------------------------------------------------
%% Control whether the last newline of the body is a part of the message or
%% it is a part of the multipart message.
%%----------------------------------------------------------------------
maybe_remove_nl(Header,Rest) ->
case find_content_type(Header) of
false ->
{ok,EntityBody,_}=regexp:sub(Rest,"\r\n\$",""),
EntityBody;
{ok, Value} ->
case string:str(Value, "multipart/form-data") of
0 ->
{ok,EntityBody,_}=regexp:sub(Rest,"\r\n\$",""),
EntityBody;
_ ->
Rest
end
end.
%%----------------------------------------------------------------------
%% Get the content type of the incoming request
%%----------------------------------------------------------------------
find_content_type([]) ->
false;
find_content_type([{Name,Value}|Tail]) ->
case httpd_util:to_lower(Name) of
"content-type" ->
{ok, Value};
_ ->
find_content_type(Tail)
end.
%%----------------------------------------------------------------------
%% Split the header to a list of strings where each string represents a
%% HTTP header-field
%%----------------------------------------------------------------------
split_lines(Request) ->
split_lines(Request, [], []).
split_lines([], CAcc, Acc) ->
lists:reverse([lists:reverse(CAcc)|Acc]);
%%White space in the header fields is allowed, but the new line must begin with LWS, see
%%RFC 2616 chap 4.2. The RFC does not say what to
split_lines([$\r, $\n, $\t |Rest], CAcc, Acc) ->
split_lines(Rest, [$\r, $\n |CAcc], Acc);
split_lines([$\r, $\n, $\s |Rest], CAcc, Acc) ->
split_lines(Rest, [$\r, $\n |CAcc], Acc);
split_lines([$\r, $\n|Rest], CAcc, Acc) ->
split_lines(Rest, [], [lists:reverse(CAcc)|Acc]);
split_lines([Chr|Rest], CAcc, Acc) ->
split_lines(Rest, [Chr|CAcc], Acc).
%%----------------------------------------------------------------------
%% This is a 'hack' to stop people from trying to access directories/files
%% relative to the ServerRoot.
%%----------------------------------------------------------------------
verify_request([Request, RequestURI]) ->
verify_request([Request, RequestURI, "HTTP/0.9"]);
verify_request([Request, RequestURI, Protocol]) ->
NewRequestURI =
case string:str(RequestURI, "?") of
0 ->
RequestURI;
Ndx ->
string:left(RequestURI, Ndx)
end,
case string:str(NewRequestURI, "..") of
0 ->
[Request, RequestURI, Protocol];
_ ->
{bad_request, {forbidden, RequestURI}}
end;
verify_request(Request) ->
Request.
%%----------------------------------------------------------------------
%% tagup_header
%%
%% Parses the header of an HTTP request and returns a key,value tuple
%% list containing Name and Value of each header directive as of:
%%
%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
%%
%% But in http/1.1 the field-names are case insensitive, so now it must be
%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
%% The standard furthermore says that leading and trailing white space
%% is not a part of the field value and shall therefore be removed.
%%----------------------------------------------------------------------
tagup_header([]) -> [];
tagup_header([Line|Rest]) -> [tag(Line, [])|tagup_header(Rest)].
tag([], Tag) ->
{httpd_util:to_lower(lists:reverse(Tag)), ""};
tag([$:|Rest], Tag) ->
{httpd_util:to_lower(lists:reverse(Tag)), httpd_util:strip(Rest)};
tag([Chr|Rest], Tag) ->
tag(Rest, [Chr|Tag]).
%%----------------------------------------------------------------------
%% There are 3 possible forms of the request URI
%%
%% 1. * When the request is not for a specific asset but is instead
%% directed to the server itself
%%
%% 2. absoluteURI: the whole server name, port and asset is in the request
%%
%% 3. The most common form, which http/1.0 used: abs_path, that is, a path
%% to the requested asset.
%%----------------------------------------------------------------------
formatRequestUri("*")->
"*";
formatRequestUri([$h,$t,$t,$p,$:,$\/,$\/|ServerAndPath]) ->
removeServer(ServerAndPath);
formatRequestUri([$H,$T,$T,$P,$:,$\/,$\/|ServerAndPath]) ->
removeServer(ServerAndPath);
formatRequestUri(ABSPath) ->
ABSPath.
removeServer([$\/|Url])->
case Url of
[]->
"/";
_->
[$\/|Url]
end;
removeServer([N|Url]) ->
removeServer(Url).
formatAbsoluteURI([$h,$t,$t,$p,$:,$\/,$\/|Uri],ParsedHeader)->
[$H,$T,$T,$P,$:,$\/,$\/|Uri];
formatAbsoluteURI([$H,$T,$T,$P,$:,$\/,$\/|Uri],ParsedHeader)->
[$H,$T,$T,$P,$:,$\/,$\/|Uri];
formatAbsoluteURI(Uri,ParsedHeader)->
case httpd_util:key1search(ParsedHeader,"host") of
undefined ->
nohost;
Host ->
Host++Uri
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%Code below is crap from an older version shall be removed when
%%transformation to http/1.1 is finished
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%request(Request) ->
% ?DEBUG("request -> entry with:"
% "~n Request: ~s",[Request]),
% {BeforeEntityBody, Rest} = hsplit([], Request),
% ?DEBUG("request ->"
% "~n BeforeEntityBody: ~p"
% "~n Rest: ~p",[BeforeEntityBody, Rest]),
% [RequestLine|Header] = split_lines(BeforeEntityBody),
% ?DEBUG("request ->"
% "~n RequestLine: ~p"
% "~n Header: ~p",[RequestLine,Header]),
% ParsedHeader = tagup_header(Header),
% ?DEBUG("request ->"
% "~n ParseHeader: ~p",[ParsedHeader]),
% EntityBody = maybe_remove_nl(ParsedHeader,Rest),
% ?DEBUG("request ->"
% "~n EntityBody: ~p",[EntityBody]),
% case verify_request(string:tokens(RequestLine," ")) of
% ["HEAD", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
% {ok, ["HEAD", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
% ParsedHeader, EntityBody]};
% ["GET", RequestURI, "HTTP/0.9"] ->
% {ok, ["GET", RequestURI, "HTTP/0.9", RequestLine, ParsedHeader,
% EntityBody]};
% ["GET", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
% {ok, ["GET", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
% ParsedHeader,EntityBody]};
%% ["POST", RequestURI, [$H,$T,$T,$P,$/,$1,$.,N]] ->
% {ok, ["POST", formatRequestUri(RequestURI), [$H,$T,$T,$P,$/,$1,$.,N], RequestLine,
% ParsedHeader, EntityBody]};
% [Method, RequestURI] ->
% {not_implemented, RequestLine, Method, RequestURI,ParsedHeader,"HTTP/0.9"};
% [Method, RequestURI, HTTPVersion] ->
% {not_implemented, RequestLine, Method, RequestURI,ParsedHeader, HTTPVersion};
% {bad_request, Reason} ->
% {bad_request, Reason};
% Reason ->
% {bad_request, "Unknown request method"}
% end.
hsplit(Accu,[]) ->
{lists:reverse(Accu), []};
hsplit(Accu, [ $\r, $\n, $\r, $\n | Tail]) ->
{lists:reverse(Accu), Tail};
hsplit(Accu, [H|T]) ->
hsplit([H|Accu],T).
<|start_filename|>lib/dialyzer/test/small_SUITE_data/src/tuple1.erl<|end_filename|>
%%%-------------------------------------------------------------------
%%% File : tuple1.erl
%%% Author : <NAME> <<EMAIL>>
%%% Description : Exposed two bugs in the analysis;
%%% one suppressed warning and one crash.
%%%
%%% Created : 13 Nov 2006 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(tuple1).
-export([t1/2, t2/2, t3/2, bar/2]).
t1(List = [_|_], X) ->
lists:mapfoldl(fun foo/2, X, List).
t2(List = [_|_], X) ->
lists:mapfoldl(fun bar/2, X, List).
t3(List = [_|_], X) ->
lists:mapfoldl(fun baz/1, X, List).
foo(1, 1) -> a;
foo(a, 1) -> b.
bar(1, 1) -> {b, b};
bar(a, 1) -> {a, a}.
baz(1) -> 1.
<|start_filename|>erts/emulator/test/nif_SUITE_data/testcase_driver.h<|end_filename|>
/* ``Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The Initial Developer of the Original Code is Ericsson Utvecklings AB.
* Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
* AB. All Rights Reserved.''
*
* $Id$
*/
#ifndef TESTCASE_DRIVER_H__
#define TESTCASE_DRIVER_H__
#include "erl_nif.h"
#include <stdlib.h>
#include <stdio.h>
typedef struct {
char *testcase_name;
char *command;
int command_len;
void *extra;
} TestCaseState_t;
#define ASSERT_CLNUP(TCS, B, CLN) \
do { \
if (!(B)) { \
CLN; \
testcase_assertion_failed((TCS), __FILE__, __LINE__, #B); \
} \
} while (0)
#define ASSERT(TCS, B) ASSERT_CLNUP(TCS, B, (void) 0)
void testcase_printf(TestCaseState_t *tcs, char *frmt, ...);
void testcase_succeeded(TestCaseState_t *tcs, char *frmt, ...);
void testcase_skipped(TestCaseState_t *tcs, char *frmt, ...);
void testcase_failed(TestCaseState_t *tcs, char *frmt, ...);
int testcase_assertion_failed(TestCaseState_t *tcs, char *file, int line,
char *assertion);
void *testcase_alloc(size_t size);
void *testcase_realloc(void *ptr, size_t size);
void testcase_free(void *ptr);
char *testcase_name(void);
void testcase_run(TestCaseState_t *tcs);
void testcase_cleanup(TestCaseState_t *tcs);
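/* Usage sketch: a test module is expected to implement testcase_run() and may
 * use the ASSERT macro on the passed state, for example:
 *
 *   void testcase_run(TestCaseState_t *tcs)
 *   {
 *       void *mem = testcase_alloc(128);
 *       ASSERT(tcs, mem != NULL);
 *       testcase_printf(tcs, "%s: allocated 128 bytes\n", tcs->testcase_name);
 *       testcase_free(mem);
 *   }
 */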
#endif
<|start_filename|>lib/hipe/test/bs_SUITE_data/bs_pmatch_bugs.erl<|end_filename|>
%% -*- erlang-indent-level: 2 -*-
%%--------------------------------------------------------------------
-module(bs_pmatch_bugs).
-export([test/0]).
test() ->
Bin = <<"123.123">>,
<<49,50,51>> = lex_digits1(Bin, 1, []),
<<49,50,51>> = lex_digits2(Bin, 1, []),
ok = var_bind_bug(<<1, 2, 3, 4, 5, 6, 7, 8>>),
ok.
%%--------------------------------------------------------------------
%% One of the lex_digits functions below gave incorrect results due to
%% incorrect pattern matching compilation of binaries by the byte code
%% compiler. Fixed by <NAME> on 5/3/2003.
%% --------------------------------------------------------------------
lex_digits1(<<$., Rest/binary>>, _Val, _Acc) ->
Rest;
lex_digits1(<<N, Rest/binary>>, Val, Acc) when N >= $0, N =< $9 ->
lex_digits1(Rest, Val * 10 + dec(N), Acc);
lex_digits1(_Other, _Val, _Acc) ->
not_ok.
lex_digits2(<<N, Rest/binary>>,Val, Acc) when N >= $0, N =< $9 ->
lex_digits2(Rest, Val * 10 + dec(N), Acc);
lex_digits2(<<$., Rest/binary>>, _Val, _Acc) ->
Rest;
lex_digits2(_Other, _Val, _Acc) ->
not_ok.
dec(A) ->
A - $0.
%%--------------------------------------------------------------------
%% From: <NAME>
%% Date: 11/3/2011
%%
%% I've just run into an interesting little bit of behaviour that
%% doesn't seem quite right. erlc gives me the warning
%%
%% 43: Warning: this clause cannot match because a previous
%% clause at line 42 always matches
%% (line 42 is the "B -> wrong;" line).
%%
%% And sure enough, if you run test/0 you get 'wrong' back.
%%
%% That, in itself, is curious to me since by my understanding B should
%% be bound by the function header, and have no guarantee of being the
%% same as A. I can't see how it could be unbound.
%%
%% Doubly curious, is that if I stop using B as the size specifier of C,
%% like this:
%%
%% match(<<A:1/binary, B:8/integer, _C:1/binary, _Rest/binary>>) ->
%%
%% the warning goes away. And the result becomes 'ok' (in spite of
%% nothing in the body having changed, and the only thing changing in
%% the header being the size of an unused variable at the tail of the
%% binary).
%%--------------------------------------------------------------------
var_bind_bug(<<A:1/binary, B:8/integer, _C:B/binary, _Rest/binary>>) ->
case A of
B -> wrong;
_ -> ok
end.
<|start_filename|>lib/hipe/llvm/hipe_llvm_merge.erl<|end_filename|>
%%% -*- erlang-indent-level: 2 -*-
-module(hipe_llvm_merge).
-export([finalize/3]).
-include("hipe_llvm_arch.hrl").
-include("../../kernel/src/hipe_ext_format.hrl").
-include("../rtl/hipe_literals.hrl").
-include("../main/hipe.hrl").
finalize(CompiledCode, Closures, Exports) ->
CompiledCode1 = [CodePack || {_, CodePack} <- CompiledCode],
Code = [{MFA, [], ConstTab}
|| {MFA, _, _ , ConstTab, _, _} <- CompiledCode1],
{ConstAlign, ConstSize, ConstMap, RefsFromConsts} =
hipe_pack_constants:pack_constants(Code, ?ARCH_REGISTERS:alignment()),
%% Compute total code size separately as a sanity check for alignment
CodeSize = compute_code_size(CompiledCode1, 0),
%% io:format("Code Size (pre-computed): ~w~n", [CodeSize]),
{CodeBinary, ExportMap} = merge_mfas(CompiledCode1, 0, <<>>, []),
%% io:format("Code Size (post-computed): ~w~n", [byte_size(CodeBinary)]),
?VERBOSE_ASSERT(CodeSize =:= byte_size(CodeBinary)),
AccRefs = merge_refs(CompiledCode1, ConstMap, 0, []),
%% Bring CompiledCode to a combine_label_maps-acceptable form.
LabelMap = combine_label_maps(CompiledCode1, 0, gb_trees:empty()),
SC = hipe_pack_constants:slim_constmap(ConstMap),
DataRelocs = hipe_pack_constants:mk_data_relocs(RefsFromConsts, LabelMap),
SSE = hipe_pack_constants:slim_sorted_exportmap(ExportMap, Closures, Exports),
SlimRefs = hipe_pack_constants:slim_refs(AccRefs),
term_to_binary([{?VERSION_STRING(),?HIPE_ERTS_CHECKSUM},
ConstAlign, ConstSize,
SC, % ConstMap
DataRelocs, % LabelMap
SSE, % ExportMap
CodeSize, CodeBinary, SlimRefs,
0,[] % ColdCodeSize, SlimColdRefs
]).
%% Copied from hipe_x86_assemble.erl
nr_pad_bytes(Address) ->
(4 - (Address rem 4)) rem 4. % XXX: 16 or 32 instead?
align_entry(Address) ->
Address + nr_pad_bytes(Address).
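%% Example: nr_pad_bytes(5) =:= 3 and align_entry(5) =:= 8, while an already
%% aligned address is unchanged, e.g. align_entry(8) =:= 8.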
compute_code_size([{_MFA, _BinaryCode, CodeSize, _, _, _}|Code], Size) ->
compute_code_size(Code, align_entry(Size+CodeSize));
compute_code_size([], Size) -> Size.
combine_label_maps([{MFA, _, CodeSize, _, _, LabelMap}|Code], Address, CLM) ->
NewCLM = merge_label_map(gb_trees:to_list(LabelMap), MFA, Address, CLM),
combine_label_maps(Code, align_entry(Address+CodeSize), NewCLM);
combine_label_maps([], _Address, CLM) -> CLM.
merge_label_map([{Label,Offset}|Rest], MFA, Address, CLM) ->
NewCLM = gb_trees:insert({MFA,Label}, Address+Offset, CLM),
merge_label_map(Rest, MFA, Address, NewCLM);
merge_label_map([], _MFA, _Address, CLM) -> CLM.
%% @doc Merge the MFAs' binary code to one continuous binary and compute the
%% size of this binary. At the same time create an exportmap in the form
%% of {Address, M, F, A}.
%% XXX: Is alignment correct/optimal for X86/AMD64?
merge_mfas([{{M,F,A}, CodeBinary, CodeSize, _, _, _}|Code],
Address, AccCode, AccExportMap) ->
?VERBOSE_ASSERT(CodeSize =:= byte_size(CodeBinary)),
{Address1, Code1} =
case nr_pad_bytes(Address + CodeSize) of
0 -> %% Retains alignment:
{Address + CodeSize, CodeBinary};
NrPadBytes -> %% Needs padding!
Padding = list_to_binary(lists:duplicate(NrPadBytes, 0)),
{Address + CodeSize + NrPadBytes, % =:= align_entry(Address+CodeSize)
<<CodeBinary/binary, Padding/binary>>}
end,
?VERBOSE_ASSERT(Address1 =:=
align_entry(Address + CodeSize)), %XXX: Should address be aligned?
AccCode1 = <<AccCode/binary, Code1/binary>>,
merge_mfas(Code, Address1, AccCode1, [{Address, M, F, A}|AccExportMap]);
merge_mfas([], _Address, AccCode, AccExportMap) ->
{AccCode, AccExportMap}.
%% @doc Merge the references of relocatable symbols in the binary code. The
%% offsets must be updated because of the merging of the code binaries!
merge_refs([], _ConstMap, _Addr, AccRefs) -> AccRefs;
merge_refs([{MFA, _, CodeSize, _, Refs, _}|Rest], ConstMap, Address, AccRefs) ->
%% Important!: The hipe_pack_constants:pack_constants/2 function assigns
%% unique numbers to constants (ConstNo). These numbers are used from now on,
%% instead of the labels that were used before. So, in order to be compatible, we
%% must change all the constant labels in the Refs to the corresponding
%% ConstNo, which can be found in the ConstMap (#pcm_entry{}).
UpdatedRefs = [update_ref(label_to_constno(Ref, MFA, ConstMap), Address)
|| Ref <- Refs],
merge_refs(Rest, ConstMap, align_entry(Address+CodeSize),
UpdatedRefs++AccRefs).
label_to_constno({Type, Offset, {constant, Label}}, MFA, ConstMap) ->
ConstNo = hipe_pack_constants:find_const({MFA, Label}, ConstMap),
{Type, Offset, {constant, ConstNo}};
label_to_constno(Other, _MFA, _ConstMap) ->
Other.
%% @doc Update offset to a reference. In case of stack descriptors we must check
%% if there exists an exception handler, because it must also be updated.
update_ref({?SDESC, Offset, SDesc}, CodeAddr) ->
NewRefAddr = Offset+CodeAddr,
case SDesc of
{[], _, _, _} -> % No handler; only update offset
{?SDESC, NewRefAddr, SDesc};
{ExnHandler, FrameSize, StackArity, Roots} -> % Update exception handler
{?SDESC, NewRefAddr, {ExnHandler+CodeAddr, FrameSize, StackArity, Roots}}
end;
update_ref({Type, Offset, Term}, CodeAddr) ->
{Type, Offset+CodeAddr, Term}.
<|start_filename|>erts/emulator/test/map_SUITE_data/badmap_17.erl<|end_filename|>
-module(badmap_17).
-export([update/1]).
%% Compile this source file with OTP 17.
update(Map) ->
try
update_1(Map),
error(update_did_not_fail)
catch
error:{badmap,Map} ->
ok
end,
try
update_2(Map),
error(update_did_not_fail)
catch
error:{badmap,Map} ->
ok
end.
update_1(M) ->
M#{a=>42}.
update_2(M) ->
M#{a:=42}.
<|start_filename|>lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl<|end_filename|>
%% ``Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
%% AB. All Rights Reserved.''
%%
%% $Id: asn1rt_per_bin.erl,v 1.1 2008/12/17 09:53:31 mikpe Exp $
-module(asn1rt_per_bin).
%% encoding / decoding of PER aligned
-include("asn1_records.hrl").
-export([dec_fixup/3, cindex/3, list_to_record/2]).
-export([setchoiceext/1, setext/1, fixoptionals/2, fixoptionals/3,
fixextensions/2,
getext/1, getextension/2, skipextensions/3, getbit/1, getchoice/3 ]).
-export([getoptionals/2, getoptionals2/2, set_choice/3, encode_integer/2, encode_integer/3 ]).
-export([decode_integer/2, decode_integer/3, encode_small_number/1, encode_boolean/1,
decode_boolean/1, encode_length/2, decode_length/1, decode_length/2,
encode_small_length/1, decode_small_length/1,
decode_compact_bit_string/3]).
-export([decode_enumerated/3,
encode_bit_string/3, decode_bit_string/3 ]).
-export([encode_octet_string/2, decode_octet_string/2,
encode_null/1, decode_null/1,
encode_object_identifier/1, decode_object_identifier/1,
complete/1]).
-export([encode_open_type/2, decode_open_type/2]).
-export([encode_UniversalString/2, decode_UniversalString/2,
encode_PrintableString/2, decode_PrintableString/2,
encode_GeneralString/2, decode_GeneralString/2,
encode_GraphicString/2, decode_GraphicString/2,
encode_TeletexString/2, decode_TeletexString/2,
encode_VideotexString/2, decode_VideotexString/2,
encode_VisibleString/2, decode_VisibleString/2,
encode_BMPString/2, decode_BMPString/2,
encode_IA5String/2, decode_IA5String/2,
encode_NumericString/2, decode_NumericString/2,
encode_ObjectDescriptor/2, decode_ObjectDescriptor/1
]).
-export([complete_bytes/1]).
-define('16K',16384).
-define('32K',32768).
-define('64K',65536).
dec_fixup(Terms,Cnames,RemBytes) ->
dec_fixup(Terms,Cnames,RemBytes,[]).
dec_fixup([novalue|T],[_Hc|Tc],RemBytes,Acc) ->
dec_fixup(T,Tc,RemBytes,Acc);
dec_fixup([{_Name,novalue}|T],[_Hc|Tc],RemBytes,Acc) ->
dec_fixup(T,Tc,RemBytes,Acc);
dec_fixup([H|T],[Hc|Tc],RemBytes,Acc) ->
dec_fixup(T,Tc,RemBytes,[{Hc,H}|Acc]);
dec_fixup([],_Cnames,RemBytes,Acc) ->
{lists:reverse(Acc),RemBytes}.
cindex(Ix,Val,Cname) ->
case element(Ix,Val) of
{Cname,Val2} -> Val2;
X -> X
end.
%% converts a list to a record if necessary
list_to_record(_Name,Tuple) when tuple(Tuple) ->
Tuple;
list_to_record(Name,List) when list(List) ->
list_to_tuple([Name|List]).
%%--------------------------------------------------------
%% setchoiceext(InRootSet) -> [{bit,X}]
%% X is set to 1 when InRootSet==false
%% X is set to 0 when InRootSet==true
%%
setchoiceext(true) ->
[{debug,choiceext},{bits,1,0}];
setchoiceext(false) ->
[{debug,choiceext},{bits,1,1}].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% setext(true|false) -> CompleteList
%%
setext(false) ->
[{debug,ext},{bits,1,0}];
setext(true) ->
[{debug,ext},{bits,1,1}].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% This version of fixoptionals/2 is left only because of
%% backward compatibility with older generated code
fixoptionals(OptList,Val) when tuple(Val) ->
fixoptionals1(OptList,Val,[]);
fixoptionals(OptList,Val) when list(Val) ->
fixoptionals1(OptList,Val,1,[],[]).
fixoptionals1([],Val,Acc) ->
%% return {Val,Opt}
{Val,lists:reverse(Acc)};
fixoptionals1([{_,Pos}|Ot],Val,Acc) ->
case element(Pos+1,Val) of
asn1_NOVALUE -> fixoptionals1(Ot,Val,[0|Acc]);
asn1_DEFAULT -> fixoptionals1(Ot,Val,[0|Acc]);
_ -> fixoptionals1(Ot,Val,[1|Acc])
end.
fixoptionals1([{Name,Pos}|Ot],[{Name,Val}|Vt],_Opt,Acc1,Acc2) ->
fixoptionals1(Ot,Vt,Pos+1,[1|Acc1],[{Name,Val}|Acc2]);
fixoptionals1([{_Name,Pos}|Ot],V,Pos,Acc1,Acc2) ->
fixoptionals1(Ot,V,Pos+1,[0|Acc1],[asn1_NOVALUE|Acc2]);
fixoptionals1(O,[Vh|Vt],Pos,Acc1,Acc2) ->
fixoptionals1(O,Vt,Pos+1,Acc1,[Vh|Acc2]);
fixoptionals1([],[Vh|Vt],Pos,Acc1,Acc2) ->
fixoptionals1([],Vt,Pos+1,Acc1,[Vh|Acc2]);
fixoptionals1([],[],_,Acc1,Acc2) ->
% return {Val,Opt}
{list_to_tuple([asn1_RECORDNAME|lists:reverse(Acc2)]),lists:reverse(Acc1)}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% This is the new fixoptionals/3 which is used by the new generates
%%
fixoptionals(OptList,OptLength,Val) when tuple(Val) ->
Bits = fixoptionals(OptList,Val,0),
{Val,{bits,OptLength,Bits}};
fixoptionals([],_Val,Acc) ->
%% Optbits
Acc;
fixoptionals([Pos|Ot],Val,Acc) ->
case element(Pos,Val) of
asn1_NOVALUE -> fixoptionals(Ot,Val,Acc bsl 1);
asn1_DEFAULT -> fixoptionals(Ot,Val,Acc bsl 1);
_ -> fixoptionals(Ot,Val,(Acc bsl 1) + 1)
end.
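%% Example (sketch): for a record value whose fields 2 and 3 are the optional
%% ones, fixoptionals([2,3], 2, {'Rec',asn1_NOVALUE,42}) returns
%% {{'Rec',asn1_NOVALUE,42},{bits,2,1}}: one bit per optional field,
%% 0 for an absent value and 1 for a present one.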
getext(Bytes) when tuple(Bytes) ->
getbit(Bytes);
getext(Bytes) when binary(Bytes) ->
getbit({0,Bytes});
getext(Bytes) when list(Bytes) ->
getbit({0,Bytes}).
getextension(0, Bytes) ->
{{},Bytes};
getextension(1, Bytes) ->
{Len,Bytes2} = decode_small_length(Bytes),
{Blist, Bytes3} = getbits_as_list(Len,Bytes2),
{list_to_tuple(Blist),Bytes3}.
fixextensions({ext,ExtPos,ExtNum},Val) ->
case fixextensions(ExtPos,ExtNum+ExtPos,Val,0) of
0 -> [];
ExtBits ->
[encode_small_length(ExtNum),{bits,ExtNum,ExtBits}]
end.
fixextensions(Pos,MaxPos,_,Acc) when Pos >= MaxPos ->
Acc;
fixextensions(Pos,ExtPos,Val,Acc) ->
Bit = case catch(element(Pos+1,Val)) of
asn1_NOVALUE ->
0;
asn1_NOEXTVALUE ->
0;
{'EXIT',_} ->
0;
_ ->
1
end,
fixextensions(Pos+1,ExtPos,Val,(Acc bsl 1)+Bit).
skipextensions(Bytes,Nr,ExtensionBitPattern) ->
case (catch element(Nr,ExtensionBitPattern)) of
1 ->
{_,Bytes2} = decode_open_type(Bytes,[]),
skipextensions(Bytes2, Nr+1, ExtensionBitPattern);
0 ->
skipextensions(Bytes, Nr+1, ExtensionBitPattern);
{'EXIT',_} -> % badarg, no more extensions
Bytes
end.
getchoice(Bytes,1,0) -> % only 1 alternative is not encoded
{0,Bytes};
getchoice(Bytes,_,1) ->
decode_small_number(Bytes);
getchoice(Bytes,NumChoices,0) ->
decode_constrained_number(Bytes,{0,NumChoices-1}).
%% old version kept for backward compatibility with generates from R7B
getoptionals(Bytes,NumOpt) ->
{Blist,Bytes1} = getbits_as_list(NumOpt,Bytes),
{list_to_tuple(Blist),Bytes1}.
%% new version used in generates from r8b_patch/3 and later
getoptionals2(Bytes,NumOpt) ->
getbits(Bytes,NumOpt).
%% getbits_as_binary(Num,Bytes) -> {{Unused,BinBits},RestBytes},
%% Num = integer(),
%% Bytes = list() | tuple(),
%% Unused = integer(),
%% BinBits = binary(),
%% RestBytes = tuple()
getbits_as_binary(Num,Bytes) when binary(Bytes) ->
getbits_as_binary(Num,{0,Bytes});
getbits_as_binary(0,Buffer) ->
{{0,<<>>},Buffer};
getbits_as_binary(Num,{0,Bin}) when Num > 16 ->
Used = Num rem 8,
Pad = (8 - Used) rem 8,
% Nbytes = Num div 8,
<<Bits:Num,_:Pad,RestBin/binary>> = Bin,
{{Pad,<<Bits:Num,0:Pad>>},RestBin};
getbits_as_binary(Num,Buffer={_Used,_Bin}) -> % Unaligned buffer
%% Num =< 16,
{Bits2,Buffer2} = getbits(Buffer,Num),
Pad = (8 - (Num rem 8)) rem 8,
{{Pad,<<Bits2:Num,0:Pad>>},Buffer2}.
% integer_from_list(Int,[],BigInt) ->
% BigInt;
% integer_from_list(Int,[H|T],BigInt) when Int < 8 ->
% (BigInt bsl Int) bor (H bsr (8-Int));
% integer_from_list(Int,[H|T],BigInt) ->
% integer_from_list(Int-8,T,(BigInt bsl 8) bor H).
getbits_as_list(Num,Bytes) when binary(Bytes) ->
getbits_as_list(Num,{0,Bytes},[]);
getbits_as_list(Num,Bytes) ->
getbits_as_list(Num,Bytes,[]).
%% If buffer is empty and nothing more will be picked.
getbits_as_list(0, B, Acc) ->
{lists:reverse(Acc),B};
%% If first byte in buffer is full and at least one byte will be picked,
%% then pick one byte.
getbits_as_list(N,{0,Bin},Acc) when N >= 8 ->
<<B7:1,B6:1,B5:1,B4:1,B3:1,B2:1,B1:1,B0:1,Rest/binary>> = Bin,
getbits_as_list(N-8,{0,Rest},[B0,B1,B2,B3,B4,B5,B6,B7|Acc]);
getbits_as_list(N,{Used,Bin},Acc) when N >= 4, Used =< 4 ->
NewUsed = Used + 4,
Rem = 8 - NewUsed,
<<_:Used,B3:1,B2:1,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
NewRest = case Rem of 0 -> Rest; _ -> Bin end,
getbits_as_list(N-4,{NewUsed rem 8,NewRest},[B0,B1,B2,B3|Acc]);
getbits_as_list(N,{Used,Bin},Acc) when N >= 2, Used =< 6 ->
NewUsed = Used + 2,
Rem = 8 - NewUsed,
<<_:Used,B1:1,B0:1,_:Rem, Rest/binary>> = Bin,
NewRest = case Rem of 0 -> Rest; _ -> Bin end,
getbits_as_list(N-2,{NewUsed rem 8,NewRest},[B0,B1|Acc]);
getbits_as_list(N,{Used,Bin},Acc) when Used =< 7 ->
NewUsed = Used + 1,
Rem = 8 - NewUsed,
<<_:Used,B0:1,_:Rem, Rest/binary>> = Bin,
NewRest = case Rem of 0 -> Rest; _ -> Bin end,
getbits_as_list(N-1,{NewUsed rem 8,NewRest},[B0|Acc]).
getbit({7,<<_:7,B:1,Rest/binary>>}) ->
{B,{0,Rest}};
getbit({0,Buffer = <<B:1,_:7,_/binary>>}) ->
{B,{1,Buffer}};
getbit({Used,Buffer}) ->
Unused = (8 - Used) - 1,
<<_:Used,B:1,_:Unused,_/binary>> = Buffer,
{B,{Used+1,Buffer}};
getbit(Buffer) when binary(Buffer) ->
getbit({0,Buffer}).
getbits({0,Buffer},Num) when (Num rem 8) == 0 ->
<<Bits:Num,Rest/binary>> = Buffer,
{Bits,{0,Rest}};
getbits({Used,Bin},Num) ->
NumPlusUsed = Num + Used,
NewUsed = NumPlusUsed rem 8,
Unused = (8-NewUsed) rem 8,
case Unused of
0 ->
<<_:Used,Bits:Num,Rest/binary>> = Bin,
{Bits,{0,Rest}};
_ ->
Bytes = NumPlusUsed div 8,
<<_:Used,Bits:Num,_UBits:Unused,_/binary>> = Bin,
<<_:Bytes/binary,Rest/binary>> = Bin,
{Bits,{NewUsed,Rest}}
end;
getbits(Bin,Num) when binary(Bin) ->
getbits({0,Bin},Num).
% getoctet(Bytes) when list(Bytes) ->
% getoctet({0,Bytes});
% getoctet(Bytes) ->
% %% io:format("getoctet:Buffer = ~p~n",[Bytes]),
% getoctet1(Bytes).
% getoctet1({0,[H|T]}) ->
% {H,{0,T}};
% getoctet1({Pos,[_,H|T]}) ->
% {H,{0,T}}.
align({0,L}) ->
{0,L};
align({_Pos,<<_H,T/binary>>}) ->
{0,T};
align(Bytes) ->
{0,Bytes}.
%% First align buffer, then pick the first Num octets.
%% Returns octets as an integer with bit significance as in buffer.
getoctets({0,Buffer},Num) ->
<<Val:Num/integer-unit:8,RestBin/binary>> = Buffer,
{Val,{0,RestBin}};
getoctets({U,<<_Padding,Rest/binary>>},Num) when U /= 0 ->
getoctets({0,Rest},Num);
getoctets(Buffer,Num) when binary(Buffer) ->
getoctets({0,Buffer},Num).
% getoctets(Buffer,Num) ->
% %% io:format("getoctets:Buffer = ~p~nNum = ~p~n",[Buffer,Num]),
% getoctets(Buffer,Num,0).
% getoctets(Buffer,0,Acc) ->
% {Acc,Buffer};
% getoctets(Buffer,Num,Acc) ->
% {Oct,NewBuffer} = getoctet(Buffer),
% getoctets(NewBuffer,Num-1,(Acc bsl 8)+Oct).
% getoctets_as_list(Buffer,Num) ->
% getoctets_as_list(Buffer,Num,[]).
% getoctets_as_list(Buffer,0,Acc) ->
% {lists:reverse(Acc),Buffer};
% getoctets_as_list(Buffer,Num,Acc) ->
% {Oct,NewBuffer} = getoctet(Buffer),
% getoctets_as_list(NewBuffer,Num-1,[Oct|Acc]).
%% First align buffer, then pick the first Num octets.
%% Returns octets as a binary
getoctets_as_bin({0,Bin},Num)->
<<Octets:Num/binary,RestBin/binary>> = Bin,
{Octets,{0,RestBin}};
getoctets_as_bin({_U,Bin},Num) ->
<<_Padding,Octets:Num/binary,RestBin/binary>> = Bin,
{Octets,{0,RestBin}};
getoctets_as_bin(Bin,Num) when binary(Bin) ->
getoctets_as_bin({0,Bin},Num).
%% same as above but returns octets as a List
getoctets_as_list(Buffer,Num) ->
{Bin,Buffer2} = getoctets_as_bin(Buffer,Num),
{binary_to_list(Bin),Buffer2}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% set_choice(Alt,Choices,Altnum) -> ListofBitSettings
%% Alt = atom()
%% Altnum = integer() | {integer(),integer()}% number of alternatives
%% Choices = [atom()] | {[atom()],[atom()]}
%% When Choices is a tuple the first list is the Rootset and the
%% second is the Extensions and then Altnum must also be a tuple with the
%% lengths of the 2 lists
%%
set_choice(Alt,{L1,L2},{Len1,_Len2}) ->
case set_choice_tag(Alt,L1) of
N when integer(N), Len1 > 1 ->
[{bits,1,0}, % the value is in the root set
encode_integer([{'ValueRange',{0,Len1-1}}],N)];
N when integer(N) ->
[{bits,1,0}]; % no encoding if only 0 or 1 alternative
false ->
[{bits,1,1}, % extension value
case set_choice_tag(Alt,L2) of
N2 when integer(N2) ->
encode_small_number(N2);
false ->
unknown_choice_alt
end]
end;
set_choice(Alt,L,Len) ->
case set_choice_tag(Alt,L) of
N when integer(N), Len > 1 ->
encode_integer([{'ValueRange',{0,Len-1}}],N);
N when integer(N) ->
[]; % no encoding if only 0 or 1 alternative
false ->
[unknown_choice_alt]
end.
set_choice_tag(Alt,Choices) ->
set_choice_tag(Alt,Choices,0).
set_choice_tag(Alt,[Alt|_Rest],Tag) ->
Tag;
set_choice_tag(Alt,[_H|Rest],Tag) ->
set_choice_tag(Alt,Rest,Tag+1);
set_choice_tag(_Alt,[],_Tag) ->
false.
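%% Example (sketch): for a non-extensible CHOICE with root alternatives
%% [a,b,c], set_choice(b, [a,b,c], 3) selects index 1 and returns
%% encode_integer([{'ValueRange',{0,2}}], 1).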
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% decode_fragmented_XXX; decode of values encoded fragmented according
%% to ITU-T X.691 clause 10.9.3.8. The unit (XXX) is either bits, octets,
%% characters or number of components (in a choice,sequence or similar).
%% Buffer is a buffer {Used, Bin}.
%% C is the constrained length.
%% If the buffer is not aligned, this function does that.
decode_fragmented_bits({0,Buffer},C) ->
decode_fragmented_bits(Buffer,C,[]);
decode_fragmented_bits({_N,<<_,Bs/binary>>},C) ->
decode_fragmented_bits(Bs,C,[]).
decode_fragmented_bits(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
{Value,Bin2} = split_binary(Bin, Len * ?'16K'),
decode_fragmented_bits(Bin2,C,[Value,Acc]);
decode_fragmented_bits(<<0:1,0:7,Bin/binary>>,C,Acc) ->
BinBits = list_to_binary(lists:reverse(Acc)),
case C of
Int when integer(Int),C == size(BinBits) ->
{BinBits,{0,Bin}};
Int when integer(Int) ->
exit({error,{asn1,{illegal_value,C,BinBits}}});
_ ->
{BinBits,{0,Bin}}
end;
decode_fragmented_bits(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
Result = {BinBits,{Used,_Rest}} =
case (Len rem 8) of
0 ->
<<Value:Len/binary-unit:1,Bin2/binary>> = Bin,
{list_to_binary(lists:reverse([Value|Acc])),{0,Bin2}};
Rem ->
Bytes = Len div 8,
U = 8 - Rem,
<<Value:Bytes/binary-unit:8,Bits1:Rem,Bits2:U,Bin2/binary>> = Bin,
{list_to_binary(lists:reverse([Bits1 bsl U,Value|Acc])),
{Rem,<<Bits2,Bin2/binary>>}}
end,
case C of
Int when integer(Int),C == (size(BinBits) - ((8 - Used) rem 8)) ->
Result;
Int when integer(Int) ->
exit({error,{asn1,{illegal_value,C,BinBits}}});
_ ->
Result
end.
decode_fragmented_octets({0,Bin},C) ->
decode_fragmented_octets(Bin,C,[]);
decode_fragmented_octets({_N,<<_,Bs/binary>>},C) ->
decode_fragmented_octets(Bs,C,[]).
decode_fragmented_octets(<<3:2,Len:6,Bin/binary>>,C,Acc) ->
{Value,Bin2} = split_binary(Bin,Len * ?'16K'),
decode_fragmented_octets(Bin2,C,[Value,Acc]);
decode_fragmented_octets(<<0:1,0:7,Bin/binary>>,C,Acc) ->
Octets = list_to_binary(lists:reverse(Acc)),
case C of
Int when integer(Int), C == size(Octets) ->
{Octets,{0,Bin}};
Int when integer(Int) ->
exit({error,{asn1,{illegal_value,C,Octets}}});
_ ->
{Octets,{0,Bin}}
end;
decode_fragmented_octets(<<0:1,Len:7,Bin/binary>>,C,Acc) ->
<<Value:Len/binary-unit:8,Bin2/binary>> = Bin,
BinOctets = list_to_binary(lists:reverse([Value|Acc])),
case C of
Int when integer(Int),size(BinOctets) == Int ->
{BinOctets,Bin2};
Int when integer(Int) ->
exit({error,{asn1,{illegal_value,C,BinOctets}}});
_ ->
{BinOctets,Bin2}
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% encode_open_type(Constraint, Value) -> CompleteList
%% Value = list of bytes of an already encoded value (the list must be flat)
%% | binary
%% Constraint = not used in this version
%%
encode_open_type(_C, Val) when list(Val) ->
Bin = list_to_binary(Val),
[encode_length(undefined,size(Bin)),{octets,Bin}]; % octets implies align
encode_open_type(_C, Val) when binary(Val) ->
[encode_length(undefined,size(Val)),{octets,Val}]. % octets implies align
%% the binary_to_list is not optimal but compatible with the current solution
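%% Example (sketch): encode_open_type([], <<1,2,3>>) gives
%% [{octets,[3]},{octets,<<1,2,3>>}], i.e. a length determinant followed by
%% the already encoded value.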
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% decode_open_type(Buffer,Constraint) -> Value
%% Constraint is not used in this version
%% Buffer = [byte] with PER encoded data
%% Value = [byte] with decoded data (which must be decoded again as some type)
%%
decode_open_type(Bytes, _C) ->
{Len,Bytes2} = decode_length(Bytes,undefined),
getoctets_as_bin(Bytes2,Len).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% encode_integer(Constraint,Value,NamedNumberList) -> CompleteList
%% encode_integer(Constraint,Value) -> CompleteList
%% encode_integer(Constraint,{Name,Value}) -> CompleteList
%%
%%
encode_integer(C,V,NamedNumberList) when atom(V) ->
case lists:keysearch(V,1,NamedNumberList) of
{value,{_,NewV}} ->
encode_integer(C,NewV);
_ ->
exit({error,{asn1,{namednumber,V}}})
end;
encode_integer(C,V,_NamedNumberList) when integer(V) ->
encode_integer(C,V);
encode_integer(C,{Name,V},NamedNumberList) when atom(Name) ->
encode_integer(C,V,NamedNumberList).
encode_integer(C,{Name,Val}) when atom(Name) ->
encode_integer(C,Val);
encode_integer([{Rc,_Ec}],Val) when tuple(Rc) -> % XXX when is this invoked? First argument most often a list,...Ok this is the extension case...but it doesn't work.
case (catch encode_integer([Rc],Val)) of
{'EXIT',{error,{asn1,_}}} ->
[{bits,1,1},encode_unconstrained_number(Val)];
Encoded ->
[{bits,1,0},Encoded]
end;
encode_integer(C,Val ) when list(C) ->
case get_constraint(C,'SingleValue') of
no ->
encode_integer1(C,Val);
V when integer(V),V == Val ->
[]; % a type restricted to a single value encodes to nothing
V when list(V) ->
case lists:member(Val,V) of
true ->
encode_integer1(C,Val);
_ ->
exit({error,{asn1,{illegal_value,Val}}})
end;
_ ->
exit({error,{asn1,{illegal_value,Val}}})
end.
encode_integer1(C, Val) ->
case VR = get_constraint(C,'ValueRange') of
no ->
encode_unconstrained_number(Val);
{Lb,'MAX'} ->
encode_semi_constrained_number(Lb,Val);
%% positive with range
{Lb,Ub} when Val >= Lb,
Ub >= Val ->
encode_constrained_number(VR,Val);
_ ->
exit({error,{asn1,{illegal_value,VR,Val}}})
end.
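%% Example (sketch): encode_integer([{'ValueRange',{0,7}}], 5) falls through to
%% encode_constrained_number({0,7}, 5) and gives {bits,3,5}, since a range of
%% 8 values fits in 3 bits.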
decode_integer(Buffer,Range,NamedNumberList) ->
{Val,Buffer2} = decode_integer(Buffer,Range),
case lists:keysearch(Val,2,NamedNumberList) of
{value,{NewVal,_}} -> {NewVal,Buffer2};
_ -> {Val,Buffer2}
end.
decode_integer(Buffer,[{Rc,_Ec}]) when tuple(Rc) ->
{Ext,Buffer2} = getext(Buffer),
case Ext of
0 -> decode_integer(Buffer2,[Rc]);
1 -> decode_unconstrained_number(Buffer2)
end;
decode_integer(Buffer,undefined) ->
decode_unconstrained_number(Buffer);
decode_integer(Buffer,C) ->
case get_constraint(C,'SingleValue') of
V when integer(V) ->
{V,Buffer};
V when list(V) ->
{Val,Buffer2} = decode_integer1(Buffer,C),
case lists:member(Val,V) of
true ->
{Val,Buffer2};
_ ->
exit({error,{asn1,{illegal_value,Val}}})
end;
_ ->
decode_integer1(Buffer,C)
end.
decode_integer1(Buffer,C) ->
case VR = get_constraint(C,'ValueRange') of
no ->
decode_unconstrained_number(Buffer);
{Lb, 'MAX'} ->
decode_semi_constrained_number(Buffer,Lb);
{_,_} ->
decode_constrained_number(Buffer,VR)
end.
% X.691:10.6 Encoding of a normally small non-negative whole number
% Use this for encoding of CHOICE index if there is an extension marker in
% the CHOICE
encode_small_number({Name,Val}) when atom(Name) ->
encode_small_number(Val);
encode_small_number(Val) when Val =< 63 ->
% [{bits,1,0},{bits,6,Val}];
[{bits,7,Val}]; % same as above but more efficient
encode_small_number(Val) ->
[{bits,1,1},encode_semi_constrained_number(0,Val)].
decode_small_number(Bytes) ->
{Bit,Bytes2} = getbit(Bytes),
case Bit of
0 ->
getbits(Bytes2,6);
1 ->
decode_semi_constrained_number(Bytes2,0)
end.
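%% Example (sketch): encode_small_number(5) gives [{bits,7,5}], while
%% encode_small_number(100) gives [{bits,1,1},encode_semi_constrained_number(0,100)]
%% because values above 63 use the long form.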
%% X.691:10.7 Encoding of a semi-constrained whole number
%% might be an optimization encode_semi_constrained_number(0,Val) ->
encode_semi_constrained_number(C,{Name,Val}) when atom(Name) ->
encode_semi_constrained_number(C,Val);
encode_semi_constrained_number({Lb,'MAX'},Val) ->
encode_semi_constrained_number(Lb,Val);
encode_semi_constrained_number(Lb,Val) ->
Val2 = Val - Lb,
Oct = eint_positive(Val2),
Len = length(Oct),
if
Len < 128 ->
{octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
true ->
[encode_length(undefined,Len),{octets,Oct}]
end.
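%% Example (sketch): encode_semi_constrained_number(0, 5) gives {octets,[1,5]},
%% i.e. a one-octet length determinant followed by the minimal octets of the
%% offset from the lower bound.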
decode_semi_constrained_number(Bytes,{Lb,_}) ->
decode_semi_constrained_number(Bytes,Lb);
decode_semi_constrained_number(Bytes,Lb) ->
{Len,Bytes2} = decode_length(Bytes,undefined),
{V,Bytes3} = getoctets(Bytes2,Len),
{V+Lb,Bytes3}.
encode_constrained_number(Range,{Name,Val}) when atom(Name) ->
encode_constrained_number(Range,Val);
encode_constrained_number({Lb,Ub},Val) when Val >= Lb, Ub >= Val ->
Range = Ub - Lb + 1,
Val2 = Val - Lb,
if
Range == 2 ->
{bits,1,Val2};
Range =< 4 ->
{bits,2,Val2};
Range =< 8 ->
{bits,3,Val2};
Range =< 16 ->
{bits,4,Val2};
Range =< 32 ->
{bits,5,Val2};
Range =< 64 ->
{bits,6,Val2};
Range =< 128 ->
{bits,7,Val2};
Range =< 255 ->
{bits,8,Val2};
Range =< 256 ->
{octets,[Val2]};
Range =< 65536 ->
{octets,<<Val2:16>>};
Range =< 16#1000000 ->
Octs = eint_positive(Val2),
[{bits,2,length(Octs)-1},{octets,Octs}];
Range =< 16#100000000 ->
Octs = eint_positive(Val2),
[{bits,2,length(Octs)-1},{octets,Octs}];
Range =< 16#10000000000 ->
Octs = eint_positive(Val2),
[{bits,3,length(Octs)-1},{octets,Octs}];
true ->
exit({not_supported,{integer_range,Range}})
end;
encode_constrained_number(Range,Val) ->
exit({error,{asn1,{integer_range,Range,value,Val}}}).
decode_constrained_number(Buffer,{Lb,Ub}) ->
Range = Ub - Lb + 1,
% Val2 = Val - Lb,
{Val,Remain} =
if
Range == 2 ->
getbits(Buffer,1);
Range =< 4 ->
getbits(Buffer,2);
Range =< 8 ->
getbits(Buffer,3);
Range =< 16 ->
getbits(Buffer,4);
Range =< 32 ->
getbits(Buffer,5);
Range =< 64 ->
getbits(Buffer,6);
Range =< 128 ->
getbits(Buffer,7);
Range =< 255 ->
getbits(Buffer,8);
Range =< 256 ->
getoctets(Buffer,1);
Range =< 65536 ->
getoctets(Buffer,2);
Range =< 16#1000000 ->
{Len,Bytes2} = decode_length(Buffer,{1,3}),
{Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
{dec_pos_integer(Octs),Bytes3};
Range =< 16#100000000 ->
{Len,Bytes2} = decode_length(Buffer,{1,4}),
{Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
{dec_pos_integer(Octs),Bytes3};
Range =< 16#10000000000 ->
{Len,Bytes2} = decode_length(Buffer,{1,5}),
{Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
{dec_pos_integer(Octs),Bytes3};
true ->
exit({not_supported,{integer_range,Range}})
end,
{Val+Lb,Remain}.
%% X.691:10.8 Encoding of an unconstrained whole number
encode_unconstrained_number(Val) when Val >= 0 ->
Oct = eint(Val,[]),
Len = length(Oct),
if
Len < 128 ->
{octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
true ->
[encode_length(undefined,Len),{octets,Oct}]
end;
encode_unconstrained_number(Val) -> % negative
Oct = enint(Val,[]),
Len = length(Oct),
if
Len < 128 ->
{octets,[Len|Oct]}; % equiv with encode_length(undefined,Len) but faster
true ->
[encode_length(undefined,Len),{octets,Oct}]
end.
%% used for positive Values which don't need a sign bit
%% returns a binary
eint_positive(Val) ->
case eint(Val,[]) of
[0,B1|T] ->
[B1|T];
T ->
T
end.
eint(0, [B|Acc]) when B < 128 ->
[B|Acc];
eint(N, Acc) ->
eint(N bsr 8, [N band 16#ff| Acc]).
enint(-1, [B1|T]) when B1 > 127 ->
[B1|T];
enint(N, Acc) ->
enint(N bsr 8, [N band 16#ff|Acc]).
decode_unconstrained_number(Bytes) ->
{Len,Bytes2} = decode_length(Bytes,undefined),
{Ints,Bytes3} = getoctets_as_list(Bytes2,Len),
{dec_integer(Ints),Bytes3}.
dec_pos_integer(Ints) ->
decpint(Ints, 8 * (length(Ints) - 1)).
dec_integer(Ints) when hd(Ints) band 255 =< 127 -> %% Positive number
decpint(Ints, 8 * (length(Ints) - 1));
dec_integer(Ints) -> %% Negative
decnint(Ints, 8 * (length(Ints) - 1)).
decpint([Byte|Tail], Shift) ->
(Byte bsl Shift) bor decpint(Tail, Shift-8);
decpint([], _) -> 0.
decnint([Byte|Tail], Shift) ->
(-128 + (Byte band 127) bsl Shift) bor decpint(Tail, Shift-8).
% minimum_octets(Val) ->
% minimum_octets(Val,[]).
% minimum_octets(Val,Acc) when Val > 0 ->
% minimum_octets((Val bsr 8),[Val band 16#FF|Acc]);
% minimum_octets(0,Acc) ->
% Acc.
%% X.691:10.9 Encoding of a length determinant
%%encode_small_length(undefined,Len) -> % null means no UpperBound
%% encode_small_number(Len).
%% X.691:10.9.3.5
%% X.691:10.9.3.7
encode_length(undefined,Len) -> % un-constrained
if
Len < 128 ->
{octets,[Len]};
Len < 16384 ->
{octets,<<2:2,Len:14>>};
true -> % should be able to encode length >= 16384
exit({error,{asn1,{encode_length,{nyi,above_16k}}}})
end;
encode_length({0,'MAX'},Len) ->
encode_length(undefined,Len);
encode_length(Vr={Lb,Ub},Len) when Ub =< 65535 ,Lb >= 0 -> % constrained
encode_constrained_number(Vr,Len);
encode_length({Lb,_Ub},Len) when integer(Lb), Lb >= 0 -> % Ub > 65535
encode_length(undefined,Len);
encode_length({Vr={Lb,Ub},[]},Len) when Ub =< 65535 ,Lb >= 0 ->
%% constrained extensible
[{bits,1,0},encode_constrained_number(Vr,Len)];
encode_length(SingleValue,_Len) when integer(SingleValue) ->
[].
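%% Example (sketch): encode_length(undefined, 5) gives {octets,[5]}, while
%% encode_length(undefined, 200) gives {octets,<<2:2,200:14>>}, i.e. the
%% two-octet form <<128,200>> used for lengths in 128..16383.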
%% X.691 10.9.3.4 (only used for length of bitmap that prefixes extension
%% additions in a sequence or set)
encode_small_length(Len) when Len =< 64 ->
%% [{bits,1,0},{bits,6,Len-1}];
{bits,7,Len-1}; % the same as above but more efficient
encode_small_length(Len) ->
[{bits,1,1},encode_length(undefined,Len)].
% decode_small_length({Used,<<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>>}) ->
% case Buffer of
% <<_:Used,0:1,Num:6,_:((8-Used+1) rem 8),Rest/binary>> ->
% {Num,
% case getbit(Buffer) of
% {0,Remain} ->
% {Bits,Remain2} = getbits(Remain,6),
% {Bits+1,Remain2};
% {1,Remain} ->
% decode_length(Remain,undefined)
% end.
decode_small_length(Buffer) ->
case getbit(Buffer) of
{0,Remain} ->
{Bits,Remain2} = getbits(Remain,6),
{Bits+1,Remain2};
{1,Remain} ->
decode_length(Remain,undefined)
end.
decode_length(Buffer) ->
decode_length(Buffer,undefined).
decode_length(Buffer,undefined) -> % un-constrained
{0,Buffer2} = align(Buffer),
case Buffer2 of
<<0:1,Oct:7,Rest/binary>> ->
{Oct,{0,Rest}};
<<2:2,Val:14,Rest/binary>> ->
{Val,{0,Rest}};
<<3:2,_:14,_Rest/binary>> ->
%% this case should be fixed
exit({error,{asn1,{decode_length,{nyi,above_16k}}}})
end;
%% {Bits,_} = getbits(Buffer2,2),
% case Bits of
% 2 ->
% {Val,Bytes3} = getoctets(Buffer2,2),
% {(Val band 16#3FFF),Bytes3};
% 3 ->
% exit({error,{asn1,{decode_length,{nyi,above_16k}}}});
% _ ->
% {Val,Bytes3} = getoctet(Buffer2),
% {Val band 16#7F,Bytes3}
% end;
decode_length(Buffer,{Lb,Ub}) when Ub =< 65535 ,Lb >= 0 -> % constrained
decode_constrained_number(Buffer,{Lb,Ub});
decode_length(_,{Lb,_}) when integer(Lb), Lb >= 0 -> % Ub > 65535
exit({error,{asn1,{decode_length,{nyi,above_64K}}}});
decode_length(Buffer,{{Lb,Ub},[]}) ->
case getbit(Buffer) of
{0,Buffer2} ->
decode_length(Buffer2, {Lb,Ub})
end;
%When does this case occur with {_,_Lb,Ub} ??
% X.691:10.9.3.5
decode_length({Used,Bin},{_,_Lb,_Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub NOTE! this case does not cover case when Ub > 65535
Unused = (8-Used) rem 8,
case Bin of
<<_:Used,0:1,Val:7,R:Unused,Rest/binary>> ->
{Val,{Used,<<R,Rest/binary>>}};
<<_:Used,_:Unused,2:2,Val:14,Rest/binary>> ->
{Val, {0,Rest}};
<<_:Used,_:Unused,3:2,_:14,_Rest/binary>> ->
exit({error,{asn1,{decode_length,{nyi,length_above_64K}}}})
end;
% decode_length(Buffer,{_,_Lb,Ub}) -> %when Len =< 127 -> % Unconstrained or large Ub
% case getbit(Buffer) of
% {0,Remain} ->
% getbits(Remain,7);
% {1,Remain} ->
% {Val,Remain2} = getoctets(Buffer,2),
% {Val band 2#0111111111111111, Remain2}
% end;
decode_length(Buffer,SingleValue) when integer(SingleValue) ->
{SingleValue,Buffer}.
% X.691:11
encode_boolean(true) ->
{bits,1,1};
encode_boolean(false) ->
{bits,1,0};
encode_boolean({Name,Val}) when atom(Name) ->
encode_boolean(Val);
encode_boolean(Val) ->
exit({error,{asn1,{encode_boolean,Val}}}).
decode_boolean(Buffer) -> %when record(Buffer,buffer)
case getbit(Buffer) of
{1,Remain} -> {true,Remain};
{0,Remain} -> {false,Remain}
end.
%% ENUMERATED with extension marker
decode_enumerated(Buffer,C,{Ntup1,Ntup2}) when tuple(Ntup1), tuple(Ntup2) ->
{Ext,Buffer2} = getext(Buffer),
case Ext of
0 -> % not an extension value
{Val,Buffer3} = decode_integer(Buffer2,C),
case catch (element(Val+1,Ntup1)) of
NewVal when atom(NewVal) -> {NewVal,Buffer3};
_Error -> exit({error,{asn1,{decode_enumerated,{Val,[Ntup1,Ntup2]}}}})
end;
1 -> % this an extension value
{Val,Buffer3} = decode_small_number(Buffer2),
case catch (element(Val+1,Ntup2)) of
NewVal when atom(NewVal) -> {NewVal,Buffer3};
_ -> {{asn1_enum,Val},Buffer3}
end
end;
decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
{Val,Buffer2} = decode_integer(Buffer,C),
case catch (element(Val+1,NamedNumberTup)) of
NewVal when atom(NewVal) -> {NewVal,Buffer2};
_Error -> exit({error,{asn1,{decode_enumerated,{Val,NamedNumberTup}}}})
end.
%%===============================================================================
%%===============================================================================
%%===============================================================================
%% Bitstring value, ITU_T X.690 Chapter 8.5
%%===============================================================================
%%===============================================================================
%%===============================================================================
%%===============================================================================
%% encode bitstring value
%%===============================================================================
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
%% - [identifiers] where only named identifers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
%% - integer value representing the bitlist
%% C is constraint Len, only valid when identifiers
%% when the value is a list of {Unused,BinBits}, where
%% Unused = integer(),
%% BinBits = binary().
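%% Example call forms (sketch), assuming C carries a 'SizeConstraint' of 8 and
%% NamedBitList is [{a,0},{b,1},{c,2}]:
%%   encode_bit_string(C, [a,c], NamedBitList) % named identifiers
%%   encode_bit_string(C, [1,0,1,0,0,0,0,0], NamedBitList) % ones and zeroes
%%   encode_bit_string(C, {0,<<2#10100000>>}, NamedBitList) % {Unused,BinBits}
%%   encode_bit_string(C, 5, NamedBitList) % integer representing the bit list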
encode_bit_string(C,Bin={Unused,BinBits},NamedBitList) when integer(Unused),
binary(BinBits) ->
encode_bin_bit_string(C,Bin,NamedBitList);
%% when the value is a list of named bits
encode_bit_string(C, LoNB=[FirstVal | _RestVal], NamedBitList) when atom(FirstVal) ->
ToSetPos = get_all_bitposes(LoNB, NamedBitList, []),
BitList = make_and_set_list(ToSetPos,0),
encode_bit_string(C,BitList,NamedBitList);
encode_bit_string(C, BL=[{bit,_No} | _RestVal], NamedBitList) ->
ToSetPos = get_all_bitposes(BL, NamedBitList, []),
BitList = make_and_set_list(ToSetPos,0),
encode_bit_string(C,BitList,NamedBitList);
%% when the value is a list of ones and zeroes
% encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
% Bl1 =
% case NamedBitList of
% [] -> % dont remove trailing zeroes
% BitListValue;
% _ -> % first remove any trailing zeroes
% lists:reverse(lists:dropwhile(fun(0)->true;(1)->false end,
% lists:reverse(BitListValue)))
% end,
% BitList = [{bit,X} || X <- Bl1],
% %% BListLen = length(BitList),
% case get_constraint(C,'SizeConstraint') of
% 0 -> % fixed length
% []; % nothing to encode
% V when integer(V),V=<16 -> % fixed length 16 bits or less
% pad_list(V,BitList);
% V when integer(V) -> % fixed length 16 bits or more
% [align,pad_list(V,BitList)]; % should be another case for V >= 65537
% {Lb,Ub} when integer(Lb),integer(Ub) ->
% [encode_length({Lb,Ub},length(BitList)),align,BitList];
% no ->
% [encode_length(undefined,length(BitList)),align,BitList];
% Sc -> % extension marker
% [encode_length(Sc,length(BitList)),align,BitList]
% end;
encode_bit_string(C, BitListValue, NamedBitList) when list(BitListValue) ->
BitListToBinary =
%% fun that transforms a list of 1 and 0 to a tuple:
%% {UnusedBitsInLastByte, Binary}
fun([H|T],Acc,N,Fun) ->
Fun(T,(Acc bsl 1)+H,N+1,Fun);
([],Acc,N,_) ->
Unused = (8 - (N rem 8)) rem 8,
{Unused,<<Acc:N,0:Unused>>}
end,
UnusedAndBin =
case NamedBitList of
[] -> % don't remove trailing zeroes
BitListToBinary(BitListValue,0,0,BitListToBinary);
_ ->
BitListToBinary(lists:reverse(
lists:dropwhile(fun(0)->true;(1)->false end,
lists:reverse(BitListValue))),
0,0,BitListToBinary)
end,
encode_bin_bit_string(C,UnusedAndBin,NamedBitList);
%% when the value is an integer
encode_bit_string(C, IntegerVal, NamedBitList) when integer(IntegerVal)->
BitList = int_to_bitlist(IntegerVal),
encode_bit_string(C,BitList,NamedBitList);
%% when the value is a tuple
encode_bit_string(C,{Name,Val}, NamedBitList) when atom(Name) ->
encode_bit_string(C,Val,NamedBitList).
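%% Illustrative sketch (not part of the original module): with a fixed
%% SIZE(3) constraint and named bits a = 0, b = 1, c = 2, the named-bit,
%% bit-list and integer forms all reduce to the same short bit field, e.g.
%%   encode_bit_string([{'SizeConstraint',3}], [a,c], [{a,0},{b,1},{c,2}])
%%     -> {bits,3,2#101}
%% The [a,c] form is first turned into the bit list [1,0,1] via
%% get_all_bitposes/3 and make_and_set_list/2 and then encoded as above.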
%% encode_bin_bit_string/3, when the value is a tuple of Unused and BinBits.
%% Unused = integer(), i.e. the number of unused bits in the least significant byte of
%% BinBits = binary().
encode_bin_bit_string(C,UnusedAndBin={_Unused,_BinBits},NamedBitList) ->
Constr = get_constraint(C,'SizeConstraint'),
UnusedAndBin1 = {Unused1,Bin1} =
remove_trailing_bin(NamedBitList,UnusedAndBin,lower_bound(Constr)),
case Constr of
0 ->
[];
V when integer(V),V=<16 ->
{Unused2,Bin2} = pad_list(V,UnusedAndBin1),
<<BitVal:V,_:Unused2>> = Bin2,
{bits,V,BitVal};
V when integer(V) ->
[align, pad_list(V, UnusedAndBin1)];
{Lb,Ub} when integer(Lb),integer(Ub) ->
[encode_length({Lb,Ub},size(Bin1)*8 - Unused1),
align,UnusedAndBin1];
no ->
[encode_length(undefined,size(Bin1)*8 - Unused1),
align,UnusedAndBin1];
Sc ->
[encode_length(Sc,size(Bin1)*8 - Unused1),
align,UnusedAndBin1]
end.
remove_trailing_bin([], {Unused,Bin},_) ->
{Unused,Bin};
remove_trailing_bin(NamedNumberList, {_Unused,Bin},C) ->
Size = size(Bin)-1,
<<Bfront:Size/binary, LastByte:8>> = Bin,
%% clear the Unused bits to be sure
% LastByte1 = LastByte band (((1 bsl Unused) -1) bxor 255),
Unused1 = trailingZeroesInNibble(LastByte band 15),
Unused2 =
case Unused1 of
4 ->
4 + trailingZeroesInNibble(LastByte bsr 4);
_ -> Unused1
end,
case Unused2 of
8 ->
remove_trailing_bin(NamedNumberList,{0,Bfront},C);
_ ->
case C of
Int when integer(Int),Int > ((size(Bin)*8)-Unused2) ->
%% this padding see OTP-4353
pad_list(Int,{Unused2,Bin});
_ -> {Unused2,Bin}
end
end.
trailingZeroesInNibble(0) ->
4;
trailingZeroesInNibble(1) ->
0;
trailingZeroesInNibble(2) ->
1;
trailingZeroesInNibble(3) ->
0;
trailingZeroesInNibble(4) ->
2;
trailingZeroesInNibble(5) ->
0;
trailingZeroesInNibble(6) ->
1;
trailingZeroesInNibble(7) ->
0;
trailingZeroesInNibble(8) ->
3;
trailingZeroesInNibble(9) ->
0;
trailingZeroesInNibble(10) ->
1;
trailingZeroesInNibble(11) ->
0;
trailingZeroesInNibble(12) -> %#1100
2;
trailingZeroesInNibble(13) ->
0;
trailingZeroesInNibble(14) ->
1;
trailingZeroesInNibble(15) ->
0.
lower_bound({{Lb,_},_}) when integer(Lb) ->
Lb;
lower_bound({Lb,_}) when integer(Lb) ->
Lb;
lower_bound(C) ->
C.
%%%%%%%%%%%%%%%
%% The result is presented as a list of named bits (if possible)
%% else as a tuple {Unused,Bits}. Unused is the number of unused
%% bits, least significant bits in the last byte of Bits. Bits is
%% the BIT STRING represented as a binary.
%%
decode_compact_bit_string(Buffer, C, NamedNumberList) ->
case get_constraint(C,'SizeConstraint') of
0 -> % fixed length
{{8,0},Buffer};
V when integer(V),V=<16 -> %fixed length 16 bits or less
compact_bit_string(Buffer,V,NamedNumberList);
V when integer(V),V=<65536 -> %fixed length > 16 bits
Bytes2 = align(Buffer),
compact_bit_string(Bytes2,V,NamedNumberList);
V when integer(V) -> % V > 65536 => fragmented value
{Bin,Buffer2} = decode_fragmented_bits(Buffer,V),
case Buffer2 of
{0,_} -> {{0,Bin},Buffer2};
{U,_} -> {{8-U,Bin},Buffer2}
end;
{Lb,Ub} when integer(Lb),integer(Ub) ->
%% This case may demand decoding of fragmented length/value
{Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
Bytes3 = align(Bytes2),
compact_bit_string(Bytes3,Len,NamedNumberList);
no ->
%% This case may demand decoding of fragmented length/value
{Len,Bytes2} = decode_length(Buffer,undefined),
Bytes3 = align(Bytes2),
compact_bit_string(Bytes3,Len,NamedNumberList);
Sc ->
{Len,Bytes2} = decode_length(Buffer,Sc),
Bytes3 = align(Bytes2),
compact_bit_string(Bytes3,Len,NamedNumberList)
end.
%%%%%%%%%%%%%%%
%% The result is presented as a list of named bits (if possible)
%% else as a list of 0 and 1.
%%
decode_bit_string(Buffer, C, NamedNumberList) ->
case get_constraint(C,'SizeConstraint') of
{Lb,Ub} when integer(Lb),integer(Ub) ->
{Len,Bytes2} = decode_length(Buffer,{Lb,Ub}),
Bytes3 = align(Bytes2),
bit_list_or_named(Bytes3,Len,NamedNumberList);
no ->
{Len,Bytes2} = decode_length(Buffer,undefined),
Bytes3 = align(Bytes2),
bit_list_or_named(Bytes3,Len,NamedNumberList);
0 -> % fixed length
{[],Buffer}; % nothing to decode
V when integer(V),V=<16 -> % fixed length 16 bits or less
bit_list_or_named(Buffer,V,NamedNumberList);
V when integer(V),V=<65536 ->
Bytes2 = align(Buffer),
bit_list_or_named(Bytes2,V,NamedNumberList);
V when integer(V) ->
Bytes2 = align(Buffer),
{BinBits,_} = decode_fragmented_bits(Bytes2,V),
bit_list_or_named(BinBits,V,NamedNumberList);
Sc -> % extension marker
{Len,Bytes2} = decode_length(Buffer,Sc),
Bytes3 = align(Bytes2),
bit_list_or_named(Bytes3,Len,NamedNumberList)
end.
%% if no named bits are declared we will return a
%% {Unused,Bits}. Unused = integer(),
%% Bits = binary().
compact_bit_string(Buffer,Len,[]) ->
getbits_as_binary(Len,Buffer); % {{Unused,BinBits},NewBuffer}
compact_bit_string(Buffer,Len,NamedNumberList) ->
bit_list_or_named(Buffer,Len,NamedNumberList).
%% if no named bits are declared we will return a
%% BitList = [0 | 1]
bit_list_or_named(Buffer,Len,[]) ->
getbits_as_list(Len,Buffer);
%% if there are named bits declared we will return a named
%% BitList where the names are atoms and unnamed bits represented
%% as {bit,Pos}
%% BitList = [atom() | {bit,Pos}]
%% Pos = integer()
bit_list_or_named(Buffer,Len,NamedNumberList) ->
{BitList,Rest} = getbits_as_list(Len,Buffer),
{bit_list_or_named1(0,BitList,NamedNumberList,[]), Rest}.
bit_list_or_named1(Pos,[0|Bt],Names,Acc) ->
bit_list_or_named1(Pos+1,Bt,Names,Acc);
bit_list_or_named1(Pos,[1|Bt],Names,Acc) ->
case lists:keysearch(Pos,2,Names) of
{value,{Name,_}} ->
bit_list_or_named1(Pos+1,Bt,Names,[Name|Acc]);
_ ->
bit_list_or_named1(Pos+1,Bt,Names,[{bit,Pos}|Acc])
end;
bit_list_or_named1(_,[],_,Acc) ->
lists:reverse(Acc).
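%% Illustrative sketch (not part of the original module):
%%   bit_list_or_named1(0, [1,0,1], [{a,0},{b,1},{c,2}], []) -> [a,c]
%%   bit_list_or_named1(0, [0,1],   [{a,0}],             []) -> [{bit,1}]
%% i.e. set bits with a name in NamedNumberList are returned as atoms,
%% set bits without a name as {bit,Pos}.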
%%%%%%%%%%%%%%%
%%
int_to_bitlist(Int) when integer(Int), Int > 0 ->
[Int band 1 | int_to_bitlist(Int bsr 1)];
int_to_bitlist(0) ->
[].
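%% Illustrative sketch (not part of the original module): the bit list is
%% built least significant bit first, e.g.
%%   int_to_bitlist(5) -> [1,0,1]
%%   int_to_bitlist(0) -> []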
%%%%%%%%%%%%%%%%%%
%% get_all_bitposes([list of named bits to set], named_bit_db, []) ->
%% [sorted_list_of_bitpositions_to_set]
get_all_bitposes([{bit,ValPos}|Rest], NamedBitList, Ack) ->
get_all_bitposes(Rest, NamedBitList, [ValPos | Ack ]);
get_all_bitposes([Val | Rest], NamedBitList, Ack) ->
case lists:keysearch(Val, 1, NamedBitList) of
{value, {_ValName, ValPos}} ->
get_all_bitposes(Rest, NamedBitList, [ValPos | Ack]);
_ ->
exit({error,{asn1, {bitstring_namedbit, Val}}})
end;
get_all_bitposes([], _NamedBitList, Ack) ->
lists:sort(Ack).
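%% Illustrative sketch (not part of the original module):
%%   get_all_bitposes([c,a], [{a,0},{b,1},{c,2}], []) -> [0,2]
%% an unknown name exits with {asn1,{bitstring_namedbit,Name}}.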
%%%%%%%%%%%%%%%%%%
%% make_and_set_list([list of positions to set to 1], StartPos) ->
%% returns a list with a 1 at every position in SetPos and 0 elsewhere.
%% Positions are zero-based: the first element is position 0, the second is 1, etc.
%%
make_and_set_list([XPos|SetPos], XPos) ->
[1 | make_and_set_list(SetPos, XPos + 1)];
make_and_set_list([Pos|SetPos], XPos) ->
[0 | make_and_set_list([Pos | SetPos], XPos + 1)];
make_and_set_list([], _) ->
[].
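%% Illustrative sketch (not part of the original module):
%%   make_and_set_list([0,2,3], 0) -> [1,0,1,1]
%% the result stops at the highest position to set, so trailing zero bits
%% are not represented.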
%%%%%%%%%%%%%%%%%
%% pad_list(N,{Unused,Bin}) -> {PaddedUnused,PaddedBin}
%% returns the value padded with trailing zero bits up to a total length of N bits.
%% If the value contains more than N significant bits, an asn1 range_error
%% exit is generated.
pad_list(N,In={Unused,Bin}) ->
pad_list(N, size(Bin)*8 - Unused, In).
pad_list(N,Size,In={_,_}) when N < Size ->
exit({error,{asn1,{range_error,{bit_string,In}}}});
pad_list(N,Size,{Unused,Bin}) when N > Size, Unused > 0 ->
pad_list(N,Size+1,{Unused-1,Bin});
pad_list(N,Size,{_Unused,Bin}) when N > Size ->
pad_list(N,Size+1,{7,<<Bin/binary,0>>});
pad_list(N,N,In={_,_}) ->
In.
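%% Illustrative sketch (not part of the original module): 2#1010 held as
%% {4,<<2#10100000>>} (4 significant bits) padded to 8 bits keeps the same
%% byte but leaves no unused bits:
%%   pad_list(8, {4,<<2#10100000>>}) -> {0,<<2#10100000>>}
%%   pad_list(2, {4,<<2#10100000>>}) -> exit with range_error (too many significant bits)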
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% X.691:16
%% encode_octet_string(Constraint,ExtensionMarker,Val)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
encode_octet_string(C,Val) ->
encode_octet_string(C,false,Val).
encode_octet_string(C,Bool,{_Name,Val}) ->
encode_octet_string(C,Bool,Val);
encode_octet_string(_,true,_) ->
exit({error,{asn1,{'not_supported',extensionmarker}}});
encode_octet_string(C,false,Val) ->
case get_constraint(C,'SizeConstraint') of
0 ->
[];
1 ->
[V] = Val,
{bits,8,V};
2 ->
[V1,V2] = Val,
[{bits,8,V1},{bits,8,V2}];
Sv when Sv =<65535, Sv == length(Val) -> % fixed length
{octets,Val};
{Lb,Ub} ->
[encode_length({Lb,Ub},length(Val)),{octets,Val}];
Sv when list(Sv) ->
[encode_length({hd(Sv),lists:max(Sv)},length(Val)),{octets,Val}];
no ->
[encode_length(undefined,length(Val)),{octets,Val}]
end.
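%% Illustrative sketch (not part of the original module):
%%   encode_octet_string([{'SizeConstraint',2}], "AB")    -> [{bits,8,$A},{bits,8,$B}]
%%   encode_octet_string([{'SizeConstraint',5}], "hello") -> {octets,"hello"}
%% i.e. one- and two-octet fixed sizes are emitted as bit fields, larger
%% fixed sizes as whole octets, and variable sizes get a length prefix.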
decode_octet_string(Bytes,Range) ->
decode_octet_string(Bytes,Range,false).
decode_octet_string(Bytes,C,false) ->
case get_constraint(C,'SizeConstraint') of
0 ->
{[],Bytes};
1 ->
{B1,Bytes2} = getbits(Bytes,8),
{[B1],Bytes2};
2 ->
{Bs,Bytes2}= getbits(Bytes,16),
{binary_to_list(<<Bs:16>>),Bytes2};
{_,0} ->
{[],Bytes};
Sv when integer(Sv), Sv =<65535 -> % fixed length
getoctets_as_list(Bytes,Sv);
Sv when integer(Sv) -> % fragmented encoding
Bytes2 = align(Bytes),
decode_fragmented_octets(Bytes2,Sv);
{Lb,Ub} ->
{Len,Bytes2} = decode_length(Bytes,{Lb,Ub}),
getoctets_as_list(Bytes2,Len);
Sv when list(Sv) ->
{Len,Bytes2} = decode_length(Bytes,{hd(Sv),lists:max(Sv)}),
getoctets_as_list(Bytes2,Len);
no ->
{Len,Bytes2} = decode_length(Bytes,undefined),
getoctets_as_list(Bytes2,Len)
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Restricted char string types
%% (NumericString, PrintableString,VisibleString,IA5String,BMPString,UniversalString)
%% X.691:26 and X.680:34-36
%%encode_restricted_string(aligned,'BMPString',Constraints,Extension,Val)
encode_restricted_string(aligned,{Name,Val}) when atom(Name) ->
encode_restricted_string(aligned,Val);
encode_restricted_string(aligned,Val) when list(Val)->
[encode_length(undefined,length(Val)),{octets,Val}].
encode_known_multiplier_string(aligned,StringType,C,_Ext,{Name,Val}) when atom(Name) ->
encode_known_multiplier_string(aligned,StringType,C,false,Val);
encode_known_multiplier_string(aligned,StringType,C,_Ext,Val) ->
Result = chars_encode(C,StringType,Val),
NumBits = get_NumBits(C,StringType),
case get_constraint(C,'SizeConstraint') of
Ub when integer(Ub), Ub*NumBits =< 16 ->
case {StringType,Result} of
{'BMPString',{octets,Ol}} ->
[{bits,8,Oct}||Oct <- Ol];
_ ->
Result
end;
0 ->
[];
Ub when integer(Ub),Ub =<65535 -> % fixed length
[align,Result];
{Ub,Lb} ->
[encode_length({Ub,Lb},length(Val)),align,Result];
Vl when list(Vl) ->
[encode_length({lists:min(Vl),lists:max(Vl)},length(Val)),align,Result];
no ->
[encode_length(undefined,length(Val)),align,Result]
end.
decode_restricted_string(Bytes,aligned) ->
{Len,Bytes2} = decode_length(Bytes,undefined),
getoctets_as_list(Bytes2,Len).
decode_known_multiplier_string(Bytes,aligned,StringType,C,_Ext) ->
NumBits = get_NumBits(C,StringType),
case get_constraint(C,'SizeConstraint') of
Ub when integer(Ub), Ub*NumBits =< 16 ->
chars_decode(Bytes,NumBits,StringType,C,Ub);
Ub when integer(Ub),Ub =<65535 -> % fixed length
Bytes1 = align(Bytes),
chars_decode(Bytes1,NumBits,StringType,C,Ub);
0 ->
{[],Bytes};
Vl when list(Vl) ->
{Len,Bytes1} = decode_length(Bytes,{hd(Vl),lists:max(Vl)}),
Bytes2 = align(Bytes1),
chars_decode(Bytes2,NumBits,StringType,C,Len);
no ->
{Len,Bytes1} = decode_length(Bytes,undefined),
Bytes2 = align(Bytes1),
chars_decode(Bytes2,NumBits,StringType,C,Len);
{Lb,Ub}->
{Len,Bytes1} = decode_length(Bytes,{Lb,Ub}),
Bytes2 = align(Bytes1),
chars_decode(Bytes2,NumBits,StringType,C,Len)
end.
encode_NumericString(C,Val) ->
encode_known_multiplier_string(aligned,'NumericString',C,false,Val).
decode_NumericString(Bytes,C) ->
decode_known_multiplier_string(Bytes,aligned,'NumericString',C,false).
encode_PrintableString(C,Val) ->
encode_known_multiplier_string(aligned,'PrintableString',C,false,Val).
decode_PrintableString(Bytes,C) ->
decode_known_multiplier_string(Bytes,aligned,'PrintableString',C,false).
encode_VisibleString(C,Val) -> % equivalent with ISO646String
encode_known_multiplier_string(aligned,'VisibleString',C,false,Val).
decode_VisibleString(Bytes,C) ->
decode_known_multiplier_string(Bytes,aligned,'VisibleString',C,false).
encode_IA5String(C,Val) ->
encode_known_multiplier_string(aligned,'IA5String',C,false,Val).
decode_IA5String(Bytes,C) ->
decode_known_multiplier_string(Bytes,aligned,'IA5String',C,false).
encode_BMPString(C,Val) ->
encode_known_multiplier_string(aligned,'BMPString',C,false,Val).
decode_BMPString(Bytes,C) ->
decode_known_multiplier_string(Bytes,aligned,'BMPString',C,false).
encode_UniversalString(C,Val) ->
encode_known_multiplier_string(aligned,'UniversalString',C,false,Val).
decode_UniversalString(Bytes,C) ->
decode_known_multiplier_string(Bytes,aligned,'UniversalString',C,false).
%% end of known-multiplier strings for which PER visible constraints are
%% applied
encode_GeneralString(_C,Val) ->
encode_restricted_string(aligned,Val).
decode_GeneralString(Bytes,_C) ->
decode_restricted_string(Bytes,aligned).
encode_GraphicString(_C,Val) ->
encode_restricted_string(aligned,Val).
decode_GraphicString(Bytes,_C) ->
decode_restricted_string(Bytes,aligned).
encode_ObjectDescriptor(_C,Val) ->
encode_restricted_string(aligned,Val).
decode_ObjectDescriptor(Bytes) ->
decode_restricted_string(Bytes,aligned).
encode_TeletexString(_C,Val) -> % equivalent with T61String
encode_restricted_string(aligned,Val).
decode_TeletexString(Bytes,_C) ->
decode_restricted_string(Bytes,aligned).
encode_VideotexString(_C,Val) ->
encode_restricted_string(aligned,Val).
decode_VideotexString(Bytes,_C) ->
decode_restricted_string(Bytes,aligned).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% getBMPChars(Bytes,Len) ->{BMPcharList,RemainingBytes}
%%
getBMPChars(Bytes,1) ->
{O1,Bytes2} = getbits(Bytes,8),
{O2,Bytes3} = getbits(Bytes2,8),
if
O1 == 0 ->
{[O2],Bytes3};
true ->
{[{0,0,O1,O2}],Bytes3}
end;
getBMPChars(Bytes,Len) ->
getBMPChars(Bytes,Len,[]).
getBMPChars(Bytes,0,Acc) ->
{lists:reverse(Acc),Bytes};
getBMPChars(Bytes,Len,Acc) ->
{Octs,Bytes1} = getoctets_as_list(Bytes,2),
case Octs of
[0,O2] ->
getBMPChars(Bytes1,Len-1,[O2|Acc]);
[O1,O2]->
getBMPChars(Bytes1,Len-1,[{0,0,O1,O2}|Acc])
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% chars_encode(C,StringType,Value) -> ValueList
%%
%% encodes chars according to the PER rules, taking the PermittedAlphabet
%% constraint into account.
%% This function only encodes the value part, NOT the length.
chars_encode(C,StringType,Value) ->
case {StringType,get_constraint(C,'PermittedAlphabet')} of
{'UniversalString',{_,_Sv}} ->
exit({error,{asn1,{'not implemented',"UniversalString with PermittedAlphabet constraint"}}});
{'BMPString',{_,_Sv}} ->
exit({error,{asn1,{'not implemented',"BMPString with PermittedAlphabet constraint"}}});
_ ->
{NumBits,CharOutTab} = {get_NumBits(C,StringType),get_CharOutTab(C,StringType)},
chars_encode2(Value,NumBits,CharOutTab)
end.
chars_encode2([H|T],NumBits,{Min,Max,notab}) when H =< Max, H >= Min ->
[{bits,NumBits,H-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
chars_encode2([H|T],NumBits,{Min,Max,Tab}) when H =< Max, H >= Min ->
[{bits,NumBits,exit_if_false(H,element(H-Min+1,Tab))}|chars_encode2(T,NumBits,{Min,Max,Tab})];
chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,notab}) ->
%% no value range check here (ought to be, but very expensive)
% [{bits,NumBits,(A*B*C*D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
[{bits,NumBits,((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min}|chars_encode2(T,NumBits,{Min,Max,notab})];
chars_encode2([{A,B,C,D}|T],NumBits,{Min,Max,Tab}) ->
%% no value range check here (ought to be, but very expensive)
% [{bits,NumBits,element((A*B*C*D)-Min,Tab)}|chars_encode2(T,NumBits,{Min,Max,notab})];
[{bits,NumBits,exit_if_false({A,B,C,D},element(((((((A bsl 8)+B) bsl 8)+C) bsl 8)+D)-Min,Tab))}|chars_encode2(T,NumBits,{Min,Max,notab})];
chars_encode2([H|_T],_,{_,_,_}) ->
exit({error,{asn1,{illegal_char_value,H}}});
chars_encode2([],_,_) ->
[].
exit_if_false(V,false)->
exit({error,{asn1,{"illegal value according to Permitted alphabet constraint",V}}});
exit_if_false(_,V) ->V.
get_NumBits(C,StringType) ->
case get_constraint(C,'PermittedAlphabet') of
{'SingleValue',Sv} ->
charbits(length(Sv),aligned);
no ->
case StringType of
'IA5String' ->
charbits(128,aligned); % 16#00..16#7F
'VisibleString' ->
charbits(95,aligned); % 16#20..16#7E
'PrintableString' ->
charbits(74,aligned); % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z]
'NumericString' ->
charbits(11,aligned); % $ ,"0123456789"
'UniversalString' ->
32;
'BMPString' ->
16
end
end.
%%Maybe used later
%%get_MaxChar(C,StringType) ->
%% case get_constraint(C,'PermittedAlphabet') of
%% {'SingleValue',Sv} ->
%% lists:nth(length(Sv),Sv);
%% no ->
%% case StringType of
%% 'IA5String' ->
%% 16#7F; % 16#00..16#7F
%% 'VisibleString' ->
%% 16#7E; % 16#20..16#7E
%% 'PrintableString' ->
%% $z; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
%% 'NumericString' ->
%% $9; % $ ,"0123456789"
%% 'UniversalString' ->
%% 16#ffffffff;
%% 'BMPString' ->
%% 16#ffff
%% end
%% end.
%%Maybe used later
%%get_MinChar(C,StringType) ->
%% case get_constraint(C,'PermittedAlphabet') of
%% {'SingleValue',Sv} ->
%% hd(Sv);
%% no ->
%% case StringType of
%% 'IA5String' ->
%% 16#00; % 16#00..16#7F
%% 'VisibleString' ->
%% 16#20; % 16#20..16#7E
%% 'PrintableString' ->
%% $\s; % [$\s,$',$(,$),$+,$,,$-,$.,$/,"0123456789",$:,$=,$?,$A..$Z,$a..$z
%% 'NumericString' ->
%% $\s; % $ ,"0123456789"
%% 'UniversalString' ->
%% 16#00;
%% 'BMPString' ->
%% 16#00
%% end
%% end.
get_CharOutTab(C,StringType) ->
get_CharTab(C,StringType,out).
get_CharInTab(C,StringType) ->
get_CharTab(C,StringType,in).
get_CharTab(C,StringType,InOut) ->
case get_constraint(C,'PermittedAlphabet') of
{'SingleValue',Sv} ->
get_CharTab2(C,StringType,hd(Sv),lists:max(Sv),Sv,InOut);
no ->
case StringType of
'IA5String' ->
{0,16#7F,notab};
'VisibleString' ->
get_CharTab2(C,StringType,16#20,16#7F,notab,InOut);
'PrintableString' ->
Chars = lists:sort(
" '()+,-./0123456789:=?ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"),
get_CharTab2(C,StringType,hd(Chars),lists:max(Chars),Chars,InOut);
'NumericString' ->
get_CharTab2(C,StringType,16#20,$9," 0123456789",InOut);
'UniversalString' ->
{0,16#FFFFFFFF,notab};
'BMPString' ->
{0,16#FFFF,notab}
end
end.
get_CharTab2(C,StringType,Min,Max,Chars,InOut) ->
BitValMax = (1 bsl get_NumBits(C,StringType))-1,
if
Max =< BitValMax ->
{0,Max,notab};
true ->
case InOut of
out ->
{Min,Max,create_char_tab(Min,Chars)};
in ->
{Min,Max,list_to_tuple(Chars)}
end
end.
create_char_tab(Min,L) ->
list_to_tuple(create_char_tab(Min,L,0)).
create_char_tab(Min,[Min|T],V) ->
[V|create_char_tab(Min+1,T,V+1)];
create_char_tab(_Min,[],_V) ->
[];
create_char_tab(Min,L,V) ->
[false|create_char_tab(Min+1,L,V)].
%% This is very inefficient and should be moved to compile time
charbits(NumOfChars,aligned) ->
case charbits(NumOfChars) of
1 -> 1;
2 -> 2;
B when B =< 4 -> 4;
B when B =< 8 -> 8;
B when B =< 16 -> 16;
B when B =< 32 -> 32
end.
charbits(NumOfChars) when NumOfChars =< 2 -> 1;
charbits(NumOfChars) when NumOfChars =< 4 -> 2;
charbits(NumOfChars) when NumOfChars =< 8 -> 3;
charbits(NumOfChars) when NumOfChars =< 16 -> 4;
charbits(NumOfChars) when NumOfChars =< 32 -> 5;
charbits(NumOfChars) when NumOfChars =< 64 -> 6;
charbits(NumOfChars) when NumOfChars =< 128 -> 7;
charbits(NumOfChars) when NumOfChars =< 256 -> 8;
charbits(NumOfChars) when NumOfChars =< 512 -> 9;
charbits(NumOfChars) when NumOfChars =< 1024 -> 10;
charbits(NumOfChars) when NumOfChars =< 2048 -> 11;
charbits(NumOfChars) when NumOfChars =< 4096 -> 12;
charbits(NumOfChars) when NumOfChars =< 8192 -> 13;
charbits(NumOfChars) when NumOfChars =< 16384 -> 14;
charbits(NumOfChars) when NumOfChars =< 32768 -> 15;
charbits(NumOfChars) when NumOfChars =< 65536 -> 16;
charbits(NumOfChars) when integer(NumOfChars) ->
16 + charbits1(NumOfChars bsr 16).
charbits1(0) ->
0;
charbits1(NumOfChars) ->
1 + charbits1(NumOfChars bsr 1).
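%% Illustrative sketch (not part of the original module): in the aligned
%% variant the bit count per character is rounded up to a power of two, e.g.
%%   charbits(74) -> 7    charbits(74,aligned) -> 8   % PrintableString
%%   charbits(11) -> 4    charbits(11,aligned) -> 4   % NumericString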
chars_decode(Bytes,_,'BMPString',C,Len) ->
case get_constraint(C,'PermittedAlphabet') of
no ->
getBMPChars(Bytes,Len);
_ ->
exit({error,{asn1,
{'not implemented',
"BMPString with PermittedAlphabet constraint"}}})
end;
chars_decode(Bytes,NumBits,StringType,C,Len) ->
CharInTab = get_CharInTab(C,StringType),
chars_decode2(Bytes,CharInTab,NumBits,Len).
chars_decode2(Bytes,CharInTab,NumBits,Len) ->
chars_decode2(Bytes,CharInTab,NumBits,Len,[]).
chars_decode2(Bytes,_CharInTab,_NumBits,0,Acc) ->
{lists:reverse(Acc),Bytes};
chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
{Char,Bytes2} = getbits(Bytes,NumBits),
Result =
if
Char < 256 -> Char;
true ->
list_to_tuple(binary_to_list(<<Char:32>>))
end,
chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
% chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) when NumBits > 8 ->
% {Char,Bytes2} = getbits(Bytes,NumBits),
% Result = case minimum_octets(Char+Min) of
% [NewChar] -> NewChar;
% [C1,C2] -> {0,0,C1,C2};
% [C1,C2,C3] -> {0,C1,C2,C3};
% [C1,C2,C3,C4] -> {C1,C2,C3,C4}
% end,
% chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Result|Acc]);
chars_decode2(Bytes,{Min,Max,notab},NumBits,Len,Acc) ->
{Char,Bytes2} = getbits(Bytes,NumBits),
chars_decode2(Bytes2,{Min,Max,notab},NumBits,Len -1,[Char+Min|Acc]);
%% BMPString and UniversalString with PermittedAlphabet are currently not supported
chars_decode2(Bytes,{Min,Max,CharInTab},NumBits,Len,Acc) ->
{Char,Bytes2} = getbits(Bytes,NumBits),
chars_decode2(Bytes2,{Min,Max,CharInTab},NumBits,Len -1,[element(Char+1,CharInTab)|Acc]).
% X.691:17
encode_null(_) -> []; % encodes to nothing
encode_null({Name,Val}) when atom(Name) ->
encode_null(Val).
decode_null(Bytes) ->
{'NULL',Bytes}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% encode_object_identifier(Val) -> CompleteList
%% encode_object_identifier({Name,Val}) -> CompleteList
%% Val -> {Int1,Int2,...,IntN} % N >= 2
%% Name -> atom()
%% Int1 -> integer(0..2)
%% Int2 -> integer(0..39) when Int1 (0..1) else integer()
%% Int3-N -> integer()
%% CompleteList -> [{bits,8,Val}|{octets,Ol}|align|...]
%%
encode_object_identifier({Name,Val}) when atom(Name) ->
encode_object_identifier(Val);
encode_object_identifier(Val) ->
OctetList = e_object_identifier(Val),
Octets = list_to_binary(OctetList), % performs a flatten at the same time
[{debug,object_identifier},encode_length(undefined,size(Octets)),{octets,Octets}].
%% This code is copied from asn1_encode.erl (BER) and corrected and modified
e_object_identifier({'OBJECT IDENTIFIER',V}) ->
e_object_identifier(V);
e_object_identifier({Cname,V}) when atom(Cname),tuple(V) ->
e_object_identifier(tuple_to_list(V));
e_object_identifier({Cname,V}) when atom(Cname),list(V) ->
e_object_identifier(V);
e_object_identifier(V) when tuple(V) ->
e_object_identifier(tuple_to_list(V));
%% E1 = 0|1|2 and (E2 < 40 when E1 = 0|1)
e_object_identifier([E1,E2|Tail]) when E1 >= 0, E1 < 2, E2 < 40 ; E1==2 ->
Head = 40*E1 + E2, % weird
e_object_elements([Head|Tail],[]);
e_object_identifier(Oid=[_,_|_Tail]) ->
exit({error,{asn1,{'illegal_value',Oid}}}).
e_object_elements([],Acc) ->
lists:reverse(Acc);
e_object_elements([H|T],Acc) ->
e_object_elements(T,[e_object_element(H)|Acc]).
e_object_element(Num) when Num < 128 ->
Num;
%% must be changed to handle more than 2 octets
e_object_element(Num) -> %% when Num < ???
Left = ((Num band 2#11111110000000) bsr 7) bor 2#10000000,
Right = Num band 2#1111111 ,
[Left,Right].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% decode_object_identifier(Bytes) -> {ObjId,RemainingBytes}
%% ObjId -> {integer(),integer(),...} % at least 2 integers
%% RemainingBytes -> [integer()] when integer() (0..255)
decode_object_identifier(Bytes) ->
{Len,Bytes2} = decode_length(Bytes,undefined),
{Octs,Bytes3} = getoctets_as_list(Bytes2,Len),
[First|Rest] = dec_subidentifiers(Octs,0,[]),
Idlist = if
First < 40 ->
[0,First|Rest];
First < 80 ->
[1,First - 40|Rest];
true ->
[2,First - 80|Rest]
end,
{list_to_tuple(Idlist),Bytes3}.
dec_subidentifiers([H|T],Av,Al) when H >=16#80 ->
dec_subidentifiers(T,(Av bsl 7) + (H band 16#7F),Al);
dec_subidentifiers([H|T],Av,Al) ->
dec_subidentifiers(T,0,[(Av bsl 7) + H |Al]);
dec_subidentifiers([],_Av,Al) ->
lists:reverse(Al).
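%% Illustrative sketch (not part of the original module): the first two arcs
%% are folded into one subidentifier (40*E1 + E2) and each subidentifier is
%% emitted base-128 with a continuation bit, e.g.
%%   e_object_identifier({1,3,6,1})    -> [43,6,1]
%%   e_object_element(300)             -> [130,44]      % 2*128 + 44 = 300
%%   dec_subidentifiers([43,6,1],0,[]) -> [43,6,1]      % 43 splits into arcs 1 and 3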
get_constraint([{Key,V}],Key) ->
V;
get_constraint([],_Key) ->
no;
get_constraint(C,Key) ->
case lists:keysearch(Key,1,C) of
false ->
no;
{value,{_,V}} ->
V
end.
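%% Illustrative sketch (not part of the original module):
%%   get_constraint([{'SizeConstraint',8}], 'SizeConstraint')    -> 8
%%   get_constraint([{'SizeConstraint',8}], 'PermittedAlphabet') -> no
%%   get_constraint([], 'PermittedAlphabet')                     -> no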
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% complete(InList) -> ByteList
%% Takes a coded list with bits and bytes and converts it to a list of bytes
%% Should be applied as the last step when encoding a complete ASN.1 type
%%
% complete(L) ->
% case complete1(L) of
% {[],0} ->
% <<0>>;
% {Acc,0} ->
% lists:reverse(Acc);
% {[Hacc|Tacc],Acclen} -> % Acclen >0
% Rest = 8 - Acclen,
% NewHacc = Hacc bsl Rest,
% lists:reverse([NewHacc|Tacc])
% end.
% complete1(InList) when list(InList) ->
% complete1(InList,[]);
% complete1(InList) ->
% complete1([InList],[]).
% complete1([{debug,_}|T], Acc) ->
% complete1(T,Acc);
% complete1([H|T],Acc) when list(H) ->
% {NewH,NewAcclen} = complete1(H,Acc),
% complete1(T,NewH,NewAcclen);
% complete1([{0,Bin}|T],Acc,0) when binary(Bin) ->
% complete1(T,[Bin|Acc],0);
% complete1([{Unused,Bin}|T],Acc,0) when integer(Unused),binary(Bin) ->
% Size = size(Bin)-1,
% <<Bs:Size/binary,B>> = Bin,
% complete1(T,[(B bsr Unused),Bs|Acc],8-Unused);
% complete1([{Unused,Bin}|T],[Hacc|Tacc],Acclen) when integer(Unused),binary(Bin) ->
% Rest = 8 - Acclen,
% Used = 8 - Unused,
% case size(Bin) of
% 1 ->
% if
% Rest >= Used ->
% <<B:Used,_:Unused>> = Bin,
% complete1(T,[(Hacc bsl Used) + B|Tacc],
% (Acclen+Used) rem 8);
% true ->
% LeftOver = 8 - Rest - Unused,
% <<Val2:Rest,Val1:LeftOver,_:Unused>> = Bin,
% complete1(T,[Val1,(Hacc bsl Rest) + Val2|Tacc],
% (Acclen+Used) rem 8)
% end;
% N ->
% if
% Rest == Used ->
% N1 = N - 1,
% <<B:Rest,Bs:N1/binary,_:Unused>> = Bin,
% complete1(T,[Bs,(Hacc bsl Rest) + B|Tacc],0);
% Rest > Used ->
% N1 = N - 2,
% N2 = (8 - Rest) + Used,
% <<B1:Rest,Bytes:N1/binary,B2:N2,_:Unused>> = Bin,
% complete1(T,[B2,Bytes,(Hacc bsl Rest) + B1|Tacc],
% (Acclen + Used) rem 8);
% true -> % Rest < Used
% N1 = N - 1,
% N2 = Used - Rest,
% <<B1:Rest,Bytes:N1/binary,B2:N2,_:Unused>> = Bin,
% complete1(T,[B2,Bytes,(Hacc bsl Rest) + B1|Tacc],
% (Acclen + Used) rem 8)
% end
% end;
% %complete1([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
% % complete1([{octets,<<Val:N/unit:8>>}|T],Acc,Acclen);
% complete1([{octets,N,Val}|T],Acc,Acclen) when N =< 4 ,integer(Val) ->
% Newval = case N of
% 1 ->
% Val4 = Val band 16#FF,
% [Val4];
% 2 ->
% Val3 = (Val bsr 8) band 16#FF,
% Val4 = Val band 16#FF,
% [Val3,Val4];
% 3 ->
% Val2 = (Val bsr 16) band 16#FF,
% Val3 = (Val bsr 8) band 16#FF,
% Val4 = Val band 16#FF,
% [Val2,Val3,Val4];
% 4 ->
% Val1 = (Val bsr 24) band 16#FF,
% Val2 = (Val bsr 16) band 16#FF,
% Val3 = (Val bsr 8) band 16#FF,
% Val4 = Val band 16#FF,
% [Val1,Val2,Val3,Val4]
% end,
% complete1([{octets,Newval}|T],Acc,Acclen);
% complete1([{octets,Bin}|T],Acc,Acclen) when binary(Bin) ->
% Rest = 8 - Acclen,
% if
% Rest == 8 ->
% complete1(T,[Bin|Acc],0);
% true ->
% [Hacc|Tacc]=Acc,
% complete1(T,[Bin, Hacc bsl Rest|Tacc],0)
% end;
% complete1([{octets,Oct}|T],Acc,Acclen) when list(Oct) ->
% Rest = 8 - Acclen,
% if
% Rest == 8 ->
% complete1(T,[list_to_binary(Oct)|Acc],0);
% true ->
% [Hacc|Tacc]=Acc,
% complete1(T,[list_to_binary(Oct), Hacc bsl Rest|Tacc],0)
% end;
% complete1([{bit,Val}|T], Acc, Acclen) ->
% complete1([{bits,1,Val}|T],Acc,Acclen);
% complete1([{octet,Val}|T], Acc, Acclen) ->
% complete1([{octets,1,Val}|T],Acc,Acclen);
% complete1([{bits,N,Val}|T], Acc, 0) when N =< 8 ->
% complete1(T,[Val|Acc],N);
% complete1([{bits,N,Val}|T], [Hacc|Tacc], Acclen) when N =< 8 ->
% Rest = 8 - Acclen,
% if
% Rest >= N ->
% complete1(T,[(Hacc bsl N) + Val|Tacc],(Acclen+N) rem 8);
% true ->
% Diff = N - Rest,
% NewHacc = (Hacc bsl Rest) + (Val bsr Diff),
% Mask = element(Diff,{1,3,7,15,31,63,127,255}),
% complete1(T,[(Val band Mask),NewHacc|Tacc],(Acclen+N) rem 8)
% end;
% complete1([{bits,N,Val}|T], Acc, Acclen) -> % N > 8
% complete1([{bits,N-8,Val bsr 8},{bits,8,Val band 255}|T],Acc,Acclen);
% complete1([align|T],Acc,0) ->
% complete1(T,Acc,0);
% complete1([align|T],[Hacc|Tacc],Acclen) ->
% Rest = 8 - Acclen,
% complete1(T,[Hacc bsl Rest|Tacc],0);
% complete1([{octets,N,Val}|T],Acc,Acclen) when list(Val) -> % no security check here
% complete1([{octets,Val}|T],Acc,Acclen);
% complete1([],Acc,Acclen) ->
% {Acc,Acclen}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% complete(InList) -> ByteList
%% Takes a coded list with bits and bytes and converts it to a list of bytes
%% Should be applied as the last step when encoding a complete ASN.1 type
%%
complete(L) ->
case complete1(L) of
{[],[]} ->
<<0>>;
{Acc,[]} ->
Acc;
{Acc,Bacc} ->
[Acc|complete_bytes(Bacc)]
end.
%% This function builds the ugly form of lists [E1|E2] to avoid having to reverse it at the end.
%% This is done because it is efficient and because the result will always be sent on a port or
%% converted by means of list_to_binary/1.
complete1(InList) when list(InList) ->
complete1(InList,[],[]);
complete1(InList) ->
complete1([InList],[],[]).
complete1([],Acc,Bacc) ->
{Acc,Bacc};
complete1([H|T],Acc,Bacc) when list(H) ->
{NewH,NewBacc} = complete1(H,Acc,Bacc),
complete1(T,NewH,NewBacc);
complete1([{octets,Bin}|T],Acc,[]) ->
complete1(T,[Acc|Bin],[]);
complete1([{octets,Bin}|T],Acc,Bacc) ->
complete1(T,[Acc|[complete_bytes(Bacc),Bin]],[]);
complete1([{debug,_}|T], Acc,Bacc) ->
complete1(T,Acc,Bacc);
complete1([{bits,N,Val}|T],Acc,Bacc) ->
complete1(T,Acc,complete_update_byte(Bacc,Val,N));
complete1([{bit,Val}|T],Acc,Bacc) ->
complete1(T,Acc,complete_update_byte(Bacc,Val,1));
complete1([align|T],Acc,[]) ->
complete1(T,Acc,[]);
complete1([align|T],Acc,Bacc) ->
complete1(T,[Acc|complete_bytes(Bacc)],[]);
complete1([{0,Bin}|T],Acc,[]) when binary(Bin) ->
complete1(T,[Acc|Bin],[]);
complete1([{Unused,Bin}|T],Acc,[]) when integer(Unused),binary(Bin) ->
Size = size(Bin)-1,
<<Bs:Size/binary,B>> = Bin,
NumBits = 8-Unused,
complete1(T,[Acc|Bs],[[B bsr Unused]|NumBits]);
complete1([{Unused,Bin}|T],Acc,Bacc) when integer(Unused),binary(Bin) ->
Size = size(Bin)-1,
<<Bs:Size/binary,B>> = Bin,
NumBits = 8 - Unused,
Bf = complete_bytes(Bacc),
complete1(T,[Acc|[Bf,Bs]],[[B bsr Unused]|NumBits]).
complete_update_byte([],Val,Len) ->
complete_update_byte([[0]|0],Val,Len);
complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len == 8 ->
[[0,((Byte bsl Len) + Val) band 255|Bacc]|0];
complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) when NumBits + Len > 8 ->
Rem = 8 - NumBits,
Rest = Len - Rem,
complete_update_byte([[0,((Byte bsl Rem) + (Val bsr Rest)) band 255 |Bacc]|0],Val,Rest);
complete_update_byte([[Byte|Bacc]|NumBits],Val,Len) ->
[[((Byte bsl Len) + Val) band 255|Bacc]|NumBits+Len].
complete_bytes([[_Byte|Bacc]|0]) ->
lists:reverse(Bacc);
complete_bytes([[Byte|Bacc]|NumBytes]) ->
lists:reverse([(Byte bsl (8-NumBytes)) band 255|Bacc]);
complete_bytes([]) ->
[].
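%% Illustrative sketch (not part of the original module): bit fields are
%% accumulated into bytes and the last byte is left-aligned with zero padding, e.g.
%%   list_to_binary(complete([{bits,3,2#101},{bits,5,2#11111}])) -> <<2#10111111>>
%%   complete([]) -> <<0>>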
% complete_bytes(L) ->
% complete_bytes1(lists:reverse(L),[],[],0,0).
% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) when ((NumBits+B) rem 8) == 0 ->
% NewReplyAcc = [complete_bytes2([H|Acc],0)|ReplyAcc],
% complete_bytes1(T,[],NewReplyAcc,0,0);
% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) when NumFields == 7; (NumBits+B) div 8 > 0 ->
% Rem = (NumBits+B) rem 8,
% NewReplyAcc = [complete_bytes2([{V bsr Rem,B - Rem}|Acc],0)|ReplyAcc],
% complete_bytes1([{V,Rem}|T],[],NewReplyAcc,0,0);
% complete_bytes1([H={V,B}|T],Acc,ReplyAcc,NumBits,NumFields) ->
% complete_bytes1(T,[H|Acc],ReplyAcc,NumBits+B,NumFields+1);
% complete_bytes1([],[],ReplyAcc,_,_) ->
% lists:reverse(ReplyAcc);
% complete_bytes1([],Acc,ReplyAcc,NumBits,_) ->
% PadBits = case NumBits rem 8 of
% 0 -> 0;
% Rem -> 8 - Rem
% end,
% lists:reverse([complete_bytes2(Acc,PadBits)|ReplyAcc]).
% complete_bytes2([{V1,B1}],PadBits) ->
% <<V1:B1,0:PadBits>>;
% complete_bytes2([{V2,B2},{V1,B1}],PadBits) ->
% <<V1:B1,V2:B2,0:PadBits>>;
% complete_bytes2([{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
% <<V1:B1,V2:B2,V3:B3,0:PadBits>>;
% complete_bytes2([{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
% <<V1:B1,V2:B2,V3:B3,V4:B4,0:PadBits>>;
% complete_bytes2([{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,0:PadBits>>;
% complete_bytes2([{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,0:PadBits>>;
% complete_bytes2([{V7,B7},{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,V7:B7,0:PadBits>>;
% complete_bytes2([{V8,B8},{V7,B7},{V6,B6},{V5,B5},{V4,B4},{V3,B3},{V2,B2},{V1,B1}],PadBits) ->
% <<V1:B1,V2:B2,V3:B3,V4:B4,V5:B5,V6:B6,V7:B7,V8:B8,0:PadBits>>.
<|start_filename|>lib/gs/doc/src/examples/ex12.erl<|end_filename|>
-module(ex12).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([start/0,init/0]).
start() -> spawn(ex12, init, []).
init() ->
R=[{window,[{width,200},{height,200},{title,"grid"},{map, true}],
{grid, [{x,10},{y,10},{height,180},{width,180},{columnwidths,[80,60]},
{rows,{1, 20}}],
[{gridline,[{text,{1,"NAME"}},{text,{2,"PHONE"}},
{font,{screen,bold,12}},{row,1},{click,false}]},
{gridline,[{text,{1,"Adam"}},{text,{2,"1234"}},{row,2}]},
{gridline,[{text,{1,"Beata"}},{text,{2,"4321"}},{row,3}]},
{gridline,[{text,{1,"Thomas"}},{text,{2,"1432"}},{row,4}]},
{gridline,[{text,{1,"Bond"}},{text,{2,"007"}},{row,5}]},
{gridline,[{text,{1,"King"}},{text,{2,"112"}},{row,6}]},
{gridline,[{text,{1,"Eva"}},{text,{2,"4123"}},{row,7}]}]}}],
gs:create_tree(gs:start(),R),
loop().
loop() ->
receive
{gs,_Win,destroy,_Data,_Args} -> bye;
{gs,_Gridline,click,_Data,[Col,Row,Text|_]} ->
io:format("Click at col:~p row:~p text:~p~n",[Col,Row,Text]),
loop();
Msg ->
io:format("Got ~p~n",[Msg]),
loop()
end.
<|start_filename|>erts/emulator/test/trace_nif_SUITE_data/trace_nif.c<|end_filename|>
#include "erl_nif.h"
static int load(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
{
return 0;
}
static int reload(ErlNifEnv* env, void** priv_data, ERL_NIF_TERM load_info)
{
return 0;
}
static int upgrade(ErlNifEnv* env, void** priv_data, void** old_priv_data, ERL_NIF_TERM load_info)
{
return 0;
}
static void unload(ErlNifEnv* env, void* priv_data)
{
}
static ERL_NIF_TERM nif_0(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
return enif_make_tuple(env,2,
enif_make_atom(env,"ok"),
enif_make_list(env,0));
}
static ERL_NIF_TERM nif_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
return enif_make_tuple(env,2,
enif_make_atom(env,"ok"),
enif_make_list(env,1,argv[0]));
}
static ErlNifFunc nif_funcs[] =
{
{"nif", 0, nif_0},
{"nif", 1, nif_1}
};
ERL_NIF_INIT(trace_nif_SUITE,nif_funcs,load,reload,upgrade,unload)
<|start_filename|>lib/kernel/src/hipe_ext_format.hrl<|end_filename|>
%% hipe_x86_ext_format.hrl
%% Definitions for unified external object format
%% Currently: sparc, x86, amd64
%% Authors: <NAME>, <NAME>
-define(LOAD_ATOM,0).
-define(LOAD_ADDRESS,1).
-define(CALL_REMOTE,2).
-define(CALL_LOCAL,3).
-define(SDESC,4).
-define(X86ABSPCREL,5).
-define(TERM,0).
-define(BLOCK,1).
-define(SORTEDBLOCK,2).
-define(CONST_TYPE2EXT(T),
case T of
term -> ?TERM;
sorted_block -> ?SORTEDBLOCK;
block -> ?BLOCK
end).
-define(EXT2CONST_TYPE(E),
case E of
?TERM -> term;
?SORTEDBLOCK -> sorted_block;
?BLOCK -> block
end).
-define(EXT2PATCH_TYPE(E),
case E of
?LOAD_ATOM -> load_atom;
?LOAD_ADDRESS -> load_address;
?SDESC -> sdesc;
?X86ABSPCREL -> x86_abs_pcrel;
?CALL_REMOTE -> call_remote;
?CALL_LOCAL -> call_local
end).
-define(STACK_DESC(ExnRA, FSize, Arity, Live), {ExnRA, FSize, Arity, Live}).
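%% Illustrative sketch (not part of the original header): the macros map
%% symbolic tags to their external wire values and back, e.g.
%%   ?CONST_TYPE2EXT(sorted_block) evaluates to ?SORTEDBLOCK (2)
%%   ?EXT2PATCH_TYPE(2)            evaluates to call_remote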
<|start_filename|>lib/gs/doc/src/examples/ex3.erl<|end_filename|>
-module(ex3).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([init/0]).
init() ->
S = gs:start(),
W = gs:create(window,S,[{width,300},{height,200}]),
B1 = gs:create(button,W,[{label, {text,"Button1"}},{y,0}]),
B2 = gs:create(button,W,[{label, {text,"Button2"}},{y,40}]),
gs:config(W, {map,true}),
loop(B1,B2).
loop(B1,B2) ->
receive
{gs,B1,click,_Data,_Arg} -> % button 1 pressed
io:format("Button 1 pressed!~n",[]),
loop(B1,B2);
{gs,B2,click,_Data,_Arg} -> % button 2 pressed
io:format("Button 2 pressed!~n",[]),
loop(B1,B2)
end.
<|start_filename|>lib/tools/emacs/erlang-pkg.el<|end_filename|>
(define-package "erlang" "2.7.0"
"Erlang major mode"
'())
<|start_filename|>lib/percept/doc/src/ipc_tree.erl<|end_filename|>
-module(ipc_tree).
-export([go/1, init/2]).
go(N) ->
start(N, self()),
receive {_,stop} -> ok end.
start(Depth, ParentPid) ->
spawn(?MODULE, init, [Depth, ParentPid]).
init(0, ParentPid) ->
workload(5000),
ParentPid ! {self(),stop},
ok;
init(Depth, ParentPid) ->
Pid1 = spawn(?MODULE, init, [Depth - 1, self()]),
Pid2 = spawn(?MODULE, init, [Depth - 1, self()]),
main([Pid1,Pid2], ParentPid).
main(Pids, ParentPid) ->
workload(5000),
gather(Pids),
ParentPid ! {self(),stop},
ok.
gather([]) -> ok;
gather([Pid|Pids]) -> receive {Pid,stop} -> gather(Pids) end.
workload(0) -> ok;
workload(N) -> math:sin(2), workload(N - 1).
<|start_filename|>lib/erl_interface/src/decode/decode_atom.c<|end_filename|>
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1998-2013. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
#include <string.h>
#include "eidef.h"
#include "eiext.h"
#include "putget.h"
int ei_decode_atom(const char *buf, int *index, char *p)
{
return ei_decode_atom_as(buf, index, p, MAXATOMLEN, ERLANG_LATIN1, NULL, NULL);
}
int ei_decode_atom_as(const char *buf, int *index, char* p, int destlen,
erlang_char_encoding want_enc,
erlang_char_encoding* was_encp,
erlang_char_encoding* res_encp)
{
const char *s = buf + *index;
const char *s0 = s;
int len;
erlang_char_encoding got_enc;
switch (get8(s)) {
case ERL_ATOM_EXT:
len = get16be(s);
got_enc = ERLANG_LATIN1;
break;
case ERL_SMALL_ATOM_EXT:
len = get8(s);
got_enc = ERLANG_LATIN1;
break;
case ERL_ATOM_UTF8_EXT:
len = get16be(s);
got_enc = ERLANG_UTF8;
break;
case ERL_SMALL_ATOM_UTF8_EXT:
len = get8(s);
got_enc = ERLANG_UTF8;
break;
default:
return -1;
}
if ((want_enc & got_enc) || want_enc == ERLANG_ASCII) {
int i, found_non_ascii = 0;
if (len >= destlen)
return -1;
for (i=0; i<len; i++) {
if (s[i] & 0x80) found_non_ascii = 1;
if (p) p[i] = s[i];
}
if (p) p[len] = 0;
if (want_enc == ERLANG_ASCII && found_non_ascii) {
return -1;
}
if (res_encp) {
*res_encp = found_non_ascii ? got_enc : ERLANG_ASCII;
}
}
else {
int plen = (got_enc == ERLANG_LATIN1) ?
latin1_to_utf8(p, s, len, destlen-1, res_encp) :
utf8_to_latin1(p, s, len, destlen-1, res_encp);
if (plen < 0) return -1;
if (p) p[plen] = 0;
}
if (was_encp) {
*was_encp = got_enc;
}
s += len;
*index += s-s0;
return 0;
}
int utf8_to_latin1(char* dst, const char* src, int slen, int destlen,
erlang_char_encoding* res_encp)
{
const char* const dst_start = dst;
const char* const dst_end = dst + destlen;
int found_non_ascii = 0;
while (slen > 0) {
if (dst >= dst_end) return -1;
if ((src[0] & 0x80) == 0) {
if (dst_start) {
*dst = *src;
}
++dst;
++src;
--slen;
}
else if (slen > 1 &&
(src[0] & 0xFE) == 0xC2 &&
(src[1] & 0xC0) == 0x80) {
if (dst_start) {
*dst = (char) ((src[0] << 6) | (src[1] & 0x3F));
}
++dst;
src += 2;
slen -= 2;
found_non_ascii = 1;
}
else return -1;
}
if (res_encp) {
*res_encp = found_non_ascii ? ERLANG_LATIN1 : ERLANG_ASCII;
}
return dst - dst_start;
}
int latin1_to_utf8(char* dst, const char* src, int slen, int destlen,
erlang_char_encoding* res_encp)
{
const char* const src_end = src + slen;
const char* const dst_start = dst;
const char* const dst_end = dst + destlen;
int found_non_ascii = 0;
while (src < src_end) {
if (dst >= dst_end) return -1;
if ((src[0] & 0x80) == 0) {
if (dst_start) {
*dst = *src;
}
++dst;
}
else {
if (dst_start) {
unsigned char ch = *src;
dst[0] = 0xC0 | (ch >> 6);
dst[1] = 0x80 | (ch & 0x3F);
}
dst += 2;
found_non_ascii = 1;
}
++src;
}
if (res_encp) {
*res_encp = found_non_ascii ? ERLANG_UTF8 : ERLANG_ASCII;
}
return dst - dst_start;
}
int ei_internal_get_atom(const char** bufp, char* p,
erlang_char_encoding* was_encp)
{
int ix = 0;
if (ei_decode_atom_as(*bufp, &ix, p, MAXATOMLEN_UTF8, ERLANG_UTF8, was_encp, NULL) < 0)
return -1;
*bufp += ix;
return 0;
}
<|start_filename|>lib/gs/doc/src/examples/ex16.erl<|end_filename|>
-module(ex16).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/3 $ ').
-export([start/0,init/0]).
start() -> spawn(ex16, init, []).
init() ->
I=gs:start(),
Win=gs:create(window, I,
[{width, 200},{height, 200},
{title,"Default Demo"},{map, true}]),
gs:create(canvas, can1,Win,
[{x,0},{y, 0},{width,200},{height,200},
{default,text,{font,{courier,bold,19}}},
{default,text,{fg,blue}},
{default,rectangle,{fill,red}},{default,text,{text,"Pow!"}},
{default,oval,{fill,green}}]),
{A,B,C} = erlang:now(),
random:seed(A,B,C),
loop().
loop() ->
receive
{gs,_Id,destroy,_Data,_Arg} -> bye
after 500 ->
XY = {random:uniform(200),random:uniform(200)},
draw(random:uniform(3),XY),
loop()
end.
draw(1,XY) ->
gs:create(text,can1,[{coords,[XY]}]);
draw(2,XY) ->
XY2 = {random:uniform(200),random:uniform(200)},
gs:create(rectangle,can1,[{coords,[XY,XY2]}]);
draw(3,XY) ->
XY2 = {random:uniform(200),random:uniform(200)},
gs:create(oval,can1,[{coords,[XY,XY2]}]).
<|start_filename|>lib/gs/doc/src/examples/ex9.erl<|end_filename|>
-module(ex9).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([start/0,init/1]).
start() ->
spawn(ex9, init, [self()]),
receive
{entry_reply, Reply} -> Reply
end.
init(Pid) ->
S = gs:start(),
Win = gs:create(window,S,[{title,"Entry Demo"},
{width,150},{height,100}]),
gs:create(label,Win,[{label,{text,"What's your name?"}},
{width,150}]),
gs:create(entry,entry,Win,[{x,10},{y,30},{width,130},
{keypress,true}]),
gs:create(button,ok,Win,[{width,45},{y,60},{x,10},
{label,{text,"Ok"}}]),
gs:create(button,cancel,Win,[{width,60},{y,60},{x,80},
{label,{text,"Cancel"}}]),
gs:config(Win,{map,true}),
loop(Pid).
loop(Pid) ->
receive
{gs,entry,keypress,_,['Return'|_]} ->
Text=gs:read(entry,text),
Pid ! {entry_reply,{name,Text}};
{gs,entry,keypress,_,_} -> % all other keypresses
loop(Pid);
{gs,ok,click,_,_} ->
Text=gs:read(entry,text),
Pid ! {entry_reply,{name,Text}};
{gs,cancel,click,_,_} ->
Pid ! {entry_reply,cancel};
X ->
io:format("Got X=~w~n",[X]),
loop(Pid)
end.
<|start_filename|>lib/erl_interface/src/decode/decode_long.c<|end_filename|>
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1998-2009. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
#include "eidef.h"
#include "eiext.h"
#include "putget.h"
#ifndef EI_64BIT
int ei_decode_long(const char *buf, int *index, long *p)
{
const char *s = buf + *index;
const char *s0 = s;
long n;
int arity;
switch (get8(s)) {
case ERL_SMALL_INTEGER_EXT:
n = get8(s);
break;
case ERL_INTEGER_EXT:
n = get32be(s);
break;
case ERL_SMALL_BIG_EXT:
arity = get8(s);
goto decode_big;
case ERL_LARGE_BIG_EXT:
arity = get32be(s);
decode_big:
{
int sign = get8(s);
int i;
unsigned long u = 0;
/* Little Endian, and n always positive, except for LONG_MIN */
for (i = 0; i < arity; i++) {
if (i < 4) {
u |= get8(s) << (i * 8);
} else if (get8(s) != 0) {
return -1; /* All but first byte have to be 0 */
}
}
/* check for overflow */
if (sign) {
if (u > 0x80000000UL) {
return -1;
}
n = -((long)u);
} else {
if (u > 0x7FFFFFFF) {
return -1;
}
n = (long)u;
}
}
break;
default:
return -1;
}
if (p) *p = n;
*index += s-s0;
return 0;
}
#endif /* !EI_64BIT */
<|start_filename|>lib/percept/doc/src/img_esi.erl<|end_filename|>
-module(img_esi).
-export([image/3]).
image(SessionID, _Env, _Input) ->
mod_esi:deliver(SessionID, header()),
Binary = my_image(),
mod_esi:deliver(SessionID, binary_to_list(Binary)).
my_image() ->
Im = egd:create(300,20),
Black = egd:color({0,0,0}),
Red = egd:color({255,0,0}),
egd:filledRectangle(Im, {30,14}, {270,19}, Red),
egd:rectangle(Im, {30,14}, {270,19}, Black),
Filename = filename:join([code:priv_dir(percept), "fonts", "6x11_latin1.wingsfont"]),
Font = egd_font:load(Filename),
egd:text(Im, {30, 0}, Font, "egd with esi callback", Black),
Bin = egd:render(Im, png),
egd:destroy(Im),
Bin.
header() ->
"Content-Type: image/png\r\n\r\n".
<|start_filename|>lib/gs/doc/src/examples/ex4.erl<|end_filename|>
-module(ex4).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([init/0]).
init() ->
S = gs:start(),
gs:create(window,win1,S,[{width,300},{height,200}]),
gs:create(button,b1,win1,[{label, {text,"Button1"}},{y,0}]),
gs:create(button,b2,win1,[{label, {text,"Button2"}},{y,40}]),
gs:config(win1, {map,true}),
loop(). %% look, no args!
loop() ->
receive
{gs,b1,click,_,_} -> % button 1 pressed
io:format("Button 1 pressed!~n",[]),
loop();
{gs,b2,click,_,_} -> % button 2 pressed
io:format("Button 2 pressed!~n",[]),
loop()
end.
<|start_filename|>lib/gs/doc/src/examples/ex11.erl<|end_filename|>
-module(ex11).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/3 $ ').
-export([start/0,init/0]).
start() ->
spawn(ex11,init,[]).
init() ->
I= gs:start(),
W= gs:window(I,[{title,"Color Demo"},
{width,300},{height,195}]),
B=gs:button(W,[{label,{image,"die_icon"}},{x,271},{y,166},
{width,30}]),
gs:config(B,[{bg,yellow},{fg,hotpink1},{data,quit}]),
gs:scale(W,[{text,"Red"},{y,0},{range,{0,255}},
{orient,horizontal},
{height,65},{data,red},{pos,42}]),
gs:scale(W,[{text,"Blue"},{y,65},{range,{0,255}},
{orient,horizontal},
{height,65},{data,blue},{pos,42}]),
gs:scale(W,[{text,"Green"},{y,130},{range,{0,255}},
{orient,horizontal},
{height,65},{data,green},{pos,42}]),
gs:config(W,{map,true}),
loop(W,0,0,0).
loop(W,R,G,B) ->
gs:config(W,{bg,{R,G,B}}),
receive
{gs,_,click,red,[New_R|_]} ->
loop(W,New_R,G,B);
{gs,_,click,green,[New_G|_]} ->
loop(W,R,New_G,B);
{gs,_,click,blue,[New_B|_]} ->
loop(W,R,G,New_B);
{gs,_,click,quit,_} ->
true;
{gs,W,destroy,_,_} ->
true
end.
<|start_filename|>lib/tools/examples/xref_examples.erl<|end_filename|>
-module(xref_examples).
-export([script/0]).
%% Used at Erlang/OTP for finding undefined functions and unused local
%% functions. The output is written to the two files ${HOME}/undefined.txt
%% and ${HOME}/unused_locals.txt.
script() ->
Root = code:root_dir(),
Dir = os:getenv("HOME"),
Server = s,
xref:start(Server),
{ok, _Relname} = xref:add_release(Server, code:lib_dir(), {name,otp}),
%% Exclude undefined functions in some modules...
Exclude = "(CORBA|Cos|Orber|Puller|Pusher|"
"StackModule|oe_Cos|mnesia).*_impl",
UndefS = "XC || (XU - X - B)",
Q = io_lib:format("Undef = ~s,"
"Excluded = ~p:_/_,"
"Undef - Undef || Excluded",
[UndefS, Exclude]),
{ok, Undef} = xref:q(Server, lists:flatten(Q)),
{ok, NotCalled} = xref:analyze(Server, locals_not_used),
dump("%% " ++ Root ++
"\n%% Undefined external functions." ++
"\n%% The second MFA is the undefined function." ++
"\n%% Functions in modules matching the following "
"regular expression have been skipped:" ++
"\n%% " ++ Exclude,
filename:join(Dir, "undefined.txt"),
Undef),
dump("%% " ++ Root ++ "\n%% Unused local functions.",
filename:join(Dir, "unused_locals.txt"),
NotCalled),
catch xref:stop(Server),
halt().
dump(H, F, T) ->
{ok, IoDev} = file:open(F,[write]),
io:format(IoDev, "~s~n", [H]),
io:format(IoDev, "~p.~n", [T]),
file:close(IoDev).
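%% Illustrative invocation (an assumption, not from the original file):
%% the script is meant to be run non-interactively, e.g.
%%   erl -noshell -s xref_examples script
%% which writes the two result files and then halts the node.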
<|start_filename|>lib/gs/doc/src/examples/ex10.erl<|end_filename|>
-module(ex10).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([start/0, init/3]).
start() ->
start("Pick a fruit:",
[apple, banana, lemon, orange, strawberry,
mango, kiwi, pear, cherry,pineapple,peach,apricot]).
start(Text,Items) ->
spawn(ex10,init,[self(),Text,Items]),
receive
{browser,Result} -> Result
end.
init(Pid,Text,Items) ->
S=gs:start(),
Win=gs:window(S,[{width,250},{height,270},
{title,"Browser"}]),
Lbl=gs:label(Win,[{label,{text,Text}},{width,250}]),
Entry=gs:entry(Win,[{y,35},{width,240},{x,5},
{keypress,true},
{setfocus,true}]),
Lb=gs:listbox(Win,[{x,5},{y,65},{width,160},
{height,195},{vscroll,right},
{click,true},{doubleclick,true}]),
Ok=gs:button(Win,[{label,{text,"OK"}},
{width,40},{x,185},{y,175}]),
Cancel=gs:button(Win,[{label,{text,"Cancel"}},
{x,175},{y,225},{width,65}]),
gs:config(Lb,[{items,Items}]),
gs:config(Win,{map,true}),
browser_loop(Pid,Ok,Cancel,Entry,Lb).
browser_loop(Pid,Ok,Cancel,Entry,Lb) ->
receive
{gs,Ok,click,_,_} ->
Txt=gs:read(Entry,text),
Pid ! {browser,{ok,Txt}};
{gs,Cancel,click,_,_} ->
Pid ! {browser,cancel};
{gs,Entry,keypress,_,['Return'|_]} ->
Txt=gs:read(Entry,text),
Pid ! {browser,{ok,Txt}};
{gs,Entry,keypress,_,_} ->
browser_loop(Pid,Ok,Cancel,Entry,Lb);
{gs,Lb,click,_,[Idx, Txt|_]} ->
gs:config(Entry,{text,Txt}),
browser_loop(Pid,Ok,Cancel,Entry,Lb);
{gs,Lb,doubleclick,_,[Idx, Txt|_]} ->
Pid ! {browser,{ok,Txt}};
{gs,_,destroy,_,_} ->
Pid ! {browser,cancel};
X ->
io:format("Got X=~w~n",[X]),
browser_loop(Pid,Ok,Cancel,Entry,Lb)
end.
<|start_filename|>lib/gs/doc/src/examples/ex6.erl<|end_filename|>
-module(ex6).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([start/0,init/0]).
start() ->
spawn(ex6,init,[]).
init() ->
S = gs:start(),
W = gs:create(window,S,[{map,true},{keypress,true},
{buttonpress,true},{motion,true}]),
gs:create(button,W,[{label,{text,"PressMe"}},{enter,true},
{leave,true}]),
event_loop().
event_loop() ->
receive
X ->
io:format("Got event: ~w~n",[X]),
event_loop()
end.
<|start_filename|>lib/erl_interface/src/encode/encode_bignum.c<|end_filename|>
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 2002-2009. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
#include "eidef.h"
#if defined(HAVE_GMP_H) && defined(HAVE_LIBGMP)
#include <gmp.h>
#include "eidef.h"
#include "eiext.h"
#include "putget.h"
#include "ei_x_encode.h"
int ei_encode_bignum(char *buf, int *index, mpz_t obj)
{
char *s = buf + *index;
char *s0 = s;
size_t count;
int mpz_sign = mpz_sgn(obj);
/*
* FIXME we could code into ERL_[SMALL_]INTEGER_EXT but to make
* this code simple for now we always code into ERL_SMALL_BIG_EXT
*/
if (mpz_sign == 0) { /* Special case, bignum is zero */
if (!buf) s += 2;
else {
put8(s,ERL_SMALL_INTEGER_EXT);
put8(s,0);
}
} else {
if (!buf) {
int numb = 8; /* # bits in each external format limb */
s += (mpz_sizeinbase(obj, 2) + numb-1) / numb;
} else {
char *arityp;
put8(s,ERL_LARGE_BIG_EXT);
arityp = s; /* fill in later */
s += 4;
put8(s, mpz_sign == 1); /* save sign separately */
mpz_export(s, &count, -1, 1, 0, 0, obj);
s += count;
put32le(arityp, count);
}
}
*index += s-s0;
return 0;
}
int ei_x_encode_bignum(ei_x_buff* x, mpz_t n)
{
int i = x->index;
ei_encode_bignum(NULL, &i, n);
if (!x_fix_buff(x, i))
return -1;
return ei_encode_bignum(x->buff, &x->index, n);
}
#endif /* HAVE_GMP_H && HAVE_LIBGMP */
<|start_filename|>lib/percept/doc/src/img.erl<|end_filename|>
-module(img).
-export([do/0]).
do() ->
Im = egd:create(200,200),
Red = egd:color({255,0,0}),
Green = egd:color({0,255,0}),
Blue = egd:color({0,0,255}),
Black = egd:color({0,0,0}),
Yellow = egd:color({255,255,0}),
% Line and fillRectangle
egd:filledRectangle(Im, {20,20}, {180,180}, Red),
egd:line(Im, {0,0}, {200,200}, Black),
egd:save(egd:render(Im, png), "/home/egil/test1.png"),
egd:filledEllipse(Im, {45, 60}, {55, 70}, Yellow),
egd:filledEllipse(Im, {145, 60}, {155, 70}, Blue),
egd:save(egd:render(Im, png), "/home/egil/test2.png"),
R = 80,
X0 = 99,
Y0 = 99,
Pts = [ { X0 + trunc(R*math:cos(A*math:pi()*2/360)),
Y0 + trunc(R*math:sin(A*math:pi()*2/360))
} || A <- lists:seq(0,359,5)],
lists:map(
fun({X,Y}) ->
egd:rectangle(Im, {X-5, Y-5}, {X+5,Y+5}, Green)
end, Pts),
egd:save(egd:render(Im, png), "/home/egil/test3.png"),
% Text
Filename = filename:join([code:priv_dir(percept), "fonts", "6x11_latin1.wingsfont"]),
Font = egd_font:load(Filename),
{W,H} = egd_font:size(Font),
String = "egd says hello",
Length = length(String),
egd:text(Im, {round(100 - W*Length/2), 200 - H - 5}, Font, String, Black),
egd:save(egd:render(Im, png), "/home/egil/test4.png"),
egd:destroy(Im).
<|start_filename|>system/doc/tutorial/port_driver.c<|end_filename|>
/* port_driver.c */
#include <stdio.h>
#include "erl_driver.h"
typedef struct {
ErlDrvPort port;
} example_data;
static ErlDrvData example_drv_start(ErlDrvPort port, char *buff)
{
example_data* d = (example_data*)driver_alloc(sizeof(example_data));
d->port = port;
return (ErlDrvData)d;
}
static void example_drv_stop(ErlDrvData handle)
{
driver_free((char*)handle);
}
static void example_drv_output(ErlDrvData handle, char *buff,
ErlDrvSizeT bufflen)
{
example_data* d = (example_data*)handle;
char fn = buff[0], arg = buff[1], res;
if (fn == 1) {
res = foo(arg);
} else if (fn == 2) {
res = bar(arg);
}
driver_output(d->port, &res, 1);
}
ErlDrvEntry example_driver_entry = {
NULL, /* F_PTR init, called when driver is loaded */
example_drv_start, /* L_PTR start, called when port is opened */
example_drv_stop, /* F_PTR stop, called when port is closed */
example_drv_output, /* F_PTR output, called when erlang has sent */
NULL, /* F_PTR ready_input, called when input descriptor ready */
NULL, /* F_PTR ready_output, called when output descriptor ready */
"example_drv", /* char *driver_name, the argument to open_port */
NULL, /* F_PTR finish, called when unloaded */
NULL, /* void *handle, Reserved by VM */
NULL, /* F_PTR control, port_command callback */
NULL, /* F_PTR timeout, reserved */
NULL, /* F_PTR outputv, reserved */
NULL, /* F_PTR ready_async, only for async drivers */
NULL, /* F_PTR flush, called when port is about
to be closed, but there is data in driver
queue */
NULL, /* F_PTR call, much like control, sync call
to driver */
NULL, /* F_PTR event, called when an event selected
by driver_event() occurs. */
ERL_DRV_EXTENDED_MARKER, /* int extended marker, Should always be
set to indicate driver versioning */
ERL_DRV_EXTENDED_MAJOR_VERSION, /* int major_version, should always be
set to this value */
ERL_DRV_EXTENDED_MINOR_VERSION, /* int minor_version, should always be
set to this value */
0, /* int driver_flags, see documentation */
NULL, /* void *handle2, reserved for VM use */
NULL, /* F_PTR process_exit, called when a
monitored process dies */
NULL /* F_PTR stop_select, called to close an
event object */
};
DRIVER_INIT(example_drv) /* must match name in driver_entry */
{
return &example_driver_entry;
}
<|start_filename|>lib/kernel/test/code_SUITE_data/upgrade_client.erl<|end_filename|>
-module(upgrade_client).
-export([run/5]).
%%-define(line, io:format("~s:~p\n", [?MODULE,?LINE]),).
-define(line,).
run(Dir, Upgradee1, Upgradee2, Other1, Other2) ->
%% Load version 1 of upgradee
code_SUITE:compile_load(upgradee, Dir, 1, Upgradee1),
?line 1 = upgradee:exp1(),
?line 1 = upgradee:exp1exp2(),
?line 1 = upgradee:exp1loc2(),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1()),
?line {'EXIT',{undef,_}} = (catch upgradee:exp2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1exp2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1loc2()),
P = spawn_link(upgradee,dispatch_loop,[]),
?line 1 = proxy_call(P, local, exp1),
?line 1 = proxy_call(P, local, loc1),
?line 1 = proxy_call(P, local, exp1exp2),
?line 1 = proxy_call(P, local, exp1loc2),
?line 1 = proxy_call(P, local, loc1exp2),
?line 1 = proxy_call(P, local, loc1loc2),
?line 1 = proxy_call(P, external, exp1),
?line 1 = proxy_call(P, external, exp1exp2),
?line 1 = proxy_call(P, external, exp1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc2),
?line {cannot_compile,1} = proxy_call(P, local, exp2),
?line {cannot_compile,1} = proxy_call(P, local, loc2),
?line {'EXIT',{undef,_}} = (catch other:exp1()),
?line {'EXIT',{undef,_}} = (catch other:loc1()),
?line {'EXIT',{undef,_}} = (catch other:exp1loc2()),
?line {'EXIT',{undef,_}} = (catch other:exp1exp2()),
?line {'EXIT',{undef,_}} = (catch other:loc1exp2()),
?line {'EXIT',{undef,_}} = (catch other:loc1loc2()),
?line {'EXIT',{undef,_}} = (catch other:exp2()),
?line {'EXIT',{undef,_}} = (catch other:loc2()),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp1),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc2),
%%
%% Load version 1 of other
%%
code_SUITE:compile_load(other, Dir, 1, Other1),
?line 1 = other:exp1(),
?line 1 = other:exp1loc2(),
?line 1 = other:exp1exp2(),
?line {'EXIT',{undef,_}} = (catch other:loc1()),
?line {'EXIT',{undef,_}} = (catch other:loc1exp2()),
?line {'EXIT',{undef,_}} = (catch other:loc1loc2()),
?line {'EXIT',{undef,_}} = (catch other:exp2()),
?line {'EXIT',{undef,_}} = (catch other:loc2()),
?line 1 = proxy_call(P, other, exp1),
?line 1 = proxy_call(P, other, exp1loc2),
?line 1 = proxy_call(P, other, exp1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc2),
%%
%% Load version 2 of upgradee
%%
code_SUITE:compile_load(upgradee, Dir, 2, Upgradee2),
?line 2 = upgradee:exp2(),
?line 2 = upgradee:exp1exp2(),
?line 2 = upgradee:loc1exp2(),
?line {'EXIT',{undef,_}} = (catch upgradee:exp1()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1()),
?line {'EXIT',{undef,_}} = (catch upgradee:exp1loc2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1loc2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc2()),
?line 1 = proxy_call(P, local, exp1),
?line 1 = proxy_call(P, local, loc1),
?line 1 = proxy_call(P, local, exp1exp2),
?line 1 = proxy_call(P, local, exp1loc2),
?line 1 = proxy_call(P, local, loc1exp2),
?line 1 = proxy_call(P, local, loc1loc2),
?line {cannot_compile,1} = proxy_call(P, local, exp2),
?line {cannot_compile,1} = proxy_call(P, local, loc2),
?line 2 = proxy_call(P, external, exp1exp2),
?line 2 = proxy_call(P, external, loc1exp2),
?line 2 = proxy_call(P, external, exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp1),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc2),
?line 1 = other:exp1(),
?line 1 = other:exp1loc2(),
?line 1 = other:exp1exp2(),
?line {'EXIT',{undef,_}} = (catch other:loc1()),
?line {'EXIT',{undef,_}} = (catch other:loc1exp2()),
?line {'EXIT',{undef,_}} = (catch other:loc1loc2()),
?line {'EXIT',{undef,_}} = (catch other:exp2()),
?line {'EXIT',{undef,_}} = (catch other:loc2()),
?line 1 = proxy_call(P, other, exp1),
?line 1 = proxy_call(P, other, exp1loc2),
?line 1 = proxy_call(P, other, exp1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc2),
%%
%% Load version 2 of other
%%
code_SUITE:compile_load(other, Dir, 2, Other2),
?line 2 = upgradee:exp2(),
?line 2 = upgradee:exp1exp2(),
?line 2 = upgradee:loc1exp2(),
?line {'EXIT',{undef,_}} = (catch upgradee:exp1()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1()),
?line {'EXIT',{undef,_}} = (catch upgradee:exp1loc2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1loc2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc2()),
?line 1 = proxy_call(P, local, exp1),
?line 1 = proxy_call(P, local, loc1),
?line 1 = proxy_call(P, local, exp1exp2),
?line 1 = proxy_call(P, local, exp1loc2),
?line 1 = proxy_call(P, local, loc1exp2),
?line 1 = proxy_call(P, local, loc1loc2),
?line {cannot_compile,1} = proxy_call(P, local, exp2),
?line {cannot_compile,1} = proxy_call(P, local, loc2),
?line 2 = proxy_call(P, external, exp1exp2),
?line 2 = proxy_call(P, external, loc1exp2),
?line 2 = proxy_call(P, external, exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp1),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc2),
?line 2 = other:exp2(),
?line 2 = other:loc1exp2(),
?line 2 = other:exp1exp2(),
?line {'EXIT',{undef,_}} = (catch other:exp1()),
?line {'EXIT',{undef,_}} = (catch other:loc1()),
?line {'EXIT',{undef,_}} = (catch other:exp1loc2()),
?line {'EXIT',{undef,_}} = (catch other:loc1loc2()),
?line {'EXIT',{undef,_}} = (catch other:loc2()),
?line 2 = proxy_call(P, other, exp2),
?line 2 = proxy_call(P, other, loc1exp2),
?line 2 = proxy_call(P, other, exp1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp1),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc2),
%%
%% Upgrade proxy to version 2
%%
P ! upgrade_order,
%%
io:format("Delete version 2 of 'upgradee'\n",[]),
%%
code:purge(upgradee),
code:delete(upgradee),
?line {'EXIT',{undef,_}} = (catch upgradee:exp2()),
?line {'EXIT',{undef,_}} = (catch upgradee:exp1exp2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1exp2()),
?line {'EXIT',{undef,_}} = (catch upgradee:exp1()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1()),
?line {'EXIT',{undef,_}} = (catch upgradee:exp1loc2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc1loc2()),
?line {'EXIT',{undef,_}} = (catch upgradee:loc2()),
?line 2 = proxy_call(P, local, exp2),
?line 2 = proxy_call(P, local, loc2),
?line 2 = proxy_call(P, local, exp1exp2),
?line 2 = proxy_call(P, local, exp1loc2),
?line 2 = proxy_call(P, local, loc1exp2),
?line 2 = proxy_call(P, local, loc1loc2),
?line {cannot_compile,2} = proxy_call(P, local, exp1),
?line {cannot_compile,2} = proxy_call(P, local, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp1),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, external, exp1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, external, loc2),
?line 2 = other:exp2(),
?line 2 = other:loc1exp2(),
?line 2 = other:exp1exp2(),
?line {'EXIT',{undef,_}} = (catch other:exp1()),
?line {'EXIT',{undef,_}} = (catch other:loc1()),
?line {'EXIT',{undef,_}} = (catch other:exp1loc2()),
?line {'EXIT',{undef,_}} = (catch other:loc1loc2()),
?line {'EXIT',{undef,_}} = (catch other:loc2()),
?line 2 = proxy_call(P, other, exp2),
?line 2 = proxy_call(P, other, loc1exp2),
?line 2 = proxy_call(P, other, exp1exp2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp1),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1),
?line {'EXIT',{undef,_}} = proxy_call(P, other, exp1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc1loc2),
?line {'EXIT',{undef,_}} = proxy_call(P, other, loc2),
unlink(P),
exit(P, die_please),
io:format("Purge 'upgradee'\n",[]),
code:purge(upgradee),
io:format("Delete and purge 'other'\n",[]),
code:purge(other),
code:delete(other),
code:purge(other),
ok.
proxy_call(Pid, CallType, Func) ->
Pid ! {self(), CallType, Func},
receive
{Pid, call_result, Func, Ret} -> Ret
end.
<|start_filename|>lib/gs/doc/src/examples/ex7.erl<|end_filename|>
-module(ex7).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([mk_window/0]).
mk_window() ->
S= gs:start(),
Win= gs:create(window,S,[{motion,true},{map,true}]),
gs:config(Win,[{configure,true},{keypress,true}]),
gs:config(Win,[{buttonpress,true}]),
gs:config(Win,[{buttonrelease,true}]),
event_loop(Win).
event_loop(Win) ->
receive
{gs,Win,motion,Data,[X,Y | Rest]} ->
%% mouse moved to position X Y
io:format("mouse moved to X:~w Y:~w~n",[X,Y]);
{gs,Win,configure,Data,[W,H | Rest]} ->
%% window was resized by user
io:format("window resized W:~w H:~w~n",[W,H]);
{gs,Win,buttonpress,Data,[1,X,Y | Rest]} ->
%% button 1 was pressed at location X Y
io:format("button 1 pressed X:~w Y:~w~n",[X,Y]);
{gs,Win,buttonrelease,Data,[_,X,Y | Rest]} ->
%% Any button (1-3) was released over X Y
io:format("Any button released X:~w Y:~w~n",[X,Y]);
{gs,Win,keypress,Data,[a | Rest]} ->
%% key `a' was pressed in window
io:format("key a was pressed in window~n");
{gs,Win,keypress,Data,[_,65,1 | Rest]} ->
%% Key shift-a
io:format("shift-a was pressed in window~n");
{gs,Win,keypress,Data,[c,_,_,1 | Rest]} ->
%% CTRL_C pressed
io:format("CTRL_C was pressed in window~n");
{gs,Win,keypress,Data, ['Return' | Rest]} ->
%% Return key pressed
io:format("Return key was pressed in window~n")
end,
event_loop(Win).
<|start_filename|>lib/gs/doc/src/examples/ex5.erl<|end_filename|>
-module(ex5).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/2 $ ').
-export([start/0, init/0, b1/0, b2/0]).
start() ->
spawn(ex5, init, []).
init() ->
S = gs:start(),
W = gs:create(window,S,[{map,true}]),
gs:create(button,W,[{label,{text,"Button1"}},{data,{ex5,b1,[]}},{y,0}]),
gs:create(button,W,[{label,{text,"Button2"}},{data,{ex5,b2,[]}},{y,40}]),
loop().
loop()->
receive
{gs,_,click,{M,F,A},_} -> % any button pressed
apply(M,F,A),
loop()
end.
b1() ->
io:format("Button 1 pressed!~n",[]).
b2() ->
io:format("Button 2 pressed!~n",[]).
<|start_filename|>lib/dialyzer/test/opaque_SUITE_data/src/simple/rec_api.erl<|end_filename|>
-module(rec_api).
-export([t1/0, t2/0, t3/0, adt_t1/0, adt_t1/1, adt_r1/0,
t/1, t_adt/0, r/0, r_adt/0, u1/0, u2/0, u3/0, v1/0, v2/0, v3/0]).
-export_type([{a,0},{r1,0}, r2/0, r3/0]).
-export_type([f/0, op_t/0, r/0, tup/0]).
-opaque a() :: a | b.
-record(r1,
{f1 :: a()}).
-opaque r1() :: #r1{}.
t1() ->
A = #r1{f1 = a},
{r1, a} = A.
t2() ->
A = {r1, 10},
{r1, 10} = A,
A = #r1{f1 = 10}, % violates the type of field f1
#r1{f1 = 10} = A.
t3() ->
A = {r1, 10},
#r1{f1 = 10} = A. % violates the type of #r1{}
adt_t1() ->
R = rec_adt:r1(),
{r1, a} = R. % breaks the opaqueness
-spec adt_t1(rec_adt:r1()) -> rec_adt:r1(). % invalid type spec
adt_t1(R) ->
{r1, a} = R.
-spec adt_r1() -> rec_adt:r1(). % invalid type spec
adt_r1() ->
#r1{f1 = a}.
-opaque f() :: fun((_) -> _).
-opaque op_t() :: integer().
-spec t(f()) -> _.
t(A) ->
T = term(),
%% 3(T), % cannot test this: dialyzer_dep deliberately crashes
A(T).
-spec term() -> op_t().
term() ->
3.
t_adt() ->
A = rec_adt:f(),
T = term(),
A(T).
-record(r, {f = fun(_) -> 3 end :: f(), o = 1 :: op_t()}).
-opaque r() :: #r{}.
-opaque tup() :: {'r', f(), op_t()}.
-spec r() -> _.
r() ->
{{r, f(), 2},
#r{f = f(), o = 2}}. % OK, f() is a local opaque type
-spec f() -> f().
f() ->
fun(_) -> 3 end.
r_adt() ->
{{r, rec_adt:f(), 2},
#r{f = rec_adt:f(), o = 2}}. % breaks the opaqueness
-record(r2, % like #r1{}, but with initial value
{f1 = a :: a()}).
-opaque r2() :: #r2{}.
u1() ->
A = #r2{f1 = a},
{r2, a} = A.
u2() ->
A = {r2, 10},
{r2, 10} = A,
A = #r2{f1 = 10}, % violates the type of field f1
#r2{f1 = 10} = A.
u3() ->
A = {r2, 10},
#r2{f1 = 10} = A. % violates the type of #r2{}
-record(r3, % like #r1{}, but an opaque type
{f1 = queue:new():: queue:queue()}).
-opaque r3() :: #r3{}.
v1() ->
A = #r3{f1 = queue:new()},
{r3, a} = A. % breaks the opaqueness
v2() ->
A = {r3, 10},
{r3, 10} = A,
A = #r3{f1 = 10}, % violates the type of field f1
#r3{f1 = 10} = A.
v3() ->
A = {r3, 10},
#r3{f1 = 10} = A. % breaks the opaqueness
<|start_filename|>lib/gs/doc/src/examples/ex15.erl<|end_filename|>
-module(ex15).
-copyright('Copyright (c) 1991-97 Ericsson Telecom AB').
-vsn('$Revision: /main/release/3 $ ').
-export([start/0,init/0]).
start() -> spawn(ex15, init, []).
init() ->
I=gs:start(),
Win=gs:create(window, I,
[{width, 400},{height, 250},
{title,"Font Demo"},{map, true}]),
E = gs:create(canvas, can1,Win,
[{x,0},{y, 0},{width,400},{height,250}]),
Fonts = [{times,19},{screen,16},{helvetica,bold,21},
{symbol,12},{times,[bold,italic],33},{courier,6}],
show_fonts_in_boxes(Fonts,0),
receive
{gs,_Id,destroy,_Data,_Arg} -> bye
end.
show_fonts_in_boxes([],_) -> done;
show_fonts_in_boxes([Font|Fonts],Y) ->
Txt = io_lib:format("Hi! ~p",[Font]),
{Width,Height} = gs:read(can1,{font_wh,{Font,Txt}}),
Y2=Y+Height+2,
gs:create(rectangle,can1,[{coords,[{0,Y},{Width,Y2}]}]),
gs:create(text,can1,[{font,Font},{text,Txt},{coords,[{0,Y+1}]}]),
show_fonts_in_boxes(Fonts,Y2+1).
| lucafavatella/otp |
<|start_filename|>src/demo-iframe.js<|end_filename|>
import '@/styles/base.scss'
import Vue from 'vue'
import Grid from '@/components/grid/Grid'
import store from '@/store'
import { debounce } from 'lodash-es'
Vue.config.productionTip = false
Vue.config.devtools = process.env.NODE_ENV === 'development'
const container = document.querySelector('#app')
const app = new Vue({
store,
render: h => h(Grid)
}).$mount(container)
const handleResize = debounce(() => {
const {width, height} = app.$el.getBoundingClientRect()
try {
window.parent.postMessage({
'event-type': 'iframe-content-resize',
width,
height
},
document.location.origin)
} catch (e) {
// nothing to do here
}
}, 300)
handleResize()
window.addEventListener('resize', handleResize)
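// Receiving side sketch (an assumed embedding page, not part of this build):
// the parent window listens for the 'iframe-content-resize' message posted
// above and resizes its iframe accordingly.
//
//   window.addEventListener('message', (event) => {
//     if (event.data && event.data['event-type'] === 'iframe-content-resize') {
//       const iframe = document.querySelector('#demo-iframe') // hypothetical id
//       if (iframe) iframe.style.height = `${event.data.height}px`
//     }
//   })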
<|start_filename|>vue.config.js<|end_filename|>
module.exports = {
publicPath: process.env.NODE_ENV === 'production'
? '/article-assets/build-a-responsive-dashboard-with-vue-js/demo/'
: '/',
devServer: {
port: 47000
},
pages: {
'main': 'src/main.js',
'demo-iframe': 'src/demo-iframe.js'
}
}
| reymalahay/responsive-dashboard |
<|start_filename|>utils/schema.go<|end_filename|>
/*
* Copyright 2017-2019 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package utils
import (
"github.com/gorilla/schema"
)
// Create a Decoder instance as a package global, because it caches
// meta-data about structs, and an instance can be shared safely.
var urlSchemaDecoder = schema.NewDecoder()
// DecodeURLSchema decodes request form data into the provided dst url struct.
func DecodeURLSchema(dst interface{}, src map[string][]string) error {
return urlSchemaDecoder.Decode(dst, src)
}
func init() {
urlSchemaDecoder.SetAliasTag("url")
urlSchemaDecoder.IgnoreUnknownKeys(true)
}
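// Usage sketch (the loginParams type and its fields are hypothetical, shown
// only to illustrate the "url" alias tag configured above):
//
//	type loginParams struct {
//		RedirectURI string `url:"redirect_uri"`
//		State       string `url:"state"`
//	}
//
//	var params loginParams
//	_ = DecodeURLSchema(&params, req.URL.Query())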
<|start_filename|>utils/redirect.go<|end_filename|>
/*
* Copyright 2017-2019 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package utils
import (
"net/http"
"net/url"
"strings"
"github.com/google/go-querystring/query"
)
// WriteRedirect creates a URL out of the provided uri and params and writes an
// HTTP response with the provided HTTP status code to the provided
// http.ResponseWriter, including HTTP caching headers to prevent caching. If
// asFragment is true, the provided params are added as URL fragment, otherwise
// they are added to the query. If params is nil, the provided uri is taken as is.
func WriteRedirect(rw http.ResponseWriter, code int, uri *url.URL, params interface{}, asFragment bool) error {
if params != nil {
paramValues, err := query.Values(params)
if err != nil {
return err
}
u, _ := url.Parse(uri.String())
if asFragment {
if u.Fragment != "" {
u.Fragment += "&"
}
f := paramValues.Encode() // This encodes into URL encoded form with QueryEscape.
f, _ = url.QueryUnescape(f) // But we need it unencoded since it's the fragment; it is encoded later (when serializing the URL).
u.Fragment += f // Append fragment extension.
} else {
queryValues := u.Query()
for k, vs := range paramValues {
for _, v := range vs {
queryValues.Add(k, v)
}
}
u.RawQuery = strings.ReplaceAll(queryValues.Encode(), "+", "%20") // NOTE(longsleep): Ensure we use %20 instead of +.
}
uri = u
}
rw.Header().Set("Location", uri.String())
rw.Header().Set("Cache-Control", "no-store")
rw.Header().Set("Pragma", "no-cache")
rw.WriteHeader(code)
return nil
}
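// Usage sketch (the authResponse struct and its values are hypothetical;
// query.Values expects a struct with `url` tags):
//
//	type authResponse struct {
//		Code  string `url:"code"`
//		State string `url:"state"`
//	}
//
//	uri, _ := url.Parse("https://client.example/cb")
//	_ = WriteRedirect(rw, http.StatusFound, uri, &authResponse{Code: "abc", State: "xyz"}, false)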
<|start_filename|>identifier/modes.go<|end_filename|>
/*
* Copyright 2017-2019 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package identifier
const (
// ModeLogonUsernameEmptyPasswordCookie is the logon mode which requires a
// username which matches the currently signed in user in the cookie and an
// empty password.
ModeLogonUsernameEmptyPasswordCookie = "0"
// ModeLogonUsernamePassword is the logon mode which requires a username
// and a password.
ModeLogonUsernamePassword = "1"
)
const (
// MustBeSignedIn is a authorize mode which tells the authorization code,
// that it is expected to have a signed in user and everything else should
// be treated as error.
MustBeSignedIn = "must"
)
const (
// StateModeEndSession is a state mode which selects end session specific
// actions when processing state requests.
StateModeEndSession = "0"
)
<|start_filename|>identity/authorities/samlext/logoutresponse.go<|end_filename|>
/*
* Copyright 2017-2020 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package samlext
import (
"bytes"
"compress/flate"
"encoding/base64"
"encoding/xml"
"fmt"
"net/url"
"github.com/crewjam/saml"
"stash.kopano.io/kgol/rndm"
)
func MakeLogoutResponse(sp *saml.ServiceProvider, req *saml.LogoutRequest, status *saml.Status, binding string) (*LogoutResponse, error) {
res := &LogoutResponse{&saml.LogoutResponse{
ID: fmt.Sprintf("id-%x", rndm.GenerateRandomBytes(20)),
InResponseTo: req.ID,
Version: "2.0",
IssueInstant: saml.TimeNow(),
Destination: sp.GetSLOBindingLocation(binding),
Issuer: &saml.Issuer{
Format: "urn:oasis:names:tc:SAML:2.0:nameid-format:entity",
Value: firstSet(sp.EntityID, sp.MetadataURL.String()),
},
}}
if status != nil {
res.LogoutResponse.Status = *status
}
return res, nil
}
func firstSet(a, b string) string {
if a == "" {
return b
}
return a
}
type LogoutResponse struct {
*saml.LogoutResponse
}
// Redirect returns a URL suitable for using the redirect binding with the response.
func (res *LogoutResponse) Redirect(relayState string) *url.URL {
w := &bytes.Buffer{}
w1 := base64.NewEncoder(base64.StdEncoding, w)
w2, _ := flate.NewWriter(w1, 9)
e := xml.NewEncoder(w2)
if err := e.Encode(res); err != nil {
panic(err)
}
w2.Close()
w1.Close()
rv, _ := url.Parse(res.Destination)
query := rv.Query()
query.Set("SAMLResponse", w.String())
if relayState != "" {
query.Set("RelayState", relayState)
}
rv.RawQuery = query.Encode()
return rv
}
<|start_filename|>identity/clients/clients.go<|end_filename|>
/*
* Copyright 2017-2019 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package clients
import (
"crypto"
"net/url"
)
// Details hold detail information about clients identified by ID.
type Details struct {
ID string `json:"id"`
DisplayName string `json:"display_name"`
RedirectURI string `json:"redirect_uri"`
Trusted bool `json:"trusted"`
Registration *ClientRegistration `json:"-"`
}
// A Secured is a client record's public key identified by ID.
type Secured struct {
ID string
DisplayName string
ApplicationType string
Kid string
PublicKey crypto.PublicKey
TrustedScopes []string
Registration *ClientRegistration
}
// IsLocalNativeHTTPURI returns true if the provided URI qualifies to be used
// as http redirect URI for a native client.
func IsLocalNativeHTTPURI(uri *url.URL) bool {
if uri.Scheme != "http" {
return false
}
return IsLocalNativeHostURI(uri)
}
// IsLocalNativeHostURI returns true if the provided URI hostname is considered
// as localhost for a native client.
func IsLocalNativeHostURI(uri *url.URL) bool {
hostname := uri.Hostname()
return hostname == "localhost" || hostname == "127.0.0.1" || hostname == "::1"
}
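// Illustrative examples (values are hypothetical):
//
//	u, _ := url.Parse("http://localhost:54321/callback")
//	IsLocalNativeHTTPURI(u) // true
//
//	u, _ = url.Parse("http://192.168.1.10/callback")
//	IsLocalNativeHTTPURI(u) // false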
<|start_filename|>identity/authorities/samlext/idplogoutrequest.go<|end_filename|>
/*
* Copyright 2017-2020 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package samlext
import (
"bytes"
"compress/flate"
"crypto"
"encoding/base64"
"encoding/xml"
"fmt"
"io/ioutil"
"net/http"
"time"
"github.com/crewjam/saml"
)
// IdpLogoutRequest is used by IdentityProvider to handle a single logout request.
type IdpLogoutRequest struct {
HTTPRequest *http.Request
Binding string
RequestBuffer []byte
Request *saml.LogoutRequest
Now time.Time
RelayState string
SigAlg *string
Signature []byte
}
func NewIdpLogoutRequest(r *http.Request) (*IdpLogoutRequest, error) {
req := &IdpLogoutRequest{
HTTPRequest: r,
Now: saml.TimeNow(),
}
switch r.Method {
case http.MethodGet:
req.Binding = saml.HTTPRedirectBinding
compressedRequest, err := base64.StdEncoding.DecodeString(r.URL.Query().Get("SAMLRequest"))
if err != nil {
return nil, fmt.Errorf("cannot decode request: %w", err)
}
req.RequestBuffer, err = ioutil.ReadAll(flate.NewReader(bytes.NewReader(compressedRequest)))
if err != nil {
return nil, fmt.Errorf("cannot decompress request: %w", err)
}
req.RelayState = r.URL.Query().Get("RelayState")
sigAlgRaw := r.URL.Query().Get("SigAlg")
if sigAlgRaw != "" {
req.SigAlg = &sigAlgRaw
signature, err := base64.StdEncoding.DecodeString(r.URL.Query().Get("Signature"))
if err != nil {
return nil, fmt.Errorf("cannot decode signature: %w", err)
}
req.Signature = signature
}
case http.MethodPost:
if err := r.ParseForm(); err != nil {
return nil, err
}
req.Binding = saml.HTTPPostBinding
var err error
req.RequestBuffer, err = base64.StdEncoding.DecodeString(r.PostForm.Get("SAMLRequest"))
if err != nil {
return nil, err
}
req.RelayState = r.PostForm.Get("RelayState")
return nil, fmt.Errorf("parsing logout request from POST is not implemented")
default:
return nil, fmt.Errorf("method not allowed")
}
return req, nil
}
// Validate checks that the logout request is valid and assigns the
// Request property. Returns a non-nil error if the request is not valid.
func (req *IdpLogoutRequest) Validate() error {
request := &saml.LogoutRequest{}
if err := xml.Unmarshal(req.RequestBuffer, request); err != nil {
return err
}
req.Request = request
if req.Request.IssueInstant.Add(saml.MaxIssueDelay).Before(req.Now) {
return fmt.Errorf("request expired at %s", req.Request.IssueInstant.Add(saml.MaxIssueDelay))
}
if req.Request.Version != "2.0" {
return fmt.Errorf("expected SAML request version 2.0 got %v", req.Request.Version)
}
return nil
}
// VerifySignature verifies the associated IdpLogoutRequest data with the
// associated Signature using the provided public key.
func (req *IdpLogoutRequest) VerifySignature(pubKey crypto.PublicKey) error {
return VerifySignedHTTPRedirectQuery("SAMLRequest", req.HTTPRequest.URL.RawQuery, *req.SigAlg, req.Signature, pubKey)
}
<|start_filename|>identity/authorities/samlext/dsig.go<|end_filename|>
/*
* Copyright 2020 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package samlext
import (
"crypto"
"crypto/rsa"
_ "crypto/sha1" // Import all supported hashers.
_ "crypto/sha256"
_ "crypto/sha512"
"fmt"
"strings"
dsig "github.com/russellhaering/goxmldsig"
)
// VerifySignedHTTPRedirectQuery implements validation for signed SAML HTTP
// redirect binding parameters provides via URL query.
func VerifySignedHTTPRedirectQuery(kind string, rawQuery string, sigAlg string, signature []byte, pubKey crypto.PublicKey) error {
var hasher crypto.Hash
// Validate signature.
switch sigAlg {
case dsig.RSASHA1SignatureMethod:
hasher = crypto.SHA1
case dsig.RSASHA256SignatureMethod:
hasher = crypto.SHA256
case dsig.RSASHA512SignatureMethod:
hasher = crypto.SHA512
default:
return fmt.Errorf("unsupported sig alg: %v", sigAlg)
}
if len(signature) == 0 {
return fmt.Errorf("signature data is empty")
}
// The signed data format goes like this:
// SAMLRequest=urlencode(base64(deflate($xml)))&RelayState=urlencode($(relay_state))&SigAlg=urlencode($sig_alg)
// We rebuild it ourselves from the raw request, to avoid differences when url decoding/encoding.
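// For example (hypothetical values), the string that is hashed and verified
// looks like:
//   SAMLRequest=fVLBbt...&RelayState=abc123&SigAlg=http%3A%2F%2Fwww.w3.org%2F2001%2F04%2Fxmldsig-more%23rsa-sha256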
signedQuery := func(query string) string {
m := make(map[string]string)
for query != "" {
key := query
if i := strings.IndexAny(key, "&;"); i >= 0 {
key, query = key[:i], key[i+1:]
} else {
query = ""
}
if key == "" {
continue
}
value := ""
if i := strings.Index(key, "="); i >= 0 {
key, value = key[:i], key[i+1:]
}
m[key] = value // Support only one value, but that's ok since we only want one, and if someone really signed multiple values then it's fine to fail.
}
s := new(strings.Builder)
for idx, key := range []string{kind, "RelayState", "SigAlg"} {
if value, ok := m[key]; ok {
if idx > 0 {
s.WriteString("&")
}
s.WriteString(key)
s.WriteString("=")
s.WriteString(value)
}
}
return s.String()
}(rawQuery)
// Create hash for the alg.
hash := hasher.New()
if _, hashErr := hash.Write([]byte(signedQuery)); hashErr != nil {
return fmt.Errorf("failed to hash: %w", hashErr)
}
hashed := hash.Sum(nil)
rsaPubKey, ok := pubKey.(*rsa.PublicKey)
if !ok {
return fmt.Errorf("invalid RSA public key")
}
// NOTE(longsleep): All sig algs above, use PKCS1v15 with RSA.
if verifyErr := rsa.VerifyPKCS1v15(rsaPubKey, hasher, hashed, signature); verifyErr != nil {
return fmt.Errorf("signature verification failed: %w", verifyErr)
}
return nil
}
<|start_filename|>identity/clients/registry_test.go<|end_filename|>
package clients
import (
"context"
"testing"
)
func TestRedirectUriWithDynamicPort(t *testing.T) {
redirectURIs := []struct {
uri string
shallFail bool
}{
{"http://localhost:12345", false},
{"http://localhost:12345/callback", false},
{"http://127.0.0.1:12345/callback", false},
{"http://192.168.88.4:8080/callback", true},
{"http://[::1]:12345/callback", false},
{"http://localhost", false},
{"custom://callback.example.net", false},
{"http://localhost:12345/other-callback", false},
{"http://localhost.example.net/callback", true},
{"http://host-with-port:1234/callback", false},
{"http://host-with-port:123/callback", true},
{"https://localhost:123/callback", true},
}
registry, _ := NewRegistry(context.Background(), nil, "", true, 0, nil)
clientRegistration := ClientRegistration{
ID: "native",
Secret: "secret",
Trusted: true,
TrustedScopes: nil,
Insecure: false,
Dynamic: false,
ApplicationType: "native",
RedirectURIs: []string{"http://localhost", "http://localhost/callback", "custom://callback.example.net", "http://host-with-port:1234/callback"},
}
for _, redirectURI := range redirectURIs {
err := registry.Validate(&clientRegistration, "secret", redirectURI.uri, "", false)
if !redirectURI.shallFail && err != nil {
t.Errorf("Native client with dynamic port for redirectURI '%v' failed: %v", redirectURI.uri, err)
}
if redirectURI.shallFail && err == nil {
t.Errorf("Native client with dynamic port for redirectURI '%v' did not fail as expected.", redirectURI.uri)
}
}
}
func TestRedirectUriWithSpecificPath(t *testing.T) {
redirectURIs := []struct {
uri string
shallFail bool
}{
{"http://localhost:12345", true},
{"http://localhost:12345/callback", false},
{"http://127.0.0.1:12345/callback", false},
{"http://[::1]:12345/callback", false},
{"http://localhost", true},
{"custom://callback.example.net", true},
{"http://localhost:12345/callback-disallowed", true},
{"http://localhost.example.net/callback", true},
{"http://host-with-port:1234/callback", true},
{"http://host-with-port:123/callback", true},
{"https://localhost:123/callback", true},
{"http://localhost/other-callback", false},
{"http://127.0.0.1/other-callback", false},
{"http://10.0.0.1/other-callback", true},
{"http://[::1]/other-callback", false},
{"http://localhost:8080/other-callback", false},
}
registry, _ := NewRegistry(context.Background(), nil, "", true, 0, nil)
clientRegistration := ClientRegistration{
ID: "native",
Secret: "secret",
Trusted: true,
TrustedScopes: nil,
Insecure: false,
Dynamic: false,
ApplicationType: "native",
RedirectURIs: []string{"http://localhost/callback", "http://localhost/other-callback"},
}
for _, redirectURI := range redirectURIs {
err := registry.Validate(&clientRegistration, "secret", redirectURI.uri, "", false)
if !redirectURI.shallFail && err != nil {
t.Errorf("Native client specific path in redirectURI '%v' failed: %v", redirectURI.uri, err)
}
if redirectURI.shallFail && err == nil {
t.Errorf("Native client with specific path in for redirectURI '%v' did not fail as expected.", redirectURI.uri)
}
}
}
<|start_filename|>identity/authorities/registry.go<|end_filename|>
/*
* Copyright 2017-2019 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package authorities
import (
"context"
"errors"
"fmt"
"io/ioutil"
"net/url"
"sync"
"github.com/sirupsen/logrus"
"gopkg.in/yaml.v2"
)
// Registry implements the registry for registered authorities.
type Registry struct {
mutex sync.RWMutex
baseURI *url.URL
defaultID string
authorities map[string]AuthorityRegistration
logger logrus.FieldLogger
}
// NewRegistry creates a new authorizations Registry with the provided parameters.
func NewRegistry(ctx context.Context, baseURI *url.URL, registrationConfFilepath string, logger logrus.FieldLogger) (*Registry, error) {
registryData := &authorityRegistryData{}
if registrationConfFilepath != "" {
logger.Debugf("parsing authorities registration conf from %v", registrationConfFilepath)
registryFile, err := ioutil.ReadFile(registrationConfFilepath)
if err != nil {
return nil, err
}
err = yaml.Unmarshal(registryFile, registryData)
if err != nil {
return nil, err
}
}
r := &Registry{
baseURI: baseURI,
authorities: make(map[string]AuthorityRegistration),
logger: logger,
}
var defaultAuthorityRegistrationData *authorityRegistrationData
var defaultAuthority AuthorityRegistration
for _, registrationData := range registryData.Authorities {
var authority AuthorityRegistration
var validateErr error
if registrationData.ID == "" {
registrationData.ID = registrationData.Name
r.logger.WithField("id", registrationData.ID).Warnln("authority has no id, using name")
}
switch registrationData.AuthorityType {
case AuthorityTypeOIDC:
authority, validateErr = newOIDCAuthorityRegistration(r, registrationData)
case AuthorityTypeSAML2:
authority, validateErr = newSAML2AuthorityRegistration(r, registrationData)
}
fields := logrus.Fields{
"id": registrationData.ID,
"authority_type": registrationData.AuthorityType,
"insecure": registrationData.Insecure,
"trusted": registrationData.Trusted,
"default": registrationData.Default,
"alias_required": registrationData.IdentityAliasRequired,
}
if validateErr != nil {
logger.WithError(validateErr).WithFields(fields).Warnln("skipped registration of invalid authority entry")
continue
}
if authority == nil {
logger.WithFields(fields).Warnln("skipped registration of authority of unknown type")
continue
}
if registerErr := r.Register(authority); registerErr != nil {
logger.WithError(registerErr).WithFields(fields).Warnln("skipped registration of invalid authority")
continue
}
if registrationData.Default || defaultAuthorityRegistrationData == nil {
if defaultAuthorityRegistrationData == nil || !defaultAuthorityRegistrationData.Default {
defaultAuthorityRegistrationData = registrationData
defaultAuthority = authority
} else {
logger.Warnln("ignored default authority flag since already have a default")
}
} else {
// TODO(longsleep): Implement authority selection.
logger.Warnln("non-default additional authorities are not supported yet")
}
go func() {
if initializeErr := authority.Initialize(ctx, r); initializeErr != nil {
logger.WithError(initializeErr).WithFields(fields).Warnln("failed to initialize authority")
}
}()
logger.WithFields(fields).Debugln("registered authority")
}
if defaultAuthority != nil {
if defaultAuthorityRegistrationData.Default {
r.defaultID = defaultAuthorityRegistrationData.ID
logger.WithField("id", defaultAuthorityRegistrationData.ID).Infoln("using external default authority")
} else {
logger.Warnln("non-default authorities are not supported yet")
}
}
return r, nil
}
// Register validates the provided authority registration and adds the authority
// to the associated registry if valid. Returns an error otherwise.
func (r *Registry) Register(authority AuthorityRegistration) error {
id := authority.ID()
if id == "" {
return errors.New("no authority id")
}
if err := authority.Validate(); err != nil {
return fmt.Errorf("authority data validation error: %w", err)
}
switch authority.AuthorityType() {
case AuthorityTypeOIDC:
// breaks
case AuthorityTypeSAML2:
// breaks
default:
return fmt.Errorf("unknown authority type: %v", authority.AuthorityType())
}
r.mutex.Lock()
defer r.mutex.Unlock()
r.authorities[id] = authority
return nil
}
// Lookup returns and validates the authority Detail information for the provided
// parameters from the associated authority registry.
func (r *Registry) Lookup(ctx context.Context, authorityID string) (*Details, error) {
registration, ok := r.Get(ctx, authorityID)
if !ok {
return nil, fmt.Errorf("unknown authority id: %v", authorityID)
}
details := registration.Authority()
return details, nil
}
// Get returns the registered authorities registration for the provided client ID.
func (r *Registry) Get(ctx context.Context, authorityID string) (AuthorityRegistration, bool) {
if authorityID == "" {
return nil, false
}
// Lookup authority registration.
r.mutex.RLock()
registration, ok := r.authorities[authorityID]
r.mutex.RUnlock()
return registration, ok
}
// Find returns the first registered authority that satisfies the provided
// selector function.
func (r *Registry) Find(ctx context.Context, selector func(authority AuthorityRegistration) bool) (AuthorityRegistration, bool) {
r.mutex.RLock()
defer r.mutex.RUnlock()
for _, authority := range r.authorities {
if selector(authority) {
return authority, true
}
}
return nil, false
}
// Default returns the default authority from the associated registry if any.
func (r *Registry) Default(ctx context.Context) *Details {
authority, _ := r.Lookup(ctx, r.defaultID)
return authority
}
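// Usage sketch (identifiers are illustrative; ctx, baseURI and logger come
// from the calling server setup, and the config path is hypothetical):
//
//	registry, err := NewRegistry(ctx, baseURI, "/etc/lico/authorities.yaml", logger)
//	if err == nil {
//		if details := registry.Default(ctx); details != nil {
//			// Redirect sign-in requests to the default external authority.
//		}
//	}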
<|start_filename|>identity/authorities/samlext/logoutrequest.go<|end_filename|>
/*
* Copyright 2017-2020 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package samlext
import (
"bytes"
"compress/flate"
"encoding/base64"
"net/url"
"github.com/beevik/etree"
"github.com/crewjam/saml"
)
type LogoutRequest struct {
*saml.LogoutRequest
}
// Redirect returns a URL suitable for using the redirect binding with the response.
func (req *LogoutRequest) Redirect(relayState string) *url.URL {
w := &bytes.Buffer{}
w1 := base64.NewEncoder(base64.StdEncoding, w)
w2, _ := flate.NewWriter(w1, 9)
doc := etree.NewDocument()
doc.SetRoot(req.Element())
if _, err := doc.WriteTo(w2); err != nil {
panic(err)
}
w2.Close()
w1.Close()
rv, _ := url.Parse(req.Destination)
query := rv.Query()
query.Set("SAMLRequest", w.String())
if relayState != "" {
query.Set("RelayState", relayState)
}
rv.RawQuery = query.Encode()
return rv
}
<|start_filename|>identity/authorities/models.go<|end_filename|>
/*
* Copyright 2017-2019 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package authorities
import (
"context"
"net/http"
"net/url"
"gopkg.in/square/go-jose.v2"
)
// Supported Authority kind string values.
const (
AuthorityTypeOIDC = "oidc"
AuthorityTypeSAML2 = "saml2"
)
type authorityRegistrationData struct {
ID string `yaml:"id"`
Name string `yaml:"name"`
AuthorityType string `yaml:"authority_type"`
Iss string `yaml:"iss"`
ClientID string `yaml:"client_id"`
ClientSecret string `yaml:"client_secret"`
EntityID string `yaml:"entity_id"`
Trusted bool `yaml:"trusted"`
Insecure bool `yaml:"insecure"`
Default bool `yaml:"default"`
Discover *bool `yaml:"discover"`
Scopes []string `yaml:"scopes"`
ResponseType string `yaml:"response_type"`
CodeChallengeMethod string `yaml:"code_challenge_method"`
RawMetadataEndpoint string `yaml:"metadata_endpoint"`
RawAuthorizationEndpoint string `yaml:"authorization_endpoint"`
JWKS *jose.JSONWebKeySet `yaml:"jwks"`
IdentityClaimName string `yaml:"identity_claim_name"`
IdentityAliases map[string]string `yaml:"identity_aliases,flow"`
IdentityAliasRequired bool `yaml:"identity_alias_required"`
EndSessionEnabled bool `yaml:"end_session_enabled"`
}
type authorityRegistryData struct {
Authorities []*authorityRegistrationData `yaml:"authorities,flow"`
}
// AuthorityRegistration defines an authority with its properties.
type AuthorityRegistration interface {
ID() string
Name() string
AuthorityType() string
Authority() *Details
Issuer() string
Validate() error
Initialize(ctx context.Context, registry *Registry) error
MakeRedirectAuthenticationRequestURL(state string) (*url.URL, map[string]interface{}, error)
MakeRedirectEndSessionRequestURL(ref interface{}, state string) (*url.URL, map[string]interface{}, error)
MakeRedirectEndSessionResponseURL(req interface{}, state string) (*url.URL, map[string]interface{}, error)
ParseStateResponse(req *http.Request, state string, extra map[string]interface{}) (interface{}, error)
ValidateIdpEndSessionRequest(req interface{}, state string) (bool, error)
ValidateIdpEndSessionResponse(res interface{}, state string) (bool, error)
IdentityClaimValue(data interface{}) (string, map[string]interface{}, error)
Metadata() AuthorityMetadata
}
type AuthorityMetadata interface {
}
<|start_filename|>identity/authorities/samlext/idplogoutresponse.go<|end_filename|>
/*
* Copyright 2017-2020 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package samlext
import (
"bytes"
"compress/flate"
"crypto"
"encoding/base64"
"encoding/xml"
"fmt"
"io/ioutil"
"net/http"
"time"
"github.com/crewjam/saml"
)
// IdpLogoutResponse is used by IdentityProvider to handle a single logout
// response callback.
type IdpLogoutResponse struct {
HTTPRequest *http.Request
Binding string
ResponseBuffer []byte
Response *saml.LogoutResponse
Now time.Time
RelayState string
SigAlg *string
Signature []byte
}
func NewIdpLogoutResponse(r *http.Request) (*IdpLogoutResponse, error) {
res := &IdpLogoutResponse{
HTTPRequest: r,
Now: saml.TimeNow(),
}
switch r.Method {
case http.MethodGet:
res.Binding = saml.HTTPRedirectBinding
compressedResponse, err := base64.StdEncoding.DecodeString(r.URL.Query().Get("SAMLResponse"))
if err != nil {
return nil, fmt.Errorf("cannot decode response: %w", err)
}
res.ResponseBuffer, err = ioutil.ReadAll(flate.NewReader(bytes.NewReader(compressedResponse)))
if err != nil {
return nil, fmt.Errorf("cannot decompress response: %w", err)
}
res.RelayState = r.URL.Query().Get("RelayState")
sigAlgRaw := r.URL.Query().Get("SigAlg")
if sigAlgRaw != "" {
res.SigAlg = &sigAlgRaw
signature, err := base64.StdEncoding.DecodeString(r.URL.Query().Get("Signature"))
if err != nil {
return nil, fmt.Errorf("cannot decode signature: %w", err)
}
res.Signature = signature
}
case http.MethodPost:
if err := r.ParseForm(); err != nil {
return nil, err
}
res.Binding = saml.HTTPPostBinding
var err error
res.ResponseBuffer, err = base64.StdEncoding.DecodeString(r.PostForm.Get("SAMLResponse"))
if err != nil {
return nil, err
}
res.RelayState = r.PostForm.Get("RelayState")
return nil, fmt.Errorf("parsing logout response from POST is not implemented")
default:
return nil, fmt.Errorf("method not allowed")
}
return res, nil
}
// Validate checks that the associated response is valid and assigns the
// Response property. Returns a non-nil error if the response is not valid.
func (res *IdpLogoutResponse) Validate() error {
response := &saml.LogoutResponse{}
if err := xml.Unmarshal(res.ResponseBuffer, response); err != nil {
return err
}
res.Response = response
if res.Response.IssueInstant.Add(saml.MaxIssueDelay).Before(res.Now) {
return fmt.Errorf("response expired at %s", res.Response.IssueInstant.Add(saml.MaxIssueDelay))
}
if res.Response.Version != "2.0" {
return fmt.Errorf("expected SAML response version 2.0 got %v", res.Response.Version)
}
return nil
}
// VerifySignature verifies the associated IdpLogoutResponse data with the
// associated Signature using the provided public key.
func (res *IdpLogoutResponse) VerifySignature(pubKey crypto.PublicKey) error {
return VerifySignedHTTPRedirectQuery("SAMLResponse", res.HTTPRequest.URL.RawQuery, *res.SigAlg, res.Signature, pubKey)
}
<|start_filename|>cmd/licod/env.go<|end_filename|>
/*
* Copyright 2017-2020 Kopano and its licensors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package main
import (
"os"
"strings"
)
// envOrDefault returns the value of an env-variable or the default if the env-var is not set
func envOrDefault(name string, def string) string {
v := os.Getenv(name)
if v == "" {
return def
}
return v
}
// listEnvArg parses an env-arg which has a space separated list as value
func listEnvArg(name string) []string {
list := make([]string, 0)
for _, keyFn := range strings.Split(os.Getenv(name), " ") {
keyFn = strings.TrimSpace(keyFn)
if keyFn != "" {
list = append(list, keyFn)
}
}
return list
}
| rhafer/lico |
<|start_filename|>angular2-indexeddb.js<|end_filename|>
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var AngularIndexedDB = /** @class */ (function () {
function AngularIndexedDB(dbName, version) {
this.utils = new Utils();
this.dbWrapper = new DbWrapper(dbName, version);
}
AngularIndexedDB.prototype.openDatabase = function (version, upgradeCallback) {
var _this = this;
var self = this;
return new Promise(function (resolve, reject) {
_this.dbWrapper.dbVersion = version;
var request = _this.utils.indexedDB.open(_this.dbWrapper.dbName, version);
request.onsuccess = function (e) {
self.dbWrapper.db = request.result;
resolve();
};
request.onerror = function (e) {
// Parenthesize the ternary: without the parentheses the string prefix is
// concatenated first, which makes the condition always truthy.
reject('IndexedDB error: ' + (e.target.errorCode ?
    e.target.errorCode + ' (' + e.target.error + ')' :
    e.target.errorCode));
};
if (typeof upgradeCallback === "function") {
request.onupgradeneeded = function (e) {
upgradeCallback(e, self.dbWrapper.db);
};
}
});
};
AngularIndexedDB.prototype.getByKey = function (storeName, key) {
var self = this;
return new Promise(function (resolve, reject) {
self.dbWrapper.validateBeforeTransaction(storeName, reject);
var transaction = self.dbWrapper.createTransaction({ storeName: storeName,
dbMode: "readonly",
error: function (e) {
reject(e);
},
complete: function (e) {
}
}), objectStore = transaction.objectStore(storeName), request;
request = objectStore.get(key);
request.onsuccess = function (event) {
resolve(event.target.result);
};
});
};
AngularIndexedDB.prototype.getAll = function (storeName, keyRange, indexDetails) {
var self = this;
return new Promise(function (resolve, reject) {
self.dbWrapper.validateBeforeTransaction(storeName, reject);
var transaction = self.dbWrapper.createTransaction({ storeName: storeName,
dbMode: "readonly",
error: function (e) {
reject(e);
},
complete: function (e) {
}
}), objectStore = transaction.objectStore(storeName), result = [], request;
if (indexDetails) {
var index = objectStore.index(indexDetails.indexName), order = (indexDetails.order === 'desc') ? 'prev' : 'next';
request = index.openCursor(keyRange, order);
}
else {
request = objectStore.openCursor(keyRange);
}
request.onerror = function (e) {
reject(e);
};
request.onsuccess = function (evt) {
var cursor = evt.target.result;
if (cursor) {
result.push(cursor["value"]);
cursor["continue"]();
}
else {
resolve(result);
}
};
});
};
AngularIndexedDB.prototype.add = function (storeName, value, key) {
var self = this;
return new Promise(function (resolve, reject) {
self.dbWrapper.validateBeforeTransaction(storeName, reject);
var transaction = self.dbWrapper.createTransaction({ storeName: storeName,
dbMode: "readwrite",
error: function (e) {
reject(e);
},
complete: function (e) {
resolve({ key: key, value: value });
}
}), objectStore = transaction.objectStore(storeName);
var request = objectStore.add(value, key);
request.onsuccess = function (evt) {
key = evt.target.result;
};
});
};
AngularIndexedDB.prototype.update = function (storeName, value, key) {
var self = this;
return new Promise(function (resolve, reject) {
self.dbWrapper.validateBeforeTransaction(storeName, reject);
var transaction = self.dbWrapper.createTransaction({ storeName: storeName,
dbMode: "readwrite",
error: function (e) {
reject(e);
},
complete: function (e) {
resolve(value);
},
abort: function (e) {
reject(e);
}
}), objectStore = transaction.objectStore(storeName);
objectStore.put(value, key);
});
};
AngularIndexedDB.prototype.delete = function (storeName, key) {
var self = this;
return new Promise(function (resolve, reject) {
self.dbWrapper.validateBeforeTransaction(storeName, reject);
var transaction = self.dbWrapper.createTransaction({ storeName: storeName,
dbMode: "readwrite",
error: function (e) {
reject(e);
},
complete: function (e) {
resolve();
},
abort: function (e) {
reject(e);
}
}), objectStore = transaction.objectStore(storeName);
objectStore["delete"](key);
});
};
AngularIndexedDB.prototype.openCursor = function (storeName, cursorCallback, keyRange) {
var self = this;
return new Promise(function (resolve, reject) {
self.dbWrapper.validateBeforeTransaction(storeName, reject);
var transaction = self.dbWrapper.createTransaction({ storeName: storeName,
dbMode: "readonly",
error: function (e) {
reject(e);
},
complete: function (e) {
resolve();
},
abort: function (e) {
reject(e);
}
}), objectStore = transaction.objectStore(storeName), request = objectStore.openCursor(keyRange);
request.onsuccess = function (evt) {
cursorCallback(evt);
resolve();
};
});
};
AngularIndexedDB.prototype.clear = function (storeName) {
var self = this;
return new Promise(function (resolve, reject) {
self.dbWrapper.validateBeforeTransaction(storeName, reject);
var transaction = self.dbWrapper.createTransaction({ storeName: storeName,
dbMode: "readwrite",
error: function (e) {
reject(e);
},
complete: function (e) {
resolve();
},
abort: function (e) {
reject(e);
}
}), objectStore = transaction.objectStore(storeName);
objectStore.clear();
resolve();
});
};
AngularIndexedDB.prototype.getByIndex = function (storeName, indexName, key) {
var self = this;
return new Promise(function (resolve, reject) {
self.dbWrapper.validateBeforeTransaction(storeName, reject);
var transaction = self.dbWrapper.createTransaction({ storeName: storeName,
dbMode: "readonly",
error: function (e) {
reject(e);
},
abort: function (e) {
reject(e);
},
complete: function (e) {
}
}), objectStore = transaction.objectStore(storeName), index = objectStore.index(indexName), request = index.get(key);
request.onsuccess = function (event) {
resolve(event.target.result);
};
});
};
return AngularIndexedDB;
}());
exports.AngularIndexedDB = AngularIndexedDB;
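// Usage sketch (the store name and record shape are illustrative only):
//
//   var db = new AngularIndexedDB('myDb', 1);
//   db.openDatabase(1, function (evt) {
//       var store = evt.currentTarget.result.createObjectStore('people', { keyPath: 'id', autoIncrement: true });
//       store.createIndex('name', 'name', { unique: false });
//   }).then(function () {
//       return db.add('people', { name: 'Ada' });
//   }).then(function () {
//       return db.getAll('people');
//   });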
var Utils = /** @class */ (function () {
function Utils() {
this.indexedDB = window.indexedDB || window.mozIndexedDB || window.webkitIndexedDB || window.msIndexedDB;
}
return Utils;
}());
exports.Utils = Utils;
var DbWrapper = /** @class */ (function () {
function DbWrapper(dbName, version) {
this.dbName = dbName;
this.dbVersion = version || 1;
this.db = null;
}
DbWrapper.prototype.validateStoreName = function (storeName) {
return this.db.objectStoreNames.contains(storeName);
};
;
DbWrapper.prototype.validateBeforeTransaction = function (storeName, reject) {
if (!this.db) {
reject('You need to use the openDatabase function to create a database before you query it!');
}
if (!this.validateStoreName(storeName)) {
reject(('objectStore does not exist: ' + storeName));
}
};
DbWrapper.prototype.createTransaction = function (options) {
var trans = this.db.transaction(options.storeName, options.dbMode);
trans.onerror = options.error;
trans.oncomplete = options.complete;
trans.onabort = options.abort;
return trans;
};
return DbWrapper;
}());
exports.DbWrapper = DbWrapper;
//# sourceMappingURL=angular2-indexeddb.js.map
<|start_filename|>angular2-indexeddb.metadata.json<|end_filename|>
[{"__symbolic":"module","version":3,"metadata":{"AngularIndexedDB":{"__symbolic":"class","members":{"__ctor__":[{"__symbolic":"constructor","parameters":[{"__symbolic":"reference","name":"string"},{"__symbolic":"reference","name":"number"}]}],"openDatabase":[{"__symbolic":"method"}],"getByKey":[{"__symbolic":"method"}],"getAll":[{"__symbolic":"method"}],"add":[{"__symbolic":"method"}],"update":[{"__symbolic":"method"}],"delete":[{"__symbolic":"method"}],"openCursor":[{"__symbolic":"method"}],"clear":[{"__symbolic":"method"}],"getByIndex":[{"__symbolic":"method"}]}},"Utils":{"__symbolic":"class","members":{"__ctor__":[{"__symbolic":"constructor"}]}},"DbWrapper":{"__symbolic":"class","members":{"__ctor__":[{"__symbolic":"constructor","parameters":[{"__symbolic":"reference","name":"string"},{"__symbolic":"reference","name":"number"}]}],"validateStoreName":[{"__symbolic":"method"}],"validateBeforeTransaction":[{"__symbolic":"method"}],"createTransaction":[{"__symbolic":"method"}]}}}},{"__symbolic":"module","version":1,"metadata":{"AngularIndexedDB":{"__symbolic":"class","members":{"__ctor__":[{"__symbolic":"constructor","parameters":[{"__symbolic":"reference","name":"string"},{"__symbolic":"reference","name":"number"}]}],"openDatabase":[{"__symbolic":"method"}],"getByKey":[{"__symbolic":"method"}],"getAll":[{"__symbolic":"method"}],"add":[{"__symbolic":"method"}],"update":[{"__symbolic":"method"}],"delete":[{"__symbolic":"method"}],"openCursor":[{"__symbolic":"method"}],"clear":[{"__symbolic":"method"}],"getByIndex":[{"__symbolic":"method"}]}},"Utils":{"__symbolic":"class","members":{"__ctor__":[{"__symbolic":"constructor"}]}},"DbWrapper":{"__symbolic":"class","members":{"__ctor__":[{"__symbolic":"constructor","parameters":[{"__symbolic":"reference","name":"string"},{"__symbolic":"reference","name":"number"}]}],"validateStoreName":[{"__symbolic":"method"}],"validateBeforeTransaction":[{"__symbolic":"method"}],"createTransaction":[{"__symbolic":"method"}]}}}}] | jhuseinovic/angular2-indexeddb |
<|start_filename|>app/src/main/java/com/suvikas/conware/Activities/DetectContainer.java<|end_filename|>
package com.suvikas.conware.Activities;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import android.content.DialogInterface;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;
import com.suvikas.conbeer.ConBeer;
import com.suvikas.conware.KeystoreUtil;
import com.suvikas.conware.R;
import java.util.ArrayList;
public class DetectContainer extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_detect_container);
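        // Run the container-detection checks from the ConBeer library and list every check that fired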
ConBeer cb = new ConBeer(getApplicationContext(), null);
if (cb.isContainer()) {
Toast.makeText(getApplicationContext(), "Running in Container", Toast.LENGTH_SHORT).show();
}
final ArrayList<String> checksDetected = cb.getListOfChecksDetected();
ArrayAdapter adapter = new ArrayAdapter<>(this, R.layout.activity_listview, checksDetected);
final AlertDialog.Builder alertDialog = new AlertDialog.Builder(this);
ListView listView = (ListView) findViewById(R.id.keys_list);
listView.setAdapter(adapter);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int pos, long l) {
KeystoreUtil keystoreUtil = new KeystoreUtil();
String keyInfo = keystoreUtil.getKeyInfo(checksDetected.get(pos));
alertDialog.setTitle("Container Checks Detected")
.setMessage(keyInfo)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
                                // Nothing to do here; just dismiss the dialog
}
})
.show();
}
});
}
}
<|start_filename|>conbeerlib/src/main/java/com/suvikas/conbeer/Constants.java<|end_filename|>
package com.suvikas.conbeer;
public class Constants {
public static String[] PARALLELSPACE_SERVICES = {
"com.lbe.parallel.service.KeyguardService",
"cn.thinkingdata.android.TDQuitSafelyService$TDKeepAliveService",
"com.lbe.parallel.install.AppInstallService",
"com.lbe.doubleagent.service.proxy.KeepAliveService"
};
public static String [] BLACKLISTED_ENV_VARIABLES = {
"V_REPLACE_ITEM","V_KEEP_ITEM","V_SO_PATH",
"REPLACE_ITEM_ORIG","REPLACE_ITEM_DST","ZM_ENV_TPN","ENV_IOR_RULES",
"REDIRECT_SRC","WHITELIST_SRC"
};
public static String[] BLACKLISTED_STACKTRACE_CLASSES = {
"com.doubleagent", //Parallel Space
"com.lody", //Dual Space, Virtual App
"org.nl", // Super Clone
"io.tt", // Multi-Parallel App
"com.trendmicro.tmmssandbox", //DrClone
"com.estrongs.vbox", //Clone App
"com.excelliance", //2Accounts
"com.prism.gaia"
};
}
<|start_filename|>conbeerlib/src/main/java/com/suvikas/conbeer/ConBeer.java<|end_filename|>
package com.suvikas.conbeer;
import android.app.ActivityManager;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.util.Log;
import android.util.Pair;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
/**
* ConBeer class implements various mechanisms for detecting presence of Android Virtual Containers.
*/
public class ConBeer {
private Context mContext;
private String TAG = "CONBEER";
private ArrayList<String> checksDetected ;
private ArrayList<String> appServiceNames ;
/**
* @param context - Android Application context.
     * @param appServiceNames - ArrayList of services that will be created by the application.
*/
public ConBeer(Context context, ArrayList<String> appServiceNames){
this.mContext = context;
this.checksDetected = new ArrayList<>();
this.appServiceNames = appServiceNames;
}
/**
* Get the list of checks which were detected.
* @return List of checks detected.
*/
public ArrayList<String> getListOfChecksDetected(){
return checksDetected;
}
/**
* Check if the application is running inside the container. The checks are divided into 3 parts
* @return True, if virtual container detected
*/
public boolean isContainer(){
boolean isManifest = false;
boolean isAppComponents = false;
boolean isAppRuntime = false;
isAppRuntime = checkAppRuntime();
isManifest = checkManifest();
isAppComponents = checkAppComponents();
if(isAppRuntime || isManifest || isAppComponents ) {
return true;
}
return false;
}
/**
* Check application's manifest related information.
* @return True, if virtual container is detected.
*/
public boolean checkManifest(){
boolean bIsContainer = this.checkPermissions();
if(bIsContainer){checksDetected.add(Checks.MANIFEST_PERMISSIONS);}
//checkAppServiceName(null);
return bIsContainer;
}
/**
* Check application's runtime related information
* 1. /proc/self/maps if any suspicious files included
* 2. Check Environment variables
* 3. Check Internal storage directory
* 4. Check StackTrace
* @return True, if virtual container is detected.
*/
public boolean checkAppRuntime(){
boolean isCheckProcMaps = checkProcMaps();
if(isCheckProcMaps){ checksDetected.add(Checks.PROC_MAPS);}
boolean isCheckEnvironment = checkEnvironment();
if(isCheckEnvironment){checksDetected.add(Checks.ENVIRONMENT);}
boolean isInternalStorageDir = checkInternalStorageDir();
        if(isInternalStorageDir){checksDetected.add(Checks.INTERNAL_STORAGE_DIR);}
boolean isInStackTrace = checkStackTrace();
if(isInStackTrace) {checksDetected.add(Checks.STACKTRACE);}
if(isCheckEnvironment || isCheckProcMaps || isInternalStorageDir || isInStackTrace){
return true;
}
return false;
}
/**
* Checks for application's component
* 1. Check for Running App services
* 2. Dynamically enable app components
*
* @return True, if virtual container is detected
*/
public boolean checkAppComponents(){
// TODO: somehow get input for this function from caller;
boolean isCheckRunningAppServices = checkRunningAppServices(this.appServiceNames);
if(isCheckRunningAppServices){checksDetected.add(Checks.RUNNING_SERVICES);}
boolean isCheckAppComponentPropertyAtRuntime = checkAppComponentPropertyAtRuntime();
if(isCheckAppComponentPropertyAtRuntime){checksDetected.add(Checks.COMPONENT_RUNTIME);}
if(isCheckRunningAppServices || isCheckAppComponentPropertyAtRuntime){
return true;
}
return false;
}
/**
* Compares the MAC groups the app is assigned to with the permissions requested.
* If there is an anomaly, then it is an indication of running inside a container
*
* @return True, if virtual container detected.
*/
private boolean checkPermissions() {
boolean bIsContainer = false;
List<Pair<String,String>> permissionSEgroupList = new ArrayList<>();
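        // Pairs of (Android permission, supplementary/SELinux group name as reported by the `id` command)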
permissionSEgroupList.add(new Pair<>("android.permission.INTERNET", "inet"));
permissionSEgroupList.add(new Pair<>("android.permission.BLUETOOTH_ADMIN","net_bt_admin"));
permissionSEgroupList.add(new Pair<>("android.permission.BLUETOOTH", "net_bt"));
try {
PackageInfo packageInfo = mContext.getPackageManager().getPackageInfo(mContext.getPackageName(), PackageManager.GET_PERMISSIONS);
String[] requestedPermissions = packageInfo.requestedPermissions;
String segroupInfo = execute("id");
if(segroupInfo != null) {
for (int i = 0; i < permissionSEgroupList.size(); i++) {
Pair<String, String> pair = permissionSEgroupList.get(i);
if (segroupInfo.contains(pair.second)) {
if (requestedPermissions!= null && requestedPermissions.length != 0) {
boolean bIsPresent = false;
for (String s : requestedPermissions) {
if (s.contains(pair.first)) {
bIsPresent = true;
break;
}
}
if(!bIsPresent){
bIsContainer = true;
break;
}
} else {
bIsContainer = true;
break;
}
}
}
}
}catch(PackageManager.NameNotFoundException e) {
e.printStackTrace();
}
return bIsContainer;
}
/**
     * /proc/self/maps should not contain libraries or APKs from /data/app and /data/data folders
     * other than the app's own.
* Example:
* Dr.Clone: /data/app/com.trendmicro.tmas-nX-nxxGWSIQ3FOKGnz-Xbg==/lib/arm/libnativehook.so
* /data/app/com.trendmicro.tmas-nX-nxxGWSIQ3FOKGnz-Xbg==/lib/arm/libsubstrate.so
* Parallel Space:
* /data/app/com.lbe.parallel.intl-bp5H8cQ_sHHz72STgLNWfg==/lib/arm/libdaclient_64.so
*
* @return True, if virtual container detected
*/
private boolean checkProcMaps(){
boolean isContainer = false;
try {
if(BuildConfig.DEBUG){
Log.d(TAG, ">>>>>>>>>>>>>> CHECK PROC MAPS <<<<<<<<<<<<<<<");
}
String packageName = mContext.getPackageName();
InputStreamReader reader = new InputStreamReader(
new FileInputStream("/proc/self/maps"),
Charset.defaultCharset());
BufferedReader bufferedReader = new BufferedReader(reader);
List<String> paths = new ArrayList<String>();
// get all the paths in proc/self/maps
try {
String line;
do {
line = bufferedReader.readLine();
if (line == null)
break;
String[] parts = line.split(" ");
String tmp = parts[parts.length - 1];
paths.add(tmp);
} while (true);
// Check paths does not contain files from other /data/data and /data/app locations
for(String p: paths){
if(p.startsWith("/data/app") || p.startsWith("/data/data")){
if(!p.contains(packageName)) {
isContainer = true;
if(BuildConfig.DEBUG) {
Log.d(TAG, "checkProcMaps: Suspicious file: " + p);
}
break;
}
}
}
} finally {
try{
bufferedReader.close();
}catch(IOException e){
e.printStackTrace();
}
}
} catch (IOException e){
throw new RuntimeException("Unable to open /proc/self/maps");
}
if(BuildConfig.DEBUG) {Log.d(TAG, "checkProcMaps: " + isContainer);}
if(BuildConfig.DEBUG){
Log.d(TAG, ">>>>>>>>>>>>>> CHECK PROC MAPS: DONE <<<<<<<<<<<<<<<");
}
return isContainer;
}
/**
* Checks the internal storage directory of the application.
     * In a non-container scenario it will be /data/data/package_name, while in the case of a container
     * it will be within the container's internal storage.
* @return True, if virtual container detected
*/
private boolean checkInternalStorageDir(){
// Package Name: com.container.com.suvikas.conware Dir: /data/data/com.lbe.parallel.intl.arm64/parallel_intl/0/com.container.com.suvikas.conware
//TODO: improve exception handling, throw or handle it here?
if(BuildConfig.DEBUG){
Log.d(TAG, ">>>>>>>>>>>>>> CHECK INTERNAL STORAGE DIR <<<<<<<<<<<<<<<");
}
boolean isContainer = false;
try {
PackageManager pm = mContext.getPackageManager();
String packageName = mContext.getPackageName();
PackageInfo p = pm.getPackageInfo(packageName, 0);
String appDir = p.applicationInfo.dataDir;
if(BuildConfig.DEBUG) {
Log.d(TAG, "Package Name: " + packageName + " Dir: " + appDir);
}
String expectedAppDir = "/data/data/" + packageName;
//TODO: check what will be case with multiple users
String expectedAppDir2 = "/data/user/0/" + packageName;
// generally container will have something like:
// "/data/data/com.lbe.parallel.intl.arm64/parallel_intl/0/com.container.com.suvikas.conware"
if(!appDir.startsWith(expectedAppDir) && !appDir.startsWith(expectedAppDir2)){
Log.d(TAG, "check1: " + expectedAppDir);
Log.d(TAG, "check2: " + expectedAppDir2);
isContainer = true;
}
}catch(Exception e){ e.printStackTrace(); }
if(BuildConfig.DEBUG){
Log.d(TAG, "checkInternalStorageDir: " + isContainer);
Log.d(TAG, ">>>>>>>>>>>>>> CHECK INTERNAL STORAGE DIR: DONE <<<<<<<<<<<<<<<");
}
return isContainer;
}
// TODO: check external storage, parallel space altered that too.
/**
* Check the env command output to see if LD_PRELOAD is present and points to a legitimate path.
* If path is pointing to local sandbox data, then it is an indication of running inside a
* container
*/
private boolean checkEnvironment(){
boolean bIsContainer = false;
if(BuildConfig.DEBUG){Log.d(TAG,">>>>>> Environment Variables <<<<<");}
String environmentStatus = execute("env");
String [] blacklistedEnvs = Constants.BLACKLISTED_ENV_VARIABLES;
if(environmentStatus != null) {
String[] envs = environmentStatus.split("\n");
for(String env: envs){
if(env.contains("LD_PRELOAD")){
                    String [] preload = env.split("=");
                    // Guard against an empty LD_PRELOAD value; split("=") then yields only one element
                    if(preload.length > 1) {
                        if(BuildConfig.DEBUG){Log.d(TAG, "LD_PRELOAD: " + preload[1]);}
                        if( (preload[1].contains("/data/data")) || preload[1].contains("/data/app")){
                            bIsContainer = true;
                            break;
                        }
                    }
}else{
for(String blacklistedEnv : blacklistedEnvs){
if(env.contains(blacklistedEnv)){
bIsContainer = true;
break;
}
}
if(bIsContainer)
break;
}
}
}
if(BuildConfig.DEBUG){Log.d(TAG,">>>>>> Environment Variables: DONE <<<<<");}
return bIsContainer;
}
/**
     * A container may create stub components to fool the AMS into creating components that are not
     * defined in the manifest file. By using ActivityManager's getRunningServices API to get
     * information about the running services, we will see the name of the stub service,
     * such as stub.ServiceStubStubP08P00 when DroidPlugin is used.
* Example:
* In ParallelSpace:
* com.lbe.parallel.service.KeyguardService
* cn.thinkingdata.android.TDQuitSafelyService$TDKeepAliveService
* com.lbe.parallel.install.AppInstallService
* com.lbe.doubleagent.service.proxy.KeepAliveService
* @param appServices: ArrayList of application' services
*
* @return True, if container detected.
*/
@SuppressWarnings("deprecation")
private boolean checkRunningAppServices(ArrayList<String> appServices){
// IDEAS: https://stackoverflow.com/questions/600207/how-to-check-if-a-service-is-running-on-android/608600#608600
boolean isContainer = false;
ArrayList<String> runningServices = new ArrayList<>();
ActivityManager manager = (ActivityManager) mContext.getSystemService(Context.ACTIVITY_SERVICE);
if(BuildConfig.DEBUG){Log.d(TAG, ">>>>>>>> APP SERVICE NAMES <<<<<<<<<<<<<");}
// get services running for this application
for (ActivityManager.RunningServiceInfo service : manager.getRunningServices(Integer.MAX_VALUE)) {
String tmp = service.service.getClassName();
runningServices.add(tmp);
if(BuildConfig.DEBUG){Log.d(TAG, tmp);}
}
        if(appServices == null){
            // The app declares no services of its own, so any running service in this
            // process must have been injected by the hosting environment (e.g. a container stub)
            if(runningServices.size() > 0)
                isContainer = true;
}else{
for(String srvc: appServices){
runningServices.remove(srvc);
}
// apart from app services no other services should be there.
if(runningServices.size() > 0)
isContainer = true;
}
Log.d(TAG, "checkAppServiceName: " + isContainer);
Log.d(TAG, ">>>>>>>> APP SERVICE NAMES: DONE <<<<<<<<<<<<<");
return isContainer;
}
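    /**
     * Dynamically-enabled app component check: send a broadcast to a receiver component
     * that is only enabled at runtime (see FakeBroadcastReceiver, enabled from the host app),
     * then read a SharedPreferences flag that the receiver is expected to set on delivery.
     * If the flag was never set, the broadcast was not delivered to the dynamically enabled
     * component, which is an indication of running inside a virtual container.
     *
     * @return True, if virtual container is detected.
     */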
private boolean checkAppComponentPropertyAtRuntime(){
boolean isContainer = false;
if(BuildConfig.DEBUG){
Log.d(TAG, ">>>>>>>>>>>> APP COMPONENT PROPERTY AT RUNTIME: <<<<<<<<<<<<<<<<");
}
// send message
Intent intent = new Intent("com.container.conbeer.intent.TEST");
intent.setPackage("com.container.com.suvikas.conware");
//LocalBroadcastManager.getInstance(this).sendBroadcast(intent);
mContext.sendBroadcast(intent);
try{
Thread.sleep(1000); // to ensure broadcastreceiver process runs and updates sharedprefs
}catch(Exception e){
e.printStackTrace();
}
SharedPreferences settings = mContext.getSharedPreferences("Prefs", 0);
boolean isReceived = settings.getBoolean("received", false);
if(isReceived){
if(BuildConfig.DEBUG){ Log.d(TAG, "Broadcast intent received"); }
isContainer = false;
}else{
if(BuildConfig.DEBUG){ Log.d(TAG, "Broadcast intent NOT received"); }
isContainer = true;
}
if(BuildConfig.DEBUG){
Log.d(TAG, ">>>>>>>>>>>> APP COMPONENT PROPERTY AT RUNTIME: DONE<<<<<<<<<<<<<<<<");
}
return isContainer;
}
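    /**
     * Execute a shell command (e.g. "id" or "env") and capture its standard output.
     *
     * @param command Command to execute.
     * @return Output of the command as a single string, or null if execution failed.
     */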
private String execute(String command) {
try{
Process process = Runtime.getRuntime().exec(command);
process.waitFor();
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(process.getInputStream()));
StringBuilder builder = new StringBuilder();
String buffer = null;
while((buffer = bufferedReader.readLine()) != null) {
builder.append("\n");
builder.append(buffer);
}
return builder.toString();
}catch (Exception e){
e.printStackTrace();
}
return null;
}
/**
* Get the stacktrace for the current execution and check for the presence of blacklisted
* classes in the stacktrace
* @return True, if virtual container detected
*/
private boolean checkStackTrace() {
if(BuildConfig.DEBUG){
Log.d(TAG, ">>>>>>>>>>>> STACKTRACE AT RUNTIME: START<<<<<<<<<<<<<<<<");
}
String[] blackListedClassNameList = Constants.BLACKLISTED_STACKTRACE_CLASSES;
boolean bRet = false;
StackTraceElement[] stackTraces = new Throwable().getStackTrace();
for (StackTraceElement stackTrace : stackTraces) {
final String clazzName = stackTrace.getClassName();
            if(BuildConfig.DEBUG){Log.d(TAG, clazzName);}
for(String blacklistedClassName: blackListedClassNameList){
if(clazzName.contains(blacklistedClassName)){
bRet = true;
}
}
}
if(BuildConfig.DEBUG){
Log.d(TAG, ">>>>>>>>>>>> STACKTRACE AT RUNTIME: DONE<<<<<<<<<<<<<<<<");
}
return bRet;
}
}
<|start_filename|>app/src/main/java/com/suvikas/conware/MainActivity.java<|end_filename|>
package com.suvikas.conware;
import android.content.ComponentName;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import com.suvikas.conware.R;
import com.suvikas.conbeer.utils.FakeBroadcastReceiver;
import com.suvikas.conware.Activities.AppListActivity;
import com.suvikas.conware.Activities.AttackAndOTP;
import com.suvikas.conware.Activities.CommandExecActivity;
import com.suvikas.conware.Activities.DetectContainer;
import com.suvikas.conware.Activities.KeystoreActivity;
import com.suvikas.conware.Activities.RunningAppsListActivity;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
String [] list = new String[]{"List Installed Apps", "List Running Apps", "Keys in Keystore", "Command Execution", "Attack andOTP", "Detect Virtual Container"};
ArrayAdapter adapter = new ArrayAdapter<>(this, R.layout.activity_listview, list);
ListView listView = (ListView) findViewById(R.id.main_list);
listView.setAdapter(adapter);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int pos, long l) {
switch(pos){
case 0:
startActivity(new Intent(getApplicationContext(), AppListActivity.class));
break;
case 1:
startActivity(new Intent(getApplicationContext(), RunningAppsListActivity.class));
break;
case 2:
startActivity(new Intent(getApplicationContext(), KeystoreActivity.class));
break;
case 3:
startActivity(new Intent(getApplicationContext(), CommandExecActivity.class));
break;
case 4:
startActivity(new Intent(getApplicationContext(), AttackAndOTP.class));
break;
case 5:
startActivity(new Intent(getApplicationContext(), DetectContainer.class));
break;
default:
break;
}
}
});
}
@Override
public void onResume(){
super.onResume();
if(BuildConfig.DEBUG) {
Log.d("CONBEER", ">> Enabling BroadcastReceiver...");
}
//ComponentName componentName = new ComponentName("com.container.conbeer", FakeBroadcastReceiver.class.getName());
ComponentName componentName = new ComponentName(this.getApplicationContext(), FakeBroadcastReceiver.class);
this.getPackageManager().setComponentEnabledSetting(componentName,
PackageManager.COMPONENT_ENABLED_STATE_ENABLED,
PackageManager.DONT_KILL_APP);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
}
<|start_filename|>app/src/main/java/com/suvikas/conware/Activities/KeystoreActivity.java<|end_filename|>
package com.suvikas.conware.Activities;
import android.content.DialogInterface;
import android.os.Bundle;
import com.suvikas.conbeer.ContainerRecon;
import com.suvikas.conware.KeystoreUtil;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import com.suvikas.conware.R;
import java.util.ArrayList;
public class KeystoreActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_keystore);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
ContainerRecon cr = new ContainerRecon(getApplicationContext());
final ArrayList<String> keys = cr.getAllKeyAliases();
ArrayAdapter adapter = new ArrayAdapter<>(this, R.layout.activity_listview, keys);
final AlertDialog.Builder alertDialog = new AlertDialog.Builder(this);
ListView listView = (ListView) findViewById(R.id.keys_list);
listView.setAdapter(adapter);
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int pos, long l) {
KeystoreUtil keystoreUtil = new KeystoreUtil();
String keyInfo = keystoreUtil.getKeyInfo(keys.get(pos));
alertDialog.setTitle("Android KeyInfo")
.setMessage(keyInfo)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
                                // Nothing to do here; just dismiss the dialog
}
})
.show();
}
});
}
}
<|start_filename|>conbeerlib/src/main/java/com/suvikas/conbeer/Checks.java<|end_filename|>
package com.suvikas.conbeer;
public class Checks{
public static String MANIFEST_PERMISSIONS = "Manifest Permissions";
public static String ENVIRONMENT = "Environment Variables";
public static String PROC_MAPS = "Process Memory";
    public static String INTERNAL_STORAGE_DIR = "Internal Storage Directory";
public static String RUNNING_SERVICES = "Running Services";
public static String COMPONENT_RUNTIME = "App Components Runtime Check";
public static String STACKTRACE = "Stack Trace";
}
<|start_filename|>app/src/main/java/com/suvikas/conware/Activities/RunningAppsListActivity.java<|end_filename|>
package com.suvikas.conware.Activities;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import com.suvikas.conware.Constants;
import com.suvikas.conware.R;
import java.util.ArrayList;
import java.util.List;
public class RunningAppsListActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_app_list);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
final List<RunningAppData> apps = getListofRunningApps();
ArrayAdapter adapter = new CustomArrayAdapter(this, R.layout.activity_listview, apps);
ListView listView = (ListView) findViewById(R.id.applist);
listView.setAdapter(adapter);
}
private class RunningAppData {
private String uid;
private String pid;
private String appName;
RunningAppData(String uid, String pid, String appName) {
this.uid = uid;
this.pid = pid;
this.appName = appName;
}
}
private class CustomArrayAdapter extends ArrayAdapter<RunningAppsListActivity.RunningAppData> {
List<RunningAppsListActivity.RunningAppData> appDataList = new ArrayList<>();
CustomArrayAdapter(@NonNull Context context, int resource, @NonNull List<RunningAppsListActivity.RunningAppData> objects) {
super(context, resource, objects);
appDataList = objects;
}
@NonNull
@Override
public View getView(int position, @Nullable View convertView, @NonNull ViewGroup parent) {
LayoutInflater inflater = (LayoutInflater) getApplicationContext()
.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View rowView = inflater.inflate(R.layout.rowlayout_runningapps, parent, false);
TextView textView = (TextView) rowView.findViewById(R.id.label);
ImageView imageView = (ImageView) rowView.findViewById(R.id.icon);
RunningAppData appData = appDataList.get(position);
if(position == 0){
String textToDisplay = appData.uid + "\t\t\t\t\t" + appData.pid + "\t\t\t\t\t" + appData.appName;
textView.setText(textToDisplay);
imageView.setImageResource(R.drawable.app);
}else {
PackageManager pm = getApplicationContext().getPackageManager();
Drawable drawableIcon = null;
String textToDisplay = null;
try {
ApplicationInfo applicationInfo = pm.getApplicationInfo(appData.appName, 0);
drawableIcon = applicationInfo.loadIcon(pm);
imageView.setImageDrawable(drawableIcon);
CharSequence appName = pm.getApplicationLabel(applicationInfo);
textToDisplay = appData.uid + "\t\t\t" + appData.pid + "\t\t\t" + appName;
textView.setText(textToDisplay);
} catch (PackageManager.NameNotFoundException e) {
imageView.setImageResource(R.drawable.app);
textToDisplay = appData.uid + "\t\t\t" + appData.pid + "\t\t\t" + appData.appName;
textView.setText(textToDisplay);
}
}
return rowView;
}
}
private List<RunningAppsListActivity.RunningAppData> getListofRunningApps(){
ActivityManager activityManager = (ActivityManager) getApplicationContext().getSystemService(Context.ACTIVITY_SERVICE);
List<RunningAppsListActivity.RunningAppData> appDataList = new ArrayList<>();
appDataList.add(new RunningAppData("UID","PID","Process Name"));
List<ActivityManager.RunningAppProcessInfo> runningAppProcesses = activityManager.getRunningAppProcesses();
if(runningAppProcesses != null) {
for (int i = 0; i < runningAppProcesses.size(); i++) {
Log.d(Constants.TAG, "Process:"+ runningAppProcesses.get(i).processName);
appDataList.add(new RunningAppData(Integer.toString(runningAppProcesses.get(i).uid),
Integer.toString(runningAppProcesses.get(i).pid), runningAppProcesses.get(i).processName));
}
}
return appDataList;
}
}
| su-vikas/conbeerlib |
<|start_filename|>documentation/css/polyfill.css<|end_filename|>
[hidden], template {
display: none;
}
article, main, section {
display:block;
}
<|start_filename|>tests/performance/package.json<|end_filename|>
{
"name": "performance-dom99",
"version": "1.0.0",
"description": "Here are some tests for performance.",
"main": "perf-test.html",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC"
}
<|start_filename|>examples/css/test.css<|end_filename|>
.d-explorer {
border-style: dotted;
margin-bottom: 1.5em;
}
.grey{
background:#ddd;
}
<|start_filename|>examples/css/index.css<|end_filename|>
.todo-item {
list-style-type: none;
}
.todo-item > [type=checkbox]:checked + [type=text] {
text-decoration: line-through;
}
<|start_filename|>examples/css/dcomment.css<|end_filename|>
d-comment, .d-comment {
border: 1px solid;
display:block;
margin: 3px;
}
<|start_filename|>examples/css/basics.css<|end_filename|>
body {
color: #302f40;
font-family: 'Open Sans',Arial,sans-serif;
}
[hidden], template {
display: none;
}
<|start_filename|>tests/performance/css/currentStyle.css<|end_filename|>
@import url("./bootstrap/dist/css/bootstrap.min.css");
@import url("./main.css");
<|start_filename|>tests/performance/common.js<|end_filename|>
export {createMeasured, store};
import {timeFunction} from "../../node_modules/utilsac/utility.js";
const createMeasured = function (benchmarked) {
return function () {
return [benchmarked.name, timeFunction(benchmarked)];
}
};
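// Pseudo-random integer in the range [0, max), used to pick random benchmark data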
function _random(max) {
return Math.round(Math.random()*1000)%max;
}
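// In-memory store of {id, label} rows plus the operations exercised by the benchmarks
// (build, update every n-th row, select, swap, delete, clear).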
class Store {
constructor() {
this.data = [];
this.backup = null;
this.selected = null;
this.id = 1;
}
buildData(count = 1000) {
// returns an array of data
var adjectives = ["pretty", "large", "big", "small", "tall", "short", "long", "handsome", "plain", "quaint", "clean", "elegant", "easy", "angry", "crazy", "helpful", "mushy", "odd", "unsightly", "adorable", "important", "inexpensive", "cheap", "expensive", "fancy"];
var colours = ["red", "yellow", "blue", "green", "pink", "brown", "purple", "brown", "white", "black", "orange"];
var nouns = ["table", "chair", "house", "bbq", "desk", "car", "pony", "cookie", "sandwich", "burger", "pizza", "mouse", "keyboard"];
var data = [];
for (var i = 0; i < count; i++)
data.push({id: this.id++, label: adjectives[_random(adjectives.length)] + " " + colours[_random(colours.length)] + " " + nouns[_random(nouns.length)] });
return data;
}
    updateData(mod = 10) {
        // appends " !!!" to the label of every mod-th row (by default every 10th)
        for (let i=0;i<this.data.length;i+=mod) {
this.data[i].label += ' !!!';
// this.data[i] = Object.assign({}, this.data[i], {label: this.data[i].label +' !!!'});
}
}
delete(id) {
// deletes a data by id
const idx = this.data.findIndex(d => d.id==id);
this.data = this.data.filter((e,i) => i!=idx);
return this;
}
run() {
this.data = this.buildData();
this.selected = null;
}
add() {
this.data = this.data.concat(this.buildData(1000));
this.selected = null;
}
update() {
this.updateData();
this.selected = null;
}
select(id) {
this.selected = id;
}
// hideAll() {
// this.backup = this.data;
// this.data = [];
// this.selected = null;
// }
// showAll() {
// this.data = this.backup;
// this.backup = null;
// this.selected = null;
// }
runLots() {
this.data = this.buildData(10000);
this.selected = null;
}
clear() {
this.data = [];
this.selected = null;
}
swapRows() {
if(this.data.length > 998) {
var a = this.data[1];
this.data[1] = this.data[998];
this.data[998] = a;
}
}
}
const store = new Store();
<|start_filename|>components/yesNoDialog/yesNoDialog.css<|end_filename|>
/*
spacings : 0.3, 0.6, 0.9 , 2.7 ems
font-size: 21px;
colors: #72b4b0, #98c9c8
*/
.yes-no-dialog {
height: 100%;
margin: 0;
}
.yes-no-dialog {
display: none;
text-align: center;
font-size: 21px;
}
.yes-no-dialog-text {
margin: 0;
padding: 0.9em;
}
.yes-no-dialog-button {
margin: 0.9em;
padding: 0.6em 2.7em;
font-size: 21px;
}
.yes-no-dialog-button:focus,
.yes-no-dialog-button:hover {
background-color: #98c9c8;
}
.yes-no-dialog-button:active {
background-color: #72b4b0;
}
.yes-no-dialog-input-label {
min-width: 60%;
display: inline-block;
}
.yes-no-dialog-input-label + button {
/* only for button after <input type="text"> */
margin: 0.9em 0.3em;
}
.yes-no-dialog-input {
min-width: 60%;
margin: 0.9em 0.3em;
padding: 0.6em 0.3em;
border: 2px black solid;
font-size: 21px;
}
.yes-no-dialog-input:hover {
border-color: #98c9c8;
}
.yes-no-dialog-input:focus {
border-color: #72b4b0;
}
.yes-no-dialog-input-label > span {
min-width: 20%;
padding: 0.6em 0.3em;
}
.yes-no-dialog-active > *:not(.yes-no-dialog) {
display: none;
}
.yes-no-dialog-active > .yes-no-dialog {
display: block;
}
| liquorburn/DOM99 |
<|start_filename|>test.js<|end_filename|>
const OQO = require('./dist');
const people = [
{ name: 'John', age: 25, gender: 'male' },
{ name: 'Alice', age: 14, gender: 'female' },
{ name: 'Bob', age: 19, gender: 'male' },
{ name: 'Charlie', age: 20, gender: 'female' },
{ name: 'David', age: 21, gender: 'male' }
];
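// Query: project name and gender of everyone aged 20 or older, sorted by age in descending order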
const result = OQO
.select(['name', 'gender'])
.from(people)
.where('age >= 20')
.order('age', 'desc')
.run();
console.log(result);
| rulyox/oqo |
<|start_filename|>src/http/get-user-000name/index.js<|end_filename|>
// Enable secure sessions, express-style middleware, and more:
// https://docs.begin.com/en/functions/http/
//
// let begin = require('@architect/functions')
let html = `
<!doctype html>
<html lang=en>
<head>
<meta charset=utf-8>
<title>Hi!</title>
<link rel="stylesheet" href="https://static.begin.app/starter/default.css">
<link href="data:image/x-icon;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR42mNkYAAAAAYAAjCB0C8AAAAASUVORK5CYII=" rel="icon" type="image/x-icon" />
</head>
<body>
<h1 class="center-text">
Hello world!
</h1>
<p class="center-text">
Your new route is ready to go!
</p>
<p class="center-text">
Learn more about building <a href="https://docs.begin.com/en/functions/http/" class="link" target="_blank">Begin HTTP functions here</a>.
</p>
</body>
</html>
`
// HTTP function
exports.handler = async function http(req) {
console.log(req)
return {
headers: {
'content-type': 'text/html; charset=utf8',
'cache-control': 'no-cache, no-store, must-revalidate, max-age=0, s-maxage=0'
},
body: html
}
}
| christianfroseth/begin-vue-app |
<|start_filename|>src/app/speaker-registration/speaker-registration-step-three/speaker-registration-step-three.component.html<|end_filename|>
<div class="container" id="step-three-container">
<div class="col-md-4 col-md-offset-4">
<div class="panel panel-default">
<div class="panel-heading">
<h3 class="panel-title" id="step-three-page-title">
Speaker registration - step 3/3
</h3>
</div>
<div class="panel-body">
<div *ngIf="!validated">
<div class="alert alert-info">
<p>
                        Please verify your personal & session details!
</p>
</div>
<form role="form" novalidate id="step-three-form">
<h4>Personal details:</h4>
<table>
<tr>
<th>Name:</th>
<td>{{ (data$ | async )?.personal.name}}</td>
</tr>
<tr>
<th>Email:</th>
<td>{{ (data$ | async )?.personal.email}}</td>
</tr>
<tr>
<th>Description:</th>
<td>{{ (data$ | async )?.personal.description}}</td>
</tr>
</table>
<button (click)="goBackToStepOne($event)" class="btn btn-warning"
id="step-three-gobacktostepone">
Edit personal details
</button>
<h4>Session details:</h4>
<table>
<tr>
<th>Type:</th>
<td>{{ (data$ | async )?.session.type}}</td>
</tr>
<tr>
<th>Title:</th>
<td>{{ (data$ | async )?.session.title}}</td>
</tr>
<tr>
<th>Description:</th>
<td>{{ (data$ | async )?.session.description}}</td>
</tr>
</table>
<button (click)="goBackToStepTwo($event)" class="btn btn-warning"
id="step-three-gobacktosteptwo">
Edit session details
</button>
<hr />
<button (click)="submit($event)" class="btn btn-success" id="step-three-submit">Submit your
session!
</button>
</form>
</div>
<div *ngIf="validated">
<div class="alert alert-success">
Thanks for registering as a speaker! See you soon :)
</div>
</div>
</div>
</div>
</div>
</div> | samvloeberghs/protractor-gherkin-cucumberjs-angular2 |
<|start_filename|>tests/mocks/contentItem.json<|end_filename|>
{
"contentItems": [
{
"content": "Humans have evolved to their relatively high state by retaining the immature characteristics of their ancestors. Humans are the most advanced of mammals – although a case could be made for the dolphins – because they seldom grow up. Behavioral traits such as curiosity about the world, flexibility of response, and playfulness are common to practically all young mammals but are usually rapidly lost with the onset of maturity in all but humans. Humanity has advanced, when it has advanced, not because it has been sober, responsible and cautious, but because it has been playful, rebellious, and immature. <NAME>, Still Life with Woodpecker.",
"id": "string",
"forward": "false",
"contenttype": "text/plain"
}
]
}
| harryspink/watson-personality-insights-php |
<|start_filename|>test/unit/stats.js<|end_filename|>
'use strict'
const test = require('tape')
const Stats = require('../../lib/stats')
test('stats', function (t) {
const s1 = new Stats(false, 0, 0)
t.is(s1.ok, false)
t.is(s1.pass, 0)
t.is(s1.fail, 0)
t.is(s1.count, 0)
const s2 = new Stats(true, 0, 1)
t.is(s2.ok, false)
t.is(s2.pass, 0)
t.is(s2.fail, 1)
t.is(s2.count, 1)
const s3 = new Stats(true, 1, 0)
t.is(s3.ok, true)
t.is(s3.pass, 1)
t.is(s3.fail, 0)
t.is(s3.count, 1)
s3.ok = false
t.is(s3.ok, false)
t.end()
})
<|start_filename|>test/integration/index.js<|end_filename|>
'use strict'
const Airtap = require('../..')
const fs = require('fs')
const path = require('path')
module.exports = function (test, provider, options) {
if (!options) options = {}
test(function (t) {
const airtap = new Airtap()
// Temporary workaround, airtap-multi wants an id
provider.prototype.id = 'anonymous'
airtap.provider(provider, options.provider)
airtap.manifests(options.wanted, function (err, manifests) {
if (err) return t.fail(err)
if (!manifests.length) {
return t.fail('Zero manifests')
} else if (manifests.length > 100) {
return t.fail('Too many manifests, unsafe to test')
}
testTap(t, airtap, manifests, options.test)
testTimeout(t, airtap, manifests, options.test)
t.end()
})
})
}
function testTap (t, airtap, manifests, options) {
const cwd = path.resolve(__dirname, 'fixtures', 'tap')
const read = (fp) => fs.readFileSync(path.join(cwd, fp), 'utf8')
const expectedStats = JSON.parse(read('stats.json'))
const expectedOut = read('out').trim()
t.test('tap', function (t) {
airtap.test(manifests, ['test.js'], { ...options, cwd, annotate: false })
.on('error', t.fail.bind(t))
.on('context', function (context) {
const title = context.browser.title
context.on('session', function (session) {
t.pass(`${title} started`)
let out = ''
session.on('data', function (chunk) {
out += chunk
})
session.on('complete', function (stats) {
t.is(strip(out), expectedOut, `${title} tap`)
for (const k in expectedStats) {
t.is(stats[k], expectedStats[k], `${title} stats.${k}`)
}
})
})
})
.on('complete', function (stats) {
t.is(stats.ok, expectedStats.ok)
t.is(stats.count, manifests.length, manifests.length + ' completed')
t.is(stats.pass, expectedStats.ok ? manifests.length : 0, 'n passed')
t.is(stats.fail, expectedStats.ok ? 0 : manifests.length, 'n failed')
t.end()
})
})
}
function testTimeout (t, airtap, manifests, options) {
const cwd = path.resolve(__dirname, 'fixtures', 'timeout')
// The timeout here must be less than the tape timeout in test.js
options = { ...options, cwd, timeout: 1e3, concurrency: 2, retries: 0 }
t.test('timeout', function (t) {
airtap.test(manifests, ['test.js'], options)
.on('error', function (err) {
// Will either timeout on session start or session output
t.ok(err, String(err))
t.end()
})
.on('context', function (context) {
context.on('session', function (session) {
// This may or may not happen
t.pass(`${context.browser.title} started`)
session.on('complete', function () {
t.fail('session should not complete')
}).resume()
})
})
.on('complete', function () {
t.fail('test should not complete')
})
})
}
function strip (tap) {
const lines = tap.trim().split(/\r?\n/)
const isDiagnostic = (line) => line === '' || line[0] === '#'
// Don't care about diagnostics after completion
while (lines.length && isDiagnostic(lines[lines.length - 1])) {
lines.pop()
}
// Don't care about YAML blocks (that may contain stack traces)
for (let i = 0, inblock = false; i < lines.length; i++) {
if (inblock) {
if (lines[i].startsWith(' ...')) inblock = false
lines.splice(i--, 1)
} else if (lines[i].startsWith(' ---')) {
inblock = true
lines.splice(i--, 1)
}
}
return lines.join('\n').trim()
}
<|start_filename|>bin/airtap.js<|end_filename|>
#!/usr/bin/env node
'use strict'
if (process.version.match(/^v(\d+)\./)[1] < 10) {
console.error('airtap: Node 10 or greater is required. `airtap` did not run.')
process.exit(0)
}
require('make-promises-safe')
const nearest = require('find-nearest-file')
const yaml = require('js-yaml')
const os = require('os')
const fs = require('fs')
const path = require('path')
const Airtap = require('../lib/airtap')
const hasOwnProperty = Object.prototype.hasOwnProperty
const argv = require('minimist')(process.argv.slice(2), {
string: [
'concurrency',
'retries',
'timeout',
'preset',
'server',
'loopback'
],
boolean: [
'version',
'help',
'list-browsers',
'all',
'coverage',
'live',
'verbose',
'silly',
// Legacy options (handled below)
'local',
'open',
'electron'
],
alias: {
v: 'version',
h: 'help',
l: 'list-browsers',
a: 'all',
c: 'concurrency',
r: 'retries',
t: 'timeout',
p: 'preset',
s: 'server'
}
})
if (argv.help) {
console.log(read('help.txt'))
process.exit()
} else if (argv.version) {
console.log(require('../package.json').version)
process.exit()
}
const config = {
watchify: !process.env.CI,
...readYAML(nearest('.airtaprc') || path.join(os.homedir(), '.airtaprc')),
...readYAML('.airtap.yml'),
...wash(argv)
}
if (argv.preset) {
usePreset(config, argv.preset)
}
if (config.silly) {
require('debug').enable('*,-babel')
} else if (config.verbose) {
require('debug').enable('airtap*')
}
// Reject flags that have been removed in airtap 4
if (config.local && config.open) {
fail(read('no-local-open.txt'), true)
} else if (config.local) {
fail(read('no-local.txt'), true)
} else if (config.electron) {
fail(read('no-electron.txt'), true)
}
// Take credentials from root config for airtap < 4 compatibility
// TODO: remove in next major. Can be specified via env or provider options.
setCredentials(config, process.env)
const airtap = new Airtap()
const wanted = config.all ? null : config.browsers || []
const files = argv._.length ? argv._ : config.files || []
if (!config.providers) {
config.providers = ['airtap-default']
if (wanted) wanted.splice(0, wanted.length, { name: 'default' })
}
if (!files.length && !argv['list-browsers']) {
fail('At least one file must be specified.', true)
} else if (!config.providers.length) {
fail(read('no-input.txt'), true)
} else if (wanted && !wanted.length) {
fail(read('no-input.txt'), true)
}
// Load providers
airtap.provider(config.providers)
// Match provider manifests against wanted manifests
airtap.manifests(wanted, function (err, manifests) {
if (err) return fail(err)
if (argv['list-browsers']) {
manifests.forEach(simplifyManifest)
console.log(toYAML(manifests))
return
}
airtap.test(manifests, files, config)
.on('error', fail)
.on('context', function (context) {
// Emits one session or more (on page reload)
context.on('session', function (session) {
// TODO (later): merge TAP from multiple sessions
session.pipe(process.stdout, { end: false })
})
})
.on('complete', function (stats) {
console.log('# %d of %d browsers ok', stats.pass, stats.count)
process.exit(stats.ok ? 0 : 1)
})
})
function fail (err, expected) {
if (err.expected || expected) {
if (err.code === 'ERR_MANIFEST_NOT_FOUND') {
const isEmpty = Object.keys(err.input).length === 0
const wanted = isEmpty ? '<empty>' : toYAML(err.input)
console.error('No manifest found matching:\n\n%s', indent(wanted))
} else {
console.error(err.message || err)
}
process.exit(err.exitCode || 1)
}
throw err
}
function readYAML (fp) {
try {
return yaml.load(fs.readFileSync(fp, 'utf8'))
} catch (err) {
if (err.code !== 'ENOENT') fail(err)
}
}
function toYAML (value) {
return yaml.dump(value, { noRefs: true }).trim()
}
function indent (str) {
return ' ' + str.replace(/\r?\n/g, '\n ')
}
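// Keep only own, non-underscore-prefixed options that have a non-empty value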
function wash (opts) {
const copy = {}
for (const k in opts) {
if (k.startsWith('_') || !hasOwnProperty.call(opts, k)) continue
if (opts[k] != null && opts[k] !== '') copy[k] = opts[k]
}
return copy
}
function usePreset (config, preset) {
const presets = config.presets
const overrides = presets && presets[preset]
if (typeof presets !== 'object' || presets === null) {
return fail('No presets are available', true)
} else if (typeof overrides !== 'object' || overrides === null) {
return fail(`Preset '${preset}' not found`, true)
}
for (const k in overrides) {
if (k === 'presets') continue
if (!hasOwnProperty.call(overrides, k)) continue
config[k] = overrides[k]
}
}
function setCredentials (config, env) {
const username = config.sauce_username || config.username
const key = config.sauce_key || config.sauce_access_key || config.key
if (username || key) console.error(read('deprecated-creds.txt'))
if (username && !env.SAUCE_USERNAME) env.SAUCE_USERNAME = username
if (key && !env.SAUCE_ACCESS_KEY) env.SAUCE_ACCESS_KEY = key
}
function simplifyManifest (m) {
// Remove irrelevant properties, to ease copy-pasting into airtap.yml.
if (Object.keys(m.supports).length === 0) delete m.supports
if (Object.keys(m.wants).length === 0) delete m.wants
if (Object.keys(m.options).length === 0) delete m.options
}
function read (filename) {
const fp = path.join(__dirname, filename)
return fs.readFileSync(fp, 'utf8').trim()
}
<|start_filename|>test/unit/browser-session.js<|end_filename|>
'use strict'
const test = require('tape')
const BrowserSession = require('../../lib/browser-session')
function messages () {
return [
{ type: 'console', level: 'log', args: ['TAP version 13'] },
{ type: 'console', level: 'log', args: ['# ok'] },
{ type: 'console', level: 'log', args: ['ok 1 (unnamed assert)'] },
{ type: 'console', level: 'log', args: ['# fail'] },
{ type: 'console', level: 'log', args: ['not ok 2 should be truthy'] },
{ type: 'console', level: 'log', args: ['# suite'] },
{ type: 'console', level: 'log', args: ['ok 3 yeah'] },
{ type: 'console', level: 'log', args: ['not ok 4 WOOPS'] },
{ type: 'console', level: 'log', args: ['not ok 5'] },
{ type: 'console', level: 'log', args: ['# pass'] },
{ type: 'console', level: 'log', args: ['ok 6 (unnamed assert)'] },
{ type: 'console', level: 'log', args: ['# plan'] },
{ type: 'console', level: 'log', args: ['ok 7 true is true AWESOME'] },
{ type: 'console', level: 'log', args: ['# failed plan'] },
{ type: 'console', level: 'log', args: ['ok 8 one assert'] },
{ type: 'console', level: 'log', args: ['not ok 9 test timed out after 200ms'] },
{ type: 'console', level: 'log', args: [''] },
{ type: 'console', level: 'log', args: ['1..9'] },
{ type: 'console', level: 'log', args: ['# tests 9'] },
{ type: 'console', level: 'log', args: ['# pass 5'] },
{ type: 'console', level: 'log', args: ['# fail 4'] },
{ type: 'console', level: 'log', args: [''] }
]
}
test('session', function (t) {
BrowserSession.reset()
t.plan(4)
let output = ''
const session = new BrowserSession('abc', 30e3, '.')
session.on('data', function (s) {
output += s
})
session.on('complete', function (stats) {
t.is(stats.pass, 5)
t.is(stats.fail, 4)
t.is(stats.ok, false)
session.write({ type: 'end' })
})
for (const m of messages()) {
session.write(m)
}
session.on('close', function () {
t.same(output.trim().split('\n'), [
'TAP version 13',
'# abc [1]',
'# ok [1]',
'ok 1 (unnamed assert) [1]',
'# fail [1]',
'not ok 2 should be truthy [1]',
'# suite [1]',
'ok 3 yeah [1]',
'not ok 4 WOOPS [1]',
'not ok 5 [1]',
'# pass [1]',
'ok 6 (unnamed assert) [1]',
'# plan [1]',
'ok 7 true is true AWESOME [1]',
'# failed plan [1]',
'ok 8 one assert [1]',
'not ok 9 test timed out after 200ms [1]',
'',
'1..9',
'# tests 9 [1]',
'# pass 5 [1]',
'# fail 4 [1]'
])
})
})
test('session start timeout', function (t) {
t.plan(2)
let output = ''
const session = new BrowserSession('abc', 200, '.')
session.on('data', function (s) {
output += s
})
session.on('complete', function (stats) {
t.fail('should not complete')
})
session.on('error', function (err) {
t.is(err.message, 'Did not receive output from \'abc\' (0.2 seconds)')
session.on('close', function () {
t.same(output, '')
})
})
})
test('session output timeout', function (t) {
t.plan(2)
let output = ''
const session = new BrowserSession('abc', 200, '.')
session.on('data', function (s) {
output += s
})
session.on('complete', function (stats) {
t.fail('should not complete')
})
session.on('error', function (err) {
t.is(err.message, 'Did not receive output from \'abc\' (0.2 seconds)')
session.on('close', function () {
t.same(output, 'TAP version 13\n# abc [3]\n')
})
})
session.write({
type: 'console',
level: 'log',
args: ['TAP version 13']
})
})
test('session unknown message', function (t) {
t.plan(2)
const session = new BrowserSession('abc', 30e3, '.')
session.on('error', function (err) {
t.is(err.message, 'Unknown message type')
})
session.on('close', function () {
t.pass('closed')
})
session.resume()
session.write({ type: 'foo' })
})
<|start_filename|>lib/test.js<|end_filename|>
'use strict'
const EventEmitter = require('events')
const Stats = require('./stats')
module.exports = class Test extends EventEmitter {
constructor () {
super()
this.stats = new Stats(true, 0, 0)
this.destroyed = false
}
aggregate (stats) {
if (stats.ok) this.stats.pass++
else this.stats.fail++
}
complete () {
if (this.destroyed) return
this.emit('complete', this.stats)
this.destroy()
}
destroy (err) {
if (this.destroyed) return
this.destroyed = true
if (err) {
this.emit('error', err)
}
process.nextTick(() => {
this.emit('close')
})
}
}
<|start_filename|>lib/browser-context.js<|end_filename|>
'use strict'
const EventEmitter = require('events')
const osc = require('on-stream-close')
const combine = require('maybe-combine-errors')
const ms = require('bruce-millis-option')
const transient = require('transient-error')
const debug = require('debug')('airtap:browser-context')
const path = require('path')
const BrowserSession = require('./browser-session')
const timeout = require('./timeout')
const kBrowser = Symbol('kBrowser')
const kCwd = Symbol('kCwd')
const kAnnotate = Symbol('kAnnotate')
const kLive = Symbol('kLive')
const kErrors = Symbol('kErrors')
const kReady = Symbol('kReady')
const kDestroyed = Symbol('kDestroyed')
const kSession = Symbol('kSession')
const kOnReady = Symbol('kOnReady')
const kOnSignal = Symbol('kOnSignal')
const kOffSignal = Symbol('kOffSignal')
const kStartTimer = Symbol('kStartTimer')
const kTimeout = Symbol('kTimeout')
module.exports = class BrowserContext extends EventEmitter {
constructor (browser, options) {
super()
this[kBrowser] = browser
this[kCwd] = path.resolve(options.cwd || '.')
this[kLive] = !!options.live
this[kAnnotate] = options.annotate !== false
this[kErrors] = []
this[kReady] = false
this[kDestroyed] = false
this[kSession] = null
this[kOffSignal] = null
this[kStartTimer] = null
this[kTimeout] = options.live ? 0 : ms(options.timeout || '5m')
}
get live () {
return this[kLive]
}
get browser () {
return this[kBrowser]
}
run (callback) {
if (this[kDestroyed]) return
debug('opening %o', this[kBrowser].title)
this.once(kDestroyed, callback)
this[kBrowser].open((err) => {
if (this[kDestroyed]) return
if (err) return this.destroy(err)
if (!this[kSession]) {
// Browser must connect within timeout. Don't put a timeout on
// open(), because close() waits for opening to complete, so
// we would leak resources if closing also times out.
this[kStartTimer] = timeout.optional(() => {
const msg = `Browser '${this[kBrowser].title}' did not connect`
const err = new timeout.Error(msg, this[kTimeout])
this.destroy(err)
}, this[kTimeout])
}
this[kBrowser].on('error', (err) => this.destroy(err))
this[kOffSignal] = onSignal(this[kOnSignal], this)
this[kReady] = true
this.emit(kReady)
})
}
destroy (err) {
if (this[kDestroyed]) return
if (err) this[kErrors].push(err)
if (this[kOffSignal]) this[kOffSignal]()
if (this[kSession]) this[kSession].destroy()
if (this[kStartTimer]) clearTimeout(this[kStartTimer])
this[kDestroyed] = true
this.removeAllListeners('reload')
let called = 0
const onclose = (err) => {
if (called++) return
if (err) this[kErrors].push(err)
if (timer) clearTimeout(timer)
if (this[kErrors].length) {
this.emit(kDestroyed, combine(this[kErrors]))
} else if (!this[kSession] || !this[kSession].stats.count) {
// Indicates potential error to even run tests
this.emit(kDestroyed, transient(new Error('Premature close')))
} else {
// The last session dictates the final result
this.emit(kDestroyed, null, this[kSession].stats)
}
}
const timer = timeout.optional(() => {
const msg = `Browser '${this[kBrowser].title}' did not close`
const err = new timeout.Error(msg, this[kTimeout])
onclose(err)
}, this[kTimeout])
debug('closing %o', this[kBrowser].title)
this[kBrowser].close(onclose)
}
reload () {
if (!this[kDestroyed]) {
debug('reload %o', this[kBrowser].title)
this.emit('reload')
}
}
createSession (callback) {
if (this[kDestroyed]) return
// Abort previous session if any
if (this[kSession]) this[kSession].destroy()
const title = this[kBrowser].title
const session = new BrowserSession(title, this[kTimeout], this[kCwd], this[kAnnotate])
// Keep track of current session
this[kSession] = session
// Once session is done, close browser if errored or not in live mode. In
// the latter case there can only be one session per browser and context.
osc(session, (err) => {
if (this[kSession] !== session) {
debug('old session of %o was closed: %O', title, err)
} else if (err) {
this.destroy(err)
} else if (!this[kLive] && !this[kDestroyed]) {
this[kBrowser].setStatus(this[kSession].stats.ok, (err) => {
this.destroy(err)
})
}
})
// Client may connect before browser.open() completes. In that case,
// defer starting the tests so that the order of events is consistent.
this[kOnReady](() => {
if (session !== this[kSession]) {
debug('session of %o was replaced before context was ready', title)
} else if (session.destroyed) {
debug('session of %o was destroyed before context was ready', title)
} else if (this[kDestroyed]) {
debug('context of %o was destroyed before it was ready', title)
} else {
debug('ready %o', title)
if (this[kStartTimer]) clearTimeout(this[kStartTimer])
this.emit('session', session)
callback(session)
}
})
}
[kOnReady] (fn) {
if (this[kReady]) process.nextTick(fn)
else this.once(kReady, fn)
}
// TODO: move to cli
[kOnSignal] (name, signal) {
const err = new Error(`Received signal ${name}`)
Object.defineProperty(err, 'exitCode', { value: 128 + signal })
Object.defineProperty(err, 'expected', { value: true })
this.destroy(err)
}
}
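// Attach one-shot handlers for termination signals; returns a function that detaches them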
function onSignal (fn, thisArg) {
const hup = fn.bind(thisArg, 'SIGHUP', 1)
const int = fn.bind(thisArg, 'SIGINT', 2)
const term = fn.bind(thisArg, 'SIGTERM', 15)
process.once('SIGHUP', hup)
process.once('SIGINT', int)
process.once('SIGTERM', term)
return function detach () {
process.removeListener('SIGHUP', hup)
process.removeListener('SIGINT', int)
process.removeListener('SIGTERM', term)
}
}
<|start_filename|>test/unit/browser-context.js<|end_filename|>
'use strict'
const test = require('tape')
const Browser = require('abstract-browser')
const BrowserContext = require('../../lib/browser-context')
test('context can be destroyed', function (t) {
t.plan(13)
run(function (browser, context, callback) {
context.destroy(new Error('test'))
callback()
}, 'test', true, '5m')
run(function (browser, context, callback) {
callback()
context.destroy(new Error('test'))
}, 'test', true, '5m')
run(function (browser, context, callback) {
context.createSession(function (session) {
session.on('close', () => t.pass('session closed'))
session.on('complete', () => t.fail('session should not complete'))
context.destroy(new Error('test'))
})
callback()
}, 'test', true, '5m')
run(function (browser, context, callback) {
context.createSession(function (session) {
session.on('close', () => t.pass('session closed'))
session.on('complete', () => t.fail('session should not complete'))
session.destroy(new Error('test'))
})
callback()
}, 'test', true, '5m')
run(function (browser, context, callback) {
callback()
setImmediate(() => {
browser.emit('error', new Error('test'))
})
}, 'test', true, '5m')
run(function (browser, context, callback) {
callback(new Error('test'))
}, 'test', false, '5m')
function run (fn, expectedError, expectClose, timeout) {
class MockBrowser extends Browser {
_open (callback) {
fn(browser, context, callback)
}
_close (callback) {
if (expectClose) t.pass('browser closed')
callback()
}
}
const manifest = { name: 'test' }
const target = { url: 'http://localhost' }
const browser = new MockBrowser(manifest, target)
const context = new BrowserContext(browser, {
cwd: '.',
live: false,
timeout
})
context.run(function (err, stats) {
t.is(err.message, expectedError)
})
}
})
<|start_filename|>lib/watchify-options.js<|end_filename|>
'use strict'
// Get browserify options required for watchify
module.exports = function () {
return {
cache: {},
packageCache: {},
fullPaths: true
}
}
<|start_filename|>lib/timeout.js<|end_filename|>
'use strict'
const humanizeDuration = require('humanize-duration')
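// Schedule fn only if ms is a positive, finite number; returns the timer (or undefined)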
exports.optional = function (fn, ms) {
if (ms > 0 && ms < Infinity) {
return setTimeout(fn, ms)
}
}
exports.Error = class TimeoutError extends Error {
constructor (message, ms) {
super(`${message} (${humanizeDuration(ms)})`)
Object.defineProperty(this, 'name', { value: 'TimeoutError' })
Object.defineProperty(this, 'expected', { value: true })
}
}
<|start_filename|>client/index.css<|end_filename|>
code {
display: block;
white-space: pre-wrap;
}
<|start_filename|>test/unit/message-server.js<|end_filename|>
'use strict'
const test = require('tape')
const engineClient = require('engine.io-client')
const EventEmitter = require('events')
const MessageServer = require('../../lib/message-server')
class MockContext extends EventEmitter {
constructor () {
super()
this.destroyed = false
}
createSession (callback) {
process.nextTick(callback, new MockSession())
}
}
class MockSession extends EventEmitter {
destroy () {
// ..
}
}
test('message server closes open connections', function (t) {
t.plan(5)
const server = new MessageServer()
server.open(function (err) {
t.ifError(err, 'no open error')
t.ok(Number.isInteger(server.port) && server.port > 0, 'has port')
const url = `ws://localhost:${server.port}`
const cid = 'fake'
const mockContext = new MockContext()
server.register(cid, mockContext)
const socket = engineClient(url, {
path: '/airtap/msg',
extraHeaders: {
'x-airtap-context-id': cid
}
})
socket.on('open', function () {
t.pass('opened')
socket.on('close', function () {
t.pass('closed')
})
server.close(function (err) {
t.ifError(err, 'no close error')
})
})
})
})
test('message server closes if connections were already closed', function (t) {
t.plan(5)
const server = new MessageServer()
server.open(function (err) {
t.ifError(err, 'no open error')
t.ok(Number.isInteger(server.port) && server.port > 0, 'has port')
const url = `ws://localhost:${server.port}`
const cid = 'fake'
const mockContext = new MockContext()
server.register(cid, mockContext)
const socket = engineClient(url, {
path: '/airtap/msg',
extraHeaders: {
'x-airtap-context-id': cid
}
})
socket.on('open', function () {
t.pass('opened')
socket.on('close', function () {
t.pass('closed')
server.close(function (err) {
t.ifError(err, 'no close error')
})
})
socket.close()
})
})
})
test('message server closes connection without valid cid', function (t) {
t.plan(3)
const server = new MessageServer()
server.open(function (err) {
t.ifError(err, 'no open error')
const socket = engineClient(`ws://localhost:${server.port}`, {
path: '/airtap/msg'
})
socket.on('close', function () {
t.pass('closed')
server.close(function (err) {
t.ifError(err, 'no close error')
})
})
})
})
<|start_filename|>test/unit/fixtures/bundler/foo-to-bar-transform.js<|end_filename|>
const { Transform } = require('readable-stream')
module.exports = function (file) {
return new Transform({
transform (buf, enc, next) {
this.push(buf.toString('utf8').replace(/foo/g, 'bar'))
next()
}
})
}
<|start_filename|>test/index.js<|end_filename|>
require('./unit/bundler')
require('./unit/cli')
require('./unit/coverage')
require('./unit/stats')
require('./unit/timeout')
require('./unit/test')
require('./unit/browser-context')
require('./unit/browser-session')
require('./unit/content-server')
require('./unit/message-server')
// TODO
// require('./unit/proxy-server')
// require('./unit/support-server')
if (!process.env.CI) {
const test = require('tape')
const integration = require('./integration')
const Default = require('airtap-default')
integration(test, Default)
}
<|start_filename|>lib/message-server.js<|end_filename|>
'use strict'
const http = require('http')
const Engine = require('engine.io')
const debug = require('debug')('airtap:message-server')
const Nanoresource = require('nanoresource')
const enableDestroy = require('server-destroy')
const kServer = Symbol('kServer')
const kEngine = Symbol('kEngine')
const kContexts = Symbol('kContexts')
const kOnConnection = Symbol('kOnConnection')
module.exports = class MessageServer extends Nanoresource {
constructor () {
super()
this[kServer] = http.createServer()
this[kEngine] = Engine.attach(this[kServer], { path: '/airtap/msg' })
this[kContexts] = new Map()
this[kEngine].on('connection', this[kOnConnection].bind(this))
enableDestroy(this[kServer])
}
_open (callback) {
this[kServer].listen(0, () => {
debug('active on port %o', this.port)
callback()
})
}
get port () {
return this[kServer].address().port
}
[kOnConnection] (socket) {
const cid = socket.request.headers['x-airtap-context-id']
const context = this[kContexts].get(cid)
if (context == null || context.destroyed) {
debug('client connected to old or non-existing context: %o', cid)
socket.close()
return
}
context.createSession(function (session) {
if (socket.readyState !== 'open') {
debug('client disconnected before session could start')
return
}
startSession(socket, session, context)
})
}
register (cid, context) {
this[kContexts].set(cid, context)
}
deregister (cid) {
this[kContexts].delete(cid)
}
_close (callback) {
// Forcefully close sockets
for (const k in this[kEngine].clients) {
this[kEngine].clients[k].closeTransport(true)
}
if (this[kEngine].ws) {
this[kEngine].ws.close()
}
// Terminate connections and close server
this[kServer].destroy(callback)
}
}
function startSession (socket, session, context) {
socket.on('message', onMessage)
socket.on('close', onCloseSocket)
session.on('complete', onComplete)
session.on('close', onCloseSession)
context.on('reload', onReload)
socket.send(JSON.stringify({ type: 'start' }))
function onMessage (json) {
const msg = JSON.parse(json)
if (session.destroyed) {
debug('received message for destroyed session: %O', msg)
return
}
session.write(msg)
}
function onReload () {
socket.send(JSON.stringify({ type: 'reload' }))
}
function onCloseSocket () {
socket.removeListener('message', onMessage)
socket.removeListener('close', onCloseSocket)
session.removeListener('complete', onComplete)
session.removeListener('close', onCloseSession)
context.removeListener('reload', onReload)
session.destroy()
}
function onCloseSession () {
session.removeListener('complete', onComplete)
session.removeListener('close', onCloseSession)
}
function onComplete (stats) {
const supports = context.browser.manifest.supports
socket.send(JSON.stringify({
type: 'end',
ok: stats.ok,
live: context.live,
selfclosing: supports.selfclosing
}))
}
}
<|start_filename|>test/unit/coverage.js<|end_filename|>
'use strict'
const test = require('tape')
const tempy = require('tempy')
const fs = require('fs')
const path = require('path')
const cc = require('../../lib/coverage')
test('write and clean coverage', function (t) {
t.plan(3)
const cwd = tempy.directory()
const nyc = path.join(cwd, '.nyc_output')
cc.write(cwd, { fake: 123 }, function (err) {
t.ifError(err)
t.same(fs.readdirSync(nyc), [
'airtap-0772e0c7c3987f74bb8a740830f7ed2a462a3067.json'
])
fs.writeFileSync(path.join(nyc, 'other'), '')
cc.clean(cwd)
t.same(fs.readdirSync(nyc), ['other'])
})
})
for (const arg of [null, undefined, {}]) {
test(`coverage is ignored if it's ${JSON.stringify(arg)}`, function (t) {
t.plan(2)
const cwd = tempy.directory()
const nyc = path.join(cwd, '.nyc_output')
cc.write(cwd, arg, function (err) {
t.ifError(err)
fs.readdir(nyc, function (err) {
t.is(err.code, 'ENOENT')
})
})
})
}
<|start_filename|>test/unit/test.js<|end_filename|>
'use strict'
const test = require('tape')
const Test = require('../../lib/test')
test('test', function (t) {
t.plan(17)
const test = new Test()
t.is(test.destroyed, false)
t.is(test.stats.count, 0)
t.is(test.stats.pass, 0)
t.is(test.stats.fail, 0)
t.is(test.stats.ok, true)
test.aggregate({ ok: true })
t.is(test.stats.count, 1)
t.is(test.stats.pass, 1)
t.is(test.stats.fail, 0)
t.is(test.stats.ok, true)
test.aggregate({ ok: false })
t.is(test.stats.count, 2)
t.is(test.stats.pass, 1)
t.is(test.stats.fail, 1)
t.is(test.stats.ok, false)
test.on('complete', function (stats) {
t.ok(stats === test.stats)
t.is(test.destroyed, false)
process.nextTick(function () {
t.is(test.destroyed, true)
test.on('close', function () {
t.pass('emitted close in next tick')
})
})
})
test.complete()
})
test('destroy test', function (t) {
t.plan(2)
const test = new Test()
test.on('complete', function () {
t.fail('should not be called')
})
test.on('error', function (err) {
t.is(err.message, 'test')
test.on('close', function () {
t.pass('closed')
})
})
test.destroy(new Error('test'))
test.complete()
})
<|start_filename|>lib/stats.js<|end_filename|>
'use strict'
const kOk = Symbol('kOk')
module.exports = class Stats {
constructor (ok, pass, fail) {
this[kOk] = ok
this.pass = pass
this.fail = fail
}
get ok () {
return this[kOk] && this.fail === 0
}
set ok (value) {
this[kOk] = value
}
get count () {
return this.pass + this.fail
}
}
<|start_filename|>lib/content-server.js<|end_filename|>
'use strict'
const express = require('express')
const compression = require('compression')
const browserify = require('browserify')
const watchify = require('watchify')
const enableDestroy = require('server-destroy')
const Nanoresource = require('nanoresource/emitter')
const debug = require('debug')('airtap:content-server')
const path = require('path')
const fs = require('fs')
const bundler = require('./bundler')
const watchifyOptions = require('./watchify-options')
const clientDir = path.resolve(__dirname, '../client')
const clientHtml = fs.readFileSync(path.join(clientDir, 'index.html'))
const kApp = Symbol('kApp')
const kServer = Symbol('kServer')
const kBundlers = Symbol('kBundlers')
module.exports = class ContentServer extends Nanoresource {
constructor (files, options) {
super()
// Speed up repeated bundle() calls
const enableWatchify = options.watchify !== false
const wrap = (b) => enableWatchify ? watchify(b, { ignoreWatch: true }) : b
const { cwd, coverage } = options
// Create a bundler for the user's test files
const testBundler = wrap(bundler(files, cwd, options.browserify, coverage))
// Create a bundler for our airtap client
const clientPath = path.join(clientDir, 'index.js')
const clientOptions = { debug: true, ...watchifyOptions() }
const clientBundler = wrap(browserify(clientPath, clientOptions))
// Prevent errors in IE < 11; buffer is not actually used.
clientBundler.ignore('buffer')
// Notify clients of changes
if (enableWatchify && options.live) {
testBundler.on('update', () => { this.emit('update') })
}
this[kServer] = null
this[kBundlers] = enableWatchify ? [testBundler, clientBundler] : []
this[kApp] = express()
this[kApp].use(compression())
this[kApp].get('/airtap', function (req, res) {
res.set('Content-Type', 'text/html')
res.send(clientHtml)
})
this[kApp].get('/airtap/client.js', function (req, res) {
res.set('Content-Type', 'application/javascript')
res.set('Cache-Control', 'no-cache')
clientBundler.bundle().on('error', bundleError).pipe(res)
})
this[kApp].get('/airtap/test.js', function (req, res) {
res.set('Content-Type', 'application/javascript')
res.set('Cache-Control', 'no-cache')
testBundler.bundle().on('error', bundleError).pipe(res)
})
this[kApp].use('/airtap', express.static(clientDir))
function bundleError (err) {
console.error(err)
if (!options.live) process.exit(1)
}
}
get port () {
return this[kServer].address().port
}
_open (callback) {
this[kServer] = this[kApp].listen(0, () => {
debug('active on port %o', this.port)
callback()
})
enableDestroy(this[kServer])
}
_close (callback) {
// Terminate connections and close server
this[kServer].destroy((err) => {
// Stop watching
for (const bundler of this[kBundlers]) {
bundler.close()
}
callback(err)
})
}
}
<|start_filename|>test/integration/fixtures/timeout/test.js<|end_filename|>
var test = require('tape')
test('timeout', { timeout: 60e3 }, function (t) {
t.plan(1)
})
<|start_filename|>lib/support-server.js<|end_filename|>
'use strict'
const spawn = require('child_process').spawn
const http = require('http')
const parseCmd = require('shell-quote').parse
const Nanoresource = require('nanoresource')
const debug = require('debug')('airtap:support-server')
const path = require('path')
const kProcess = Symbol('kProcess')
module.exports = class SupportServer extends Nanoresource {
constructor (options) {
super()
if (typeof options === 'string' || Array.isArray(options)) {
options = { cmd: options }
}
this.cmd = options.cmd
this.cwd = path.resolve(options.cwd || '.')
this.wait = options.wait || 1e3
this.port = null
this[kProcess] = null
}
_open (callback) {
getOpenPort((err, port) => {
if (err) return callback(err)
// Stdout is reserved for TAP
const stdio = ['ignore', 2, 2]
const vars = { AIRTAP_SUPPORT_PORT: port }
const cmd = Array.isArray(this.cmd) ? this.cmd.slice() : parseCmd(this.cmd, vars)
const env = { ...process.env, ...vars }
if (cmd[0].endsWith('.js')) {
cmd.unshift(process.execPath)
} else if (process.platform === 'win32' && isNpm(cmd[0])) {
cmd[0] = cmd[0] + '.cmd'
}
this[kProcess] = spawn(cmd[0], cmd.slice(1), { cwd: this.cwd, stdio, env })
this[kProcess].on('exit', (code) => {
// No opinion on whether this constitutes an error
debug('exited with code %o', code)
this[kProcess] = null
})
this.port = port
debug('active on port %o, pid %o', port, this[kProcess].pid)
setTimeout(callback, this.wait)
})
}
_close (callback) {
if (this[kProcess] === null) {
return callback()
}
debug('closing pid %d', this[kProcess].pid)
this[kProcess].once('exit', () => callback())
this[kProcess].kill()
}
}
function isNpm (command) {
return command === 'npm' || command === 'npx'
}
function getOpenPort (callback) {
http.createServer().listen(0, function () {
const port = this.address().port
this.close(function () {
callback(null, port)
})
})
}
<|start_filename|>lib/tunnels.js<|end_filename|>
'use strict'
const parallel = require('run-parallel-settled')
const Collection = require('nanoresource-collection')
const Nanoresource = require('nanoresource')
const kProviders = Symbol('kProviders')
const kDomains = Symbol('kDomains')
const kCollection = Symbol('kCollection')
// Launch a tunnel per provider (if supported) for browsers that want it
module.exports = class Tunnels extends Nanoresource {
constructor (multiProvider, manifests, loopback) {
super()
const wanted = manifests.filter(m => m.wants.tunnel)
const ids = new Set(wanted.map(m => m.provider))
this[kProviders] = Array.from(ids).map(id => multiProvider.get(id))
this[kDomains] = ['localhost', 'airtap.local']
this[kCollection] = null
if (loopback && !this[kDomains].includes(loopback)) {
this[kDomains].push(loopback)
}
}
_open (callback) {
if (this[kProviders].length === 0) {
return callback()
}
    // Group tunnels into a collection that is closed as one
this[kCollection] = new Collection({ opened: true })
const tasks = this[kProviders].map(provider => next => {
provider.tunnel({ domains: this[kDomains] }, (err, tunnel) => {
if (err) return next(err)
if (tunnel) this[kCollection].push(tunnel)
next()
})
})
parallel(tasks, 4, (err) => {
if (err) return this[kCollection].destroy(err, callback)
callback()
})
}
_close (callback) {
if (this[kCollection] === null) {
callback()
} else {
this[kCollection].close(callback)
}
}
}
<|start_filename|>test/unit/bundler.js<|end_filename|>
'use strict'
const test = require('tape')
const path = require('path')
const bundler = require('../../lib/bundler')
const fixtures = path.join(__dirname, 'fixtures/bundler')
// test ensures browserify configuration is applied in order
// https://github.com/defunctzombie/zuul/issues/177
// entry file in this test starts off as:
// console.log('foo')
// if the configuration is applied in order, the result will be:
// console.log('qux')
// if not, it will likely be
// console.log('bar')
test('bundler', function (t) {
const configs = [
{ transform: path.join(fixtures, 'foo-to-bar-transform') },
{ plugin: path.join(fixtures, 'bar-to-baz-plugin') },
{ transform: path.join(fixtures, 'baz-to-qux-transform') }
]
const files = [path.join(fixtures, 'entry.js')]
const b = bundler(files, '.', configs, false)
b.bundle(function (err, src) {
t.ifError(err, 'no bundle error')
t.notEqual(src.indexOf("console.log('qux')"), -1)
t.end()
})
})
<|start_filename|>lib/browser-session.js<|end_filename|>
'use strict'
const { Transform } = require('readable-stream')
const transient = require('transient-error')
const debug = require('debug')('airtap:browser-session')
const format = require('util').format
const Parser = require('tap-completed')
const timeout = require('./timeout')
const Stats = require('./stats')
const cc = require('./coverage')
const kSuffix = Symbol('kSuffix')
const kTitle = Symbol('kTitle')
const kSawLine = Symbol('kSawLine')
const kAnnotate = Symbol('kAnnotate')
const kParser = Symbol('kParser')
const kIdleTimer = Symbol('kIdleTimer')
const kResetIdleTimer = Symbol('kResetIdleTimer')
const kTimeout = Symbol('kTimeout')
const kClientErrored = Symbol('kClientErrored')
const kCwd = Symbol('kCwd')
let seq = 0
class BrowserSession extends Transform {
constructor (title, timeout, cwd, annotate) {
super({
objectMode: true,
// Required until readable-stream updates to node 14
autoDestroy: true
})
this.stats = new Stats(false, 0, 0)
// Would like to merge TAP of multiple browsers into one TAP stream, but
// there's no standardized way to format that. The closest thing is indenting
// subtests in the style of node-tap but that doesn't work for parallel tests
// unless we buffer results which is not ideal for an entire test suite. For
// now, append a sequence number to lines so at least humans can identify
// which lines are from which browser.
this[kSuffix] = `[${++seq}]`
this[kTitle] = title
this[kSawLine] = false
this[kAnnotate] = annotate !== false
this[kIdleTimer] = null
this[kTimeout] = timeout
this[kClientErrored] = false
this[kCwd] = cwd
// Don't care about diagnostics after completion, don't wait.
this[kParser] = Parser({ wait: 0 })
this[kResetIdleTimer]()
this[kParser].on('complete', (results) => {
this.stats.ok = results.ok
this.stats.pass = results.pass
this.stats.fail = results.fail
debug('session %o complete (%s)', this[kTitle], this.stats.ok ? 'ok' : 'not ok')
this.emit('complete', this.stats)
})
debug('session %o', this[kTitle])
}
_destroy (reason, cb) {
if (reason) debug('destroy %o: %O', this[kTitle], reason)
clearTimeout(this[kIdleTimer])
this[kParser].destroy()
cb(reason)
}
[kResetIdleTimer] () {
if (this[kIdleTimer]) {
this[kIdleTimer].refresh()
return
}
this[kIdleTimer] = timeout.optional(() => {
const msg = `Did not receive output from '${this[kTitle]}'`
const err = new timeout.Error(msg, this[kTimeout])
// Retry unless the last thing we saw was an error from the client
if (!this[kClientErrored]) transient(err)
this.destroy(err)
}, this[kTimeout])
}
_transform (msg, enc, next) {
this[kResetIdleTimer]()
this[kClientErrored] = false
if (msg.type === 'console') {
const line = format(...msg.args) + '\n'
if (msg.level !== 'log') {
process.stderr.write(`stderr (${this[kTitle]}): ${line}`)
return next()
} else if (line === 'Bail out!') {
return next(new Error('Bail out'))
} else if (this[kAnnotate] && !this[kSawLine]) {
this[kSawLine] = true
this.push(`${line}# ${this[kTitle]} ${this[kSuffix]}\n`)
} else if (this[kAnnotate] && /^(#|ok|not ok) /.test(line)) {
this.push(line.replace(/\r?\n/, ' ' + this[kSuffix] + '\n'))
} else {
this.push(line)
}
if (!this[kParser].write(line)) {
this[kParser].once('drain', next)
} else {
next()
}
} else if (msg.type === 'error' && msg.fatal) {
next(new Error(String(msg.message || 'Client error')))
} else if (msg.type === 'error') {
this[kClientErrored] = true
const { type, ...rest } = msg
console.error(`client error (${this[kTitle]}): ${format(rest)}`)
next()
} else if (msg.type === 'end') {
debug('ending %o', this[kTitle])
cc.write(this[kCwd], msg.coverage, (err) => {
if (err) return next(err)
this.end()
next()
})
} else {
// TODO: why doesn't this work?
// next(new Error('Unknown message type'))
this.destroy(new Error('Unknown message type'))
}
}
}
module.exports = BrowserSession
// For unit tests. Should move state elsewhere.
BrowserSession.reset = function () {
seq = 0
}
<|start_filename|>lib/coverage.js<|end_filename|>
'use strict'
const crypto = require('crypto')
const path = require('path')
const fs = require('fs')
const prefix = 'airtap-'
exports.write = function (cwd, coverage, callback) {
if (!coverage || Object.keys(coverage).length === 0) {
return process.nextTick(callback)
}
const dir = basedir(cwd)
const json = JSON.stringify(coverage)
const digest = crypto.createHash('sha1').update(json).digest('hex')
const fp = path.join(dir, prefix + digest + '.json')
fs.mkdir(dir, { recursive: true }, function (err) {
if (err) return callback(err)
fs.writeFile(fp, json, callback)
})
}
exports.clean = function (cwd) {
const dir = basedir(cwd)
for (const file of readdirSync(dir)) {
if (file.startsWith(prefix)) {
unlinkSync(path.join(dir, file))
}
}
}
function basedir (cwd) {
return path.join(cwd, '.nyc_output')
}
function readdirSync (dir) {
try {
return fs.readdirSync(dir)
} catch (err) {
if (err.code !== 'ENOENT') {
process.emitWarning(err, 'AirtapWarning')
}
return []
}
}
function unlinkSync (fp) {
try {
fs.unlinkSync(fp)
} catch (err) {
if (err.code !== 'ENOENT') {
process.emitWarning(err, 'AirtapWarning')
}
}
}
<|start_filename|>test/unit/content-server.js<|end_filename|>
'use strict'
const test = require('tape')
const get = require('simple-get')
const fs = require('fs')
const path = require('path')
const ContentServer = require('../../lib/content-server')
test('content server', function (t) {
t.plan(23)
const server = new ContentServer([], {
cwd: '.',
watchify: false,
live: false
})
server.open(function (err) {
t.ifError(err, 'no open error')
t.ok(Number.isInteger(server.port) && server.port > 0, 'has port')
const url = `http://localhost:${server.port}`
server.active()
get.concat(`${url}/airtap`, function (err, res, data) {
t.ifError(err, 'no get error')
t.is(res.statusCode, 200)
t.is(res.headers['content-type'], 'text/html; charset=utf-8')
t.same(data, fs.readFileSync(path.resolve(__dirname, '../../client/index.html')))
server.inactive()
})
server.active()
get.concat(`${url}/airtap/client.js`, function (err, res, data) {
t.ifError(err, 'no get error')
t.is(res.statusCode, 200)
t.is(res.headers['content-type'], 'application/javascript; charset=utf-8')
t.ok(data.length)
server.inactive()
})
server.active()
get.concat(`${url}/airtap/test.js`, function (err, res, data) {
t.ifError(err, 'no get error')
t.is(res.statusCode, 200)
t.is(res.headers['content-type'], 'application/javascript; charset=utf-8')
t.ok(data.length)
server.inactive()
})
server.active()
get.concat(`${url}/airtap/favicon.ico`, function (err, res, data) {
t.ifError(err, 'no get error')
t.is(res.statusCode, 200)
t.is(res.headers['content-type'], 'image/x-icon')
t.same(data, fs.readFileSync(path.resolve(__dirname, '../../client/favicon.ico')))
server.inactive()
})
server.active()
get.concat(`${url}/airtap/nope`, function (err, res) {
t.ifError(err, 'no get error')
t.is(res.statusCode, 404)
server.inactive()
})
server.active()
get.concat(url, function (err, res) {
t.ifError(err, 'no get error')
t.is(res.statusCode, 404)
server.inactive()
})
server.close(function (err) {
t.ifError(err, 'no close error')
})
})
})
<|start_filename|>test/unit/fixtures/bundler/baz-to-qux-transform.js<|end_filename|>
const { Transform } = require('readable-stream')
module.exports = function (file) {
return new Transform({
transform (buf, enc, next) {
this.push(buf.toString('utf8').replace(/baz/g, 'qux'))
next()
}
})
}
<|start_filename|>test/integration/fixtures/tap/test.js<|end_filename|>
var test = require('tape')
// https://github.com/defunctzombie/zuul/issues/145
test('ok', function (t) {
t.pass()
t.end()
})
// https://github.com/defunctzombie/zuul/issues/145
test('fail', function (t) {
t.ok(false)
t.end()
})
test('suite', function (t) {
t.ok(true, 'yeah')
t.ok(false, 'WOOPS')
t.fail(false)
t.end()
})
test('pass', function (t) {
t.pass()
t.end()
})
test('plan', function (t) {
t.plan(1)
setTimeout(function () {
t.equal(true, true, 'true is true AWESOME')
}, 10)
})
test('failed plan', { timeout: 200 }, function (t) {
t.plan(2)
t.ok(true, 'one assert')
})
// nothing to be done
test.skip('skipped', function (t) {
t.ok(false)
})
test('error', function (t) {
t.ifError(new Error('test'))
t.end()
})
// test console still ok
console.log({ hey: 'you' })
console.log(1, 2, [3, 4])
<|start_filename|>test/unit/timeout.js<|end_filename|>
'use strict'
const test = require('tape')
const timeout = require('../../lib/timeout')
test('timeout', function (t) {
t.plan(5)
t.notOk(timeout.optional(function () {
t.fail('should not be called')
}, 0))
t.notOk(timeout.optional(function () {
t.fail('should not be called')
}, -1))
t.notOk(timeout.optional(function () {
t.fail('should not be called')
}, Infinity))
t.ok(timeout.optional(function () {
t.pass()
}, 1))
})
test('timeout error', function (t) {
const err = new timeout.Error('foo', 200)
t.is(err.message, 'foo (0.2 seconds)')
t.is(err.name, 'TimeoutError')
t.is(err.expected, true)
t.end()
})
<|start_filename|>lib/airtap.js<|end_filename|>
'use strict'
const parallel = require('run-parallel-settled')
const Collection = require('nanoresource-collection')
const combine = require('maybe-combine-errors')
const uuid = require('uuid').v4
const Multi = require('airtap-multi')
const path = require('path')
const debug = require('debug')('airtap')
const Test = require('./test')
const BrowserContext = require('./browser-context')
const ContentServer = require('./content-server')
const SupportServer = require('./support-server')
const MessageServer = require('./message-server')
const ProxyServer = require('./proxy-server')
const Tunnels = require('./tunnels')
const cc = require('./coverage')
const kMulti = Symbol('kMulti')
module.exports = class Airtap {
constructor () {
this[kMulti] = new Multi()
}
provider (...args) {
this[kMulti].add(...args)
}
manifests (...args) {
return this[kMulti].manifests(...args)
}
test (manifests, files, config) {
const cwd = path.resolve(config.cwd || '.')
const annotate = 'annotate' in config ? config.annotate : manifests.length > 1
const { watchify, browserify, coverage, live, timeout, loopback, tunnel } = config
cc.clean(cwd)
// Create servers
const contentOptions = { cwd, watchify, browserify, coverage, live }
const contentServer = new ContentServer(files, contentOptions)
const messageServer = new MessageServer()
const supportServer = config.server && new SupportServer(config.server)
const tunnels = tunnel !== false && new Tunnels(this[kMulti], manifests, loopback)
    // Group into a collection that is opened and closed as one
const resources = [contentServer, messageServer, supportServer, tunnels]
const collection = new Collection(resources.filter(Boolean))
const test = new Test()
collection.open((err) => {
if (err) return test.destroy(err)
// Put servers behind a per-browser proxy to avoid CORS restrictions
// and to route client messages to the corresponding context (below).
const contentPort = contentServer.port
const messagePort = messageServer.port
const supportPort = supportServer ? supportServer.port : null
const proxyOptions = { loopback, contentPort, messagePort, supportPort }
const tasks = manifests.map(manifest => next => {
const cid = uuid()
const proxyServer = new ProxyServer(manifest, cid, proxyOptions)
proxyServer.open((err) => {
if (err) return next(err)
const target = { url: proxyServer.url }
const browser = this[kMulti].browser(manifest, target)
const context = new BrowserContext(browser, { cwd, live, timeout, annotate })
const reload = context.reload.bind(context)
messageServer.register(cid, context)
test.emit('context', context)
contentServer.on('update', reload)
// Open browser and run tests
context.run(function (runErr, stats) {
if (!runErr) test.aggregate(stats)
contentServer.removeListener('update', reload)
messageServer.deregister(cid)
proxyServer.close(onclose)
function onclose (closeErr) {
next(combine([runErr, closeErr]))
}
})
})
})
// Retry on transient errors
// TODO: don't retry if we already saw failed tests
const retries = config.retries != null ? parseInt(config.retries, 10) : 6
const withRetry = tasks.map(t => next => { retry(t, retries, next) })
// Test browsers concurrently if all of them support it
const concurrent = manifests.every(m => m.supports.concurrency !== false)
const concurrency = concurrent ? parseInt(config.concurrency || 5, 10) : 1
parallel(withRetry, concurrency, function (runErr) {
debug('closing')
collection.close(function (closeErr) {
if (runErr || closeErr) {
test.destroy(combine([runErr, closeErr]))
} else {
test.complete()
}
})
})
})
return test
}
}
function retry (task, retries, callback) {
task(function (err, ...rest) {
if (err && err.transient && retries > 0) {
console.error('Retrying due to', err)
return setTimeout(function () {
retry(task, retries - 1, callback)
}, 1e3)
}
callback(err, ...rest)
})
}
<|start_filename|>client/index.js<|end_filename|>
var load = require('load-script')
var engineClient = require('engine.io-client')
// Without Developer Tools open, console is undefined in IE9.
if (typeof global.console === 'undefined') {
global.console = {}
}
var container = document.getElementById('airtap')
var colors = { pending: '#e4a533', fail: '#d83131', ok: '#69cf69' }
var socket = engineClient('ws://' + window.location.host, { path: '/airtap/msg' })
var started = false
socket.on('open', function () {
socket.on('message', function (json) {
var msg = JSON.parse(json)
if (msg.type === 'start') {
started = true
status(colors.pending)
load('/airtap/test.js', function (err) {
if (err) {
status(colors.fail)
send({ type: 'error', fatal: true, message: 'Failed to load test.js' })
}
})
} else if (msg.type === 'end') {
started = false
status(msg.ok ? colors.ok : colors.fail)
send({ type: 'end', coverage: window.__coverage__ }, function () {
if (!msg.live) socket.close()
if (!msg.live && msg.selfclosing && window.close) window.close()
})
} else if (msg.type === 'reload') {
window.location.reload()
}
})
global.console.log = wrap(global.console.log, 'log')
global.console.error = wrap(global.console.error, 'error')
window.onerror = onerror
function send (msg, ondrain) {
socket.send(JSON.stringify(msg), ondrain)
}
function status (color) {
document.body.style.backgroundColor = color
}
function wrap (original, level) {
return function log () {
var args = [].slice.call(arguments)
// Useful for browsers that don't have a console
var code = container.appendChild(document.createElement('code'))
code.textContent = args.join(' ')
if (started) {
send({ type: 'console', level: level, args: args })
}
// In IE9 this is an object that doesn't have Function.prototype.apply
if (typeof original === 'function') {
return original.apply(this, arguments)
}
}
}
function onerror (message, source, lineno, colno, error) {
if (!started) return
send({
type: 'error',
message: message,
source: source,
lineno: lineno,
colno: colno,
error: {
name: error && error.name,
stack: error && error.stack
}
})
}
})
| jzombie/airtap |
<|start_filename|>app/src/main/java/de/example/websockettutorial/MainActivity.kt<|end_filename|>
package de.example.websockettutorial
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.util.Log
import kotlinx.android.synthetic.main.activity_main.*
import org.java_websocket.client.WebSocketClient
import org.java_websocket.handshake.ServerHandshake
import org.json.JSONException
import org.json.JSONObject
import java.lang.Exception
import java.net.URI
import javax.net.ssl.SSLSocketFactory
class MainActivity : AppCompatActivity() {
private lateinit var webSocketClient: WebSocketClient
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
}
override fun onResume() {
super.onResume()
initWebSocket()
}
override fun onPause() {
super.onPause()
webSocketClient.close()
}
private fun initWebSocket() {
val coinbaseUri: URI? = URI(WEB_SOCKET_URL)
createWebSocketClient(coinbaseUri)
}
private fun createWebSocketClient(coinbaseUri: URI?) {
webSocketClient = object : WebSocketClient(coinbaseUri) {
override fun onOpen(handshakedata: ServerHandshake?) {
Log.d(TAG, "onOpen")
subscribe()
}
override fun onMessage(message: String?) {
Log.d(TAG, "onMessage: $message")
setUpBtcPriceText(message)
}
override fun onClose(code: Int, reason: String?, remote: Boolean) {
Log.d(TAG, "onClose")
unsubscribe()
}
override fun onError(ex: Exception?) {
Log.e("createWebSocketClient", "onError: ${ex?.message}")
}
}
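        // Not present in this excerpt: without an explicit connect() call the client never
        // opens a connection. Added as a minimal, assumed completion of the method.
        webSocketClient.connect()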
}
private fun subscribe() {
webSocketClient.send(
"{\n" +
" \"type\": \"subscribe\",\n" +
" \"channels\": [{ \"name\": \"ticker\", \"product_ids\": [\"BTC-EUR\"] }]\n" +
"}"
)
}
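
    private fun unsubscribe() {
        // Minimal sketch: the original body is not included in this excerpt.
        // Mirrors subscribe() by sending an unsubscribe frame for the ticker channel.
        webSocketClient.send(
            "{\n" +
                    "    \"type\": \"unsubscribe\",\n" +
                    "    \"channels\": [{ \"name\": \"ticker\", \"product_ids\": [\"BTC-EUR\"] }]\n" +
                    "}"
        )
    }

    private fun setUpBtcPriceText(message: String?) {
        // Minimal sketch: the original body is not included in this excerpt.
        // Parses the "price" field of the ticker JSON using the already imported org.json
        // classes and shows it on screen; btcPriceTextView is a hypothetical layout id.
        if (message == null) return
        try {
            val price = JSONObject(message).optString("price")
            runOnUiThread { btcPriceTextView.text = price }
        } catch (e: JSONException) {
            Log.e(TAG, "setUpBtcPriceText: ${e.message}")
        }
    }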
companion object {
const val WEB_SOCKET_URL = "wss://ws-feed.pro.coinbase.com"
const val TAG = "Coinbase"
}
}
<|start_filename|>app/src/main/java/de/example/websockettutorial/BitcoinTicker.kt<|end_filename|>
package de.example.websockettutorial
import com.squareup.moshi.JsonClass
@JsonClass(generateAdapter = true)
data class BitcoinTicker(val price: String?)
| fahrican/WebSocketTutorial |
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/message/StickerMessage.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.message;
public class StickerMessage extends ImageMessage {
public StickerMessage() {
msgtype = MSGTYPE_STICKER_LOCAL;
}
public StickerMessage(StickerJsonMessage stickerJsonMessage) {
this();
info = stickerJsonMessage.info;
url = stickerJsonMessage.url;
body = stickerJsonMessage.body;
format = stickerJsonMessage.format;
}
/**
* Make a deep copy of this StickerMessage.
*
* @return the copy
*/
public StickerMessage deepCopy() {
StickerMessage copy = new StickerMessage();
copy.info = info;
copy.url = url;
copy.body = body;
copy.format = format;
if (null != file) {
copy.file = file.deepCopy();
}
return copy;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/call/HeadsetConnectionReceiver.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.call;
import android.annotation.SuppressLint;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothProfile;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.os.Handler;
import android.os.Looper;
import android.text.TextUtils;
import org.matrix.androidsdk.core.Log;
import java.util.HashSet;
import java.util.Set;
// This class detects whether the headset is plugged / unplugged
public class HeadsetConnectionReceiver extends BroadcastReceiver {
private static final String LOG_TAG = HeadsetConnectionReceiver.class.getSimpleName();
private static Boolean mIsHeadsetPlugged = null;
private static HeadsetConnectionReceiver mSharedInstance = null;
/**
* Track the headset update.
*/
public interface OnHeadsetStatusUpdateListener {
/**
* A wire headset has been plugged / unplugged.
*
* @param isPlugged true if the headset is now plugged.
*/
void onWiredHeadsetUpdate(boolean isPlugged);
/**
* A bluetooth headset is connected.
*
* @param isConnected true if the bluetooth headset is connected.
*/
void onBluetoothHeadsetUpdate(boolean isConnected);
}
// listeners
private final Set<OnHeadsetStatusUpdateListener> mListeners = new HashSet<>();
public HeadsetConnectionReceiver() {
}
/**
* @param context the application context
* @return the shared instance
*/
public static HeadsetConnectionReceiver getSharedInstance(Context context) {
if (null == mSharedInstance) {
mSharedInstance = new HeadsetConnectionReceiver();
context.registerReceiver(mSharedInstance, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
context.registerReceiver(mSharedInstance, new IntentFilter(BluetoothAdapter.ACTION_CONNECTION_STATE_CHANGED));
context.registerReceiver(mSharedInstance, new IntentFilter(BluetoothAdapter.ACTION_STATE_CHANGED));
context.registerReceiver(mSharedInstance, new IntentFilter(BluetoothDevice.ACTION_ACL_CONNECTED));
context.registerReceiver(mSharedInstance, new IntentFilter(BluetoothDevice.ACTION_ACL_DISCONNECTED));
}
return mSharedInstance;
}
/**
* Add a listener.
*
* @param listener the listener to add.
*/
public void addListener(OnHeadsetStatusUpdateListener listener) {
synchronized (LOG_TAG) {
mListeners.add(listener);
}
}
/**
* Remove a listener.
*
* @param listener the listener to remove.
*/
public void removeListener(OnHeadsetStatusUpdateListener listener) {
synchronized (LOG_TAG) {
mListeners.remove(listener);
}
}
/**
* Dispatch onBluetoothHeadsetUpdate to the listeners.
*
* @param isConnected true if a bluetooth headset is connected.
*/
private void onBluetoothHeadsetUpdate(boolean isConnected) {
synchronized (LOG_TAG) {
for (OnHeadsetStatusUpdateListener listener : mListeners) {
try {
listener.onBluetoothHeadsetUpdate(isConnected);
} catch (Exception e) {
Log.e(LOG_TAG, "## onBluetoothHeadsetUpdate()) failed " + e.getMessage(), e);
}
}
}
}
/**
     * Dispatch onWiredHeadsetUpdate to the listeners.
*
* @param isPlugged true if the wire headset is plugged.
*/
private void onWiredHeadsetUpdate(boolean isPlugged) {
synchronized (LOG_TAG) {
for (OnHeadsetStatusUpdateListener listener : mListeners) {
try {
listener.onWiredHeadsetUpdate(isPlugged);
} catch (Exception e) {
Log.e(LOG_TAG, "## onWiredHeadsetUpdate()) failed " + e.getMessage(), e);
}
}
}
}
@Override
public void onReceive(final Context aContext, final Intent aIntent) {
Log.d(LOG_TAG, "## onReceive() : " + aIntent.getExtras());
String action = aIntent.getAction();
if (TextUtils.equals(action, Intent.ACTION_HEADSET_PLUG)
|| TextUtils.equals(action, BluetoothAdapter.ACTION_CONNECTION_STATE_CHANGED)
|| TextUtils.equals(action, BluetoothAdapter.ACTION_STATE_CHANGED)
|| TextUtils.equals(action, BluetoothDevice.ACTION_ACL_CONNECTED)
|| TextUtils.equals(action, BluetoothDevice.ACTION_ACL_DISCONNECTED)) {
Boolean newState = null;
final boolean isBTHeadsetUpdate;
if (TextUtils.equals(action, Intent.ACTION_HEADSET_PLUG)) {
int state = aIntent.getIntExtra("state", -1);
switch (state) {
case 0:
Log.d(LOG_TAG, "Headset is unplugged");
newState = false;
break;
case 1:
Log.d(LOG_TAG, "Headset is plugged");
newState = true;
break;
default:
Log.d(LOG_TAG, "undefined state");
}
isBTHeadsetUpdate = false;
} else {
int state;
BluetoothAdapter bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (bluetoothAdapter == null) {
state = BluetoothAdapter.STATE_DISCONNECTED;
} else {
state = bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET);
}
Log.d(LOG_TAG, "bluetooth headset state " + state);
newState = (BluetoothAdapter.STATE_CONNECTED == state);
isBTHeadsetUpdate = mIsHeadsetPlugged != newState;
}
if (newState != mIsHeadsetPlugged) {
mIsHeadsetPlugged = newState;
                // Wait a little, otherwise routing to the BT headset does not work.
new Handler(Looper.getMainLooper()).postDelayed(() -> {
if (isBTHeadsetUpdate) {
onBluetoothHeadsetUpdate(mIsHeadsetPlugged);
} else {
onWiredHeadsetUpdate(mIsHeadsetPlugged);
}
}, 1000);
}
}
}
private static AudioManager mAudioManager = null;
/**
* @return the audio manager
*/
private static AudioManager getAudioManager(Context context) {
if (null == mAudioManager) {
mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
}
return mAudioManager;
}
/**
* @param context the context
* @return true if the headset is plugged
*/
@SuppressLint("Deprecation")
public static boolean isHeadsetPlugged(Context context) {
if (null == mIsHeadsetPlugged) {
AudioManager audioManager = getAudioManager(context);
//noinspection deprecation
mIsHeadsetPlugged = isBTHeadsetPlugged() || audioManager.isWiredHeadsetOn();
}
return mIsHeadsetPlugged;
}
/**
* @return true if bluetooth headset is plugged
*/
public static boolean isBTHeadsetPlugged() {
BluetoothAdapter bluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
if (bluetoothAdapter == null) {
return false;
} else {
return (BluetoothAdapter.STATE_CONNECTED == bluetoothAdapter.getProfileConnectionState(BluetoothProfile.HEADSET));
}
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/message/StickerJsonMessage.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.message;
// It's just an intermediate object to create a StickerMessage from an m.sticker event type.
public class StickerJsonMessage {
public final String msgtype = Message.MSGTYPE_STICKER_LOCAL;
public String body;
public String url;
public String format;
public ImageInfo info;
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/rest/model/crypto/KeyVerificationStart.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.rest.model.crypto
import com.google.gson.annotations.SerializedName
import org.matrix.androidsdk.core.Log
import org.matrix.androidsdk.crypto.verification.SASVerificationTransaction
/**
* Sent by Alice to initiate an interactive key verification.
*/
class KeyVerificationStart : SendToDeviceObject {
/**
* Alice’s device ID
*/
@JvmField
@SerializedName("from_device")
var fromDevice: String? = null
@JvmField
var method: String? = null
/**
* String to identify the transaction.
* This string must be unique for the pair of users performing verification for the duration that the transaction is valid.
* Alice’s device should record this ID and use it in future messages in this transaction.
*/
@JvmField
@SerializedName("transaction_id")
var transactionID: String? = null
/**
* An array of key agreement protocols that Alice’s client understands.
* Must include “curve25519”.
* Other methods may be defined in the future
*/
@JvmField
@SerializedName("key_agreement_protocols")
var keyAgreementProtocols: List<String>? = null
/**
* An array of hashes that Alice’s client understands.
* Must include “sha256”. Other methods may be defined in the future.
*/
@JvmField
var hashes: List<String>? = null
/**
* An array of message authentication codes that Alice’s client understands.
* Must include “hkdf-hmac-sha256”.
* Other methods may be defined in the future.
*/
@JvmField
@SerializedName("message_authentication_codes")
var messageAuthenticationCodes: List<String>? = null
/**
* An array of short authentication string methods that Alice’s client (and Alice) understands.
* Must include “decimal”.
* This document also describes the “emoji” method.
* Other methods may be defined in the future
*/
@JvmField
@SerializedName("short_authentication_string")
var shortAuthenticationStrings: List<String>? = null
companion object {
const val VERIF_METHOD_SAS = "m.sas.v1"
const val SAS_MODE_DECIMAL = "decimal"
const val SAS_MODE_EMOJI = "emoji"
}
fun isValid(): Boolean {
if (transactionID.isNullOrBlank()
|| fromDevice.isNullOrBlank()
|| method != VERIF_METHOD_SAS
|| keyAgreementProtocols.isNullOrEmpty()
|| hashes.isNullOrEmpty()
|| hashes?.contains("sha256") == false
|| messageAuthenticationCodes.isNullOrEmpty()
|| (messageAuthenticationCodes?.contains(SASVerificationTransaction.SAS_MAC_SHA256) == false
&& messageAuthenticationCodes?.contains(SASVerificationTransaction.SAS_MAC_SHA256_LONGKDF) == false)
|| shortAuthenticationStrings.isNullOrEmpty()
|| shortAuthenticationStrings?.contains(KeyVerificationStart.SAS_MODE_DECIMAL) == false) {
Log.e(SASVerificationTransaction.LOG_TAG, "## received invalid verification request")
return false
}
return true
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/timeline/StateEventRedactionChecker.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.timeline;
import androidx.annotation.NonNull;
import android.text.TextUtils;
import org.matrix.androidsdk.MXDataHandler;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.core.callback.ApiCallback;
import org.matrix.androidsdk.core.callback.SimpleApiCallback;
import org.matrix.androidsdk.core.model.MatrixError;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.data.store.IMXStore;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.RoomMember;
import java.util.List;
import javax.annotation.Nonnull;
/**
 * This class is responsible for checking the redaction of state events.
*/
class StateEventRedactionChecker {
private static final String LOG_TAG = StateEventRedactionChecker.class.getSimpleName();
private final EventTimeline mEventTimeline;
private final TimelineStateHolder mTimelineStateHolder;
StateEventRedactionChecker(@NonNull final EventTimeline eventTimeline,
@NonNull final TimelineStateHolder timelineStateHolder) {
mEventTimeline = eventTimeline;
mTimelineStateHolder = timelineStateHolder;
}
/**
     * Redaction of a state event might require reloading the timeline,
     * because the room state has to be updated.
*
* @param redactionEvent the redaction event
*/
public void checkStateEventRedaction(@NonNull final Event redactionEvent) {
final IMXStore store = mEventTimeline.getStore();
final Room room = mEventTimeline.getRoom();
final MXDataHandler dataHandler = room.getDataHandler();
final String roomId = room.getRoomId();
final String eventId = redactionEvent.getRedactedEventId();
final RoomState state = mTimelineStateHolder.getState();
Log.d(LOG_TAG, "checkStateEventRedaction of event " + eventId);
        // Check whether the state events are known locally
state.getStateEvents(store, null, new SimpleApiCallback<List<Event>>() {
@Override
public void onSuccess(List<Event> stateEvents) {
// Check whether the current room state depends on this redacted event.
boolean isFound = false;
for (int index = 0; index < stateEvents.size(); index++) {
Event stateEvent = stateEvents.get(index);
if (TextUtils.equals(stateEvent.eventId, eventId)) {
Log.d(LOG_TAG, "checkStateEventRedaction: the current room state has been modified by the event redaction");
// remove expected keys
stateEvent.prune(redactionEvent);
stateEvents.set(index, stateEvent);
// digest the updated state
mTimelineStateHolder.processStateEvent(stateEvent, EventTimeline.Direction.FORWARDS, true);
isFound = true;
break;
}
}
if (!isFound) {
                    // Else try to find the redacted event among the room members, which
                    // are stored apart from other state events.
                    // Reason: membership events are no longer stored in the application store
                    // until we have found a way to improve the way they are stored.
                    // It used to cause many out-of-memory errors because too many small objects were stored.
// see https://github.com/matrix-org/matrix-android-sdk/issues/196
// Note: if lazy loading is on, getMemberByEventId() can return null, but it is ok, because we just want to update our cache
RoomMember member = state.getMemberByEventId(eventId);
if (member != null) {
Log.d(LOG_TAG, "checkStateEventRedaction: the current room members list has been modified by the event redaction");
// the android SDK does not store stock member events but a representation of them, RoomMember.
// Prune this representation
member.prune();
isFound = true;
}
}
if (isFound) {
store.storeLiveStateForRoom(roomId);
// warn that there was a flush
mEventTimeline.initHistory();
dataHandler.onRoomFlush(roomId);
} else {
Log.d(LOG_TAG, "checkStateEventRedaction: the redacted event is unknown. Fetch it from the homeserver");
checkStateEventRedactionWithHomeserver(dataHandler, roomId, eventId);
}
}
});
}
/**
* Check with the HS whether the redacted event impacts the room data we have locally.
* If yes, local data must be pruned.
*
* @param eventId the redacted event id
*/
private void checkStateEventRedactionWithHomeserver(@Nonnull final MXDataHandler dataHandler,
@Nonnull final String roomId,
@Nonnull final String eventId) {
Log.d(LOG_TAG, "checkStateEventRedactionWithHomeserver on event Id " + eventId);
// We need to figure out if this redacted event is a room state in the past.
// If yes, we must prune the `prev_content` of the state event that replaced it.
// Indeed, redacted information shouldn't spontaneously appear when you backpaginate...
// TODO: This is no more implemented (see https://github.com/vector-im/riot-ios/issues/443).
// The previous implementation based on a room initial sync was too heavy server side
// and has been removed.
if (!TextUtils.isEmpty(eventId)) {
Log.d(LOG_TAG, "checkStateEventRedactionWithHomeserver : retrieving the event");
dataHandler.getDataRetriever().getRoomsRestClient().getEvent(roomId, eventId, new ApiCallback<Event>() {
@Override
public void onSuccess(Event event) {
if (null != event && null != event.stateKey) {
Log.d(LOG_TAG, "checkStateEventRedactionWithHomeserver : the redacted event is a state event in the past." +
" TODO: prune prev_content of the new state event");
} else {
Log.d(LOG_TAG, "checkStateEventRedactionWithHomeserver : the redacted event is a not state event -> job is done");
}
}
@Override
public void onNetworkError(Exception e) {
Log.e(LOG_TAG, "checkStateEventRedactionWithHomeserver : failed to retrieved the redacted event: onNetworkError " + e.getMessage(), e);
}
@Override
public void onMatrixError(MatrixError e) {
Log.e(LOG_TAG, "checkStateEventRedactionWithHomeserver : failed to retrieved the redacted event: onNetworkError " + e.getMessage());
}
@Override
public void onUnexpectedError(Exception e) {
Log.e(LOG_TAG, "checkStateEventRedactionWithHomeserver : failed to retrieved the redacted event: onNetworkError " + e.getMessage(), e);
}
});
}
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/model/crypto/EncryptedFileKey.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.model.crypto;
import java.io.Serializable;
import java.util.List;
public class EncryptedFileKey implements Serializable {
public String alg;
public Boolean ext;
public List<String> key_ops;
public String kty;
public String k;
/**
* Make a deep copy.
*
* @return the copy
*/
public EncryptedFileKey deepCopy() {
EncryptedFileKey encryptedFileKey = new EncryptedFileKey();
encryptedFileKey.alg = alg;
encryptedFileKey.ext = ext;
encryptedFileKey.key_ops = key_ops;
encryptedFileKey.kty = kty;
encryptedFileKey.k = k;
return encryptedFileKey;
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/interfaces/CryptoSession.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.interfaces
import org.matrix.androidsdk.crypto.MXCrypto
interface CryptoSession {
val myUserId: String
fun getDeviceId(): String
fun setDeviceId(deviceId: String)
val dataHandler: CryptoDataHandler
fun requireCrypto(): MXCrypto
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/json/ConditionDeserializer.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.json;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.rest.model.bingrules.Condition;
import org.matrix.androidsdk.rest.model.bingrules.ContainsDisplayNameCondition;
import org.matrix.androidsdk.rest.model.bingrules.DeviceCondition;
import org.matrix.androidsdk.rest.model.bingrules.EventMatchCondition;
import org.matrix.androidsdk.rest.model.bingrules.RoomMemberCountCondition;
import org.matrix.androidsdk.rest.model.bingrules.SenderNotificationPermissionCondition;
import org.matrix.androidsdk.rest.model.bingrules.UnknownCondition;
import java.lang.reflect.Type;
public class ConditionDeserializer implements JsonDeserializer<Condition> {
private static final String LOG_TAG = ConditionDeserializer.class.getSimpleName();
@Override
public Condition deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
Condition condition = null;
JsonObject jsonObject = json.getAsJsonObject();
JsonElement kindElement = jsonObject.get("kind");
if (null != kindElement) {
String kind = kindElement.getAsString();
if (null != kind) {
switch (kind) {
case Condition.KIND_EVENT_MATCH:
condition = context.deserialize(json, EventMatchCondition.class);
break;
case Condition.KIND_DEVICE:
condition = context.deserialize(json, DeviceCondition.class);
break;
case Condition.KIND_CONTAINS_DISPLAY_NAME:
condition = context.deserialize(json, ContainsDisplayNameCondition.class);
break;
case Condition.KIND_ROOM_MEMBER_COUNT:
condition = context.deserialize(json, RoomMemberCountCondition.class);
break;
case Condition.KIND_SENDER_NOTIFICATION_PERMISSION:
condition = context.deserialize(json, SenderNotificationPermissionCondition.class);
break;
default:
Log.e(LOG_TAG, "## deserialize() : unsupported kind " + kind + " with value " + json);
condition = context.deserialize(json, UnknownCondition.class);
break;
}
}
}
return condition;
}
}
<|start_filename|>matrix-sdk-core/src/main/java/org/matrix/androidsdk/core/json/BooleanDeserializer.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core.json;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import org.matrix.androidsdk.core.Log;
import java.lang.reflect.Type;
/**
* Convenient JsonDeserializer to accept various types of Boolean
*/
public class BooleanDeserializer implements JsonDeserializer<Boolean> {
private static final String LOG_TAG = BooleanDeserializer.class.getSimpleName();
private final boolean mCanReturnNull;
/**
* Constructor
*
* @param canReturnNull true if the deserializer can return null in case of error
*/
public BooleanDeserializer(boolean canReturnNull) {
mCanReturnNull = canReturnNull;
}
/**
* @param json The Json data being deserialized
* @param typeOfT The type of the Object to deserialize to
* @param context not used
* @return true if json is: true, 1, "true" or "1". false for other values. null in other cases.
* @throws JsonParseException
*/
@Override
public Boolean deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
if (json.isJsonPrimitive()) {
JsonPrimitive jsonPrimitive = json.getAsJsonPrimitive();
if (jsonPrimitive.isBoolean()) {
// Nominal case
return jsonPrimitive.getAsBoolean();
} else if (jsonPrimitive.isNumber()) {
Log.w(LOG_TAG, "Boolean detected as a number");
return jsonPrimitive.getAsInt() == 1;
} else if (jsonPrimitive.isString()) {
Log.w(LOG_TAG, "Boolean detected as a string");
String jsonPrimitiveString = jsonPrimitive.getAsString();
return "1".equals(jsonPrimitiveString)
|| "true".equals(jsonPrimitiveString);
} else {
// Should not happen
Log.e(LOG_TAG, "Unknown primitive");
if (mCanReturnNull) {
return null;
} else {
return false;
}
}
} else if (json.isJsonNull()) {
if (mCanReturnNull) {
return null;
} else {
Log.w(LOG_TAG, "Boolean is null, but not allowed to return null");
return false;
}
}
Log.w(LOG_TAG, "Boolean detected as not a primitive type");
if (mCanReturnNull) {
return null;
} else {
return false;
}
}
}
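/*
 * Minimal usage sketch (an assumption for illustration, not part of the original file): register the
 * deserializer with Gson so that true, 1, "true" and "1" are all read as Boolean true.
 *
 *   Gson gson = new GsonBuilder()
 *           .registerTypeAdapter(Boolean.class, new BooleanDeserializer(true))
 *           .registerTypeAdapter(boolean.class, new BooleanDeserializer(false))
 *           .create();
 *   Boolean value = gson.fromJson("\"1\"", Boolean.class); // true
 */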
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/cryptostore/db/RealmCryptoStoreMigration.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.cryptostore.db
import io.realm.DynamicRealm
import io.realm.RealmMigration
import org.matrix.androidsdk.core.Log
import org.matrix.androidsdk.crypto.cryptostore.db.model.IncomingRoomKeyRequestEntityFields
import org.matrix.androidsdk.crypto.cryptostore.db.model.KeysBackupDataEntityFields
import org.matrix.androidsdk.crypto.cryptostore.db.model.OlmSessionEntityFields
import org.matrix.androidsdk.crypto.cryptostore.db.model.OutgoingRoomKeyRequestEntityFields
internal object RealmCryptoStoreMigration : RealmMigration {
private const val LOG_TAG = "RealmCryptoStoreMigration"
const val CRYPTO_STORE_SCHEMA_VERSION = 2L
override fun migrate(realm: DynamicRealm, oldVersion: Long, newVersion: Long) {
Log.d(LOG_TAG, "Migrating Realm Crypto from $oldVersion to $newVersion")
if (oldVersion <= 0) {
Log.d(LOG_TAG, "Step 0 -> 1")
Log.d(LOG_TAG, "Add field lastReceivedMessageTs (Long) and set the value to 0")
realm.schema.get("OlmSessionEntity")
?.addField(OlmSessionEntityFields.LAST_RECEIVED_MESSAGE_TS, Long::class.java)
?.transform {
it.setLong(OlmSessionEntityFields.LAST_RECEIVED_MESSAGE_TS, 0)
}
}
if (oldVersion <= 1) {
Log.d(LOG_TAG, "Step 1 -> 2")
Log.d(LOG_TAG, "Update IncomingRoomKeyRequestEntity format: requestBodyString field is exploded into several fields")
realm.schema.get("IncomingRoomKeyRequestEntity")
?.addField(IncomingRoomKeyRequestEntityFields.REQUEST_BODY_ALGORITHM, String::class.java)
?.addField(IncomingRoomKeyRequestEntityFields.REQUEST_BODY_ROOM_ID, String::class.java)
?.addField(IncomingRoomKeyRequestEntityFields.REQUEST_BODY_SENDER_KEY, String::class.java)
?.addField(IncomingRoomKeyRequestEntityFields.REQUEST_BODY_SESSION_ID, String::class.java)
?.transform { dynamicObject ->
val requestBodyString = dynamicObject.getString("requestBodyString")
try {
// It was a map before
val map: Map<String, String>? = deserializeFromRealm(requestBodyString)
map?.let {
dynamicObject.setString(IncomingRoomKeyRequestEntityFields.REQUEST_BODY_ALGORITHM, it["algorithm"])
dynamicObject.setString(IncomingRoomKeyRequestEntityFields.REQUEST_BODY_ROOM_ID, it["room_id"])
dynamicObject.setString(IncomingRoomKeyRequestEntityFields.REQUEST_BODY_SENDER_KEY, it["sender_key"])
dynamicObject.setString(IncomingRoomKeyRequestEntityFields.REQUEST_BODY_SESSION_ID, it["session_id"])
}
} catch (e: Exception) {
Log.d(LOG_TAG, "Error", e)
}
}
?.removeField("requestBodyString")
Log.d(LOG_TAG, "Update IncomingRoomKeyRequestEntity format: requestBodyString field is exploded into several fields")
realm.schema.get("OutgoingRoomKeyRequestEntity")
?.addField(OutgoingRoomKeyRequestEntityFields.REQUEST_BODY_ALGORITHM, String::class.java)
?.addField(OutgoingRoomKeyRequestEntityFields.REQUEST_BODY_ROOM_ID, String::class.java)
?.addField(OutgoingRoomKeyRequestEntityFields.REQUEST_BODY_SENDER_KEY, String::class.java)
?.addField(OutgoingRoomKeyRequestEntityFields.REQUEST_BODY_SESSION_ID, String::class.java)
?.transform { dynamicObject ->
val requestBodyString = dynamicObject.getString("requestBodyString")
try {
// It was a map before
val map: Map<String, String>? = deserializeFromRealm(requestBodyString)
map?.let {
dynamicObject.setString(OutgoingRoomKeyRequestEntityFields.REQUEST_BODY_ALGORITHM, it["algorithm"])
dynamicObject.setString(OutgoingRoomKeyRequestEntityFields.REQUEST_BODY_ROOM_ID, it["room_id"])
dynamicObject.setString(OutgoingRoomKeyRequestEntityFields.REQUEST_BODY_SENDER_KEY, it["sender_key"])
dynamicObject.setString(OutgoingRoomKeyRequestEntityFields.REQUEST_BODY_SESSION_ID, it["session_id"])
}
} catch (e: Exception) {
Log.d(LOG_TAG, "Error", e)
}
}
?.removeField("requestBodyString")
Log.d(LOG_TAG, "Create KeysBackupDataEntity")
realm.schema.create("KeysBackupDataEntity")
.addField(KeysBackupDataEntityFields.PRIMARY_KEY, Integer::class.java)
.addPrimaryKey(KeysBackupDataEntityFields.PRIMARY_KEY)
.setRequired(KeysBackupDataEntityFields.PRIMARY_KEY, true)
.addField(KeysBackupDataEntityFields.BACKUP_LAST_SERVER_HASH, String::class.java)
.addField(KeysBackupDataEntityFields.BACKUP_LAST_SERVER_NUMBER_OF_KEYS, Integer::class.java)
}
}
}
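/*
 * Illustrative sketch (an assumption, not taken from the repository): the migration is expected to be
 * plugged into the RealmConfiguration used by the crypto store, together with the schema version.
 *
 *   val realmConfiguration = RealmConfiguration.Builder()
 *           .schemaVersion(RealmCryptoStoreMigration.CRYPTO_STORE_SCHEMA_VERSION)
 *           .migration(RealmCryptoStoreMigration)
 *           .build()
 */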
<|start_filename|>matrix-sdk/src/test/java/org/matrix/androidsdk/rest/model/bingrules/ContainsDisplayNameConditionTest.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.bingrules;
import com.google.gson.JsonParser;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.matrix.androidsdk.rest.model.Event;
import org.robolectric.RobolectricTestRunner;
@RunWith(RobolectricTestRunner.class)
public class ContainsDisplayNameConditionTest {
private ContainsDisplayNameCondition condition = new ContainsDisplayNameCondition();
private Event event;
private String displayName = "Bob";
@Before
public void setUp() {
event = new Event();
event.type = Event.EVENT_TYPE_MESSAGE;
}
private void setEventMessage(String type, String rest) {
String contentJson = "{'msgtype': '" + type + "', " + rest + "}";
event.contentJson = new JsonParser().parse(contentJson);
}
private void setEventTextMessageBody(String body) {
setEventMessage("m.text", "'body': '" + body + "'");
}
@Test
public void testTextMessages() {
setEventTextMessageBody("Bob");
Assert.assertTrue(condition.isSatisfied(event, displayName));
setEventTextMessageBody("bob");
Assert.assertTrue(condition.isSatisfied(event, displayName));
setEventTextMessageBody("Hi Bob!");
Assert.assertTrue(condition.isSatisfied(event, displayName));
setEventTextMessageBody("Hi Bobby!");
Assert.assertFalse(condition.isSatisfied(event, displayName));
setEventTextMessageBody("Hi MrBob");
Assert.assertFalse(condition.isSatisfied(event, displayName));
setEventTextMessageBody("Hi Robert!");
Assert.assertFalse(condition.isSatisfied(event, displayName));
}
@Test
public void testOtherMessageTypes() {
setEventMessage("m.image", "'body': 'Bob.jpeg'");
Assert.assertTrue(condition.isSatisfied(event, displayName));
setEventMessage("m.image", "'url': 'Bob'");
Assert.assertFalse(condition.isSatisfied(event, displayName));
setEventMessage("m.notice", "'body': 'Bob did something or other'");
Assert.assertTrue(condition.isSatisfied(event, displayName));
setEventMessage("m.emote", "'body': 'is angry with Bob'");
Assert.assertTrue(condition.isSatisfied(event, displayName));
}
@Test
public void testOtherEventType() {
event.type = Event.EVENT_TYPE_TYPING;
setEventTextMessageBody("Bob");
Assert.assertFalse(condition.isSatisfied(event, displayName));
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/ReceiptData.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model;
import org.matrix.androidsdk.core.interfaces.DatedObject;
public class ReceiptData implements java.io.Serializable, DatedObject {
// the user id
public String userId;
// The event id.
public String eventId;
// The timestamp in ms since Epoch generated by the origin homeserver when it receives the event from the client.
public long originServerTs;
public ReceiptData(String anUserId, String anEventId, long aTs) {
userId = anUserId;
eventId = anEventId;
originServerTs = aTs;
}
@Override
public long getDate() {
return originServerTs;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/bingrules/SenderNotificationPermissionCondition.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.bingrules;
import org.matrix.androidsdk.rest.model.PowerLevels;
public class SenderNotificationPermissionCondition extends Condition {
private static final String LOG_TAG = SenderNotificationPermissionCondition.class.getSimpleName();
public String key;
public SenderNotificationPermissionCondition() {
kind = Condition.KIND_SENDER_NOTIFICATION_PERMISSION;
}
public boolean isSatisfied(PowerLevels powerLevels, String userId) {
return (null != powerLevels) && (null != userId) && powerLevels.getUserPowerLevel(userId) >= powerLevels.notificationLevel(key);
}
@Override
public String toString() {
return "SenderNotificationPermissionCondition{" + "key=" + key;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/timeline/TimelineEventListeners.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.timeline;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.rest.model.Event;
import java.util.ArrayList;
import java.util.List;
/**
* Handle the timeline event listeners
* Is responsible for dispatching events
*/
class TimelineEventListeners {
private static final String LOG_TAG = TimelineEventListeners.class.getSimpleName();
// The inner listeners
private final List<EventTimeline.Listener> mListeners = new ArrayList<>();
/**
* Add an events listener.
*
* @param listener the listener to add.
*/
public void add(@Nullable final EventTimeline.Listener listener) {
if (listener != null) {
synchronized (this) {
if (!mListeners.contains(listener)) {
mListeners.add(listener);
}
}
}
}
/**
* Remove an events listener.
*
* @param listener the listener to remove.
*/
public void remove(@Nullable final EventTimeline.Listener listener) {
if (null != listener) {
synchronized (this) {
mListeners.remove(listener);
}
}
}
/**
* Dispatch the onEvent callback.
*
* @param event the event.
* @param direction the direction.
* @param roomState the roomState.
*/
public void onEvent(@NonNull final Event event,
@NonNull final EventTimeline.Direction direction,
@NonNull final RoomState roomState) {
// ensure that the listeners are called in the UI thread
if (Looper.getMainLooper().getThread() == Thread.currentThread()) {
final List<EventTimeline.Listener> listeners;
synchronized (this) {
listeners = new ArrayList<>(mListeners);
}
for (EventTimeline.Listener listener : listeners) {
try {
listener.onEvent(event, direction, roomState);
} catch (Exception e) {
Log.e(LOG_TAG, "EventTimeline.onEvent " + listener + " crashes " + e.getMessage(), e);
}
}
} else {
final Handler handler = new Handler(Looper.getMainLooper());
handler.post(new Runnable() {
@Override
public void run() {
onEvent(event, direction, roomState);
}
});
}
}
}
<|start_filename|>matrix-sdk-core/src/main/java/org/matrix/androidsdk/core/Log.java<|end_filename|>
/*
* Copyright 2017 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core;
import android.text.TextUtils;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
/**
* Intended to mimic {@link android.util.Log} in terms of interface, but with a lot of extra behind the scenes stuff.
*/
public class Log {
private static final String LOG_TAG = "Log";
private static final String LINE_SEPARATOR = System.getProperty("line.separator");
private static final int LOG_SIZE_BYTES = 50 * 1024 * 1024; // 50MB
// relatively large rotation count because closing and reopening the app rotates the log (!)
private static final int LOG_ROTATION_COUNT = 15;
private static final Logger sLogger = Logger.getLogger("org.matrix.androidsdk");
private static FileHandler sFileHandler = null;
private static File sCacheDirectory = null;
private static String sFileName = "matrix";
// determine if messages with DEBUG level should be logged or not
public static boolean sShouldLogDebug = BuildConfig.DEBUG;
public enum EventTag {
/**
* A navigation event, e.g. onPause
*/
NAVIGATION,
/**
* A user triggered event, e.g. onClick
*/
USER,
/**
* User-visible notifications
*/
NOTICE,
/**
* A background event e.g. incoming messages
*/
BACKGROUND
}
/**
* Initialises the logger. Should be called AFTER {@link Log#setLogDirectory(File)}.
*
* @param fileName the base file name
*/
public static void init(String fileName) {
try {
if (!TextUtils.isEmpty(fileName)) {
sFileName = fileName;
}
sFileHandler = new FileHandler(sCacheDirectory.getAbsolutePath() + "/" + sFileName + ".%g.txt", LOG_SIZE_BYTES, LOG_ROTATION_COUNT);
sFileHandler.setFormatter(new LogFormatter());
sLogger.setUseParentHandlers(false);
sLogger.setLevel(Level.ALL);
sLogger.addHandler(sFileHandler);
} catch (IOException e) {
// Initialisation of the file handler failed: file logging is silently disabled
}
}
/**
* Set the directory to put log files.
*
* @param cacheDir The directory, usually {@link android.content.ContextWrapper#getCacheDir()}
*/
public static void setLogDirectory(File cacheDir) {
if (!cacheDir.exists()) {
cacheDir.mkdirs();
}
sCacheDirectory = cacheDir;
}
/**
* Get the directory where log files are written.
*
* @return the cache directory
*/
public static File getLogDirectory() {
return sCacheDirectory;
}
/**
* Adds our own log files to the provided list of files.
*
* @param files The list of files to add to.
* @return The same list with more files added.
*/
public static List<File> addLogFiles(List<File> files) {
try {
// reported by GA
if (null != sFileHandler) {
sFileHandler.flush();
String absPath = sCacheDirectory.getAbsolutePath();
for (int i = 0; i <= LOG_ROTATION_COUNT; i++) {
String filepath = absPath + "/" + sFileName + "." + i + ".txt";
File file = new File(filepath);
if (file.exists()) {
files.add(file);
}
}
}
} catch (Exception e) {
Log.e(LOG_TAG, "## addLogFiles() failed : " + e.getMessage(), e);
}
return files;
}
private static void logToFile(String level, String tag, String content) {
if (null == sCacheDirectory) {
return;
}
StringBuilder b = new StringBuilder();
b.append(Thread.currentThread().getId());
b.append(" ");
b.append(level);
b.append("/");
b.append(tag);
b.append(": ");
b.append(content);
sLogger.info(b.toString());
}
/**
* Log a Throwable
*
* @param throwable the throwable to log
*/
private static void logToFile(Throwable throwable) {
if (null == sCacheDirectory || throwable == null) {
return;
}
StringWriter errors = new StringWriter();
throwable.printStackTrace(new PrintWriter(errors));
sLogger.info(errors.toString());
}
/**
* Log events which can be automatically analysed
*
* @param tag the EventTag
* @param content Content to log
*/
public static void event(EventTag tag, String content) {
android.util.Log.v(tag.name(), content);
logToFile("EVENT", tag.name(), content);
}
/**
* Log connection information, such as urls hit, incoming data, current connection status.
*
* @param tag Log tag
* @param content Content to log
*/
public static void con(String tag, String content) {
android.util.Log.v(tag, content);
logToFile("CON", tag, content);
}
public static void v(String tag, String content) {
android.util.Log.v(tag, content);
logToFile("V", tag, content);
}
public static void v(String tag, String content, Throwable throwable) {
android.util.Log.v(tag, content, throwable);
logToFile("V", tag, content);
logToFile(throwable);
}
public static void d(String tag, String content) {
if (sShouldLogDebug) {
android.util.Log.d(tag, content);
logToFile("D", tag, content);
}
}
public static void d(String tag, String content, Throwable throwable) {
if (sShouldLogDebug) {
android.util.Log.d(tag, content, throwable);
logToFile("D", tag, content);
logToFile(throwable);
}
}
public static void i(String tag, String content) {
android.util.Log.i(tag, content);
logToFile("I", tag, content);
}
public static void i(String tag, String content, Throwable throwable) {
android.util.Log.i(tag, content, throwable);
logToFile("I", tag, content);
logToFile(throwable);
}
public static void w(String tag, String content) {
android.util.Log.w(tag, content);
logToFile("W", tag, content);
}
public static void w(String tag, String content, Throwable throwable) {
android.util.Log.w(tag, content, throwable);
logToFile("W", tag, content);
logToFile(throwable);
}
public static void e(String tag, String content) {
android.util.Log.e(tag, content);
logToFile("E", tag, content);
}
public static void e(String tag, String content, Throwable throwable) {
android.util.Log.e(tag, content, throwable);
logToFile("E", tag, content);
logToFile(throwable);
}
public static void wtf(String tag, String content) {
android.util.Log.wtf(tag, content);
logToFile("WTF", tag, content);
}
public static void wtf(String tag, Throwable throwable) {
android.util.Log.wtf(tag, throwable);
logToFile("WTF", tag, throwable.getMessage());
logToFile(throwable);
}
public static void wtf(String tag, String content, Throwable throwable) {
android.util.Log.wtf(tag, content, throwable);
logToFile("WTF", tag, content);
logToFile(throwable);
}
public static final class LogFormatter extends Formatter {
private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("MM-dd HH:mm:ss.SSS", Locale.US);
private static boolean mIsTimeZoneSet = false;
@Override
public String format(LogRecord r) {
if (!mIsTimeZoneSet) {
DATE_FORMAT.setTimeZone(TimeZone.getTimeZone("UTC"));
mIsTimeZoneSet = true;
}
Throwable thrown = r.getThrown();
if (thrown != null) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
sw.write(r.getMessage());
sw.write(LINE_SEPARATOR);
thrown.printStackTrace(pw);
pw.flush();
return sw.toString();
} else {
StringBuilder b = new StringBuilder();
String date = DATE_FORMAT.format(new Date(r.getMillis()));
b.append(date);
b.append("Z ");
b.append(r.getMessage());
b.append(LINE_SEPARATOR);
return b.toString();
}
}
}
}
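/*
 * Hedged usage sketch (not in the original file): the log directory must be set before init() is
 * called; afterwards the class behaves like android.util.Log while also writing rotated log files.
 * 'context' below is assumed to be an available android.content.Context.
 *
 *   Log.setLogDirectory(new File(context.getCacheDir(), "logs"));
 *   Log.init("matrix");
 *   Log.d("MyTag", "debug message");
 */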
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/RoomSummary.java<|end_filename|>
/*
* Copyright 2015 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import android.text.TextUtils;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import org.matrix.androidsdk.call.MXCallsManager;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.EventContent;
import org.matrix.androidsdk.rest.model.RoomMember;
import org.matrix.androidsdk.rest.model.message.Message;
import org.matrix.androidsdk.rest.model.sync.RoomSyncSummary;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* Stores summarised information about the room.
*/
public class RoomSummary implements java.io.Serializable {
private static final String LOG_TAG = RoomSummary.class.getSimpleName();
private static final long serialVersionUID = -3683013938626566489L;
// list of supported types
private static final List<String> sSupportedType = Arrays.asList(
Event.EVENT_TYPE_STATE_ROOM_TOPIC,
Event.EVENT_TYPE_MESSAGE_ENCRYPTED,
Event.EVENT_TYPE_MESSAGE_ENCRYPTION,
Event.EVENT_TYPE_STATE_ROOM_NAME,
Event.EVENT_TYPE_STATE_ROOM_MEMBER,
Event.EVENT_TYPE_STATE_ROOM_CREATE,
Event.EVENT_TYPE_STATE_HISTORY_VISIBILITY,
Event.EVENT_TYPE_STATE_ROOM_THIRD_PARTY_INVITE,
Event.EVENT_TYPE_STICKER);
// List of known unsupported types
private static final List<String> sKnownUnsupportedType = Arrays.asList(
Event.EVENT_TYPE_TYPING,
Event.EVENT_TYPE_STATE_ROOM_POWER_LEVELS,
Event.EVENT_TYPE_STATE_ROOM_JOIN_RULES,
Event.EVENT_TYPE_STATE_CANONICAL_ALIAS,
Event.EVENT_TYPE_STATE_ROOM_ALIASES,
Event.EVENT_TYPE_URL_PREVIEW,
Event.EVENT_TYPE_STATE_RELATED_GROUPS,
Event.EVENT_TYPE_STATE_ROOM_GUEST_ACCESS,
Event.EVENT_TYPE_REDACTION,
Event.EVENT_TYPE_TAGGED_EVENTS);
private String mRoomId = null;
private String mTopic = null;
private Event mLatestReceivedEvent = null;
// the room state is only used to check
// 1- the invitation status
// 2- the members display name
private transient RoomState mLatestRoomState = null;
// defines the latest read message
private String mReadReceiptEventId;
// the read marker event id
private String mReadMarkerEventId;
private Set<String> mRoomTags;
// counters
public int mUnreadEventsCount;
public int mNotificationCount;
public int mHighlightsCount;
// invitation status
// retrieved at initial sync
// the roomstate is not always known
private String mInviterUserId = null;
// retrieved from the roomState
private String mInviterName = null;
private String mUserId = null;
// Info from sync, depending on the room position in the sync
private String mUserMembership;
/**
* Tell if the room is a conference user room
*/
private Boolean mIsConferenceUserRoom = null;
/**
* Data from RoomSyncSummary
*/
private List<String> mHeroes = new ArrayList<>();
private int mJoinedMembersCountFromSyncRoomSummary;
private int mInvitedMembersCountFromSyncRoomSummary;
public interface RoomSummaryListener {
/**
* Test if the event can be summarized (i.e. used as the last event of the room).
* When the listener is defined this method overrides the default one.
*
* @param event the event to test.
* @return true if the event can be summarized
*/
boolean isSupportedEvent(Event event);
}
private static @Nullable RoomSummaryListener mRoomSummaryListener;
public RoomSummary() {
}
/**
* Create a room summary
*
* @param fromSummary the summary source
* @param event the latest event of the room
* @param roomState the room state - used to display the event
* @param userId our own user id - used to display the room name
*/
public RoomSummary(@Nullable RoomSummary fromSummary,
Event event,
RoomState roomState,
String userId) {
mUserId = userId;
if (null != roomState) {
setRoomId(roomState.roomId);
}
if ((null == getRoomId()) && (null != event)) {
setRoomId(event.roomId);
}
setLatestReceivedEvent(event, roomState);
// if no summary is provided
if (null == fromSummary) {
if (null != event) {
setReadMarkerEventId(event.eventId);
setReadReceiptEventId(event.eventId);
}
if (null != roomState) {
setHighlightCount(roomState.getHighlightCount());
setNotificationCount(roomState.getHighlightCount());
}
setUnreadEventsCount(Math.max(getHighlightCount(), getNotificationCount()));
} else {
// else use the provided summary data
setReadMarkerEventId(fromSummary.getReadMarkerEventId());
setReadReceiptEventId(fromSummary.getReadReceiptEventId());
setUnreadEventsCount(fromSummary.getUnreadEventsCount());
setHighlightCount(fromSummary.getHighlightCount());
setNotificationCount(fromSummary.getNotificationCount());
mHeroes.addAll(fromSummary.mHeroes);
mJoinedMembersCountFromSyncRoomSummary = fromSummary.mJoinedMembersCountFromSyncRoomSummary;
mInvitedMembersCountFromSyncRoomSummary = fromSummary.mInvitedMembersCountFromSyncRoomSummary;
mUserMembership = fromSummary.mUserMembership;
}
}
/**
* Set the listener of the RoomSummary class.
*
* @param roomSummaryListener the listener
*/
public static void setRoomSummaryListener(@Nullable RoomSummaryListener roomSummaryListener) {
mRoomSummaryListener = roomSummaryListener;
}
/**
* Test if the event can be summarized.
* Some event types are not yet supported.
*
* @param event the event to test.
* @return true if the event can be summarized
*/
public static boolean isSupportedEvent(Event event) {
if (mRoomSummaryListener != null) {
return mRoomSummaryListener.isSupportedEvent(event);
} else {
return isSupportedEventDefaultImplementation(event);
}
}
/**
* Test if the event can be summarized.
* Default implementation (used when no listener has been defined on the RoomSummary class)
*
* @param event the event to test.
* @return true if the event can be summarized
*/
public static boolean isSupportedEventDefaultImplementation(Event event) {
String type = event.getType();
boolean isSupported = false;
// check if the msgtype is supported
if (TextUtils.equals(Event.EVENT_TYPE_MESSAGE, type)) {
try {
JsonObject eventContent = event.getContentAsJsonObject();
String msgType = "";
JsonElement element = eventContent.get("msgtype");
if (null != element) {
msgType = element.getAsString();
}
isSupported = TextUtils.equals(msgType, Message.MSGTYPE_TEXT)
|| TextUtils.equals(msgType, Message.MSGTYPE_EMOTE)
|| TextUtils.equals(msgType, Message.MSGTYPE_NOTICE)
|| TextUtils.equals(msgType, Message.MSGTYPE_IMAGE)
|| TextUtils.equals(msgType, Message.MSGTYPE_AUDIO)
|| TextUtils.equals(msgType, Message.MSGTYPE_VIDEO)
|| TextUtils.equals(msgType, Message.MSGTYPE_FILE);
if (!isSupported && !TextUtils.isEmpty(msgType)) {
Log.e(LOG_TAG, "isSupportedEvent : Unsupported msg type " + msgType);
}
} catch (Exception e) {
Log.e(LOG_TAG, "isSupportedEvent failed " + e.getMessage(), e);
}
} else if (TextUtils.equals(Event.EVENT_TYPE_MESSAGE_ENCRYPTED, type)) {
isSupported = event.hasContentFields();
} else if (TextUtils.equals(Event.EVENT_TYPE_STATE_ROOM_MEMBER, type)) {
JsonObject eventContentAsJsonObject = event.getContentAsJsonObject();
if (null != eventContentAsJsonObject) {
if (eventContentAsJsonObject.entrySet().isEmpty()) {
Log.d(LOG_TAG, "isSupportedEvent : room member with no content is not supported");
} else {
// do not display the avatar / display name update
EventContent prevEventContent = event.getPrevContent();
EventContent eventContent = event.getEventContent();
String membership = null;
String preMembership = null;
if (eventContent != null) {
membership = eventContent.membership;
}
if (prevEventContent != null) {
preMembership = prevEventContent.membership;
}
isSupported = !TextUtils.equals(membership, preMembership);
if (!isSupported) {
Log.d(LOG_TAG, "isSupportedEvent : do not support avatar display name update");
}
}
}
} else {
isSupported = sSupportedType.contains(type)
|| (event.isCallEvent() && !TextUtils.isEmpty(type) && !Event.EVENT_TYPE_CALL_CANDIDATES.equals(type));
}
if (!isSupported) {
// some events are known to be never traced
// avoid warning when it is not required.
if (!sKnownUnsupportedType.contains(type)) {
Log.e(LOG_TAG, "isSupportedEvent : Unsupported event type " + type);
}
}
return isSupported;
}
/**
* @return the user id
*/
public String getUserId() {
return mUserId;
}
/**
* @return the room id
*/
public String getRoomId() {
return mRoomId;
}
/**
* @return the topic.
*/
public String getRoomTopic() {
return mTopic;
}
/**
* @return the room summary event.
*/
public Event getLatestReceivedEvent() {
return mLatestReceivedEvent;
}
/**
* @return the dedicated room state.
*/
public RoomState getLatestRoomState() {
return mLatestRoomState;
}
/**
* @return true if the current user is invited
*/
public boolean isInvited() {
return RoomMember.MEMBERSHIP_INVITE.equals(mUserMembership);
}
/**
* To call when the room is in the invited section of the sync response
*/
public void setIsInvited() {
mUserMembership = RoomMember.MEMBERSHIP_INVITE;
}
/**
* To call when the room is in the joined section of the sync response
*/
public void setIsJoined() {
mUserMembership = RoomMember.MEMBERSHIP_JOIN;
}
/**
* @return true if the current user has joined the room
*/
public boolean isJoined() {
return RoomMember.MEMBERSHIP_JOIN.equals(mUserMembership);
}
/**
* @return the inviter user id.
*/
public String getInviterUserId() {
return mInviterUserId;
}
/**
* Set the room's {@link org.matrix.androidsdk.rest.model.Event#EVENT_TYPE_STATE_ROOM_TOPIC}.
*
* @param topic The topic
* @return This summary for chaining calls.
*/
public RoomSummary setTopic(String topic) {
mTopic = topic;
return this;
}
/**
* Set the room's ID.
*
* @param roomId The room ID
* @return This summary for chaining calls.
*/
public RoomSummary setRoomId(String roomId) {
mRoomId = roomId;
return this;
}
/**
* Set the latest tracked event (e.g. the latest m.room.message)
*
* @param event The most-recent event.
* @param roomState The room state
* @return This summary for chaining calls.
*/
public RoomSummary setLatestReceivedEvent(Event event, RoomState roomState) {
setLatestReceivedEvent(event);
setLatestRoomState(roomState);
if (null != roomState) {
setTopic(roomState.topic);
}
return this;
}
/**
* Set the latest tracked event (e.g. the latest m.room.message)
*
* @param event The most-recent event.
* @return This summary for chaining calls.
*/
public RoomSummary setLatestReceivedEvent(Event event) {
mLatestReceivedEvent = event;
return this;
}
/**
* Set the latest RoomState
*
* @param roomState The room state of the latest event.
* @return This summary for chaining calls.
*/
public RoomSummary setLatestRoomState(RoomState roomState) {
mLatestRoomState = roomState;
// Keep this code for compatibility?
boolean isInvited = false;
// check for the invitation status
if (null != mLatestRoomState) {
RoomMember member = mLatestRoomState.getMember(mUserId);
isInvited = (null != member) && RoomMember.MEMBERSHIP_INVITE.equals(member.membership);
}
// when invited, the only received message should be the invitation one
if (isInvited) {
mInviterName = null;
if (null != mLatestReceivedEvent) {
mInviterName = mInviterUserId = mLatestReceivedEvent.getSender();
// try to retrieve a display name
if (null != mLatestRoomState) {
mInviterName = mLatestRoomState.getMemberName(mLatestReceivedEvent.getSender());
}
}
} else {
mInviterUserId = mInviterName = null;
}
return this;
}
/**
* Set the read receipt event Id
*
* @param eventId the read receipt event id.
*/
public void setReadReceiptEventId(String eventId) {
Log.d(LOG_TAG, "## setReadReceiptEventId() : " + eventId + " roomId " + getRoomId());
mReadReceiptEventId = eventId;
}
/**
* @return the read receipt event id
*/
public String getReadReceiptEventId() {
return mReadReceiptEventId;
}
/**
* Set the read marker event Id
*
* @param eventId the read marker event id.
*/
public void setReadMarkerEventId(String eventId) {
Log.d(LOG_TAG, "## setReadMarkerEventId() : " + eventId + " roomId " + getRoomId());
if (TextUtils.isEmpty(eventId)) {
Log.e(LOG_TAG, "## setReadMarkerEventId') : null mReadMarkerEventId, in " + getRoomId());
}
mReadMarkerEventId = eventId;
}
/**
* @return the read marker event id
*/
public String getReadMarkerEventId() {
if (TextUtils.isEmpty(mReadMarkerEventId)) {
Log.e(LOG_TAG, "## getReadMarkerEventId') : null mReadMarkerEventId, in " + getRoomId());
mReadMarkerEventId = getReadReceiptEventId();
}
return mReadMarkerEventId;
}
/**
* Update the unread message counter
*
* @param count the unread events count.
*/
public void setUnreadEventsCount(int count) {
Log.d(LOG_TAG, "## setUnreadEventsCount() : " + count + " roomId " + getRoomId());
mUnreadEventsCount = count;
}
/**
* @return the unread events count
*/
public int getUnreadEventsCount() {
return mUnreadEventsCount;
}
/**
* Update the notification counter
*
* @param count the notification counter
*/
public void setNotificationCount(int count) {
Log.d(LOG_TAG, "## setNotificationCount() : " + count + " roomId " + getRoomId());
mNotificationCount = count;
}
/**
* @return the notification count
*/
public int getNotificationCount() {
return mNotificationCount;
}
/**
* Update the highlight counter
*
* @param count the highlight counter
*/
public void setHighlightCount(int count) {
Log.d(LOG_TAG, "## setHighlightCount() : " + count + " roomId " + getRoomId());
mHighlightsCount = count;
}
/**
* @return the highlight count
*/
public int getHighlightCount() {
return mHighlightsCount;
}
/**
* @return the room tags
*/
public Set<String> getRoomTags() {
return mRoomTags;
}
/**
* Update the room tags
*
* @param roomTags the room tags
*/
public void setRoomTags(final Set<String> roomTags) {
if (roomTags != null) {
// wraps the set into a serializable one
mRoomTags = new HashSet<>(roomTags);
} else {
mRoomTags = new HashSet<>();
}
}
public boolean isConferenceUserRoom() {
// test if it is not yet initialized
if (null == mIsConferenceUserRoom) {
mIsConferenceUserRoom = false;
// FIXME LazyLoading: Heroes does not contain me
// FIXME I'm not sure this code will still work
Collection<String> membersId = getHeroes();
// works only with 1:1 room
if (2 == membersId.size()) {
for (String userId : membersId) {
if (MXCallsManager.isConferenceUserId(userId)) {
mIsConferenceUserRoom = true;
break;
}
}
}
}
return mIsConferenceUserRoom;
}
public void setIsConferenceUserRoom(boolean isConferenceUserRoom) {
mIsConferenceUserRoom = isConferenceUserRoom;
}
public void setRoomSyncSummary(@NonNull RoomSyncSummary roomSyncSummary) {
if (roomSyncSummary.heroes != null) {
mHeroes.clear();
mHeroes.addAll(roomSyncSummary.heroes);
}
if (roomSyncSummary.joinedMembersCount != null) {
// Update the value
mJoinedMembersCountFromSyncRoomSummary = roomSyncSummary.joinedMembersCount;
}
if (roomSyncSummary.invitedMembersCount != null) {
// Update the value
mInvitedMembersCountFromSyncRoomSummary = roomSyncSummary.invitedMembersCount;
}
}
@NonNull
public List<String> getHeroes() {
return mHeroes;
}
public int getNumberOfJoinedMembers() {
return mJoinedMembersCountFromSyncRoomSummary;
}
public int getNumberOfInvitedMembers() {
return mInvitedMembersCountFromSyncRoomSummary;
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/rest/model/crypto/RoomKeyShare.java<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.rest.model.crypto;
import com.google.gson.JsonElement;
import com.google.gson.annotations.SerializedName;
/**
* Parent class representing an room key action request
* Note: this class cannot be abstract because of {@link org.matrix.androidsdk.core.JsonUtils#toRoomKeyShare(JsonElement)}
*/
public class RoomKeyShare implements SendToDeviceObject {
public static final String ACTION_SHARE_REQUEST = "request";
public static final String ACTION_SHARE_CANCELLATION = "request_cancellation";
public String action;
@SerializedName("requesting_device_id")
public String requestingDeviceId;
@SerializedName("request_id")
public String requestId;
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/features/integrationmanager/IntegrationManager.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.features.integrationmanager
import android.content.Context
import com.google.gson.Gson
import com.google.gson.reflect.TypeToken
import org.matrix.androidsdk.MXSession
import org.matrix.androidsdk.core.Log
import org.matrix.androidsdk.core.callback.ApiCallback
import org.matrix.androidsdk.core.model.MatrixError
import org.matrix.androidsdk.listeners.MXEventListener
import org.matrix.androidsdk.login.AutoDiscovery
import org.matrix.androidsdk.rest.model.WellKnownManagerConfig
import org.matrix.androidsdk.rest.model.sync.AccountDataElement
/**
* The integration manager makes it possible to:
* - Get the Integration Manager that a user has explicitly set for their account (via account data)
* - Get the recommended/preferred Integration Manager list as defined by the HomeServer (via wellknown)
* - Check if the user has disabled the integration manager feature
* - Allow / Disallow the Integration Manager (propagated to other riot clients)
*
* The integration manager listens to account data and can notify observers of changes.
*
* The wellknown data is refreshed at each fresh application start
*
*/
class IntegrationManager(val mxSession: MXSession, val context: Context) {
/**
* Return the integration manager config, either from AccountData if it has been set, or from the local storage.
* This could return a non-null value even if integrationAllowed is false, so always check integrationAllowed
* before using it.
*/
var integrationServerConfig: IntegrationManagerConfig? = retrieveIntegrationServerConfig()
private set
/**
* Returns false if the user has disabled the integration manager feature
*/
var integrationAllowed = true
private set
/**
* Map of stateEventId to Allowed
*/
private var widgetPermissions = emptyMap<String, Boolean>()
/**
* Map of native widgetType to a map of domain to Allowed
* {
* "jitsi" : {
* "jisit.domain.org" : true,
* "jisit.other.org" : false
* }
* }
*/
private var nativeWidgetPermissions = emptyMap<String, Map<String, Boolean>>()
fun getWellKnownIntegrationManagerConfigs(): List<WellKnownManagerConfig> {
// TODO check the 8h refresh interval
return getStoreWellknownIM()
}
interface IntegrationManagerManagerListener {
fun onIntegrationManagerChange(managerConfig: IntegrationManager)
}
private val listeners = HashSet<IntegrationManagerManagerListener>()
fun addListener(listener: IntegrationManagerManagerListener) = synchronized(listeners) { listeners.add(listener) }
fun removeListener(listener: IntegrationManagerManagerListener) = synchronized(listeners) { listeners.remove(listener) }
fun enableIntegrationManagerUsage(allowed: Boolean, callback: ApiCallback<Void>) {
// Optimistic update before account data sync
if (integrationAllowed != allowed) {
integrationAllowed = allowed
notifyListeners()
}
mxSession.enableIntegrationManagerUsage(allowed, callback)
}
fun setWidgetAllowed(stateEventId: String, allowed: Boolean, callback: ApiCallback<Void?>?) {
val accountDataContent = widgetPermissions.toMutableMap().apply {
put(stateEventId, allowed)
}
val updatedMap = (
mxSession.dataHandler.store
?.getAccountDataElement(AccountDataElement.ACCOUNT_DATA_TYPE_ALLOWED_WIDGETS)
?.content ?: HashMap()
).apply {
set("widgets", accountDataContent)
}
//optimistic update
widgetPermissions = accountDataContent
notifyListeners()
mxSession.accountDataRestClient.setAccountData(
mxSession.myUserId,
AccountDataElement.ACCOUNT_DATA_TYPE_ALLOWED_WIDGETS,
updatedMap,
callback)
}
fun isWidgetAllowed(stateEventId: String): Boolean {
return widgetPermissions[stateEventId] ?: false
}
fun setNativeWidgetDomainAllowed(widgetType: String, domain: String, allowed: Boolean, callback: ApiCallback<Void?>?) {
val accountDataContent = nativeWidgetPermissions.toMutableMap().apply {
(get(widgetType))?.let {
set(widgetType, it.toMutableMap().apply { set(domain, allowed) })
} ?: run {
set(widgetType, mapOf(domain to allowed))
}
}
// Avoid overriding or deleting unknown keys
val updatedMap = (
mxSession.dataHandler.store
?.getAccountDataElement(AccountDataElement.ACCOUNT_DATA_TYPE_ALLOWED_WIDGETS)
?.content ?: HashMap()
).apply {
set("native_widgets", accountDataContent)
}
//optimistic update
nativeWidgetPermissions = accountDataContent
notifyListeners()
mxSession.accountDataRestClient.setAccountData(
mxSession.myUserId,
AccountDataElement.ACCOUNT_DATA_TYPE_ALLOWED_WIDGETS,
updatedMap,
callback)
}
fun isNativeWidgetAllowed(widgetType: String, domain: String?): Boolean {
return nativeWidgetPermissions[widgetType]?.get(domain) ?: false
}
private val eventListener = object : MXEventListener() {
override fun onAccountDataUpdated(accountDataElement: AccountDataElement) {
if (accountDataElement.type == AccountDataElement.ACCOUNT_DATA_TYPE_WIDGETS) {
// The integration server has been updated
val accountWidgets =
mxSession.dataHandler.store?.getAccountDataElement(AccountDataElement.ACCOUNT_DATA_TYPE_WIDGETS)
val integrationManager = accountWidgets?.content?.filter {
val widgetContent = it.value as? Map<*, *>
(widgetContent?.get("content") as? Map<*, *>)?.get("type") == INTEGRATION_MANAGER_WIDGET
}?.entries?.firstOrNull()
val config = getConfigFromData(integrationManager)
if (config != integrationServerConfig) {
localSetIntegrationManagerConfig(config)
}
} else if (accountDataElement.type == AccountDataElement.ACCOUNT_DATA_TYPE_INTEGRATION_PROVISIONING) {
val newValue = extractIntegrationProvisioning()
if (integrationAllowed != newValue) {
integrationAllowed = newValue
notifyListeners()
}
} else if (accountDataElement.type == AccountDataElement.ACCOUNT_DATA_TYPE_ALLOWED_WIDGETS) {
// The integration server has been updated
val allowedWidgetList = extractWidgetPermissionFromAccountData()
val allowedNativeWidgets = extractNativeWidgetsPermissionFromAccountData()
val hasChanges = allowedWidgetList != widgetPermissions
|| allowedNativeWidgets != nativeWidgetPermissions
if (hasChanges) {
widgetPermissions = allowedWidgetList
nativeWidgetPermissions = allowedNativeWidgets
notifyListeners()
}
}
}
override fun onStoreReady() {
localSetIntegrationManagerConfig(retrieveIntegrationServerConfig(), false)
integrationAllowed = extractIntegrationProvisioning()
widgetPermissions = extractWidgetPermissionFromAccountData()
nativeWidgetPermissions = extractNativeWidgetsPermissionFromAccountData()
notifyListeners()
}
}
private fun extractIntegrationProvisioning(): Boolean {
return mxSession.dataHandler
.store
?.getAccountDataElement(AccountDataElement.ACCOUNT_DATA_TYPE_INTEGRATION_PROVISIONING)
?.content?.get("enabled") as? Boolean ?: true
}
init {
//All listeners are cleared when session is closed, so no need to release this?
mxSession.dataHandler.addListener(eventListener)
//Refresh wellknown im if needed
AutoDiscovery().getServerPreferredIntegrationManagers(mxSession.homeServerConfig.homeserverUri.toString(),
object : ApiCallback<List<WellKnownManagerConfig>> {
override fun onSuccess(info: List<WellKnownManagerConfig>) {
setStoreWellknownIM(info)
}
override fun onUnexpectedError(e: Exception?) {
}
override fun onNetworkError(e: Exception?) {
}
override fun onMatrixError(e: MatrixError?) {
}
})
}
private fun extractWidgetPermissionFromAccountData(): Map<String, Boolean> {
val widgets = mxSession.dataHandler
.store
?.getAccountDataElement(AccountDataElement.ACCOUNT_DATA_TYPE_ALLOWED_WIDGETS)
?.content
?.get("widgets")
return (widgets as? Map<*, *>)
?.mapNotNull {
(it.key as? String)?.let { eventId ->
(it.value as? Boolean)?.let { allowed ->
eventId to allowed
}
}
}?.toMap() ?: emptyMap()
}
private fun extractNativeWidgetsPermissionFromAccountData(): Map<String, Map<String, Boolean>> {
val nativeWidgets = mxSession.dataHandler
.store
?.getAccountDataElement(AccountDataElement.ACCOUNT_DATA_TYPE_ALLOWED_WIDGETS)
?.content
?.get("native_widgets")
return (nativeWidgets as? Map<*, *>)
?.mapNotNull {
(it.key as? String)?.let { widgetType ->
(it.value as? Map<*, *>)?.let { allowed ->
widgetType to allowed.mapNotNull { permsMap ->
(permsMap.key as? String)?.let { eventId ->
(permsMap.value as? Boolean)?.let { allowed ->
eventId to allowed
}
}
}.toMap()
}
}
}?.toMap() ?: emptyMap()
}
private fun getStoreWellknownIM(): List<WellKnownManagerConfig> {
val prefs = context.getSharedPreferences(PREFS_IM, Context.MODE_PRIVATE)
return prefs.getString(WELLKNOWN_KEY, null)?.let {
try {
Gson().fromJson<List<WellKnownManagerConfig>>(it,
object : TypeToken<List<WellKnownManagerConfig>>() {}.type)
} catch (any: Throwable) {
emptyList<WellKnownManagerConfig>()
}
} ?: emptyList<WellKnownManagerConfig>()
}
private fun setStoreWellknownIM(list: List<WellKnownManagerConfig>) {
val prefs = context.getSharedPreferences(PREFS_IM, Context.MODE_PRIVATE)
try {
val serialized = Gson().toJson(list)
prefs.edit().putString(WELLKNOWN_KEY, serialized).apply()
} catch (any: Throwable) {
//nop
}
}
private fun localSetIntegrationManagerConfig(config: IntegrationManagerConfig?, notify: Boolean = true) {
integrationServerConfig = config
if (notify) notifyListeners()
}
private fun notifyListeners() {
synchronized(listeners) {
listeners.forEach {
try {
it.onIntegrationManagerChange(this)
} catch (t: Throwable) {
Log.e(LOG_TAG, "Failed to notify integration mgr listener", t)
}
}
}
}
private fun retrieveIntegrationServerConfig(): IntegrationManagerConfig? {
val accountWidgets =
mxSession.dataHandler.store?.getAccountDataElement(AccountDataElement.ACCOUNT_DATA_TYPE_WIDGETS)
val integrationManager = accountWidgets?.content?.filter {
val widgetContent = it.value as? Map<*, *>
(widgetContent?.get("content") as? Map<*, *>)?.get("type") == INTEGRATION_MANAGER_WIDGET
}?.entries?.firstOrNull()
return getConfigFromData(integrationManager)
/*
"integration_manager_1570191637240": {
"content": {
"type": "m.integration_manager",
"url": "https://scalar-staging.vector.im",
"name": "Integration Manager: scalar-staging.vector.im",
"data": {
"api_url": "https://scalar-staging.vector.im/api"
}
},
"sender": "@valere35:matrix.org",
"state_key": "integration_manager_1570191637240",
"type": "m.widget",
"id": "integration_manager_1570191637240"
}
*/
}
private fun getConfigFromData(integrationManager: Map.Entry<String, Any>?): IntegrationManagerConfig? {
((integrationManager?.value as? Map<*, *>)?.get("content") as? Map<*, *>)?.let {
val uiUrl = it["url"] as? String
val apiUrl = (it["data"] as? Map<*, *>)?.get("api_url") as? String
if (uiUrl.isNullOrBlank().not()) {
return IntegrationManagerConfig(uiUrl!!, apiUrl ?: uiUrl)
} else {
return null
}
}
return null
}
companion object {
private const val INTEGRATION_MANAGER_WIDGET = "m.integration_manager"
private const val PREFS_IM = "IntegrationManager.Storage"
private const val WELLKNOWN_KEY = "WellKnown"
private val LOG_TAG = IntegrationManager::class.java.simpleName
}
}
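/*
 * Hedged usage sketch (an illustration, not part of the original file): a client would typically
 * observe configuration changes and check permissions before loading a widget. 'session', 'context'
 * and 'stateEventId' are assumed to be available in the calling code.
 *
 *   val manager = IntegrationManager(session, context)
 *   manager.addListener(object : IntegrationManager.IntegrationManagerManagerListener {
 *       override fun onIntegrationManagerChange(managerConfig: IntegrationManager) {
 *           // refresh the UI from managerConfig.integrationServerConfig / integrationAllowed
 *       }
 *   })
 *   val canLoadWidget = manager.integrationAllowed && manager.isWidgetAllowed(stateEventId)
 */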
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/filter/RoomEventFilter.java<|end_filename|>
/*
* Copyright 2018 <NAME>
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.filter;
import com.google.gson.annotations.SerializedName;
import org.matrix.androidsdk.core.JsonUtils;
import java.util.List;
/**
* Represents "RoomEventFilter" as mentioned in the SPEC
* https://matrix.org/docs/spec/client_server/r0.3.0.html#post-matrix-client-r0-user-userid-filter
*/
public class RoomEventFilter {
public Integer limit;
@SerializedName("not_senders")
public List<String> notSenders;
@SerializedName("not_types")
public List<String> notTypes;
public List<String> senders;
public List<String> types;
public List<String> rooms;
@SerializedName("not_rooms")
public List<String> notRooms;
@SerializedName("contains_url")
public Boolean containsUrl;
@SerializedName("lazy_load_members")
public Boolean lazyLoadMembers;
public boolean hasData() {
return limit != null
|| notSenders != null
|| notTypes != null
|| senders != null
|| types != null
|| rooms != null
|| notRooms != null
|| containsUrl != null
|| lazyLoadMembers != null;
}
public String toJSONString() {
return JsonUtils.getGson(false).toJson(this);
}
}
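/*
 * Illustrative sketch (an assumption, not part of the original file): the filter is typically filled
 * field by field and then serialised with toJSONString() before being attached to a filter request.
 *
 *   RoomEventFilter filter = new RoomEventFilter();
 *   filter.limit = 20;
 *   filter.lazyLoadMembers = true;
 *   if (filter.hasData()) {
 *       String json = filter.toJSONString(); // {"limit":20,"lazy_load_members":true}
 *   }
 */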
<|start_filename|>matrix-sdk/src/androidTest/java/org/matrix/androidsdk/event/SendCustomEventTest.kt<|end_filename|>
/*
* Copyright 2020 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.event
import android.text.TextUtils
import androidx.test.InstrumentationRegistry
import androidx.test.runner.AndroidJUnit4
import com.google.gson.JsonParser
import org.junit.Assert.assertEquals
import org.junit.Assert.assertNotNull
import org.junit.FixMethodOrder
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.MethodSorters
import org.matrix.androidsdk.common.CommonTestHelper
import org.matrix.androidsdk.common.CryptoTestHelper
import org.matrix.androidsdk.common.TestApiCallback
import org.matrix.androidsdk.common.TestConstants
import org.matrix.androidsdk.core.Log
import org.matrix.androidsdk.data.RoomState
import org.matrix.androidsdk.listeners.MXEventListener
import org.matrix.androidsdk.rest.model.Event
import java.util.concurrent.CountDownLatch
@RunWith(AndroidJUnit4::class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
class SendCustomEventTest {
private val mTestHelper = CommonTestHelper()
private val mCryptoTestHelper = CryptoTestHelper(mTestHelper)
@Test
fun test01_sendCustomEvent() {
Log.e(LOG_TAG, "test01_sendEvent")
val context = InstrumentationRegistry.getContext()
val bobSession = mTestHelper.createAccount(TestConstants.USER_BOB, mCryptoTestHelper.defaultSessionParams)
var roomId: String? = null
val lock1 = CountDownLatch(1)
bobSession.createRoom(object : TestApiCallback<String>(lock1) {
override fun onSuccess(info: String) {
roomId = info
super.onSuccess(info)
}
})
mTestHelper.await(lock1)
assertNotNull(roomId)
val room = bobSession.dataHandler.getRoom(roomId!!)
// Wait for the event
var receivedEvent: Event? = null
val lock3 = CountDownLatch(1)
bobSession.dataHandler.addListener(object : MXEventListener() {
override fun onLiveEvent(event: Event, roomState: RoomState) {
if (TextUtils.equals(event.getType(), Event.EVENT_TYPE_MESSAGE)) {
receivedEvent = event
lock3.countDown()
}
}
})
// Send event
val parser = JsonParser()
val element = parser.parse("{" +
"\"body\" : \"message body\"," +
"\"msgtype\" : \"m.text\"," +
"\"mirrorIdKey\" : \"customValue\"" +
"}")
val content = element.asJsonObject
val event = Event(Event.EVENT_TYPE_MESSAGE, content, bobSession.myUserId, roomId)
val lock2 = CountDownLatch(1)
room.sendEvent(event, TestApiCallback(lock2))
mTestHelper.await(lock2)
// Wait for the callback
mTestHelper.await(lock3)
assertNotNull(receivedEvent)
assertEquals("message body", receivedEvent!!.content.asJsonObject.get("body")?.asString)
assertEquals("m.text", receivedEvent!!.content.asJsonObject.get("msgtype")?.asString)
assertEquals("customValue", receivedEvent!!.content.asJsonObject.get("mirrorIdKey")?.asString)
bobSession.clear(context)
}
companion object {
private const val LOG_TAG = "EventTest"
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/view/AutoScrollDownListView.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.view;
import android.content.Context;
import android.util.AttributeSet;
import android.widget.ListView;
import org.matrix.androidsdk.core.Log;
/**
* The listView automatically scrolls down when its height is updated.
* It is used to scroll the list when the keyboard is displayed.
* Note that the list scrolls down automatically thanks to android:transcriptMode="normal" in the XML.
*/
public class AutoScrollDownListView extends ListView {
private static final String LOG_TAG = AutoScrollDownListView.class.getSimpleName();
private boolean mLockSelectionOnResize = false;
public AutoScrollDownListView(Context context) {
super(context);
}
public AutoScrollDownListView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public AutoScrollDownListView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
@Override
protected void onSizeChanged(int xNew, int yNew, int xOld, int yOld) {
super.onSizeChanged(xNew, yNew, xOld, yOld);
if (!mLockSelectionOnResize) {
// check if the keyboard is displayed
// we don't want the list to scroll to the bottom when the keyboard is hidden.
if (yNew < yOld) {
postDelayed(new Runnable() {
@Override
public void run() {
setSelection(getCount() - 1);
}
}, 100);
}
}
}
/**
* Lock the list selection so that it is not updated when the view is resized.
*/
public void lockSelectionOnResize() {
mLockSelectionOnResize = true;
}
@Override
protected void layoutChildren() {
// the adapter items are added without refreshing the list (back pagination only)
// to reduce the number of refreshes
try {
super.layoutChildren();
} catch (Exception e) {
Log.e(LOG_TAG, "## layoutChildren() failed " + e.getMessage(), e);
}
}
@Override
// required to avoid lint errors with MatrixMessageListFragment
public void setSelectionFromTop(int position, int y) {
super.setSelectionFromTop(position, y);
}
}
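// Layout usage sketch (illustrative; the id is hypothetical): the view is used like a regular
// ListView; combined with android:transcriptMode="normal" (see the class comment above) it keeps
// the list pinned to the bottom while the keyboard is shown.
//
//     <org.matrix.androidsdk.view.AutoScrollDownListView
//         android:id="@+id/messages_list"
//         android:layout_width="match_parent"
//         android:layout_height="match_parent"
//         android:transcriptMode="normal" />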
<|start_filename|>matrix-sdk/src/test/java/org/matrix/androidsdk/test/JSONUtils.java<|end_filename|>
package org.matrix.androidsdk.test;
import junit.framework.Assert;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
* Utility class for making JSON.
*/
public class JSONUtils {
public static JSONObject createChunk(JSONArray array) {
return JSONUtils.createChunk(array, "start_token", "end_token");
}
public static JSONObject createChunk(JSONArray array, String start, String end) {
try {
JSONObject json = new JSONObject();
json.put("start", start);
json.put("end", end);
json.put("chunk", array);
return json;
} catch (JSONException e) {
Assert.assertTrue("JSONUtils.createChunk: " + e, false);
}
return null;
}
public static JSONObject error(int code) {
try {
JSONObject json = new JSONObject();
json.put("errcode", code);
json.put("error", "Uh-oh: " + code);
return json;
} catch (JSONException e) {
Assert.assertTrue("JSONUtils.error: " + e, false);
}
return null;
}
}
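// Example of the fixture produced by createChunk(array) (illustrative):
//
//     {
//         "start": "start_token",
//         "end": "end_token",
//         "chunk": [ ...events... ]
//     }
//
// and error(404) produces {"errcode": 404, "error": "Uh-oh: 404"}.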
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/verification/VerificationTransaction.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.verification
import org.matrix.androidsdk.crypto.interfaces.CryptoSession
import org.matrix.androidsdk.crypto.rest.model.crypto.SendToDeviceObject
/**
* Generic interactive key verification transaction
*/
abstract class VerificationTransaction(val transactionId: String,
val otherUserId: String,
var otherDeviceId: String? = null,
val isIncoming: Boolean) {
interface Listener {
fun transactionUpdated(tx: VerificationTransaction)
}
protected var listeners = ArrayList<Listener>()
fun addListener(listener: Listener) {
if (!listeners.contains(listener)) listeners.add(listener)
}
fun removeListener(listener: Listener) {
listeners.remove(listener)
}
abstract fun acceptToDeviceEvent(session: CryptoSession, senderId: String, event: SendToDeviceObject)
abstract fun cancel(session: CryptoSession, code: CancelCode)
}
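// Implementation note (sketch, not part of the contract shown here): concrete transactions are
// expected to notify observers whenever their state changes, e.g.:
//
//     protected fun notifyListeners() {
//         listeners.forEach { it.transactionUpdated(this) }
//     }
//
// and to terminate via cancel() with the appropriate CancelCode on error or user abort.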
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/interfaces/HtmlToolbox.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.interfaces;
import androidx.annotation.Nullable;
import android.text.Html;
public interface HtmlToolbox {
/**
* Convert an HTML String.
* Example: remove unsupported HTML tags, etc.
*
* @param html the source HTML
* @return the converted HTML
*/
String convert(String html);
/**
* Get an HTML Image Getter
*
* @return an HTML Image Getter or null
*/
@Nullable
Html.ImageGetter getImageGetter();
/**
* Get an HTML Tag Handler
*
* @param html the source HTML
* @return an HTML Tag Handler or null
*/
@Nullable
Html.TagHandler getTagHandler(String html);
}
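// Minimal implementation sketch (illustrative; the class name and behaviour are assumptions):
//
//     public class NoOpHtmlToolbox implements HtmlToolbox {
//         @Override
//         public String convert(String html) {
//             return html; // e.g. strip unsupported tags here
//         }
//
//         @Override
//         public Html.ImageGetter getImageGetter() {
//             return null; // no inline image loading
//         }
//
//         @Override
//         public Html.TagHandler getTagHandler(String html) {
//             return null; // rely on the default tag handling
//         }
//     }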
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/keysbackup/KeysBackup.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.keysbackup
import androidx.annotation.UiThread
import androidx.annotation.VisibleForTesting
import androidx.annotation.WorkerThread
import org.matrix.androidsdk.HomeServerConnectionConfig
import org.matrix.androidsdk.core.JsonUtility
import org.matrix.androidsdk.core.Log
import org.matrix.androidsdk.core.callback.ApiCallback
import org.matrix.androidsdk.core.callback.SimpleApiCallback
import org.matrix.androidsdk.core.callback.SuccessCallback
import org.matrix.androidsdk.core.callback.SuccessErrorCallback
import org.matrix.androidsdk.core.listeners.ProgressListener
import org.matrix.androidsdk.core.listeners.StepProgressListener
import org.matrix.androidsdk.core.model.MatrixError
import org.matrix.androidsdk.crypto.MXCRYPTO_ALGORITHM_MEGOLM
import org.matrix.androidsdk.crypto.MXCRYPTO_ALGORITHM_MEGOLM_BACKUP
import org.matrix.androidsdk.crypto.MegolmSessionData
import org.matrix.androidsdk.crypto.cryptostore.db.model.KeysBackupDataEntity
import org.matrix.androidsdk.crypto.data.ImportRoomKeysResult
import org.matrix.androidsdk.crypto.data.MXDeviceInfo
import org.matrix.androidsdk.crypto.data.MXOlmInboundGroupSession2
import org.matrix.androidsdk.crypto.internal.MXCryptoImpl
import org.matrix.androidsdk.crypto.model.keys.*
import org.matrix.androidsdk.crypto.model.rest.keys.BackupKeysResult
import org.matrix.androidsdk.crypto.model.rest.keys.UpdateKeysBackupVersionBody
import org.matrix.androidsdk.crypto.rest.RoomKeysRestClient
import org.matrix.androidsdk.crypto.util.computeRecoveryKey
import org.matrix.androidsdk.crypto.util.extractCurveKeyFromRecoveryKey
import org.matrix.olm.OlmException
import org.matrix.olm.OlmPkDecryption
import org.matrix.olm.OlmPkEncryption
import org.matrix.olm.OlmPkMessage
import retrofit2.Converter
import java.security.InvalidParameterException
import java.util.*
import kotlin.collections.HashMap
/**
* A KeysBackup instance manages the incremental backup of e2e keys (megolm keys)
* to the user's homeserver.
*/
class KeysBackup(private val mCrypto: MXCryptoImpl,
homeServerConnectionConfig: HomeServerConnectionConfig) {
private val mRoomKeysRestClient = RoomKeysRestClient(homeServerConnectionConfig)
private val mKeysBackupStateManager = KeysBackupStateManager(mCrypto)
// The backup version
var mKeysBackupVersion: KeysVersionResult? = null
private set
// The backup key being used.
private var mBackupKey: OlmPkEncryption? = null
private val mRandom = Random()
private var backupAllGroupSessionsCallback: ApiCallback<Void?>? = null
private var mKeysBackupStateListener: KeysBackupStateManager.KeysBackupStateListener? = null
val isEnabled: Boolean
get() = mKeysBackupStateManager.isEnabled
val isStucked: Boolean
get() = mKeysBackupStateManager.isStucked
val state: KeysBackupStateManager.KeysBackupState
get() = mKeysBackupStateManager.state
val currentBackupVersion: String?
get() = mKeysBackupVersion?.version
fun addListener(listener: KeysBackupStateManager.KeysBackupStateListener) {
mKeysBackupStateManager.addListener(listener)
}
fun removeListener(listener: KeysBackupStateManager.KeysBackupStateListener) {
mKeysBackupStateManager.removeListener(listener)
}
/**
* Set up the data required to create a new backup version.
* The backup version will not be created and enabled until [createKeysBackupVersion]
* is called.
* The returned [MegolmBackupCreationInfo] object has a `recoveryKey` member with
* the user-facing recovery key string.
*
* @param password an optional passphrase string that can be entered by the user
* when restoring the backup as an alternative to entering the recovery key.
* @param progressListener a progress listener, as generating private key from password may take a while
* @param callback Asynchronous callback
*/
fun prepareKeysBackupVersion(password: String?,
progressListener: ProgressListener?,
callback: SuccessErrorCallback<MegolmBackupCreationInfo>) {
mCrypto.getDecryptingThreadHandler().post {
try {
val olmPkDecryption = OlmPkDecryption()
val megolmBackupAuthData = MegolmBackupAuthData()
if (password != null) {
// Generate a private key from the password
val backgroundProgressListener = if (progressListener == null) {
null
} else {
object : ProgressListener {
override fun onProgress(progress: Int, total: Int) {
mCrypto.getUIHandler().post {
try {
progressListener.onProgress(progress, total)
} catch (e: Exception) {
Log.e(LOG_TAG, "prepareKeysBackupVersion: onProgress failure", e)
}
}
}
}
}
val generatePrivateKeyResult = generatePrivateKeyWithPassword(password, backgroundProgressListener)
megolmBackupAuthData.publicKey = olmPkDecryption.setPrivateKey(generatePrivateKeyResult.privateKey)
megolmBackupAuthData.privateKeySalt = generatePrivateKeyResult.salt
megolmBackupAuthData.privateKeyIterations = generatePrivateKeyResult.iterations
} else {
val publicKey = olmPkDecryption.generateKey()
megolmBackupAuthData.publicKey = publicKey
}
megolmBackupAuthData.signatures = mCrypto.signObject(JsonUtility.getCanonicalizedJsonString(megolmBackupAuthData.signalableJSONDictionary()))
val megolmBackupCreationInfo = MegolmBackupCreationInfo()
megolmBackupCreationInfo.algorithm = MXCRYPTO_ALGORITHM_MEGOLM_BACKUP
megolmBackupCreationInfo.authData = megolmBackupAuthData
megolmBackupCreationInfo.recoveryKey = computeRecoveryKey(olmPkDecryption.privateKey())
mCrypto.getUIHandler().post { callback.onSuccess(megolmBackupCreationInfo) }
} catch (e: OlmException) {
Log.e(LOG_TAG, "OlmException: ", e)
mCrypto.getUIHandler().post { callback.onUnexpectedError(e) }
}
}
}
/**
* Create a new keys backup version and enable it, using the information returned from [prepareKeysBackupVersion].
*
* @param keysBackupCreationInfo the info object from [prepareKeysBackupVersion].
* @param callback Asynchronous callback
*/
fun createKeysBackupVersion(keysBackupCreationInfo: MegolmBackupCreationInfo,
callback: ApiCallback<KeysVersion>) {
val createKeysBackupVersionBody = CreateKeysBackupVersionBody()
createKeysBackupVersionBody.algorithm = keysBackupCreationInfo.algorithm
createKeysBackupVersionBody.authData = JsonUtility.getBasicGson().toJsonTree(keysBackupCreationInfo.authData)
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Enabling
mRoomKeysRestClient.createKeysBackupVersion(createKeysBackupVersionBody, object : ApiCallback<KeysVersion> {
override fun onSuccess(info: KeysVersion) {
// Reset backup markers.
mCrypto.cryptoStore.resetBackupMarkers()
val keyBackupVersion = KeysVersionResult()
keyBackupVersion.algorithm = createKeysBackupVersionBody.algorithm
keyBackupVersion.authData = createKeysBackupVersionBody.authData
keyBackupVersion.version = info.version
// We can consider that the server does not have keys yet
keyBackupVersion.count = 0
keyBackupVersion.hash = null
enableKeysBackup(keyBackupVersion)
callback.onSuccess(info)
}
override fun onUnexpectedError(e: Exception?) {
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Disabled
callback.onUnexpectedError(e)
}
override fun onNetworkError(e: Exception?) {
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Disabled
callback.onNetworkError(e)
}
override fun onMatrixError(e: MatrixError?) {
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Disabled
callback.onMatrixError(e)
}
})
}
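// Typical creation flow (sketch; variable names are illustrative): first prepare the version,
// show the returned recovery key to the user, then create and enable the backup.
//
//     keysBackup.prepareKeysBackupVersion(passphraseOrNull, progressListener,
//             object : SuccessErrorCallback<MegolmBackupCreationInfo> {
//                 override fun onSuccess(info: MegolmBackupCreationInfo) {
//                     // display info.recoveryKey to the user, then:
//                     keysBackup.createKeysBackupVersion(info, apiCallback)
//                 }
//                 override fun onUnexpectedError(e: Exception) { /* report */ }
//             })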
/**
* Delete a keys backup version. It will delete all backed up keys on the server, and the backup itself.
* If we are currently backing up to this version, the backup will be stopped.
*
* @param version the backup version to delete.
* @param callback Asynchronous callback
*/
fun deleteBackup(version: String, callback: ApiCallback<Void>?) {
mCrypto.getDecryptingThreadHandler().post {
// If we're currently backing up to this backup... stop.
// (We start using it automatically in createKeysBackupVersion so this is symmetrical).
if (mKeysBackupVersion != null && version == mKeysBackupVersion!!.version) {
resetKeysBackupData()
mKeysBackupVersion = null
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Unknown
}
mRoomKeysRestClient.deleteBackup(version, object : ApiCallback<Void> {
private fun eventuallyRestartBackup() {
// Do not stay in KeysBackupState.Unknown but check what is available on the homeserver
if (state == KeysBackupStateManager.KeysBackupState.Unknown) {
checkAndStartKeysBackup()
}
}
override fun onSuccess(info: Void?) {
eventuallyRestartBackup()
mCrypto.getUIHandler().post { callback?.onSuccess(null) }
}
override fun onUnexpectedError(e: Exception) {
eventuallyRestartBackup()
mCrypto.getUIHandler().post { callback?.onUnexpectedError(e) }
}
override fun onNetworkError(e: Exception) {
eventuallyRestartBackup()
mCrypto.getUIHandler().post { callback?.onNetworkError(e) }
}
override fun onMatrixError(e: MatrixError) {
eventuallyRestartBackup()
mCrypto.getUIHandler().post { callback?.onMatrixError(e) }
}
})
}
}
/**
* Ask if the backup on the server contains keys that we may not have locally.
* This should be called when entering in the state READY_TO_BACKUP
*/
fun canRestoreKeys(): Boolean {
// Server contains more keys than locally
val totalNumberOfKeysLocally = getTotalNumbersOfKeys()
val keysBackupData = mCrypto.cryptoStore.keysBackupData
val totalNumberOfKeysServer = keysBackupData?.backupLastServerNumberOfKeys ?: -1
val hashServer = keysBackupData?.backupLastServerHash
return when {
totalNumberOfKeysLocally < totalNumberOfKeysServer -> {
// Server contains more keys than this device
true
}
totalNumberOfKeysLocally == totalNumberOfKeysServer -> {
// Same number, compare hash?
// TODO We have not found any algorithm to determine if a restore is recommended here. Return false for the moment
false
}
else -> false
}
}
/**
* Facility method to get the total number of locally stored keys
*/
fun getTotalNumbersOfKeys(): Int {
return mCrypto.cryptoStore.inboundGroupSessionsCount(false)
}
/**
* Facility method to get the number of backed up keys
*/
fun getTotalNumbersOfBackedUpKeys(): Int {
return mCrypto.cryptoStore.inboundGroupSessionsCount(true)
}
/**
* Start to back up keys immediately.
*
* @param progressListener the callback to follow the progress
* @param callback the main callback
*/
fun backupAllGroupSessions(progressListener: ProgressListener?,
callback: ApiCallback<Void?>?) {
// Get a status right now
getBackupProgress(object : ProgressListener {
override fun onProgress(progress: Int, total: Int) {
// Reset previous listeners if any
resetBackupAllGroupSessionsListeners()
Log.d(LOG_TAG, "backupAllGroupSessions: backupProgress: $progress/$total")
try {
progressListener?.onProgress(progress, total)
} catch (e: Exception) {
Log.e(LOG_TAG, "backupAllGroupSessions: onProgress failure", e)
}
if (progress == total) {
Log.d(LOG_TAG, "backupAllGroupSessions: complete")
callback?.onSuccess(null)
return
}
backupAllGroupSessionsCallback = callback
// Listen to `state` change to determine when to call onBackupProgress and onComplete
mKeysBackupStateListener = object : KeysBackupStateManager.KeysBackupStateListener {
override fun onStateChange(newState: KeysBackupStateManager.KeysBackupState) {
getBackupProgress(object : ProgressListener {
override fun onProgress(progress: Int, total: Int) {
try {
progressListener?.onProgress(progress, total)
} catch (e: Exception) {
Log.e(LOG_TAG, "backupAllGroupSessions: onProgress failure 2", e)
}
// If backup is finished, notify the main listener
if (state === KeysBackupStateManager.KeysBackupState.ReadyToBackUp) {
backupAllGroupSessionsCallback?.onSuccess(null)
resetBackupAllGroupSessionsListeners()
}
}
})
}
}
mKeysBackupStateManager.addListener(mKeysBackupStateListener!!)
backupKeys()
}
})
}
/**
* Check trust on a key backup version.
*
* @param keysBackupVersion the backup version to check.
* @param callback block called when the operations completes.
*/
fun getKeysBackupTrust(keysBackupVersion: KeysVersionResult,
callback: SuccessCallback<KeysBackupVersionTrust>) {
mCrypto.getDecryptingThreadHandler().post {
val keysBackupVersionTrust = getKeysBackupTrustBg(keysBackupVersion)
mCrypto.getUIHandler().post { callback.onSuccess(keysBackupVersionTrust) }
}
}
/**
* Check trust on a key backup version.
* This has to be called on a background thread.
*
* @param keysBackupVersion the backup version to check.
* @return a KeysBackupVersionTrust object
*/
@WorkerThread
private fun getKeysBackupTrustBg(keysBackupVersion: KeysVersionResult): KeysBackupVersionTrust {
val myUserId = mCrypto.myDevice.userId
val keysBackupVersionTrust = KeysBackupVersionTrust()
val authData = keysBackupVersion.getAuthDataAsMegolmBackupAuthData()
if (keysBackupVersion.algorithm == null
|| authData == null
|| authData.publicKey.isEmpty()
|| authData.signatures.isNullOrEmpty()) {
Log.d(LOG_TAG, "getKeysBackupTrust: Key backup is absent or missing required data")
return keysBackupVersionTrust
}
val mySigs: Map<String, *> = authData.signatures!![myUserId] as Map<String, *>
if (mySigs.isEmpty()) {
Log.d(LOG_TAG, "getKeysBackupTrust: Ignoring key backup because it lacks any signatures from this user")
return keysBackupVersionTrust
}
for (keyId in mySigs.keys) {
// XXX: is this how we're supposed to get the device id?
var deviceId: String? = null
val components = keyId.split(":")
if (components.size == 2) {
deviceId = components[1]
}
var device: MXDeviceInfo? = null
if (deviceId != null) {
device = mCrypto.cryptoStore.getUserDevice(deviceId, myUserId)
var isSignatureValid = false
if (device == null) {
Log.d(LOG_TAG, "getKeysBackupTrust: Signature from unknown device $deviceId")
} else {
mCrypto.getOlmDevice()?.let {
try {
it.verifySignature(device.fingerprint(), authData.signalableJSONDictionary(), mySigs[keyId] as String)
isSignatureValid = true
} catch (e: OlmException) {
Log.d(LOG_TAG, "getKeysBackupTrust: Bad signature from device " + device.deviceId + " " + e.localizedMessage)
}
}
if (isSignatureValid && device.isVerified) {
keysBackupVersionTrust.usable = true
}
}
val signature = KeysBackupVersionTrustSignature()
signature.device = device
signature.valid = isSignatureValid
signature.deviceId = deviceId
keysBackupVersionTrust.signatures.add(signature)
}
}
return keysBackupVersionTrust
}
/**
* Set trust on a keys backup version.
* It adds (or removes) the signature of the current device to the authentication part of the keys backup version.
*
* @param keysBackupVersion the backup version to check.
* @param trust the trust to set to the keys backup.
* @param callback block called when the operations completes.
*/
fun trustKeysBackupVersion(keysBackupVersion: KeysVersionResult,
trust: Boolean,
callback: ApiCallback<Void>) {
Log.d(LOG_TAG, "trustKeyBackupVersion: $trust, version ${keysBackupVersion.version}")
mCrypto.getDecryptingThreadHandler().post {
val myUserId = mCrypto.myDevice.userId
// Get auth data to update it
val authData = getMegolmBackupAuthData(keysBackupVersion)
if (authData == null) {
Log.w(LOG_TAG, "trustKeyBackupVersion:trust: Key backup is missing required data")
mCrypto.getUIHandler().post {
callback.onUnexpectedError(IllegalArgumentException("Missing element"))
}
return@post
}
// Get current signatures, or create an empty set
val myUserSignatures = (authData.signatures!![myUserId]?.toMutableMap() ?: HashMap())
if (trust) {
// Add current device signature
val deviceSignatures = mCrypto.signObject(JsonUtility.getCanonicalizedJsonString(authData.signalableJSONDictionary()))
deviceSignatures[myUserId]?.forEach { entry ->
myUserSignatures[entry.key] = entry.value
}
} else {
// Remove current device signature
myUserSignatures.remove("ed25519:${mCrypto.myDevice.deviceId}")
}
// Create an updated version of KeysVersionResult
val updateKeysBackupVersionBody = UpdateKeysBackupVersionBody(keysBackupVersion.version!!)
updateKeysBackupVersionBody.algorithm = keysBackupVersion.algorithm
val newMegolmBackupAuthData = authData.copy()
val newSignatures = newMegolmBackupAuthData.signatures!!.toMutableMap()
newSignatures[myUserId] = myUserSignatures
newMegolmBackupAuthData.signatures = newSignatures
updateKeysBackupVersionBody.authData = JsonUtility.getBasicGson().toJsonTree(newMegolmBackupAuthData)
// And send it to the homeserver
mRoomKeysRestClient.updateKeysBackupVersion(keysBackupVersion.version!!, updateKeysBackupVersionBody, object : ApiCallback<Void> {
override fun onSuccess(info: Void?) {
// Relaunch the state machine on this updated backup version
val newKeysBackupVersion = KeysVersionResult()
newKeysBackupVersion.version = keysBackupVersion.version
newKeysBackupVersion.algorithm = keysBackupVersion.algorithm
newKeysBackupVersion.count = keysBackupVersion.count
newKeysBackupVersion.hash = keysBackupVersion.hash
newKeysBackupVersion.authData = updateKeysBackupVersionBody.authData
checkAndStartWithKeysBackupVersion(newKeysBackupVersion)
mCrypto.getUIHandler().post {
callback.onSuccess(null)
}
}
override fun onUnexpectedError(e: Exception?) {
mCrypto.getUIHandler().post {
callback.onUnexpectedError(e)
}
}
override fun onNetworkError(e: Exception?) {
mCrypto.getUIHandler().post {
callback.onNetworkError(e)
}
}
override fun onMatrixError(e: MatrixError?) {
mCrypto.getUIHandler().post {
callback.onMatrixError(e)
}
}
})
}
}
/**
* Set trust on a keys backup version.
*
* @param keysBackupVersion the backup version to check.
* @param recoveryKey the recovery key to challenge with the key backup public key.
* @param callback block called when the operations completes.
*/
fun trustKeysBackupVersionWithRecoveryKey(keysBackupVersion: KeysVersionResult,
recoveryKey: String,
callback: ApiCallback<Void>) {
Log.d(LOG_TAG, "trustKeysBackupVersionWithRecoveryKey: version ${keysBackupVersion.version}")
mCrypto.getDecryptingThreadHandler().post {
if (!isValidRecoveryKeyForKeysBackupVersion(recoveryKey, keysBackupVersion)) {
Log.w(LOG_TAG, "trustKeyBackupVersionWithRecoveryKey: Invalid recovery key.")
mCrypto.getUIHandler().post {
callback.onUnexpectedError(IllegalArgumentException("Invalid recovery key or password"))
}
return@post
}
trustKeysBackupVersion(keysBackupVersion, true, callback)
}
}
/**
* Set trust on a keys backup version.
*
* @param keysBackupVersion the backup version to check.
* @param password the passphrase to challenge with the keyBackupVersion public key.
* @param callback block called when the operations completes.
*/
fun trustKeysBackupVersionWithPassphrase(keysBackupVersion: KeysVersionResult,
password: String,
callback: ApiCallback<Void>) {
Log.d(LOG_TAG, "trustKeysBackupVersionWithPassphrase: version ${keysBackupVersion.version}")
mCrypto.getDecryptingThreadHandler().post {
val recoveryKey = recoveryKeyFromPassword(password, keysBackupVersion, null)
if (recoveryKey == null) {
Log.w(LOG_TAG, "trustKeysBackupVersionWithPassphrase: Key backup is missing required data")
mCrypto.getUIHandler().post {
callback.onUnexpectedError(IllegalArgumentException("Missing element"))
}
return@post
}
// Check trust using the recovery key
trustKeysBackupVersionWithRecoveryKey(keysBackupVersion, recoveryKey, callback)
}
}
/**
* Get public key from a Recovery key
*
* @param recoveryKey the recovery key
* @return the corresponding public key, from Olm
*/
@WorkerThread
private fun pkPublicKeyFromRecoveryKey(recoveryKey: String): String? {
// Extract the private key from the recovery key
val privateKey = extractCurveKeyFromRecoveryKey(recoveryKey)
if (privateKey == null) {
Log.w(LOG_TAG, "pkPublicKeyFromRecoveryKey: private key is null")
return null
}
// Build the PK decryption object with it
val pkPublicKey: String
try {
val decryption = OlmPkDecryption()
pkPublicKey = decryption.setPrivateKey(privateKey)
} catch (e: OlmException) {
return null
}
return pkPublicKey
}
private fun resetBackupAllGroupSessionsListeners() {
backupAllGroupSessionsCallback = null
mKeysBackupStateListener?.let {
mKeysBackupStateManager.removeListener(it)
}
mKeysBackupStateListener = null
}
/**
* Return the current progress of the backup
*/
fun getBackupProgress(progressListener: ProgressListener) {
mCrypto.getDecryptingThreadHandler().post {
val backedUpKeys = mCrypto.cryptoStore.inboundGroupSessionsCount(true)
val total = mCrypto.cryptoStore.inboundGroupSessionsCount(false)
mCrypto.getUIHandler().post { progressListener.onProgress(backedUpKeys, total) }
}
}
/**
* Restore a backup with a recovery key from a given backup version stored on the homeserver.
*
* @param keysVersionResult the backup version to restore from.
* @param recoveryKey the recovery key to decrypt the retrieved backup.
* @param roomId the id of the room to get backup data from.
* @param sessionId the id of the session to restore.
* @param stepProgressListener the step progress listener
* @param callback Callback. It provides the number of found keys and the number of successfully imported keys.
*/
fun restoreKeysWithRecoveryKey(keysVersionResult: KeysVersionResult,
recoveryKey: String,
roomId: String?,
sessionId: String?,
stepProgressListener: StepProgressListener?,
callback: ApiCallback<ImportRoomKeysResult>) {
Log.d(LOG_TAG, "restoreKeysWithRecoveryKey: From backup version: ${keysVersionResult.version}")
mCrypto.getDecryptingThreadHandler().post(Runnable {
// Check if the recovery is valid before going any further
if (!isValidRecoveryKeyForKeysBackupVersion(recoveryKey, keysVersionResult)) {
Log.e(LOG_TAG, "restoreKeysWithRecoveryKey: Invalid recovery key for this keys version")
mCrypto.getUIHandler().post { callback.onUnexpectedError(InvalidParameterException("Invalid recovery key")) }
return@Runnable
}
// Get a PK decryption instance
val decryption = pkDecryptionFromRecoveryKey(recoveryKey)
if (decryption == null) {
// This should not happen anymore
Log.e(LOG_TAG, "restoreKeysWithRecoveryKey: Invalid recovery key. Error")
mCrypto.getUIHandler().post { callback.onUnexpectedError(InvalidParameterException("Invalid recovery key")) }
return@Runnable
}
if (stepProgressListener != null) {
mCrypto.getUIHandler().post { stepProgressListener.onStepProgress(StepProgressListener.Step.DownloadingKey) }
}
// Get backed up keys from the homeserver
getKeys(sessionId, roomId, keysVersionResult.version!!, object : ApiCallback<KeysBackupData> {
override fun onUnexpectedError(e: Exception) {
mCrypto.getUIHandler().post { callback.onUnexpectedError(e) }
}
override fun onNetworkError(e: Exception) {
mCrypto.getUIHandler().post { callback.onNetworkError(e) }
}
override fun onMatrixError(e: MatrixError) {
mCrypto.getUIHandler().post { callback.onMatrixError(e) }
}
override fun onSuccess(keysBackupData: KeysBackupData) {
val sessionsData = ArrayList<MegolmSessionData>()
// Restore that data
var sessionsFromHsCount = 0
for (roomIdLoop in keysBackupData.roomIdToRoomKeysBackupData.keys) {
for (sessionIdLoop in keysBackupData.roomIdToRoomKeysBackupData[roomIdLoop]!!.sessionIdToKeyBackupData.keys) {
sessionsFromHsCount++
val keyBackupData = keysBackupData.roomIdToRoomKeysBackupData[roomIdLoop]!!.sessionIdToKeyBackupData[sessionIdLoop]!!
val sessionData = decryptKeyBackupData(keyBackupData, sessionIdLoop, roomIdLoop, decryption)
sessionData?.let {
sessionsData.add(it)
}
}
}
Log.d(LOG_TAG, "restoreKeysWithRecoveryKey: Decrypted " + sessionsData.size + " keys out of "
+ sessionsFromHsCount + " from the backup store on the homeserver")
// Do not trigger a backup for them if they come from the backup version we are using
val backUp = keysVersionResult.version != mKeysBackupVersion?.version
if (backUp) {
Log.d(LOG_TAG, "restoreKeysWithRecoveryKey: Those keys will be backed up to backup version: " + mKeysBackupVersion?.version)
}
// Import them into the crypto store
val progressListener = if (stepProgressListener != null) {
object : ProgressListener {
override fun onProgress(progress: Int, total: Int) {
// Note: no need to post to UI thread, importMegolmSessionsData() will do it
stepProgressListener.onStepProgress(StepProgressListener.Step.ImportingKey(progress, total))
}
}
} else {
null
}
mCrypto.importMegolmSessionsData(sessionsData, backUp, progressListener, callback)
}
})
})
}
/**
* Restore a backup with a password from a given backup version stored on the homeserver.
*
* @param keysBackupVersion the backup version to restore from.
* @param password the password to decrypt the retrieved backup.
* @param roomId the id of the room to get backup data from.
* @param sessionId the id of the session to restore.
* @param stepProgressListener the step progress listener
* @param callback Callback. It provides the number of found keys and the number of successfully imported keys.
*/
fun restoreKeyBackupWithPassword(keysBackupVersion: KeysVersionResult,
password: String,
roomId: String?,
sessionId: String?,
stepProgressListener: StepProgressListener?,
callback: ApiCallback<ImportRoomKeysResult>) {
Log.d(LOG_TAG, "[MXKeyBackup] restoreKeyBackup with password: From backup version: ${keysBackupVersion.version}")
mCrypto.getDecryptingThreadHandler().post {
val progressListener = if (stepProgressListener != null) {
object : ProgressListener {
override fun onProgress(progress: Int, total: Int) {
mCrypto.getUIHandler().post {
stepProgressListener.onStepProgress(StepProgressListener.Step.ComputingKey(progress, total))
}
}
}
} else {
null
}
val recoveryKey = recoveryKeyFromPassword(password, keysBackupVersion, progressListener)
if (recoveryKey == null) {
mCrypto.getUIHandler().post {
Log.d(LOG_TAG, "backupKeys: Invalid configuration")
callback.onUnexpectedError(IllegalStateException("Invalid configuration"))
}
return@post
}
restoreKeysWithRecoveryKey(keysBackupVersion, recoveryKey, roomId, sessionId, stepProgressListener, callback)
}
}
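// Restore flow sketch (illustrative): pass null for roomId and sessionId to restore all keys.
//
//     // with a non-null KeysVersionResult obtained from getCurrentVersion() or getVersion():
//     keysBackup.restoreKeyBackupWithPassword(version, password, null, null, stepListener, callback)
//     // or, when the user entered the recovery key directly:
//     keysBackup.restoreKeysWithRecoveryKey(version, recoveryKey, null, null, stepListener, callback)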
/**
* Same method as [RoomKeysRestClient.getRoomKey] except that it accepts nullable
* parameters and always returns a KeysBackupData object through the Callback
*/
private fun getKeys(sessionId: String?,
roomId: String?,
version: String,
callback: ApiCallback<KeysBackupData>) {
if (roomId != null && sessionId != null) {
// Get key for the room and for the session
mRoomKeysRestClient.getRoomKey(roomId, sessionId, version, object : SimpleApiCallback<KeyBackupData>(callback) {
override fun onSuccess(info: KeyBackupData) {
// Convert to KeysBackupData
val keysBackupData = KeysBackupData()
keysBackupData.roomIdToRoomKeysBackupData = HashMap()
val roomKeysBackupData = RoomKeysBackupData()
roomKeysBackupData.sessionIdToKeyBackupData = HashMap()
roomKeysBackupData.sessionIdToKeyBackupData[sessionId] = info
keysBackupData.roomIdToRoomKeysBackupData[roomId] = roomKeysBackupData
callback.onSuccess(keysBackupData)
}
})
} else if (roomId != null) {
// Get all keys for the room
mRoomKeysRestClient.getRoomKeys(roomId, version, object : SimpleApiCallback<RoomKeysBackupData>(callback) {
override fun onSuccess(info: RoomKeysBackupData) {
// Convert to KeysBackupData
val keysBackupData = KeysBackupData()
keysBackupData.roomIdToRoomKeysBackupData = HashMap()
keysBackupData.roomIdToRoomKeysBackupData[roomId] = info
callback.onSuccess(keysBackupData)
}
})
} else {
// Get all keys
mRoomKeysRestClient.getKeys(version, callback)
}
}
@VisibleForTesting
@WorkerThread
fun pkDecryptionFromRecoveryKey(recoveryKey: String): OlmPkDecryption? {
// Extract the private key from the recovery key
val privateKey = extractCurveKeyFromRecoveryKey(recoveryKey)
// Build the PK decryption object with it
var decryption: OlmPkDecryption? = null
if (privateKey != null) {
try {
decryption = OlmPkDecryption()
decryption.setPrivateKey(privateKey)
} catch (e: OlmException) {
Log.e(LOG_TAG, "OlmException", e)
}
}
return decryption
}
/**
* Do a backup if there are new keys, with a delay
*/
fun maybeBackupKeys() {
when {
isStucked -> {
// If not already done, or in error case, check for a valid backup version on the homeserver.
// If there is one, maybeBackupKeys will be called again.
checkAndStartKeysBackup()
}
state == KeysBackupStateManager.KeysBackupState.ReadyToBackUp -> {
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.WillBackUp
// Wait between 0 and 10 seconds, to avoid backup requests from
// different clients hitting the server all at the same time when a
// new key is sent
val delayInMs = mRandom.nextInt(KEY_BACKUP_WAITING_TIME_TO_SEND_KEY_BACKUP_MILLIS).toLong()
mCrypto.getUIHandler().postDelayed({ backupKeys() }, delayInMs)
}
else -> {
Log.d(LOG_TAG, "maybeBackupKeys: Skip it because state: $state")
}
}
}
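// State machine summary (derived from the code above and in backupKeys()):
// ReadyToBackUp -> WillBackUp (random 0-10s delay) -> BackingUp -> ReadyToBackUp once all keys
// are sent; on error the state returns to ReadyToBackUp (retry on the next key) or moves to
// WrongBackUpVersion, which triggers checkAndStartKeysBackup() again.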
/**
* Get information about a backup version defined on the homeserver.
*
* It can be different from mKeysBackupVersion.
* @param version the backup version
* @param callback asynchronous callback returning the backup version information
*/
fun getVersion(version: String,
callback: ApiCallback<KeysVersionResult?>) {
mRoomKeysRestClient.getKeysBackupVersion(version, object : SimpleApiCallback<KeysVersionResult>(callback) {
override fun onSuccess(info: KeysVersionResult) {
callback.onSuccess(info)
}
override fun onMatrixError(e: MatrixError) {
// Workaround because the homeserver currently returns M_NOT_FOUND when there is no key backup
if (e.errcode == MatrixError.NOT_FOUND) {
callback.onSuccess(null)
} else {
// Transmit the error
callback.onMatrixError(e)
}
}
})
}
/**
* Retrieve the current version of the backup from the homeserver.
*
* It can be different from mKeysBackupVersion.
* @param callback onSuccess(null) will be called if there is no backup on the server
*/
fun getCurrentVersion(callback: ApiCallback<KeysVersionResult?>) {
mRoomKeysRestClient.getKeysBackupLastVersion(object : SimpleApiCallback<KeysVersionResult>(callback) {
override fun onSuccess(info: KeysVersionResult) {
callback.onSuccess(info)
}
override fun onMatrixError(e: MatrixError) {
// Workaround because the homeserver currently returns M_NOT_FOUND when there is no key backup
if (e.errcode == MatrixError.NOT_FOUND) {
callback.onSuccess(null)
} else {
// Transmit the error
callback.onMatrixError(e)
}
}
})
}
/**
* This method fetches the last backup version on the server, then compares it to the backup version currently in use.
* If the versions differ, the current backup is deleted (on the server or locally), then the backup may be started again, using the last version.
*
* @param callback onSuccess(true) if the backup is already using the last version, onSuccess(false) otherwise
*/
fun forceUsingLastVersion(callback: ApiCallback<Boolean>) {
getCurrentVersion(object : SimpleApiCallback<KeysVersionResult?>(callback) {
override fun onSuccess(info: KeysVersionResult?) {
val localBackupVersion = mKeysBackupVersion?.version
val serverBackupVersion = info?.version
if (serverBackupVersion == null) {
if (localBackupVersion == null) {
// No backup on the server, and backup is not active
callback.onSuccess(true)
} else {
// No backup on the server, and we are currently backing up, so stop backing up
callback.onSuccess(false)
resetKeysBackupData()
mKeysBackupVersion = null
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Disabled
}
} else {
if (localBackupVersion == null) {
// backup on the server, and backup is not active
callback.onSuccess(false)
// Do a check
checkAndStartWithKeysBackupVersion(info)
} else {
// Backup on the server, and we are currently backing up, compare version
if (localBackupVersion == serverBackupVersion) {
// We are already using the last version of the backup
callback.onSuccess(true)
} else {
// We are not using the last version, so delete the current version we are using on the server
callback.onSuccess(false)
// This will automatically check for the last version then
deleteBackup(localBackupVersion, null)
}
}
}
}
})
}
/**
* Check the server for an active key backup.
*
* If one is present and has a valid signature from one of the user's verified
* devices, start backing up to it.
*/
fun checkAndStartKeysBackup() {
if (!isStucked) {
// Try to start or restart the backup only if it is in unknown or bad state
Log.w(LOG_TAG, "checkAndStartKeysBackup: invalid state: $state")
return
}
mKeysBackupVersion = null
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.CheckingBackUpOnHomeserver
getCurrentVersion(object : ApiCallback<KeysVersionResult?> {
override fun onSuccess(keyBackupVersion: KeysVersionResult?) {
checkAndStartWithKeysBackupVersion(keyBackupVersion)
}
override fun onUnexpectedError(e: Exception?) {
Log.e(LOG_TAG, "checkAndStartKeysBackup: Failed to get current version", e)
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Unknown
}
override fun onNetworkError(e: Exception?) {
Log.e(LOG_TAG, "checkAndStartKeysBackup: Failed to get current version", e)
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Unknown
}
override fun onMatrixError(e: MatrixError?) {
Log.e(LOG_TAG, "checkAndStartKeysBackup: Failed to get current version " + e?.localizedMessage)
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Unknown
}
})
}
private fun checkAndStartWithKeysBackupVersion(keyBackupVersion: KeysVersionResult?) {
Log.d(LOG_TAG, "checkAndStartWithKeyBackupVersion: ${keyBackupVersion?.version}")
mKeysBackupVersion = keyBackupVersion
if (keyBackupVersion == null) {
Log.d(LOG_TAG, "checkAndStartWithKeysBackupVersion: Found no key backup version on the homeserver")
resetKeysBackupData()
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Disabled
} else {
getKeysBackupTrust(keyBackupVersion, SuccessCallback { trustInfo ->
val versionInStore = mCrypto.cryptoStore.keyBackupVersion
if (trustInfo.usable) {
Log.d(LOG_TAG, "checkAndStartWithKeysBackupVersion: Found usable key backup. version: " + keyBackupVersion.version)
// Check the version we used at the previous app run
if (versionInStore != null && versionInStore != keyBackupVersion.version) {
Log.d(LOG_TAG, " -> clean the previously used version $versionInStore")
resetKeysBackupData()
}
Log.d(LOG_TAG, " -> enabling key backups")
enableKeysBackup(keyBackupVersion)
} else {
Log.d(LOG_TAG, "checkAndStartWithKeysBackupVersion: No usable key backup. version: " + keyBackupVersion.version)
if (versionInStore != null) {
Log.d(LOG_TAG, " -> disabling key backup")
resetKeysBackupData()
}
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.NotTrusted
}
})
}
}
/* ==========================================================================================
* Private
* ========================================================================================== */
/**
* Extract MegolmBackupAuthData data from a backup version.
*
* @param keysBackupData the key backup data
*
* @return the authentication data if found and valid, null otherwise
*/
private fun getMegolmBackupAuthData(keysBackupData: KeysVersionResult): MegolmBackupAuthData? {
if (keysBackupData.version.isNullOrBlank()
|| keysBackupData.algorithm != MXCRYPTO_ALGORITHM_MEGOLM_BACKUP
|| keysBackupData.authData == null) {
return null
}
val authData = keysBackupData.getAuthDataAsMegolmBackupAuthData()
if (authData.signatures == null
|| authData.publicKey.isBlank()) {
return null
}
return authData
}
/**
* Compute the recovery key from a password and key backup version.
*
* @param password the password.
* @param keysBackupData the backup and its auth data.
*
* @return the recovery key if successful, null in other cases
*/
@WorkerThread
private fun recoveryKeyFromPassword(password: String, keysBackupData: KeysVersionResult, progressListener: ProgressListener?): String? {
val authData = getMegolmBackupAuthData(keysBackupData)
if (authData == null) {
Log.w(LOG_TAG, "recoveryKeyFromPassword: invalid parameter")
return null
}
if (authData.privateKeySalt.isNullOrBlank()
|| authData.privateKeyIterations == null) {
Log.w(LOG_TAG, "recoveryKeyFromPassword: Salt and/or iterations not found in key backup auth data")
return null
}
// Extract the recovery key from the passphrase
val data = retrievePrivateKeyWithPassword(password, authData.privateKeySalt!!, authData.privateKeyIterations!!, progressListener)
return computeRecoveryKey(data)
}
/**
* Check if a recovery key matches key backup authentication data.
*
* @param recoveryKey the recovery key to challenge.
* @param keysBackupData the backup and its auth data.
*
* @return true if successful.
*/
@WorkerThread
private fun isValidRecoveryKeyForKeysBackupVersion(recoveryKey: String, keysBackupData: KeysVersionResult): Boolean {
// Build PK decryption instance with the recovery key
val publicKey = pkPublicKeyFromRecoveryKey(recoveryKey)
if (publicKey == null) {
Log.w(LOG_TAG, "isValidRecoveryKeyForKeysBackupVersion: public key is null")
return false
}
val authData = getMegolmBackupAuthData(keysBackupData)
if (authData == null) {
Log.w(LOG_TAG, "isValidRecoveryKeyForKeysBackupVersion: Key backup is missing required data")
return false
}
// Compare both
if (publicKey != authData.publicKey) {
Log.w(LOG_TAG, "isValidRecoveryKeyForKeysBackupVersion: Public keys mismatch")
return false
}
// Public keys match!
return true
}
/**
* Enable backing up of keys.
* This method will update the state and will start sending keys in nominal case
*
* @param keysVersionResult backup information object as returned by [getCurrentVersion].
*/
private fun enableKeysBackup(keysVersionResult: KeysVersionResult) {
if (keysVersionResult.authData != null) {
val retrievedMegolmBackupAuthData = keysVersionResult.getAuthDataAsMegolmBackupAuthData()
if (retrievedMegolmBackupAuthData != null) {
mKeysBackupVersion = keysVersionResult
mCrypto.cryptoStore.keyBackupVersion = keysVersionResult.version
onServerDataRetrieved(keysVersionResult.count, keysVersionResult.hash)
try {
mBackupKey = OlmPkEncryption().apply {
setRecipientKey(retrievedMegolmBackupAuthData.publicKey)
}
} catch (e: OlmException) {
Log.e(LOG_TAG, "OlmException", e)
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Disabled
return
}
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.ReadyToBackUp
maybeBackupKeys()
} else {
Log.e(LOG_TAG, "Invalid authentication data")
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Disabled
}
} else {
Log.e(LOG_TAG, "Invalid authentication data")
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.Disabled
}
}
/**
* Update the DB with data fetched from the server
*/
private fun onServerDataRetrieved(count: Int?, hash: String?) {
mCrypto.cryptoStore.keysBackupData = KeysBackupDataEntity()
.apply {
backupLastServerNumberOfKeys = count
backupLastServerHash = hash
}
}
/**
* Reset all local key backup data.
*
* Note: This method does not update the state
*/
private fun resetKeysBackupData() {
resetBackupAllGroupSessionsListeners()
mCrypto.cryptoStore.keyBackupVersion = null
mCrypto.cryptoStore.keysBackupData = null
mBackupKey = null
// Reset backup markers
mCrypto.cryptoStore.resetBackupMarkers()
}
/**
* Send a chunk of keys to backup
*/
@UiThread
private fun backupKeys() {
Log.d(LOG_TAG, "backupKeys")
// Sanity check: as this method can be called after a delay, the state may have changed during the delay
if (!isEnabled || mBackupKey == null || mKeysBackupVersion == null) {
Log.d(LOG_TAG, "backupKeys: Invalid configuration")
backupAllGroupSessionsCallback?.onUnexpectedError(IllegalStateException("Invalid configuration"))
resetBackupAllGroupSessionsListeners()
return
}
if (state === KeysBackupStateManager.KeysBackupState.BackingUp) {
// Do nothing if we are already backing up
Log.d(LOG_TAG, "backupKeys: Invalid state: $state")
return
}
// Get a chunk of keys to backup
val sessions = mCrypto.cryptoStore.inboundGroupSessionsToBackup(KEY_BACKUP_SEND_KEYS_MAX_COUNT)
Log.d(LOG_TAG, "backupKeys: 1 - " + sessions.size + " sessions to back up")
if (sessions.isEmpty()) {
// Backup is up to date
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.ReadyToBackUp
backupAllGroupSessionsCallback?.onSuccess(null)
resetBackupAllGroupSessionsListeners()
return
}
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.BackingUp
mCrypto.encryptingThreadHandler.post {
Log.d(LOG_TAG, "backupKeys: 2 - Encrypting keys")
// Gather data to send to the homeserver
// roomId -> sessionId -> MXKeyBackupData
val keysBackupData = KeysBackupData()
keysBackupData.roomIdToRoomKeysBackupData = HashMap()
for (session in sessions) {
val keyBackupData = encryptGroupSession(session)
if (keysBackupData.roomIdToRoomKeysBackupData[session.mRoomId] == null) {
val roomKeysBackupData = RoomKeysBackupData()
roomKeysBackupData.sessionIdToKeyBackupData = HashMap()
keysBackupData.roomIdToRoomKeysBackupData[session.mRoomId] = roomKeysBackupData
}
try {
keysBackupData.roomIdToRoomKeysBackupData[session.mRoomId]!!.sessionIdToKeyBackupData[session.mSession.sessionIdentifier()] = keyBackupData
} catch (e: OlmException) {
Log.e(LOG_TAG, "OlmException", e)
}
}
Log.d(LOG_TAG, "backupKeys: 4 - Sending request")
// Make the request
mRoomKeysRestClient.backupKeys(mKeysBackupVersion!!.version!!, keysBackupData, object : ApiCallback<BackupKeysResult> {
override fun onNetworkError(e: Exception) {
mCrypto.getUIHandler().post {
backupAllGroupSessionsCallback?.onNetworkError(e)
resetBackupAllGroupSessionsListeners()
onError()
}
}
private fun onError() {
Log.e(LOG_TAG, "backupKeys: backupKeys failed.")
// Retry a bit later
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.ReadyToBackUp
maybeBackupKeys()
}
override fun onMatrixError(e: MatrixError) {
mCrypto.getUIHandler().post {
Log.e(LOG_TAG, "backupKeys: backupKeys failed. Error: " + e.localizedMessage)
when (e.errcode) {
MatrixError.NOT_FOUND,
MatrixError.WRONG_ROOM_KEYS_VERSION -> {
// Backup has been deleted on the server, or we are not using the last backup version
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.WrongBackUpVersion
backupAllGroupSessionsCallback?.onMatrixError(e)
resetBackupAllGroupSessionsListeners()
resetKeysBackupData()
mKeysBackupVersion = null
// Do not stay in KeysBackupState.WrongBackUpVersion but check what is available on the homeserver
checkAndStartKeysBackup()
}
else -> // Come back to the ready state so that we will retry on the next received key
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.ReadyToBackUp
}
}
}
override fun onUnexpectedError(e: Exception) {
mCrypto.getUIHandler().post {
backupAllGroupSessionsCallback?.onUnexpectedError(e)
resetBackupAllGroupSessionsListeners()
onError()
}
}
override fun onSuccess(info: BackupKeysResult) {
mCrypto.getUIHandler().post {
Log.d(LOG_TAG, "backupKeys: 5a - Request complete")
// Mark keys as backed up
mCrypto.cryptoStore.markBackupDoneForInboundGroupSessions(sessions)
if (sessions.size < KEY_BACKUP_SEND_KEYS_MAX_COUNT) {
Log.d(LOG_TAG, "backupKeys: All keys have been backed up")
onServerDataRetrieved(info.count, info.hash)
// Note: Changing state will trigger the call to backupAllGroupSessionsCallback.onSuccess()
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.ReadyToBackUp
} else {
Log.d(LOG_TAG, "backupKeys: Continue to back up keys")
mKeysBackupStateManager.state = KeysBackupStateManager.KeysBackupState.WillBackUp
backupKeys()
}
}
}
})
}
}
@VisibleForTesting
@WorkerThread
fun encryptGroupSession(session: MXOlmInboundGroupSession2): KeyBackupData {
// Gather information for each key
val device = mCrypto.deviceWithIdentityKey(session.mSenderKey, MXCRYPTO_ALGORITHM_MEGOLM)
// Build the m.megolm_backup.v1.curve25519-aes-sha2 data as defined at
// https://github.com/uhoreg/matrix-doc/blob/e2e_backup/proposals/1219-storing-megolm-keys-serverside.md#mmegolm_backupv1curve25519-aes-sha2-key-format
val sessionData = session.exportKeys()
val sessionBackupData = mapOf(
"algorithm" to sessionData!!.algorithm,
"sender_key" to sessionData.senderKey,
"sender_claimed_keys" to sessionData.senderClaimedKeys,
"forwarding_curve25519_key_chain" to (sessionData.forwardingCurve25519KeyChain ?: ArrayList<Any>()),
"session_key" to sessionData.sessionKey)
var encryptedSessionBackupData: OlmPkMessage? = null
try {
encryptedSessionBackupData = mBackupKey?.encrypt(JsonUtility.getGson(false).toJson(sessionBackupData))
} catch (e: OlmException) {
Log.e(LOG_TAG, "OlmException", e)
}
// Build backup data for that key
val keyBackupData = KeyBackupData()
try {
keyBackupData.firstMessageIndex = session.mSession.firstKnownIndex
} catch (e: OlmException) {
Log.e(LOG_TAG, "OlmException", e)
}
keyBackupData.forwardedCount = session.mForwardingCurve25519KeyChain.size
keyBackupData.isVerified = device?.isVerified == true
val data = mapOf(
"ciphertext" to encryptedSessionBackupData!!.mCipherText,
"mac" to encryptedSessionBackupData.mMac,
"ephemeral" to encryptedSessionBackupData.mEphemeralKey)
keyBackupData.sessionData = JsonUtility.getGson(false).toJsonTree(data)
return keyBackupData
}
@VisibleForTesting
@WorkerThread
fun decryptKeyBackupData(keyBackupData: KeyBackupData, sessionId: String, roomId: String, decryption: OlmPkDecryption): MegolmSessionData? {
var sessionBackupData: MegolmSessionData? = null
val jsonObject = keyBackupData.sessionData?.asJsonObject
val ciphertext = jsonObject?.get("ciphertext")?.asString
val mac = jsonObject?.get("mac")?.asString
val ephemeralKey = jsonObject?.get("ephemeral")?.asString
if (ciphertext != null && mac != null && ephemeralKey != null) {
val encrypted = OlmPkMessage()
encrypted.mCipherText = ciphertext
encrypted.mMac = mac
encrypted.mEphemeralKey = ephemeralKey
try {
val decrypted = decryption.decrypt(encrypted)
sessionBackupData = JsonUtility.toClass(decrypted, MegolmSessionData::class.java)
} catch (e: OlmException) {
Log.e(LOG_TAG, "OlmException", e)
}
if (sessionBackupData != null) {
sessionBackupData.sessionId = sessionId
sessionBackupData.roomId = roomId
}
}
return sessionBackupData
}
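// Backup payload shape (as built in encryptGroupSession() and consumed here, illustrative):
//
//     "session_data": {
//         "ciphertext": "<base64>",
//         "mac": "<base64>",
//         "ephemeral": "<curve25519 key>"
//     }
//
// where the plaintext is the exported megolm session (algorithm, sender_key, sender_claimed_keys,
// forwarding_curve25519_key_chain, session_key).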
fun getRoomKeysRestClient(): RoomKeysRestClient {
return mRoomKeysRestClient
}
companion object {
private val LOG_TAG = KeysBackup::class.java.simpleName
// Maximum delay in ms in {@link maybeBackupKeys}
private const val KEY_BACKUP_WAITING_TIME_TO_SEND_KEY_BACKUP_MILLIS = 10000
// Maximum number of keys to send at a time to the homeserver.
private const val KEY_BACKUP_SEND_KEYS_MAX_COUNT = 100
}
/* ==========================================================================================
* DEBUG INFO
* ========================================================================================== */
override fun toString() = "KeysBackup for $mCrypto"
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/pid/ThirdPartyProtocol.java<|end_filename|>
/*
* Copyright 2017 Vector Creations Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.pid;
import java.util.List;
import java.util.Map;
/**
* This class describes the third party server protocols.
*/
public class ThirdPartyProtocol {
// the user fields (domain, nick, username...)
public List<String> userFields;
// the location fields (domain, channels, room...)
public List<String> locationFields;
// the field types
public Map<String, Map<String, String>> fieldTypes;
// the protocol instances
public List<ThirdPartyProtocolInstance> instances;
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/core/PermalinkUtils.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core;
import android.net.Uri;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import android.text.TextUtils;
import org.matrix.androidsdk.rest.model.Event;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Useful methods to deal with Matrix permalinks
*/
public class PermalinkUtils {
private static final String LOG_TAG = PermalinkUtils.class.getSimpleName();
private static final String MATRIX_TO_URL_BASE = "https://matrix.to/#/";
// keys of each item in the map when parsing a universal link
public static final String ULINK_ROOM_ID_OR_ALIAS_KEY = "ULINK_ROOM_ID_OR_ALIAS_KEY";
public static final String ULINK_MATRIX_USER_ID_KEY = "ULINK_MATRIX_USER_ID_KEY";
public static final String ULINK_GROUP_ID_KEY = "ULINK_GROUP_ID_KEY";
public static final String ULINK_EVENT_ID_KEY = "ULINK_EVENT_ID_KEY";
/**
* Creates a permalink for an event.
* Ex: "https://matrix.to/#/!nbzmcXAqpxBXjAdgoX:matrix.org/$1531497316352799BevdV:matrix.org"
*
* @param event the event
* @return the permalink, or null in case of error
*/
@Nullable
public static String createPermalink(Event event) {
if (event == null) {
return null;
}
return createPermalink(event.roomId, event.eventId);
}
/**
* Creates a permalink for an id (can be a user Id, Room Id, etc.).
* Ex: "https://matrix.to/#/@benoit:matrix.org"
*
* @param id the id
* @return the permalink, or null in case of error
*/
@Nullable
public static String createPermalink(String id) {
if (TextUtils.isEmpty(id)) {
return null;
}
return MATRIX_TO_URL_BASE + escape(id);
}
/**
* Creates a permalink for an event. If you have an event you can use {@link #createPermalink(Event)}
* Ex: "https://matrix.to/#/!nbzmcXAqpxBXjAdgoX:matrix.org/$1531497316352799BevdV:matrix.org"
*
* @param roomId the id of the room
* @param eventId the id of the event
* @return the permalink
*/
@NonNull
public static String createPermalink(@NonNull String roomId, @NonNull String eventId) {
return MATRIX_TO_URL_BASE + escape(roomId) + "/" + escape(eventId);
}
/**
* Extract the linked id from the universal link
*
* @param url the universal link, Ex: "https://matrix.to/#/@benoit:matrix.org"
* @return the id from the url, ex: "@benoit:matrix.org", or null if the url is not a permalink
*/
public static String getLinkedId(@Nullable String url) {
boolean isSupported = url != null && url.startsWith(MATRIX_TO_URL_BASE);
if (isSupported) {
return url.substring(MATRIX_TO_URL_BASE.length());
}
return null;
}
/**
* Escape '/' in id, because it is used as a separator
     * Escape '+', which can appear in room version 3 event ids
*
* @param id the id to escape
* @return the escaped id
*/
private static String escape(String id) {
return id.replaceAll("/", "%2F").replaceAll("\\+", "%2B");
}
    /**
     * Tries to parse a universal link.
*
* @param uri the uri to parse
* @param supportedHosts list of supported hosts, not including "matrix.to"
* @param supportedPaths list of supported paths, when the host is in supportedHosts
* @return the universal link items, or null if the universal link is invalid
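     *         For example (illustrative ids), parsing "https://matrix.to/#/!room:matrix.org/$event:matrix.org"
     *         yields a map containing ULINK_ROOM_ID_OR_ALIAS_KEY and ULINK_EVENT_ID_KEY entries.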
*/
@Nullable
public static Map<String, String> parseUniversalLink(@Nullable Uri uri,
@NonNull List<String> supportedHosts,
@NonNull List<String> supportedPaths) {
Map<String, String> map = null;
try {
// sanity check
if (uri == null || TextUtils.isEmpty(uri.getPath())) {
Log.e(LOG_TAG, "## parseUniversalLink : null");
return null;
}
if (!supportedHosts.contains(uri.getHost()) && !TextUtils.equals(uri.getHost(), "matrix.to")) {
Log.e(LOG_TAG, "## parseUniversalLink : unsupported host " + uri.getHost());
return null;
}
boolean isSupportedHost = supportedHosts.contains(uri.getHost());
// when the uri host is in supportedHosts (and is not "matrix.to"), it is followed by a dedicated path
if (isSupportedHost && !supportedPaths.contains(uri.getPath())) {
Log.e(LOG_TAG, "## parseUniversalLink : not supported");
return null;
}
// remove the server part
String uriFragment;
if ((uriFragment = uri.getEncodedFragment()) != null) {
uriFragment = uriFragment.substring(1); // get rid of first "/"
} else {
Log.e(LOG_TAG, "## parseUniversalLink : cannot extract path");
return null;
}
            String[] temp = uriFragment.split("/", 3); // limit to 3 for security concerns (stack overflow injection)
for (int i = 0; i < temp.length; i++) {
temp[i] = URLDecoder.decode(temp[i], "UTF-8");
}
if (!isSupportedHost) {
List<String> compliantList = new ArrayList<>(Arrays.asList(temp));
compliantList.add(0, "room");
temp = compliantList.toArray(new String[compliantList.size()]);
}
if (temp.length < 2) {
Log.e(LOG_TAG, "## parseUniversalLink : too short");
return null;
}
if (!TextUtils.equals(temp[0], "room") && !TextUtils.equals(temp[0], "user")) {
Log.e(LOG_TAG, "## parseUniversalLink : not supported " + temp[0]);
return null;
}
map = new HashMap<>();
String firstParam = temp[1];
if (MXPatterns.isUserId(firstParam)) {
if (temp.length > 2) {
Log.e(LOG_TAG, "## parseUniversalLink : universal link to member id is too long");
return null;
}
map.put(ULINK_MATRIX_USER_ID_KEY, firstParam);
} else if (MXPatterns.isRoomAlias(firstParam) || MXPatterns.isRoomId(firstParam)) {
map.put(ULINK_ROOM_ID_OR_ALIAS_KEY, firstParam);
} else if (MXPatterns.isGroupId(firstParam)) {
map.put(ULINK_GROUP_ID_KEY, firstParam);
}
// room id only ?
if (temp.length > 2) {
String eventId = temp[2];
if (MXPatterns.isEventId(eventId)) {
map.put(ULINK_EVENT_ID_KEY, temp[2]);
} else {
uri = Uri.parse(uri.toString().replace("#/room/", "room/"));
map.put(ULINK_ROOM_ID_OR_ALIAS_KEY, uri.getLastPathSegment());
Set<String> names = uri.getQueryParameterNames();
for (String name : names) {
String value = uri.getQueryParameter(name);
try {
value = URLDecoder.decode(value, "UTF-8");
} catch (Exception e) {
Log.e(LOG_TAG, "## parseUniversalLink : URLDecoder.decode " + e.getMessage(), e);
return null;
}
map.put(name, value);
}
}
}
} catch (Exception e) {
Log.e(LOG_TAG, "## parseUniversalLink : crashes " + e.getLocalizedMessage(), e);
}
// check if the parsing succeeds
if (map != null && map.isEmpty()) {
Log.e(LOG_TAG, "## parseUniversalLink : empty dictionary");
return null;
}
return map;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/RoomTag.java<|end_filename|>
/*
* Copyright 2015 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data;
import org.matrix.androidsdk.core.JsonUtils;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.RoomTags;
import java.util.HashMap;
import java.util.Map;
/**
* Class representing a room tag.
*/
public class RoomTag implements java.io.Serializable {
private static final long serialVersionUID = 5172602958896551204L;
private static final String LOG_TAG = RoomTag.class.getSimpleName();
//
public static final String ROOM_TAG_FAVOURITE = "m.favourite";
public static final String ROOM_TAG_LOW_PRIORITY = "m.lowpriority";
public static final String ROOM_TAG_NO_TAG = "m.recent";
public static final String ROOM_TAG_SERVER_NOTICE = "m.server_notice";
/**
* The name of a tag.
*/
public String mName;
/**
     * The tag order, parsed as a Double.
     * Null if the value cannot be parsed.
*/
public Double mOrder;
/**
* RoomTag creator.
*
* @param aName the tag name.
* @param anOrder the tag order
*/
public RoomTag(String aName, Double anOrder) {
mName = aName;
mOrder = anOrder;
}
/**
* Extract a list of tags from a room tag event.
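     * <p>
     * Expected event content shape (illustrative values): {@code {"tags": {"m.favourite": {"order": 0.25}}}}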
*
     * @param event a room tag event (which can contain several tags)
* @return a dictionary containing the tags the user defined for one room.
*/
public static Map<String, RoomTag> roomTagsWithTagEvent(Event event) {
Map<String, RoomTag> tags = new HashMap<>();
try {
RoomTags roomtags = JsonUtils.toRoomTags(event.getContent());
if ((null != roomtags.tags) && (0 != roomtags.tags.size())) {
for (String tagName : roomtags.tags.keySet()) {
Map<String, Double> params = roomtags.tags.get(tagName);
if (params != null) {
tags.put(tagName, new RoomTag(tagName, params.get("order")));
} else {
tags.put(tagName, new RoomTag(tagName, null));
}
}
}
} catch (Exception e) {
Log.e(LOG_TAG, "roomTagsWithTagEvent fails " + e.getMessage(), e);
}
return tags;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/client/TermsRestClient.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.client
import android.net.Uri
import org.matrix.androidsdk.HomeServerConnectionConfig
import org.matrix.androidsdk.RestClient
import org.matrix.androidsdk.core.callback.ApiCallback
import org.matrix.androidsdk.rest.api.TermsApi
import org.matrix.androidsdk.rest.callback.RestAdapterCallback
import org.matrix.androidsdk.rest.model.terms.AcceptTermsBody
import org.matrix.androidsdk.rest.model.terms.TermsResponse
internal class TermsRestClient :
RestClient<TermsApi>(HomeServerConnectionConfig.Builder()
.withHomeServerUri(Uri.parse("https://foo.bar"))
.build(),
TermsApi::class.java, "") {
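    /**
     * Get the terms of service document.
     *
     * Usage sketch (the URL below is illustrative): [prefix] is expected to end with '/' so that
     * appending "terms" forms the endpoint, e.g. get("https://id.example.org/_matrix/identity/v2/", callback).
     */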
fun get(prefix: String, callback: ApiCallback<TermsResponse>) {
mApi.getTerms("${prefix}terms")
.enqueue(RestAdapterCallback("getTerms", null, callback, null))
}
fun agreeToTerms(prefix: String, agreedUrls: List<String>, callback: ApiCallback<Unit>) {
mApi.agreeToTerms("${prefix}terms", AcceptTermsBody(agreedUrls))
.enqueue(RestAdapterCallback("agreeToTerms", null, callback, null))
}
}
<|start_filename|>matrix-sdk/src/androidTest/java/org/matrix/androidsdk/login/AutoDiscoveryTest.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.login
import android.os.Looper
import org.junit.Assert.*
import org.junit.FixMethodOrder
import org.junit.Test
import org.junit.runners.MethodSorters
import org.matrix.androidsdk.RestClientHttpClientFactory
import org.matrix.androidsdk.RestHttpClientFactoryProvider
import org.matrix.androidsdk.common.CommonTestHelper
import org.matrix.androidsdk.common.MockOkHttpInterceptor
import org.matrix.androidsdk.common.TestApiCallback
import java.util.concurrent.CountDownLatch
@FixMethodOrder(MethodSorters.JVM)
class AutoDiscoveryTest {
companion object {
const val WELL_KNOWN_PATH = ".well-known/matrix/client"
const val MX_CLIENT_VERSION_PATH = "_matrix/client/versions"
const val MX_ID_PATH = "_matrix/identity/api/v1"
}
private val mTestHelper = CommonTestHelper()
//If the returned status code is 404, then IGNORE.
@Test
fun testAutoDiscoveryNotFound() {
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 404)
)
)
//Assert
assertFail(discovery, AutoDiscovery.Action.IGNORE)
}
//If the returned status code is not 200 then FAIL_PROMPT.
@Test
fun testAutoDiscoveryNotOK() {
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 500)
)
)
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_PROMPT)
}
// If the response body is empty then FAIL_PROMPT.
@Test
fun testAutoDiscoveryEmptyBody() {
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, "")
)
)
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_PROMPT)
}
// If the content cannot be parsed, then FAIL_PROMPT.
@Test
fun testAutoDiscoveryNotJSON() {
//Arrange
val mockBody = "<html><h1>Hello world!</h1></html>"
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody)
)
)
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_PROMPT)
}
//If m.homeserver value is not provided, then FAIL_PROMPT.
@Test
fun testAutoDiscoveryMissingHS() {
//Arrange
val mockBody = """
{
"m.homesorv4r" : {}
}
"""
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody)
)
)
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_PROMPT)
}
// if base_url from m.homeserver is not provided, then FAIL_PROMPT.
@Test
fun testAutoDiscoveryMissingHSBaseURl() {
//Arrange
val mockBody = "{\"m.homeserver\" : {}}"
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody)
)
)
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_PROMPT)
}
    // If base_url from m.homeserver is not a URL, then FAIL_ERROR.
@Test
fun testAutoDiscoveryHSBaseURlInvalid() {
//Arrange
val invalidURL = "foo\$[level]/r\$[y]"
val mockBody = """
{
"m.homeserver" : {
"base_url" : "$invalidURL"
}
}
"""
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody)
)
)
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_ERROR)
}
// if base_url from m.homeserver is not a valid Home Server, then FAIL_ERROR.
@Test
fun testAutoDiscoveryNotValideHSURL() {
//Arrange
val baseURl = "https://myhs.org"
val mockBody = """
{
"m.homeserver" : {
"base_url" : "$baseURl"
}
}
"""
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody),
MockOkHttpInterceptor.SimpleRule(baseURl, 404)
))
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_ERROR)
}
@Test
fun testAutoDiscoveryHomeServerSuccess() {
//Arrange
val baseURl = "https://myhs.org"
val mockBody = """
{
"m.homeserver" : {
"base_url" : "$baseURl"
}
}
"""
val hsVersionResponse = """
{
"versions": ["r0.4.0"],
"unstable_features": { "m.lazy_load_members": true}
}
"""
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody),
MockOkHttpInterceptor.SimpleRule("$baseURl/$MX_CLIENT_VERSION_PATH", 200, hsVersionResponse)
))
//Assert
assertEquals(AutoDiscovery.Action.PROMPT, discovery.action)
assertNotNull(discovery.wellKnown?.homeServer)
assertEquals(baseURl, discovery.wellKnown!!.homeServer?.baseURL)
assertNull(discovery.wellKnown?.identityServer)
}
@Test
fun testAutoDiscoveryInvalidIdServerMissingBaseURl() {
//Arrange
val baseURl = "https://myhs.org"
val mockBody = """
{
"m.homeserver" : {
"base_url" : "$baseURl"
},
"m.identity_server" : {}
}
"""
val hsVersionResponse = """
{
"versions": ["r0.4.0"],
"unstable_features": { "m.lazy_load_members": true}
}
"""
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody),
MockOkHttpInterceptor.SimpleRule("$baseURl/$MX_CLIENT_VERSION_PATH", 200, hsVersionResponse)
))
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_ERROR)
}
@Test
fun testAutoDiscoveryInvalidIdServerInvalidBaseURl() {
//Arrange
val baseURl = "https://myhs.org"
val idServerBaseURL = ""
val mockBody = """
{
"m.homeserver" : {
"base_url" : "$baseURl"
},
"m.identity_server" : {
"base_url" : "$idServerBaseURL"
}
}
"""
val hsVersionResponse = """
{
"versions": ["r0.4.0"],
"unstable_features": { "m.lazy_load_members": true}
}
"""
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody),
MockOkHttpInterceptor.SimpleRule("$baseURl/$MX_CLIENT_VERSION_PATH", 200, hsVersionResponse)
))
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_ERROR)
}
@Test
fun testAutoDiscoveryInvalidIdServer() {
//Arrange
val baseURl = "https://myhs.org"
val idServerBaseURL = "https://myhs.org"
val mockBody = """
{
"m.homeserver" : {
"base_url" : "$baseURl"
},
"m.identity_server" : {
"base_url" : "$idServerBaseURL"
}
}
"""
val hsVersionResponse = """
{
"versions": ["r0.4.0"],
"unstable_features": { "m.lazy_load_members": true}
}
"""
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody),
MockOkHttpInterceptor.SimpleRule("$baseURl/$MX_CLIENT_VERSION_PATH", 200, hsVersionResponse)
))
//Assert
assertFail(discovery, AutoDiscovery.Action.FAIL_ERROR)
}
@Test
fun testAutoDiscoverySuccessful() {
//Arrange
val baseURl = "https://myhs.org"
val idServerBaseURL = "https://boom.org"
val mockBody = """
{
"m.homeserver" : {
"base_url" : "$baseURl"
},
"m.identity_server" : {
"base_url" : "$idServerBaseURL"
}
}
"""
val hsVersionResponse = """
{
"versions": ["r0.4.0"],
"unstable_features": { "m.lazy_load_members": true}
}
"""
val idServerResponse = "{}"
val discovery = arrangeAndAct(
listOf(
MockOkHttpInterceptor.SimpleRule(WELL_KNOWN_PATH, 200, mockBody),
MockOkHttpInterceptor.SimpleRule("$baseURl/$MX_CLIENT_VERSION_PATH", 200, hsVersionResponse),
MockOkHttpInterceptor.SimpleRule("$idServerBaseURL/$MX_ID_PATH", 200, idServerResponse)
))
//Assert
assertEquals(AutoDiscovery.Action.PROMPT, discovery.action)
assertNotNull(discovery.wellKnown?.homeServer)
assertNotNull(discovery.wellKnown?.identityServer)
assertEquals(baseURl, discovery.wellKnown?.homeServer?.baseURL)
assertEquals(idServerBaseURL, discovery.wellKnown?.identityServer?.baseURL)
}
/* ==========================================================================================
* Private
* ========================================================================================== */
private fun arrangeAndAct(rules: List<MockOkHttpInterceptor.Rule>)
: AutoDiscovery.DiscoveredClientConfig {
//Arrange
val mockInterceptor = MockOkHttpInterceptor()
rules.forEach {
mockInterceptor.addRule(it)
}
RestHttpClientFactoryProvider.defaultProvider = RestClientHttpClientFactory(mockInterceptor)
val ad = AutoDiscovery()
//Act
var callbackThread: Thread? = null
val lock = CountDownLatch(1)
var discovery: AutoDiscovery.DiscoveredClientConfig? = null
ad.findClientConfig("matrix.org", object : TestApiCallback<AutoDiscovery.DiscoveredClientConfig>(lock) {
override fun onSuccess(info: AutoDiscovery.DiscoveredClientConfig) {
discovery = info
callbackThread = Thread.currentThread()
super.onSuccess(info)
}
})
mTestHelper.await(lock)
//Assert
assertNotNull(discovery)
assertEquals("Callback should be in main thread", Looper.getMainLooper().thread, callbackThread)
return discovery!!
}
private fun assertFail(discovery: AutoDiscovery.DiscoveredClientConfig, expectedAction: AutoDiscovery.Action) {
assertEquals(expectedAction, discovery.action)
assertNull(discovery.wellKnown?.homeServer)
assertNull(discovery.wellKnown?.identityServer)
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/client/PushRulesRestClient.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.client;
import org.matrix.androidsdk.HomeServerConnectionConfig;
import org.matrix.androidsdk.RestClient;
import org.matrix.androidsdk.core.JsonUtils;
import org.matrix.androidsdk.core.callback.ApiCallback;
import org.matrix.androidsdk.rest.api.PushRulesApi;
import org.matrix.androidsdk.rest.callback.RestAdapterCallback;
import org.matrix.androidsdk.rest.model.bingrules.BingRule;
import org.matrix.androidsdk.rest.model.bingrules.PushRulesResponse;
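/**
 * REST client for the push rules API.
 * <p>
 * Minimal usage sketch (hsConfig and callback are illustrative):
 * <pre>{@code
 * PushRulesRestClient client = new PushRulesRestClient(hsConfig);
 * client.getAllRules(callback); // callback is an ApiCallback<PushRulesResponse>
 * }</pre>
 */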
public class PushRulesRestClient extends RestClient<PushRulesApi> {
/**
* {@inheritDoc}
*/
public PushRulesRestClient(HomeServerConnectionConfig hsConfig) {
super(hsConfig, PushRulesApi.class, RestClient.URI_API_PREFIX_PATH_R0, JsonUtils.getGson(false));
}
/**
* Retrieve the push rules list.
*
* @param callback the asynchronous callback.
*/
public void getAllRules(final ApiCallback<PushRulesResponse> callback) {
mApi.getAllRules()
.enqueue(new RestAdapterCallback<>("getAllRules", null, callback, null));
}
/**
* Update the rule enable status.
*
     * @param kind     the rule kind
* @param ruleId the rule id
* @param status the rule state
* @param callback the asynchronous callback.
*/
    public void updateEnableRuleStatus(String kind, String ruleId, boolean status, final ApiCallback<Void> callback) {
        mApi.updateEnableRuleStatus(kind, ruleId, status)
.enqueue(new RestAdapterCallback<>("updateEnableRuleStatus", null, callback, null));
}
/**
* Update the rule actions lists.
*
     * @param kind     the rule kind
* @param ruleId the rule id
* @param actions the rule actions list
* @param callback the asynchronous callback
*/
    public void updateRuleActions(String kind, String ruleId, Object actions, final ApiCallback<Void> callback) {
        mApi.updateRuleActions(kind, ruleId, actions)
.enqueue(new RestAdapterCallback<>("updateRuleActions", null, callback, null));
}
/**
* Delete a rule.
*
     * @param kind     the rule kind
* @param ruleId the rule id
* @param callback the asynchronous callback
*/
    public void deleteRule(String kind, String ruleId, final ApiCallback<Void> callback) {
        mApi.deleteRule(kind, ruleId)
.enqueue(new RestAdapterCallback<>("deleteRule", null, callback, null));
}
/**
* Add a rule.
*
* @param rule the rule
* @param callback the asynchronous callback
*/
public void addRule(BingRule rule, final ApiCallback<Void> callback) {
mApi.addRule(rule.kind, rule.ruleId, rule.toJsonElement())
.enqueue(new RestAdapterCallback<>("addRule", null, callback, null));
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/call/IMXCall.java<|end_filename|>
/*
* Copyright 2015 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.call;
import android.view.View;
import com.google.gson.JsonObject;
import org.matrix.androidsdk.MXSession;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.rest.model.Event;
import org.webrtc.PeerConnection;
import java.util.List;
import javax.annotation.Nullable;
/**
* Audio/video call interface.
* See {@link MXWebRtcCall}.
*/
public interface IMXCall {
// call ending use cases (see {@link #dispatchOnCallEnd}):
int END_CALL_REASON_UNDEFINED = -1;
/**
* the callee has rejected the incoming call
**/
int END_CALL_REASON_PEER_HANG_UP = 0;
/**
* the callee has rejected the incoming call from another device
**/
int END_CALL_REASON_PEER_HANG_UP_ELSEWHERE = 1;
/**
* call ended by the local user himself
**/
int END_CALL_REASON_USER_HIMSELF = 2;
// call state events
// the call is an empty shell nothing has been initialized
String CALL_STATE_CREATED = "IMXCall.CALL_STATE_CREATED";
    // the call view is being created and inserted.
String CALL_STATE_CREATING_CALL_VIEW = "IMXCall.CALL_STATE_CREATING_CALL_VIEW";
    // the call view is ready to be used.
    // the call can start from now on.
String CALL_STATE_READY = "IMXCall.CALL_STATE_READY";
// incoming/outgoing calls : initializing the local audio / video
String CALL_STATE_WAIT_LOCAL_MEDIA = "IMXCall.CALL_STATE_WAIT_LOCAL_MEDIA";
// incoming calls : the local media is retrieved
String CALL_STATE_WAIT_CREATE_OFFER = "IMXCall.CALL_STATE_WAIT_CREATE_OFFER";
// outgoing calls : the call invitation is sent
String CALL_STATE_INVITE_SENT = "IMXCall.CALL_STATE_INVITE_SENT";
// the device is ringing
// incoming calls : after applying the incoming params
// outgoing calls : after getting the m.call.invite echo
String CALL_STATE_RINGING = "IMXCall.CALL_STATE_RINGING";
// incoming calls : create the call answer
String CALL_STATE_CREATE_ANSWER = "IMXCall.CALL_STATE_CREATE_ANSWER";
// the call is connecting
String CALL_STATE_CONNECTING = "IMXCall.CALL_STATE_CONNECTING";
// the call is in progress
String CALL_STATE_CONNECTED = "IMXCall.CALL_STATE_CONNECTED";
// call is ended
String CALL_STATE_ENDED = "IMXCall.CALL_STATE_ENDED";
// error codes
// cannot initialize the camera
String CALL_ERROR_CAMERA_INIT_FAILED = "IMXCall.CALL_ERROR_CAMERA_INIT_FAILED";
// cannot initialize the call.
String CALL_ERROR_CALL_INIT_FAILED = "IMXCall.CALL_ERROR_CALL_INIT_FAILED";
// ICE error
String CALL_ERROR_ICE_FAILED = "IMXCall.CALL_ERROR_ICE_FAILED";
// the user did not respond to the call.
String CALL_ERROR_USER_NOT_RESPONDING = "IMXCall.CALL_ERROR_USER_NOT_RESPONDING";
// creator
/**
* Create the callview
*/
void createCallView();
/**
* The activity is paused.
*/
void onPause();
/**
* The activity is resumed.
*/
void onResume();
    // actions (must be done after dispatchOnViewReady())
/**
* Start a call.
*
* @param aLocalVideoPosition position of the local video attendee
*/
void placeCall(VideoLayoutConfiguration aLocalVideoPosition);
/**
* Prepare a call reception.
*
* @param aCallInviteParams the invitation Event content
* @param aCallId the call ID
* @param aLocalVideoPosition position of the local video attendee
*/
void prepareIncomingCall(JsonObject aCallInviteParams, String aCallId, VideoLayoutConfiguration aLocalVideoPosition);
/**
* The call has been detected as an incoming one.
* The application launched the dedicated activity and expects to launch the incoming call.
*
* @param aLocalVideoPosition position of the local video attendee
*/
void launchIncomingCall(VideoLayoutConfiguration aLocalVideoPosition);
/**
* The video will be displayed according to the values set in aConfigurationToApply.
*
* @param aConfigurationToApply the new position to be applied
*/
void updateLocalVideoRendererPosition(VideoLayoutConfiguration aConfigurationToApply);
// events thread
/**
* Manage the call events.
*
* @param event the call event.
*/
void handleCallEvent(Event event);
// user actions
/**
* The call is accepted.
*/
void answer();
/**
     * The call has been answered on another device.
*/
void onAnsweredElsewhere();
/**
* The call is hung up.
*
* @param reason the reason, or null for no reason. Reasons are used to indicate errors in the current VoIP implementation.
*/
void hangup(@Nullable String reason);
/**
* Add a listener to the call manager.
*
* @param callListener the call listener
*/
void addListener(IMXCallListener callListener);
/**
* Remove a listener from the call manager.
*
* @param callListener the call listener
*/
void removeListener(IMXCallListener callListener);
// getters / setters
/**
* @return the callId
*/
String getCallId();
/**
* Set the callId
*
* @param callId the call id
*/
void setCallId(String callId);
/**
* @return the linked room
*/
Room getRoom();
/**
* Set the linked rooms (conference call)
*
* @param room the room
* @param callSignalingRoom the call signaling room.
*/
void setRooms(Room room, Room callSignalingRoom);
/**
* @return the call signaling room
*/
Room getCallSignalingRoom();
/**
* @return the session
*/
MXSession getSession();
/**
* @return true if the call is an incoming call.
*/
boolean isIncoming();
/**
* Set the call type: video or voice
*
* @param isVideo true for video call, false for VoIP
*/
void setIsVideo(boolean isVideo);
/**
* @return true if the call is a video call.
*/
boolean isVideo();
/**
* Defines the call conference status
*
* @param isConference the conference status
*/
void setIsConference(boolean isConference);
/**
* @return true if the call is a conference call.
*/
boolean isConference();
/**
* @return the callstate (must be a CALL_STATE_XX value)
*/
String getCallState();
/**
* @return the callView
*/
View getCallView();
/**
* @return the callView visibility
*/
int getVisibility();
/**
* Set the callview visibility
*
     * @param visibility the call view visibility (e.g. View.VISIBLE or View.GONE)
* @return true if the operation succeeds
*/
boolean setVisibility(int visibility);
/**
* @return the call start time in ms since epoch, -1 if not defined.
*/
long getCallStartTime();
/**
* @return the call elapsed time in seconds, -1 if not defined.
*/
long getCallElapsedTime();
/**
* Switch between device cameras. The transmitted stream is modified
* according to the new camera in use.
* If the camera used in the video call is the front one, calling
     * switchRearFrontCamera() will make the rear one be used, and vice versa.
* If only one camera is available, nothing is done.
*
     * @return true if the switch succeeds, false otherwise.
*/
boolean switchRearFrontCamera();
/**
* Indicate if a camera switch was performed or not.
     * For some reason, switching the camera from front to rear (or
     * vice versa) may not have been performed (e.g. only one camera is available).
* <p>
* <br>See {@link #switchRearFrontCamera()}.
*
* @return true if camera was switched, false otherwise
*/
boolean isCameraSwitched();
/**
* Indicate if the device supports camera switching.
* <p>See {@link #switchRearFrontCamera()}.
*
* @return true if switch camera is supported, false otherwise
*/
boolean isSwitchCameraSupported();
/**
     * Mute/unmute the recording of the local video attendee. Set isVideoMuted
     * to true to stop sending the local video; set it to false to resume
     * sending it.
*
* @param isVideoMuted true to mute the video recording, false to unmute
*/
void muteVideoRecording(boolean isVideoMuted);
/**
* Return the recording mute status of the local video attendee.
* <p>
* <br>See {@link #muteVideoRecording(boolean)}.
*
* @return true if video recording is muted, false otherwise
*/
boolean isVideoRecordingMuted();
List<PeerConnection.IceServer> getIceServers();
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/comparator/Comparators.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.comparator;
import org.matrix.androidsdk.core.interfaces.DatedObject;
import java.util.Comparator;
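/**
 * Comparators for {@link DatedObject} collections.
 * <p>
 * Minimal usage sketch (the {@code events} list is illustrative):
 * <pre>{@code
 * List<DatedObject> events = new ArrayList<>();
 * Collections.sort(events, Comparators.ascComparator);
 * }</pre>
 */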
public class Comparators {
/**
* Comparator to sort DatedObjects from the oldest to the latest.
*/
public static final Comparator<DatedObject> ascComparator = new Comparator<DatedObject>() {
@Override
public int compare(DatedObject datedObject1, DatedObject datedObject2) {
            // use Long.compare() to avoid integer overflow on large timestamp differences
            return Long.compare(datedObject1.getDate(), datedObject2.getDate());
}
};
/**
* Comparator to sort DatedObjects from the latest to the oldest.
*/
public static final Comparator<DatedObject> descComparator = new Comparator<DatedObject>() {
@Override
public int compare(DatedObject datedObject1, DatedObject datedObject2) {
            // use Long.compare() to avoid integer overflow on large timestamp differences
            return Long.compare(datedObject2.getDate(), datedObject1.getDate());
}
};
}
<|start_filename|>matrix-sdk/src/androidTest/java/org/matrix/androidsdk/lazyloading/RoomNameTest.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.lazyloading;
import androidx.test.InstrumentationRegistry;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.matrix.androidsdk.MXSession;
import org.matrix.androidsdk.common.CommonTestHelper;
import java.util.Arrays;
import java.util.List;
@FixMethodOrder(MethodSorters.JVM)
public class RoomNameTest {
private CommonTestHelper mTestHelper = new CommonTestHelper();
private RoomNameTestHelper mRoomNameTestHelper = new RoomNameTestHelper(mTestHelper);
private List<Integer> userQuantities = Arrays.asList(1, 2, 3, 10);
@BeforeClass
public static void init() {
MXSession.initUserAgent(InstrumentationRegistry.getContext(), null);
}
@Test
public void RoomName_noName_ShouldLoadAllMembers() throws Exception {
RoomState_noName(false);
}
@Test
public void RoomName_noName_LazyLoading() throws Exception {
RoomState_noName(true);
}
private void RoomState_noName(final boolean withLazyLoading) throws Exception {
for (int qty : userQuantities) {
RoomNameScenarioData data = mRoomNameTestHelper.createScenario(qty, null, withLazyLoading);
checkAllName(data, null);
mRoomNameTestHelper.clearAllSessions(data);
}
}
@Test
public void RoomName_name_ShouldLoadAllMembers() throws Exception {
RoomState_name(false);
}
@Test
public void RoomName_name_LazyLoading() throws Exception {
RoomState_name(true);
}
private void RoomState_name(final boolean withLazyLoading) throws Exception {
for (int qty : userQuantities) {
RoomNameScenarioData data = mRoomNameTestHelper.createScenario(qty, "Room name " + qty, withLazyLoading);
checkAllName(data, "Room name " + qty);
mRoomNameTestHelper.clearAllSessions(data);
}
}
/* ==========================================================================================
* PRIVATE
* ========================================================================================== */
private void checkAllName(RoomNameScenarioData roomNameScenarioData, String expectedName) {
for (MXSession session : roomNameScenarioData.userSessions) {
Assert.assertEquals(expectedName, session.getDataHandler().getRoom(roomNameScenarioData.roomId).getState().name);
}
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/data/MXOlmInboundGroupSession2.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.data;
import androidx.annotation.Nullable;
import android.text.TextUtils;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.crypto.CryptoConstantsKt;
import org.matrix.androidsdk.crypto.MegolmSessionData;
import org.matrix.olm.OlmInboundGroupSession;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * This class adds more context to an OlmInboundGroupSession object.
 * This allows additional checks. The class implements Serializable so that the context can be stored.
*/
public class MXOlmInboundGroupSession2 implements Serializable {
//
private static final String LOG_TAG = MXOlmInboundGroupSession2.class.getSimpleName();
// define a serialVersionUID to avoid having to redefine the class after updates
private static final long serialVersionUID = 201702011617L;
// The associated olm inbound group session.
public OlmInboundGroupSession mSession;
// The room in which this session is used.
public String mRoomId;
// The base64-encoded curve25519 key of the sender.
public String mSenderKey;
// Other keys the sender claims.
public Map<String, String> mKeysClaimed;
// Devices which forwarded this session to us (normally empty).
public List<String> mForwardingCurve25519KeyChain = new ArrayList<>();
/**
* Constructor
*
* @param prevFormatSession the previous session format
*/
public MXOlmInboundGroupSession2(MXOlmInboundGroupSession prevFormatSession) {
mSession = prevFormatSession.mSession;
mRoomId = prevFormatSession.mRoomId;
mSenderKey = prevFormatSession.mSenderKey;
mKeysClaimed = prevFormatSession.mKeysClaimed;
}
/**
* Constructor
*
* @param sessionKey the session key
* @param isImported true if it is an imported session key
*/
public MXOlmInboundGroupSession2(String sessionKey, boolean isImported) {
try {
if (!isImported) {
mSession = new OlmInboundGroupSession(sessionKey);
} else {
mSession = OlmInboundGroupSession.importSession(sessionKey);
}
} catch (Exception e) {
Log.e(LOG_TAG, "Cannot create : " + e.getMessage(), e);
}
}
/**
* Create a new instance from the provided keys map.
*
* @param megolmSessionData the megolm session data
* @throws Exception if the data are invalid
*/
public MXOlmInboundGroupSession2(MegolmSessionData megolmSessionData) throws Exception {
try {
mSession = OlmInboundGroupSession.importSession(megolmSessionData.sessionKey);
if (!TextUtils.equals(mSession.sessionIdentifier(), megolmSessionData.sessionId)) {
throw new Exception("Mismatched group session Id");
}
mSenderKey = megolmSessionData.senderKey;
mKeysClaimed = megolmSessionData.senderClaimedKeys;
mRoomId = megolmSessionData.roomId;
} catch (Exception e) {
throw new Exception(e.getMessage());
}
}
/**
* Export the inbound group session keys
*
* @return the inbound group session as MegolmSessionData if the operation succeeds
*/
@Nullable
public MegolmSessionData exportKeys() {
MegolmSessionData megolmSessionData = new MegolmSessionData();
try {
if (null == mForwardingCurve25519KeyChain) {
mForwardingCurve25519KeyChain = new ArrayList<>();
}
megolmSessionData.senderClaimedEd25519Key = mKeysClaimed.get("ed25519");
megolmSessionData.forwardingCurve25519KeyChain = new ArrayList<>(mForwardingCurve25519KeyChain);
megolmSessionData.senderKey = mSenderKey;
megolmSessionData.senderClaimedKeys = mKeysClaimed;
megolmSessionData.roomId = mRoomId;
megolmSessionData.sessionId = mSession.sessionIdentifier();
megolmSessionData.sessionKey = mSession.export(mSession.getFirstKnownIndex());
megolmSessionData.algorithm = CryptoConstantsKt.MXCRYPTO_ALGORITHM_MEGOLM;
} catch (Exception e) {
megolmSessionData = null;
Log.e(LOG_TAG, "## export() : senderKey " + mSenderKey + " failed " + e.getMessage(), e);
}
return megolmSessionData;
}
/**
* @return the first known message index
*/
public Long getFirstKnownIndex() {
if (null != mSession) {
try {
return mSession.getFirstKnownIndex();
} catch (Exception e) {
Log.e(LOG_TAG, "## getFirstKnownIndex() : getFirstKnownIndex failed " + e.getMessage(), e);
}
}
return null;
}
/**
* Export the session for a message index.
*
* @param messageIndex the message index
* @return the exported data
*/
public String exportSession(long messageIndex) {
if (null != mSession) {
try {
return mSession.export(messageIndex);
} catch (Exception e) {
Log.e(LOG_TAG, "## exportSession() : export failed " + e.getMessage(), e);
}
}
return null;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/core/ContentUtils.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import org.matrix.androidsdk.rest.model.message.ImageInfo;
import java.io.File;
/**
* Static content utility methods.
*/
public class ContentUtils {
    private static final String LOG_TAG = ContentUtils.class.getSimpleName();
/**
* Build an ImageInfo object based on the image at the given path.
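     * <p>
     * Minimal usage sketch (the path below is illustrative):
     * <pre>{@code
     * ImageInfo info = ContentUtils.getImageInfoFromFile("/sdcard/Pictures/photo.jpg");
     * }</pre>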
*
* @param filePath the path to the image in storage
* @return the image info
*/
public static ImageInfo getImageInfoFromFile(String filePath) {
ImageInfo imageInfo = new ImageInfo();
try {
            Bitmap imageBitmap = BitmapFactory.decodeFile(filePath);
            // decodeFile() returns null when the file cannot be decoded; skip the dimensions in that case
            if (imageBitmap != null) {
                imageInfo.w = imageBitmap.getWidth();
                imageInfo.h = imageBitmap.getHeight();
            }
File file = new File(filePath);
imageInfo.size = file.length();
imageInfo.mimetype = FileContentUtils.getMimeType(filePath);
} catch (OutOfMemoryError oom) {
Log.e(LOG_TAG, "## getImageInfoFromFile() : oom", oom);
}
return imageInfo;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/call/MXCall.java<|end_filename|>
/*
* Copyright 2015 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.call;
import android.content.Context;
import android.os.Handler;
import android.text.TextUtils;
import android.view.View;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import org.matrix.androidsdk.MXSession;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.core.callback.ApiCallback;
import org.matrix.androidsdk.core.model.MatrixError;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.rest.model.Event;
import org.webrtc.PeerConnection;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Timer;
import javax.annotation.Nullable;
/**
* This class is the default implementation
*/
public class MXCall implements IMXCall {
private static final String LOG_TAG = MXCall.class.getSimpleName();
// defines the call timeout
public static final int CALL_TIMEOUT_MS = 120 * 1000;
/**
* The session
*/
protected MXSession mSession;
/**
* The context
*/
protected Context mContext;
/**
* the turn servers
*/
protected JsonElement mTurnServer;
protected PeerConnection.IceServer defaultIceServer;
/**
* The room in which the call is performed.
*/
protected Room mCallingRoom;
/**
* The room in which the call events are sent.
* It might differ from mCallingRoom if it is a conference call.
* For a 1:1 call, it will be equal to mCallingRoom.
*/
protected Room mCallSignalingRoom;
/**
* The call events listeners
*/
private final Set<IMXCallListener> mCallListeners = new HashSet<>();
/**
* the call id
*/
protected String mCallId;
/**
* Tells if it is a video call
*/
protected boolean mIsVideoCall = false;
/**
* Tells if it is an incoming call
*/
protected boolean mIsIncoming = false;
/**
* Tells if it is a conference call.
*/
private boolean mIsConference = false;
/**
* List of events to send to mCallSignalingRoom
*/
protected final List<Event> mPendingEvents = new ArrayList<>();
/**
* The sending event.
*/
private Event mPendingEvent;
/**
* The not responding timer
*/
protected Timer mCallTimeoutTimer;
// call start time
private long mStartTime = -1;
// UI thread handler
final Handler mUIThreadHandler = new Handler();
/**
* Create the call view
*/
public void createCallView() {
}
public List<PeerConnection.IceServer> getIceServers() {
List<PeerConnection.IceServer> iceServers = new ArrayList<>();
if (null != mTurnServer) {
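            // Expected TURN server JSON shape (illustrative values, as returned by the voip/turnServer API):
            // { "username": "...", "password": "...", "uris": ["turn:turn.example.org:3478?transport=udp"], "ttl": 86400 }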
try {
String username = null;
String password = null;
JsonObject object = mTurnServer.getAsJsonObject();
if (object.has("username")) {
username = object.get("username").getAsString();
}
if (object.has("password")) {
password = object.get("password").getAsString();
}
JsonArray uris = object.get("uris").getAsJsonArray();
for (int index = 0; index < uris.size(); index++) {
String url = uris.get(index).getAsString();
PeerConnection.IceServer.Builder iceServerBuilder = PeerConnection.IceServer.builder(url);
if ((null != username) && (null != password)) {
iceServerBuilder.setUsername(username).setPassword(password);
}
iceServers.add(iceServerBuilder.createIceServer());
}
} catch (Exception e) {
Log.e(LOG_TAG, "## createLocalStream(): Exception in ICE servers list Msg=" + e.getMessage(), e);
}
}
Log.d(LOG_TAG, "## createLocalStream(): " + iceServers.size() + " known ice servers");
return iceServers;
}
/**
* The activity is paused.
*/
public void onPause() {
}
/**
* The activity is resumed.
*/
public void onResume() {
}
    // actions (must be done after dispatchOnViewReady())
/**
* Start a call.
*/
public void placeCall(VideoLayoutConfiguration aLocalVideoPosition) {
}
/**
* Prepare a call reception.
*
* @param aCallInviteParams the invitation Event content
* @param aCallId the call ID
* @param aLocalVideoPosition position of the local video attendee
*/
public void prepareIncomingCall(JsonObject aCallInviteParams, String aCallId, VideoLayoutConfiguration aLocalVideoPosition) {
setIsIncoming(true);
}
/**
* The call has been detected as an incoming one.
* The application launched the dedicated activity and expects to launch the incoming call.
*
* @param aLocalVideoPosition position of the local video attendee
*/
public void launchIncomingCall(VideoLayoutConfiguration aLocalVideoPosition) {
}
@Override
public void updateLocalVideoRendererPosition(VideoLayoutConfiguration aLocalVideoPosition) {
Log.w(LOG_TAG, "## updateLocalVideoRendererPosition(): not implemented");
}
@Override
public boolean switchRearFrontCamera() {
Log.w(LOG_TAG, "## switchRearFrontCamera(): not implemented");
return false;
}
@Override
public boolean isCameraSwitched() {
Log.w(LOG_TAG, "## isCameraSwitched(): not implemented");
return false;
}
@Override
public boolean isSwitchCameraSupported() {
Log.w(LOG_TAG, "## isSwitchCameraSupported(): not implemented");
return false;
}
// events thread
/**
* Manage the call events.
*
* @param event the call event.
*/
public void handleCallEvent(Event event) {
}
// user actions
/**
* The call is accepted.
*/
public void answer() {
}
/**
     * The call has been answered on another device.
*/
public void onAnsweredElsewhere() {
}
/**
* The call is hung up.
*
* @param reason the reason, or null for no reason. Reasons are used to indicate errors in the current VoIP implementation.
*/
public void hangup(@Nullable String reason) {
}
// getters / setters
/**
* @return the callId
*/
public String getCallId() {
return mCallId;
}
/**
* Set the callId
*/
public void setCallId(String callId) {
mCallId = callId;
}
/**
* @return the linked room
*/
public Room getRoom() {
return mCallingRoom;
}
/**
* @return the call signaling room
*/
public Room getCallSignalingRoom() {
return mCallSignalingRoom;
}
/**
* Set the linked rooms.
*
     * @param room              the room where the conference takes place
* @param callSignalingRoom the call signaling room.
*/
public void setRooms(Room room, Room callSignalingRoom) {
mCallingRoom = room;
mCallSignalingRoom = callSignalingRoom;
}
/**
* @return the session
*/
public MXSession getSession() {
return mSession;
}
/**
* @return true if the call is an incoming call.
*/
public boolean isIncoming() {
return mIsIncoming;
}
/**
* @param isIncoming true if the call is an incoming one.
*/
private void setIsIncoming(boolean isIncoming) {
mIsIncoming = isIncoming;
}
/**
* Defines the call type
*/
public void setIsVideo(boolean isVideo) {
mIsVideoCall = isVideo;
}
/**
* @return true if the call is a video call.
*/
public boolean isVideo() {
return mIsVideoCall;
}
/**
* Defines the call conference status
*/
public void setIsConference(boolean isConference) {
mIsConference = isConference;
}
/**
* @return true if the call is a conference call.
*/
public boolean isConference() {
return mIsConference;
}
/**
* @return the callstate (must be a CALL_STATE_XX value)
*/
public String getCallState() {
return null;
}
/**
* @return the callView
*/
public View getCallView() {
return null;
}
/**
* @return the callView visibility
*/
public int getVisibility() {
return View.GONE;
}
/**
* Set the callview visibility
*
* @return true if the operation succeeds
*/
public boolean setVisibility(int visibility) {
return false;
}
/**
     * @return true if the call is ended.
*/
public boolean isCallEnded() {
return TextUtils.equals(CALL_STATE_ENDED, getCallState());
}
/**
* @return the call start time in ms since epoch, -1 if not defined.
*/
public long getCallStartTime() {
return mStartTime;
}
/**
* @return the call elapsed time in seconds, -1 if not defined.
*/
public long getCallElapsedTime() {
if (-1 == mStartTime) {
return -1;
}
return (System.currentTimeMillis() - mStartTime) / 1000;
}
//==============================================================================================================
// call events listener
//==============================================================================================================
/**
* Add a listener.
*
* @param callListener the listener to add
*/
public void addListener(IMXCallListener callListener) {
if (null != callListener) {
synchronized (LOG_TAG) {
mCallListeners.add(callListener);
}
}
}
/**
* Remove a listener
*
* @param callListener the listener to remove
*/
public void removeListener(IMXCallListener callListener) {
if (null != callListener) {
synchronized (LOG_TAG) {
mCallListeners.remove(callListener);
}
}
}
/**
* Remove the listeners
*/
public void clearListeners() {
synchronized (LOG_TAG) {
mCallListeners.clear();
}
}
/**
* @return the call listeners
*/
private Collection<IMXCallListener> getCallListeners() {
Collection<IMXCallListener> listeners;
synchronized (LOG_TAG) {
listeners = new HashSet<>(mCallListeners);
}
return listeners;
}
/**
* Dispatch the onCallViewCreated event to the listeners.
*
* @param callView the call view
*/
protected void dispatchOnCallViewCreated(View callView) {
if (isCallEnded()) {
Log.d(LOG_TAG, "## dispatchOnCallViewCreated(): the call is ended");
return;
}
Log.d(LOG_TAG, "## dispatchOnCallViewCreated()");
Collection<IMXCallListener> listeners = getCallListeners();
for (IMXCallListener listener : listeners) {
try {
listener.onCallViewCreated(callView);
} catch (Exception e) {
Log.e(LOG_TAG, "## dispatchOnCallViewCreated(): Exception Msg=" + e.getMessage(), e);
}
}
}
/**
* Dispatch the onViewReady event to the listeners.
*/
protected void dispatchOnReady() {
if (isCallEnded()) {
Log.d(LOG_TAG, "## dispatchOnReady() : the call is ended");
return;
}
Log.d(LOG_TAG, "## dispatchOnReady()");
Collection<IMXCallListener> listeners = getCallListeners();
for (IMXCallListener listener : listeners) {
try {
listener.onReady();
} catch (Exception e) {
Log.e(LOG_TAG, "## dispatchOnReady(): Exception Msg=" + e.getMessage(), e);
}
}
}
/**
* Dispatch the onCallError event to the listeners.
*
* @param error error message
*/
protected void dispatchOnCallError(String error) {
if (isCallEnded()) {
Log.d(LOG_TAG, "## dispatchOnCallError() : the call is ended");
return;
}
Log.d(LOG_TAG, "## dispatchOnCallError()");
Collection<IMXCallListener> listeners = getCallListeners();
for (IMXCallListener listener : listeners) {
try {
listener.onCallError(error);
} catch (Exception e) {
Log.e(LOG_TAG, "## dispatchOnCallError(): " + e.getMessage(), e);
}
}
}
/**
* Dispatch the onStateDidChange event to the listeners.
*
* @param newState the new state
*/
protected void dispatchOnStateDidChange(String newState) {
Log.d(LOG_TAG, "## dispatchOnStateDidChange(): " + newState);
// set the call start time
if (TextUtils.equals(CALL_STATE_CONNECTED, newState) && (-1 == mStartTime)) {
mStartTime = System.currentTimeMillis();
}
// the call is ended.
if (TextUtils.equals(CALL_STATE_ENDED, newState)) {
mStartTime = -1;
}
Collection<IMXCallListener> listeners = getCallListeners();
for (IMXCallListener listener : listeners) {
try {
listener.onStateDidChange(newState);
} catch (Exception e) {
Log.e(LOG_TAG, "## dispatchOnStateDidChange(): Exception Msg=" + e.getMessage(), e);
}
}
}
/**
* Dispatch the onCallAnsweredElsewhere event to the listeners.
*/
protected void dispatchAnsweredElsewhere() {
Log.d(LOG_TAG, "## dispatchAnsweredElsewhere()");
Collection<IMXCallListener> listeners = getCallListeners();
for (IMXCallListener listener : listeners) {
try {
listener.onCallAnsweredElsewhere();
} catch (Exception e) {
Log.e(LOG_TAG, "## dispatchAnsweredElsewhere(): Exception Msg=" + e.getMessage(), e);
}
}
}
/**
* Dispatch the onCallEnd event to the listeners.
*
* @param aEndCallReasonId the reason of the call ending
*/
protected void dispatchOnCallEnd(int aEndCallReasonId) {
Log.d(LOG_TAG, "## dispatchOnCallEnd(): endReason=" + aEndCallReasonId);
Collection<IMXCallListener> listeners = getCallListeners();
for (IMXCallListener listener : listeners) {
try {
listener.onCallEnd(aEndCallReasonId);
} catch (Exception e) {
Log.e(LOG_TAG, "## dispatchOnCallEnd(): Exception Msg=" + e.getMessage(), e);
}
}
}
/**
* Send the next pending events
*/
protected void sendNextEvent() {
mUIThreadHandler.post(() -> {
// do not send any new message
if (isCallEnded()) {
mPendingEvents.clear();
}
// ready to send
if ((null == mPendingEvent) && (0 != mPendingEvents.size())) {
mPendingEvent = mPendingEvents.get(0);
mPendingEvents.remove(mPendingEvent);
Log.d(LOG_TAG, "## sendNextEvent() : sending event of type " + mPendingEvent.getType() + " event id " + mPendingEvent.eventId);
mCallSignalingRoom.sendEvent(mPendingEvent, new ApiCallback<Void>() {
@Override
public void onSuccess(Void info) {
mUIThreadHandler.post(() -> {
Log.d(LOG_TAG, "## sendNextEvent() : event " + mPendingEvent.eventId + " is sent");
mPendingEvent = null;
sendNextEvent();
});
}
private void commonFailure(String reason) {
Log.d(LOG_TAG, "## sendNextEvent() : event " + mPendingEvent.eventId + " failed to be sent " + reason);
// let try next candidate event
if (TextUtils.equals(mPendingEvent.getType(), Event.EVENT_TYPE_CALL_CANDIDATES)) {
mUIThreadHandler.post(() -> {
mPendingEvent = null;
sendNextEvent();
});
} else {
hangup(reason);
}
}
@Override
public void onNetworkError(Exception e) {
commonFailure(e.getLocalizedMessage());
}
@Override
public void onMatrixError(MatrixError e) {
commonFailure(e.getLocalizedMessage());
}
@Override
public void onUnexpectedError(Exception e) {
commonFailure(e.getLocalizedMessage());
}
});
}
});
}
/**
* Dispatch the onPreviewSizeChanged event to the listeners.
*
* @param width the preview width
* @param height the preview height
*/
protected void dispatchOnPreviewSizeChanged(int width, int height) {
Log.d(LOG_TAG, "## dispatchOnPreviewSizeChanged(): width =" + width + " - height =" + height);
Collection<IMXCallListener> listeners = getCallListeners();
for (IMXCallListener listener : listeners) {
try {
listener.onPreviewSizeChanged(width, height);
} catch (Exception e) {
Log.e(LOG_TAG, "## dispatchOnPreviewSizeChanged(): Exception Msg=" + e.getMessage(), e);
}
}
}
/**
     * Send a hang-up event.
*
* @param reason the reason
*/
protected void sendHangup(String reason) {
JsonObject hangupContent = new JsonObject();
hangupContent.add("version", new JsonPrimitive(0));
hangupContent.add("call_id", new JsonPrimitive(mCallId));
if (!TextUtils.isEmpty(reason)) {
hangupContent.add("reason", new JsonPrimitive(reason));
}
Event event = new Event(Event.EVENT_TYPE_CALL_HANGUP, hangupContent, mSession.getCredentials().userId, mCallSignalingRoom.getRoomId());
// local notification to indicate the end of call
mUIThreadHandler.post(() -> dispatchOnCallEnd(END_CALL_REASON_USER_HIMSELF));
Log.d(LOG_TAG, "## sendHangup(): reason=" + reason);
// send hang up event to the server
mCallSignalingRoom.sendEvent(event, new ApiCallback<Void>() {
@Override
public void onSuccess(Void info) {
Log.d(LOG_TAG, "## sendHangup(): onSuccess");
}
@Override
public void onNetworkError(Exception e) {
Log.e(LOG_TAG, "## sendHangup(): onNetworkError Msg=" + e.getMessage(), e);
}
@Override
public void onMatrixError(MatrixError e) {
Log.e(LOG_TAG, "## sendHangup(): onMatrixError Msg=" + e.getMessage());
}
@Override
public void onUnexpectedError(Exception e) {
Log.e(LOG_TAG, "## sendHangup(): onUnexpectedError Msg=" + e.getMessage(), e);
}
});
}
@Override
public void muteVideoRecording(boolean isVideoMuted) {
Log.w(LOG_TAG, "## muteVideoRecording(): not implemented");
}
@Override
public boolean isVideoRecordingMuted() {
Log.w(LOG_TAG, "## muteVideoRecording(): not implemented - default value = false");
return false;
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/model/crypto/OlmPayloadContent.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.model.crypto;
import java.util.Map;
/**
* Class representing the OLM payload content
*/
public class OlmPayloadContent implements java.io.Serializable {
/**
* The room id
*/
public String room_id;
/**
* The sender
*/
public String sender;
/**
* The recipient
*/
public String recipient;
/**
* The recipient keys
*/
public Map<String, String> recipient_keys;
/**
* The keys
*/
public Map<String, String> keys;
}
<|start_filename|>matrix-sdk/src/androidTest/java/org/matrix/androidsdk/data/room/RoomAvatarResolverTest.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.room;
import android.content.Context;
import androidx.test.InstrumentationRegistry;
import junit.framework.Assert;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.matrix.androidsdk.data.Room;
@FixMethodOrder(MethodSorters.JVM)
public class RoomAvatarResolverTest {
private RoomTestHelper mRoomTestHelper = new RoomTestHelper();
@Test
public void RoomAvatar_getAvatar_noLL_avatar() {
RoomAvatar_getAvatar_avatar(false);
}
@Test
public void RoomAvatar_getAvatar_LL_avatar() {
RoomAvatar_getAvatar_avatar(true);
}
private void RoomAvatar_getAvatar_avatar(boolean withLazyLoading) {
Context context = InstrumentationRegistry.getContext();
// It does not depend on the number of users
for (int i = 0; i < 10; i++) {
Room room = mRoomTestHelper.createRoom(context, withLazyLoading, i, false);
room.getState().avatar_url = "mxc://avatar_url";
Assert.assertEquals("mxc://avatar_url", room.getAvatarUrl());
}
}
@Test
public void RoomAvatar_getAvatar_noLL_noAvatar() {
RoomAvatar_getAvatar_noAvatar(false);
}
@Test
public void RoomAvatar_getAvatar_LL_noAvatar() {
RoomAvatar_getAvatar_noAvatar(true);
}
private void RoomAvatar_getAvatar_noAvatar(boolean withLazyLoading) {
Context context = InstrumentationRegistry.getContext();
Room room;
// Only me in the room
room = mRoomTestHelper.createRoom(context, withLazyLoading, 1, false);
Assert.assertNull(room.getAvatarUrl());
// I have an avatar
room.getMember(mRoomTestHelper.getMyUserId()).avatarUrl = "mxc://my_avatar_url";
Assert.assertEquals("mxc://my_avatar_url", room.getAvatarUrl());
// One other user in the room
room = mRoomTestHelper.createRoom(context, withLazyLoading, 2, false);
Assert.assertNull(room.getAvatarUrl());
// I have an avatar
room.getMember(mRoomTestHelper.getMyUserId()).avatarUrl = "mxc://my_avatar_url";
Assert.assertNull(room.getAvatarUrl());
// Other user has an avatar
room.getMember(mRoomTestHelper.getUserId(2)).avatarUrl = "mxc://other_user_avatar_url";
Assert.assertEquals("mxc://other_user_avatar_url", room.getAvatarUrl());
// 2 other users in the room
room = mRoomTestHelper.createRoom(context, withLazyLoading, 3, false);
// I have an avatar
room.getMember(mRoomTestHelper.getMyUserId()).avatarUrl = "mxc://my_avatar_url";
// Other user has an avatar
room.getMember(mRoomTestHelper.getUserId(2)).avatarUrl = "mxc://other_user_avatar_url";
Assert.assertNull(room.getAvatarUrl());
}
@Test
public void RoomAvatar_getAvatar_noLL_invitation() {
RoomAvatar_getAvatar_invitation(false);
}
@Test
public void RoomAvatar_getAvatar_LL_invitation() {
RoomAvatar_getAvatar_invitation(true);
}
private void RoomAvatar_getAvatar_invitation(boolean withLazyLoading) {
Context context = InstrumentationRegistry.getContext();
Room room;
// Only me in the room
room = mRoomTestHelper.createRoom(context, withLazyLoading, 1, true);
Assert.assertNull(room.getAvatarUrl());
// I have an avatar
room.getMember(mRoomTestHelper.getMyUserId()).avatarUrl = "mxc://my_avatar_url";
Assert.assertEquals("mxc://my_avatar_url", room.getAvatarUrl());
// One other user in the room
room = mRoomTestHelper.createRoom(context, withLazyLoading, 2, true);
Assert.assertNull(room.getAvatarUrl());
// I have an avatar
room.getMember(mRoomTestHelper.getMyUserId()).avatarUrl = "mxc://my_avatar_url";
Assert.assertNull(room.getAvatarUrl());
// Inviter has an avatar
room.getMember(mRoomTestHelper.getUserId(2)).avatarUrl = "mxc://other_user_avatar_url";
Assert.assertEquals("mxc://other_user_avatar_url", room.getAvatarUrl());
// 2 other users in the room
room = mRoomTestHelper.createRoom(context, withLazyLoading, 3, true);
// I have an avatar
room.getMember(mRoomTestHelper.getMyUserId()).avatarUrl = "mxc://my_avatar_url";
// Other user has an avatar
room.getMember(mRoomTestHelper.getUserId(2)).avatarUrl = "mxc://other_user_avatar_url";
Assert.assertEquals("mxc://other_user_avatar_url", room.getAvatarUrl());
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/IdentityServerRequest3PIDValidationParams.java<|end_filename|>
/*
* Copyright 2019 The Matrix.org Foundation C.I.C.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model;
public class IdentityServerRequest3PIDValidationParams {
// the email address
public String email;
// the client secret key
public String client_secret;
// the attempt count
public Integer send_attempt;
// the nextlink (given if it is a registration process)
public String next_link;
public String phone_number;
public String country;
public static IdentityServerRequest3PIDValidationParams forEmail(String email, String clientSecret, Integer sendAttempt) {
IdentityServerRequest3PIDValidationParams params = new IdentityServerRequest3PIDValidationParams();
params.email = email;
params.client_secret = clientSecret;
params.send_attempt = sendAttempt;
return params;
}
public static IdentityServerRequest3PIDValidationParams forPhoneNumber(String phoneNumber,
String countryCode,
String clientSecret, Integer sendAttempt) {
IdentityServerRequest3PIDValidationParams params = new IdentityServerRequest3PIDValidationParams();
params.phone_number = phoneNumber;
params.country = countryCode;
params.client_secret = clientSecret;
params.send_attempt = sendAttempt;
return params;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/client/OpenIdRestClient.java<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.client;
import org.matrix.androidsdk.HomeServerConnectionConfig;
import org.matrix.androidsdk.RestClient;
import org.matrix.androidsdk.core.JsonUtils;
import org.matrix.androidsdk.core.callback.ApiCallback;
import org.matrix.androidsdk.rest.api.OpenIdApi;
import org.matrix.androidsdk.rest.callback.RestAdapterCallback;
import org.matrix.androidsdk.rest.model.openid.RequestOpenIdTokenResponse;
import java.util.HashMap;
public class OpenIdRestClient extends RestClient<OpenIdApi> {
/**
* {@inheritDoc}
*/
public OpenIdRestClient(HomeServerConnectionConfig hsConfig) {
super(hsConfig, OpenIdApi.class, RestClient.URI_API_PREFIX_PATH_R0, JsonUtils.getGson(false));
}
/**
* Gets a bearer token from the homeserver that the user can
* present to a third party in order to prove their ownership
* of the Matrix account they are logged into.
*
* @param userId the user id
* @param callback the asynchronous callback called when finished
*/
public void requestToken(final String userId, final ApiCallback<RequestOpenIdTokenResponse> callback) {
final String description = "openIdToken userId : " + userId;
mApi.requestToken(userId, new HashMap<>())
.enqueue(new RestAdapterCallback<RequestOpenIdTokenResponse>(description, mUnsentEventsManager, callback,
new RestAdapterCallback.RequestRetryCallBack() {
@Override
public void onRetry() {
requestToken(userId, callback);
}
}));
}
}
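// Usage sketch (illustrative only, not part of the SDK sources), in Kotlin: requesting an
// OpenID token for the logged-in user, given an existing HomeServerConnectionConfig `hsConfig`.
// The user id is a placeholder, and SimpleApiCallback's no-argument constructor is assumed;
// the RequestOpenIdTokenResponse obtained here is what IdentityAuthRestClient.register(),
// further down in this document, consumes.
//
//     OpenIdRestClient(hsConfig).requestToken("@alice:example.org",
//             object : SimpleApiCallback<RequestOpenIdTokenResponse>() {
//                 override fun onSuccess(info: RequestOpenIdTokenResponse) {
//                     // hand the token over to a third-party service, e.g. an identity server
//                 }
//             })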
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/features/terms/TermsManager.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.features.terms
import android.net.Uri
import org.matrix.androidsdk.HomeServerConnectionConfig
import org.matrix.androidsdk.MXSession
import org.matrix.androidsdk.RestClient
import org.matrix.androidsdk.core.Log
import org.matrix.androidsdk.core.callback.ApiCallback
import org.matrix.androidsdk.core.callback.SimpleApiCallback
import org.matrix.androidsdk.rest.client.IdentityAuthRestClient
import org.matrix.androidsdk.rest.client.TermsRestClient
import org.matrix.androidsdk.rest.model.identityserver.IdentityServerRegisterResponse
import org.matrix.androidsdk.rest.model.openid.RequestOpenIdTokenResponse
import org.matrix.androidsdk.rest.model.sync.AccountDataElement
import org.matrix.androidsdk.rest.model.terms.TermsResponse
class TermsManager(private val mxSession: MXSession) {
private val termsRestClient = TermsRestClient()
enum class ServiceType {
IntegrationManager,
IdentityService
}
fun get(serviceType: ServiceType, baseUrl: String, callback: ApiCallback<GetTermsResponse>) {
val sep = if (baseUrl.endsWith("/")) "" else "/"
val url = when (serviceType) {
ServiceType.IntegrationManager -> "$baseUrl$sep${RestClient.URI_INTEGRATION_MANAGER_PATH}"
ServiceType.IdentityService -> "$baseUrl$sep${RestClient.URI_IDENTITY_PATH_V2}"
}
termsRestClient.get(url, object : SimpleApiCallback<TermsResponse>(callback) {
override fun onSuccess(info: TermsResponse) {
callback.onSuccess(GetTermsResponse(info, getAlreadyAcceptedTermUrlsFromAccountData()))
}
})
}
fun agreeToTerms(serviceType: ServiceType, baseUrl: String, agreedUrls: List<String>, token: String?, callback: ApiCallback<Unit>) {
val sep = if (baseUrl.endsWith("/")) "" else "/"
val url = when (serviceType) {
ServiceType.IntegrationManager -> "$baseUrl$sep${RestClient.URI_INTEGRATION_MANAGER_PATH}"
ServiceType.IdentityService -> "$baseUrl$sep${RestClient.URI_IDENTITY_PATH_V2}"
}
if (token.isNullOrBlank()) {
//We need a token
val alteredHsConfig = HomeServerConnectionConfig.Builder(mxSession.homeServerConfig)
.withIdentityServerUri(Uri.parse(baseUrl))
.build()
val identityAuthRestClient = IdentityAuthRestClient(alteredHsConfig)
mxSession.openIdToken(object : SimpleApiCallback<RequestOpenIdTokenResponse>(callback) {
override fun onSuccess(info: RequestOpenIdTokenResponse) {
identityAuthRestClient.register(info, object : SimpleApiCallback<IdentityServerRegisterResponse>(callback) {
override fun onSuccess(info: IdentityServerRegisterResponse) {
agreeToTerms(serviceType, baseUrl, agreedUrls, info.identityServerAccessToken, callback)
}
})
}
})
return
}
termsRestClient.setAccessToken(token)
termsRestClient.agreeToTerms(url, agreedUrls, object : SimpleApiCallback<Unit>(callback) {
override fun onSuccess(info: Unit) {
// The client SHOULD update this account data section, adding the URLs of any
// additional documents that the user agreed to, to this list.
// Get the current m.accepted_terms, append the new ones and update the account data
val listOfAcceptedTerms = getAlreadyAcceptedTermUrlsFromAccountData()
val newList = listOfAcceptedTerms.toMutableSet().apply { addAll(agreedUrls) }.toList()
mxSession.myUserId.let { userId ->
mxSession.accountDataRestClient?.setAccountData(
userId,
AccountDataElement.ACCOUNT_DATA_TYPE_ACCEPTED_TERMS,
mapOf(AccountDataElement.ACCOUNT_DATA_KEY_ACCEPTED_TERMS to newList),
object : SimpleApiCallback<Void?>(callback) {
override fun onSuccess(info: Void?) {
Log.d(LOG_TAG, "Account data accepted terms updated")
callback.onSuccess(Unit)
}
}
)
}
}
})
}
private fun getAlreadyAcceptedTermUrlsFromAccountData(): Set<String> {
val accountDataCurrentAcceptedTerms =
mxSession.dataHandler.store?.getAccountDataElement(AccountDataElement.ACCOUNT_DATA_TYPE_ACCEPTED_TERMS)
return (accountDataCurrentAcceptedTerms?.content
?.get(AccountDataElement.ACCOUNT_DATA_KEY_ACCEPTED_TERMS) as? List<*>)
?.mapNotNull { it as? String }
?.toSet()
?: emptySet()
}
companion object {
private const val LOG_TAG = "TermsManager"
}
}
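// Usage sketch (illustrative only, not part of the SDK sources): how application code might
// fetch an identity server's terms and record the user's agreement. The base URL, the agreed
// URLs and the use of SimpleApiCallback's no-argument constructor are assumptions; a real
// client would display the policies returned in GetTermsResponse before agreeing to them.
private fun acceptIdentityServerTermsSketch(session: MXSession) {
    val termsManager = TermsManager(session)
    val identityServerUrl = "https://identity.example.org" // placeholder
    termsManager.get(TermsManager.ServiceType.IdentityService, identityServerUrl,
            object : SimpleApiCallback<GetTermsResponse>() {
                override fun onSuccess(info: GetTermsResponse) {
                    // Placeholder list: in practice these URLs come from the policies in `info`,
                    // once the user has reviewed and accepted them.
                    val agreedUrls = listOf("https://identity.example.org/terms-1.0.html")
                    termsManager.agreeToTerms(TermsManager.ServiceType.IdentityService,
                            identityServerUrl,
                            agreedUrls,
                            token = null, // null: TermsManager registers for a token itself
                            callback = object : SimpleApiCallback<Unit>() {
                                override fun onSuccess(info: Unit) {
                                    Log.d("TermsUsageSketch", "Terms accepted and stored in account data")
                                }
                            })
                }
            })
}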
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/features/identityserver/IdentityServerTokensStore.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.features.identityserver
import android.content.Context
import androidx.preference.PreferenceManager
import org.matrix.androidsdk.core.JsonUtils
class IdentityServerTokensStore(context: Context) {
private val prefs = PreferenceManager.getDefaultSharedPreferences(context)
private val gson = JsonUtils.getBasicGson()
private data class TokensStore(
// Keys are user Id
@JvmField
val userToServerTokens: MutableMap<String, ServerTokens> = mutableMapOf()
)
private data class ServerTokens(
// Keys are server Url, values are token
@JvmField
val serverTokens: MutableMap<String, String> = mutableMapOf()
)
fun getToken(userId: String, serverUrl: String): String? {
return readStore()
.userToServerTokens[userId]
?.serverTokens
?.get(serverUrl)
}
fun setToken(userId: String, serverUrl: String, token: String) {
readStore()
.apply {
userToServerTokens.getOrPut(userId) { ServerTokens() }
.serverTokens[serverUrl] = token
}
.commit()
}
fun resetToken(userId: String, serverUrl: String) {
readStore()
.apply {
userToServerTokens[userId]?.serverTokens?.remove(serverUrl)
if (userToServerTokens[userId]?.serverTokens?.isEmpty() == true) {
userToServerTokens.remove(userId)
}
}
.commit()
}
private fun readStore(): TokensStore {
return prefs.getString(IDENTITY_SERVER_TOKENS_PREFERENCE_KEY, null)
?.toModel()
?: TokensStore()
}
private fun TokensStore.commit() {
prefs.edit()
.putString(IDENTITY_SERVER_TOKENS_PREFERENCE_KEY, this@commit.fromModel())
.apply()
}
fun clear() {
prefs.edit()
.remove(IDENTITY_SERVER_TOKENS_PREFERENCE_KEY)
.apply()
}
private fun String.toModel(): TokensStore? {
return gson.fromJson<TokensStore>(this, TokensStore::class.java)
}
private fun TokensStore.fromModel(): String? {
return gson.toJson(this)
}
companion object {
private const val IDENTITY_SERVER_TOKENS_PREFERENCE_KEY = "IDENTITY_SERVER_TOKENS_PREFERENCE_KEY"
}
}
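// Usage sketch (illustrative only, not part of the SDK sources): the cache above keys tokens
// by user id and then by identity server URL. The ids, URL and token below are placeholders.
private fun tokensStoreUsageSketch(context: Context) {
    val store = IdentityServerTokensStore(context)
    val userId = "@alice:example.org"              // placeholder
    val serverUrl = "https://identity.example.org" // placeholder
    // Cache a token previously obtained by registering against the identity server
    store.setToken(userId, serverUrl, "someOpaqueAccessToken")
    // Reuse it later instead of registering again
    val cached: String? = store.getToken(userId, serverUrl)
    // Drop it if the server no longer accepts it
    if (cached != null) {
        store.resetToken(userId, serverUrl)
    }
}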
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/call/VideoLayoutConfiguration.java<|end_filename|>
/*
* Copyright 2015 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.call;
import org.jetbrains.annotations.NotNull;
import java.io.Serializable;
/**
* Defines the video call view layout.
*/
public class VideoLayoutConfiguration implements Serializable {
public final static int INVALID_VALUE = -1;
@NotNull
@Override
public String toString() {
return "VideoLayoutConfiguration{" +
"mIsPortrait=" + mIsPortrait +
", X=" + mX +
", Y=" + mY +
", Width=" + mWidth +
", Height=" + mHeight +
'}';
}
// parameters of the video of the local user (small video)
/**
* margin left in percentage of the screen resolution for the local user video
**/
public int mX;
/**
* margin top in percentage of the screen resolution for the local user video
**/
public int mY;
/**
* width in percentage of the screen resolution for the local user video
**/
public int mWidth;
/**
* video height in percentage of the screen resolution for the local user video
**/
public int mHeight;
/**
* the area size in which the video is displayed
**/
public int mDisplayWidth;
public int mDisplayHeight;
/**
* tells if the display is in portrait orientation
**/
public boolean mIsPortrait;
public VideoLayoutConfiguration(int aX, int aY, int aWidth, int aHeight) {
this(aX, aY, aWidth, aHeight, INVALID_VALUE, INVALID_VALUE);
}
public VideoLayoutConfiguration(int aX, int aY, int aWidth, int aHeight, int aDisplayWidth, int aDisplayHeight) {
mX = aX;
mY = aY;
mWidth = aWidth;
mHeight = aHeight;
mDisplayWidth = aDisplayWidth;
mDisplayHeight = aDisplayHeight;
}
public VideoLayoutConfiguration() {
mX = INVALID_VALUE;
mY = INVALID_VALUE;
mWidth = INVALID_VALUE;
mHeight = INVALID_VALUE;
mDisplayWidth = INVALID_VALUE;
mDisplayHeight = INVALID_VALUE;
}
}
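// Usage sketch (illustrative only, not part of the SDK sources), in Kotlin. The constructor
// arguments are percentages of the screen resolution, as documented above: this anchors the
// local preview 5% from the top-left corner and makes it cover 25% of the width and height.
// The display size (in pixels) and the orientation are assumptions supplied by the caller.
//
//     val localPreviewLayout = VideoLayoutConfiguration(5, 5, 25, 25).apply {
//         mDisplayWidth = 1080
//         mDisplayHeight = 1920
//         mIsPortrait = true
//     }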
<|start_filename|>matrix-sdk-core/src/main/java/org/matrix/androidsdk/core/model/HttpError.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core.model;
public final class HttpError {
private final String errorBody;
private final int httpCode;
public HttpError(String errorBody, int httpCode) {
this.errorBody = errorBody;
this.httpCode = httpCode;
}
public String getErrorBody() {
return errorBody;
}
public int getHttpCode() {
return httpCode;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
HttpError httpError = (HttpError) o;
if (httpCode != httpError.httpCode) return false;
return errorBody != null ?
errorBody.equals(httpError.errorBody) :
httpError.errorBody == null;
}
@Override
public int hashCode() {
int result = errorBody != null ? errorBody.hashCode() : 0;
result = 31 * result + httpCode;
return result;
}
@Override
public String toString() {
return "HttpError{" +
"errorBody='" + errorBody + '\'' +
", httpCode=" + httpCode +
'}';
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/timeline/TimelineInvitedRoomSyncHandler.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.timeline;
import androidx.annotation.NonNull;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.sync.InvitedRoomSync;
import javax.annotation.Nullable;
/**
* This class is responsible for handling the invitation room events from the SyncResponse
*/
class TimelineInvitedRoomSyncHandler {
private final Room mRoom;
private final TimelineLiveEventHandler mLiveEventHandler;
private final InvitedRoomSync mInvitedRoomSync;
TimelineInvitedRoomSyncHandler(@NonNull final Room room,
@NonNull final TimelineLiveEventHandler liveEventHandler,
@Nullable final InvitedRoomSync invitedRoomSync) {
mRoom = room;
mLiveEventHandler = liveEventHandler;
mInvitedRoomSync = invitedRoomSync;
}
/**
* Handle the invitation room events
*/
public void handle() {
// Handle the state events as live events (the room state will be updated, and the listeners (if any) will be notified).
if (mInvitedRoomSync != null && mInvitedRoomSync.inviteState != null && mInvitedRoomSync.inviteState.events != null) {
final String roomId = mRoom.getRoomId();
for (Event event : mInvitedRoomSync.inviteState.events) {
// Add a fake event id if none is present, in order to be able to store the event
if (event.eventId == null) {
event.eventId = roomId + "-" + System.currentTimeMillis() + "-" + event.hashCode();
}
// The roomId is not defined in the invite state events, so set it here.
event.roomId = roomId;
mLiveEventHandler.handleLiveEvent(event, false, true);
}
// The room related to the pending invite can be considered as ready from now on
mRoom.setReadyState(true);
}
}
}
<|start_filename|>matrix-sdk/src/androidTest/java/org/matrix/androidsdk/lazyloading/SearchTest.java<|end_filename|>
package org.matrix.androidsdk.lazyloading;
import androidx.test.InstrumentationRegistry;
import junit.framework.Assert;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.matrix.androidsdk.MXSession;
import org.matrix.androidsdk.common.CommonTestHelper;
import org.matrix.androidsdk.common.TestApiCallback;
import org.matrix.androidsdk.rest.model.search.SearchResponse;
import java.util.Collections;
import java.util.concurrent.CountDownLatch;
@FixMethodOrder(MethodSorters.JVM)
public class SearchTest {
private CommonTestHelper mTestHelper = new CommonTestHelper();
private LazyLoadingTestHelper mLazyLoadingTestHelper = new LazyLoadingTestHelper(mTestHelper);
@BeforeClass
public static void init() {
MXSession.initUserAgent(InstrumentationRegistry.getContext(), null);
}
@Test
public void Search_CheckMessageSearch_LazyLoadedMembers() throws Exception {
Search_CheckMessageSearch_(true);
}
@Test
public void Search_CheckMessageSearch_LoadAllMembers() throws Exception {
Search_CheckMessageSearch_(false);
}
private void Search_CheckMessageSearch_(boolean withLazyLoading) throws Exception {
final LazyLoadingScenarioData data = mLazyLoadingTestHelper.createScenario(withLazyLoading);
final CountDownLatch lock = new CountDownLatch(1);
data.aliceSession.pauseEventStream();
data.aliceSession.resumeEventStream();
data.aliceSession.getDataHandler().clear();
data.aliceSession.searchMessageText("<NAME>", Collections.singletonList(data.roomId), 0, 0, null, new TestApiCallback<SearchResponse>(lock) {
@Override
public void onSuccess(SearchResponse info) {
Assert.assertEquals(1, info.searchCategories.roomEvents.results.size());
super.onSuccess(info);
}
});
mTestHelper.await(lock);
mLazyLoadingTestHelper.clearAllSessions(data);
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/api/IdentityPingApi.kt<|end_filename|>
/*
* Copyright 2019 Vector Creations Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.api
import org.matrix.androidsdk.RestClient
import retrofit2.Call
import retrofit2.http.GET
interface IdentityPingApi {
/**
* https://matrix.org/docs/spec/client_server/r0.4.0.html#server-discovery
* Simple ping call to check if the server is alive
*
* Ref: https://matrix.org/docs/spec/identity_service/unstable#status-check
*
* @return 200 in case of success
*/
@GET(RestClient.URI_API_PREFIX_IDENTITY)
fun ping(): Call<Void>
}
<|start_filename|>matrix-sdk/src/test/java/org/matrix/androidsdk/core/FileUtilsTest.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core
import android.webkit.MimeTypeMap
import org.junit.Assert.assertEquals
import org.junit.Assert.assertNull
import org.junit.FixMethodOrder
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.MethodSorters
import org.robolectric.RobolectricTestRunner
@RunWith(RobolectricTestRunner::class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
class FileUtilsTest {
@Test
fun getFileExtension_nominalCase() {
assertEquals("jpg", getFileExtension("test.jpg"))
assertEquals("jpg", getFileExtension("test.Jpg"))
assertEquals("jpg", getFileExtension("test.JPG"))
assertEquals("jpg", getFileExtension("test.foo.JPG"))
assertEquals("jpg", getFileExtension("https://example.org/test.jpg"))
assertEquals("jpg", getFileExtension("https://example.org/test.jpg#fragment.bar"))
assertEquals("jpg", getFileExtension("https://example.org/test.jpg?param=x.foo"))
assertEquals("jpg", getFileExtension("https://example.org/test.jpg?param=x.foo#fragment.bar"))
}
@Test
fun getFileExtension_errorCase() {
assertNull(getFileExtension(""))
assertNull(getFileExtension("."))
assertNull(getFileExtension("test."))
assertNull(getFileExtension("test.foo."))
assertNull(getFileExtension("https://example.org/test"))
assertNull(getFileExtension("https://example.org/test#fragment.bar"))
assertNull(getFileExtension("https://example.org/test?param=x.foo"))
assertNull(getFileExtension("https://example.org/test?param=x.foo#fragment.bar"))
}
@Test
fun getFileExtension_MimeTypeMap_issue() {
// These are the problems
// "ı" (i without point) in file name (like in Turkish)
assertEquals("", MimeTypeMap.getFileExtensionFromUrl("test_ı.jpg"))
// "+" in file name
assertEquals("", MimeTypeMap.getFileExtensionFromUrl("test_+1.jpg"))
// Now fixed
assertEquals("jpg", getFileExtension("test_ı.jpg"))
assertEquals("jpg", getFileExtension("test_+1.jpg"))
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/bingrules/ContentRule.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.bingrules;
public class ContentRule extends BingRule {
public String pattern;
public ContentRule(String ruleKind, String aPattern, boolean notify, boolean highlight, boolean sound) {
super(ruleKind, aPattern, notify, highlight, sound);
pattern = aPattern;
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/interfaces/CryptoEvent.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.interfaces
import com.google.gson.JsonElement
import com.google.gson.JsonObject
import org.matrix.androidsdk.crypto.MXCryptoError
import org.matrix.androidsdk.crypto.MXEventDecryptionResult
import org.matrix.androidsdk.crypto.model.crypto.EncryptedEventContent
import org.matrix.androidsdk.crypto.model.crypto.ForwardedRoomKeyContent
import org.matrix.androidsdk.crypto.model.crypto.OlmEventContent
import org.matrix.androidsdk.crypto.model.crypto.RoomKeyContent
import org.matrix.androidsdk.crypto.rest.model.crypto.RoomKeyShare
import org.matrix.androidsdk.crypto.rest.model.crypto.RoomKeyShareRequest
interface CryptoEvent {
val contentAsJsonObject: JsonObject
val wireEventContent: CryptoEventContent
val wireType: String
val content: JsonElement
val wireContent: JsonElement
val senderKey: String
val keysClaimed: MutableMap<String, String>
fun getRoomId(): String
fun getEventId(): String
fun getAge(): Long
fun getStateKey(): String
fun getType(): CharSequence
fun getSender(): String
fun toRoomKeyShare(): RoomKeyShare
fun toRoomKeyShareRequest(): RoomKeyShareRequest
fun toRoomKeyContent(): RoomKeyContent
fun toOlmEventContent(): OlmEventContent
fun toEncryptedEventContent(): EncryptedEventContent
fun toForwardedRoomKeyContent(): ForwardedRoomKeyContent
fun setCryptoError(cryptoError: MXCryptoError)
fun setClearData(decryptionResult: MXEventDecryptionResult)
companion object {
const val EVENT_TYPE_ROOM_KEY = "m.room_key"
const val EVENT_TYPE_ROOM_KEY_REQUEST = "m.room_key_request"
const val EVENT_TYPE_FORWARDED_ROOM_KEY = "m.forwarded_room_key"
const val EVENT_TYPE_MESSAGE_ENCRYPTED = "m.room.encrypted"
const val EVENT_TYPE_MESSAGE_ENCRYPTION = "m.room.encryption"
const val EVENT_TYPE_STATE_ROOM_MEMBER = "m.room.member"
// Interactive key verification
const val EVENT_TYPE_KEY_VERIFICATION_START = "m.key.verification.start"
const val EVENT_TYPE_KEY_VERIFICATION_ACCEPT = "m.key.verification.accept"
const val EVENT_TYPE_KEY_VERIFICATION_KEY = "m.key.verification.key"
const val EVENT_TYPE_KEY_VERIFICATION_MAC = "m.key.verification.mac"
const val EVENT_TYPE_KEY_VERIFICATION_CANCEL = "m.key.verification.cancel"
}
}
<|start_filename|>matrix-sdk/src/test/java/org/matrix/androidsdk/login/RegistrationToolsTest.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.login
import org.junit.Assert.assertEquals
import org.junit.Assert.assertTrue
import org.junit.FixMethodOrder
import org.junit.Test
import org.junit.runners.MethodSorters
import org.matrix.androidsdk.rest.model.login.LocalizedFlowDataLoginTerms
import org.matrix.androidsdk.rest.model.login.RegistrationFlowResponse
@FixMethodOrder(MethodSorters.JVM)
class RegistrationToolsTest {
@Test
fun getLocalizedLoginTerms_defaultParam_en() {
assertEn(getLocalizedLoginTerms(createRegistrationFlowResponse()))
}
@Test
fun getLocalizedLoginTerms_en_en() {
assertEn(getLocalizedLoginTerms(createRegistrationFlowResponse(), "en"))
}
@Test
fun getLocalizedLoginTerms_en_en_en() {
assertEn(getLocalizedLoginTerms(createRegistrationFlowResponse(), "en", "en"))
}
@Test
fun getLocalizedLoginTerms_fr_default_fr() {
assertFr(getLocalizedLoginTerms(createRegistrationFlowResponse(), "fr"))
}
@Test
fun getLocalizedLoginTerms_fr_en_fr() {
assertFr(getLocalizedLoginTerms(createRegistrationFlowResponse(), "fr", "en"))
}
@Test
fun getLocalizedLoginTerms_fr_fr_fr() {
assertFr(getLocalizedLoginTerms(createRegistrationFlowResponse(), "fr", "fr"))
}
@Test
fun getLocalizedLoginTerms_de_en() {
// Test a language that is not available
assertEn(getLocalizedLoginTerms(createRegistrationFlowResponse(), "de"))
}
@Test
fun getLocalizedLoginTerms_de_en_en() {
// Test a language that is not available
assertEn(getLocalizedLoginTerms(createRegistrationFlowResponse(), "de", "en"))
}
@Test
fun getLocalizedLoginTerms_de_fr_fr() {
// Test a language that is not available, with fr as the default
assertFr(getLocalizedLoginTerms(createRegistrationFlowResponse(), "de", "fr"))
}
@Test
fun getLocalizedLoginTerms_de_de_en() {
// Test a language that is not available, with an unavailable default -> falls back to the first language
assertEn(getLocalizedLoginTerms(createRegistrationFlowResponse(), "de", "de"))
}
@Test
fun getLocalizedLoginTerms_empty_policies_empty() {
val registrationFlowResponse = RegistrationFlowResponse()
.apply {
params =
mapOf("m.login.terms" to
mapOf("policies" to
emptyMap<String, String>()
)
)
}
assertTrue(getLocalizedLoginTerms(registrationFlowResponse).isEmpty())
}
@Test
fun getLocalizedLoginTerms_two_policies() {
val registrationFlowResponse = RegistrationFlowResponse()
.apply {
params =
mapOf("m.login.terms" to
mapOf("policies" to
mapOf("policy1" to
mapOf("version" to "1.0",
"en" to mapOf(
"url" to "http:url_en",
"name" to "name_en")
),
"policy2" to
mapOf("version" to "2.0",
"en" to mapOf(
"url" to "http:url_en2",
"name" to "name_en2")
)
)
)
)
}
getLocalizedLoginTerms(registrationFlowResponse).let { result ->
assertEquals(2, result.size)
result[0].let {
assertEquals("policy1", it.policyName)
assertEquals("1.0", it.version)
assertEquals("name_en", it.localizedName)
assertEquals("http:url_en", it.localizedUrl)
}
result[1].let {
assertEquals("policy2", it.policyName)
assertEquals("2.0", it.version)
assertEquals("name_en2", it.localizedName)
assertEquals("http:url_en2", it.localizedUrl)
}
}
}
/* ==========================================================================================
* Private
* ========================================================================================== */
/**
* Example of Data:
* <pre>
* "m.login.terms": {
* "policies": {
* "privacy_policy": {
* "version": "1.0",
* "en": {
* "url": "http:\/\/matrix.org\/_matrix\/consent?v=1.0",
* "name": "Terms and Conditions"
* }
* }
* }
* }
*</pre>
*/
private fun createRegistrationFlowResponse() = RegistrationFlowResponse()
.apply {
params =
mapOf("m.login.terms" to
mapOf("policies" to
mapOf("policy1" to
mapOf("version" to "1.0",
"en" to mapOf(
"url" to "http:url_en",
"name" to "name_en"),
"fr" to mapOf(
"url" to "http:url_fr",
"name" to "name_fr")
)
)
)
)
}
private fun assertEn(localizedLoginTerms: List<LocalizedFlowDataLoginTerms>) {
localizedLoginTerms.let { result ->
assertEquals(1, result.size)
result.first().let {
assertEquals("policy1", it.policyName)
assertEquals("1.0", it.version)
assertEquals("name_en", it.localizedName)
assertEquals("http:url_en", it.localizedUrl)
}
}
}
private fun assertFr(localizedLoginTerms: List<LocalizedFlowDataLoginTerms>) {
localizedLoginTerms.let { result ->
assertEquals(1, result.size)
result.first().let {
assertEquals("policy1", it.policyName)
assertEquals("1.0", it.version)
assertEquals("name_fr", it.localizedName)
assertEquals("http:url_fr", it.localizedUrl)
}
}
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/cryptostore/db/model/IncomingRoomKeyRequestEntity.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.cryptostore.db.model
import io.realm.RealmObject
import org.matrix.androidsdk.crypto.IncomingRoomKeyRequest
import org.matrix.androidsdk.crypto.model.crypto.RoomKeyRequestBody
internal open class IncomingRoomKeyRequestEntity(
var requestId: String? = null,
var userId: String? = null,
var deviceId: String? = null,
// RoomKeyRequestBody fields
var requestBodyAlgorithm: String? = null,
var requestBodyRoomId: String? = null,
var requestBodySenderKey: String? = null,
var requestBodySessionId: String? = null
) : RealmObject() {
fun toIncomingRoomKeyRequest(): IncomingRoomKeyRequest {
return IncomingRoomKeyRequest().apply {
mRequestId = requestId
mUserId = userId
mDeviceId = deviceId
mRequestBody = RoomKeyRequestBody().apply {
algorithm = requestBodyAlgorithm
roomId = requestBodyRoomId
senderKey = requestBodySenderKey
sessionId = requestBodySessionId
}
}
}
fun putRequestBody(requestBody: RoomKeyRequestBody?) {
requestBody?.let {
requestBodyAlgorithm = it.algorithm
requestBodyRoomId = it.roomId
requestBodySenderKey = it.senderKey
requestBodySessionId = it.sessionId
}
}
}
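// Usage sketch (illustrative only, not part of the SDK sources): the mapping round-trip between
// the Realm entity above and the domain object, using an unmanaged entity instance. All ids and
// keys below are placeholder values.
private fun incomingRoomKeyRequestMappingSketch(): IncomingRoomKeyRequest {
    val entity = IncomingRoomKeyRequestEntity(
            requestId = "requestId",
            userId = "@alice:example.org",
            deviceId = "ALICE_DEVICE"
    ).apply {
        putRequestBody(RoomKeyRequestBody().apply {
            algorithm = "m.megolm.v1.aes-sha2"
            roomId = "!room:example.org"
            senderKey = "senderCurve25519Key"
            sessionId = "megolmSessionId"
        })
    }
    // Back to the domain object consumed by the crypto module
    return entity.toIncomingRoomKeyRequest()
}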
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/rest/CryptoRestAdapterCallback.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.rest;
import com.google.gson.Gson;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.core.callback.ApiCallback;
import org.matrix.androidsdk.core.model.HttpError;
import org.matrix.androidsdk.core.model.HttpException;
import org.matrix.androidsdk.core.model.MatrixError;
import org.matrix.androidsdk.core.rest.DefaultRetrofit2ResponseHandler;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
public class CryptoRestAdapterCallback<T> implements Callback<T> {
private static final String LOG_TAG = "CryptoRestAdapterCallback";
/**
* Callback when a request failed after a network error.
* This callback should manage the request auto resent.
*/
public interface RequestRetryCallBack {
void onRetry();
}
// the event description
private final String mEventDescription;
// the callback
// FIXME It would be safer if the type were ApiCallback<T>; otherwise onSuccess() has to be overridden
private final ApiCallback mApiCallback;
// the retry callback
private final RequestRetryCallBack mRequestRetryCallBack;
/**
* Constructor with unsent events management
*
* @param description the event description
* @param apiCallback the callback
* @param requestRetryCallBack the retry callback
*/
public CryptoRestAdapterCallback(String description,
ApiCallback apiCallback,
RequestRetryCallBack requestRetryCallBack) {
if (null != description) {
Log.d(LOG_TAG, "Trigger the event [" + description + "]");
}
mEventDescription = description;
mApiCallback = apiCallback;
mRequestRetryCallBack = requestRetryCallBack;
}
@Override
public void onResponse(Call<T> call, final Response<T> response) {
try {
handleResponse(response);
} catch (IOException e) {
onFailure(call, e);
}
}
private void handleResponse(final Response<T> response) throws IOException {
DefaultRetrofit2ResponseHandler.handleResponse(
response,
new DefaultRetrofit2ResponseHandler.Listener<T>() {
@Override
public void onSuccess(Response<T> response) {
success(response.body(), response);
}
@Override
public void onHttpError(HttpError httpError) {
failure(response, new HttpException(httpError));
}
}
);
}
@Override
public void onFailure(Call<T> call, Throwable t) {
failure(null, (Exception) t);
}
public void success(T t, Response<T> response) {
if (null != mEventDescription) {
Log.d(LOG_TAG, "## Succeed() : [" + mEventDescription + "]");
}
// add try/catch to prevent application crashes while managing destroyed objects
try {
if (null != mApiCallback) {
try {
mApiCallback.onSuccess(t);
} catch (Exception e) {
Log.e(LOG_TAG, "## succeed() : onSuccess failed " + e.getMessage(), e);
mApiCallback.onUnexpectedError(e);
}
}
} catch (Exception e) {
// privacy
Log.e(LOG_TAG, "## succeed(): Exception " + e.getMessage(), e);
}
}
/**
* Default failure implementation that calls the right error handler
*
* @param response the retrofit response
* @param exception the retrofit exception
*/
public void failure(Response<T> response, Exception exception) {
if (null != mEventDescription) {
String error = exception != null
? exception.getMessage()
: (response != null ? response.message() : "unknown");
Log.d(LOG_TAG, "## failure(): [" + mEventDescription + "]" + " with error " + error);
}
if (exception != null && exception instanceof IOException) {
try {
if (null != mApiCallback) {
try {
mApiCallback.onNetworkError(exception);
} catch (Exception e) {
Log.e(LOG_TAG, "## failure(): onNetworkError " + exception.getLocalizedMessage(), exception);
}
}
} catch (Exception e) {
// privacy
//Log.e(LOG_TAG, "Exception NetworkError " + e.getMessage() + " while managing " + error.getUrl());
Log.e(LOG_TAG, "## failure(): NetworkError " + e.getMessage(), e);
}
} else {
// Try to convert this into a Matrix error
MatrixError mxError;
try {
HttpError error = ((HttpException) exception).getHttpError();
ResponseBody errorBody = response.errorBody();
String bodyAsString = error.getErrorBody();
mxError = new Gson().fromJson(bodyAsString, MatrixError.class);
mxError.mStatus = response.code();
mxError.mReason = response.message();
mxError.mErrorBodyAsString = bodyAsString;
} catch (Exception e) {
mxError = null;
}
if (mxError != null) {
try {
if (null != mApiCallback) {
mApiCallback.onMatrixError(mxError);
}
} catch (Exception e) {
// privacy
//Log.e(LOG_TAG, "Exception MatrixError " + e.getMessage() + " while managing " + error.getUrl());
Log.e(LOG_TAG, "## failure(): MatrixError " + e.getMessage(), e);
}
} else {
try {
if (null != mApiCallback) {
mApiCallback.onUnexpectedError(exception);
}
} catch (Exception e) {
// privacy
//Log.e(LOG_TAG, "Exception UnexpectedError " + e.getMessage() + " while managing " + error.getUrl());
Log.e(LOG_TAG, "## failure(): UnexpectedError " + e.getMessage(), e);
}
}
}
}
}
<|start_filename|>matrix-sdk/src/androidTest/java/org/matrix/androidsdk/data/cryptostore/db/DbHelperTest.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.cryptostore.db
import junit.framework.Assert.*
import org.junit.FixMethodOrder
import org.junit.Test
import org.junit.runners.MethodSorters
import org.matrix.androidsdk.crypto.IncomingRoomKeyRequest
import org.matrix.androidsdk.crypto.cryptostore.db.deserializeFromRealm
import org.matrix.androidsdk.crypto.cryptostore.db.serializeForRealm
import org.matrix.androidsdk.crypto.model.crypto.RoomKeyRequestBody
@FixMethodOrder(MethodSorters.JVM)
class DbHelperTest {
// Base64 is needed here
@Test
fun testSerialization_ok() {
// Create an arbitrary serializable object
val obj = IncomingRoomKeyRequest().apply {
mRequestBody = RoomKeyRequestBody().apply {
algorithm = "algo"
roomId = "roomId"
senderKey = "senderKey"
sessionId = "sessionId"
}
mDeviceId = "deviceId"
mUserId = "userId"
mRequestId = "requestId"
}
val s = serializeForRealm(obj)
assertTrue(s?.isNotEmpty() == true)
val obj2 = deserializeFromRealm<IncomingRoomKeyRequest>(s)
assertNotNull(obj2)
assertEquals(obj.mDeviceId, obj2!!.mDeviceId)
assertEquals(obj.mUserId, obj2.mUserId)
assertEquals(obj.mRequestId, obj2.mRequestId)
assertEquals(obj.mRequestBody.sessionId, obj2.mRequestBody.sessionId)
assertEquals(obj.mRequestBody.algorithm, obj2.mRequestBody.algorithm)
assertEquals(obj.mRequestBody.roomId, obj2.mRequestBody.roomId)
assertEquals(obj.mRequestBody.senderKey, obj2.mRequestBody.senderKey)
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/client/IdentityAuthRestClient.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.client
import org.matrix.androidsdk.HomeServerConnectionConfig
import org.matrix.androidsdk.RestClient
import org.matrix.androidsdk.core.JsonUtils
import org.matrix.androidsdk.core.callback.ApiCallback
import org.matrix.androidsdk.rest.api.IdentityAuthApi
import org.matrix.androidsdk.rest.callback.RestAdapterCallback
import org.matrix.androidsdk.rest.model.identityserver.IdentityAccountResponse
import org.matrix.androidsdk.rest.model.identityserver.IdentityServerRegisterResponse
import org.matrix.androidsdk.rest.model.openid.RequestOpenIdTokenResponse
class IdentityAuthRestClient(hsConfig: HomeServerConnectionConfig) :
RestClient<IdentityAuthApi>(hsConfig, IdentityAuthApi::class.java, URI_IDENTITY_PATH_V2, JsonUtils.getGson(false), true) {
fun register(openIdTokenResponse: RequestOpenIdTokenResponse, callback: ApiCallback<IdentityServerRegisterResponse>) {
mApi.register(openIdTokenResponse).enqueue(RestAdapterCallback("register", null, callback, null))
}
fun checkAccount(token: String, callback: ApiCallback<IdentityAccountResponse>) {
setAccessToken(token)
mApi.checkAccount().enqueue(RestAdapterCallback("checkAccount", null, callback, null))
}
fun logout(token: String, callback: ApiCallback<Unit>) {
setAccessToken(token)
mApi.logout().enqueue(RestAdapterCallback("logout", null, callback, null))
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/group/GroupsSyncResponse.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.group;
import java.util.Map;
/**
* Group sync response
*/
public class GroupsSyncResponse {
/**
* Joined groups: keys are group ids.
*/
public Map<String, Object> join;
/**
* Invitations. The groups that the user has been invited to: keys are group ids.
*/
public Map<String, InvitedGroupSync> invite;
/**
* Left groups. The groups that the user has left or been banned from: keys are group ids.
*/
public Map<String, Object> leave;
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/cryptostore/db/query/DeviceInfoEntityQueries.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.cryptostore.db.query
import io.realm.Realm
import io.realm.kotlin.where
import org.matrix.androidsdk.crypto.cryptostore.db.model.DeviceInfoEntity
import org.matrix.androidsdk.crypto.cryptostore.db.model.DeviceInfoEntityFields
import org.matrix.androidsdk.crypto.cryptostore.db.model.createPrimaryKey
/**
* Get or create a device info
*/
internal fun DeviceInfoEntity.Companion.getOrCreate(realm: Realm, userId: String, deviceId: String): DeviceInfoEntity {
return realm.where<DeviceInfoEntity>()
.equalTo(DeviceInfoEntityFields.PRIMARY_KEY, DeviceInfoEntity.createPrimaryKey(userId, deviceId))
.findFirst()
?: let {
realm.createObject(DeviceInfoEntity::class.java, DeviceInfoEntity.createPrimaryKey(userId, deviceId)).apply {
this.deviceId = deviceId
}
}
}
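// Usage sketch (illustrative only, not part of the SDK sources): getOrCreate() may create a new
// Realm object, so it has to be called from inside a write transaction. The ids are placeholders
// supplied by the caller.
internal fun getOrCreateDeviceInfoSketch(realm: Realm, userId: String, deviceId: String): DeviceInfoEntity {
    realm.beginTransaction()
    // Idempotent: returns the existing row when this (userId, deviceId) pair is already stored
    val entity = DeviceInfoEntity.getOrCreate(realm, userId, deviceId)
    realm.commitTransaction()
    return entity
}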
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/bingrules/Condition.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.bingrules;
public class Condition {
// defined in the push rules spec
// https://matrix.org/docs/spec/client_server/r0.3.0.html#push-rules
/* 'key': The dot-separated field of the event to match, eg. content.body
'pattern': The glob-style pattern to match against. Patterns with no special glob characters should be treated as having asterisks prepended
and appended when testing the condition.*/
public static final String KIND_EVENT_MATCH = "event_match";
/* 'profile_tag': The profile_tag to match with.*/
public static final String KIND_PROFILE_TAG = "profile_tag";
/* no parameter */
public static final String KIND_CONTAINS_DISPLAY_NAME = "contains_display_name";
/* 'is': A decimal integer optionally prefixed by one of, '==', '<', '>', '>=' or '<='.
A prefix of '<' matches rooms where the member count is strictly less than the given number and so forth. If no prefix is present, this matches
rooms where the member count is exactly equal to the given number (ie. the same as '==').
*/
public static final String KIND_ROOM_MEMBER_COUNT = "room_member_count";
/* */
public static final String KIND_DEVICE = "device";
public static final String KIND_SENDER_NOTIFICATION_PERMISSION = "sender_notification_permission";
public static final String KIND_UNKNOWN = "unknown_condition";
public String kind;
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/sync/RoomsSyncResponse.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.sync;
import java.util.Map;
// RoomsSyncResponse represents the rooms list in server sync v2 response.
public class RoomsSyncResponse {
/**
* Joined rooms: keys are room ids.
*/
public Map<String, RoomSync> join;
/**
* Invitations. The rooms that the user has been invited to: keys are room ids.
*/
public Map<String, InvitedRoomSync> invite;
/**
* Left rooms. The rooms that the user has left or been banned from: keys are room ids.
*/
public Map<String, RoomSync> leave;
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/timeline/TimelinePushWorker.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.timeline;
import androidx.annotation.NonNull;
import com.google.gson.JsonObject;
import org.matrix.androidsdk.MXDataHandler;
import org.matrix.androidsdk.call.MXCall;
import org.matrix.androidsdk.core.BingRulesManager;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.bingrules.BingRule;
/**
* This class is responsible for handling push rules for an event
*/
class TimelinePushWorker {
private static final String LOG_TAG = TimelinePushWorker.class.getSimpleName();
private final MXDataHandler mDataHandler;
TimelinePushWorker(@NonNull final MXDataHandler dataHandler) {
mDataHandler = dataHandler;
}
/**
* Trigger a push if there is a dedicated push rules which implies it.
*
* @param event the event
*/
public void triggerPush(@NonNull final RoomState state,
@NonNull final Event event) {
BingRule bingRule;
boolean outOfTimeEvent = false;
long maxLifetime = 0;
long eventLifetime = 0;
final JsonObject eventContent = event.getContentAsJsonObject();
if (eventContent != null && eventContent.has("lifetime")) {
maxLifetime = eventContent.get("lifetime").getAsLong();
eventLifetime = System.currentTimeMillis() - event.getOriginServerTs();
outOfTimeEvent = eventLifetime > maxLifetime;
}
final BingRulesManager bingRulesManager = mDataHandler.getBingRulesManager();
// If the bing rules apply, bing
if (!outOfTimeEvent
&& bingRulesManager != null
&& (bingRule = bingRulesManager.fulfilledBingRule(event)) != null) {
if (bingRule.shouldNotify()) {
// bing the call events only if they make sense
if (Event.EVENT_TYPE_CALL_INVITE.equals(event.getType())) {
long lifeTime = event.getAge();
if (Long.MAX_VALUE == lifeTime) {
lifeTime = System.currentTimeMillis() - event.getOriginServerTs();
}
if (lifeTime > MXCall.CALL_TIMEOUT_MS) {
Log.d(LOG_TAG, "IGNORED onBingEvent rule id " + bingRule.ruleId + " event id " + event.eventId
+ " in " + event.roomId);
return;
}
}
Log.d(LOG_TAG, "onBingEvent rule id " + bingRule.ruleId + " event id " + event.eventId + " in " + event.roomId);
mDataHandler.onBingEvent(event, state, bingRule);
} else {
Log.d(LOG_TAG, "rule id " + bingRule.ruleId + " event id " + event.eventId
+ " in " + event.roomId + " has a mute notify rule");
}
} else if (outOfTimeEvent) {
Log.e(LOG_TAG, "outOfTimeEvent for " + event.eventId + " in " + event.roomId);
Log.e(LOG_TAG, "outOfTimeEvent maxlifetime " + maxLifetime + " eventLifeTime " + eventLifetime);
}
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/bingrules/EventMatchCondition.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.bingrules;
import android.text.TextUtils;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import org.matrix.androidsdk.rest.model.Event;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;
public class EventMatchCondition extends Condition {
public String key;
public String pattern;
private static Map<String, Pattern> mPatternByRule = null;
public EventMatchCondition() {
kind = Condition.KIND_EVENT_MATCH;
}
@Override
public String toString() {
return "EventMatchCondition{" + "key='" + key + ", pattern=" + pattern + '}';
}
/**
* Returns whether the given event satisfies the condition.
*
* @param event the event
* @return true if the event satisfies the condition
*/
public boolean isSatisfied(Event event) {
String fieldVal = null;
// some information is in the decrypted event (like the type)
if (event.isEncrypted() && (null != event.getClearEvent())) {
JsonObject eventJson = event.getClearEvent().toJsonObject();
fieldVal = extractField(eventJson, key);
}
if (TextUtils.isEmpty(fieldVal)) {
JsonObject eventJson = event.toJsonObject();
fieldVal = extractField(eventJson, key);
}
if (TextUtils.isEmpty(fieldVal)) {
return false;
}
if (TextUtils.equals(pattern, fieldVal)) {
return true;
}
if (null == mPatternByRule) {
mPatternByRule = new HashMap<>();
}
Pattern patternEx = mPatternByRule.get(pattern);
if (null == patternEx) {
patternEx = Pattern.compile(globToRegex(pattern), Pattern.CASE_INSENSITIVE);
mPatternByRule.put(pattern, patternEx);
}
return patternEx.matcher(fieldVal).matches();
}
private String extractField(JsonObject jsonObject, String fieldPath) {
String[] fieldParts = fieldPath.split("\\.");
JsonElement jsonElement = null;
for (String field : fieldParts) {
jsonElement = jsonObject.get(field);
if (jsonElement == null) {
return null;
}
if (jsonElement.isJsonObject()) {
jsonObject = (JsonObject) jsonElement;
}
}
return (jsonElement == null) ? null : jsonElement.getAsString();
}
private String globToRegex(String glob) {
String res = glob.replace("*", ".*").replace("?", ".");
// If no special characters were found (detected here by no replacements having been made),
// add asterisks and boundaries to both sides
if (res.equals(glob)) {
res = "(^|.*\\W)" + res + "($|\\W.*)";
}
return res;
}
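// Illustrative examples (not part of the original source) of the conversion above, regex shown unescaped:
//     "cake*lie" -> "cake.*lie"                    (wildcards present, pattern used as-is)
//     "lunch"    -> "(^|.*\W)lunch($|\W.*)"        (no wildcards, so word boundaries are added)
// The sample patterns are assumptions chosen to show both branches.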
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/algorithms/megolm/MXOutboundSessionInfo.java<|end_filename|>
/*
* Copyright 2015 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.algorithms.megolm;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.crypto.data.MXDeviceInfo;
import org.matrix.androidsdk.crypto.data.MXUsersDevicesMap;
import java.util.List;
public class MXOutboundSessionInfo {
private static final String LOG_TAG = MXOutboundSessionInfo.class.getSimpleName();
// When the session was created
private final long mCreationTime;
// The id of the session
public final String mSessionId;
// Number of times this session has been used
public int mUseCount;
// Devices with which we have shared the session key
// userId -> {deviceId -> msgindex}
public final MXUsersDevicesMap<Integer> mSharedWithDevices;
// constructor
public MXOutboundSessionInfo(String sessionId) {
mSessionId = sessionId;
mSharedWithDevices = new MXUsersDevicesMap<>();
mCreationTime = System.currentTimeMillis();
mUseCount = 0;
}
public boolean needsRotation(int rotationPeriodMsgs, int rotationPeriodMs) {
boolean needsRotation = false;
long sessionLifetime = System.currentTimeMillis() - mCreationTime;
if ((mUseCount >= rotationPeriodMsgs) || (sessionLifetime >= rotationPeriodMs)) {
Log.d(LOG_TAG, "## needsRotation() : Rotating megolm session after " + mUseCount + ", " + sessionLifetime + "ms");
needsRotation = true;
}
return needsRotation;
}
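// Illustrative sketch (not part of the original source): callers typically pass the rotation
// settings configured for megolm, e.g.
//     if (sessionInfo.needsRotation(100 /* messages */, 7 * 24 * 3600 * 1000 /* ms */)) {
//         // discard this session and create a new outbound session
//     }
// The 100-message / one-week values are assumed examples, not taken from this file.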
/**
* Determine if this session has been shared with devices which it shouldn't have been.
*
* @param devicesInRoom the devices map
* @return true if we have shared the session with devices which aren't in devicesInRoom.
*/
public boolean sharedWithTooManyDevices(MXUsersDevicesMap<MXDeviceInfo> devicesInRoom) {
List<String> userIds = mSharedWithDevices.getUserIds();
for (String userId : userIds) {
if (null == devicesInRoom.getUserDeviceIds(userId)) {
Log.d(LOG_TAG, "## sharedWithTooManyDevices() : Starting new session because we shared with " + userId);
return true;
}
List<String> deviceIds = mSharedWithDevices.getUserDeviceIds(userId);
for (String deviceId : deviceIds) {
if (null == devicesInRoom.getObject(deviceId, userId)) {
Log.d(LOG_TAG, "## sharedWithTooManyDevices() : Starting new session because we shared with " + userId + ":" + deviceId);
return true;
}
}
}
return false;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/core/EventDisplay.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core;
import android.content.Context;
import android.graphics.Typeface;
import android.os.Build;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import android.text.Html;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.SpannableStringBuilder;
import android.text.TextUtils;
import android.text.style.ForegroundColorSpan;
import android.text.style.StyleSpan;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import org.matrix.androidsdk.R;
import org.matrix.androidsdk.call.MXCallsManager;
import org.matrix.androidsdk.crypto.MXCryptoError;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.interfaces.HtmlToolbox;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.EventContent;
import org.matrix.androidsdk.rest.model.RedactedBecause;
import org.matrix.androidsdk.rest.model.RoomMember;
import org.matrix.androidsdk.rest.model.message.Message;
import org.matrix.androidsdk.rest.model.pid.RoomThirdPartyInvite;
/**
* Class helper to stringify an event
*/
public class EventDisplay {
private static final String LOG_TAG = EventDisplay.class.getSimpleName();
private static final String MESSAGE_IN_REPLY_TO_FIRST_PART = "<blockquote>";
private static final String MESSAGE_IN_REPLY_TO_LAST_PART = "</a>";
private static final String MESSAGE_IN_REPLY_TO_HREF_TAG_END = "\">";
// members
protected final Context mContext;
@Nullable
protected final HtmlToolbox mHtmlToolbox;
protected boolean mPrependAuthor;
// let the application defines if the redacted events must be displayed
public static final boolean mDisplayRedactedEvents = false;
// constructor
public EventDisplay(Context context) {
this(context, null);
}
// constructor
public EventDisplay(Context context, @Nullable HtmlToolbox htmlToolbox) {
mContext = context.getApplicationContext();
mHtmlToolbox = htmlToolbox;
}
/**
* <p>Prepend the text with the author's name if they have not been mentioned in the text.</p>
* This will prepend text messages with the author's name. This will NOT prepend things like
* emote, room topic changes, etc which already mention the author's name in the message.
*
* @param prepend true to prepend the message author.
*/
public void setPrependMessagesWithAuthor(boolean prepend) {
mPrependAuthor = prepend;
}
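// Illustrative usage sketch (not part of the original source):
//     EventDisplay display = new EventDisplay(context);
//     display.setPrependMessagesWithAuthor(true);
//     CharSequence text = display.getTextualDisplay(event, roomState);
// 'event' and 'roomState' are assumed to be provided by the caller (e.g. a message adapter).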
/**
* Compute an "human readable" name for an user Id.
*
* @param userId the user id
* @param roomState the room state
* @return the user display name
*/
protected static String getUserDisplayName(String userId, RoomState roomState) {
if (null != roomState) {
return roomState.getMemberName(userId);
} else {
return userId;
}
}
/**
* Stringify the linked event.
*
* @return The text or null if it isn't possible.
*/
public CharSequence getTextualDisplay(Event event, RoomState roomState) {
return getTextualDisplay(null, event, roomState);
}
/**
* Stringify the linked event.
*
* @param displayNameColor the display name highlighted color.
* @return The text or null if it isn't possible.
*/
public CharSequence getTextualDisplay(Integer displayNameColor, Event event, RoomState roomState) {
CharSequence text = null;
try {
JsonObject jsonEventContent = event.getContentAsJsonObject();
String userDisplayName = getUserDisplayName(event.getSender(), roomState);
String eventType = event.getType();
if (event.isCallEvent()) {
if (Event.EVENT_TYPE_CALL_INVITE.equals(eventType)) {
boolean isVideo = false;
// detect call type from the sdp
try {
JsonObject offer = jsonEventContent.get("offer").getAsJsonObject();
JsonElement sdp = offer.get("sdp");
String sdpValue = sdp.getAsString();
isVideo = sdpValue.contains("m=video");
} catch (Exception e) {
Log.e(LOG_TAG, "getTextualDisplay : " + e.getMessage(), e);
}
if (isVideo) {
return mContext.getString(R.string.notice_placed_video_call, userDisplayName);
} else {
return mContext.getString(R.string.notice_placed_voice_call, userDisplayName);
}
} else if (Event.EVENT_TYPE_CALL_ANSWER.equals(eventType)) {
return mContext.getString(R.string.notice_answered_call, userDisplayName);
} else if (Event.EVENT_TYPE_CALL_HANGUP.equals(eventType)) {
return mContext.getString(R.string.notice_ended_call, userDisplayName);
} else {
return eventType;
}
} else if (Event.EVENT_TYPE_STATE_HISTORY_VISIBILITY.equals(eventType)) {
CharSequence subpart;
String historyVisibility = (null != jsonEventContent.get("history_visibility")) ?
jsonEventContent.get("history_visibility").getAsString() : RoomState.HISTORY_VISIBILITY_SHARED;
if (TextUtils.equals(historyVisibility, RoomState.HISTORY_VISIBILITY_SHARED)) {
subpart = mContext.getString(R.string.notice_room_visibility_shared);
} else if (TextUtils.equals(historyVisibility, RoomState.HISTORY_VISIBILITY_INVITED)) {
subpart = mContext.getString(R.string.notice_room_visibility_invited);
} else if (TextUtils.equals(historyVisibility, RoomState.HISTORY_VISIBILITY_JOINED)) {
subpart = mContext.getString(R.string.notice_room_visibility_joined);
} else if (TextUtils.equals(historyVisibility, RoomState.HISTORY_VISIBILITY_WORLD_READABLE)) {
subpart = mContext.getString(R.string.notice_room_visibility_world_readable);
} else {
subpart = mContext.getString(R.string.notice_room_visibility_unknown, historyVisibility);
}
text = mContext.getString(R.string.notice_made_future_room_visibility, userDisplayName, subpart);
} else if (Event.EVENT_TYPE_RECEIPT.equals(eventType)) {
// the read receipt should not be displayed
text = "Read Receipt";
} else if (Event.EVENT_TYPE_MESSAGE.equals(eventType)) {
final String msgtype = (null != jsonEventContent.get("msgtype")) ? jsonEventContent.get("msgtype").getAsString() : "";
// all m.room.message events should support the 'body' key fallback, so use it.
text = jsonEventContent.has("body") ? jsonEventContent.get("body").getAsString() : null;
// check for html formatting
if (jsonEventContent.has("formatted_body") && jsonEventContent.has("format")) {
text = getFormattedMessage(mContext, jsonEventContent, roomState, mHtmlToolbox);
}
// avoid empty image name
if (TextUtils.equals(msgtype, Message.MSGTYPE_IMAGE) && TextUtils.isEmpty(text)) {
text = mContext.getString(R.string.summary_user_sent_image, userDisplayName);
} else if (TextUtils.equals(msgtype, Message.MSGTYPE_EMOTE)) {
text = "* " + userDisplayName + " " + text;
} else if (TextUtils.isEmpty(text)) {
text = "";
} else if (mPrependAuthor) {
text = new SpannableStringBuilder(mContext.getString(R.string.summary_message, userDisplayName, text));
if (null != displayNameColor) {
((SpannableStringBuilder) text).setSpan(new ForegroundColorSpan(displayNameColor),
0, userDisplayName.length() + 1, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
((SpannableStringBuilder) text).setSpan(new StyleSpan(Typeface.BOLD),
0, userDisplayName.length() + 1, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
}
}
} else if (Event.EVENT_TYPE_STICKER.equals(eventType)) {
// all m.sticker events should support the 'body' key fallback, so use it.
text = jsonEventContent.has("body") ? jsonEventContent.get("body").getAsString() : null;
if (TextUtils.isEmpty(text)) {
text = mContext.getString(R.string.summary_user_sent_sticker, userDisplayName);
}
} else if (Event.EVENT_TYPE_MESSAGE_ENCRYPTION.equals(eventType)) {
text = mContext.getString(R.string.notice_end_to_end, userDisplayName, event.getWireEventContent().algorithm);
} else if (Event.EVENT_TYPE_MESSAGE_ENCRYPTED.equals(eventType)) {
// don't display
if (event.isRedacted()) {
String redactedInfo = getRedactionMessage(mContext, event, roomState);
if (TextUtils.isEmpty(redactedInfo)) {
return null;
} else {
return redactedInfo;
}
} else {
String message = null;
if (null != event.getCryptoError()) {
String errorDescription;
MXCryptoError error = event.getCryptoError();
if (TextUtils.equals(error.errcode, MXCryptoError.UNKNOWN_INBOUND_SESSION_ID_ERROR_CODE)) {
errorDescription = mContext.getResources().getString(R.string.notice_crypto_error_unkwown_inbound_session_id);
} else {
errorDescription = error.getLocalizedMessage();
}
message = mContext.getString(R.string.notice_crypto_unable_to_decrypt, errorDescription);
}
if (TextUtils.isEmpty(message)) {
message = mContext.getString(R.string.encrypted_message);
}
SpannableString spannableStr = new SpannableString(message);
spannableStr.setSpan(new StyleSpan(Typeface.ITALIC), 0, message.length(), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
text = spannableStr;
}
} else if (Event.EVENT_TYPE_STATE_ROOM_TOPIC.equals(eventType)) {
String topic = jsonEventContent.getAsJsonPrimitive("topic").getAsString();
if (event.isRedacted()) {
String redactedInfo = getRedactionMessage(mContext, event, roomState);
if (TextUtils.isEmpty(redactedInfo)) {
return null;
}
topic = redactedInfo;
}
if (!TextUtils.isEmpty(topic)) {
text = mContext.getString(R.string.notice_room_topic_changed, userDisplayName, topic);
} else {
text = mContext.getString(R.string.notice_room_topic_removed, userDisplayName);
}
} else if (Event.EVENT_TYPE_STATE_ROOM_NAME.equals(eventType)) {
JsonPrimitive nameAsJson = jsonEventContent.getAsJsonPrimitive("name");
String roomName = (null == nameAsJson) ? null : nameAsJson.getAsString();
if (event.isRedacted()) {
String redactedInfo = getRedactionMessage(mContext, event, roomState);
if (TextUtils.isEmpty(redactedInfo)) {
return null;
}
roomName = redactedInfo;
}
if (!TextUtils.isEmpty(roomName)) {
text = mContext.getString(R.string.notice_room_name_changed, userDisplayName, roomName);
} else {
text = mContext.getString(R.string.notice_room_name_removed, userDisplayName);
}
} else if (Event.EVENT_TYPE_STATE_ROOM_THIRD_PARTY_INVITE.equals(eventType)) {
RoomThirdPartyInvite invite = JsonUtils.toRoomThirdPartyInvite(event.getContent());
String displayName = invite.display_name;
if (event.isRedacted()) {
String redactedInfo = getRedactionMessage(mContext, event, roomState);
if (TextUtils.isEmpty(redactedInfo)) {
return null;
}
displayName = redactedInfo;
}
if (displayName != null) {
text = mContext.getString(R.string.notice_room_third_party_invite, userDisplayName, displayName);
} else {
// Consider the invite has been revoked
JsonObject prevContent = event.getPrevContentAsJsonObject();
if (prevContent != null) {
text = mContext.getString(R.string.notice_room_third_party_revoked_invite, userDisplayName, prevContent.get("display_name"));
} else {
text = null;
}
}
} else if (Event.EVENT_TYPE_STATE_ROOM_MEMBER.equals(eventType)) {
text = getMembershipNotice(mContext, event, roomState);
}
} catch (Exception e) {
Log.e(LOG_TAG, "getTextualDisplay() " + e.getMessage(), e);
}
return text;
}
/**
* Compute the redact text for an event.
*
* @param context the context
* @param event the event
* @param roomState the room state
* @return the redacted event text
*/
@Nullable
public static String getRedactionMessage(Context context, Event event, RoomState roomState) {
// test if the redacted event must be displayed.
if (!mDisplayRedactedEvents) {
return null;
}
String result = null;
// Check first whether the event has been redacted
if (event != null
&& event.isRedacted()
&& event.unsigned != null
&& event.unsigned.redacted_because != null
&& roomState != null) {
RedactedBecause redactedBecause = event.unsigned.redacted_because;
String redactedBy = redactedBecause.sender;
String redactedReason = null;
if (redactedBecause.content != null) {
redactedReason = redactedBecause.content.reason;
}
if (TextUtils.isEmpty(redactedBy)) {
// No by
if (TextUtils.isEmpty(redactedReason)) {
// no reason
result = context.getString(R.string.notice_event_redacted);
} else {
// reason
result = context.getString(R.string.notice_event_redacted_with_reason, redactedReason);
}
} else {
// by
if (TextUtils.isEmpty(redactedReason)) {
// no reason
result = context.getString(R.string.notice_event_redacted_by, redactedBy);
} else {
// by and reason
result = context.getString(R.string.notice_event_redacted_by_with_reason, redactedBy, redactedReason);
}
}
}
return result;
}
/**
* Compute the sender display name
*
* @param event the event
* @param eventContent the event content
* @param prevEventContent the prev event content
* @param roomState the room state
* @return the "human readable" display name
*/
protected static String senderDisplayNameForEvent(Event event, EventContent eventContent, EventContent prevEventContent, RoomState roomState) {
String senderDisplayName = event.getSender();
if (!event.isRedacted()) {
if (null != roomState) {
// Consider first the current display name defined in provided room state
// (Note: this room state is supposed to not take the new event into account)
senderDisplayName = roomState.getMemberName(event.getSender());
}
// Check whether this sender name is updated by the current event (This happens in case of new joined member)
if ((null != eventContent) && TextUtils.equals(RoomMember.MEMBERSHIP_JOIN, eventContent.membership)) {
// detect if it is a displayname update
// a display name update is detected when the previous state was join and there was a displayname
if (!TextUtils.isEmpty(eventContent.displayname)
|| ((null != prevEventContent)
&& TextUtils.equals(RoomMember.MEMBERSHIP_JOIN, prevEventContent.membership)
&& !TextUtils.isEmpty(prevEventContent.displayname))) {
senderDisplayName = eventContent.displayname;
}
}
}
return senderDisplayName;
}
/**
* Build a membership notice text from its dedicated event.
*
* @param context the context.
* @param event the event.
* @param roomState the room state.
* @return the membership text.
*/
public static String getMembershipNotice(Context context, Event event, RoomState roomState) {
JsonObject content = event.getContentAsJsonObject();
// don't support redacted membership event
if ((null == content) || (content.entrySet().size() == 0)) {
return null;
}
EventContent eventContent = JsonUtils.toEventContent(event.getContentAsJsonObject());
EventContent prevEventContent = event.getPrevContent();
String senderDisplayName = senderDisplayNameForEvent(event, eventContent, prevEventContent, roomState);
String prevUserDisplayName = null;
String prevMembership = null;
if (null != prevEventContent) {
prevMembership = prevEventContent.membership;
}
if ((null != prevEventContent)) {
prevUserDisplayName = prevEventContent.displayname;
}
// use by default the provided display name
String targetDisplayName = eventContent.displayname;
// if it is not provided, use the stateKey value
// and try to retrieve a valid display name
if (null == targetDisplayName) {
targetDisplayName = event.stateKey;
if ((null != targetDisplayName) && (null != roomState) && !event.isRedacted()) {
targetDisplayName = roomState.getMemberName(targetDisplayName);
}
}
// Check whether the sender has updated his profile (the membership is then unchanged)
if (TextUtils.equals(prevMembership, eventContent.membership)) {
String redactedInfo = getRedactionMessage(context, event, roomState);
// Is redacted event?
if (event.isRedacted()) {
// Here the event is ignored (no display)
if (null == redactedInfo) {
return null;
}
return context.getString(R.string.notice_profile_change_redacted, senderDisplayName, redactedInfo);
} else {
String displayText = "";
if (!TextUtils.equals(senderDisplayName, prevUserDisplayName)) {
if (TextUtils.isEmpty(prevUserDisplayName)) {
if (!TextUtils.equals(event.getSender(), senderDisplayName)) {
displayText = context.getString(R.string.notice_display_name_set, event.getSender(), senderDisplayName);
}
} else if (TextUtils.isEmpty(senderDisplayName)) {
displayText = context.getString(R.string.notice_display_name_removed, event.getSender(), prevUserDisplayName);
} else {
displayText = context.getString(R.string.notice_display_name_changed_from, event.getSender(), prevUserDisplayName, senderDisplayName);
}
}
// Check whether the avatar has been changed
String avatar = eventContent.avatar_url;
String prevAvatar = null;
if (null != prevEventContent) {
prevAvatar = prevEventContent.avatar_url;
}
if (!TextUtils.equals(prevAvatar, avatar)) {
if (!TextUtils.isEmpty(displayText)) {
displayText = displayText + " " + context.getString(R.string.notice_avatar_changed_too);
} else {
displayText = context.getString(R.string.notice_avatar_url_changed, senderDisplayName);
}
}
return displayText;
}
} else if (RoomMember.MEMBERSHIP_INVITE.equals(eventContent.membership)) {
if (null != eventContent.third_party_invite) {
return context.getString(R.string.notice_room_third_party_registered_invite, targetDisplayName, eventContent.third_party_invite.display_name);
} else {
String selfUserId = null;
if ((null != roomState) && (null != roomState.getDataHandler())) {
selfUserId = roomState.getDataHandler().getUserId();
}
if (TextUtils.equals(event.stateKey, selfUserId)) {
return context.getString(R.string.notice_room_invite_you, senderDisplayName);
}
if (null == event.stateKey) {
return context.getString(R.string.notice_room_invite_no_invitee, senderDisplayName);
}
// conference call case
if (targetDisplayName.equals(MXCallsManager.getConferenceUserId(event.roomId))) {
return context.getString(R.string.notice_requested_voip_conference, senderDisplayName);
}
return context.getString(R.string.notice_room_invite, senderDisplayName, targetDisplayName);
}
} else if (RoomMember.MEMBERSHIP_JOIN.equals(eventContent.membership)) {
// conference call case
if (TextUtils.equals(event.sender, MXCallsManager.getConferenceUserId(event.roomId))) {
return context.getString(R.string.notice_voip_started);
}
return context.getString(R.string.notice_room_join, senderDisplayName);
} else if (RoomMember.MEMBERSHIP_LEAVE.equals(eventContent.membership)) {
// conference call case
if (TextUtils.equals(event.sender, MXCallsManager.getConferenceUserId(event.roomId))) {
return context.getString(R.string.notice_voip_finished);
}
// 2 cases here: this member may have left voluntarily or they may have been "left" by someone else, i.e. kicked
if (TextUtils.equals(event.getSender(), event.stateKey)) {
if ((null != prevEventContent) && TextUtils.equals(prevEventContent.membership, RoomMember.MEMBERSHIP_INVITE)) {
return context.getString(R.string.notice_room_reject, senderDisplayName);
} else {
// use the latest known displayname
if ((null == eventContent.displayname) && (null != prevUserDisplayName)) {
senderDisplayName = prevUserDisplayName;
}
return context.getString(R.string.notice_room_leave, senderDisplayName);
}
} else if (null != prevMembership) {
if (prevMembership.equals(RoomMember.MEMBERSHIP_INVITE)) {
return context.getString(R.string.notice_room_withdraw, senderDisplayName, targetDisplayName);
} else if (prevMembership.equals(RoomMember.MEMBERSHIP_JOIN)) {
return context.getString(R.string.notice_room_kick, senderDisplayName, targetDisplayName);
} else if (prevMembership.equals(RoomMember.MEMBERSHIP_BAN)) {
return context.getString(R.string.notice_room_unban, senderDisplayName, targetDisplayName);
}
}
} else if (RoomMember.MEMBERSHIP_BAN.equals(eventContent.membership)) {
return context.getString(R.string.notice_room_ban, senderDisplayName, targetDisplayName);
} else if (RoomMember.MEMBERSHIP_KICK.equals(eventContent.membership)) {
return context.getString(R.string.notice_room_kick, senderDisplayName, targetDisplayName);
} else {
Log.e(LOG_TAG, "Unknown membership: " + eventContent.membership);
}
return null;
}
/**
* @param context the context
* @param jsonEventContent the current jsonEventContent
* @param roomState the room state
* @param htmlToolbox an optional htmlToolbox to manage html images and tag
* @return the formatted message as CharSequence
*/
private CharSequence getFormattedMessage(@NonNull final Context context,
@NonNull final JsonObject jsonEventContent,
@NonNull final RoomState roomState,
@Nullable final HtmlToolbox htmlToolbox) {
final String format = jsonEventContent.getAsJsonPrimitive("format").getAsString();
CharSequence text = null;
if (Message.FORMAT_MATRIX_HTML.equals(format)) {
String htmlBody = jsonEventContent.getAsJsonPrimitive("formatted_body").getAsString();
if (htmlToolbox != null) {
htmlBody = htmlToolbox.convert(htmlBody);
}
// Special treatment for "In reply to" message
if (jsonEventContent.has("m.relates_to")) {
final JsonElement relatesTo = jsonEventContent.get("m.relates_to");
if (relatesTo.isJsonObject()) {
if (relatesTo.getAsJsonObject().has("m.in_reply_to")) {
// Note: <mx-reply> tag has been removed by HtmlToolbox.convert()
// Replace <blockquote><a href=\"__permalink__\">In reply to</a>
// By <blockquote>['In reply to' from resources]
// To disable the link and to localize the "In reply to" string
if (htmlBody.startsWith(MESSAGE_IN_REPLY_TO_FIRST_PART)) {
final int index = htmlBody.indexOf(MESSAGE_IN_REPLY_TO_LAST_PART);
if (index != -1) {
String bodyRest = htmlBody.substring(index + MESSAGE_IN_REPLY_TO_LAST_PART.length());
// find indices of displayName/mxid
int indexMxidStart = bodyRest.indexOf(MESSAGE_IN_REPLY_TO_HREF_TAG_END);
int indexMxidStop = bodyRest.indexOf(MESSAGE_IN_REPLY_TO_LAST_PART);
if (indexMxidStart != -1 && indexMxidStop != -1) {
indexMxidStart = indexMxidStart + MESSAGE_IN_REPLY_TO_HREF_TAG_END.length();
// extract potential mxid (could also be display name)
String mxid = bodyRest.substring(indexMxidStart, indexMxidStop);
// convert mxid to display name
String userDisplayName = roomState.getMemberName(mxid);
// reconstruct message
if (!userDisplayName.equals(mxid)) {
bodyRest = bodyRest.substring(0, indexMxidStart) + userDisplayName + bodyRest.substring(indexMxidStop);
}
}
htmlBody = MESSAGE_IN_REPLY_TO_FIRST_PART
+ context.getString(R.string.message_reply_to_prefix)
+ bodyRest;
}
}
}
}
}
// some markers are not supported so fall back on an ascii display until we find the right way to manage them
// an issue has been created https://github.com/vector-im/vector-android/issues/38
// BMA re-enable <ol> and <li> support (https://github.com/vector-im/riot-android/issues/2184)
if (!TextUtils.isEmpty(htmlBody)) {
final Html.ImageGetter imageGetter;
final Html.TagHandler tagHandler;
if (htmlToolbox != null) {
imageGetter = htmlToolbox.getImageGetter();
tagHandler = htmlToolbox.getTagHandler(htmlBody);
} else {
imageGetter = null;
tagHandler = null;
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
text = Html.fromHtml(htmlBody,
Html.FROM_HTML_SEPARATOR_LINE_BREAK_LIST_ITEM | Html.FROM_HTML_SEPARATOR_LINE_BREAK_LIST,
imageGetter, tagHandler);
} else {
text = Html.fromHtml(htmlBody, imageGetter, tagHandler);
}
// fromHtml formats quotes (> character) with two newlines at the end
// remove any newlines at the end of the CharSequence
while (text.length() > 0 && text.charAt(text.length() - 1) == '\n') {
text = text.subSequence(0, text.length() - 1);
}
}
}
return text;
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/keysbackup/KeysBackupStateManager.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.keysbackup
import org.matrix.androidsdk.core.Log
import org.matrix.androidsdk.crypto.MXCrypto
import java.util.*
class KeysBackupStateManager(val crypto: MXCrypto) {
private val mListeners = ArrayList<KeysBackupStateListener>()
// Backup state
var state = KeysBackupState.Unknown
set(newState) {
Log.d("KeysBackup", "setState: $field -> $newState")
field = newState
// Notify listeners about the state change, on the ui thread
crypto.getUIHandler().post {
synchronized(mListeners) {
mListeners.forEach {
// Use newState because state may have already changed again
it.onStateChange(newState)
}
}
}
}
val isEnabled: Boolean
get() = state == KeysBackupState.ReadyToBackUp
|| state == KeysBackupState.WillBackUp
|| state == KeysBackupState.BackingUp
// True if unknown or bad state
val isStucked: Boolean
get() = state == KeysBackupState.Unknown
|| state == KeysBackupState.Disabled
|| state == KeysBackupState.WrongBackUpVersion
|| state == KeysBackupState.NotTrusted
/**
* E2e keys backup states.
*
* <pre>
* |
* V deleteKeyBackupVersion (on current backup)
* +----------------------> UNKNOWN <-------------
* | |
* | | checkAndStartKeysBackup (at startup or on new verified device or a new detected backup)
* | V
* | CHECKING BACKUP
* | |
* | Network error |
* +<----------+----------------+-------> DISABLED <----------------------+
* | | | | |
* | | | | createKeysBackupVersion |
* | V | V |
* +<--- WRONG VERSION | ENABLING |
* | ^ | | |
* | | V ok | error |
* | | +------> READY <--------+----------------------------+
* V | | |
* NOT TRUSTED | | | on new key
* | | V
* | | WILL BACK UP (waiting a random duration)
* | | |
* | | |
* | | ok V
* | +----- BACKING UP
* | |
* | Error |
* +<---------------+
* </pre>
*/
enum class KeysBackupState {
// Need to check the current backup version on the homeserver
Unknown,
// Checking if backup is enabled on the homeserver
CheckingBackUpOnHomeserver,
// Backup has been stopped because a new backup version has been detected on the homeserver
WrongBackUpVersion,
// Backup from this device is not enabled
Disabled,
// There is a backup available on the homeserver but it is not trusted.
// It is not trusted because the signature is invalid or the device that created it is not verified
// Use [KeysBackup.getKeysBackupTrust()] to get trust details.
// Consequently, the backup from this device is not enabled.
NotTrusted,
// Backup is being enabled: the backup version is being created on the homeserver
Enabling,
// Backup is enabled and ready to send backup to the homeserver
ReadyToBackUp,
// e2e keys are going to be sent to the homeserver
WillBackUp,
// e2e keys are being sent to the homeserver
BackingUp
}
interface KeysBackupStateListener {
fun onStateChange(newState: KeysBackupState)
}
fun addListener(listener: KeysBackupStateListener) {
synchronized(mListeners) {
mListeners.add(listener)
}
}
fun removeListener(listener: KeysBackupStateListener) {
synchronized(mListeners) {
mListeners.remove(listener)
}
}
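// Illustrative usage sketch (not part of the original source):
//     keysBackupStateManager.addListener(object : KeysBackupStateListener {
//         override fun onStateChange(newState: KeysBackupState) {
//             // update the UI according to the new backup state
//         }
//     })
// The receiver name is an assumption; remember to call removeListener() when done.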
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/RequestPhoneNumberValidationResponse.java<|end_filename|>
/*
* Copyright 2017 Vector Creations Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model;
/**
* Response to a phone number validation request
*/
public class RequestPhoneNumberValidationResponse {
// the client secret key
public String clientSecret;
// the attempt count
public Integer sendAttempt;
// the sid
public String sid;
// the msisdn
public String msisdn;
// phone number international format
public String intl_fmt;
// The url to submit the code to
public String submit_url;
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/rest/model/crypto/KeyVerificationKey.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.rest.model.crypto
import com.google.gson.annotations.SerializedName
/**
* Sent by both devices to send their ephemeral Curve25519 public key to the other device.
*/
class KeyVerificationKey : SendToDeviceObject {
companion object {
fun create(tid: String, key: String): KeyVerificationKey {
return KeyVerificationKey().apply {
this.transactionID = tid
this.key = key
}
}
}
/**
* the ID of the transaction that the message is part of
*/
@SerializedName("transaction_id")
@JvmField
var transactionID: String? = null
/**
* The device’s ephemeral public key, as an unpadded base64 string
*/
@JvmField
var key: String? = null
fun isValid(): Boolean {
if (transactionID.isNullOrBlank() || key.isNullOrBlank()) {
return false
}
return true
}
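// Illustrative usage sketch (not part of the original source):
//     val verificationKey = KeyVerificationKey.create(tid = "some-transaction-id", key = "unpaddedBase64Key")
//     if (verificationKey.isValid()) { /* send it as a to-device event */ }
// Both argument values are placeholders.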
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/RoomKeysRequestListener.kt<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto
/**
* Room keys events listener
*/
interface RoomKeysRequestListener {
/**
* A room key request has been received.
*
* @param request the request
*/
fun onRoomKeyRequest(request: IncomingRoomKeyRequest)
/**
* A room key request cancellation has been received.
*
* @param request the cancellation request
*/
fun onRoomKeyRequestCancellation(request: IncomingRoomKeyRequestCancellation)
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/client/IdentityPingRestClient.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.client
import org.matrix.androidsdk.HomeServerConnectionConfig
import org.matrix.androidsdk.RestClient
import org.matrix.androidsdk.core.JsonUtils
import org.matrix.androidsdk.core.callback.ApiCallback
import org.matrix.androidsdk.rest.api.IdentityPingApi
import org.matrix.androidsdk.rest.callback.RestAdapterCallback
class IdentityPingRestClient(hsConfig: HomeServerConnectionConfig) :
RestClient<IdentityPingApi>(hsConfig, IdentityPingApi::class.java, "", JsonUtils.getGson(false), true) {
fun ping(callback: ApiCallback<Void>) {
mApi.ping().enqueue(RestAdapterCallback("ping", null, callback, null))
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/api/IdentityAuthApi.kt<|end_filename|>
/*
* Copyright 2019 The Matrix.org Foundation C.I.C.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.api
import org.matrix.androidsdk.rest.model.identityserver.IdentityAccountResponse
import org.matrix.androidsdk.rest.model.identityserver.IdentityServerRegisterResponse
import org.matrix.androidsdk.rest.model.openid.RequestOpenIdTokenResponse
import retrofit2.Call
import retrofit2.http.Body
import retrofit2.http.GET
import retrofit2.http.POST
/**
* Ref:
* - https://github.com/matrix-org/matrix-doc/blob/master/proposals/1961-integrations-auth.md
* - https://github.com/matrix-org/matrix-doc/blob/dbkr/tos_2/proposals/2140-terms-of-service-2.md#is-register-api
*/
interface IdentityAuthApi {
/**
* Check that we can use the identity server. You'll get a 403 if this is not the case
*/
@GET("account")
fun checkAccount(): Call<IdentityAccountResponse>
/**
* Register with the server
*
* @param body the body content
*/
@POST("account/register")
fun register(@Body requestOpenIdTokenResponse: RequestOpenIdTokenResponse): Call<IdentityServerRegisterResponse>
/**
* Unregister from the server
* Authenticated method
*/
@POST("account/logout")
fun logout(): Call<Unit>
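// Illustrative usage sketch (not part of the original source): a rest client built on this
// interface would typically enqueue the calls, mirroring IdentityPingRestClient above, e.g.
//     mApi.register(requestOpenIdTokenResponse)
//         .enqueue(RestAdapterCallback("register", null, callback, null))
// 'mApi', 'requestOpenIdTokenResponse' and 'callback' are assumed to be provided by that client.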
}
<|start_filename|>matrix-sdk-core/src/main/java/org/matrix/androidsdk/core/rest/DefaultRetrofit2ResponseHandler.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core.rest;
import org.matrix.androidsdk.core.model.HttpError;
import java.io.IOException;
import retrofit2.Response;
public class DefaultRetrofit2ResponseHandler {
public static <T> void handleResponse(Response<T> response, Listener<T> listener)
throws IOException {
if (response.isSuccessful()) {
listener.onSuccess(response);
} else {
String errorBody = response.errorBody().string();
listener.onHttpError(new HttpError(errorBody, response.code()));
}
}
public interface Listener<T> {
void onSuccess(Response<T> response);
void onHttpError(HttpError httpError);
}
}
<|start_filename|>matrix-sdk/src/androidTest/java/org/matrix/androidsdk/lazyloading/RoomMembersTest.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.lazyloading;
import androidx.test.InstrumentationRegistry;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runners.MethodSorters;
import org.matrix.androidsdk.MXSession;
import org.matrix.androidsdk.common.CommonTestHelper;
import org.matrix.androidsdk.common.TestApiCallback;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.rest.model.RoomMember;
import java.util.List;
import java.util.concurrent.CountDownLatch;
@FixMethodOrder(MethodSorters.JVM)
public class RoomMembersTest {
private CommonTestHelper mTestHelper = new CommonTestHelper();
private LazyLoadingTestHelper mLazyLoadingTestHelper = new LazyLoadingTestHelper(mTestHelper);
@BeforeClass
public static void init() {
MXSession.initUserAgent(InstrumentationRegistry.getContext(), null);
}
@Test
public void RoomMembers_CheckTotalCountAsync_ShouldLoadAllMembers() throws Exception {
RoomMembers_CheckTotalCountAsync(false);
}
@Test
public void RoomMembers_CheckTotalCountAsync_LazyLoadedMembers() throws Exception {
RoomMembers_CheckTotalCountAsync(true);
}
private void RoomMembers_CheckTotalCountAsync(final boolean withLazyLoading) throws Exception {
final LazyLoadingScenarioData data = mLazyLoadingTestHelper.createScenario(withLazyLoading);
mTestHelper.syncSession(data.aliceSession, false);
final Room aliceRoom = data.aliceSession.getDataHandler().getRoom(data.roomId);
final CountDownLatch lock = new CountDownLatch(1);
aliceRoom.getMembersAsync(new TestApiCallback<List<RoomMember>>(lock) {
@Override
public void onSuccess(List<RoomMember> roomMembers) {
Assert.assertEquals(4, roomMembers.size());
super.onSuccess(roomMembers);
}
});
mTestHelper.await(lock);
mLazyLoadingTestHelper.clearAllSessions(data);
}
@Test
public void RoomMembers_CheckActiveCountAsync_ShouldLoadAllMembers() throws Exception {
RoomMembers_CheckActiveCountAsync(false);
}
@Test
public void RoomMembers_CheckActiveCountAsync_LazyLoadedMembers() throws Exception {
RoomMembers_CheckActiveCountAsync(true);
}
private void RoomMembers_CheckActiveCountAsync(final boolean withLazyLoading) throws Exception {
final LazyLoadingScenarioData data = mLazyLoadingTestHelper.createScenario(withLazyLoading);
mTestHelper.syncSession(data.aliceSession, false);
final Room aliceRoom = data.aliceSession.getDataHandler().getRoom(data.roomId);
final CountDownLatch lock = new CountDownLatch(1);
aliceRoom.getActiveMembersAsync(new TestApiCallback<List<RoomMember>>(lock) {
@Override
public void onSuccess(List<RoomMember> roomMembers) {
Assert.assertEquals(4, roomMembers.size());
super.onSuccess(roomMembers);
}
});
mTestHelper.await(lock);
mLazyLoadingTestHelper.clearAllSessions(data);
}
@Test
public void RoomMembers_CheckJoinedCountAsync_ShouldLoadAllMembers() throws Exception {
RoomMembers_CheckJoinedCountAsync(false);
}
@Test
public void RoomMembers_CheckJoinedCountAsync_LazyLoadedMembers() throws Exception {
RoomMembers_CheckJoinedCountAsync(true);
}
private void RoomMembers_CheckJoinedCountAsync(final boolean withLazyLoading) throws Exception {
final LazyLoadingScenarioData data = mLazyLoadingTestHelper.createScenario(withLazyLoading);
mTestHelper.syncSession(data.aliceSession, false);
final Room aliceRoom = data.aliceSession.getDataHandler().getRoom(data.roomId);
final CountDownLatch lock = new CountDownLatch(1);
aliceRoom.getJoinedMembersAsync(new TestApiCallback<List<RoomMember>>(lock) {
@Override
public void onSuccess(List<RoomMember> roomMembers) {
Assert.assertEquals(3, roomMembers.size());
super.onSuccess(roomMembers);
}
});
mTestHelper.await(lock);
mLazyLoadingTestHelper.clearAllSessions(data);
}
@Test
public void RoomMembers_CheckAlreadyLoadedCount_ShouldLoadAllMembers() throws Exception {
RoomMembers_CheckAlreadyLoadedCount(false);
}
@Test
public void RoomMembers_CheckAlreadyLoadedCount_LazyLoadedMembers() throws Exception {
RoomMembers_CheckAlreadyLoadedCount(true);
}
private void RoomMembers_CheckAlreadyLoadedCount(final boolean withLazyLoading) throws Exception {
final LazyLoadingScenarioData data = mLazyLoadingTestHelper.createScenario(withLazyLoading);
mTestHelper.syncSession(data.aliceSession, false);
final Room aliceRoom = data.aliceSession.getDataHandler().getRoom(data.roomId);
final List<RoomMember> members = aliceRoom.getState().getLoadedMembers();
if (withLazyLoading) {
Assert.assertEquals(1, members.size());
} else {
Assert.assertEquals(4, members.size());
}
mLazyLoadingTestHelper.clearAllSessions(data);
}
}
<|start_filename|>matrix-sdk/src/test/java/org/matrix/androidsdk/rest/client/RestClientTest.java<|end_filename|>
package org.matrix.androidsdk.rest.client;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
/**
* Unit tests RestClient.
*/
@RunWith(RobolectricTestRunner.class)
public class RestClientTest {
private static final String BASE_URL = "http://localhost:8008/_matrix/client/api/v1";
private static final String PATH = "/publicRooms";
/**
* Tests: MXApiService.loadPublicRooms(LoadPublicRoomsCallback)
* Summary: Mocks up a single public room in the response and asserts that the callback contains
* the mocked information.
*/
@Test
public void testPublicRooms() throws Exception {
/*final String roomId = "!faifuhew9:localhost";
final String roomTopic = "This is a test room.";
final String roomName = "<NAME>";
final int roomMembers = 6;
JSONArray rooms = new JSONArray();
final JSONObject json = new JSONObject();
json.put("chunk", rooms);
json.put("next_batch", "123");
JSONObject room = new JSONObject().put("name", roomName)
.put("num_joined_members", roomMembers).put("room_id", roomId).put("topic", roomTopic);
rooms.put(room);
final PublicRoomsResponse publicRoomsResponse = mGson.fromJson(json.toString(),
new TypeToken<PublicRoomsResponse>(){}.getType());
EventsApi eventsApi = mock(EventsApi.class);
PublicRoomsParams publicRoomsParams = new PublicRoomsParams();
publicRoomsParams.server = "dummyServer";
publicRoomsParams.limit = 10;
publicRoomsParams.since = null;
publicRoomsParams.filter = null;
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Callback<PublicRoomsResponse> callback = (Callback<PublicRoomsResponse>)invocation.getArguments()[0];
Response response = null;
try {
response = RetrofitUtils.createJsonResponse(BASE_URL + PATH, 200, json);
}
catch (Exception e) {
Assert.assertTrue("Exception thrown: "+e, false);
}
callback.success(publicRoomsResponse, response);
return null;
}
})
.when(eventsApi).publicRooms(publicRoomsParams, any(Callback.class));
EventsRestClient client = new EventsRestClient(eventsApi);
ApiCallback<PublicRoomsResponse> cb = mock(ApiCallback.class);
// run the method being tested
client.loadPublicRooms("dummyServer", null, null, 10, cb);
ArgumentCaptor<PublicRoomsResponse> captor = ArgumentCaptor.forClass(PublicRoomsResponse.class);
verify(cb, times(1)).onSuccess(captor.capture());
List<PublicRoom> publicRooms = (captor.getValue()).chunk;
assertEquals(1, publicRooms.size());
PublicRoom pr = publicRooms.get(0);
assertEquals(roomName, pr.name);
assertEquals(roomId, pr.roomId);
assertEquals(roomTopic, pr.topic);
assertEquals(roomMembers, pr.numJoinedMembers);*/
}
/**
* Tests: MXApiService.loadPublicRooms(LoadPublicRoomsCallback)
* Summary: Fails the public rooms HTTP call.
*/
@Test
public void testPublicRoomsError() throws Exception {
/*EventsApi eventsApi = mock(EventsApi.class);
PublicRoomsParams publicRoomsParams = new PublicRoomsParams();
publicRoomsParams.server = "dummyServer";
publicRoomsParams.limit = 10;
publicRoomsParams.since = null;
publicRoomsParams.filter = null;
doAnswer(new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
Callback<PublicRoomsResponse> callback =
(Callback<PublicRoomsResponse>) invocation.getArguments()[0];
callback.failure(RetrofitUtils.createMatrixError(BASE_URL + PATH,
JSONUtils.error(500)));
return null;
}
})
.when(eventsApi).publicRooms(publicRoomsParams, any(Callback.class));
EventsRestClient client = new EventsRestClient(eventsApi);
ApiCallback<PublicRoomsResponse> cb = mock(ApiCallback.class);
// run the method being tested
client.loadPublicRooms("dummyServer", null, null, 10, cb);
verify(cb, times(0)).onSuccess(any(PublicRoomsResponse.class));*/
}
}
<|start_filename|>matrix-sdk/src/test/java/org/matrix/androidsdk/core/PermalinkUtilsTest.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core;
import android.net.Uri;
import org.junit.Assert;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.MethodSorters;
import org.robolectric.RobolectricTestRunner;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
@RunWith(RobolectricTestRunner.class)
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class PermalinkUtilsTest {
// supported host list
private static final List<String> sSupportedVectorHosts = Arrays.asList("vector.im", "riot.im");
// supported paths list
private static final List<String> sSupportedVectorLinkPaths = Arrays.asList("/beta/", "/develop/", "/app/", "/staging/");
@Test
public void parseUniversalLink_standardCase() {
Map<String, String> result = testUri("https://matrix.to/#/!GnEEPYXUhoaHbkFBNX:matrix.org/$154089010924835FMJsT:sorunome.de");
Assert.assertEquals("!GnEEPYXUhoaHbkFBNX:matrix.org", result.get(PermalinkUtils.ULINK_ROOM_ID_OR_ALIAS_KEY));
Assert.assertEquals("$154089010924835FMJsT:sorunome.de", result.get(PermalinkUtils.ULINK_EVENT_ID_KEY));
Assert.assertNull(result.get(PermalinkUtils.ULINK_GROUP_ID_KEY));
Assert.assertNull(result.get(PermalinkUtils.ULINK_MATRIX_USER_ID_KEY));
}
private Map<String, String> testUri(String uri) {
Map<String, String> result = PermalinkUtils.parseUniversalLink(Uri.parse(uri), sSupportedVectorHosts, sSupportedVectorLinkPaths);
Assert.assertNotNull(result);
return result;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/timeline/EventTimeline.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.timeline;
import androidx.annotation.NonNull;
import org.matrix.androidsdk.core.callback.ApiCallback;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.data.store.IMXStore;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.sync.InvitedRoomSync;
import org.matrix.androidsdk.rest.model.sync.RoomSync;
/**
* A `EventTimeline` instance represents a contiguous sequence of events in a room.
* <p>
* There are two kinds of timeline:
* <p>
* - live timelines: they receive live events from the events stream. You can paginate
* backwards but not forwards.
* All (live or backwards) events they receive are stored in the store of the current
* MXSession.
* <p>
* - past timelines: they start in the past from an `initialEventId`. They are filled
* with events on calls of [MXEventTimeline paginate] in backwards or forwards direction.
* Events are stored in a in-memory store (MXMemoryStore).
*/
public interface EventTimeline {
/**
* Defines that the current timeline is an historical one
*
* @param isHistorical true when the current timeline is an historical one
*/
void setIsHistorical(boolean isHistorical);
/**
* Returns true if the current timeline is an historical one
*/
boolean isHistorical();
/**
* @return the unique identifier
*/
String getTimelineId();
/**
* @return the dedicated room
*/
Room getRoom();
/**
* @return the used store
*/
IMXStore getStore();
/**
* @return the initial event id.
*/
String getInitialEventId();
/**
* @return true if this timeline is the live one
*/
boolean isLiveTimeline();
/**
* Get whether we are at the end of the message stream
*
* @return true if end has been reached
*/
boolean hasReachedHomeServerForwardsPaginationEnd();
/**
* Reset the back state so that future history requests start over from live.
* Must be called when opening a room if interested in history.
*/
void initHistory();
/**
* @return The state of the room at the most recent event of the timeline.
*/
RoomState getState();
/**
* Update the state.
*
* @param state the new state.
*/
void setState(RoomState state);
/**
* Handle the invitation room events
*
* @param invitedRoomSync the invitation room events.
*/
void handleInvitedRoomSync(InvitedRoomSync invitedRoomSync);
/**
* Manage the joined room events.
*
* @param roomSync the roomSync.
* @param isGlobalInitialSync true if the sync has been triggered by a global initial sync
*/
void handleJoinedRoomSync(@NonNull RoomSync roomSync, boolean isGlobalInitialSync);
/**
* Store an outgoing event.
*
* @param event the event to store
*/
void storeOutgoingEvent(Event event);
/**
* Tells if a back pagination can be triggered.
*
* @return true if a back pagination can be triggered.
*/
boolean canBackPaginate();
/**
* Request older messages.
*
* @param callback the asynchronous callback
* @return true if request starts
*/
boolean backPaginate(ApiCallback<Integer> callback);
/**
* Request older messages.
*
* @param eventCount number of events we want to retrieve
* @param callback callback to implement to be informed that the pagination request has been completed. Can be null.
* @return true if request starts
*/
boolean backPaginate(int eventCount, ApiCallback<Integer> callback);
/**
* Request older messages.
*
* @param eventCount number of events we want to retrieve
* @param useCachedOnly to use the cached events list only (i.e no request will be triggered)
* @param callback callback to implement to be informed that the pagination request has been completed. Can be null.
* @return true if request starts
*/
boolean backPaginate(int eventCount, boolean useCachedOnly, ApiCallback<Integer> callback);
/**
* Request newer messages.
*
* @param callback callback to implement to be informed that the pagination request has been completed. Can be null.
* @return true if request starts
*/
boolean forwardPaginate(ApiCallback<Integer> callback);
/**
* Trigger a pagination in the expected direction.
*
* @param direction the direction.
* @param callback the callback.
* @return true if the operation succeeds
*/
boolean paginate(Direction direction, ApiCallback<Integer> callback);
/**
* Cancel any pending pagination requests
*/
void cancelPaginationRequests();
/**
* Reset the pagination timeline and start loading the context around its `initialEventId`.
* The retrieved (backwards and forwards) events will be sent to registered listeners.
*
* @param limit the maximum number of messages to get around the initial event.
* @param callback the operation callback
*/
void resetPaginationAroundInitialEvent(int limit, ApiCallback<Void> callback);
/**
* Add an events listener.
*
* @param listener the listener to add.
*/
void addEventTimelineListener(Listener listener);
/**
* Remove an events listener.
*
* @param listener the listener to remove.
*/
void removeEventTimelineListener(Listener listener);
/**
* The direction from which an incoming event is considered.
*/
enum Direction {
/**
* Forwards when the event is added to the end of the timeline.
* These events come from the /sync stream or from forwards pagination.
*/
FORWARDS,
/**
* Backwards when the event is added to the start of the timeline.
* These events come from a back pagination.
*/
BACKWARDS
}
interface Listener {
/**
* Call when an event has been handled in the timeline.
*
* @param event the event.
* @param direction the direction.
* @param roomState the room state
*/
void onEvent(Event event, Direction direction, RoomState roomState);
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/MXCryptoConfig.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto;
/**
* Class to define the parameters used to customize or configure the end-to-end crypto.
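* <p>
* Typical usage sketch (the configured object is then handed to the crypto module when the
* session is created; that wiring is out of scope here):
* <pre>{@code
* MXCryptoConfig cryptoConfig = new MXCryptoConfig();
* // also encrypt for invited members, when the room history visibility allows it
* cryptoConfig.mEnableEncryptionForInvitedMembers = true;
* }</pre>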
*/
public class MXCryptoConfig {
// Tell whether the encryption of the event content is enabled for the invited members.
// By default, we encrypt messages only for the joined members.
// The encryption for the invited members will be blocked if the history visibility is "joined".
public boolean mEnableEncryptionForInvitedMembers = false;
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/rest/model/crypto/KeyVerificationAccept.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.rest.model.crypto
import com.google.gson.annotations.SerializedName
import org.matrix.androidsdk.core.Log
import org.matrix.androidsdk.crypto.verification.SASVerificationTransaction
/**
* Sent by Bob to accept a verification from a previously sent m.key.verification.start message.
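*
* Construction sketch using the factory below. All values are illustrative: the protocol / hash /
* MAC / SAS identifiers come from the SAS verification flow and are not constants defined in this file.
*
* ```
* val accept = KeyVerificationAccept.create(
*         tid = "some-transaction-id",
*         keyAgreementProtocol = "curve25519",
*         hash = "sha256",
*         commitment = "<unpadded base64 commitment>",
*         messageAuthenticationCode = "hkdf-hmac-sha256",
*         shortAuthenticationStrings = listOf("decimal", "emoji"))
* check(accept.isValid())
* ```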
*/
class KeyVerificationAccept : SendToDeviceObject {
/**
* string to identify the transaction.
* This string must be unique for the pair of users performing verification for the duration that the transaction is valid.
* Alice’s device should record this ID and use it in future messages in this transaction.
*/
@JvmField
@SerializedName("transaction_id")
var transactionID: String? = null
/**
* The key agreement protocol that Bob’s device has selected to use, out of the list proposed by Alice’s device
*/
@JvmField
@SerializedName("key_agreement_protocol")
var keyAgreementProtocol: String? = null
/**
* The hash algorithm that Bob’s device has selected to use, out of the list proposed by Alice’s device
*/
@JvmField
var hash: String? = null
/**
* The message authentication code that Bob’s device has selected to use, out of the list proposed by Alice’s device
*/
@JvmField
@SerializedName("message_authentication_code")
var messageAuthenticationCode: String? = null
/**
* An array of short authentication string methods that Bob’s client (and Bob) understands. Must be a subset of the list proposed by Alice’s device
*/
@JvmField
@SerializedName("short_authentication_string")
var shortAuthenticationStrings: List<String>? = null
/**
* The hash (encoded as unpadded base64) of the concatenation of the device’s ephemeral public key (QB, encoded as unpadded base64)
* and the canonical JSON representation of the m.key.verification.start message.
*/
@JvmField
var commitment: String? = null
fun isValid(): Boolean {
if (transactionID.isNullOrBlank()
|| keyAgreementProtocol.isNullOrBlank()
|| hash.isNullOrBlank()
|| commitment.isNullOrBlank()
|| messageAuthenticationCode.isNullOrBlank()
|| shortAuthenticationStrings.isNullOrEmpty()) {
Log.e(SASVerificationTransaction.LOG_TAG, "## received invalid verification request")
return false
}
return true
}
companion object {
fun create(tid: String,
keyAgreementProtocol: String,
hash: String,
commitment: String,
messageAuthenticationCode: String,
shortAuthenticationStrings: List<String>): KeyVerificationAccept {
return KeyVerificationAccept().apply {
this.transactionID = tid
this.keyAgreementProtocol = keyAgreementProtocol
this.hash = hash
this.commitment = commitment
this.messageAuthenticationCode = messageAuthenticationCode
this.shortAuthenticationStrings = shortAuthenticationStrings
}
}
}
}
<|start_filename|>matrix-sdk-core/src/main/java/org/matrix/androidsdk/core/JsonUtility.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import java.lang.reflect.Constructor;
import java.lang.reflect.Modifier;
import java.util.Map;
import java.util.TreeSet;
public class JsonUtility {
private static final String LOG_TAG = JsonUtility.class.getSimpleName();
private static final Gson basicGson = new Gson();
private static final Gson gson = new GsonBuilder()
.excludeFieldsWithModifiers(Modifier.PRIVATE, Modifier.STATIC)
.create();
// add a call to serializeNulls().
// by default the null parameters are not sent in the requests.
// serializeNulls forces to add them.
private static final Gson gsonWithNullSerialization = new GsonBuilder()
.excludeFieldsWithModifiers(Modifier.PRIVATE, Modifier.STATIC)
.serializeNulls()
.create();
// for crypto (canonicalize)
// avoid converting "=" to \u003d
private static final Gson gsonWithoutHtmlEscaping = new GsonBuilder()
.disableHtmlEscaping()
.excludeFieldsWithModifiers(Modifier.PRIVATE, Modifier.STATIC)
.create();
/**
* Canonicalize a JsonElement element
*
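* Object members are recursively sorted by key, so that two semantically identical payloads
* produce the same serialized form. Illustrative example (Gson's {@code JsonParser} is not
* imported in this file):
* <pre>{@code
* JsonElement src = JsonParser.parseString("{\"b\":1,\"a\":{\"d\":4,\"c\":3}}");
* JsonElement canonical = JsonUtility.canonicalize(src);
* // canonical.toString() -> {"a":{"c":3,"d":4},"b":1}
* }</pre>
*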
* @param src the source element
* @return the canonicalized element
*/
public static JsonElement canonicalize(JsonElement src) {
// sanity check
if (null == src) {
return null;
}
if (src instanceof JsonArray) {
// Canonicalize each element of the array
JsonArray srcArray = (JsonArray) src;
JsonArray result = new JsonArray();
for (int i = 0; i < srcArray.size(); i++) {
result.add(canonicalize(srcArray.get(i)));
}
return result;
} else if (src instanceof JsonObject) {
// Sort the attributes by name, and the canonicalize each element of the object
JsonObject srcObject = (JsonObject) src;
JsonObject result = new JsonObject();
TreeSet<String> attributes = new TreeSet<>();
for (Map.Entry<String, JsonElement> entry : srcObject.entrySet()) {
attributes.add(entry.getKey());
}
for (String attribute : attributes) {
result.add(attribute, canonicalize(srcObject.get(attribute)));
}
return result;
} else {
return src;
}
}
/**
* Create a canonicalized json string for an object
*
* @param object the object to convert
* @return the canonicalized string
*/
public static String getCanonicalizedJsonString(Object object) {
String canonicalizedJsonString = null;
if (null != object) {
if (object instanceof JsonElement) {
canonicalizedJsonString = gsonWithoutHtmlEscaping.toJson(canonicalize((JsonElement) object));
} else {
canonicalizedJsonString = gsonWithoutHtmlEscaping.toJson(canonicalize(gsonWithoutHtmlEscaping.toJsonTree(object)));
}
if (null != canonicalizedJsonString) {
canonicalizedJsonString = canonicalizedJsonString.replace("\\/", "/");
}
}
return canonicalizedJsonString;
}
/**
* Provides the basic JSON parser.
*
* @return the basic JSON parser
*/
public static Gson getBasicGson() {
return basicGson;
}
/**
* Provides the JSON parser.
*
* @param withNullSerialization true to serialise the null parameters
* @return the JSON parser
*/
public static Gson getGson(boolean withNullSerialization) {
return withNullSerialization ? gsonWithNullSerialization : gson;
}
/**
* Convert a JSON object into a class instance.
* If the parsing fails, an instance created with the default constructor is returned when possible.
*
* @param jsonObject the json object to convert
* @param aClass the class
* @return the converted object
*/
public static <T> T toClass(JsonElement jsonObject, Class<T> aClass) {
T object = null;
try {
object = gson.fromJson(jsonObject, aClass);
} catch (Exception e) {
Log.e(LOG_TAG, "## toClass failed " + e.getMessage(), e);
}
if (null == object) {
try {
final Constructor<T> constructor = aClass.getConstructor();
object = constructor.newInstance();
} catch (Throwable t) {
Log.e(LOG_TAG, "## toClass failed " + t.getMessage(), t);
}
}
return object;
}
/**
* Convert a stringified JSON into a class instance.
* If the parsing fails, an instance created with the default constructor is returned when possible.
*
* @param jsonObjectAsString the json object as string to convert
* @param aClass the class
* @return the converted object
*/
public static <T> T toClass(String jsonObjectAsString, Class<T> aClass) {
T object = null;
try {
object = gson.fromJson(jsonObjectAsString, aClass);
} catch (Exception e) {
Log.e(LOG_TAG, "## toClass failed " + e.getMessage(), e);
}
if (null == object) {
try {
final Constructor<T> constructor = aClass.getConstructor();
object = constructor.newInstance();
} catch (Throwable t) {
Log.e(LOG_TAG, "## toClass failed " + t.getMessage(), t);
}
}
return object;
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/model/crypto/RoomKeyRequestBody.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.model.crypto;
import com.google.gson.annotations.SerializedName;
/**
* Class representing a room key request body content
* <p>
* Note: Keep Serializable interface for the Realm migration
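* <p>
* With the Gson {@code SerializedName} mappings below, a serialized request body looks like
* (field values are illustrative):
* <pre>{@code
* {
*   "algorithm": "m.megolm.v1.aes-sha2",
*   "room_id": "!roomid:example.org",
*   "sender_key": "<curve25519 sender key>",
*   "session_id": "<megolm session id>"
* }
* }</pre>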
*/
public class RoomKeyRequestBody implements java.io.Serializable {
public String algorithm;
@SerializedName("room_id")
public String roomId;
@SerializedName("sender_key")
public String senderKey;
@SerializedName("session_id")
public String sessionId;
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/sync/EventsThread.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.sync;
import android.annotation.SuppressLint;
import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.PowerManager;
import android.os.SystemClock;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.core.callback.ApiFailureCallback;
import org.matrix.androidsdk.core.callback.SimpleApiCallback;
import org.matrix.androidsdk.core.listeners.IMXNetworkEventListener;
import org.matrix.androidsdk.core.model.MatrixError;
import org.matrix.androidsdk.data.metrics.MetricsListener;
import org.matrix.androidsdk.network.NetworkConnectivityReceiver;
import org.matrix.androidsdk.rest.client.EventsRestClient;
import org.matrix.androidsdk.rest.model.sync.RoomsSyncResponse;
import org.matrix.androidsdk.rest.model.sync.SyncResponse;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
/**
* Thread that continually watches the event stream and sends events to its listener.
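* <p>
* Lifecycle sketch (illustrative; it only uses the public methods defined in this class and
* assumes the rest client, listener and initial sync token are already available):
* <pre>{@code
* EventsThread eventsThread = new EventsThread(context, eventsRestClient, listener, initialToken);
* eventsThread.setServerLongPollTimeout(30000);
* eventsThread.start();     // runs the initial sync, then long-polls /sync
* // later, when the application is backgrounded / foregrounded:
* eventsThread.pause();
* eventsThread.unpause();
* // on logout:
* eventsThread.kill();
* }</pre>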
*/
public class EventsThread extends Thread {
private static final String LOG_TAG = EventsThread.class.getSimpleName();
private static final int RETRY_WAIT_TIME_MS = 10000;
private static final int DEFAULT_SERVER_TIMEOUT_MS = 30000;
private static final int DEFAULT_CLIENT_TIMEOUT_MS = 120000;
private EventsRestClient mEventsRestClient;
private EventsThreadListener mListener;
private String mCurrentToken;
private MetricsListener mMetricsListener;
private boolean mPaused = true;
private boolean mIsNetworkSuspended = false;
private boolean mIsCatchingUp = false;
private boolean mIsOnline = false;
private boolean mKilling = false;
private int mDefaultServerTimeoutms = DEFAULT_SERVER_TIMEOUT_MS;
private int mNextServerTimeoutms = DEFAULT_SERVER_TIMEOUT_MS;
// add a delay between two sync requests
private final Context mContext;
private int mRequestDelayMs = 0;
private final AlarmManager mAlarmManager;
private PowerManager mPowerManager;
private PendingIntent mPendingDelayedIntent;
private static final Map<String, EventsThread> mSyncObjectByInstance = new HashMap<>();
// avoid sync on "this" because it might differ if there is a timer.
private final Object mSyncObject = new Object();
// Custom Retrofit error callback that will convert Retrofit errors into our own error callback
private ApiFailureCallback mFailureCallback;
// avoid restarting the listener if there is no network.
// wait that there is an available network.
private NetworkConnectivityReceiver mNetworkConnectivityReceiver;
private boolean mbIsConnected = true;
// use dedicated filter when enable
private String mFilterOrFilterId;
private final IMXNetworkEventListener mNetworkListener = new IMXNetworkEventListener() {
@Override
public void onNetworkConnectionUpdate(boolean isConnected) {
Log.d(LOG_TAG, "onNetworkConnectionUpdate : before " + mbIsConnected + " now " + isConnected);
synchronized (mSyncObject) {
mbIsConnected = isConnected;
}
// the thread has been suspended and there is an available network
if (isConnected && !mKilling) {
Log.d(LOG_TAG, "onNetworkConnectionUpdate : call onNetworkAvailable");
onNetworkAvailable();
}
}
};
/**
* Default constructor.
*
* @param context the context
* @param apiClient API client to make the events API calls
* @param listener a listener to inform
* @param initialToken the sync initial token.
*/
public EventsThread(Context context, EventsRestClient apiClient, EventsThreadListener listener, String initialToken) {
super("Events thread");
mContext = context;
mEventsRestClient = apiClient;
mListener = listener;
mCurrentToken = initialToken;
mSyncObjectByInstance.put(toString(), this);
mAlarmManager = (AlarmManager) context.getSystemService(Context.ALARM_SERVICE);
mPowerManager = (PowerManager) context.getSystemService(Context.POWER_SERVICE);
}
/**
* Set the metrics listener.
*
* @param metricsListener the metrics listener
*/
public void setMetricsListener(MetricsListener metricsListener) {
this.mMetricsListener = metricsListener;
}
/**
* @return the current sync token
*/
public String getCurrentSyncToken() {
return mCurrentToken;
}
/**
* Set filterOrFilterId used for /sync requests
*
* @param filterOrFilterId the filter definition or the filter ID to use for /sync requests
*/
public void setFilterOrFilterId(String filterOrFilterId) {
mFilterOrFilterId = filterOrFilterId;
}
/**
* Update the long poll timeout.
*
* @param ms the timeout in ms
*/
public void setServerLongPollTimeout(int ms) {
mDefaultServerTimeoutms = Math.max(ms, DEFAULT_SERVER_TIMEOUT_MS);
Log.d(LOG_TAG, "setServerLongPollTimeout : " + mDefaultServerTimeoutms);
}
/**
* @return the long poll timeout
*/
public int getServerLongPollTimeout() {
return mDefaultServerTimeoutms;
}
/**
* Set a delay between two sync requests.
*
* @param ms the delay in ms
*/
public void setSyncDelay(int ms) {
mRequestDelayMs = Math.max(0, ms);
Log.d(LOG_TAG, "## setSyncDelay() : " + mRequestDelayMs + " with state " + getState());
if (State.WAITING == getState() && (!mPaused || (0 == mRequestDelayMs) && mIsCatchingUp)) {
if (!mPaused) {
Log.d(LOG_TAG, "## setSyncDelay() : resume the application");
}
if ((0 == mRequestDelayMs) && mIsCatchingUp) {
Log.d(LOG_TAG, "## setSyncDelay() : cancel catchup");
mIsCatchingUp = false;
}
// and sync asap
synchronized (mSyncObject) {
mSyncObject.notify();
}
}
}
/**
* @return the delay between two sync requests.
*/
public int getSyncDelay() {
return mRequestDelayMs;
}
/**
* Set the network connectivity listener.
* It is used to avoid restarting the events threads each 10 seconds when there is no available network.
*
* @param networkConnectivityReceiver the network receiver
*/
public void setNetworkConnectivityReceiver(NetworkConnectivityReceiver networkConnectivityReceiver) {
mNetworkConnectivityReceiver = networkConnectivityReceiver;
}
/**
* Set the failure callback.
*
* @param failureCallback the failure callback.
*/
public void setFailureCallback(ApiFailureCallback failureCallback) {
mFailureCallback = failureCallback;
}
/**
* Pause the thread. It will resume where it left off when unpause()d.
*/
public void pause() {
Log.d(LOG_TAG, "pause()");
mPaused = true;
mIsCatchingUp = false;
}
/**
* A network connection has been retrieved.
*/
private void onNetworkAvailable() {
Log.d(LOG_TAG, "onNetWorkAvailable()");
if (mIsNetworkSuspended) {
mIsNetworkSuspended = false;
if (mPaused) {
Log.d(LOG_TAG, "the event thread is still suspended");
} else {
Log.d(LOG_TAG, "Resume the thread");
// cancel any catchup process.
mIsCatchingUp = false;
synchronized (mSyncObject) {
mSyncObject.notify();
}
}
} else {
Log.d(LOG_TAG, "onNetWorkAvailable() : nothing to do");
}
}
/**
* Unpause the thread if it had previously been paused. If not, this does nothing.
*/
public void unpause() {
Log.d(LOG_TAG, "## unpause() : thread state " + getState());
if (State.WAITING == getState()) {
Log.d(LOG_TAG, "## unpause() : the thread was paused so resume it.");
mPaused = false;
synchronized (mSyncObject) {
mSyncObject.notify();
}
}
// cancel any catchup process.
mIsCatchingUp = false;
}
/**
* Catchup until some events are retrieved.
*/
public void catchup() {
Log.d(LOG_TAG, "## catchup() : thread state " + getState());
if (State.WAITING == getState()) {
Log.d(LOG_TAG, "## catchup() : the thread was paused so wake it up");
mPaused = false;
synchronized (mSyncObject) {
mSyncObject.notify();
}
}
mIsCatchingUp = true;
}
/**
* Allow the thread to finish its current processing, then permanently stop.
*/
public void kill() {
Log.d(LOG_TAG, "killing ...");
mKilling = true;
if (mPaused) {
Log.d(LOG_TAG, "killing : the thread was pause so wake it up");
mPaused = false;
synchronized (mSyncObject) {
mSyncObject.notify();
}
Log.d(LOG_TAG, "Resume the thread to kill it.");
}
}
/**
* Cancel the killing process
*/
public void cancelKill() {
if (mKilling) {
Log.d(LOG_TAG, "## cancelKill() : Cancel the pending kill");
mKilling = false;
} else {
Log.d(LOG_TAG, "## cancelKill() : Nothing to d");
}
}
/**
* Update the online status
*
* @param isOnline true if the client must be seen as online
*/
public void setIsOnline(boolean isOnline) {
Log.d(LOG_TAG, "setIsOnline to " + isOnline);
mIsOnline = isOnline;
}
/**
* Tells if the presence is online.
*
* @return true if the user is seen as online.
*/
public boolean isOnline() {
return mIsOnline;
}
@Override
public void run() {
try {
Looper.prepare();
} catch (Exception e) {
Log.e(LOG_TAG, "## run() : prepare failed " + e.getMessage(), e);
}
startSync();
}
/**
* Tells if a sync request contains some changed devices.
*
* @param syncResponse the sync response
* @return true if the response contains some changed devices.
*/
private static boolean hasDevicesChanged(SyncResponse syncResponse) {
return (null != syncResponse.deviceLists)
&& (null != syncResponse.deviceLists.changed)
&& (syncResponse.deviceLists.changed.size() > 0);
}
/**
* Use a broadcast receiver because the Timer delay might be inaccurate when the screen is turned off.
* For example, request a 1 min delay and get a 6 mins one.
*/
public static class SyncDelayReceiver extends BroadcastReceiver {
public static final String EXTRA_INSTANCE_ID = "EXTRA_INSTANCE_ID";
@Override
public void onReceive(Context context, Intent intent) {
String instanceId = intent.getStringExtra(EXTRA_INSTANCE_ID);
if ((null != instanceId) && mSyncObjectByInstance.containsKey(instanceId)) {
EventsThread eventsThread = mSyncObjectByInstance.get(instanceId);
eventsThread.mPendingDelayedIntent = null;
Log.d(LOG_TAG, "start a sync after " + eventsThread.mRequestDelayMs + " ms");
synchronized (eventsThread.mSyncObject) {
eventsThread.mSyncObject.notify();
}
}
}
}
private void resumeInitialSync() {
Log.d(LOG_TAG, "Resuming initial sync from " + mCurrentToken);
// dummy initial sync
// to hide the splash screen
SyncResponse dummySyncResponse = new SyncResponse();
dummySyncResponse.nextBatch = mCurrentToken;
mListener.onSyncResponse(dummySyncResponse, null, true);
}
private void executeInitialSync() {
Log.d(LOG_TAG, "Requesting initial sync...");
long initialSyncStartTime = System.currentTimeMillis();
while (!isInitialSyncDone()) {
final CountDownLatch latch = new CountDownLatch(1);
mEventsRestClient.syncFromToken(null, 0, DEFAULT_CLIENT_TIMEOUT_MS, mIsOnline ? null : "offline", mFilterOrFilterId,
new SimpleApiCallback<SyncResponse>(mFailureCallback) {
@Override
public void onSuccess(SyncResponse syncResponse) {
Log.d(LOG_TAG, "Received initial sync response.");
mNextServerTimeoutms = hasDevicesChanged(syncResponse) ? 0 : mDefaultServerTimeoutms;
mListener.onSyncResponse(syncResponse, null, (0 == mNextServerTimeoutms));
mCurrentToken = syncResponse.nextBatch;
// unblock the events thread
latch.countDown();
}
private void sleepAndUnblock() {
Log.i(LOG_TAG, "Waiting a bit before retrying");
new Handler(Looper.getMainLooper()).postDelayed(new Runnable() {
public void run() {
latch.countDown();
}
}, RETRY_WAIT_TIME_MS);
}
@Override
public void onNetworkError(Exception e) {
if (isInitialSyncDone()) {
// Ignore error
// FIXME I think this is the source of infinite initial sync if a network error occurs
// FIXME because latch is not counted down. TO BE TESTED
onSuccess(null);
} else {
Log.e(LOG_TAG, "Sync V2 onNetworkError " + e.getMessage(), e);
super.onNetworkError(e);
sleepAndUnblock();
}
}
@Override
public void onMatrixError(MatrixError e) {
super.onMatrixError(e);
if (MatrixError.isConfigurationErrorCode(e.errcode)) {
mListener.onConfigurationError(e.errcode);
} else {
mListener.onSyncError(e);
sleepAndUnblock();
}
}
@Override
public void onUnexpectedError(Exception e) {
super.onUnexpectedError(e);
Log.e(LOG_TAG, "Sync V2 onUnexpectedError " + e.getMessage(), e);
sleepAndUnblock();
}
});
// block until the initial sync callback is invoked.
try {
latch.await();
} catch (InterruptedException e) {
Log.e(LOG_TAG, "Interrupted whilst performing initial sync.", e);
} catch (Exception e) {
// reported by GA
// The thread might have been killed.
Log.e(LOG_TAG, "latch.await() failed " + e.getMessage(), e);
}
}
long initialSyncEndTime = System.currentTimeMillis();
long initialSyncDuration = initialSyncEndTime - initialSyncStartTime;
if (mMetricsListener != null) {
mMetricsListener.onInitialSyncFinished(initialSyncDuration);
}
}
/**
* Start the events sync
*/
@SuppressLint("NewApi")
private void startSync() {
int serverTimeout;
mPaused = false;
if (isInitialSyncDone()) {
resumeInitialSync();
serverTimeout = 0;
} else {
// Start with initial sync
executeInitialSync();
serverTimeout = mNextServerTimeoutms;
}
Log.d(LOG_TAG, "Starting event stream from token " + mCurrentToken);
// sanity check
if (null != mNetworkConnectivityReceiver) {
mNetworkConnectivityReceiver.addEventListener(mNetworkListener);
//
mbIsConnected = mNetworkConnectivityReceiver.isConnected();
mIsNetworkSuspended = !mbIsConnected;
}
// Then repeatedly long-poll for events
while (!mKilling) {
// test if a delay between two syncs
if ((!mPaused && !mIsNetworkSuspended) && (0 != mRequestDelayMs)) {
Log.d(LOG_TAG, "startSync : start a delay timer ");
Intent intent = new Intent(mContext, SyncDelayReceiver.class);
intent.putExtra(SyncDelayReceiver.EXTRA_INSTANCE_ID, toString());
mPendingDelayedIntent = PendingIntent.getBroadcast(mContext, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
long futureInMillis = SystemClock.elapsedRealtime() + mRequestDelayMs;
if ((Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
&& mPowerManager.isIgnoringBatteryOptimizations(mContext.getPackageName())) {
mAlarmManager.setAndAllowWhileIdle(AlarmManager.ELAPSED_REALTIME_WAKEUP, futureInMillis, mPendingDelayedIntent);
} else {
mAlarmManager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, futureInMillis, mPendingDelayedIntent);
}
}
if (mPaused || mIsNetworkSuspended || (null != mPendingDelayedIntent)) {
if (null != mPendingDelayedIntent) {
Log.d(LOG_TAG, "Event stream is paused because there is a timer delay.");
} else if (mIsNetworkSuspended) {
Log.d(LOG_TAG, "Event stream is paused because there is no available network.");
} else {
Log.d(LOG_TAG, "Event stream is paused. Waiting.");
}
try {
Log.d(LOG_TAG, "startSync : wait ...");
synchronized (mSyncObject) {
mSyncObject.wait();
}
if (null != mPendingDelayedIntent) {
Log.d(LOG_TAG, "startSync : cancel mSyncDelayTimer");
mAlarmManager.cancel(mPendingDelayedIntent);
mPendingDelayedIntent.cancel();
mPendingDelayedIntent = null;
}
Log.d(LOG_TAG, "Event stream woken from pause.");
// perform a catchup asap
serverTimeout = 0;
} catch (InterruptedException e) {
Log.e(LOG_TAG, "Unexpected interruption while paused: " + e.getMessage(), e);
}
}
// the service could have been killed while being paused.
if (!mKilling) {
long incrementalSyncStartTime = System.currentTimeMillis();
final CountDownLatch latch = new CountDownLatch(1);
Log.d(LOG_TAG, "Get events from token " + mCurrentToken + " with filterOrFilterId " + mFilterOrFilterId);
final int fServerTimeout = serverTimeout;
mNextServerTimeoutms = mDefaultServerTimeoutms;
mEventsRestClient.syncFromToken(mCurrentToken, serverTimeout, DEFAULT_CLIENT_TIMEOUT_MS, mIsOnline ? null : "offline", mFilterOrFilterId,
new SimpleApiCallback<SyncResponse>(mFailureCallback) {
@Override
public void onSuccess(SyncResponse syncResponse) {
if (!mKilling) {
// poll /sync with timeout=0 until
// we get no to_device messages back.
if (0 == fServerTimeout) {
if (hasDevicesChanged(syncResponse)) {
if (mIsCatchingUp) {
Log.d(LOG_TAG, "Some devices have changed but do not set mNextServerTimeoutms to 0 to avoid infinite loops");
} else {
Log.d(LOG_TAG, "mNextServerTimeoutms is set to 0 because of hasDevicesChanged "
+ syncResponse.deviceLists.changed);
mNextServerTimeoutms = 0;
}
}
}
// the catchup request is suspended when there is no need
// to loop again
if (mIsCatchingUp && (0 != mNextServerTimeoutms)) {
// the catchup triggers sync requests until there are some useful events
int eventCounts = 0;
if (null != syncResponse.rooms) {
RoomsSyncResponse roomsSyncResponse = syncResponse.rooms;
if (null != roomsSyncResponse.join) {
eventCounts += roomsSyncResponse.join.size();
}
if (null != roomsSyncResponse.invite) {
eventCounts += roomsSyncResponse.invite.size();
}
}
// stop any catch up
mIsCatchingUp = false;
mPaused = (0 == mRequestDelayMs);
Log.d(LOG_TAG, "Got " + eventCounts + " useful events while catching up : mPaused is set to " + mPaused);
}
Log.d(LOG_TAG, "Got event response");
mListener.onSyncResponse(syncResponse, mCurrentToken, (0 == mNextServerTimeoutms));
mCurrentToken = syncResponse.nextBatch;
Log.d(LOG_TAG, "mCurrentToken is now set to " + mCurrentToken);
}
// unblock the events thread
latch.countDown();
}
private void onError(String description) {
boolean isConnected;
Log.d(LOG_TAG, "Got an error while polling events " + description);
synchronized (mSyncObject) {
isConnected = mbIsConnected;
}
// detected if the device is connected before trying again
if (isConnected) {
new Handler(Looper.getMainLooper()).postDelayed(new Runnable() {
public void run() {
latch.countDown();
}
}, RETRY_WAIT_TIME_MS);
} else {
// no network -> wait that a network connection comes back.
mIsNetworkSuspended = true;
latch.countDown();
}
}
@Override
public void onNetworkError(Exception e) {
onError(e.getLocalizedMessage());
}
@Override
public void onMatrixError(MatrixError e) {
if (MatrixError.isConfigurationErrorCode(e.errcode)) {
mListener.onConfigurationError(e.errcode);
} else {
mListener.onSyncError(e);
onError(e.getLocalizedMessage());
}
}
@Override
public void onUnexpectedError(Exception e) {
onError(e.getLocalizedMessage());
}
});
// block until the sync callback is invoked.
try {
latch.await();
} catch (InterruptedException e) {
Log.e(LOG_TAG, "Interrupted whilst polling message", e);
} catch (Exception e) {
// reported by GA
// The thread might have been killed.
Log.e(LOG_TAG, "latch.await() failed " + e.getMessage(), e);
}
long incrementalSyncEndTime = System.currentTimeMillis();
long incrementalSyncDuration = incrementalSyncEndTime - incrementalSyncStartTime;
if (mMetricsListener != null) {
mMetricsListener.onIncrementalSyncFinished(incrementalSyncDuration);
}
}
serverTimeout = mNextServerTimeoutms;
}
if (null != mNetworkConnectivityReceiver) {
mNetworkConnectivityReceiver.removeEventListener(mNetworkListener);
}
Log.d(LOG_TAG, "Event stream terminating.");
}
/**
* Tells whether the initial sync is done, i.e. whether a sync token is available.
*
* @return true if the initial sync is done
*/
private boolean isInitialSyncDone() {
return mCurrentToken != null;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/api/IdentityThirdPidApi.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.api;
import org.matrix.androidsdk.RestClient;
import org.matrix.androidsdk.rest.model.BulkLookupParams;
import org.matrix.androidsdk.rest.model.BulkLookupResponse;
import org.matrix.androidsdk.rest.model.IdentityServerRequest3PIDValidationParams;
import org.matrix.androidsdk.rest.model.IdentityServerRequestTokenResponse;
import org.matrix.androidsdk.rest.model.RequestEmailValidationParams;
import org.matrix.androidsdk.rest.model.RequestOwnershipParams;
import org.matrix.androidsdk.rest.model.SuccessResult;
import org.matrix.androidsdk.rest.model.identityserver.HashDetailResponse;
import org.matrix.androidsdk.rest.model.identityserver.LookUpV2Params;
import org.matrix.androidsdk.rest.model.identityserver.LookUpV2Response;
import retrofit2.Call;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.POST;
import retrofit2.http.Path;
import retrofit2.http.Query;
public interface IdentityThirdPidApi {
/**
* Request the hash details required to look up a bunch of 3PIDs
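* <p>
* Like every endpoint in this interface, this returns a Retrofit {@code Call} which can be
* executed synchronously or enqueued. The sketch below uses the standard Retrofit 2 API only,
* {@code identityThirdPidApi} being an instance created by Retrofit:
* <pre>{@code
* identityThirdPidApi.hashDetails().enqueue(new Callback<HashDetailResponse>() {
*     public void onResponse(Call<HashDetailResponse> call, Response<HashDetailResponse> response) {
*         // response.body() contains the pepper and the supported hash algorithms
*     }
*     public void onFailure(Call<HashDetailResponse> call, Throwable t) { }
* });
* }</pre>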
*/
@GET(RestClient.URI_IDENTITY_PATH_V2 + "hash_details")
Call<HashDetailResponse> hashDetails();
/**
* Request a bunch of 3PIDs
*
* @param body the body request
*/
@POST(RestClient.URI_IDENTITY_PATH_V2 + "lookup")
Call<LookUpV2Response> bulkLookupV2(@Body LookUpV2Params body);
/**
* Request the ownership validation of an email address or a phone number previously set
* by {@link ProfileApi#requestEmailValidation(RequestEmailValidationParams)}
*
* @param medium the medium of the 3pid
*/
@POST(RestClient.URI_IDENTITY_PATH_V2 + "validate/{medium}/submitToken")
Call<SuccessResult> requestOwnershipValidationV2(@Path("medium") String medium,
@Body RequestOwnershipParams body);
/* ==========================================================================================
* Legacy requests
* ========================================================================================== */
/**
* Request a bunch of 3PIDs
* <p>
* Ref: https://matrix.org/docs/spec/identity_service/unstable#post-matrix-identity-api-v1-bulk-lookup
*
* @param body the body request
*/
@POST(RestClient.URI_IDENTITY_PATH + "bulk_lookup")
Call<BulkLookupResponse> bulkLookup(@Body BulkLookupParams body);
/**
* Request the ownership validation of an email address or a phone number previously set
* by {@link ProfileApi#requestEmailValidation(RequestEmailValidationParams)}
* <p>
* Ref: https://matrix.org/docs/spec/identity_service/unstable#post-matrix-identity-api-v1-validate-email-submittoken for email
* Ref: https://matrix.org/docs/spec/identity_service/unstable#post-matrix-identity-api-v1-validate-msisdn-submittoken for phone
*
* @param medium the medium of the 3pid
* @param token the token generated by the requestToken call
* @param clientSecret the client secret which was supplied in the requestToken call
* @param sid the sid for the session
*/
@POST(RestClient.URI_IDENTITY_PATH + "validate/{medium}/submitToken")
Call<SuccessResult> requestOwnershipValidation(@Path("medium") String medium,
@Query("token") String token,
@Query("client_secret") String clientSecret,
@Query("sid") String sid);
/**
* Create a session to change the bind status of an email to an identity server
*
* @param params the third-party identifier validation parameters
* @return the request, whose response contains the session id (sid)
*/
@POST(RestClient.URI_IDENTITY_PATH_V2 + "validate/email/requestToken")
Call<IdentityServerRequestTokenResponse> requestMailValidationToken(@Body IdentityServerRequest3PIDValidationParams params);
/**
* Create a session to change the bind status of a phone number to an identity server
*
* @param params the third-party identifier validation parameters
* @return the request, whose response contains the session id (sid)
*/
@POST(RestClient.URI_IDENTITY_PATH_V2 + "validate/msisdn/requestToken")
Call<IdentityServerRequestTokenResponse> requestPhoneNumberValidationToken(@Body IdentityServerRequest3PIDValidationParams params);
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/model/crypto/EncryptedFileInfo.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto.model.crypto;
import java.io.Serializable;
import java.util.Map;
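/**
* Metadata describing an encrypted attachment (the "file" object of an encrypted message content).
* Illustrative JSON shape (values shortened):
* <pre>{@code
* {
*   "url": "mxc://example.org/abc123",
*   "mimetype": "image/jpeg",
*   "key": { ... },               // a JSON Web Key (see EncryptedFileKey)
*   "iv": "<unpadded base64 iv>",
*   "hashes": { "sha256": "<unpadded base64 hash>" },
*   "v": "v2"
* }
* }</pre>
*/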
public class EncryptedFileInfo implements Serializable {
public String url;
public String mimetype;
public EncryptedFileKey key;
public String iv;
public Map<String, String> hashes;
public String v;
/**
* Make a deep copy.
*
* @return the copy
*/
public EncryptedFileInfo deepCopy() {
EncryptedFileInfo encryptedFile = new EncryptedFileInfo();
encryptedFile.url = url;
encryptedFile.mimetype = mimetype;
if (null != key) {
encryptedFile.key = key.deepCopy();
}
encryptedFile.iv = iv;
encryptedFile.hashes = hashes;
// copy the version field as well, so the copy stays a complete encrypted file description
encryptedFile.v = v;
return encryptedFile;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/listeners/IMXMediaDownloadListener.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.listeners;
import com.google.gson.JsonElement;
/**
* Interface to monitor a media download.
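* <p>
* Minimal listener sketch (illustrative; it only implements the callbacks declared below):
* <pre>{@code
* IMXMediaDownloadListener listener = new IMXMediaDownloadListener() {
*     public void onDownloadStart(String downloadId) { }
*     public void onDownloadProgress(String downloadId, DownloadStats stats) {
*         // e.g. update a progress bar with stats.mProgress
*     }
*     public void onDownloadComplete(String downloadId) { }
*     public void onDownloadError(String downloadId, JsonElement jsonElement) { }
*     public void onDownloadCancel(String downloadId) { }
* };
* }</pre>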
*/
public interface IMXMediaDownloadListener {
/**
* provide some download stats
*/
class DownloadStats {
/**
* The download id
*/
public String mDownloadId;
/**
* the download progress in percentage
*/
public int mProgress;
/**
* The downloaded size in bytes
*/
public int mDownloadedSize;
/**
* The file size in bytes.
*/
public int mFileSize;
/**
* time in seconds since the download started
*/
public int mElapsedTime;
/**
* estimated remaining time in seconds to download the media
*/
public int mEstimatedRemainingTime;
/**
* download bit rate in KB/s
*/
public int mBitRate;
@Override
public String toString() {
String res = "";
res += "mProgress : " + mProgress + "%\n";
res += "mDownloadedSize : " + mDownloadedSize + " bytes\n";
res += "mFileSize : " + mFileSize + "bytes\n";
res += "mElapsedTime : " + mProgress + " seconds\n";
res += "mEstimatedRemainingTime : " + mEstimatedRemainingTime + " seconds\n";
res += "mBitRate : " + mBitRate + " KB/s\n";
return res;
}
}
/**
* The download starts.
*
* @param downloadId the download Identifier
*/
void onDownloadStart(String downloadId);
/**
* The download stats have been updated.
*
* @param downloadId the download Identifier
* @param stats the download stats
*/
void onDownloadProgress(String downloadId, DownloadStats stats);
/**
* The download is completed.
*
* @param downloadId the download Identifier
*/
void onDownloadComplete(String downloadId);
/**
* The download failed.
*
* @param downloadId the download Identifier
* @param jsonElement the error
*/
void onDownloadError(String downloadId, JsonElement jsonElement);
/**
* The download has been cancelled.
*
* @param downloadId the download Identifier
*/
void onDownloadCancel(String downloadId);
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/data/timeline/TimelineStateHolder.java<|end_filename|>
/*
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.data.timeline;
import androidx.annotation.NonNull;
import org.matrix.androidsdk.MXDataHandler;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.data.store.IMXStore;
import org.matrix.androidsdk.rest.model.Event;
/**
* This class is responsible for holding the state and backState of a room timeline
*/
class TimelineStateHolder {
private final MXDataHandler mDataHandler;
private final IMXStore mStore;
private String mRoomId;
/**
* The state of the room at the most recent event of the timeline.
*/
private RoomState mState;
/**
* The historical state of the room when paginating back.
*/
private RoomState mBackState;
TimelineStateHolder(@NonNull final MXDataHandler dataHandler,
@NonNull final IMXStore store,
@NonNull final String roomId) {
mDataHandler = dataHandler;
mStore = store;
mRoomId = roomId;
initStates();
}
/**
* Clear the states
*/
public void clear() {
initStates();
}
/**
* @return The state of the room at the most recent event of the timeline.
*/
@NonNull
public RoomState getState() {
return mState;
}
/**
* Update the state.
*
* @param state the new state.
*/
public void setState(@NonNull final RoomState state) {
mState = state;
}
/**
* @return the backState.
*/
@NonNull
public RoomState getBackState() {
return mBackState;
}
/**
* Update the backState.
*
* @param state the new backState.
*/
public void setBackState(@NonNull final RoomState state) {
mBackState = state;
}
/**
* Make a deep copy of the dedicated state.
*
* @param direction the room state direction to deep copy.
*/
public void deepCopyState(final EventTimeline.Direction direction) {
if (direction == EventTimeline.Direction.FORWARDS) {
mState = mState.deepCopy();
} else {
mBackState = mBackState.deepCopy();
}
}
/**
* Process a state event to keep the internal live and back states up to date.
*
* @param event the state event
* @param direction the direction; ie. forwards for live state, backwards for back state
* @param considerNewContent how the event should affect the state: true for applying, false for un-applying (applying the previous state)
* @return true if the event has been processed.
*/
public boolean processStateEvent(@NonNull final Event event,
@NonNull final EventTimeline.Direction direction,
final boolean considerNewContent) {
final RoomState affectedState;
final IMXStore store;
if (direction == EventTimeline.Direction.FORWARDS) {
affectedState = mState;
store = mStore;
} else {
affectedState = mBackState;
// In the case of backward pagination, we do not want to persist the state event
store = null;
}
final boolean isProcessed = affectedState.applyState(event, considerNewContent, store);
if (isProcessed && direction == EventTimeline.Direction.FORWARDS) {
mStore.storeLiveStateForRoom(mRoomId);
}
return isProcessed;
}
/**
* Set the room Id
*
* @param roomId the new room id.
*/
public void setRoomId(@NonNull final String roomId) {
mRoomId = roomId;
mState.roomId = roomId;
mBackState.roomId = roomId;
}
/**
* Initialize the state and backState to default, with roomId and dataHandler
*/
private void initStates() {
mBackState = new RoomState();
mBackState.setDataHandler(mDataHandler);
mBackState.roomId = mRoomId;
mState = new RoomState();
mState.setDataHandler(mDataHandler);
mState.roomId = mRoomId;
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/core/FileUtils.kt<|end_filename|>
/*
* Copyright 2019 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.core
import android.text.TextUtils
/**
* Get the file extension of a fileUri or a filename
*
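* Fragment, query string and path are stripped before the extension is extracted, e.g.:
*
* ```
* getFileExtension("https://example.org/path/Report.PDF?dl=1#page2") // "pdf"
* getFileExtension("picture.jpeg")                                   // "jpeg"
* getFileExtension("README")                                         // null
* ```
*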
* @param fileUri the fileUri (can be a simple filename)
* @return the file extension, in lower case, or null if the extension is not available or empty
*/
fun getFileExtension(fileUri: String): String? {
var reducedStr = fileUri
if (!TextUtils.isEmpty(reducedStr)) {
// Remove fragment
val fragment = fileUri.lastIndexOf('#')
if (fragment > 0) {
reducedStr = fileUri.substring(0, fragment)
}
// Remove query
val query = reducedStr.lastIndexOf('?')
if (query > 0) {
reducedStr = reducedStr.substring(0, query)
}
// Remove path
val filenamePos = reducedStr.lastIndexOf('/')
val filename = if (0 <= filenamePos) reducedStr.substring(filenamePos + 1) else reducedStr
// Contrary to method MimeTypeMap.getFileExtensionFromUrl, we do not check the pattern
// See https://stackoverflow.com/questions/14320527/android-should-i-use-mimetypemap-getfileextensionfromurl-bugs
if (!filename.isEmpty()) {
val dotPos = filename.lastIndexOf('.')
if (0 <= dotPos) {
val ext = filename.substring(dotPos + 1)
if (ext.isNotBlank()) {
return ext.toLowerCase()
}
}
}
}
return null
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/call/MXCallsManager.java<|end_filename|>
/*
* Copyright 2015 OpenMarket Ltd
* Copyright 2017 Vector Creations Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.call;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import android.text.TextUtils;
import android.util.Base64;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import org.matrix.androidsdk.MXSession;
import org.matrix.androidsdk.core.JsonUtils;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.core.MXPatterns;
import org.matrix.androidsdk.core.callback.ApiCallback;
import org.matrix.androidsdk.core.callback.SimpleApiCallback;
import org.matrix.androidsdk.core.model.MatrixError;
import org.matrix.androidsdk.crypto.MXCryptoError;
import org.matrix.androidsdk.crypto.data.MXDeviceInfo;
import org.matrix.androidsdk.crypto.data.MXUsersDevicesMap;
import org.matrix.androidsdk.data.Room;
import org.matrix.androidsdk.data.RoomState;
import org.matrix.androidsdk.data.store.IMXStore;
import org.matrix.androidsdk.listeners.MXEventListener;
import org.matrix.androidsdk.rest.client.CallRestClient;
import org.matrix.androidsdk.rest.model.CreateRoomParams;
import org.matrix.androidsdk.rest.model.Event;
import org.matrix.androidsdk.rest.model.EventContent;
import org.matrix.androidsdk.rest.model.RoomMember;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Timer;
import java.util.TimerTask;
public class MXCallsManager {
private static final String LOG_TAG = MXCallsManager.class.getSimpleName();
private MXSession mSession;
private Context mContext;
private CallRestClient mCallResClient;
private JsonElement mTurnServer = null;
private Timer mTurnServerTimer = null;
private boolean mSuspendTurnServerRefresh = false;
// active calls
private final Map<String, IMXCall> mCallsByCallId = new HashMap<>();
// listeners
private final Set<IMXCallsManagerListener> mListeners = new HashSet<>();
// incoming calls
private final Set<String> mxPendingIncomingCallId = new HashSet<>();
// UI handler
private final Handler mUIThreadHandler;
public static String defaultStunServerUri;
/*
* To create an outgoing call
* 1- CallsManager.createCallInRoom()
* 2- on success, IMXCall.createCallView
* 3- IMXCallListener.onCallViewCreated(callview) -> insert the callview
* 4- IMXCallListener.onCallReady() -> IMXCall.placeCall()
* 5- the call states should follow these steps
* CALL_STATE_WAIT_LOCAL_MEDIA
* CALL_STATE_WAIT_CREATE_OFFER
* CALL_STATE_INVITE_SENT
* CALL_STATE_RINGING
* 6- the callee accepts the call
* CALL_STATE_CONNECTING
* CALL_STATE_CONNECTED
*
* To manage an incoming call
* 1- IMXCall.createCallView
* 2- IMXCallListener.onCallViewCreated(callview) -> insert the callview
* 3- IMXCallListener.onCallReady(), IMXCall.launchIncomingCall()
* 4- the call states should follow these steps
* CALL_STATE_WAIT_LOCAL_MEDIA
* CALL_STATE_RINGING
* 5- The user accepts the call, IMXCall.answer()
* 6- the states should be
* CALL_STATE_CREATE_ANSWER
* CALL_STATE_CONNECTING
* CALL_STATE_CONNECTED
*/
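/*
* Illustrative lookup sketch: it uses only methods defined in this class, callsManager being
* an MXCallsManager instance; how a call is created and placed is described in the flow above.
*
*   IMXCall call = callsManager.getCallWithRoomId(roomId);
*   if (MXCallsManager.isCallInProgress(call)) {
*       // there is already an active call in this room
*   } else if (callsManager.hasActiveCalls()) {
*       // another call is already in progress elsewhere
*   }
*/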
/**
* Constructor
*
* @param session the session
* @param context the context
*/
public MXCallsManager(MXSession session, Context context) {
mSession = session;
mContext = context;
mUIThreadHandler = new Handler(Looper.getMainLooper());
mCallResClient = mSession.getCallRestClient();
mSession.getDataHandler().addListener(new MXEventListener() {
@Override
public void onLiveEvent(Event event, RoomState roomState) {
if (TextUtils.equals(event.getType(), Event.EVENT_TYPE_STATE_ROOM_MEMBER)) {
// Listen to the membership join/leave events to detect the conference user activity.
// This mechanism detects the presence of an established conf call
if (TextUtils.equals(event.sender, MXCallsManager.getConferenceUserId(event.roomId))) {
EventContent eventContent = JsonUtils.toEventContent(event.getContentAsJsonObject());
if (TextUtils.equals(eventContent.membership, RoomMember.MEMBERSHIP_LEAVE)) {
dispatchOnVoipConferenceFinished(event.roomId);
}
if (TextUtils.equals(eventContent.membership, RoomMember.MEMBERSHIP_JOIN)) {
dispatchOnVoipConferenceStarted(event.roomId);
}
}
}
}
});
if (isSupported()) {
refreshTurnServer();
}
}
/**
* @return true if the call feature is supported
* @apiNote Performs an implicit initialization of the PeerConnectionFactory
*/
public boolean isSupported() {
return MXWebRtcCall.isSupported(mContext);
}
/**
* create a new call
*
* @param callId the call Id (null to use a default value)
* @return the IMXCall
*/
private IMXCall createCall(String callId) {
Log.d(LOG_TAG, "createCall " + callId);
IMXCall call = null;
try {
call = new MXWebRtcCall(mSession, mContext, getTurnServer(), defaultStunServerUri);
// a valid callid is provided
if (null != callId) {
call.setCallId(callId);
}
} catch (Exception e) {
Log.e(LOG_TAG, "createCall " + e.getMessage(), e);
}
return call;
}
/**
* Search a call from its dedicated room id.
*
* @param roomId the room id
* @return the IMXCall if it exists
*/
public IMXCall getCallWithRoomId(String roomId) {
List<IMXCall> calls;
synchronized (this) {
calls = new ArrayList<>(mCallsByCallId.values());
}
for (IMXCall call : calls) {
if (TextUtils.equals(roomId, call.getRoom().getRoomId())) {
if (TextUtils.equals(call.getCallState(), IMXCall.CALL_STATE_ENDED)) {
Log.d(LOG_TAG, "## getCallWithRoomId() : the call " + call.getCallId() + " has been stopped");
synchronized (this) {
mCallsByCallId.remove(call.getCallId());
}
} else {
return call;
}
}
}
return null;
}
/**
* Returns the IMXCall from its callId.
*
* @param callId the call Id
* @return the IMXCall if it exists
*/
public IMXCall getCallWithCallId(String callId) {
return getCallWithCallId(callId, false);
}
/**
* Returns the IMXCall from its callId.
*
* @param callId the call Id
* @param create create the IMXCall if it does not exist
* @return the IMXCall if it exists
*/
private IMXCall getCallWithCallId(String callId, boolean create) {
IMXCall call = null;
// check if the call exists
if (null != callId) {
synchronized (this) {
call = mCallsByCallId.get(callId);
}
}
// test if the call has been stopped
if ((null != call) && TextUtils.equals(call.getCallState(), IMXCall.CALL_STATE_ENDED)) {
Log.d(LOG_TAG, "## getCallWithCallId() : the call " + callId + " has been stopped");
synchronized (this) {
mCallsByCallId.remove(call.getCallId());
}
call = null;
}
// the call does not exist but request to create it
if ((null == call) && create) {
call = createCall(callId);
synchronized (this) {
mCallsByCallId.put(call.getCallId(), call);
}
}
Log.d(LOG_TAG, "getCallWithCallId " + callId + " " + call);
return call;
}
/**
* Tell if a call is in progress.
*
* @param call the call
* @return true if the call is in progress
*/
public static boolean isCallInProgress(IMXCall call) {
boolean res = false;
if (null != call) {
String callState = call.getCallState();
res = TextUtils.equals(callState, IMXCall.CALL_STATE_CREATED)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_CREATING_CALL_VIEW)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_READY)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_WAIT_LOCAL_MEDIA)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_WAIT_CREATE_OFFER)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_INVITE_SENT)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_RINGING)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_CREATE_ANSWER)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_CONNECTING)
|| TextUtils.equals(callState, IMXCall.CALL_STATE_CONNECTED);
}
return res;
}
/**
* @return true if there are some active calls.
*/
public boolean hasActiveCalls() {
synchronized (this) {
List<String> callIdsToRemove = new ArrayList<>();
Set<String> callIds = mCallsByCallId.keySet();
for (String callId : callIds) {
IMXCall call = mCallsByCallId.get(callId);
if (null != call && TextUtils.equals(call.getCallState(), IMXCall.CALL_STATE_ENDED)) {
Log.d(LOG_TAG, "# hasActiveCalls() : the call " + callId + " is not anymore valid");
callIdsToRemove.add(callId);
} else {
Log.d(LOG_TAG, "# hasActiveCalls() : the call " + callId + " is active");
return true;
}
}
for (String callIdToRemove : callIdsToRemove) {
mCallsByCallId.remove(callIdToRemove);
}
}
Log.d(LOG_TAG, "# hasActiveCalls() : no active call");
return false;
}
/**
* Manage the call events.
*
* @param store the dedicated store
* @param event the call event.
*/
public void handleCallEvent(final IMXStore store, final Event event) {
if (event.isCallEvent() && isSupported()) {
Log.d(LOG_TAG, "handleCallEvent " + event.getType());
// always run the call event in the UI thread
// TODO: This was introduced because of MXChromeCall, check if it is required for MXWebRtcCall as well
mUIThreadHandler.post(new Runnable() {
@Override
public void run() {
boolean isMyEvent = TextUtils.equals(event.getSender(), mSession.getMyUserId());
Room room = mSession.getDataHandler().getRoom(store, event.roomId, true);
String callId = null;
JsonObject eventContent = null;
try {
eventContent = event.getContentAsJsonObject();
callId = eventContent.getAsJsonPrimitive("call_id").getAsString();
} catch (Exception e) {
Log.e(LOG_TAG, "handleCallEvent : fail to retrieve call_id " + e.getMessage(), e);
}
// sanity check
if ((null != callId) && (null != room)) {
// receive an invitation
if (Event.EVENT_TYPE_CALL_INVITE.equals(event.getType())) {
long lifeTime = event.getAge();
if (Long.MAX_VALUE == lifeTime) {
lifeTime = System.currentTimeMillis() - event.getOriginServerTs();
}
// ignore older call messages
if (lifeTime < MXCall.CALL_TIMEOUT_MS) {
                            // create the call only if it is triggered by someone else
IMXCall call = getCallWithCallId(callId, !isMyEvent);
// sanity check
if (null != call) {
// init the information
if (null == call.getRoom()) {
call.setRooms(room, room);
}
if (!isMyEvent) {
call.prepareIncomingCall(eventContent, callId, null);
mxPendingIncomingCallId.add(callId);
} else {
call.handleCallEvent(event);
}
}
} else {
Log.d(LOG_TAG, "## handleCallEvent() : " + Event.EVENT_TYPE_CALL_INVITE + " is ignored because it is too old");
}
} else if (Event.EVENT_TYPE_CALL_CANDIDATES.equals(event.getType())) {
if (!isMyEvent) {
IMXCall call = getCallWithCallId(callId);
if (null != call) {
if (null == call.getRoom()) {
call.setRooms(room, room);
}
call.handleCallEvent(event);
}
}
} else if (Event.EVENT_TYPE_CALL_ANSWER.equals(event.getType())) {
IMXCall call = getCallWithCallId(callId);
if (null != call) {
                                // if the call is still in the CREATED state when the answer arrives, assume it is
                                // a catch-up call: it has already been answered on another device
if (IMXCall.CALL_STATE_CREATED.equals(call.getCallState())) {
call.onAnsweredElsewhere();
synchronized (this) {
mCallsByCallId.remove(callId);
}
} else {
if (null == call.getRoom()) {
call.setRooms(room, room);
}
call.handleCallEvent(event);
}
}
} else if (Event.EVENT_TYPE_CALL_HANGUP.equals(event.getType())) {
final IMXCall call = getCallWithCallId(callId);
if (null != call) {
// trigger call events only if the call is active
final boolean isActiveCall = !IMXCall.CALL_STATE_CREATED.equals(call.getCallState());
if (null == call.getRoom()) {
call.setRooms(room, room);
}
if (isActiveCall) {
call.handleCallEvent(event);
}
synchronized (this) {
mCallsByCallId.remove(callId);
}
// warn that a call has been hung up
mUIThreadHandler.post(() -> {
// must warn anyway any listener that the call has been killed
// for example, when the device is in locked screen
// the callview is not created but the device is ringing
// if the other participant ends the call, the ring should stop
dispatchOnCallHangUp(call);
});
}
}
}
}
});
}
}
/**
* check if there is a pending incoming call
*/
public void checkPendingIncomingCalls() {
//Log.d(LOG_TAG, "checkPendingIncomingCalls");
mUIThreadHandler.post(() -> {
if (mxPendingIncomingCallId.size() > 0) {
for (String callId : mxPendingIncomingCallId) {
final IMXCall call = getCallWithCallId(callId);
if (null != call) {
final Room room = call.getRoom();
// for encrypted rooms with 2 members
// check if there are some unknown devices before warning
// of the incoming call.
// If there are some unknown devices, the answer event would not be encrypted.
if ((null != room)
&& room.isEncrypted()
&& mSession.getCrypto() != null
&& mSession.getCrypto().warnOnUnknownDevices()
&& room.getNumberOfJoinedMembers() == 2) {
// test if the encrypted events are sent only to the verified devices (any room)
mSession.getCrypto().getGlobalBlacklistUnverifiedDevices(new SimpleApiCallback<Boolean>() {
@Override
public void onSuccess(Boolean sendToVerifiedDevicesOnly) {
if (sendToVerifiedDevicesOnly) {
dispatchOnIncomingCall(call, null);
} else {
// test if the encrypted events are sent only to the verified devices (only this room)
mSession.getCrypto().isRoomBlacklistUnverifiedDevices(room.getRoomId(), new SimpleApiCallback<Boolean>() {
@Override
public void onSuccess(Boolean sendToVerifiedDevicesOnly) {
if (sendToVerifiedDevicesOnly) {
dispatchOnIncomingCall(call, null);
} else {
room.getJoinedMembersAsync(new ApiCallback<List<RoomMember>>() {
@Override
public void onNetworkError(Exception e) {
dispatchOnIncomingCall(call, null);
}
@Override
public void onMatrixError(MatrixError e) {
dispatchOnIncomingCall(call, null);
}
@Override
public void onUnexpectedError(Exception e) {
dispatchOnIncomingCall(call, null);
}
@Override
public void onSuccess(List<RoomMember> members) {
String userId1 = members.get(0).getUserId();
String userId2 = members.get(1).getUserId();
Log.d(LOG_TAG, "## checkPendingIncomingCalls() : check the unknown devices");
//
mSession.getCrypto()
.checkUnknownDevices(Arrays.asList(userId1, userId2), new ApiCallback<Void>() {
@Override
public void onSuccess(Void anything) {
Log.d(LOG_TAG, "## checkPendingIncomingCalls() : no unknown device");
dispatchOnIncomingCall(call, null);
}
@Override
public void onNetworkError(Exception e) {
Log.e(LOG_TAG,
"## checkPendingIncomingCalls() : checkUnknownDevices failed "
+ e.getMessage(), e);
dispatchOnIncomingCall(call, null);
}
@Override
public void onMatrixError(MatrixError e) {
MXUsersDevicesMap<MXDeviceInfo> unknownDevices = null;
if (e instanceof MXCryptoError) {
MXCryptoError cryptoError = (MXCryptoError) e;
if (MXCryptoError.UNKNOWN_DEVICES_CODE.equals(cryptoError.errcode)) {
unknownDevices =
(MXUsersDevicesMap<MXDeviceInfo>) cryptoError.mExceptionData;
}
}
if (null != unknownDevices) {
Log.d(LOG_TAG, "## checkPendingIncomingCalls() :" +
" checkUnknownDevices found some unknown devices");
} else {
Log.e(LOG_TAG, "## checkPendingIncomingCalls() :" +
" checkUnknownDevices failed " + e.getMessage());
}
dispatchOnIncomingCall(call, unknownDevices);
}
@Override
public void onUnexpectedError(Exception e) {
Log.e(LOG_TAG, "## checkPendingIncomingCalls() :" +
" checkUnknownDevices failed " + e.getMessage(), e);
dispatchOnIncomingCall(call, null);
}
});
}
});
}
}
});
}
}
});
} else {
dispatchOnIncomingCall(call, null);
}
}
}
}
mxPendingIncomingCallId.clear();
});
}
/**
     * Create an IMXCall in the room defined by its room Id.
* -> for a 1:1 call, it is a standard call.
* -> for a conference call,
* ----> the conference user is invited to the room (if it was not yet invited)
     * ----> the call signaling room is created (or retrieved) with the conference user
* ----> and the call is started
*
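     * Example (hypothetical caller; "callsManager" stands for an instance of this class):
     *   callsManager.createCallInRoom(roomId, true, callback); // true = video call
     *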
* @param roomId the room roomId
* @param isVideo true to start a video call
* @param callback the async callback
*/
public void createCallInRoom(final String roomId, final boolean isVideo, final ApiCallback<IMXCall> callback) {
Log.d(LOG_TAG, "createCallInRoom in " + roomId);
final Room room = mSession.getDataHandler().getRoom(roomId);
// sanity check
if (null != room) {
if (isSupported()) {
int joinedMembers = room.getNumberOfJoinedMembers();
Log.d(LOG_TAG, "createCallInRoom : the room has " + joinedMembers + " joined members");
if (joinedMembers > 1) {
if (joinedMembers == 2) {
                        // when a room is encrypted, test first that there is no unknown device,
                        // else the call will fail.
                        // So it seems safer to reject the call creation if it would fail anyway.
if (room.isEncrypted() && mSession.getCrypto() != null && mSession.getCrypto().warnOnUnknownDevices()) {
room.getJoinedMembersAsync(new SimpleApiCallback<List<RoomMember>>(callback) {
@Override
public void onSuccess(List<RoomMember> members) {
if (members.size() != 2) {
// Safety check
callback.onUnexpectedError(new Exception("Wrong number of members"));
return;
}
String userId1 = members.get(0).getUserId();
String userId2 = members.get(1).getUserId();
// force the refresh to ensure that the devices list is up-to-date
mSession.getCrypto().checkUnknownDevices(Arrays.asList(userId1, userId2), new SimpleApiCallback<Void>(callback) {
@Override
public void onSuccess(Void anything) {
final IMXCall call = getCallWithCallId(null, true);
call.setRooms(room, room);
call.setIsVideo(isVideo);
dispatchOnOutgoingCall(call);
if (null != callback) {
mUIThreadHandler.post(() -> callback.onSuccess(call));
}
}
});
}
});
} else {
final IMXCall call = getCallWithCallId(null, true);
call.setIsVideo(isVideo);
dispatchOnOutgoingCall(call);
call.setRooms(room, room);
if (null != callback) {
mUIThreadHandler.post(() -> callback.onSuccess(call));
}
}
} else {
Log.d(LOG_TAG, "createCallInRoom : inviteConferenceUser");
inviteConferenceUser(room, new ApiCallback<Void>() {
@Override
public void onSuccess(Void info) {
Log.d(LOG_TAG, "createCallInRoom : inviteConferenceUser succeeds");
getConferenceUserRoom(room.getRoomId(), new ApiCallback<Room>() {
@Override
public void onSuccess(Room conferenceRoom) {
Log.d(LOG_TAG, "createCallInRoom : getConferenceUserRoom succeeds");
final IMXCall call = getCallWithCallId(null, true);
call.setRooms(room, conferenceRoom);
call.setIsConference(true);
call.setIsVideo(isVideo);
dispatchOnOutgoingCall(call);
if (null != callback) {
mUIThreadHandler.post(() -> callback.onSuccess(call));
}
}
@Override
public void onNetworkError(Exception e) {
Log.e(LOG_TAG, "createCallInRoom : getConferenceUserRoom failed " + e.getMessage(), e);
if (null != callback) {
callback.onNetworkError(e);
}
}
@Override
public void onMatrixError(MatrixError e) {
Log.e(LOG_TAG, "createCallInRoom : getConferenceUserRoom failed " + e.getMessage());
if (null != callback) {
callback.onMatrixError(e);
}
}
@Override
public void onUnexpectedError(Exception e) {
Log.e(LOG_TAG, "createCallInRoom : getConferenceUserRoom failed " + e.getMessage(), e);
if (null != callback) {
callback.onUnexpectedError(e);
}
}
});
}
@Override
public void onNetworkError(Exception e) {
Log.e(LOG_TAG, "createCallInRoom : inviteConferenceUser fails " + e.getMessage(), e);
if (null != callback) {
callback.onNetworkError(e);
}
}
@Override
public void onMatrixError(MatrixError e) {
Log.e(LOG_TAG, "createCallInRoom : inviteConferenceUser fails " + e.getMessage());
if (null != callback) {
callback.onMatrixError(e);
}
}
@Override
public void onUnexpectedError(Exception e) {
Log.e(LOG_TAG, "createCallInRoom : inviteConferenceUser fails " + e.getMessage(), e);
if (null != callback) {
callback.onUnexpectedError(e);
}
}
});
}
} else {
if (null != callback) {
callback.onMatrixError(new MatrixError(MatrixError.NOT_SUPPORTED, "too few users"));
}
}
} else {
if (null != callback) {
callback.onMatrixError(new MatrixError(MatrixError.NOT_SUPPORTED, "VOIP is not supported"));
}
}
} else {
if (null != callback) {
callback.onMatrixError(new MatrixError(MatrixError.NOT_FOUND, "room not found"));
}
}
}
//==============================================================================================================
// Turn servers management
//==============================================================================================================
/**
* Suspend the turn server refresh
*/
public void pauseTurnServerRefresh() {
mSuspendTurnServerRefresh = true;
}
/**
     * Resume the turn server refresh; the refresh is then retried until it succeeds.
*/
public void unpauseTurnServerRefresh() {
Log.d(LOG_TAG, "unpauseTurnServerRefresh");
mSuspendTurnServerRefresh = false;
if (null != mTurnServerTimer) {
mTurnServerTimer.cancel();
mTurnServerTimer = null;
}
refreshTurnServer();
}
/**
* Stop the turn servers refresh.
*/
public void stopTurnServerRefresh() {
Log.d(LOG_TAG, "stopTurnServerRefresh");
mSuspendTurnServerRefresh = true;
if (null != mTurnServerTimer) {
mTurnServerTimer.cancel();
mTurnServerTimer = null;
}
}
/**
* @return the turn server
*/
private JsonElement getTurnServer() {
JsonElement res;
synchronized (LOG_TAG) {
res = mTurnServer;
}
// privacy logs
//Log.d(LOG_TAG, "getTurnServer " + res);
Log.d(LOG_TAG, "getTurnServer ");
return res;
}
/**
* Refresh the turn servers.
*/
private void refreshTurnServer() {
if (mSuspendTurnServerRefresh) {
return;
}
Log.d(LOG_TAG, "## refreshTurnServer () starts");
mUIThreadHandler.post(() -> mCallResClient.getTurnServer(new ApiCallback<JsonObject>() {
private void restartAfter(int msDelay) {
// reported by GA
// "ttl" seems invalid
if (msDelay <= 0) {
Log.e(LOG_TAG, "## refreshTurnServer() : invalid delay " + msDelay);
} else {
if (null != mTurnServerTimer) {
mTurnServerTimer.cancel();
}
try {
mTurnServerTimer = new Timer();
mTurnServerTimer.schedule(new TimerTask() {
@Override
public void run() {
if (mTurnServerTimer != null) {
mTurnServerTimer.cancel();
mTurnServerTimer = null;
}
refreshTurnServer();
}
}, msDelay);
} catch (Throwable e) {
Log.e(LOG_TAG, "## refreshTurnServer() failed to start the timer", e);
if (null != mTurnServerTimer) {
mTurnServerTimer.cancel();
mTurnServerTimer = null;
}
refreshTurnServer();
}
}
}
@Override
public void onSuccess(JsonObject info) {
// privacy
Log.d(LOG_TAG, "## refreshTurnServer () : onSuccess");
//Log.d(LOG_TAG, "onSuccess " + info);
if (null != info) {
if (info.has("uris")) {
synchronized (LOG_TAG) {
mTurnServer = info;
}
}
if (info.has("ttl")) {
int ttl = 60000;
try {
ttl = info.get("ttl").getAsInt();
                            // schedule the next refresh at 90% of the ttl, i.e. shortly before it expires
ttl = ttl * 9 / 10;
} catch (Exception e) {
Log.e(LOG_TAG, "Fail to retrieve ttl " + e.getMessage(), e);
}
Log.d(LOG_TAG, "## refreshTurnServer () : onSuccess : retry after " + ttl + " seconds");
restartAfter(ttl * 1000);
}
}
}
@Override
public void onNetworkError(Exception e) {
Log.e(LOG_TAG, "## refreshTurnServer () : onNetworkError", e);
restartAfter(60000);
}
@Override
public void onMatrixError(MatrixError e) {
Log.e(LOG_TAG, "## refreshTurnServer () : onMatrixError() : " + e.errcode);
if (TextUtils.equals(e.errcode, MatrixError.LIMIT_EXCEEDED) && (null != e.retry_after_ms)) {
Log.e(LOG_TAG, "## refreshTurnServer () : onMatrixError() : retry after " + e.retry_after_ms + " ms");
restartAfter(e.retry_after_ms);
}
}
@Override
public void onUnexpectedError(Exception e) {
// should never happen
Log.e(LOG_TAG, "## refreshTurnServer () : onUnexpectedError()", e);
}
}));
}
//==============================================================================================================
// Conference call
//==============================================================================================================
// Copied from vector-web:
// FIXME: This currently forces Vector to try to hit the matrix.org AS for conferencing.
// This is bad because it prevents people running their own ASes from being used.
// This isn't permanent and will be customisable in the future: see the proposal
// at docs/conferencing.md for more info.
private static final String USER_PREFIX = "fs_";
private static final String DOMAIN = "matrix.org";
private static final Map<String, String> mConferenceUserIdByRoomId = new HashMap<>();
/**
* Return the id of the conference user dedicated for a room Id
*
* @param roomId the room id
* @return the conference user id
*/
public static String getConferenceUserId(String roomId) {
// sanity check
if (null == roomId) {
return null;
}
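        // Illustrative example: for roomId "!abc:matrix.org" the URL-safe, unpadded base64 of the room id
        // is "IWFiYzptYXRyaXgub3Jn", so the conference user id becomes "@fs_IWFiYzptYXRyaXgub3Jn:matrix.org".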
String conferenceUserId = mConferenceUserIdByRoomId.get(roomId);
        // if it does not exist, compute it.
if (null == conferenceUserId) {
byte[] data = null;
try {
data = roomId.getBytes("UTF-8");
} catch (Exception e) {
Log.e(LOG_TAG, "conferenceUserIdForRoom failed " + e.getMessage(), e);
}
if (null == data) {
return null;
}
String base64 = Base64.encodeToString(data, Base64.NO_WRAP | Base64.URL_SAFE).replace("=", "");
conferenceUserId = "@" + USER_PREFIX + base64 + ":" + DOMAIN;
mConferenceUserIdByRoomId.put(roomId, conferenceUserId);
}
return conferenceUserId;
}
/**
* Test if the provided user is a valid conference user Id
*
* @param userId the user id to test
* @return true if it is a valid conference user id
*/
public static boolean isConferenceUserId(String userId) {
        // test first if it is a known conference user id
if (mConferenceUserIdByRoomId.values().contains(userId)) {
return true;
}
boolean res = false;
String prefix = "@" + USER_PREFIX;
String suffix = ":" + DOMAIN;
if (!TextUtils.isEmpty(userId) && userId.startsWith(prefix) && userId.endsWith(suffix)) {
String roomIdBase64 = userId.substring(prefix.length(), userId.length() - suffix.length());
try {
res = MXPatterns.isRoomId((new String(Base64.decode(roomIdBase64, Base64.NO_WRAP | Base64.URL_SAFE), "UTF-8")));
} catch (Exception e) {
Log.e(LOG_TAG, "isConferenceUserId : failed " + e.getMessage(), e);
}
}
return res;
}
/**
* Invite the conference user to a room.
* It is mandatory before starting a conference call.
*
* @param room the room
* @param callback the async callback
*/
private void inviteConferenceUser(final Room room, final ApiCallback<Void> callback) {
Log.d(LOG_TAG, "inviteConferenceUser " + room.getRoomId());
String conferenceUserId = getConferenceUserId(room.getRoomId());
RoomMember conferenceMember = room.getMember(conferenceUserId);
if ((null != conferenceMember) && TextUtils.equals(conferenceMember.membership, RoomMember.MEMBERSHIP_JOIN)) {
mUIThreadHandler.post(() -> callback.onSuccess(null));
} else {
room.invite(mSession, conferenceUserId, callback);
}
}
/**
* Get the room with the conference user dedicated for the passed room.
*
* @param roomId the room id.
* @param callback the async callback.
*/
private void getConferenceUserRoom(final String roomId, final ApiCallback<Room> callback) {
Log.d(LOG_TAG, "getConferenceUserRoom with room id " + roomId);
String conferenceUserId = getConferenceUserId(roomId);
Room conferenceRoom = null;
Collection<Room> rooms = mSession.getDataHandler().getStore().getRooms();
// Use an existing 1:1 with the conference user; else make one
for (Room room : rooms) {
if (room.isConferenceUserRoom() && room.getNumberOfMembers() == 2 && null != room.getMember(conferenceUserId)) {
conferenceRoom = room;
break;
}
}
if (null != conferenceRoom) {
Log.d(LOG_TAG, "getConferenceUserRoom : the room already exists");
final Room fConferenceRoom = conferenceRoom;
mSession.getDataHandler().getStore().commit();
mUIThreadHandler.post(new Runnable() {
@Override
public void run() {
callback.onSuccess(fConferenceRoom);
}
});
} else {
Log.d(LOG_TAG, "getConferenceUserRoom : create the room");
CreateRoomParams params = new CreateRoomParams();
params.preset = CreateRoomParams.PRESET_PRIVATE_CHAT;
params.invitedUserIds = Arrays.asList(conferenceUserId);
mSession.createRoom(params, new ApiCallback<String>() {
@Override
public void onSuccess(String roomId) {
Log.d(LOG_TAG, "getConferenceUserRoom : the room creation succeeds");
Room room = mSession.getDataHandler().getRoom(roomId);
if (null != room) {
room.setIsConferenceUserRoom(true);
mSession.getDataHandler().getStore().commit();
callback.onSuccess(room);
}
}
@Override
public void onNetworkError(Exception e) {
Log.e(LOG_TAG, "getConferenceUserRoom : failed " + e.getMessage(), e);
callback.onNetworkError(e);
}
@Override
public void onMatrixError(MatrixError e) {
Log.e(LOG_TAG, "getConferenceUserRoom : failed " + e.getMessage());
callback.onMatrixError(e);
}
@Override
public void onUnexpectedError(Exception e) {
Log.e(LOG_TAG, "getConferenceUserRoom : failed " + e.getMessage(), e);
callback.onUnexpectedError(e);
}
});
}
}
//==============================================================================================================
// listeners management
//==============================================================================================================
/**
* Add a listener
*
* @param listener the listener to add
*/
public void addListener(IMXCallsManagerListener listener) {
if (null != listener) {
synchronized (this) {
mListeners.add(listener);
}
}
}
/**
* Remove a listener
*
* @param listener the listener to remove
*/
public void removeListener(IMXCallsManagerListener listener) {
if (null != listener) {
synchronized (this) {
mListeners.remove(listener);
}
}
}
/**
* @return a copy of the listeners
*/
private Collection<IMXCallsManagerListener> getListeners() {
Collection<IMXCallsManagerListener> listeners;
synchronized (this) {
listeners = new HashSet<>(mListeners);
}
return listeners;
}
/**
* dispatch the onIncomingCall event to the listeners
*
* @param call the call
* @param unknownDevices the unknown e2e devices list.
*/
private void dispatchOnIncomingCall(IMXCall call, final MXUsersDevicesMap<MXDeviceInfo> unknownDevices) {
Log.d(LOG_TAG, "dispatchOnIncomingCall " + call.getCallId());
Collection<IMXCallsManagerListener> listeners = getListeners();
for (IMXCallsManagerListener l : listeners) {
try {
l.onIncomingCall(call, unknownDevices);
} catch (Exception e) {
Log.e(LOG_TAG, "dispatchOnIncomingCall " + e.getMessage(), e);
}
}
}
/**
* dispatch the call creation to the listeners
*
* @param call the call
*/
private void dispatchOnOutgoingCall(IMXCall call) {
Log.d(LOG_TAG, "dispatchOnOutgoingCall " + call.getCallId());
Collection<IMXCallsManagerListener> listeners = getListeners();
for (IMXCallsManagerListener l : listeners) {
try {
l.onOutgoingCall(call);
} catch (Exception e) {
Log.e(LOG_TAG, "dispatchOnOutgoingCall " + e.getMessage(), e);
}
}
}
/**
* dispatch the onCallHangUp event to the listeners
*
* @param call the call
*/
private void dispatchOnCallHangUp(IMXCall call) {
Log.d(LOG_TAG, "dispatchOnCallHangUp");
Collection<IMXCallsManagerListener> listeners = getListeners();
for (IMXCallsManagerListener l : listeners) {
try {
l.onCallHangUp(call);
} catch (Exception e) {
Log.e(LOG_TAG, "dispatchOnCallHangUp " + e.getMessage(), e);
}
}
}
/**
* dispatch the onVoipConferenceStarted event to the listeners
*
* @param roomId the room Id
*/
private void dispatchOnVoipConferenceStarted(String roomId) {
Log.d(LOG_TAG, "dispatchOnVoipConferenceStarted : " + roomId);
Collection<IMXCallsManagerListener> listeners = getListeners();
for (IMXCallsManagerListener l : listeners) {
try {
l.onVoipConferenceStarted(roomId);
} catch (Exception e) {
Log.e(LOG_TAG, "dispatchOnVoipConferenceStarted " + e.getMessage(), e);
}
}
}
/**
* dispatch the onVoipConferenceFinished event to the listeners
*
* @param roomId the room Id
*/
private void dispatchOnVoipConferenceFinished(String roomId) {
Log.d(LOG_TAG, "onVoipConferenceFinished : " + roomId);
Collection<IMXCallsManagerListener> listeners = getListeners();
for (IMXCallsManagerListener l : listeners) {
try {
l.onVoipConferenceFinished(roomId);
} catch (Exception e) {
Log.e(LOG_TAG, "dispatchOnVoipConferenceFinished " + e.getMessage(), e);
}
}
}
}
<|start_filename|>matrix-sdk/src/main/java/org/matrix/androidsdk/rest/model/bingrules/RoomMemberCountCondition.java<|end_filename|>
/*
* Copyright 2014 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.rest.model.bingrules;
import org.matrix.androidsdk.core.Log;
import org.matrix.androidsdk.data.Room;
public class RoomMemberCountCondition extends Condition {
private static final String LOG_TAG = RoomMemberCountCondition.class.getSimpleName();
// NB: Leave the strings in order of descending length
private static final String[] PREFIX_ARR = new String[]{"==", "<=", ">=", "<", ">", ""};
public String is;
private String comparisonPrefix = null;
private int limit;
private boolean parseError = false;
public RoomMemberCountCondition() {
kind = Condition.KIND_ROOM_MEMBER_COUNT;
}
@Override
public String toString() {
return "RoomMemberCountCondition{" + "is='" + is + "'}'";
}
@SuppressWarnings("SimplifiableIfStatement")
public boolean isSatisfied(Room room) {
// sanity check
if (room == null) return false;
if (parseError) return false;
// Parse the is field into prefix and number the first time
if (comparisonPrefix == null) {
parseIsField();
if (parseError) return false;
}
int numMembers = room.getNumberOfJoinedMembers();
if ("==".equals(comparisonPrefix) || "".equals(comparisonPrefix)) {
return numMembers == limit;
}
if ("<".equals(comparisonPrefix)) {
return numMembers < limit;
}
if (">".equals(comparisonPrefix)) {
return numMembers > limit;
}
if ("<=".equals(comparisonPrefix)) {
return numMembers <= limit;
}
if (">=".equals(comparisonPrefix)) {
return numMembers >= limit;
}
return false;
}
/**
* Parse the is field to extract meaningful information.
*/
protected void parseIsField() {
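        // Example (illustrative): is = ">=2" yields comparisonPrefix = ">=" and limit = 2;
        // a bare "2" matches the empty prefix and is treated like "==2".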
for (String prefix : PREFIX_ARR) {
if (is.startsWith(prefix)) {
comparisonPrefix = prefix;
break;
}
}
if (comparisonPrefix == null) {
parseError = true;
} else {
try {
limit = Integer.parseInt(is.substring(comparisonPrefix.length()));
} catch (NumberFormatException e) {
parseError = true;
}
}
if (parseError) {
Log.e(LOG_TAG, "parsing error : " + is);
}
}
}
<|start_filename|>matrix-sdk-crypto/src/main/java/org/matrix/androidsdk/crypto/IncomingRoomKeyRequest.java<|end_filename|>
/*
* Copyright 2016 OpenMarket Ltd
* Copyright 2018 New Vector Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.matrix.androidsdk.crypto;
import org.matrix.androidsdk.crypto.interfaces.CryptoEvent;
import org.matrix.androidsdk.crypto.model.crypto.RoomKeyRequestBody;
import org.matrix.androidsdk.crypto.rest.model.crypto.RoomKeyShareRequest;
import java.io.Serializable;
/**
* IncomingRoomKeyRequest class defines the incoming room keys request.
*
* Keep Serializable for legacy FileStore (which will be removed in the future)
*/
public class IncomingRoomKeyRequest implements Serializable {
/**
* The user id
*/
public String mUserId;
/**
* The device id
*/
public String mDeviceId;
/**
* The request id
*/
public String mRequestId;
/**
* The request body
*/
public RoomKeyRequestBody mRequestBody;
/**
* The runnable to call to accept to share the keys
*/
public transient Runnable mShare;
/**
* The runnable to call to ignore the key share request.
*/
public transient Runnable mIgnore;
/**
* Constructor
*
* @param event the event
*/
public IncomingRoomKeyRequest(CryptoEvent event) {
mUserId = event.getSender();
RoomKeyShareRequest roomKeyShareRequest = event.toRoomKeyShareRequest();
mDeviceId = roomKeyShareRequest.requestingDeviceId;
mRequestId = roomKeyShareRequest.requestId;
mRequestBody = (null != roomKeyShareRequest.body) ? roomKeyShareRequest.body : new RoomKeyRequestBody();
}
/**
* Constructor for object creation from crypto store
*/
public IncomingRoomKeyRequest() {
}
}
| Targunitoth/matrix-android-sdk |
<|start_filename|>Content.Shared/Jittering/SharedJitteringSystem.cs<|end_filename|>
using System;
using System.Collections.Generic;
using Robust.Shared.GameObjects;
using Robust.Shared.GameStates;
using Robust.Shared.IoC;
using Robust.Shared.Timing;
namespace Content.Shared.Jittering
{
/// <summary>
/// A system for applying a jitter animation to any entity.
/// </summary>
public abstract class SharedJitteringSystem : EntitySystem
{
[Dependency] protected readonly IGameTiming GameTiming = default!;
public float MaxAmplitude = 300f;
public float MinAmplitude = 1f;
public float MaxFrequency = 10f;
public float MinFrequency = 1f;
/// <summary>
/// List of jitter components to be removed, cached so we don't allocate it every tick.
/// </summary>
private readonly List<JitteringComponent> _removeList = new();
public override void Initialize()
{
SubscribeLocalEvent<JitteringComponent, ComponentGetState>(OnGetState);
SubscribeLocalEvent<JitteringComponent, ComponentHandleState>(OnHandleState);
}
private void OnGetState(EntityUid uid, JitteringComponent component, ref ComponentGetState args)
{
args.State = new JitteringComponentState(component.EndTime, component.Amplitude, component.Frequency);
}
private void OnHandleState(EntityUid uid, JitteringComponent component, ref ComponentHandleState args)
{
if (args.Current is not JitteringComponentState jitteringState)
return;
component.EndTime = jitteringState.EndTime;
component.Amplitude = jitteringState.Amplitude;
component.Frequency = jitteringState.Frequency;
}
/// <summary>
/// Applies a jitter effect to the specified entity.
/// You can apply this to any entity whatsoever, so be careful what you use it on!
/// </summary>
/// <remarks>
        /// If the entity is already jittering, the jitter values are only raised when the new values are greater
        /// than the current ones, unless <see cref="forceValueChange"/> is true, in which case they always overwrite the current values.
/// </remarks>
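        /// <example>
        /// Usage sketch (hypothetical caller, not taken from this codebase): a system holding a reference
        /// to this one could run
        /// <code>_jittering.DoJitter(uid, TimeSpan.FromSeconds(2), amplitude: 20f, frequency: 8f);</code>
        /// </example>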
/// <param name="uid">Entity in question.</param>
/// <param name="time">For how much time to apply the effect.</param>
/// <param name="amplitude">Jitteriness of the animation. See <see cref="MaxAmplitude"/> and <see cref="MinAmplitude"/>.</param>
/// <param name="frequency">Frequency for jittering. See <see cref="MaxFrequency"/> and <see cref="MinFrequency"/>.</param>
/// <param name="forceValueChange">Whether to change any existing jitter value even if they're greater than the ones we're setting.</param>
public void DoJitter(EntityUid uid, TimeSpan time, float amplitude = 10f, float frequency = 4f, bool forceValueChange = false)
{
var jittering = EntityManager.EnsureComponent<JitteringComponent>(uid);
var endTime = GameTiming.CurTime + time;
amplitude = Math.Clamp(amplitude, MinAmplitude, MaxAmplitude);
frequency = Math.Clamp(frequency, MinFrequency, MaxFrequency);
if (forceValueChange || jittering.EndTime < endTime)
jittering.EndTime = endTime;
if(forceValueChange || jittering.Amplitude < amplitude)
jittering.Amplitude = amplitude;
if (forceValueChange || jittering.Frequency < frequency)
jittering.Frequency = frequency;
jittering.Dirty();
}
/// <summary>
/// Immediately stops any jitter animation from an entity.
/// </summary>
/// <param name="uid">The entity in question.</param>
public void StopJitter(EntityUid uid)
{
if (!EntityManager.HasComponent<JitteringComponent>(uid))
return;
EntityManager.RemoveComponent<JitteringComponent>(uid);
}
public override void Update(float frameTime)
{
foreach (var jittering in EntityManager.EntityQuery<JitteringComponent>())
{
if(jittering.EndTime <= GameTiming.CurTime)
_removeList.Add(jittering);
}
if (_removeList.Count == 0)
return;
foreach (var jittering in _removeList)
{
jittering.Owner.RemoveComponent<JitteringComponent>();
}
_removeList.Clear();
}
}
}
<|start_filename|>Content.Server/GameTicking/Commands/RestartRoundCommand.cs<|end_filename|>
using System;
using Content.Server.Administration;
using Content.Server.RoundEnd;
using Content.Shared.Administration;
using Robust.Shared.Console;
using Robust.Shared.GameObjects;
using Robust.Shared.IoC;
namespace Content.Server.GameTicking.Commands
{
[AdminCommand(AdminFlags.Server)]
public class RestartRoundCommand : IConsoleCommand
{
public string Command => "restartround";
public string Description => "Ends the current round and starts the countdown for the next lobby.";
public string Help => String.Empty;
public void Execute(IConsoleShell shell, string argStr, string[] args)
{
EntitySystem.Get<RoundEndSystem>().EndRound();
}
}
[AdminCommand(AdminFlags.Server)]
public class RestartRoundNowCommand : IConsoleCommand
{
public string Command => "restartroundnow";
public string Description => "Moves the server from PostRound to a new PreRoundLobby.";
public string Help => String.Empty;
public void Execute(IConsoleShell shell, string argStr, string[] args)
{
EntitySystem.Get<GameTicker>().EndRound();
}
}
}
| JasperJRoth/space-station-14 |
<|start_filename|>Dockerfile<|end_filename|>
FROM node:boron
MAINTAINER <NAME> <<EMAIL>>
# Use Alibaba's NPM mirror
RUN npm set registry https://registry.npm.taobao.org/
# create the working directory
RUN mkdir -p /usr/projects/movie-board
WORKDIR /usr/projects/movie-board
# Install dependencies
COPY package.json /usr/projects/movie-board
RUN npm install --production
# Copy the remaining code and resources
COPY . /usr/projects/movie-board
EXPOSE 8080
# Use CMD rather than ENTRYPOINT: CMD can be overridden at runtime
CMD [ "npm", "start" ]
| MagicCube/movie-board |
<|start_filename|>vitest.config.js<|end_filename|>
/// <reference types="vitest" />
import { defineConfig } from 'vite'
export default defineConfig({
test: {
root: 'tests',
resolve: {
extensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node']
},
testTimeout: 2000,
    globals: true
}
})
| fossabot/webpack-to-vite |
<|start_filename|>emulator/include/board/track.hpp<|end_filename|>
#pragma once
#include <risc.hpp>
#include <queue>
#include <optional>
#include <map>
#include <mutex>
#include <imgui.h>
#define IMGUI_DEFINE_MATH_OPERATORS
#include <imgui_internal.h>
namespace vc::pcb {
class Connectable;
enum class Direction {
MISO,
MOSI
};
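    // A Track models a single PCB trace between two Connectable endpoints.
    // In buffered mode every written byte is queued (FIFO) until it is read;
    // in unbuffered mode only the most recent byte is latched and reading it clears the value.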
class Track {
public:
Track(Direction direction, bool buffered, Connectable *from, Connectable *to) : direction(direction), buffered(buffered), from(from), to(to) {
}
[[nodiscard]]
std::optional<u8> getValue() {
std::scoped_lock lk(this->modifyMutex);
if (this->buffered) {
if (this->receivedData.empty())
return { };
auto item = this->receivedData.front();
this->receivedData.pop();
return item;
} else {
if (!this->value.has_value())
return { };
auto item = this->value.value();
this->value.reset();
return item;
}
}
[[nodiscard]]
bool hasValue() {
std::scoped_lock lock(this->modifyMutex);
if (this->buffered)
return !this->receivedData.empty();
else
return this->value.has_value();
}
void setValue(u8 value) {
std::scoped_lock lock(this->modifyMutex);
if (this->buffered)
this->receivedData.emplace(value);
else
this->value = value;
}
[[nodiscard]]
std::pair<Connectable*, Connectable*> getEndpoints() const {
return { from, to };
}
[[nodiscard]]
Direction getDirection() const {
return this->direction;
}
private:
Direction direction;
bool buffered;
std::mutex modifyMutex;
std::optional<u8> value;
std::queue<u8> receivedData;
Connectable *from, *to;
};
class Connectable {
public:
friend class Board;
protected:
auto getTrack(std::string_view name) {
return this->connectedTracks[std::string(name)];
}
void linkTrack(const std::string &name, pcb::Track *track) {
this->connectedTracks.insert({ name, track });
}
[[nodiscard]]
bool dataAvailable() {
for (auto &[name, track] : this->connectedTracks)
if (track->hasValue())
return true;
return false;
}
auto getConnectedTrackNames() {
std::vector<std::string_view> result;
for (auto &[name, track] : this->connectedTracks)
result.push_back(name);
return result;
}
virtual void draw(ImVec2 start, ImDrawList *drawList) {
drawList->AddRectFilled(start + position, start + position + size, ImColor(0x10, 0x10, 0x10, 0xFF));
}
public:
[[nodiscard]]
ImVec2 getPosition() const {
return this->position;
}
void setPosition(ImVec2 pos) {
this->position = pos;
}
[[nodiscard]]
ImVec2 getSize() const {
return this->size;
}
void setSize(ImVec2 size) {
this->size = size;
}
private:
std::map<std::string, pcb::Track*> connectedTracks;
ImVec2 position;
ImVec2 size;
};
}
<|start_filename|>emulator/include/devices/cpu/core/mmio/device.hpp<|end_filename|>
#pragma once
#include <devices/cpu/core/io_pin.hpp>
#include <map>
namespace vc::dev::cpu::mmio {
class MMIODevice {
public:
MMIODevice(std::string_view name, u64 base, size_t size) : name(name), base(base), size(size) { }
constexpr auto operator<=>(const MMIODevice &other) const {
return this->base <=> other.base;
}
[[nodiscard]]
constexpr auto getBase() const noexcept {
return this->base;
}
[[nodiscard]]
constexpr auto getEnd() const noexcept {
return this->base + this->size - 1;
}
[[nodiscard]]
constexpr auto getSize() const noexcept {
return this->size;
}
virtual void doTick() noexcept final {
if (needsUpdate()) {
this->tick();
}
}
[[nodiscard]]
virtual u8& byte(u64 offset) noexcept = 0;
[[nodiscard]]
virtual u16& halfWord(u64 offset) noexcept = 0;
[[nodiscard]]
virtual u32& word(u64 offset) noexcept = 0;
[[nodiscard]]
virtual u64& doubleWord(u64 offset) noexcept = 0;
virtual bool needsUpdate() noexcept { return false; }
[[nodiscard]]
std::string_view getName() const {
return this->name;
}
protected:
virtual void tick() noexcept { }
private:
std::string name;
u64 base;
u64 size;
};
}
<|start_filename|>emulator/include/log.hpp<|end_filename|>
#pragma once
#include <fmt/core.h>
#include <fmt/color.h>
namespace vc::log {
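    // Thin wrappers around fmt::print that add a colored level prefix.
    // Example usage (with an assumed variable "size"): log::info("loaded {} byte(s)", size);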
void debug(std::string_view fmt, auto ... args) {
#if defined(DEBUG)
fmt::print(fg(fmt::color::green_yellow) | fmt::emphasis::bold, "[DEBUG] ");
fmt::print(fmt, args...);
fmt::print("\n");
fflush(stdout);
#endif
}
void info(std::string_view fmt, auto ... args) {
fmt::print(fg(fmt::color::cornflower_blue) | fmt::emphasis::bold, "[INFO] ");
fmt::print(fmt, args...);
fmt::print("\n");
fflush(stdout);
}
void warn(std::string_view fmt, auto ... args) {
fmt::print(fg(fmt::color::light_golden_rod_yellow) | fmt::emphasis::bold, "[WARN] ");
fmt::print(fmt, args...);
fmt::print("\n");
fflush(stdout);
}
void error(std::string_view fmt, auto ... args) {
fmt::print(fg(fmt::color::light_coral) | fmt::emphasis::bold, "[ERROR] ");
fmt::print(fmt, args...);
fmt::print("\n");
fflush(stdout);
}
void fatal(std::string_view fmt, auto ... args) {
fmt::print(fg(fmt::color::crimson) | fmt::emphasis::bold, "[FATAL] ");
fmt::print(fmt, args...);
fmt::print("\n");
fflush(stdout);
}
}
<|start_filename|>emulator/include/board/board_test.hpp<|end_filename|>
#pragma once
#include <board/board.hpp>
#include <devices/cpu/cpu.hpp>
#include <devices/cpu/core/mmio/memory.hpp>
#include <devices/cpu/core/mmio/uart.hpp>
#include <devices/cpu/core/mmio/gpio.hpp>
#include <devices/pin_header.hpp>
#include <devices/button.hpp>
#include <devices/led.hpp>
namespace vc::pcb {
class TestBoard : public Board {
public:
TestBoard() : Board("Test Board", { 500, 300 }),
cpu(createDevice<dev::CPUDevice>(1, ImVec2{ 50, 50 })),
uartHeader(createDevice<dev::PinHeader>(ImVec2{ 200, 250 })),
buttonA(createDevice<dev::Button>(ImVec2({ 300, 250 }))),
ledA(createDevice<dev::LED>(ImVec2({ 100, 200 }))),
cpuFlash(0x0000'0000, 1_MiB),
cpuRam(0x1000'0000, 2_MiB),
cpuUartA(0x5000'0000),
cpuGpioA(0x6000'0000) {
auto &cpuAddressSpace = cpu.getAddressSpace();
cpu.attachToPin(0, cpuUartA.txPin);
cpu.attachToPin(1, cpuGpioA.gpioPins[0]);
cpu.attachToPin(2, cpuGpioA.gpioPins[1]);
cpuAddressSpace.addDevice(cpuFlash);
cpuAddressSpace.addDevice(cpuRam);
cpuAddressSpace.addDevice(cpuUartA);
cpuAddressSpace.addDevice(cpuGpioA);
cpuAddressSpace.loadELF("kernel.elf");
this->createTrack(Direction::MOSI, "uarta_tx", cpu, uartHeader, true);
this->createTrack(Direction::MISO, "buttona", cpu, buttonA);
this->createTrack(Direction::MOSI, "leda", cpu, ledA);
cpu.attachPinToTrack(0, "uarta_tx");
cpu.attachPinToTrack(1, "buttona");
cpu.attachPinToTrack(2, "leda");
}
dev::cpu::mmio::Memory cpuFlash;
dev::cpu::mmio::Memory cpuRam;
dev::cpu::mmio::UART cpuUartA;
dev::cpu::mmio::GPIO cpuGpioA;
dev::CPUDevice &cpu;
dev::PinHeader &uartHeader;
dev::Button &buttonA;
dev::LED &ledA;
};
}
<|start_filename|>risc-example/subprojects/kern/source/main.cpp<|end_filename|>
#include <types.hpp>
void print(const char* string) {
char *UARTA_TX = reinterpret_cast<char*>(0x5000'0004);
for (const char* s = string; *s != 0x00; s++)
*UARTA_TX = *s;
}
int main() {
print("Hello RISC-V!\n");
volatile u8 *GPIOA_CR = reinterpret_cast<volatile u8*>(0x6000'0000);
volatile u8 *GPIOA_IN = reinterpret_cast<volatile u8*>(0x6000'0004);
volatile u8 *GPIOA_OUT = reinterpret_cast<volatile u8*>(0x6000'0008);
*GPIOA_CR = 0b10;
while (true) {
if (*GPIOA_IN == 0b01)
*GPIOA_OUT = 0b10;
else
*GPIOA_OUT = 0b00;
}
}
[[gnu::section(".crt0")]]
[[gnu::naked]]
extern "C" void _start() {
asm("lui sp, 0x10020");
asm("tail main");
}
<|start_filename|>emulator/include/devices/button.hpp<|end_filename|>
#pragma once
#include <board/track.hpp>
namespace vc::dev {
class Button : public Device, public pcb::Connectable {
public:
explicit Button(ImVec2 pos) {
this->setPosition(pos);
this->setSize({ 31, 31 });
}
auto get(std::string_view name) {
return this->getTrack(name);
}
void tick() override {
for (auto &trackName : this->getConnectedTrackNames()) {
auto track = this->getTrack(trackName);
track->setValue(this->pressed);
}
}
bool needsUpdate() override { return true; }
void reset() override {
pressed = false;
}
void draw(ImVec2 start, ImDrawList *drawList) override {
this->pressed = ImGui::IsMouseHoveringRect(start + getPosition(), start + getPosition() + getSize()) && ImGui::IsMouseDown(ImGuiMouseButton_Left);
drawList->AddRectFilled(start + getPosition(), start + getPosition() + getSize(), ImColor(0xA0, 0xA0, 0xA0, 0xFF));
drawList->AddCircleFilled(start + getPosition() + getSize() / 2, 9, this->pressed ? ImColor(0x80, 0x20, 0x20, 0xFF) : ImColor(0xA0, 0x20, 0x20, 0xFF));
}
private:
bool pressed = false;
};
}
<|start_filename|>emulator/include/risc.hpp<|end_filename|>
#pragma once
#include <cstdint>
#include <log.hpp>
using u8 = std::uint8_t;
using u16 = std::uint16_t;
using u32 = std::uint32_t;
using u64 = std::uint64_t;
using i8 = std::int8_t;
using i16 = std::int16_t;
using i32 = std::int32_t;
using i64 = std::int64_t;
#define NO_MANGLE extern "C"
#define PACKED [[gnu::packed]]
<|start_filename|>emulator/external/ImGui/include/imgui_vc_extensions.h<|end_filename|>
#pragma once
#include <functional>
#include <imgui.h>
namespace ImGui {
bool Hyperlink(const char* label, const ImVec2& size_arg = ImVec2(0, 0), ImGuiButtonFlags flags = 0);
bool BulletHyperlink(const char* label, const ImVec2& size_arg = ImVec2(0, 0), ImGuiButtonFlags flags = 0);
bool DescriptionButton(const char* label, const char* description, const ImVec2& size_arg = ImVec2(0, 0), ImGuiButtonFlags flags = 0);
void UnderlinedText(const char* label, ImColor color, const ImVec2& size_arg = ImVec2(0, 0));
void Disabled(const std::function<void()> &widgets, bool disabled);
void TextSpinner(const char* label);
void Header(const char *label, bool firstEntry = false);
inline bool HasSecondPassed() {
return static_cast<ImU32>(ImGui::GetTime() * 100) % 100 <= static_cast<ImU32>(ImGui::GetIO().DeltaTime * 100);
}
std::tuple<ImTextureID, int, int> LoadImageFromPath(const char *path);
void UnloadImage(ImTextureID texture);
enum ImGuiCustomCol {
ImGuiCustomCol_DescButton,
ImGuiCustomCol_DescButtonHovered,
ImGuiCustomCol_DescButtonActive,
ImGuiCustomCol_COUNT
};
struct ImHexCustomData {
ImVec4 Colors[ImGuiCustomCol_COUNT];
};
ImU32 GetCustomColorU32(ImGuiCustomCol idx, float alpha_mul = 1.0F);
void StyleCustomColorsDark();
void StyleCustomColorsLight();
void StyleCustomColorsClassic();
}
<|start_filename|>emulator/include/elf.hpp<|end_filename|>
/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
#ifndef _UAPI_LINUX_ELF_H
#define _UAPI_LINUX_ELF_H
#include <risc.hpp>
/* 32-bit ELF base types. */
typedef u32 Elf32_Addr;
typedef u16 Elf32_Half;
typedef u32 Elf32_Off;
typedef i32 Elf32_Sword;
typedef u32 Elf32_Word;
/* 64-bit ELF base types. */
typedef u64 Elf64_Addr;
typedef u16 Elf64_Half;
typedef i16 Elf64_SHalf;
typedef u64 Elf64_Off;
typedef i32 Elf64_Sword;
typedef u32 Elf64_Word;
typedef u64 Elf64_Xword;
typedef i64 Elf64_Sxword;
/* These constants are for the segment types stored in the image headers */
#define PT_NULL 0
#define PT_LOAD 1
#define PT_DYNAMIC 2
#define PT_INTERP 3
#define PT_NOTE 4
#define PT_SHLIB 5
#define PT_PHDR 6
#define PT_TLS 7 /* Thread local storage segment */
#define PT_LOOS 0x60000000 /* OS-specific */
#define PT_HIOS 0x6fffffff /* OS-specific */
#define PT_LOPROC 0x70000000
#define PT_HIPROC 0x7fffffff
#define PT_GNU_EH_FRAME 0x6474e550
#define PT_GNU_PROPERTY 0x6474e553
#define PT_GNU_STACK (PT_LOOS + 0x474e551)
/*
* Extended Numbering
*
* If the real number of program header table entries is larger than
* or equal to PN_XNUM(0xffff), it is set to sh_info field of the
* section header at index 0, and PN_XNUM is set to e_phnum
* field. Otherwise, the section header at index 0 is zero
* initialized, if it exists.
*
* Specifications are available in:
*
* - Oracle: Linker and Libraries.
* Part No: 817–1984–19, August 2011.
* https://docs.oracle.com/cd/E18752_01/pdf/817-1984.pdf
*
* - System V ABI AMD64 Architecture Processor Supplement
* Draft Version 0.99.4,
* January 13, 2010.
* http://www.cs.washington.edu/education/courses/cse351/12wi/supp-docs/abi.pdf
*/
#define PN_XNUM 0xffff
/* These constants define the different elf file types */
#define ET_NONE 0
#define ET_REL 1
#define ET_EXEC 2
#define ET_DYN 3
#define ET_CORE 4
#define ET_LOPROC 0xff00
#define ET_HIPROC 0xffff
/* This is the info that is needed to parse the dynamic section of the file */
#define DT_NULL 0
#define DT_NEEDED 1
#define DT_PLTRELSZ 2
#define DT_PLTGOT 3
#define DT_HASH 4
#define DT_STRTAB 5
#define DT_SYMTAB 6
#define DT_RELA 7
#define DT_RELASZ 8
#define DT_RELAENT 9
#define DT_STRSZ 10
#define DT_SYMENT 11
#define DT_INIT 12
#define DT_FINI 13
#define DT_SONAME 14
#define DT_RPATH 15
#define DT_SYMBOLIC 16
#define DT_REL 17
#define DT_RELSZ 18
#define DT_RELENT 19
#define DT_PLTREL 20
#define DT_DEBUG 21
#define DT_TEXTREL 22
#define DT_JMPREL 23
#define DT_ENCODING 32
#define OLD_DT_LOOS 0x60000000
#define DT_LOOS 0x6000000d
#define DT_HIOS 0x6ffff000
#define DT_VALRNGLO 0x6ffffd00
#define DT_VALRNGHI 0x6ffffdff
#define DT_ADDRRNGLO 0x6ffffe00
#define DT_ADDRRNGHI 0x6ffffeff
#define DT_VERSYM 0x6ffffff0
#define DT_RELACOUNT 0x6ffffff9
#define DT_RELCOUNT 0x6ffffffa
#define DT_FLAGS_1 0x6ffffffb
#define DT_VERDEF 0x6ffffffc
#define DT_VERDEFNUM 0x6ffffffd
#define DT_VERNEED 0x6ffffffe
#define DT_VERNEEDNUM 0x6fffffff
#define OLD_DT_HIOS 0x6fffffff
#define DT_LOPROC 0x70000000
#define DT_HIPROC 0x7fffffff
/* This info is needed when parsing the symbol table */
#define STB_LOCAL 0
#define STB_GLOBAL 1
#define STB_WEAK 2
#define STT_NOTYPE 0
#define STT_OBJECT 1
#define STT_FUNC 2
#define STT_SECTION 3
#define STT_FILE 4
#define STT_COMMON 5
#define STT_TLS 6
#define ELF_ST_BIND(x) ((x) >> 4)
#define ELF_ST_TYPE(x) (((unsigned int) x) & 0xf)
#define ELF32_ST_BIND(x) ELF_ST_BIND(x)
#define ELF32_ST_TYPE(x) ELF_ST_TYPE(x)
#define ELF64_ST_BIND(x) ELF_ST_BIND(x)
#define ELF64_ST_TYPE(x) ELF_ST_TYPE(x)
typedef struct dynamic{
Elf32_Sword d_tag;
union{
Elf32_Sword d_val;
Elf32_Addr d_ptr;
} d_un;
} Elf32_Dyn;
typedef struct {
Elf64_Sxword d_tag; /* entry tag value */
union {
Elf64_Xword d_val;
Elf64_Addr d_ptr;
} d_un;
} Elf64_Dyn;
/* The following are used with relocations */
#define ELF32_R_SYM(x) ((x) >> 8)
#define ELF32_R_TYPE(x) ((x) & 0xff)
#define ELF64_R_SYM(i) ((i) >> 32)
#define ELF64_R_TYPE(i) ((i) & 0xffffffff)
typedef struct elf32_rel {
Elf32_Addr r_offset;
Elf32_Word r_info;
} Elf32_Rel;
typedef struct elf64_rel {
Elf64_Addr r_offset; /* Location at which to apply the action */
Elf64_Xword r_info; /* index and type of relocation */
} Elf64_Rel;
typedef struct elf32_rela{
Elf32_Addr r_offset;
Elf32_Word r_info;
Elf32_Sword r_addend;
} Elf32_Rela;
typedef struct elf64_rela {
Elf64_Addr r_offset; /* Location at which to apply the action */
Elf64_Xword r_info; /* index and type of relocation */
Elf64_Sxword r_addend; /* Constant addend used to compute value */
} Elf64_Rela;
typedef struct elf32_sym{
Elf32_Word st_name;
Elf32_Addr st_value;
Elf32_Word st_size;
unsigned char st_info;
unsigned char st_other;
Elf32_Half st_shndx;
} Elf32_Sym;
typedef struct elf64_sym {
Elf64_Word st_name; /* Symbol name, index in string tbl */
unsigned char st_info; /* Type and binding attributes */
unsigned char st_other; /* No defined meaning, 0 */
Elf64_Half st_shndx; /* Associated section index */
Elf64_Addr st_value; /* Value of the symbol */
Elf64_Xword st_size; /* Associated symbol size */
} Elf64_Sym;
#define EI_NIDENT 16
typedef struct elf32_hdr{
unsigned char e_ident[EI_NIDENT];
Elf32_Half e_type;
Elf32_Half e_machine;
Elf32_Word e_version;
Elf32_Addr e_entry; /* Entry point */
Elf32_Off e_phoff;
Elf32_Off e_shoff;
Elf32_Word e_flags;
Elf32_Half e_ehsize;
Elf32_Half e_phentsize;
Elf32_Half e_phnum;
Elf32_Half e_shentsize;
Elf32_Half e_shnum;
Elf32_Half e_shstrndx;
} Elf32_Ehdr;
typedef struct elf64_hdr {
unsigned char e_ident[EI_NIDENT]; /* ELF "magic number" */
Elf64_Half e_type;
Elf64_Half e_machine;
Elf64_Word e_version;
Elf64_Addr e_entry; /* Entry point virtual address */
Elf64_Off e_phoff; /* Program header table file offset */
Elf64_Off e_shoff; /* Section header table file offset */
Elf64_Word e_flags;
Elf64_Half e_ehsize;
Elf64_Half e_phentsize;
Elf64_Half e_phnum;
Elf64_Half e_shentsize;
Elf64_Half e_shnum;
Elf64_Half e_shstrndx;
} Elf64_Ehdr;
/* These constants define the permissions on sections in the program
header, p_flags. */
#define PF_R 0x4
#define PF_W 0x2
#define PF_X 0x1
typedef struct elf32_phdr{
Elf32_Word p_type;
Elf32_Off p_offset;
Elf32_Addr p_vaddr;
Elf32_Addr p_paddr;
Elf32_Word p_filesz;
Elf32_Word p_memsz;
Elf32_Word p_flags;
Elf32_Word p_align;
} Elf32_Phdr;
typedef struct elf64_phdr {
Elf64_Word p_type;
Elf64_Word p_flags;
Elf64_Off p_offset; /* Segment file offset */
Elf64_Addr p_vaddr; /* Segment virtual address */
Elf64_Addr p_paddr; /* Segment physical address */
Elf64_Xword p_filesz; /* Segment size in file */
Elf64_Xword p_memsz; /* Segment size in memory */
Elf64_Xword p_align; /* Segment alignment, file & memory */
} Elf64_Phdr;
/* sh_type */
#define SHT_NULL 0
#define SHT_PROGBITS 1
#define SHT_SYMTAB 2
#define SHT_STRTAB 3
#define SHT_RELA 4
#define SHT_HASH 5
#define SHT_DYNAMIC 6
#define SHT_NOTE 7
#define SHT_NOBITS 8
#define SHT_REL 9
#define SHT_SHLIB 10
#define SHT_DYNSYM 11
#define SHT_NUM 12
#define SHT_LOPROC 0x70000000
#define SHT_HIPROC 0x7fffffff
#define SHT_LOUSER 0x80000000
#define SHT_HIUSER 0xffffffff
/* sh_flags */
#define SHF_WRITE 0x1
#define SHF_ALLOC 0x2
#define SHF_EXECINSTR 0x4
#define SHF_RELA_LIVEPATCH 0x00100000
#define SHF_RO_AFTER_INIT 0x00200000
#define SHF_MASKPROC 0xf0000000
/* special section indexes */
#define SHN_UNDEF 0
#define SHN_LORESERVE 0xff00
#define SHN_LOPROC 0xff00
#define SHN_HIPROC 0xff1f
#define SHN_LIVEPATCH 0xff20
#define SHN_ABS 0xfff1
#define SHN_COMMON 0xfff2
#define SHN_HIRESERVE 0xffff
typedef struct elf32_shdr {
Elf32_Word sh_name;
Elf32_Word sh_type;
Elf32_Word sh_flags;
Elf32_Addr sh_addr;
Elf32_Off sh_offset;
Elf32_Word sh_size;
Elf32_Word sh_link;
Elf32_Word sh_info;
Elf32_Word sh_addralign;
Elf32_Word sh_entsize;
} Elf32_Shdr;
typedef struct elf64_shdr {
Elf64_Word sh_name; /* Section name, index in string tbl */
Elf64_Word sh_type; /* Type of section */
Elf64_Xword sh_flags; /* Miscellaneous section attributes */
Elf64_Addr sh_addr; /* Section virtual addr at execution */
Elf64_Off sh_offset; /* Section file offset */
Elf64_Xword sh_size; /* Size of section in bytes */
Elf64_Word sh_link; /* Index of another section */
Elf64_Word sh_info; /* Additional section information */
Elf64_Xword sh_addralign; /* Section alignment */
Elf64_Xword sh_entsize; /* Entry size if section holds table */
} Elf64_Shdr;
#define EI_MAG0 0 /* e_ident[] indexes */
#define EI_MAG1 1
#define EI_MAG2 2
#define EI_MAG3 3
#define EI_CLASS 4
#define EI_DATA 5
#define EI_VERSION 6
#define EI_OSABI 7
#define EI_PAD 8
#define ELFMAG0 0x7f /* EI_MAG */
#define ELFMAG1 'E'
#define ELFMAG2 'L'
#define ELFMAG3 'F'
#define ELFMAG "\177ELF"
#define SELFMAG 4
#define ELFCLASSNONE 0 /* EI_CLASS */
#define ELFCLASS32 1
#define ELFCLASS64 2
#define ELFCLASSNUM 3
#define ELFDATANONE 0 /* e_ident[EI_DATA] */
#define ELFDATA2LSB 1
#define ELFDATA2MSB 2
#define EV_NONE 0 /* e_version, EI_VERSION */
#define EV_CURRENT 1
#define EV_NUM 2
#define ELFOSABI_NONE 0
#define ELFOSABI_LINUX 3
#ifndef ELF_OSABI
#define ELF_OSABI ELFOSABI_NONE
#endif
/*
* Notes used in ET_CORE. Architectures export some of the arch register sets
* using the corresponding note types via the PTRACE_GETREGSET and
* PTRACE_SETREGSET requests.
* The note name for all these is "LINUX".
*/
#define NT_PRSTATUS 1
#define NT_PRFPREG 2
#define NT_PRPSINFO 3
#define NT_TASKSTRUCT 4
#define NT_AUXV 6
/*
* Note to userspace developers: size of NT_SIGINFO note may increase
 * in the future to accommodate more fields, don't assume it is fixed!
*/
#define NT_SIGINFO 0x53494749
#define NT_FILE 0x46494c45
#define NT_PRXFPREG 0x46e62b7f /* copied from gdb5.1/include/elf/common.h */
#define NT_PPC_VMX 0x100 /* PowerPC Altivec/VMX registers */
#define NT_PPC_SPE 0x101 /* PowerPC SPE/EVR registers */
#define NT_PPC_VSX 0x102 /* PowerPC VSX registers */
#define NT_PPC_TAR 0x103 /* Target Address Register */
#define NT_PPC_PPR 0x104 /* Program Priority Register */
#define NT_PPC_DSCR 0x105 /* Data Stream Control Register */
#define NT_PPC_EBB 0x106 /* Event Based Branch Registers */
#define NT_PPC_PMU 0x107 /* Performance Monitor Registers */
#define NT_PPC_TM_CGPR 0x108 /* TM checkpointed GPR Registers */
#define NT_PPC_TM_CFPR 0x109 /* TM checkpointed FPR Registers */
#define NT_PPC_TM_CVMX 0x10a /* TM checkpointed VMX Registers */
#define NT_PPC_TM_CVSX 0x10b /* TM checkpointed VSX Registers */
#define NT_PPC_TM_SPR 0x10c /* TM Special Purpose Registers */
#define NT_PPC_TM_CTAR 0x10d /* TM checkpointed Target Address Register */
#define NT_PPC_TM_CPPR 0x10e /* TM checkpointed Program Priority Register */
#define NT_PPC_TM_CDSCR 0x10f /* TM checkpointed Data Stream Control Register */
#define NT_PPC_PKEY 0x110 /* Memory Protection Keys registers */
#define NT_386_TLS 0x200 /* i386 TLS slots (struct user_desc) */
#define NT_386_IOPERM 0x201 /* x86 io permission bitmap (1=deny) */
#define NT_X86_XSTATE 0x202 /* x86 extended state using xsave */
#define NT_S390_HIGH_GPRS 0x300 /* s390 upper register halves */
#define NT_S390_TIMER 0x301 /* s390 timer register */
#define NT_S390_TODCMP 0x302 /* s390 TOD clock comparator register */
#define NT_S390_TODPREG 0x303 /* s390 TOD programmable register */
#define NT_S390_CTRS 0x304 /* s390 control registers */
#define NT_S390_PREFIX 0x305 /* s390 prefix register */
#define NT_S390_LAST_BREAK 0x306 /* s390 breaking event address */
#define NT_S390_SYSTEM_CALL 0x307 /* s390 system call restart data */
#define NT_S390_TDB 0x308 /* s390 transaction diagnostic block */
#define NT_S390_VXRS_LOW 0x309 /* s390 vector registers 0-15 upper half */
#define NT_S390_VXRS_HIGH 0x30a /* s390 vector registers 16-31 */
#define NT_S390_GS_CB 0x30b /* s390 guarded storage registers */
#define NT_S390_GS_BC 0x30c /* s390 guarded storage broadcast control block */
#define NT_S390_RI_CB 0x30d /* s390 runtime instrumentation */
#define NT_ARM_VFP 0x400 /* ARM VFP/NEON registers */
#define NT_ARM_TLS 0x401 /* ARM TLS register */
#define NT_ARM_HW_BREAK 0x402 /* ARM hardware breakpoint registers */
#define NT_ARM_HW_WATCH 0x403 /* ARM hardware watchpoint registers */
#define NT_ARM_SYSTEM_CALL 0x404 /* ARM system call number */
#define NT_ARM_SVE 0x405 /* ARM Scalable Vector Extension registers */
#define NT_ARM_PAC_MASK 0x406 /* ARM pointer authentication code masks */
#define NT_ARM_PACA_KEYS 0x407 /* ARM pointer authentication address keys */
#define NT_ARM_PACG_KEYS 0x408 /* ARM pointer authentication generic key */
#define NT_ARM_TAGGED_ADDR_CTRL 0x409 /* arm64 tagged address control (prctl()) */
#define NT_ARM_PAC_ENABLED_KEYS 0x40a /* arm64 ptr auth enabled keys (prctl()) */
#define NT_ARC_V2 0x600 /* ARCv2 accumulator/extra registers */
#define NT_VMCOREDD 0x700 /* Vmcore Device Dump Note */
#define NT_MIPS_DSP 0x800 /* MIPS DSP ASE registers */
#define NT_MIPS_FP_MODE 0x801 /* MIPS floating-point mode */
#define NT_MIPS_MSA 0x802 /* MIPS SIMD registers */
/* Note types with note name "GNU" */
#define NT_GNU_PROPERTY_TYPE_0 5
/* Note header in a PT_NOTE section */
typedef struct elf32_note {
Elf32_Word n_namesz; /* Name size */
Elf32_Word n_descsz; /* Content size */
Elf32_Word n_type; /* Content type */
} Elf32_Nhdr;
/* Note header in a PT_NOTE section */
typedef struct elf64_note {
Elf64_Word n_namesz; /* Name size */
Elf64_Word n_descsz; /* Content size */
Elf64_Word n_type; /* Content type */
} Elf64_Nhdr;
/* .note.gnu.property types for EM_AARCH64: */
#define GNU_PROPERTY_AARCH64_FEATURE_1_AND 0xc0000000
/* Bits for GNU_PROPERTY_AARCH64_FEATURE_1_BTI */
#define GNU_PROPERTY_AARCH64_FEATURE_1_BTI (1U << 0)
#endif /* _UAPI_LINUX_ELF_H */
<|start_filename|>emulator/include/board/board.hpp<|end_filename|>
#pragma once
#include <concepts>
#include <list>
#include <map>
#include <string>
#include <string_view>
#include <devices/device.hpp>
#include <board/track.hpp>
#include <imgui.h>
#define IMGUI_DEFINE_MATH_OPERATORS
#include <imgui_internal.h>
namespace vc::pcb {
class Board {
public:
explicit Board(std::string_view name, ImVec2 size) : boardName(name), dimensions(size) { }
virtual ~Board() {
this->powerDown();
for (auto &device : this->devices)
delete device;
for (auto &[name, track] : this->tracks)
delete track;
}
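// Power-up sequence: mark the board as powered, reset every device, then keep
// ticking devices that report pending work until the board settles or power is cut.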
void powerUp() {
this->hasPower = true;
for (auto &device : this->devices)
device->reset();
bool doneWork;
do {
doneWork = false;
for (auto &device : this->devices) {
if (device->needsUpdate()) {
device->tick();
doneWork = true;
}
}
} while (doneWork && this->hasPower);
}
void powerDown() {
this->hasPower = false;
}
[[nodiscard]]
std::string_view getName() const {
return this->boardName;
}
virtual void draw(ImDrawList *drawList) {
drawList->AddRectFilled(getPosition(), getPosition() + getDimensions(), ImColor(0x09, 0x91, 0x32, 0xFF));
for (auto &[name, track] : this->tracks) {
auto [from, to] = track->getEndpoints();
auto startPos = getPosition() + from->getPosition() + from->getSize() / 2;
auto endPos = getPosition() + to->getPosition() + to->getSize() / 2;
auto middlePos = startPos.x - getPosition().x > startPos.y - getPosition().y ? ImVec2(startPos.x, endPos.y) : ImVec2(endPos.x, startPos.y);
drawList->AddLine(startPos, middlePos, ImColor(0x19, 0xC1, 0x62, 0xFF), 3);
drawList->AddLine(middlePos, endPos, ImColor(0x19, 0xC1, 0x62, 0xFF), 3);
}
for (auto &device : this->devices) {
if (auto connectable = dynamic_cast<Connectable*>(device); connectable != nullptr) {
connectable->draw(getPosition(), drawList);
}
}
}
[[nodiscard]]
ImVec2 getPosition() const {
return this->position;
}
void setPosition(ImVec2 pos) {
this->position = pos;
}
[[nodiscard]]
ImVec2 getDimensions() const {
return this->dimensions;
}
protected:
template<std::derived_from<dev::Device> T, typename ...Args>
auto& createDevice(Args&&... args) {
auto device = new T(std::forward<Args>(args)...);
this->devices.push_back(device);
return *device;
}
void createTrack(Direction direction, const std::string &name, pcb::Connectable &from, pcb::Connectable &to, bool buffered = false) {
if (this->tracks.contains(name)) return;
auto track = new Track(direction, buffered, &from, &to);
this->tracks.insert({ name, track });
from.linkTrack(name, track);
to.linkTrack(name, track);
}
private:
bool hasPower = false;
std::string boardName;
std::list<dev::Device*> devices;
std::map<std::string, pcb::Track*> tracks;
ImVec2 position;
ImVec2 dimensions;
};
}
<|start_filename|>emulator/include/devices/cpu/core/core.hpp<|end_filename|>
#pragma once
#include <risc.hpp>
#include <devices/cpu/core/instructions.hpp>
#include <devices/cpu/core/registers.hpp>
#include <devices/cpu/core/address_space.hpp>
#include <thread>
#include <chrono>
namespace vc::dev::cpu {
class Core {
public:
explicit Core(AddressSpace &addressSpace) : addressSpace(addressSpace) { }
void execute();
[[nodiscard]]
bool isHalted() const { return halted; }
void reset() {
this->regs.pc = 0x00;
for (u8 r = 1; r < 32; r++)
this->regs.x[r] = 0x00;
this->halted = false;
}
void halt(std::string_view message = "", auto ... params) {
if (!message.empty())
log::fatal(message, params...);
log::fatal("Halted CPU Core at {:#x}", regs.pc);
using namespace std::literals::chrono_literals;
std::this_thread::sleep_for(200ms);
this->halted = true;
}
private:
constexpr void executeCompressedInstruction(const CompressedInstruction &instr);
constexpr void executeInstruction(const Instruction &instr);
constexpr void executeOPInstruction(const Instruction &instr);
constexpr void executeOPIMMInstruction(const Instruction &instr);
constexpr void executeOPIMM32Instruction(const Instruction &instr);
constexpr void executeBRANCHInstruction(const Instruction &instr);
constexpr void executeLOADInstruction(const Instruction &instr);
constexpr void executeSTOREInstruction(const Instruction &instr);
constexpr void executeC0Instruction(const CompressedInstruction &instr);
constexpr void executeC1Instruction(const CompressedInstruction &instr);
constexpr void executeC2Instruction(const CompressedInstruction &instr);
u64 nextPC;
bool halted = true;
AddressSpace &addressSpace;
Registers regs;
};
}
<|start_filename|>emulator/include/ui/window.hpp<|end_filename|>
#pragma once
#include <risc.hpp>
#include <vector>
struct GLFWwindow;
namespace vc::ui {
class View;
class Window {
public:
Window();
~Window();
void loop();
template<typename T, typename ... Args>
auto& addView(Args&&... args) {
auto view = new T(std::forward<Args>(args)...);
this->views.push_back(view);
return *view;
}
private:
void frameBegin();
void frame();
void frameEnd();
void initGLFW();
void initImGui();
void deinitGLFW();
void deinitImGui();
GLFWwindow *windowHandle = nullptr;
double targetFps = 60.0;
double lastFrameTime = 0.0;
std::vector<View*> views;
};
}
<|start_filename|>emulator/include/devices/pin_header.hpp<|end_filename|>
#pragma once
#include <board/track.hpp>
namespace vc::dev {
class PinHeader : public Device, public pcb::Connectable {
public:
explicit PinHeader(ImVec2 pos) {
this->setPosition(pos);
}
auto get(std::string_view name) {
return this->getTrack(name);
}
void tick() override { }
bool needsUpdate() override { return this->dataAvailable(); }
void reset() override {
this->receivedData.clear();
}
void draw(ImVec2 start, ImDrawList *drawList) override {
this->numPins = this->getConnectedTrackNames().size();
this->setSize({ 19.0F * this->numPins, 19.0F });
drawList->AddRectFilled(start + getPosition(), start + getPosition() + getSize(), ImColor(0x10, 0x10, 0x10, 0xFF));
for (auto i = 0; i < this->numPins; i++) {
drawList->AddCircleFilled(ImVec2(start + getPosition() + ImVec2(9 + 19 * i, 10)), 4, ImColor(0xB0, 0xB0, 0xC0, 0xFF));
}
for (auto &trackName : this->getConnectedTrackNames()) {
auto c = this->get(trackName)->getValue();
if (c.has_value())
receivedData[std::string(trackName)] += (char)*c;
}
if (ImGui::IsMouseHoveringRect(start + getPosition(), start + getPosition() + getSize())) {
ImGui::BeginTooltip();
ImGui::TextUnformatted("Connected Tracks");
ImGui::Separator();
for (auto &trackName : this->getConnectedTrackNames()) {
ImGui::Text("%s: %s", trackName.data(), receivedData[std::string(trackName)].c_str());
}
ImGui::EndTooltip();
}
}
private:
u32 numPins = 1;
std::map<std::string, std::string> receivedData;
};
}
<|start_filename|>emulator/include/devices/cpu/core/mmio/gpio.hpp<|end_filename|>
#pragma once
#include <devices/cpu/core/mmio/device.hpp>
#include <numeric>
namespace vc::dev::cpu::mmio {
class GPIO : public MMIODevice {
public:
GPIO(u64 base) : MMIODevice("GPIO", base, sizeof(registers)) {
registers = { 0 };
}
[[nodiscard]]
u8& byte(u64 offset) noexcept override {
return *(reinterpret_cast<u8*>(&this->registers) + offset);
}
[[nodiscard]]
u16& halfWord(u64 offset) noexcept override {
return *reinterpret_cast<u16*>((reinterpret_cast<u8*>(&this->registers) + offset));
}
[[nodiscard]]
u32& word(u64 offset) noexcept override {
return *reinterpret_cast<u32*>((reinterpret_cast<u8*>(&this->registers) + offset));
}
[[nodiscard]]
u64& doubleWord(u64 offset) noexcept override {
return *reinterpret_cast<u64*>((reinterpret_cast<u8*>(&this->registers) + offset));
}
std::array<cpu::IOPin, 8> gpioPins;
private:
void tick() noexcept override {
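// Each CR bit selects a pin's direction (1 = output): output pins mirror the
// matching OUT bit onto the pin, input pins latch the pin level into IN.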
u32 offset = 0;
registers.IN = 0x00;
for (auto &pin : gpioPins) {
if (registers.CR & (0b01 << offset)) /* Output */{
pin.setValue((static_cast<u8>(registers.OUT) & (0b01 << offset)) == 0x00 ? 0 : 1);
} else /* Input */ {
if (pin.hasValue())
registers.IN |= (pin.getValue().value() == 0x00 ? 0 : 1) << offset;
}
offset++;
}
}
bool needsUpdate() noexcept override {
return true;
}
struct {
u32 CR;
u32 IN;
u32 OUT;
} registers;
};
}
<|start_filename|>emulator/include/devices/cpu/core/instructions.hpp<|end_filename|>
#pragma once
#include <risc.hpp>
#define INSTRUCTION_FORMAT(name, ...) struct { __VA_ARGS__ } name; static_assert(sizeof(name) == InstructionSize, "Instruction Format " #name " is invalid!")
#define COMPRESSED_INSTRUCTION_FORMAT(name, ...) struct { __VA_ARGS__ } name; static_assert(sizeof(name) == CompressedInstructionSize, "Compressed instruction Format " #name " is invalid!")
namespace vc::dev::cpu {
using instr_t = u32;
using comp_instr_t = u16;
constexpr static inline size_t InstructionSize = sizeof(instr_t);
constexpr static inline size_t CompressedInstructionSize = sizeof(comp_instr_t);
enum class Opcode : u8 {
LUI = 0b0110111,
AUIPC = 0b0010111,
JAL = 0b1101111,
JALR = 0b1100111,
BRANCH = 0b1100011,
LOAD = 0b0000011,
STORE = 0b0100011,
OP_IMM = 0b0010011,
OP_IMM32 = 0b0011011,
MISC_MEM = 0b0001111,
SYSTEM = 0b1110011,
OP = 0b0110011,
OP_32 = 0b0111011,
AMO = 0b0101111,
LOAD_FP = 0b0000111,
STORE_FP = 0b0100111,
FMADD = 0b1000011,
FMSUB = 0b1000111,
FNMADD = 0b1001111,
FNMSUB = 0b1001011,
OP_FP = 0b1010011,
};
enum class CompressedOpcode : u8 {
C0 = 0b00,
C1 = 0b01,
C2 = 0b10,
};
enum class C0Funct : u8 {
C_ADDI4SPN = 0b000
};
enum class C1Funct : u8 {
C_ADDI = 0b000,
C_ADDIW = 0b001,
C_LI = 0b010,
C_LUI = 0b011,
C_ANDI = 0b100
};
enum class C2Funct : u8 {
C_JUMP = 0b100,
C_LDSP = 0b011,
C_SDSP = 0b111
};
enum class OPFunc3 : u8 {
ADD = 0b000
};
enum class OPFunc7 : u8 {
ADD = 0b0000000
};
enum class OPIMMFunc : u8 {
ADDI = 0b000,
XORI = 0b100,
ORI = 0b110,
ANDI = 0b111
};
enum class OPIMM32Func : u8 {
ADDIW = 0b000
};
enum class STOREFunc : u8 {
SB = 0b000,
SH = 0b001,
SW = 0b010,
SD = 0b011,
};
enum class LOADFunc : u8 {
LB = 0b000,
LD = 0b011,
LBU = 0b100
};
enum class BRANCHFunc : u8 {
BEQ = 0b000,
BNE = 0b001
};
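// The instruction formats below assume bit-fields are packed starting at the least
// significant bit (as GCC/Clang do on little-endian targets); the static_asserts in
// INSTRUCTION_FORMAT only verify the total size.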
union Instruction {
union {
INSTRUCTION_FORMAT(R,
instr_t opcode : 7;
instr_t rd : 5;
instr_t funct3 : 3;
instr_t rs1 : 5;
instr_t rs2 : 5;
instr_t funct7 : 7;
);
INSTRUCTION_FORMAT(I,
instr_t opcode : 7;
instr_t rd : 5;
instr_t funct3 : 3;
instr_t rs1 : 5;
instr_t imm0_11 : 12;
constexpr u32 getImmediate() const { return this->imm0_11; }
constexpr void setImmediate(u32 value) { this->imm0_11 = value; }
);
INSTRUCTION_FORMAT(S,
instr_t opcode : 7;
instr_t imm0_4 : 5;
instr_t funct3 : 3;
instr_t rs1 : 5;
instr_t rs2 : 5;
instr_t imm5_11 : 7;
constexpr u32 getImmediate() const { return (this->imm5_11 << 5) | this->imm0_4; }
constexpr void setImmediate(u32 value) { this->imm0_4 = value & 0b11111; this->imm5_11 = value >> 5; }
);
INSTRUCTION_FORMAT(U,
instr_t opcode : 7;
instr_t rd : 5;
instr_t imm12_31 : 20;
constexpr u32 getImmediate() const { return this->imm12_31 << 12; }
constexpr void setImmediate(u32 value) { this->imm12_31 = value >> 12; }
);
} Base;
union {
INSTRUCTION_FORMAT(R,
instr_t opcode : 7;
instr_t rd : 5;
instr_t funct3 : 3;
instr_t rs1 : 5;
instr_t rs2 : 5;
instr_t funct7 : 7;
);
INSTRUCTION_FORMAT(I,
instr_t opcode : 7;
instr_t rd : 5;
instr_t funct3 : 3;
instr_t rs1 : 5;
instr_t imm0_11 : 12;
constexpr u32 getImmediate() const { return this->imm0_11; }
);
INSTRUCTION_FORMAT(S,
instr_t opcode : 7;
instr_t imm0_4 : 5;
instr_t funct3 : 3;
instr_t rs1 : 5;
instr_t rs2 : 5;
instr_t imm5_11 : 7;
constexpr u32 getImmediate() const { return (this->imm5_11 << 5) | this->imm0_4; }
);
INSTRUCTION_FORMAT(B,
instr_t opcode : 7;
instr_t imm11 : 1;
instr_t imm1_4 : 4;
instr_t funct3 : 3;
instr_t rs1 : 5;
instr_t rs2 : 5;
instr_t imm5_10 : 6;
instr_t imm12 : 1;
constexpr u32 getImmediate() const { return ((this->imm12 << 12) | (this->imm11 << 11) | (this->imm5_10 << 5) | (this->imm1_4 << 1)) >> 1; }
);
INSTRUCTION_FORMAT(U,
instr_t opcode : 7;
instr_t rd : 5;
instr_t imm12_31 : 20;
constexpr u32 getImmediate() const { return this->imm12_31 << 12; }
);
INSTRUCTION_FORMAT(J,
instr_t opcode : 7;
instr_t rd : 5;
instr_t imm12_19 : 8;
instr_t imm11 : 1;
instr_t imm1_10 : 10;
instr_t imm20 : 1;
constexpr u32 getImmediate() const { return ((this->imm20 << 20) | (this->imm12_19 << 12) | (this->imm11 << 11) | (this->imm1_10 << 1)) >> 1; }
);
} Immediate;
[[nodiscard]]
constexpr Opcode getOpcode() const {
return static_cast<Opcode>(Base.R.opcode);
}
[[nodiscard]]
constexpr u8 getFunction3() const {
return Base.R.funct3;
}
};
static_assert(sizeof(Instruction) == InstructionSize, "Instruction union invalid!");
union CompressedInstruction {
COMPRESSED_INSTRUCTION_FORMAT(CR,
comp_instr_t opcode : 2;
comp_instr_t rs2 : 5;
comp_instr_t rd : 5;
comp_instr_t funct4 : 4;
);
COMPRESSED_INSTRUCTION_FORMAT(CI,
comp_instr_t opcode : 2;
comp_instr_t imm1 : 3;
comp_instr_t imm2 : 2;
comp_instr_t rd : 5;
comp_instr_t imm3 : 1;
comp_instr_t funct3 : 3;
);
COMPRESSED_INSTRUCTION_FORMAT(CSS,
comp_instr_t opcode : 2;
comp_instr_t rs2 : 5;
comp_instr_t imm : 6;
comp_instr_t funct3 : 3;
);
COMPRESSED_INSTRUCTION_FORMAT(CIW,
comp_instr_t opcode : 2;
comp_instr_t rd : 3;
comp_instr_t imm : 8;
comp_instr_t funct3 : 3;
);
COMPRESSED_INSTRUCTION_FORMAT(CL,
comp_instr_t opcode : 2;
comp_instr_t rd : 3;
comp_instr_t imm1 : 2;
comp_instr_t rs1 : 3;
comp_instr_t imm2 : 3;
comp_instr_t funct3 : 3;
);
COMPRESSED_INSTRUCTION_FORMAT(CS,
comp_instr_t opcode : 2;
comp_instr_t rs2 : 3;
comp_instr_t imm1 : 2;
comp_instr_t rs1 : 3;
comp_instr_t imm2 : 3;
comp_instr_t funct3 : 3;
);
COMPRESSED_INSTRUCTION_FORMAT(CB,
comp_instr_t opcode : 2;
comp_instr_t offset1 : 5;
comp_instr_t rs1 : 3;
comp_instr_t offset2 : 3;
comp_instr_t funct3 : 3;
);
COMPRESSED_INSTRUCTION_FORMAT(CJ,
comp_instr_t opcode : 2;
comp_instr_t target : 11;
comp_instr_t funct3 : 3;
);
[[nodiscard]]
constexpr CompressedOpcode getOpcode() const {
return static_cast<CompressedOpcode>(CJ.opcode);
}
[[nodiscard]]
constexpr u8 getFunction3() const {
return CJ.funct3;
}
[[nodiscard]]
constexpr u8 getFunction4() const {
return CR.funct4;
}
};
}
<|start_filename|>emulator/source/main.cpp<|end_filename|>
#include <board/board_test.hpp>
#include <ui/window.hpp>
#include <ui/views/view_control.hpp>
#include <ui/views/view_pcb.hpp>
int main() {
vc::ui::Window window;
vc::pcb::TestBoard board;
window.addView<vc::ui::ViewControl>(board);
window.addView<vc::ui::ViewPCB>(board);
window.loop();
}
<|start_filename|>emulator/include/devices/device.hpp<|end_filename|>
#pragma once
#include <imgui.h>
#define IMGUI_DEFINE_MATH_OPERATORS
#include <imgui_internal.h>
#include <imgui_vc_extensions.h>
namespace vc::dev {
class Device {
public:
Device() = default;
virtual ~Device() = default;
virtual void tick() = 0;
virtual bool needsUpdate() = 0;
virtual void reset() = 0;
};
}
<|start_filename|>emulator/external/ImGui/source/imgui_vc_extensions.cpp<|end_filename|>
#include <imgui_vc_extensions.h>
#include <imgui.h>
#include <imgui_freetype.h>
#define IMGUI_DEFINE_MATH_OPERATORS
#include <imgui_internal.h>
#undef IMGUI_DEFINE_MATH_OPERATORS
#define STB_IMAGE_IMPLEMENTATION
#include <stb_image.h>
#include <string>
#include <glad/glad.h>
namespace ImGui {
bool Hyperlink(const char* label, const ImVec2& size_arg, ImGuiButtonFlags flags) {
ImGuiWindow* window = GetCurrentWindow();
if (window->SkipItems)
return false;
ImGuiContext& g = *GImGui;
const ImGuiStyle& style = g.Style;
const ImGuiID id = window->GetID(label);
const ImVec2 label_size = CalcTextSize(label, NULL, true);
ImVec2 pos = window->DC.CursorPos;
ImVec2 size = CalcItemSize(size_arg, label_size.x, label_size.y);
const ImRect bb(pos, pos + size);
if (!ItemAdd(bb, id))
return false;
if (window->DC.ItemFlags & ImGuiItemFlags_ButtonRepeat)
flags |= ImGuiButtonFlags_Repeat;
bool hovered, held;
bool pressed = ButtonBehavior(bb, id, &hovered, &held, flags);
// Render
const ImU32 col = hovered ? GetColorU32(ImGuiCol_ButtonHovered) : GetColorU32(ImGuiCol_ButtonActive);
PushStyleColor(ImGuiCol_Text, ImU32(col));
TextEx(label, NULL, ImGuiTextFlags_NoWidthForLargeClippedText); // Skip formatting
GetWindowDrawList()->AddLine(ImVec2(pos.x, pos.y + size.y), pos + size, ImU32(col));
PopStyleColor();
IMGUI_TEST_ENGINE_ITEM_INFO(id, label, window->DC.LastItemStatusFlags);
return pressed;
}
bool BulletHyperlink(const char* label, const ImVec2& size_arg, ImGuiButtonFlags flags) {
ImGuiWindow* window = GetCurrentWindow();
if (window->SkipItems)
return false;
ImGuiContext& g = *GImGui;
const ImGuiStyle& style = g.Style;
const ImGuiID id = window->GetID(label);
const ImVec2 label_size = CalcTextSize(label, NULL, true);
ImVec2 pos = window->DC.CursorPos;
ImVec2 size = CalcItemSize(size_arg, label_size.x, label_size.y) + ImVec2(g.FontSize + style.FramePadding.x * 2, 0.0f);
const ImRect bb(pos, pos + size);
if (!ItemAdd(bb, id))
return false;
if (window->DC.ItemFlags & ImGuiItemFlags_ButtonRepeat)
flags |= ImGuiButtonFlags_Repeat;
bool hovered, held;
bool pressed = ButtonBehavior(bb, id, &hovered, &held, flags);
// Render
const ImU32 col = hovered ? GetColorU32(ImGuiCol_ButtonHovered) : GetColorU32(ImGuiCol_ButtonActive);
PushStyleColor(ImGuiCol_Text, ImU32(col));
RenderBullet(window->DrawList, bb.Min + ImVec2(style.FramePadding.x + g.FontSize * 0.5f, g.FontSize * 0.5f), col);
RenderText(bb.Min + ImVec2(g.FontSize + style.FramePadding.x * 2, 0.0f), label, nullptr, false);
GetWindowDrawList()->AddLine(bb.Min + ImVec2(style.FramePadding.x, size.y), pos + size, ImU32(col));
ImGui::NewLine();
PopStyleColor();
IMGUI_TEST_ENGINE_ITEM_INFO(id, label, window->DC.LastItemStatusFlags);
return pressed;
}
bool DescriptionButton(const char* label, const char* description, const ImVec2& size_arg, ImGuiButtonFlags flags) {
ImGuiWindow* window = GetCurrentWindow();
if (window->SkipItems)
return false;
ImGuiContext& g = *GImGui;
const ImGuiStyle& style = g.Style;
const ImGuiID id = window->GetID(label);
const ImVec2 text_size = CalcTextSize((std::string(label) + "\n " + std::string(description)).c_str(), NULL, true);
const ImVec2 label_size = CalcTextSize(label, NULL, true);
ImVec2 pos = window->DC.CursorPos;
if ((flags & ImGuiButtonFlags_AlignTextBaseLine) && style.FramePadding.y < window->DC.CurrLineTextBaseOffset) // Try to vertically align buttons that are smaller/have no padding so that text baseline matches (bit hacky, since it shouldn't be a flag)
pos.y += window->DC.CurrLineTextBaseOffset - style.FramePadding.y;
ImVec2 size = CalcItemSize(size_arg, text_size.x + style.FramePadding.x * 4.0f, text_size.y + style.FramePadding.y * 4.0f);
const ImRect bb(pos, pos + size);
ItemSize(size, style.FramePadding.y);
if (!ItemAdd(bb, id))
return false;
if (window->DC.ItemFlags & ImGuiItemFlags_ButtonRepeat)
flags |= ImGuiButtonFlags_Repeat;
bool hovered, held;
bool pressed = ButtonBehavior(bb, id, &hovered, &held, flags);
ImGui::PushStyleVar(ImGuiStyleVar_ButtonTextAlign, ImVec2(0.0, 0.5));
// Render
const ImU32 col = GetCustomColorU32((held && hovered) ? ImGuiCustomCol_DescButtonActive : hovered ? ImGuiCustomCol_DescButtonHovered : ImGuiCustomCol_DescButton);
RenderNavHighlight(bb, id);
RenderFrame(bb.Min, bb.Max, col, true, style.FrameRounding);
PushStyleColor(ImGuiCol_Text, GetColorU32(ImGuiCol_ButtonActive));
RenderTextWrapped(bb.Min + style.FramePadding * 2, label, nullptr, CalcWrapWidthForPos(window->DC.CursorPos, window->DC.TextWrapPos));
PopStyleColor();
PushStyleColor(ImGuiCol_Text, GetColorU32(ImGuiCol_Text));
RenderTextClipped(bb.Min + style.FramePadding * 2 + ImVec2(style.FramePadding.x * 2, label_size.y), bb.Max - style.FramePadding, description, NULL, &text_size, style.ButtonTextAlign, &bb);
PopStyleColor();
ImGui::PopStyleVar();
// Automatically close popups
//if (pressed && !(flags & ImGuiButtonFlags_DontClosePopups) && (window->Flags & ImGuiWindowFlags_Popup))
// CloseCurrentPopup();
IMGUI_TEST_ENGINE_ITEM_INFO(id, label, window->DC.LastItemStatusFlags);
return pressed;
}
void UnderlinedText(const char* label, ImColor color, const ImVec2& size_arg) {
ImGuiWindow* window = GetCurrentWindow();
const ImVec2 label_size = CalcTextSize(label, NULL, true);
ImVec2 pos = window->DC.CursorPos;
ImVec2 size = CalcItemSize(size_arg, label_size.x, label_size.y);
PushStyleColor(ImGuiCol_Text, ImU32(color));
TextEx(label, NULL, ImGuiTextFlags_NoWidthForLargeClippedText); // Skip formatting
GetWindowDrawList()->AddLine(ImVec2(pos.x, pos.y + size.y), pos + size, ImU32(color));
PopStyleColor();
}
void Disabled(const std::function<void()> &widgets, bool disabled) {
if (disabled) {
ImGui::PushItemFlag(ImGuiItemFlags_Disabled, true);
ImGui::PushStyleVar(ImGuiStyleVar_Alpha, ImGui::GetStyle().Alpha * 0.5F);
widgets();
ImGui::PopStyleVar();
ImGui::PopItemFlag();
} else {
widgets();
}
}
void TextSpinner(const char* label) {
ImGui::Text("[%c] %s", "|/-\\"[ImU32(ImGui::GetTime() * 20) % 4], label);
}
void Header(const char *label, bool firstEntry) {
if (!firstEntry)
ImGui::NewLine();
ImGui::TextUnformatted(label);
ImGui::Separator();
}
ImU32 GetCustomColorU32(ImGuiCustomCol idx, float alpha_mul) {
auto& customData = *static_cast<ImHexCustomData*>(GImGui->IO.UserData);
ImVec4 c = customData.Colors[idx];
c.w *= GImGui->Style.Alpha * alpha_mul;
return ColorConvertFloat4ToU32(c);
}
void StyleCustomColorsDark() {
auto &colors = static_cast<ImHexCustomData*>(GImGui->IO.UserData)->Colors;
colors[ImGuiCustomCol_DescButton] = ImColor(20, 20, 20);
colors[ImGuiCustomCol_DescButtonHovered] = ImColor(40, 40, 40);
colors[ImGuiCustomCol_DescButtonActive] = ImColor(60, 60, 60);
}
void StyleCustomColorsLight() {
auto &colors = static_cast<ImHexCustomData*>(GImGui->IO.UserData)->Colors;
colors[ImGuiCustomCol_DescButton] = ImColor(230, 230, 230);
colors[ImGuiCustomCol_DescButtonHovered] = ImColor(210, 210, 210);
colors[ImGuiCustomCol_DescButtonActive] = ImColor(190, 190, 190);
}
void StyleCustomColorsClassic() {
auto &colors = static_cast<ImHexCustomData*>(GImGui->IO.UserData)->Colors;
colors[ImGuiCustomCol_DescButton] = ImColor(40, 40, 80);
colors[ImGuiCustomCol_DescButtonHovered] = ImColor(60, 60, 100);
colors[ImGuiCustomCol_DescButtonActive] = ImColor(80, 80, 120);
}
std::tuple<ImTextureID, int, int> LoadImageFromPath(const char *path) {
int imageWidth = 0;
int imageHeight = 0;
unsigned char* imageData = stbi_load(path, &imageWidth, &imageHeight, nullptr, 4);
if (imageData == nullptr)
return { nullptr, -1, -1 };
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
#if defined(GL_UNPACK_ROW_LENGTH)
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
#endif
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageWidth, imageHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, imageData);
stbi_image_free(imageData);
return { reinterpret_cast<ImTextureID>(static_cast<intptr_t>(texture)), imageWidth, imageHeight };
}
void UnloadImage(ImTextureID texture) {
auto glTextureId = static_cast<GLuint>(reinterpret_cast<intptr_t>(texture));
glDeleteTextures(1, &glTextureId);
}
}
<|start_filename|>emulator/include/devices/cpu/core/mmio/uart.hpp<|end_filename|>
#pragma once
#include <devices/cpu/core/mmio/device.hpp>
#include <numeric>
namespace vc::dev::cpu::mmio {
class UART : public MMIODevice {
public:
UART(u64 base) : MMIODevice("UART", base, sizeof(registers)) {
registers = { 0 };
}
[[nodiscard]]
u8& byte(u64 offset) noexcept override {
this->valueChanged = true;
return *(reinterpret_cast<u8*>(&this->registers) + offset);
}
[[nodiscard]]
u16& halfWord(u64 offset) noexcept override {
this->valueChanged = true;
return *reinterpret_cast<u16*>((reinterpret_cast<u8*>(&this->registers) + offset));
}
[[nodiscard]]
u32& word(u64 offset) noexcept override {
this->valueChanged = true;
return *reinterpret_cast<u32*>((reinterpret_cast<u8*>(&this->registers) + offset));
}
[[nodiscard]]
u64& doubleWord(u64 offset) noexcept override {
this->valueChanged = true;
return *reinterpret_cast<u64*>((reinterpret_cast<u8*>(&this->registers) + offset));
}
cpu::IOPin txPin;
private:
void tick() noexcept override {
txPin.setValue(static_cast<u8>(registers.TX));
registers.TX = 0x00;
this->valueChanged = false;
}
bool needsUpdate() noexcept override {
return this->valueChanged;
}
struct {
u32 CR;
u32 TX;
u32 RX;
} registers;
bool valueChanged = false;
};
}
<|start_filename|>emulator/include/utils.hpp<|end_filename|>
#pragma once
#include <limits>
#include <concepts>
#include <type_traits>
#include <fmt/core.h>
#define TOKEN_CONCAT_IMPL(x, y) x ## y
#define TOKEN_CONCAT(x, y) TOKEN_CONCAT_IMPL(x, y)
#define ANONYMOUS_VARIABLE(prefix) TOKEN_CONCAT(prefix, __COUNTER__)
namespace vc::util {
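// Sign-extends the low `Bits` bits of `value` to the full width of `Output`
// (two's complement): if the sign bit is clear the value is returned unchanged,
// otherwise every bit above it is set.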
template<size_t Bits, typename Output>
constexpr Output signExtend(auto value) {
if ((value & (static_cast<std::make_unsigned_t<Output>>(1) << (Bits - 1))) == 0)
return value;
else {
std::make_unsigned_t<Output> mask = static_cast<std::make_unsigned_t<Output>>(~0) << Bits;
return value | mask;
}
}
#define SCOPE_GUARD ::vc::util::scope_guard::ScopeGuardOnExit() + [&]()
#define ON_SCOPE_EXIT auto ANONYMOUS_VARIABLE(SCOPE_EXIT_) = SCOPE_GUARD
namespace scope_guard {
template<class F>
class ScopeGuardImpl {
private:
F m_func;
bool m_active;
public:
constexpr ScopeGuardImpl(F func) : m_func(std::move(func)), m_active(true) { }
~ScopeGuardImpl() { if (this->m_active) { this->m_func(); } }
void release() { this->m_active = false; }
ScopeGuardImpl(ScopeGuardImpl &&other) noexcept : m_func(std::move(other.m_func)), m_active(other.m_active) {
other.release();
}
ScopeGuardImpl& operator=(ScopeGuardImpl &&) = delete;
};
enum class ScopeGuardOnExit { };
template <typename F>
constexpr ScopeGuardImpl<F> operator+(ScopeGuardOnExit, F&& f) {
return ScopeGuardImpl<F>(std::forward<F>(f));
}
}
namespace detail {
template<std::size_t...Is, class Model>
auto build_array_impl(std::index_sequence<Is...>, Model&& model)
{
constexpr auto size = sizeof...(Is) + 1;
return std::array<std::decay_t<Model>, size>
{
// N-1 copies
(static_cast<void>(Is), model)...,
// followed by perfect forwarding for the last one
std::forward<Model>(model)
};
}
}
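// build_array<N>(value) builds a std::array with N copies of `value`, copying
// N-1 times and moving the final element into place.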
template<std::size_t N, class Type>
auto build_array(std::integral_constant<std::size_t, N>, Type&& model) {
return detail::build_array_impl(std::make_index_sequence<N-1>(),
std::forward<Type>(model));
}
}
constexpr auto operator ""_kiB(unsigned long long kiB) {
return kiB * 1024;
}
constexpr auto operator ""_MiB(unsigned long long MiB) {
return operator ""_kiB(MiB) * 1024;
}
constexpr auto operator ""_GiB(unsigned long long GiB) {
return operator ""_MiB(GiB) * 1024;
}
<|start_filename|>emulator/include/devices/cpu/core/io_pin.hpp<|end_filename|>
#pragma once
#include <optional>
#include <risc.hpp>
namespace vc::dev::cpu {
class IOPin {
public:
[[nodiscard]]
auto getValue() {
auto copy = this->value;
this->value.reset();
return copy;
}
void setValue(u8 value) {
this->value = value;
}
[[nodiscard]]
bool hasValue() {
return this->value.has_value();
}
private:
std::optional<u8> value;
};
}
<|start_filename|>risc-example/subprojects/kern/include/types.hpp<|end_filename|>
#pragma once
using u8 = unsigned char;
using i8 = signed char;
using u16 = unsigned short;
using i16 = signed short;
using u32 = unsigned int;
using i32 = signed int;
using u64 = unsigned long;
using i64 = signed long;
using u128 = __uint128_t;
using i128 = __int128_t;
using f32 = float;
using f64 = double;
using f128 = long double;
static_assert(sizeof(u8) == 1, "u8 type not 8 bit long!");
static_assert(sizeof(i8) == 1, "i8 type not 8 bit long!");
static_assert(sizeof(u16) == 2, "u16 type not 16 bit long!");
static_assert(sizeof(i16) == 2, "i16 type not 16 bit long!");
static_assert(sizeof(u32) == 4, "u32 type not 32 bit long!");
static_assert(sizeof(i32) == 4, "i32 type not 32 bit long!");
static_assert(sizeof(u64) == 8, "u64 type not 64 bit long!");
static_assert(sizeof(i64) == 8, "i64 type not 64 bit long!");
static_assert(sizeof(u128) == 16, "u128 type not 128 bit long!");
static_assert(sizeof(i128) == 16, "i128 type not 128 bit long!");
static_assert(sizeof(f32) == 4, "f32 type not 32 bit long!");
static_assert(sizeof(f64) == 8, "f64 type not 64 bit long!");
static_assert(sizeof(f128) == 16, "f128 type not 128 bit long!");
<|start_filename|>emulator/source/ui/window.cpp<|end_filename|>
#include <ui/window.hpp>
#include <chrono>
#include <stdexcept>
#include <thread>
#include <log.hpp>
#include <ui/views/view.hpp>
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <imgui.h>
#include <imgui_internal.h>
#include <fontawesome_font.h>
#include <imgui_impl_opengl3.h>
#include <imgui_impl_glfw.h>
namespace vc::ui {
Window::Window() {
this->initGLFW();
this->initImGui();
}
Window::~Window() {
this->deinitImGui();
this->deinitGLFW();
for (auto &view : this->views)
delete view;
}
void Window::loop() {
this->lastFrameTime = glfwGetTime();
while (!glfwWindowShouldClose(this->windowHandle)) {
if (!glfwGetWindowAttrib(this->windowHandle, GLFW_VISIBLE) || glfwGetWindowAttrib(this->windowHandle, GLFW_ICONIFIED))
glfwWaitEvents();
glfwPollEvents();
this->frameBegin();
this->frame();
this->frameEnd();
}
}
void Window::frameBegin() {
ImGui_ImplOpenGL3_NewFrame();
ImGui_ImplGlfw_NewFrame();
ImGui::NewFrame();
ImGuiViewport* viewport = ImGui::GetMainViewport();
ImGui::SetNextWindowPos(viewport->GetWorkPos());
ImGui::SetNextWindowSize(viewport->GetWorkSize());
ImGui::SetNextWindowViewport(viewport->ID);
ImGui::PushStyleVar(ImGuiStyleVar_WindowRounding, 0.0f);
ImGui::PushStyleVar(ImGuiStyleVar_WindowBorderSize, 0.0f);
ImGuiWindowFlags windowFlags = ImGuiWindowFlags_MenuBar | ImGuiWindowFlags_NoDocking
| ImGuiWindowFlags_NoTitleBar | ImGuiWindowFlags_NoCollapse
| ImGuiWindowFlags_NoMove | ImGuiWindowFlags_NoResize
| ImGuiWindowFlags_NoNavFocus | ImGuiWindowFlags_NoBringToFrontOnFocus
| ImGuiWindowFlags_NoScrollbar | ImGuiWindowFlags_NoScrollWithMouse;
ImGui::GetIO().ConfigFlags |= ImGuiConfigFlags_NavEnableKeyboard;
if (ImGui::Begin("DockSpace", nullptr, windowFlags)) {
ImGui::PopStyleVar(2);
ImGui::DockSpace(ImGui::GetID("MainDock"), ImVec2(0.0f, ImGui::GetContentRegionAvail().y - ImGui::GetTextLineHeightWithSpacing() - 1));
if (ImGui::BeginMenuBar()) {
if (ImGui::BeginMenu("File")) ImGui::EndMenu();
ImGui::EndMenuBar();
}
}
ImGui::End();
}
void Window::frame() {
for (auto &view : this->views)
view->draw();
}
void Window::frameEnd() {
ImGui::Render();
int displayWidth, displayHeight;
glfwGetFramebufferSize(this->windowHandle, &displayWidth, &displayHeight);
glViewport(0, 0, displayWidth, displayHeight);
glClearColor(0.45f, 0.55f, 0.60f, 1.00f);
glClear(GL_COLOR_BUFFER_BIT);
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
GLFWwindow* backup_current_context = glfwGetCurrentContext();
ImGui::UpdatePlatformWindows();
ImGui::RenderPlatformWindowsDefault();
glfwMakeContextCurrent(backup_current_context);
glfwSwapBuffers(this->windowHandle);
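// Crude frame limiter: sleep for the remainder of the frame budget, dropping to
// roughly 5 FPS while no ImGui window has focus.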
std::this_thread::sleep_for(std::chrono::milliseconds(u64((this->lastFrameTime + 1 / (ImGui::IsWindowFocused(ImGuiFocusedFlags_AnyWindow) ? this->targetFps : 5.0) - glfwGetTime()) * 1000)));
this->lastFrameTime = glfwGetTime();
}
void Window::initGLFW() {
glfwSetErrorCallback([](int error, const char* desc) {
log::error("GLFW Error [{}] : {}", error, desc);
});
if (!glfwInit())
throw std::runtime_error("Failed to initialize GLFW!");
#ifdef __APPLE__
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
#endif
if (auto *monitor = glfwGetPrimaryMonitor(); monitor) {
float xscale, yscale;
glfwGetMonitorContentScale(monitor, &xscale, &yscale);
}
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
this->windowHandle = glfwCreateWindow(1280, 720, "Virtual Console", nullptr, nullptr);
glfwSetWindowUserPointer(this->windowHandle, this);
if (this->windowHandle == nullptr)
throw std::runtime_error("Failed to create window!");
glfwMakeContextCurrent(this->windowHandle);
glfwSwapInterval(1);
glfwSetWindowPosCallback(this->windowHandle, [](GLFWwindow *window, int x, int y) {
auto win = static_cast<Window*>(glfwGetWindowUserPointer(window));
win->frameBegin();
win->frame();
win->frameEnd();
});
glfwSetWindowSizeCallback(this->windowHandle, [](GLFWwindow *window, int width, int height) {
auto win = static_cast<Window*>(glfwGetWindowUserPointer(window));
win->frameBegin();
win->frame();
win->frameEnd();
});
glfwSetKeyCallback(this->windowHandle, [](GLFWwindow *window, int key, int scancode, int action, int mods) {
auto keyName = glfwGetKeyName(key, scancode);
if (keyName != nullptr)
key = std::toupper(keyName[0]);
if (action == GLFW_PRESS) {
auto &io = ImGui::GetIO();
io.KeysDown[key] = true;
io.KeyCtrl = (mods & GLFW_MOD_CONTROL) != 0;
io.KeyShift = (mods & GLFW_MOD_SHIFT) != 0;
io.KeyAlt = (mods & GLFW_MOD_ALT) != 0;
}
else if (action == GLFW_RELEASE) {
auto &io = ImGui::GetIO();
io.KeysDown[key] = false;
io.KeyCtrl = (mods & GLFW_MOD_CONTROL) != 0;
io.KeyShift = (mods & GLFW_MOD_SHIFT) != 0;
io.KeyAlt = (mods & GLFW_MOD_ALT) != 0;
}
});
glfwSetWindowSizeLimits(this->windowHandle, 720, 480, GLFW_DONT_CARE, GLFW_DONT_CARE);
if (gladLoadGL() == 0)
throw std::runtime_error("Failed to initialize OpenGL loader!");
}
void Window::initImGui() {
IMGUI_CHECKVERSION();
GImGui = ImGui::CreateContext();
ImGuiIO& io = ImGui::GetIO();
ImGuiStyle& style = ImGui::GetStyle();
io.ConfigFlags |= ImGuiConfigFlags_DockingEnable | ImGuiConfigFlags_NavEnableKeyboard;
#if !defined(OS_LINUX)
io.ConfigFlags |= ImGuiConfigFlags_ViewportsEnable;
#endif
io.ConfigViewportsNoTaskBarIcon = true;
io.KeyMap[ImGuiKey_Tab] = GLFW_KEY_TAB;
io.KeyMap[ImGuiKey_LeftArrow] = GLFW_KEY_LEFT;
io.KeyMap[ImGuiKey_RightArrow] = GLFW_KEY_RIGHT;
io.KeyMap[ImGuiKey_UpArrow] = GLFW_KEY_UP;
io.KeyMap[ImGuiKey_DownArrow] = GLFW_KEY_DOWN;
io.KeyMap[ImGuiKey_PageUp] = GLFW_KEY_PAGE_UP;
io.KeyMap[ImGuiKey_PageDown] = GLFW_KEY_PAGE_DOWN;
io.KeyMap[ImGuiKey_Home] = GLFW_KEY_HOME;
io.KeyMap[ImGuiKey_End] = GLFW_KEY_END;
io.KeyMap[ImGuiKey_Insert] = GLFW_KEY_INSERT;
io.KeyMap[ImGuiKey_Delete] = GLFW_KEY_DELETE;
io.KeyMap[ImGuiKey_Backspace] = GLFW_KEY_BACKSPACE;
io.KeyMap[ImGuiKey_Space] = GLFW_KEY_SPACE;
io.KeyMap[ImGuiKey_Enter] = GLFW_KEY_ENTER;
io.KeyMap[ImGuiKey_Escape] = GLFW_KEY_ESCAPE;
io.KeyMap[ImGuiKey_KeyPadEnter] = GLFW_KEY_KP_ENTER;
io.KeyMap[ImGuiKey_A] = GLFW_KEY_A;
io.KeyMap[ImGuiKey_C] = GLFW_KEY_C;
io.KeyMap[ImGuiKey_V] = GLFW_KEY_V;
io.KeyMap[ImGuiKey_X] = GLFW_KEY_X;
io.KeyMap[ImGuiKey_Y] = GLFW_KEY_Y;
io.KeyMap[ImGuiKey_Z] = GLFW_KEY_Z;
io.Fonts->Clear();
ImFontConfig cfg;
cfg.OversampleH = cfg.OversampleV = 1, cfg.PixelSnapH = true;
cfg.SizePixels = 13.0f;
io.Fonts->AddFontDefault(&cfg);
cfg.MergeMode = true;
ImWchar fontAwesomeRange[] = {
ICON_MIN_FA, ICON_MAX_FA,
0
};
std::uint8_t *px;
int w, h;
io.Fonts->AddFontFromMemoryCompressedTTF(font_awesome_compressed_data, font_awesome_compressed_size, 11.0f, &cfg, fontAwesomeRange);
io.Fonts->GetTexDataAsRGBA32(&px, &w, &h);
// Create new font atlas
GLuint tex;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, px);
io.Fonts->SetTexID(reinterpret_cast<ImTextureID>(tex));
style.WindowMenuButtonPosition = ImGuiDir_None;
style.IndentSpacing = 10.0F;
ImGui_ImplGlfw_InitForOpenGL(this->windowHandle, true);
ImGui_ImplOpenGL3_Init("#version 150");
}
void Window::deinitGLFW() {
glfwDestroyWindow(this->windowHandle);
glfwTerminate();
}
void Window::deinitImGui() {
ImGui_ImplOpenGL3_Shutdown();
ImGui_ImplGlfw_Shutdown();
ImGui::DestroyContext();
}
}
<|start_filename|>emulator/include/ui/views/view_control.hpp<|end_filename|>
#pragma once
#include <ui/views/view.hpp>
#include <chrono>
#include <memory>
#include <thread>
#include <future>
#include <board/board_test.hpp>
namespace vc::ui {
class ViewControl : public View {
public:
explicit ViewControl(pcb::Board &board) : View("Control"), board(board) {
}
~ViewControl() override {
this->board.powerDown();
if (this->boardThread.joinable())
this->boardThread.join();
}
void drawContent() override {
ImGui::Disabled([this] {
if (ImGui::Button("Power up PCB")) {
if (!this->boardRunning) {
this->boardRunning = true;
this->boardThread = std::thread([this] {
this->board.powerUp();
this->boardRunning = false;
});
}
}
}, this->boardRunning);
ImGui::Disabled([this] {
if (ImGui::Button("Unplug PCB")) {
this->board.powerDown();
}
}, !this->boardRunning);
if (this->boardRunning) {
ImGui::TextSpinner("PCB running...");
} else if (this->boardThread.joinable()) {
this->boardThread.join();
}
}
private:
pcb::Board &board;
std::thread boardThread;
bool boardRunning = false;
};
}
<|start_filename|>emulator/include/devices/cpu/core/registers.hpp<|end_filename|>
#pragma once
namespace vc::dev::cpu {
struct Registers {
struct Register {
constexpr virtual Register& operator=(u64) = 0;
constexpr virtual operator u64() = 0;
};
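// x0 is hard-wired to zero: writes are discarded and reads always return 0.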
struct ZeroRegister : public Register {
constexpr ZeroRegister& operator=(u64) override { return *this; }
constexpr operator u64() override { return 0; }
};
struct GPRegister : public Register {
constexpr GPRegister& operator=(u64 v) override { this->value = v; return *this; }
constexpr operator u64() override { return this->value; }
private:
u64 value = 0;
};
struct {
constexpr Register& operator[](u8 index) {
if (index == 0) {
return zeroRegister;
} else if (index <= 31) {
return gpRegisters[index];
} else {
__builtin_unreachable();
}
}
private:
ZeroRegister zeroRegister;
GPRegister gpRegisters[32];
} x;
ZeroRegister &zero = static_cast<ZeroRegister&>(x[0]);
GPRegister &ra = static_cast<GPRegister&>(x[1]);
GPRegister &sp = static_cast<GPRegister&>(x[2]);
GPRegister &gp = static_cast<GPRegister&>(x[3]);
GPRegister &tp = static_cast<GPRegister&>(x[4]);
GPRegister &t0 = static_cast<GPRegister&>(x[5]);
GPRegister &t1 = static_cast<GPRegister&>(x[6]);
GPRegister &t2 = static_cast<GPRegister&>(x[7]);
GPRegister &fp = static_cast<GPRegister&>(x[8]);
GPRegister &s0 = static_cast<GPRegister&>(x[8]);
GPRegister &s1 = static_cast<GPRegister&>(x[9]);
GPRegister &a0 = static_cast<GPRegister&>(x[10]);
GPRegister &a1 = static_cast<GPRegister&>(x[11]);
GPRegister &a2 = static_cast<GPRegister&>(x[12]);
GPRegister &a3 = static_cast<GPRegister&>(x[13]);
GPRegister &a4 = static_cast<GPRegister&>(x[14]);
GPRegister &a5 = static_cast<GPRegister&>(x[15]);
GPRegister &a6 = static_cast<GPRegister&>(x[16]);
GPRegister &a7 = static_cast<GPRegister&>(x[17]);
GPRegister &s2 = static_cast<GPRegister&>(x[18]);
GPRegister &s3 = static_cast<GPRegister&>(x[19]);
GPRegister &s4 = static_cast<GPRegister&>(x[20]);
GPRegister &s5 = static_cast<GPRegister&>(x[21]);
GPRegister &s6 = static_cast<GPRegister&>(x[22]);
GPRegister &s7 = static_cast<GPRegister&>(x[23]);
GPRegister &s8 = static_cast<GPRegister&>(x[24]);
GPRegister &s9 = static_cast<GPRegister&>(x[25]);
GPRegister &s10 = static_cast<GPRegister&>(x[26]);
GPRegister &s11 = static_cast<GPRegister&>(x[27]);
GPRegister &t3 = static_cast<GPRegister&>(x[28]);
GPRegister &t4 = static_cast<GPRegister&>(x[29]);
GPRegister &t5 = static_cast<GPRegister&>(x[30]);
GPRegister &t6 = static_cast<GPRegister&>(x[31]);
u64 pc = 0;
};
}
<|start_filename|>emulator/include/devices/cpu/core/address_space.hpp<|end_filename|>
#pragma once
#include <risc.hpp>
#include <set>
#include <devices/cpu/core/mmio/device.hpp>
#include <utils.hpp>
#include <elf.hpp>
namespace vc::dev::cpu {
struct AccessFaultException : public std::exception { };
struct UnalignedAccessException : public std::exception { };
struct byte_tag {};
struct hword_tag {};
struct word_tag {};
struct dword_tag {};
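// Access width is selected by tag dispatch: operator()(address, byte_tag{}) and
// friends return a reference into whichever MMIO device maps the address, and
// throw AccessFaultException when no device claims it.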
class AddressSpace {
public:
void addDevice(mmio::MMIODevice &device) {
for (const auto mappedDevice : this->devices) {
// Reject any overlap between the new device's range and an already mapped one
if (device.getBase() < mappedDevice->getEnd() && mappedDevice->getBase() < device.getEnd())
log::fatal("Tried to map device to occupied address range");
}
this->devices.insert(&device);
}
auto& operator()(u64 address, byte_tag) {
auto device = findDevice(address, 1);
if (device == nullptr) {
log::error("Invalid memory access at {:#x}", address);
throw AccessFaultException();
}
return device->byte(address - device->getBase());
}
auto& operator()(u64 address, hword_tag) {
auto device = findDevice(address, 2);
if (device == nullptr) {
log::error("Invalid memory access at {:#x}", address);
throw AccessFaultException();
}
return device->halfWord(address - device->getBase());
}
auto& operator()(u64 address, word_tag) {
auto device = findDevice(address, 4);
if (device == nullptr) {
log::error("Invalid memory access at {:#x}", address);
throw AccessFaultException();
}
return device->word(address - device->getBase());
}
auto& operator()(u64 address, dword_tag) {
auto device = findDevice(address, 8);
if (device == nullptr) {
log::error("Invalid memory access at {:#x}", address);
throw AccessFaultException();
}
return device->doubleWord(address - device->getBase());
}
void tickDevices() {
for (auto &device : this->devices)
device->doTick();
}
bool loadELF(std::string_view path) {
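// Reads the whole ELF image into a buffer, then copies each loadable segment's
// file contents to its physical load address through the MMIO address space.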
std::vector<u8> buffer;
{
FILE *file = fopen(path.data(), "rb");
if (file == nullptr) return false;
ON_SCOPE_EXIT { fclose(file); };
fseek(file, 0, SEEK_END);
size_t size = ftell(file);
rewind(file);
buffer.resize(size, 0x00);
fread(buffer.data(), buffer.size(), 1, file);
}
{
elf64_hdr elfHeader = { 0 };
std::memcpy(&elfHeader, buffer.data() + 0, sizeof(elf64_hdr));
std::vector<elf64_phdr> programHeader(elfHeader.e_phnum, { 0 });
std::memcpy(programHeader.data(), buffer.data() + elfHeader.e_phoff, elfHeader.e_phentsize * programHeader.size());
for (const auto &pheader : programHeader) {
if (pheader.p_type != PT_LOAD) continue;
for (u32 offset = 0; offset < pheader.p_filesz; offset++)
this->operator()(pheader.p_paddr + offset, byte_tag{}) = buffer[pheader.p_offset + offset];
log::info("Mapped segment to {:#x}:{:#x}", pheader.p_paddr, pheader.p_paddr + pheader.p_memsz);
}
}
return true;
}
[[nodiscard]]
auto& getDevices() {
return this->devices;
}
private:
[[nodiscard]]
mmio::MMIODevice* findDevice(u64 address, u8 accessSize) const {
auto device = std::find_if(devices.begin(), devices.end(), [&](mmio::MMIODevice *curr){
return address >= curr->getBase() && address <= curr->getEnd() - accessSize;
});
if (device == devices.end()) return nullptr;
return *device;
}
std::set<mmio::MMIODevice*> devices;
};
}
<|start_filename|>emulator/include/devices/led.hpp<|end_filename|>
#pragma once
#include <board/track.hpp>
namespace vc::dev {
class LED : public Device, public pcb::Connectable {
public:
explicit LED(ImVec2 pos) {
this->setPosition(pos);
this->setSize({ 20, 10 });
}
auto get(std::string_view name) {
return this->getTrack(name);
}
void tick() override {
for (auto &trackName : this->getConnectedTrackNames()) {
auto track = this->getTrack(trackName);
if (track->hasValue())
glowing = track->getValue().value();
}
}
bool needsUpdate() override { return true; }
void reset() override {
glowing = false;
}
void draw(ImVec2 start, ImDrawList *drawList) override {
drawList->AddRectFilled(start + getPosition(), start + getPosition() + getSize(), ImColor(0xA0, 0xA0, 0xA0, 0xFF));
drawList->AddRectFilled(start + getPosition() + ImVec2(5, 0), start + getPosition() + getSize() - ImVec2(5, 0), glowing ? ImColor(0xA0, 0x10, 0x10, 0xFF) : ImColor(0x30, 0x10, 0x10, 0xFF));
}
private:
bool glowing = false;
};
}
<|start_filename|>emulator/include/ui/views/view.hpp<|end_filename|>
#pragma once
#include <risc.hpp>
#include <imgui.h>
#include <imgui_vc_extensions.h>
#include <fontawesome_font.h>
#include <string>
#include <string_view>
namespace vc::ui {
class View {
public:
explicit View(std::string_view name) : viewName(name) { }
virtual ~View() = default;
virtual void drawContent() = 0;
virtual void draw() final {
if (ImGui::Begin(this->viewName.c_str()))
this->drawContent();
ImGui::End();
}
private:
std::string viewName;
};
}
<|start_filename|>emulator/include/ui/views/view_pcb.hpp<|end_filename|>
#pragma once
#include <ui/views/view.hpp>
#include <memory>
#include <string>
#include <thread>
#include <board/board_test.hpp>
#define IMGUI_DEFINE_MATH_OPERATORS
#include <imgui_internal.h>
namespace vc::ui {
class ViewPCB : public View {
public:
explicit ViewPCB(pcb::Board &board) : View("PCB"), board(board) { }
void drawContent() override {
auto drawList = ImGui::GetWindowDrawList();
auto windowPos = ImGui::GetWindowPos();
auto windowSize = ImGui::GetWindowSize();
board.setPosition(windowPos + (windowSize - board.getDimensions()) / 2);
board.draw(drawList);
}
private:
pcb::Board &board;
std::string console = "Console: ";
};
}
<|start_filename|>emulator/include/devices/cpu/cpu.hpp<|end_filename|>
#pragma once
#include <devices/device.hpp>
#include <devices/cpu/core/core.hpp>
#include <devices/cpu/core/io_pin.hpp>
#include <array>
#include <utils.hpp>
namespace vc::dev {
class CPUDevice : public vc::dev::Device, public pcb::Connectable {
public:
explicit CPUDevice(u32 numCores, ImVec2 pos) {
for (u32 i = 0; i < numCores; i++)
this->cores.emplace_back(addressSpace);
this->setPosition(pos);
this->setSize({ 100, 100 });
for (auto &mmio : this->addressSpace.getDevices()) {
if (auto connectable = dynamic_cast<pcb::Connectable*>(mmio); connectable != nullptr) {
connectable->setPosition(this->getPosition());
connectable->setSize(this->getSize());
}
}
}
~CPUDevice() override = default;
void tick() override {
for (auto &core : this->cores) {
try {
core.execute();
} catch (cpu::AccessFaultException &e) {
core.halt("Access Fault exception thrown!");
} catch (cpu::UnalignedAccessException &e) {
core.halt("Unaligned Access exception thrown!");
} catch (...) {
core.halt("Unknown exception thrown!");
}
}
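// Shuttle data between the CPU's IO pins and the PCB tracks they are mapped to:
// MOSI tracks are driven from pins, MISO tracks drive the pins.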
for (auto &[trackName, pinNumber] : this->pinToTrackConnections) {
auto &pin = this->pins[pinNumber];
auto track = this->getTrack(trackName);
if (track->getDirection() == pcb::Direction::MOSI && pin->hasValue()) {
track->setValue(pin->getValue().value());
}
if (track->getDirection() == pcb::Direction::MISO && track->hasValue()) {
pin->setValue(track->getValue().value());
}
}
}
bool needsUpdate() override {
for (auto &core : this->cores) {
if (!core.isHalted())
return true;
}
return false;
}
void reset() override {
for (auto &core : this->cores)
core.reset();
}
auto& getAddressSpace() {
return this->addressSpace;
}
void draw(ImVec2 start, ImDrawList *drawList) override {
drawList->AddRectFilled(start + getPosition(), start + getPosition() + getSize(), ImColor(0x10, 0x10, 0x10, 0xFF));
drawList->AddText(start + getPosition() + ImVec2(10, 10), ImColor(0xFFFFFFFF), fmt::format("RISC-V\n {} Core", this->cores.size()).c_str());
if (ImGui::IsMouseHoveringRect(start + getPosition(), start + getPosition() + getSize())) {
ImGui::BeginTooltip();
if (this->needsUpdate())
ImGui::TextSpinner("Running...");
else
ImGui::TextUnformatted("Halted");
ImGui::Separator();
for (auto &device : this->getAddressSpace().getDevices()) {
ImGui::TextUnformatted(fmt::format("{}: 0x{:016X} - 0x{:016X}", device->getName(), device->getBase(), device->getEnd()).c_str());
}
ImGui::EndTooltip();
}
}
void attachToPin(u32 pinNumber, cpu::IOPin &pin) {
this->pins.insert({ pinNumber, &pin });
}
void attachPinToTrack(u32 pinNumber, std::string_view trackName) {
this->pinToTrackConnections[std::string(trackName)] = pinNumber;
}
private:
cpu::AddressSpace addressSpace;
std::vector<cpu::Core> cores;
std::map<u32, cpu::IOPin*> pins;
std::map<std::string, u32> pinToTrackConnections;
};
}
<|start_filename|>emulator/include/devices/cpu/core/mmio/memory.hpp<|end_filename|>
#pragma once
#include <devices/cpu/core/mmio/device.hpp>
#include <numeric>
namespace vc::dev::cpu::mmio {
class Memory : public MMIODevice {
public:
Memory(u64 base, size_t size) : MMIODevice("Internal Memory", base, size) {
this->data.resize(size);
}
[[nodiscard]]
u8& byte(u64 offset) noexcept override {
return this->data[offset];
}
[[nodiscard]]
u16& halfWord(u64 offset) noexcept override {
return *reinterpret_cast<u16*>(&this->data[offset]);
}
[[nodiscard]]
u32& word(u64 offset) noexcept override {
return *reinterpret_cast<u32*>(&this->data[offset]);
}
[[nodiscard]]
u64& doubleWord(u64 offset) noexcept override {
return *reinterpret_cast<u64*>(&this->data[offset]);
}
private:
std::vector<u8> data;
};
}
<|start_filename|>emulator/source/devices/cpu/core/core.cpp<|end_filename|>
#include <devices/cpu/core/core.hpp>
#include <utils.hpp>
#define INSTRUCTION(category, type, ...) { .category = { .type = { __VA_ARGS__ } } }
#define INSTR_LOG(fmt, ...) log::debug("({:#x}) " fmt, regs.pc, __VA_ARGS__)
namespace vc::dev::cpu {
[[nodiscard]]
constexpr u8 getOpcode(const u8 &address) {
return address & 0b0111'1111;
}
void Core::execute() {
if (this->halted) return;
auto opcode = getOpcode(this->addressSpace(regs.pc, byte_tag()));
/* Check if instruction is compressed */
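/* Per the RISC-V spec, bits [1:0] of a full 32-bit instruction are 0b11; any other value marks a 16-bit compressed encoding */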
if ((opcode & 0b11) != 0b11) {
const auto &instr = reinterpret_cast<CompressedInstruction&>(this->addressSpace(regs.pc, hword_tag()));
this->nextPC = regs.pc + CompressedInstructionSize;
executeCompressedInstruction(instr);
} else {
const auto &instr = reinterpret_cast<Instruction&>(this->addressSpace(regs.pc, word_tag()));
this->nextPC = regs.pc + InstructionSize;
executeInstruction(instr);
}
addressSpace.tickDevices();
regs.pc = this->nextPC;
}
constexpr void Core::executeInstruction(const Instruction &instr) {
switch (instr.getOpcode()) {
case Opcode::OP_IMM:
executeOPIMMInstruction(instr);
break;
case Opcode::OP_IMM32:
executeOPIMM32Instruction(instr);
break;
case Opcode::OP:
executeOPInstruction(instr);
break;
case Opcode::STORE:
executeSTOREInstruction(instr);
break;
case Opcode::LOAD:
executeLOADInstruction(instr);
break;
case Opcode::AUIPC:
{
auto &i = instr.Base.U;
INSTR_LOG("AUIPC x{}, #{:#x}", i.rd, i.getImmediate());
regs.x[i.rd] = regs.pc + util::signExtend<32, i64>(i.getImmediate());
break;
}
case Opcode::JAL:
{
auto &i = instr.Immediate.J;
INSTR_LOG("JAL #{:#x}", regs.pc + (util::signExtend<20, i64>(i.getImmediate()) * 2));
auto link = this->nextPC;
this->nextPC = regs.pc + util::signExtend<20, i64>(i.getImmediate()) * 2;
regs.x[i.rd] = link;
break;
}
case Opcode::JALR:
{
auto &i = instr.Base.I;
INSTR_LOG("JALR x{}, x{}, #{:#x}", i.rd, i.rs1, util::signExtend<12, i64>(i.getImmediate()));
auto link = this->nextPC;
this->nextPC = (util::signExtend<12, i64>(i.getImmediate()) + regs.x[i.rs1]) & u64(~0b1);
regs.x[i.rd] = link;
break;
}
case Opcode::BRANCH:
executeBRANCHInstruction(instr);
break;
case Opcode::LUI:
{
auto &i = instr.Base.U;
INSTR_LOG("LUI x{}, #{:#x}", i.rd, util::signExtend<20, i64>(i.getImmediate()));
regs.x[i.rd] = util::signExtend<12, u64>(i.getImmediate());
break;
}
default: this->halt("Invalid instruction {:x}", instr.getOpcode());
}
}
constexpr void Core::executeOPInstruction(const Instruction &instr) {
const auto &i = instr.Base.R;
#define IS_FUNC(instruction, type) (instruction.funct3 == instr_t(OPFunc3::type) && instruction.funct7 == instr_t(OPFunc7::type))
if (IS_FUNC(i, ADD)) {
regs.x[i.rd] = regs.x[i.rs1] + regs.x[i.rs2];
} else {
this->halt("Invalid OP function {:x} {:x}", i.funct3, i.funct7);
}
#undef IS_FUNC
}
constexpr void Core::executeOPIMMInstruction(const Instruction &instr) {
const auto &i = instr.Base.I;
switch (static_cast<OPIMMFunc>(instr.getFunction3())) {
case OPIMMFunc::XORI:
{
INSTR_LOG("XORI x{}, x{}, #{:#x}", i.rd, i.rs1, util::signExtend<12, i64>(i.getImmediate()));
regs.x[i.rd] = regs.x[i.rs1] ^ util::signExtend<12, i64>(i.getImmediate());
break;
}
case OPIMMFunc::ORI:
{
INSTR_LOG("ORI x{}, x{}, #{:#x}", i.rd, i.rs1, util::signExtend<12, i64>(i.getImmediate()));
regs.x[i.rd] = regs.x[i.rs1] | util::signExtend<12, i64>(i.getImmediate());
break;
}
case OPIMMFunc::ADDI:
{
INSTR_LOG("ADDI x{}, x{}, #{:#x}", i.rd, i.rs1, util::signExtend<12, i64>(i.getImmediate()));
regs.x[i.rd] = regs.x[i.rs1] + util::signExtend<12, i64>(i.getImmediate());
break;
}
case OPIMMFunc::ANDI:
{
INSTR_LOG("ANDI x{}, x{}, #{:#x}", i.rd, i.rs1, util::signExtend<12, i64>(i.getImmediate()));
regs.x[i.rd] = regs.x[i.rs1] & util::signExtend<12, i64>(i.getImmediate());
break;
}
default: this->halt("Invalid OPIMM function {:x}", instr.getFunction3());
}
}
constexpr void Core::executeOPIMM32Instruction(const Instruction &instr) {
const auto &i = instr.Base.I;
switch (static_cast<OPIMM32Func>(instr.getFunction3())) {
case OPIMM32Func::ADDIW:
{
INSTR_LOG("ADDIW x{}, x{}, #{:#x}", i.rd, i.rs1, util::signExtend<12, i32>(i.getImmediate()));
regs.x[i.rd] = util::signExtend<32, i64>((util::signExtend<12, i32>(i.getImmediate()) + regs.x[i.rs1]) & 0xFFFF'FFFF);
break;
}
default: this->halt("Invalid OPIMM32 function {:x}", instr.getFunction3());
}
}
constexpr void Core::executeBRANCHInstruction(const Instruction &instr) {
const auto &i = instr.Immediate.B;
switch (static_cast<BRANCHFunc>(instr.getFunction3())) {
case BRANCHFunc::BEQ:
{
INSTR_LOG("BEQ x{}, x{}, #{:#x}", i.rs1, i.rs2, regs.pc + (util::signExtend<20, i64>(i.getImmediate()) * 2));
if (regs.x[i.rs1] == regs.x[i.rs2]) {
this->nextPC = regs.pc + util::signExtend<20, i64>(i.getImmediate()) * 2;
}
break;
}
case BRANCHFunc::BNE:
{
INSTR_LOG("BNE x{}, x{}, #{:#x}", i.rs1, i.rs2, regs.pc + (util::signExtend<20, i64>(i.getImmediate()) * 2));
if (regs.x[i.rs1] != regs.x[i.rs2]) {
this->nextPC = regs.pc + util::signExtend<20, i64>(i.getImmediate()) * 2;
}
break;
}
default: this->halt("Invalid BRANCH function {:x}", instr.getFunction3());
}
}
constexpr void Core::executeLOADInstruction(const Instruction &instr) {
const auto &i = instr.Base.I;
switch (static_cast<LOADFunc>(instr.getFunction3())) {
case LOADFunc::LB:
{
INSTR_LOG("LB x{}, #{:#x}(x{})", i.rd, util::signExtend<12, i32>(i.getImmediate()), i.rs1);
regs.x[i.rd] = addressSpace(regs.x[i.rs1] + util::signExtend<12, i32>(i.getImmediate()), byte_tag{});
break;
}
case LOADFunc::LD:
{
INSTR_LOG("LD x{}, #{:#x}(x{})", i.rd, util::signExtend<12, i32>(i.getImmediate()), i.rs1);
regs.x[i.rd] = addressSpace(regs.x[i.rs1] + util::signExtend<12, i32>(i.getImmediate()), dword_tag{});
break;
}
case LOADFunc::LBU:
{
INSTR_LOG("LBU x{}, #{:#x}(x{})", i.rd, i.getImmediate(), i.rs1);
regs.x[i.rd] = addressSpace(regs.x[i.rs1] + i.getImmediate(), byte_tag{});
break;
}
default: this->halt("Invalid LOAD function {:x}", instr.getFunction3());
}
}
constexpr void Core::executeSTOREInstruction(const Instruction &instr) {
const auto &i = instr.Base.S;
switch (static_cast<STOREFunc>(instr.getFunction3())) {
case STOREFunc::SB:
{
INSTR_LOG("SB x{}, #{:#x}(x{})", i.rs2, util::signExtend<12, i32>(i.getImmediate()), i.rs1);
addressSpace(util::signExtend<12, i64>(i.getImmediate()) + regs.x[i.rs1], byte_tag{}) = regs.x[i.rs2];
break;
}
case STOREFunc::SH:
{
INSTR_LOG("SH x{}, #{:#x}(x{})", i.rs2, util::signExtend<12, i32>(i.getImmediate()), i.rs1);
addressSpace(util::signExtend<12, i64>(i.getImmediate()) + regs.x[i.rs1], hword_tag{}) = regs.x[i.rs2];
break;
}
case STOREFunc::SW:
{
INSTR_LOG("SW x{}, #{:#x}(x{})", i.rs2, util::signExtend<12, i32>(i.getImmediate()), i.rs1);
addressSpace(util::signExtend<12, i64>(i.getImmediate()) + regs.x[i.rs1], word_tag{}) = regs.x[i.rs2];
break;
}
case STOREFunc::SD:
{
INSTR_LOG("SD x{}, #{:#x}(x{})", i.rs2, util::signExtend<12, i32>(i.getImmediate()), i.rs1);
addressSpace(util::signExtend<12, i64>(i.getImmediate()) + regs.x[i.rs1], dword_tag{}) = regs.x[i.rs2];
break;
}
default: this->halt("Invalid STORE function {:x}", instr.getFunction3());
}
}
/* Compressed instructions */
constexpr void Core::executeCompressedInstruction(const CompressedInstruction &instr) {
switch (instr.getOpcode()) {
case CompressedOpcode::C0:
executeC0Instruction(instr);
break;
case CompressedOpcode::C1:
executeC1Instruction(instr);
break;
case CompressedOpcode::C2:
executeC2Instruction(instr);
break;
default: this->halt("Unknown compressed opcode {:x}!", instr.getOpcode());
}
}
constexpr void Core::executeC0Instruction(const CompressedInstruction &instr) {
Instruction expanded = { 0 };
switch (static_cast<C0Funct>(instr.getFunction3())) {
case C0Funct::C_ADDI4SPN:
{
auto &i = instr.CIW;
if (i.imm == 0)
this->halt("Illegal instruction at {:#x}!", regs.pc);
expanded = INSTRUCTION(Base, I, .opcode = instr_t(Opcode::OP_IMM), .rd = instr_t(i.rd + 8), .funct3 = instr_t(OPIMMFunc::ADDI), .rs1 = 2);
expanded.Base.I.setImmediate(i.imm / 4);
break;
}
default: this->halt("Invalid C0 function {:x}", instr.getFunction3());
}
executeInstruction(expanded);
}
constexpr void Core::executeC1Instruction(const CompressedInstruction &instr) {
Instruction expanded = { 0 };
switch (static_cast<C1Funct>(instr.getFunction3())) {
case C1Funct::C_ADDI:
{
auto &i = instr.CI;
expanded = INSTRUCTION(Base, I, .opcode = instr_t(Opcode::OP_IMM), .rd = i.rd, .funct3 = instr_t(OPIMMFunc::ADDI), .rs1 = i.rd);
expanded.Base.I.setImmediate(util::signExtend<6, i32>((i.imm3 << 5) | (i.imm2 << 3) | (i.imm1)));
break;
}
case C1Funct::C_ADDIW:
{
auto &i = instr.CI;
expanded = INSTRUCTION(Base, I, .opcode = instr_t(Opcode::OP_IMM32), .rd = i.rd, .funct3 = instr_t(OPIMM32Func::ADDIW), .rs1 = i.rd);
expanded.Base.I.setImmediate(util::signExtend<6, i32>((i.imm3 << 5) | (i.imm2 << 3) | (i.imm1)));
break;
}
case C1Funct::C_LI:
{
auto &i = instr.CI;
expanded = INSTRUCTION(Base, I, .opcode = instr_t(Opcode::OP_IMM), .rd = i.rd, .funct3 = instr_t(OPIMMFunc::ADDI), .rs1 = 0);
expanded.Base.I.setImmediate(util::signExtend<6, u32>((i.imm3 << 5) | (i.imm2 << 3) | (i.imm1)));
break;
}
case C1Funct::C_LUI:
{
auto &i = instr.CI;
expanded = INSTRUCTION(Base, I, .opcode = instr_t(Opcode::OP_IMM), .rd = i.rd, .funct3 = instr_t(OPIMMFunc::ADDI), .rs1 = i.rd);
expanded.Base.I.setImmediate(util::signExtend<9, i32>((i.imm3 << 9) | ((i.imm1 >> 1) << 7) | ((i.imm2 & 0b01) << 6) | ((i.imm1 & 0b001) << 5) | (((i.imm1 & 0b010) >> 1) << 4)));
break;
}
case C1Funct::C_ANDI:
{
auto &i = instr.CI;
expanded = INSTRUCTION(Base, I, .opcode = instr_t(Opcode::OP_IMM), .rd = i.rd, .funct3 = instr_t(OPIMMFunc::ANDI), .rs1 = i.rd);
expanded.Base.I.setImmediate(util::signExtend<6, i32>((i.imm3 << 5) | (i.imm2 << 3) | (i.imm1)));
break;
}
default: this->halt("Invalid C1 function {:x}", instr.getFunction3());
}
executeInstruction(expanded);
}
constexpr void Core::executeC2Instruction(const CompressedInstruction &instr) {
Instruction expanded = { 0 };
switch (static_cast<C2Funct>(instr.getFunction3())) {
case C2Funct::C_JUMP:
{
auto &i = instr.CR;
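/* funct4 == 0b1000 with rs2 != 0 encodes C.MV (rd = rs2); with rs2 == 0 it encodes C.JR (jump to the address held in rd). */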
if (i.rd != 0 && i.funct4 == 0b1000 && i.rs2 != 0) /* C.MV */ {
expanded = INSTRUCTION(Base, R, .opcode = instr_t(Opcode::OP), .rd = i.rd, .funct3 = instr_t(OPFunc3::ADD), .rs1 = 0, .rs2 = i.rs2, .funct7 = instr_t(OPFunc7::ADD)); /* C.MV expands to the register-register ADD (assumed here to be Opcode::OP rather than LOAD) */
} else if (i.rd != 0 && i.funct4 == 0b1000 && i.rs2 == 0) /* C.JR */ {
INSTR_LOG("C.JR x{}, x{}, #{:#x}", 0, i.rd, 0);
this->nextPC = regs.x[i.rd];
return;
} else {
this->halt("Invalid C2 C_JUMP function {:x}", instr.getFunction4());
}
break;
}
case C2Funct::C_LDSP:
{
auto &i = instr.CI;
expanded = INSTRUCTION(Base, I, .opcode = instr_t(Opcode::LOAD), .rd = i.rd, .funct3 = instr_t(LOADFunc::LD), .rs1 = 2);
expanded.Base.I.setImmediate((i.imm1 << 6) | (i.imm3 << 5) | (i.imm2 << 3));
break;
}
case C2Funct::C_SDSP:
{
auto &i = instr.CSS;
expanded = INSTRUCTION(Base, S, .opcode = instr_t(Opcode::STORE), .funct3 = instr_t(STOREFunc::SD), .rs1 = 2, .rs2 = i.rs2);
expanded.Base.S.setImmediate(i.imm);
break;
}
default: this->halt("Invalid C2 function {:x}", instr.getFunction3());
}
executeInstruction(expanded);
}
} | WerWolv/PCBEmulator |
<|start_filename|>src/utils/cssInjector.js<|end_filename|>
const sass = require('sass');
const fs = require('fs');
const path = require('path');
const BASE = `base.scss`;
const MAPPINGS = `mappings.scss`;
const HIDE_DIALER_SIDEBAR_CSS = `gv-call-sidebar { display: none }`;
module.exports = class Injector {
constructor(app, win) {
this.win = win;
this.app = app;
}
showHideDialerSidebar(hide) {
if (!this.win) return;
if (hide) {
this.win.webContents.insertCSS(HIDE_DIALER_SIDEBAR_CSS).then(key => {
this.sidebarStyleKey = key;
});
} else {
if (this.sidebarStyleKey) {
this.win.webContents.removeInsertedCSS(this.sidebarStyleKey);
}
}
}
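// Compile src/themes/<theme>.scss with sass, mark every declaration !important so it overrides the
// page's own styles, and insert the result into the window. Passing 'default' simply removes any
// previously injected theme.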
injectTheme(theme) {
if (this.styleKey) {
this.win.webContents.removeInsertedCSS(this.styleKey);
this.styleKey = null;
}
if (theme !== 'default') {
try {
const file = fs.readFileSync(path.join(this.app.getAppPath(), 'src', 'themes', `${theme}.scss`), 'utf-8');
const data = joinImports(this.app, file);
const result = sass.renderSync({data});
const styles = result.css.toString().replace(/;/g, ' !important;');
if (this.win) {
console.log(styles);
this.win.webContents.insertCSS(styles).then(key => {
this.styleKey = key;
});
}
} catch (e) {
console.log(e);
console.error(`Could not find theme ${theme}`);
}
}
}
}
/**
 * Sass's handling of `@use` imports isn't flexible enough for our needs: we want variables that are
 * shared across files, and we want to keep selectors and placeholder selectors in separate files for
 * readability. This helper simply splices the shared base and mapping files into the theme source and
 * lets sass compile the combined result.
 */
function joinImports(app, file) {
const base = fs.readFileSync(path.join(app.getAppPath(), 'src', 'themes', BASE), 'utf-8');
const mappings = fs.readFileSync(path.join(app.getAppPath(), 'src', 'themes', MAPPINGS), 'utf-8');
let contents = file.replace("@use 'base';", base);
contents = contents.replace("@use 'mappings';", mappings);
return contents;
} | jerrod-lankford/google-voice-desktop-app |
<|start_filename|>docs/Examples/Attachments/EzImport.js<|end_filename|>
const READWISE_API_OPTION = "Readwise API key";
const ARTICLE_FORMAT = "Article file name format";
const YOUTUBE_VIDEO_FORMAT = "YouTube video file name format";
const PODCAST_FORMAT = "Podcast episode file name format";
const TWEET_FORMAT = "Tweet file name format";
const BOOK_FORMAT = "Book file name format";
const IGNORE_EMPTY_PROPERTIES = "Ignore empty Properties";
const READWISE_API_URL = "https://readwise.io/api/v2/";
const LogAndThrowError = (error) => {
new Notice("error", 10000);
throw new Error(error);
};
const EzImportType = Object.freeze({
Article: "articles",
YouTube_video: "youtube-video",
Podcast_episode: "podcasts",
Tweet: "tweets",
Book: "books"
});
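// These values are passed as the Readwise `category` filter when listing sources (see getHighlightsByCategory below).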
module.exports = {
entry: () => {
new Notice("Please use one of the specific entry points.", 10000);
},
settings: {
name: "EzImport",
author: "<NAME>",
options: {
[READWISE_API_OPTION]: {
type: "input",
placeholder: "Readwise API key",
},
[IGNORE_EMPTY_PROPERTIES]: {
type: "toggle",
defaultValue: true,
},
[ARTICLE_FORMAT]: {
type: "format",
defaultValue: "",
placeholder: "Article file name format",
},
[YOUTUBE_VIDEO_FORMAT]: {
type: "format",
defaultValue: "",
placeholder: "YouTube video file name format",
},
[PODCAST_FORMAT]: {
type: "format",
defaultValue: "",
placeholder: "Podcast episode file name format",
},
[TWEET_FORMAT]: {
type: "format",
defaultValue: "",
placeholder: "Tweet file name format",
},
[BOOK_FORMAT]: {
type: "format",
defaultValue: "",
placeholder: "Book file name format",
},
}
},
article: (params, settings) => start(params, settings, EzImportType.Article),
youtubeVideo: (params, settings) => start(params, settings, EzImportType.YouTube_video),
podcastEpisode: (params, settings) => start(params, settings, EzImportType.Podcast_episode),
tweet: (params, settings) => start(params, settings, EzImportType.Tweet),
book: (params, settings) => start(params, settings, EzImportType.Book),
};
let QuickAdd;
let Settings;
async function start(params, settings, type) {
QuickAdd = params;
Settings = settings;
if (settings[READWISE_API_OPTION] == null) {
LogAndThrowError("Please provide a valid Readwise API key.");
}
return await getReadwiseHighlights(type);
}
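// Fetch all Readwise sources of the given category, let the user pick one, derive the note's file name
// from the configured format, and expose the source's metadata as QuickAdd variables. If a note for the
// source already exists, only highlights newer than its lastHighlightAt property are appended; otherwise
// all of its highlights are collected for the new note.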
async function getReadwiseHighlights(type) {
const resolve = await getHighlightsByCategory(type);
if (!resolve) {
LogAndThrowError("No highlights found.");
}
const {results} = resolve;
const item = await QuickAdd.quickAddApi.suggester(results.map(item => item.title), results);
if (!item) {
LogAndThrowError("No item selected.");
}
const safeTitle = replaceIllegalFileNameCharactersInString(item.title);
const fileName = await getFileName(type, safeTitle);
QuickAdd.variables = {
...QuickAdd.variables,
safeTitle,
fileName,
title: item.title,
author: `[[${item.author}]]`,
source: item.source_url,
tags: item.tags.map(tag => tag.name).join(", "),
cover: item.cover_image_url,
lastHighlightAt: item.last_highlight_at,
updated: item.updated,
numHighlights: item.num_highlights,
};
if (Settings[IGNORE_EMPTY_PROPERTIES]) {
Object.keys(QuickAdd.variables).forEach(key => {
if (QuickAdd.variables[key] === "") {
QuickAdd.variables[key] = " ";
}
});
}
const file = await QuickAdd.app.vault.getAbstractFileByPath(`${fileName.replace('.md', '')}.md`);
if (file) {
await handleAddToExistingFile(file, item);
} else {
await handleCreateSourceFile(item);
}
return fileName;
}
async function handleAddToExistingFile(file, item) {
const metaEdit = QuickAdd.app.plugins.plugins["metaedit"];
if (!metaEdit) {
LogAndThrowError("MetaEdit not found. Please install it to add highlights to existing files.");
}
const { update } = metaEdit.api;
const lastHighlightAt = QuickAdd.app.metadataCache.getFileCache(file).frontmatter["lastHighlightAt"];
if (!lastHighlightAt) {
LogAndThrowError("File does not have a lastHighlightAt property.");
}
const resolve = await getHighlightsAfterDateForItem(item, lastHighlightAt);
const highlights = resolve.results.reverse();
if (highlights.length > 0) {
QuickAdd.variables.highlights = `\n${formatHighlights(highlights)}`;
await update("lastHighlightAt", item.last_highlight_at, file);
new Notice(`Added ${highlights.length} highlights to '${file.basename}'.`, 10000);
} else {
// Throw so we don't continue the capture flow.
LogAndThrowError(`No highlights found after ${new Date(lastHighlightAt).toISOString()}`);
}
}
async function handleCreateSourceFile(item) {
const resolve = await getHighlightsForItem(item);
if (!resolve) {
LogAndThrowError("No highlights found.");
}
const highlights = resolve.results.reverse();
QuickAdd.variables.highlights = formatHighlights(highlights);
}
async function getFileName(type, safeTitle) {
let fileNameFormat;
switch (type) {
case EzImportType.Article:
fileNameFormat = Settings[ARTICLE_FORMAT];
break;
case EzImportType.YouTube_video:
fileNameFormat = Settings[YOUTUBE_VIDEO_FORMAT];
break;
case EzImportType.Podcast_episode:
fileNameFormat = Settings[PODCAST_FORMAT];
break;
case EzImportType.Tweet:
fileNameFormat = Settings[TWEET_FORMAT];
break;
case EzImportType.Book:
fileNameFormat = Settings[BOOK_FORMAT];
break;
}
fileNameFormat = fileNameFormat.replace(/{{VALUE:safeTitle}}/g, safeTitle);
return await QuickAdd.quickAddApi.format(fileNameFormat);
}
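// Highlights are rendered as blockquotes; a highlight whose note contains ".h1" is rendered as a "##"
// heading instead, and any other note is appended below its quote.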
function formatHighlights(highlights) {
return highlights
.filter(hl => hl.text != "No title") // skip placeholder highlights so they don't leave blank gaps
.map(hl => {
const {quote, note} = textFormatter(hl.text, hl.note);
return `${quote}${note}`;
}).join("\n\n");
}
function textFormatter(sourceText, sourceNote) {
let quote = sourceText.split("\n").filter(line => line != "").map(line => {
if (sourceNote.includes(".h1"))
return `## ${line}`;
else
return `> ${line}`;
}).join("\n");
let note;
if (sourceNote.includes(".h1") || sourceNote == "" || !sourceNote) {
note = "";
} else {
note = "\n\n" + sourceNote;
}
return {quote, note};
}
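// Thin wrappers around the Readwise v2 REST API. Results are fetched in pages of up to 1000 items;
// getHighlightsAfterDateForItem filters server-side on highlighted_at__gt.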
async function getHighlightsByCategory(category) {
return apiGet(`${READWISE_API_URL}books`, {category, "page_size": 1000});
}
async function getHighlightsForItem(element) {
return apiGet(`${READWISE_API_URL}highlights`, {book_id: element.id, page_size: 1000});
}
async function getHighlightsAfterDateForItem(element, date) {
return apiGet(`${READWISE_API_URL}highlights`, {book_id: element.id, page_size: 1000, highlighted_at__gt: date});
}
async function apiGet(url, data) {
let finalURL = new URL(url);
if (data)
Object.keys(data).forEach(key => finalURL.searchParams.append(key, data[key]));
return await fetch(finalURL, {
method: 'GET', cache: 'no-cache',
headers: {
'Content-Type': 'application/json',
Authorization: `Token ${Settings[READWISE_API_OPTION]}`
},
}).then(async (res) => await res.json());
}
function replaceIllegalFileNameCharactersInString(string) {
return string
.replace(/[\\,#%&\{\}\/*<>$\'\":@]*/g, '') // Replace illegal file name characters with empty string
.replace(/\n/g, ' ') // replace newlines with spaces
.replace(/ +/g, ' '); // collapse runs of spaces so we don't get double spaces in the file name
}
<|start_filename|>docs/Examples/Attachments/TodoistScript.js<|end_filename|>
module.exports = {SelectFromAllTasks, GetAllTasksFromProject, GetAllTasksFromSection};
const getTodoistPluginApi = (app) => app.plugins.plugins["todoist-sync-plugin"].api;
/* API */
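// Each entry point pulls tasks through the Todoist Sync plugin's API, lets the user narrow the selection
// (by checkbox, project, or section), closes the chosen tasks in Todoist, and returns them formatted as
// Obsidian Tasks-plugin checklist items.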
async function SelectFromAllTasks(params) {
const tasks = await getAllTasks(params);
if (tasks.length === 0) {
new Notice("No tasks.");
return;
}
const selectedTasks = await selectTasks(params, tasks);
await closeSelectedTasks(params.app, selectedTasks);
return formatTasksToTasksPluginTask(selectedTasks);
}
async function GetAllTasksFromProject(params) {
const [allTasks, projects] = await Promise.all([getAllTasks(params), getProjects(params.app)]);
const targetProject = await params.quickAddApi.suggester(project => {
project.tasks = allTasks.filter(task => task.projectID === project.id);
return `${project.name} (${project.tasks.length})`;
}, projects);
if (!targetProject) return;
if (targetProject.tasks.length === 0) {
new Notice(`No tasks in '${targetProject.name}'.`);
return;
} else {
new Notice(`Added ${targetProject.tasks.length} tasks from '${targetProject.name}'.`)
}
await closeSelectedTasks(params.app, targetProject.tasks);
return formatTasksToTasksPluginTask(targetProject.tasks);
}
async function GetAllTasksFromSection(params) {
const [projects, sections, allTasks] = await Promise.all([getProjects(params.app), getSections(params.app), getAllTasks(params)]);
const targetSection = await params.quickAddApi.suggester(section => {
const sectionProject = projects.find(project => project.id === section["project_id"]);
section.tasks = allTasks.filter(task => task.sectionID === section.id);
return `${sectionProject.name} > ${section.name} (${section.tasks.length})`;
}, sections);
if (!targetSection) return;
if (targetSection.tasks.length === 0) {
new Notice(`No tasks in '${targetSection.name}'.`);
return;
} else {
new Notice(`Added ${targetSection.tasks.length} tasks from '${targetSection.name}'.`)
}
await closeSelectedTasks(params.app, targetSection.tasks);
return formatTasksToTasksPluginTask(targetSection.tasks);
}
/* Helpers */
async function getAllTasks(params) {
const api = getTodoistPluginApi(params.app);
const {ok: tasks} = await api.getTasks();
return tasks;
}
async function selectTasks(params, tasks) {
const selectedTaskNames = await params.quickAddApi.checkboxPrompt(tasks.map(task => task.content));
return tasks.filter(task => selectedTaskNames.some(t => t.contains(task.content)));
}
async function closeSelectedTasks(app, tasks) {
const api = getTodoistPluginApi(app);
tasks.forEach(async task => await api.closeTask(task.id));
}
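// Render tasks in the Obsidian Tasks plugin's checklist format; 📅 marks the due date when the Todoist
// task has one.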
function formatTasksToTasksPluginTask(tasks) {
return tasks.map(task =>
task.rawDatetime
? `- [ ] ${task.content} 📅 ${task.rawDatetime.format("YYYY-MM-DD")}`
: `- [ ] ${task.content}`
).join("\n") + "\n";
}
async function getTasksGroupedByProject(app) {
const api = getTodoistPluginApi(app);
const {ok: projects} = await api.getTasksGroupedByProject();
return projects;
}
async function getProjects(app) {
const api = getTodoistPluginApi(app);
const {ok: projects} = await api.getProjects();
return projects;
}
async function getSections(app) {
const api = getTodoistPluginApi(app);
const {ok: sections} = await api.getSections();
return sections;
}
<|start_filename|>docs/Examples/Attachments/citationsManager.js<|end_filename|>
module.exports = {
entry: start,
settings: {
name: "Citations Manager",
author: "<NAME>",
options: {
"Ignore empty values": {
type: "toggle",
defaultValue: true,
},
},
}
}
const ignoreEmpty = "Ignore empty values";
async function start(params, settings) {
const citationsPlugin = params.app.plugins.plugins["obsidian-citation-plugin"];
if (citationsPlugin) {
await handleCitationsPlugin(params, citationsPlugin, settings);
} else {
new Notice("Citations plugin not found.", 5000);
throw new Error("Citations plugin not found.");
}
}
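// Present the Citations plugin's library in a suggester, then expose the chosen entry's bibliographic
// fields (citekey, authors, DOI, etc.) as QuickAdd variables for use in templates.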
async function handleCitationsPlugin(params, citationsPlugin, settings) {
// Open suggester with library
const library = citationsPlugin.library.entries;
const selectedLibraryEntryKey = await params.quickAddApi.suggester(entry => {
const item = library[entry];
if (item.title) return item.title;
return entry;
}, Object.keys(library));
const entry = library[selectedLibraryEntryKey];
if (!entry && !selectedLibraryEntryKey) {
new Notice("No library entry selected.", 5000);
throw new Error("No library entry selected.");
} else if (!entry) {
new Notice("Invalid entry. Selected library entry: " + selectedLibraryEntryKey, 5000);
throw new Error("Invalid entry. Selected library entry: " + selectedLibraryEntryKey);
}
params.variables = {
...params.variables,
fileName: replaceIllegalFileNameCharactersInString(entry.title),
citekey: selectedLibraryEntryKey,
id: selectedLibraryEntryKey,
author: entry.authorString.split(', ').map(author => `[[${author}]]`).join(", "),
doi: entry.DOI,
// https://github.com/hans/obsidian-citation-plugin/blob/cb601fceda8c70c0404dd250c50cdf83d5d04979/src/types.ts#L46
abstract: entry.abstract,
authorString: entry.authorString,
containerTitle: entry.containerTitle,
DOI: entry.DOI,
eprint: entry.eprint,
eprinttype: entry.eprinttype,
eventPlace: entry.eventPlace,
note: entry.note,
page: entry.page,
publisher: entry.publisher,
publisherPlace: entry.publisherPlace,
title: entry.title,
URL: entry.URL,
year: entry.year?.toString(),
zoteroSelectURI: entry.zoteroSelectURI,
type: entry.type,
issuedDate: entry.issuedDate,
};
if (settings[ignoreEmpty]) {
Object.keys(params.variables).forEach(key => {
if (params.variables[key] === undefined) {
params.variables[key] = " ";
}
});
}
}
function replaceIllegalFileNameCharactersInString(string) {
return string.replace(/[\\,#%&\{\}\/*<>$\'\":@]*/g, '');
}
<|start_filename|>docs/Examples/Attachments/zettelizer.js<|end_filename|>
module.exports = async (params) => {
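// For every level-3 heading in the active file, split off the heading's first word (used to address it)
// and create a note in the Zettels folder named after the remaining text, whose content is an embed of
// that heading's section. Notes that already exist are left untouched.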
console.log("Starting...")
console.log(params);
const currentFile = params.app.workspace.getActiveFile();
if (!currentFile) {
new Notice("No active file.");
return;
}
console.log("Found active file: ", currentFile.basename);
const currentFileCache = params.app.metadataCache.getFileCache(currentFile);
const headingsInFile = currentFileCache.headings;
if (!headingsInFile) {
new Notice(`No headers in file ${currentFile.name}`);
return;
}
console.log("Found headings in active file: ", headingsInFile);
const folder = "40 Slipbox/44 Zettels";
if (!(await params.app.vault.adapter.exists(folder))) {
new Notice(`Could not find folder ${folder}`);
return;
}
console.log("Folder does exist: ", folder);
for (const heading of headingsInFile) {
console.log(`Checking ${heading.heading}. It is level ${heading.level}`);
if (heading.level === 3) {
const splitHeading = heading.heading.split(" ");
const location = splitHeading[0].trim();
const text = splitHeading.length > 1 ? [...splitHeading.slice(1)].join(' ').trim() : "";
const path = `${folder}/${text.replace(/[\\,#%&\{\}\/*<>$\'\":@]*/g, '')}.md`;
const content = `![[${currentFile.basename}#${location}${text ? " " + text : ""}]]`;
console.log(`Path: ${path}.\nContent: ${content}`);
if (text && !(await params.app.vault.adapter.exists(path)))
await params.app.vault.create(path, content);
else if (text)
new Notice(`File ${path} already exists.`, 5000);
}
}
console.log("Finished!");
}
<|start_filename|>docs/Examples/Attachments/getLongLatFromAddress.js<|end_filename|>
module.exports = async (params) => {
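// Prompt for an address, geocode it with the OpenStreetMap Nominatim search API, and store the resulting
// latitude/longitude in the active file's "location" front matter property via MetaEdit.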
const {createYamlProperty} = params.app.plugins.plugins["metaedit"].api;
const address = await params.quickAddApi.inputPrompt("🏠 Address");
if (!address) {
new Notice("No address given", 5000);
return;
}
const result = await apiGet(address);
if (!result.length) {
new Notice("No results found", 5000);
return;
}
const {lat, lon} = result[0];
const activeFile = params.app.workspace.getActiveFile();
if (!activeFile) {
new Notice("No active file", 5000);
return;
}
await createYamlProperty("location", `[${lat}, ${lon}]`, activeFile);
}
async function apiGet(searchQuery) {
let finalURL = new URL(`https://nominatim.openstreetmap.org/search?q=${searchQuery}&format=json`);
return await fetch(finalURL, {
method: 'GET', cache: 'no-cache',
headers: {
'Content-Type': 'application/json',
},
}).then(async (res) => await res.json());
} | samuelnunoo/quickadd |