lang
stringclasses 2
values | license
stringclasses 13
values | stderr
stringlengths 0
343
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 6
87.7k
| new_contents
stringlengths 0
6.23M
| new_file
stringlengths 3
311
| old_contents
stringlengths 0
6.23M
| message
stringlengths 6
9.1k
| old_file
stringlengths 3
311
| subject
stringlengths 0
4k
| git_diff
stringlengths 0
6.31M
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
JavaScript | apache-2.0 | a085ead5f57088e16d9809ae4e6b972dc3cddbc9 | 0 | kenji0711/PICoresight2016-CustomSymbol-PutValue,kenji0711/PICoresight2016-CustomSymbol-PutValue | (function (CS) {
var definition = {
typeName: 'putvalue',
datasourceBehavior: CS.DatasourceBehaviors.Single,
getDefaultConfig: function() {
return {
DataShape: 'Value',
Height: 150,
Width: 150,
TextColor: 'rgb(255,255,255)',
ShowLabel: true,
ShowTime: true,
ShowPutValue: true
};
},
configOptions: function () {
return [{
title: 'Format Symbol',
mode: 'format'
}];
},
init: init
};
function init(scope) {
function onUpdate(data) {
if(data) {
scope.value = data.Value;
scope.time = data.Time;
if(data.Path){
scope.path = data.Path;
}
if(data.Label) {
scope.label = data.Label;
}
}
}
scope.putvalue = function() {
//piwebapiserver machine name - Normally PI Coresight machine contains PI Web API. So localhost should be fine.
var piwebapiaddress = "localhost";
//scope.path contains pi:\\servername\tagname or af:\\servername\databasename\element...|attribute
var ini = scope.path.substr(0,3);
var orgpath = scope.path.substr(3,10000);
//To double the backslash - \\\\servername\\tagname
var path = orgpath.replace(/\\|\\/g,"\\\\");
if(ini=="pi:"){
var urladdress = "\"https://" + piwebapiaddress + "/piwebapi/points?path="+path+"\"";
}
else if(ini=="af:"){
var urladdress = "\"https://" + piwebapiaddress + "/piwebapi/attributes?path="+path+"\"";
}
//Get text box value
var boxval = new String(scope.config.Box, { "type" : "text/plain" });
//Create value contents as json
var jsonval = '"{Value:' + boxval + '}"';
// Create contents of PI Web API batch request
var contents = '{"GetWebID":{"Method": "GET","Resource": '+ urladdress + '},"WriteValuetoPI":{"Method":"POST","Resource": "{0}","Content":' + jsonval + ',"Parameters": ["$.GetWebID.Content.Links.Value"],"ParentIds": ["GetWebID"]}}';
//PI Web API Request
var batchurl = "https://localhost/piwebapi/batch";
var xhr= new XMLHttpRequest();
//true = Async call
xhr.open("POST",batchurl,true);
//Set credential for Kerberos
xhr.withCredentials = true;
xhr.setRequestHeader('Content-Type','application/json');
//Send request
xhr.send(contents);
};
return { dataUpdate: onUpdate };
}
CS.symbolCatalog.register(definition);
})(window.Coresight); | sym-putvalue.js | (function (CS) {
var definition = {
typeName: 'putvalue',
datasourceBehavior: CS.DatasourceBehaviors.Single,
getDefaultConfig: function() {
return {
DataShape: 'Value',
Height: 150,
Width: 150,
TextColor: 'rgb(255,255,255)',
ShowLabel: true,
ShowTime: true,
ShowPutValue: true
};
},
configOptions: function () {
return [{
title: 'Format Symbol',
mode: 'format'
}];
},
init: init
};
function init(scope) {
function onUpdate(data) {
if(data) {
scope.value = data.Value;
scope.time = data.Time;
if(data.Path){
scope.path = data.Path;
}
if(data.Label) {
scope.label = data.Label;
}
}
}
scope.putvalue = function() {
//piwebapiserver machine name - Normally PI Coresight machine contains PI Web API. So localhost should be fine.
var piwebapiaddress = "localhost";
//scope.path contains pi:\\servername\tagname or af:\\servername\databasename\element...|attribute
var ini = scope.path.substr(0,3);
var orgpath = scope.path.substr(3,10000);
//To double the backslash - \\\\servername\\tagname
var path = orgpath.replace(/\\|\\/g,"\\\\");
if(ini=="pi:"){
var urladdress = "\"https://" + piwebapiaddress + "/piwebapi/points?path="+path+"\"";
}
else if(ini=="af:"){
var urladdress = "\"https://" + piwebapiaddress + "/piwebapi/attributes?path="+path+"\"";
}
//Get text box value
//var boxval = new String([ $("#box").val() ], { "type" : "text/plain" });
// $(‘[name=data]’)[i].value
var boxval = new String(scope.config.Box, { "type" : "text/plain" });
var jsonval = '"{Value:' + boxval + '}"';
// Create contents of PI Web API batch request
var contents = '{"GetWebID":{"Method": "GET","Resource": '+ urladdress + '},"WriteValuetoPI":{"Method":"POST","Resource": "{0}","Content":' + jsonval + ',"Parameters": ["$.GetWebID.Content.Links.Value"],"ParentIds": ["GetWebID"]}}';
//PI Web API Request
var batchurl = "https://localhost/piwebapi/batch";
var xhr= new XMLHttpRequest();
//true = Async call
xhr.open("POST",batchurl,true);
//Set credential for Kerberos
xhr.withCredentials = true;
xhr.setRequestHeader('Content-Type','application/json');
//Send request
xhr.send(contents);
};
return { dataUpdate: onUpdate };
}
CS.symbolCatalog.register(definition);
})(window.Coresight); | Comments edited
| sym-putvalue.js | Comments edited | <ide><path>ym-putvalue.js
<ide> },
<ide> init: init
<ide> };
<del>
<ide> function init(scope) {
<ide> function onUpdate(data) {
<ide> if(data) {
<ide> var urladdress = "\"https://" + piwebapiaddress + "/piwebapi/attributes?path="+path+"\"";
<ide> }
<ide> //Get text box value
<del> //var boxval = new String([ $("#box").val() ], { "type" : "text/plain" });
<del>// $(‘[name=data]’)[i].value
<del> var boxval = new String(scope.config.Box, { "type" : "text/plain" });
<del>
<del>
<del> var jsonval = '"{Value:' + boxval + '}"';
<del> // Create contents of PI Web API batch request
<del> var contents = '{"GetWebID":{"Method": "GET","Resource": '+ urladdress + '},"WriteValuetoPI":{"Method":"POST","Resource": "{0}","Content":' + jsonval + ',"Parameters": ["$.GetWebID.Content.Links.Value"],"ParentIds": ["GetWebID"]}}';
<add> var boxval = new String(scope.config.Box, { "type" : "text/plain" });
<add> //Create value contents as json
<add> var jsonval = '"{Value:' + boxval + '}"';
<add> // Create contents of PI Web API batch request
<add> var contents = '{"GetWebID":{"Method": "GET","Resource": '+ urladdress + '},"WriteValuetoPI":{"Method":"POST","Resource": "{0}","Content":' + jsonval + ',"Parameters": ["$.GetWebID.Content.Links.Value"],"ParentIds": ["GetWebID"]}}';
<ide>
<ide> //PI Web API Request
<ide> var batchurl = "https://localhost/piwebapi/batch"; |
|
JavaScript | mit | bf9eb6adb210ffe5ab5f81f58727ecc66e60e15d | 0 | nickolanack/js-simplekml | 'use strict';
/**
* BackgroundKmlReader is meant to be a drop in replacement for KmlReader class
*/
var BackgroundKmlReader = (function() {
var BackgroundKmlReader = function(kml) {
this._worker=new Worker(userFunctionWorker);
worker.postMessage(kml);
this._handlers={};
var me=this;
worker.onmessage=function(e){
if(!me._handers[e.data.method]){
throw 'Unexpected message: '+JSON.stringify(e.data);
}
if(e.data.result=="done"){
delete me._handers[e.data.method];
return;
}
if(me._filter(e.data.feature)){
me._handers[e.data.method](e.data.feature, e.data.total, e.data.index);
}
};
};
BackgroundKmlReader.prototype._execute = function(method, callback) {
this._handlers[method]=callback;
worker.postMessage(method);
};
BackgroundKmlReader.prototype.parseMarkers = function(callback) {
this._execute('parseMarkers', callback);
};
BackgroundKmlReader.prototype.parsePolygons = function(callback) {
this._execute('parsePolygons', callback);
};
BackgroundKmlReader.prototype.parseLines = function(callback) {
this._execute('parseLines', callback);
};
BackgroundKmlReader.prototype.parseNetworklinks = function(callback) {
this._execute('parseNetworklinks', callback);
};
BackgroundKmlReader.prototype.parseGroundOverlays = function(callback) {
this._execute('parseGroundOverlays', callback);
};
BackgroundKmlReader.prototype._filter = function(item) {
if (this._filters) {
var bool = true;
this._filters.forEach(function(f) {
if (typeof f != 'function' && f.type) {
if (item.type === f.type) {
if (f.filter(item) === false) {
bool = false;
}
}
return;
}
if (f(item) === false) {
bool = false;
}
});
return bool;
}
return true
};
BackgroundKmlReader.prototype.addFilter = function(type, fn) {
if (!this._filters) {
this._filters = [];
}
if (typeof type == 'function') {
fn = type;
this._filters.push(fn);
return this;
}
this._filters.push({
type,
filter: fn
});
return this;
};
return BackgroundKmlReader;
})();
| BackgroundKmlReader.js | 'use strict';
/**
* BackgroundKmlReader is meant to be a drop in replacement for KmlReader class
*/
var BackgroundKmlReader = (function() {
var BackgroundKmlReader = function(kml) {
this._worker=new Worker(userFunctionWorker);
worker.postMessage(kml);
this._handlers={};
var me=this;
worker.onmessage=function(e){
if(!me._handers[e.data.method]){
throw 'Unexpected message: '+JSON.stringify(e.data);
}
if(e.data.result=="done"){
delete me._handers[e.data.method];
return;
}
if(me._filter(e.data.feature)){
me._handers[e.data.method](e.data.feature, e.data.total, e.data.index);
}
};
};
BackgroundKmlReader.prototype._execute = function(method, callback) {
this._handlers[method]=callback;
worker.postMessage(method);
};
BackgroundKmlReader.prototype.parseMarkers = function(callback) {
this._execute('parseMarkers', callback);
};
BackgroundKmlReader.prototype.parsePolygons = function(callback) {
this._execute('parsePolygons', callback);
};
BackgroundKmlReader.prototype.parseLines = function(callback) {
this._execute('parseLines', callback);
};
BackgroundKmlReader.prototype.parseNetworklinks = function(callback) {
this._execute('parseNetworklinks', callback);
};
BackgroundKmlReader.prototype.parseGroundOverlays = function(callback) {
this._execute('parseGroundOverlays', callback);
};
BackgroundKmlReader.prototype._filter = function(item) {
if (this._filters) {
var bool = true;
this._filters.forEach(function(f) {
if (typeof f != 'function' && f.type) {
if (item.type === f.type) {
if (f.filter(item) === false) {
bool = false;
}
}
return;
}
if (f(item) === false) {
bool = false;
}
});
return bool;
}
return true
};
BackgroundKmlReader.prototype.addFilter = function(type, fn) {
if (!this._filters) {
this._filters = [];
}
if (typeof type == 'function') {
fn = type;
this._filters.push(fn);
return this;
}
this._filters.push({
type,
filter: fn
});
return this;
};
return BackgroundKmlReader;
});
| execute closure
| BackgroundKmlReader.js | execute closure | <ide><path>ackgroundKmlReader.js
<ide>
<ide>
<ide>
<del>});
<add>})();
<ide> |
|
Java | apache-2.0 | 4b5b1ac3d10f1a190450ad59b8be5c9568921852 | 0 | nandakumar131/hadoop,littlezhou/hadoop,JingchengDu/hadoop,wwjiang007/hadoop,plusplusjiajia/hadoop,wwjiang007/hadoop,JingchengDu/hadoop,steveloughran/hadoop,apurtell/hadoop,JingchengDu/hadoop,steveloughran/hadoop,ucare-uchicago/hadoop,ucare-uchicago/hadoop,nandakumar131/hadoop,xiao-chen/hadoop,wwjiang007/hadoop,mapr/hadoop-common,littlezhou/hadoop,xiao-chen/hadoop,apache/hadoop,apache/hadoop,xiao-chen/hadoop,apurtell/hadoop,ucare-uchicago/hadoop,wwjiang007/hadoop,steveloughran/hadoop,apache/hadoop,wwjiang007/hadoop,xiao-chen/hadoop,steveloughran/hadoop,apurtell/hadoop,apurtell/hadoop,plusplusjiajia/hadoop,nandakumar131/hadoop,JingchengDu/hadoop,littlezhou/hadoop,mapr/hadoop-common,lukmajercak/hadoop,lukmajercak/hadoop,plusplusjiajia/hadoop,littlezhou/hadoop,mapr/hadoop-common,apache/hadoop,littlezhou/hadoop,littlezhou/hadoop,ucare-uchicago/hadoop,apache/hadoop,nandakumar131/hadoop,mapr/hadoop-common,apurtell/hadoop,lukmajercak/hadoop,apache/hadoop,lukmajercak/hadoop,apurtell/hadoop,mapr/hadoop-common,plusplusjiajia/hadoop,ucare-uchicago/hadoop,mapr/hadoop-common,apache/hadoop,nandakumar131/hadoop,JingchengDu/hadoop,wwjiang007/hadoop,xiao-chen/hadoop,lukmajercak/hadoop,wwjiang007/hadoop,steveloughran/hadoop,plusplusjiajia/hadoop,apurtell/hadoop,JingchengDu/hadoop,littlezhou/hadoop,plusplusjiajia/hadoop,nandakumar131/hadoop,ucare-uchicago/hadoop,xiao-chen/hadoop,lukmajercak/hadoop,ucare-uchicago/hadoop,lukmajercak/hadoop,steveloughran/hadoop,steveloughran/hadoop,mapr/hadoop-common,xiao-chen/hadoop,JingchengDu/hadoop,plusplusjiajia/hadoop,nandakumar131/hadoop | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools.rumen;
import java.util.regex.Pattern;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
/**
* Job History related constants for Hadoop releases prior to 0.21
*/
public class Pre21JobHistoryConstants {
/**
* Job history files contain key="value" pairs, where keys belong to this enum.
* It acts as a global namespace for all keys.
*/
static enum Keys {
JOBTRACKERID,
START_TIME, FINISH_TIME, JOBID, JOBNAME, USER, JOBCONF, SUBMIT_TIME,
LAUNCH_TIME, TOTAL_MAPS, TOTAL_REDUCES, FAILED_MAPS, FAILED_REDUCES,
FINISHED_MAPS, FINISHED_REDUCES, JOB_STATUS, TASKID, HOSTNAME, TASK_TYPE,
ERROR, TASK_ATTEMPT_ID, TASK_STATUS, COPY_PHASE, SORT_PHASE, REDUCE_PHASE,
SHUFFLE_FINISHED, SORT_FINISHED, MAP_FINISHED, COUNTERS, SPLITS,
JOB_PRIORITY, HTTP_PORT, TRACKER_NAME, STATE_STRING, VERSION
}
/**
* This enum contains some of the values commonly used by history log events.
* since values in history can only be strings - Values.name() is used in
* most places in history file.
*
* Note: "SUCCEEDED" is actually not a pre-0.21 value, but it might appear
* in jhist logs when the event is an unsuccessful job completion, yet, the
* overall job status is "SUCCEEDED".
*/
public static enum Values {
SUCCESS, SUCCEEDED, FAILED, KILLED, MAP, REDUCE, CLEANUP, RUNNING, PREP,
SETUP
}
/**
* Regex for Pre21 V1(old) jobhistory filename
* i.e jt-identifier_job-id_user-name_job-name
*/
static final Pattern JOBHISTORY_FILENAME_REGEX_V1 =
Pattern.compile("[^.].+_(" + JobID.JOBID_REGEX + ")_.+");
/**
* Regex for Pre21 V2(new) jobhistory filename
* i.e job-id_user-name_job-name
*/
static final Pattern JOBHISTORY_FILENAME_REGEX_V2 =
Pattern.compile("(" + JobID.JOBID_REGEX + ")_.+");
static final String OLD_FULL_SUFFIX_REGEX_STRING =
"(?:\\.[0-9]+" + Pattern.quote(JobHistory.OLD_SUFFIX) + ")";
/**
* Regex for Pre21 V1(old) jobhistory conf filename
* i.e jt-identifier_job-id_conf.xml
*/
static final Pattern CONF_FILENAME_REGEX_V1 =
Pattern.compile("[^.].+_(" + JobID.JOBID_REGEX + ")_conf.xml"
+ OLD_FULL_SUFFIX_REGEX_STRING + "?");
/**
* Regex for Pre21 V2(new) jobhistory conf filename
* i.e job-id_conf.xml
*/
static final Pattern CONF_FILENAME_REGEX_V2 =
Pattern.compile("(" + JobID.JOBID_REGEX + ")_conf.xml"
+ OLD_FULL_SUFFIX_REGEX_STRING + "?");
}
| hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Pre21JobHistoryConstants.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools.rumen;
import java.util.regex.Pattern;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
/**
* Job History related constants for Hadoop releases prior to 0.21
*/
public class Pre21JobHistoryConstants {
/**
* Job history files contain key="value" pairs, where keys belong to this enum.
* It acts as a global namespace for all keys.
*/
static enum Keys {
JOBTRACKERID,
START_TIME, FINISH_TIME, JOBID, JOBNAME, USER, JOBCONF, SUBMIT_TIME,
LAUNCH_TIME, TOTAL_MAPS, TOTAL_REDUCES, FAILED_MAPS, FAILED_REDUCES,
FINISHED_MAPS, FINISHED_REDUCES, JOB_STATUS, TASKID, HOSTNAME, TASK_TYPE,
ERROR, TASK_ATTEMPT_ID, TASK_STATUS, COPY_PHASE, SORT_PHASE, REDUCE_PHASE,
SHUFFLE_FINISHED, SORT_FINISHED, MAP_FINISHED, COUNTERS, SPLITS,
JOB_PRIORITY, HTTP_PORT, TRACKER_NAME, STATE_STRING, VERSION
}
/**
* This enum contains some of the values commonly used by history log events.
* since values in history can only be strings - Values.name() is used in
* most places in history file.
*/
public static enum Values {
SUCCESS, FAILED, KILLED, MAP, REDUCE, CLEANUP, RUNNING, PREP, SETUP
}
/**
* Regex for Pre21 V1(old) jobhistory filename
* i.e jt-identifier_job-id_user-name_job-name
*/
static final Pattern JOBHISTORY_FILENAME_REGEX_V1 =
Pattern.compile("[^.].+_(" + JobID.JOBID_REGEX + ")_.+");
/**
* Regex for Pre21 V2(new) jobhistory filename
* i.e job-id_user-name_job-name
*/
static final Pattern JOBHISTORY_FILENAME_REGEX_V2 =
Pattern.compile("(" + JobID.JOBID_REGEX + ")_.+");
static final String OLD_FULL_SUFFIX_REGEX_STRING =
"(?:\\.[0-9]+" + Pattern.quote(JobHistory.OLD_SUFFIX) + ")";
/**
* Regex for Pre21 V1(old) jobhistory conf filename
* i.e jt-identifier_job-id_conf.xml
*/
static final Pattern CONF_FILENAME_REGEX_V1 =
Pattern.compile("[^.].+_(" + JobID.JOBID_REGEX + ")_conf.xml"
+ OLD_FULL_SUFFIX_REGEX_STRING + "?");
/**
* Regex for Pre21 V2(new) jobhistory conf filename
* i.e job-id_conf.xml
*/
static final Pattern CONF_FILENAME_REGEX_V2 =
Pattern.compile("(" + JobID.JOBID_REGEX + ")_conf.xml"
+ OLD_FULL_SUFFIX_REGEX_STRING + "?");
}
| MAPREDUCE-7130. Rumen crashes trying to handle MRAppMaster recovery events. Contributed by Peter Bacsko
| hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Pre21JobHistoryConstants.java | MAPREDUCE-7130. Rumen crashes trying to handle MRAppMaster recovery events. Contributed by Peter Bacsko | <ide><path>adoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/Pre21JobHistoryConstants.java
<ide> /**
<ide> * This enum contains some of the values commonly used by history log events.
<ide> * since values in history can only be strings - Values.name() is used in
<del> * most places in history file.
<add> * most places in history file.
<add> *
<add> * Note: "SUCCEEDED" is actually not a pre-0.21 value, but it might appear
<add> * in jhist logs when the event is an unsuccessful job completion, yet, the
<add> * overall job status is "SUCCEEDED".
<ide> */
<ide> public static enum Values {
<del> SUCCESS, FAILED, KILLED, MAP, REDUCE, CLEANUP, RUNNING, PREP, SETUP
<add> SUCCESS, SUCCEEDED, FAILED, KILLED, MAP, REDUCE, CLEANUP, RUNNING, PREP,
<add> SETUP
<ide> }
<del>
<add>
<ide> /**
<ide> * Regex for Pre21 V1(old) jobhistory filename
<ide> * i.e jt-identifier_job-id_user-name_job-name |
|
JavaScript | mit | a67c8ebf9e7c187535c60a3588594c8132626912 | 0 | marcomarchesi/the-runner,marcomarchesi/threejs-boilerplate-plus,marcomarchesi/the-runner,marcomarchesi/the-runner,marcomarchesi/threejs-boilerplate-plus,marcomarchesi/threejs-boilerplate-plus,marcomarchesi/threejs-boilerplate-plus | /**
* HUD.js
* @author Pierfrancesco Soffritti
*
*/
var pauseTextMaterial, HUDPauseMaterial;
var pointerMesh, redPointerMesh, LapCounterMesh;
var HUDElementsArray;
var gotStartingPoint = false, hasMoved = false;
var numLap = 0;
function HUD(HUDScene, HUDisVisible, oculusEnabled) {
this.HUDisVisible = HUDisVisible;
var textCanvas = document.createElement('canvas');
var context = textCanvas.getContext('2d');
context.font = "Bold 30px Arial";
context.fillStyle = "rgba(255, 0, 0, 1)";
context.fillText("LAP number: " +numLap, 0, 50);
// canvas contents will be used for a texture
var textTexture = new THREE.Texture(textCanvas)
textTexture.needsUpdate = true;
// load a sample texture
//var texture = THREE.ImageUtils.loadTexture("textures/ui.png");
var HUDSampleMaterial = new THREE.MeshBasicMaterial({ map: textTexture });
HUDSampleMaterial.transparent = true
HUDSampleMaterial.opacity = 1;
LapCounterMesh = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), HUDSampleMaterial);
LapCounterMesh.scale.set(window.innerWidth / 2, window.innerHeight / 2, 1);
LapCounterMesh.position.z = -0.01;
LapCounterMesh.position.x = 1000;
HUDscene.add(LapCounterMesh);
// load minimap texture
var texturePath = THREE.ImageUtils.loadTexture("textures/map.png");
var HUDMinimapMaterial = new THREE.MeshBasicMaterial({ map: texturePath });
HUDMinimapMaterial.transparent = true
HUDMinimapMaterial.opacity = 1;
var hudMinimapMesh = new THREE.Mesh(new THREE.PlaneGeometry(1, 1.586), HUDMinimapMaterial);
if(oculusEnabled)
hudMinimapMesh.scale.set(window.innerWidth / 3, window.innerHeight / 7, 1);
else
hudMinimapMesh.scale.set(window.innerWidth / 3, window.innerHeight / 3, 1);
hudMinimapMesh.position.z = -0.01;
hudMinimapMesh.position.x = 0;//1000;
hudMinimapMesh.position.y = 0;//500;
HUDscene.add(hudMinimapMesh);
this.minimap = hudMinimapMesh;
// load pointer (on minimap) texture
var texturePath = THREE.ImageUtils.loadTexture("textures/pointer_red.png");
var HUDRedPointerMaterial = new THREE.MeshBasicMaterial({ map: texturePath });
HUDRedPointerMaterial.transparent = true
HUDRedPointerMaterial.opacity = 1;
redPointerMesh = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), HUDRedPointerMaterial);
redPointerMesh.scale.set(20, 20, 1);
redPointerMesh.position.z = -0.01;
HUDscene.add(redPointerMesh);
var texturePath = THREE.ImageUtils.loadTexture("textures/pointer.png");
var HUDPointerMaterial = new THREE.MeshBasicMaterial({ map: texturePath });
HUDPointerMaterial.transparent = true
HUDPointerMaterial.opacity = 1;
pointerMesh = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), HUDPointerMaterial);
pointerMesh.scale.set(20, 20, 1);
pointerMesh.position.z = -0.01;
HUDscene.add(pointerMesh);
// ---- PAUSE STATE STUFF
// create text
// create a canvas element
var textCanvas = document.createElement('canvas');
var context = textCanvas.getContext('2d');
context.font = "Bold 40px Arial";
context.fillStyle = "rgba(255,0,0,0.95)";
context.fillText('PAUSE', 0, 50);
// canvas contents will be used for a texture
var textTexture = new THREE.Texture(textCanvas)
textTexture.needsUpdate = true;
pauseTextMaterial = new THREE.MeshBasicMaterial( {map: textTexture, side:THREE.DoubleSide } );
pauseTextMaterial.transparent = true;
var textMesh = new THREE.Mesh( new THREE.PlaneGeometry(textCanvas.width, textCanvas.height), pauseTextMaterial );
textMesh.position.z = -0.01;
HUDscene.add(textMesh);
//var pauseTexture = THREE.ImageUtils.loadTexture("textures/texture.png");
HUDPauseMaterial = new THREE.MeshBasicMaterial({ color: "black" });
HUDPauseMaterial.transparent = true
HUDPauseMaterial.opacity = 1;
var HUDPause = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), HUDPauseMaterial);
HUDPause.scale.set(window.innerWidth*2, window.innerHeight*2, 1);
HUDPause.position.z = -0.01;
HUDscene.add(HUDPause);
// ---
HUDElementsArray = [HUDSampleMaterial, HUDMinimapMaterial, HUDPointerMaterial, HUDRedPointerMaterial];
// if the HUD is not visibile, hide it.
if(!this.HUDisVisible) {
for (i = 0; i < HUDElementsArray.length; ++i) {
HUDElementsArray[i].opacity = 0;
}
}
};
HUD.prototype.update = function(pause, mapX, mapY, sceneX, sceneZ, pathCameraX, pathCameraZ) {
// check if the game is in puase or not.
if(pause) {
// show pause
if(HUDPauseMaterial.opacity < 0.5) {
HUDPauseMaterial.opacity = HUDPauseMaterial.opacity + 0.1;
pauseTextMaterial.opacity = pauseTextMaterial.opacity + 0.1;
}
else {
HUDPauseMaterial.opacity = 0.5;
pauseTextMaterial.opacity = 1;
}
}
else {
// hide pause
if(HUDPauseMaterial.opacity > 0.1) {
HUDPauseMaterial.opacity = HUDPauseMaterial.opacity - 0.1;
pauseTextMaterial.opacity = pauseTextMaterial.opacity - 0.1;
}
else {
HUDPauseMaterial.opacity = 0;
pauseTextMaterial.opacity = 0;
}
}
updateHUDVisibility(HUDElementsArray);
// update pointer on minimap
updatePointerPosition(mapX, mapY, sceneX, sceneZ, pathCameraX, pathCameraZ);
};
function updatePointerPosition(mapX, mapY, sceneX, sceneZ, pathCameraX, pathCameraZ) {
//pointerMesh;
//console.log("camera: " +camera.position.x);
//console.log("pointer: " +pointerMesh.position.x);
pointerMesh.position.x = mapX/3.6 + getPointerXPosition(mapX, sceneX, pathCameraX) *1.45;
pointerMesh.position.y = -mapY/12.6 + getPointerYPosition(mapY, sceneZ, pathCameraZ) *1.20;
pointerMesh.updateMatrix();
pointerMesh.updateMatrixWorld();
// set the red point (start)
if(!gotStartingPoint && !isNaN(pointerMesh.position.x)) {
redPointerMesh.position.x = pointerMesh.position.x;
redPointerMesh.position.y = pointerMesh.position.y;
redPointerMesh.updateMatrix();
redPointerMesh.updateMatrixWorld();
gotStartingPoint = true;
}
var lapCounterPrecision = 2;
if( (gotStartingPoint) &&
( (Math.floor(pointerMesh.position.x) <= Math.floor(redPointerMesh.position.x)-lapCounterPrecision) || Math.floor(pointerMesh.position.x) >= Math.floor(redPointerMesh.position.x)+lapCounterPrecision ) &&
( (Math.floor(pointerMesh.position.y) <= Math.floor(redPointerMesh.position.y)-lapCounterPrecision) || Math.floor(pointerMesh.position.y) >= Math.floor(redPointerMesh.position.y)+lapCounterPrecision ) ) {
hasMoved = true;
}
if( (!isNaN(pointerMesh.position.x) && hasMoved) &&
( (Math.floor(redPointerMesh.position.x) >= Math.floor(pointerMesh.position.x)-lapCounterPrecision) && Math.floor(redPointerMesh.position.x) <= Math.floor(pointerMesh.position.x)+lapCounterPrecision ) &&
( (Math.floor(redPointerMesh.position.y) >= Math.floor(pointerMesh.position.y)-lapCounterPrecision) && Math.floor(redPointerMesh.position.y) <= Math.floor(pointerMesh.position.y)+lapCounterPrecision ) ) {
numLap++;
// not exited the 'lap counter range' yet
hasMoved = false;
var textCanvas = document.createElement('canvas');
var context = textCanvas.getContext('2d');
context.font = "Bold 30px Arial";
context.fillStyle = "rgba(255, 0, 0, 1)";
context.fillText("LAP number: " +numLap, 0, 50);
// canvas contents will be used for a texture
var textTexture = new THREE.Texture(textCanvas)
textTexture.needsUpdate = true;
var HUDSampleMaterial = new THREE.MeshBasicMaterial({ map: textTexture });
HUDSampleMaterial.transparent = true
HUDSampleMaterial.opacity = 1;
LapCounterMesh.material = HUDSampleMaterial;
}
}
function getPointerXPosition(mapX, sceneX, pathCameraX) {
// pointerX : mapX = pathCameraX : sceneX
return (pathCameraX * mapX) / sceneX;
}
function getPointerYPosition(mapY, sceneZ, pathCameraZ) {
// pointerX : mapX = pathCameraX : sceneX
return (pathCameraZ * mapY) / sceneZ;
}
function updateHUDVisibility(HUDElementsArray) {
for (i = 0; i < HUDElementsArray.length; ++i) {
//console.log(HUDElementsArray[i] +" opacity: " +HUDElementsArray[i].opacity, "is HUD visible ?" +HUDisVisible);
// HUD fade out
if(controlGUI.HUDenabled == false && HUDisVisible) {
if(HUDElementsArray[i].opacity != 0)
HUDElementsArray[i].opacity = HUDElementsArray[i].opacity - 0.1;
for(j=0; j<HUDElementsArray.length; j++) {
if(HUDElementsArray[j].opacity < 0.1) {
HUDisVisible = false;
HUDElementsArray[i].opacity = 0;
}
else
HUDisVisible = true;
}
}
// HUD fade in
else if (controlGUI.HUDenabled == true && !HUDisVisible) {
if(HUDElementsArray[i].opacity != 1)
HUDElementsArray[i].opacity = HUDElementsArray[i].opacity + 0.1;
for(j=0; j<HUDElementsArray.length; j++) {
if(HUDElementsArray[j].opacity > 0.9) {
HUDisVisible = true;
HUDElementsArray[i].opacity = 1;
}
else
HUDisVisible = false;
}
}
}
}
| public/js/HUD.js | /**
* HUD.js
* @author Pierfrancesco Soffritti
*
*/
var pauseTextMaterial, HUDPauseMaterial;
var pointerMesh, redPointerMesh, LapCounterMesh;
var HUDElementsArray;
var gotStartingPoint = false, hasMoved;
var numLap = 0;
function HUD(HUDScene, HUDisVisible, oculusEnabled) {
this.HUDisVisible = HUDisVisible;
var textCanvas = document.createElement('canvas');
var context = textCanvas.getContext('2d');
context.font = "Bold 30px Arial";
context.fillStyle = "rgba(255, 0, 0, 1)";
context.fillText('LAP number: 1', 0, 50);
// canvas contents will be used for a texture
var textTexture = new THREE.Texture(textCanvas)
textTexture.needsUpdate = true;
// load a sample texture
//var texture = THREE.ImageUtils.loadTexture("textures/ui.png");
var HUDSampleMaterial = new THREE.MeshBasicMaterial({ map: textTexture });
HUDSampleMaterial.transparent = true
HUDSampleMaterial.opacity = 1;
LapCounterMesh = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), HUDSampleMaterial);
LapCounterMesh.scale.set(window.innerWidth / 2, window.innerHeight / 2, 1);
LapCounterMesh.position.z = -0.01;
LapCounterMesh.position.x = 1000;
HUDscene.add(LapCounterMesh);
// load minimap texture
var texturePath = THREE.ImageUtils.loadTexture("textures/map.png");
var HUDMinimapMaterial = new THREE.MeshBasicMaterial({ map: texturePath });
HUDMinimapMaterial.transparent = true
HUDMinimapMaterial.opacity = 1;
var hudMinimapMesh = new THREE.Mesh(new THREE.PlaneGeometry(1, 1.586), HUDMinimapMaterial);
if(oculusEnabled)
hudMinimapMesh.scale.set(window.innerWidth / 3, window.innerHeight / 7, 1);
else
hudMinimapMesh.scale.set(window.innerWidth / 3, window.innerHeight / 3, 1);
hudMinimapMesh.position.z = -0.01;
hudMinimapMesh.position.x = 0;//1000;
hudMinimapMesh.position.y = 0;//500;
HUDscene.add(hudMinimapMesh);
this.minimap = hudMinimapMesh;
// load pointer (on minimap) texture
var texturePath = THREE.ImageUtils.loadTexture("textures/pointer_red.png");
var HUDRedPointerMaterial = new THREE.MeshBasicMaterial({ map: texturePath });
HUDRedPointerMaterial.transparent = true
HUDRedPointerMaterial.opacity = 1;
redPointerMesh = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), HUDRedPointerMaterial);
redPointerMesh.scale.set(20, 20, 1);
redPointerMesh.position.z = -0.01;
HUDscene.add(redPointerMesh);
var texturePath = THREE.ImageUtils.loadTexture("textures/pointer.png");
var HUDPointerMaterial = new THREE.MeshBasicMaterial({ map: texturePath });
HUDPointerMaterial.transparent = true
HUDPointerMaterial.opacity = 1;
pointerMesh = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), HUDPointerMaterial);
pointerMesh.scale.set(20, 20, 1);
pointerMesh.position.z = -0.01;
HUDscene.add(pointerMesh);
// ---- PAUSE STATE STUFF
// create text
// create a canvas element
var textCanvas = document.createElement('canvas');
var context = textCanvas.getContext('2d');
context.font = "Bold 40px Arial";
context.fillStyle = "rgba(255,0,0,0.95)";
context.fillText('PAUSE', 0, 50);
// canvas contents will be used for a texture
var textTexture = new THREE.Texture(textCanvas)
textTexture.needsUpdate = true;
pauseTextMaterial = new THREE.MeshBasicMaterial( {map: textTexture, side:THREE.DoubleSide } );
pauseTextMaterial.transparent = true;
var textMesh = new THREE.Mesh( new THREE.PlaneGeometry(textCanvas.width, textCanvas.height), pauseTextMaterial );
textMesh.position.z = -0.01;
HUDscene.add(textMesh);
//var pauseTexture = THREE.ImageUtils.loadTexture("textures/texture.png");
HUDPauseMaterial = new THREE.MeshBasicMaterial({ color: "black" });
HUDPauseMaterial.transparent = true
HUDPauseMaterial.opacity = 1;
var HUDPause = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), HUDPauseMaterial);
HUDPause.scale.set(window.innerWidth*2, window.innerHeight*2, 1);
HUDPause.position.z = -0.01;
HUDscene.add(HUDPause);
// ---
HUDElementsArray = [HUDSampleMaterial, HUDMinimapMaterial, HUDPointerMaterial, HUDRedPointerMaterial];
// if the HUD is not visibile, hide it.
if(!this.HUDisVisible) {
for (i = 0; i < HUDElementsArray.length; ++i) {
HUDElementsArray[i].opacity = 0;
}
}
};
/**
 * Per-frame HUD refresh: fades the pause overlay in or out, applies the
 * HUD visibility toggle and moves the minimap pointer.
 */
HUD.prototype.update = function(pause, mapX, mapY, sceneX, sceneZ, pathCameraX, pathCameraZ) {
	var fadeStep = 0.1;
	if (pause) {
		// Fade the dark overlay towards 0.5 and the "PAUSE" text towards 1.
		if (HUDPauseMaterial.opacity < 0.5) {
			HUDPauseMaterial.opacity += fadeStep;
			pauseTextMaterial.opacity += fadeStep;
		} else {
			HUDPauseMaterial.opacity = 0.5;
			pauseTextMaterial.opacity = 1;
		}
	} else {
		// Fade both pause elements back out until fully transparent.
		if (HUDPauseMaterial.opacity > 0.1) {
			HUDPauseMaterial.opacity -= fadeStep;
			pauseTextMaterial.opacity -= fadeStep;
		} else {
			HUDPauseMaterial.opacity = 0;
			pauseTextMaterial.opacity = 0;
		}
	}

	updateHUDVisibility(HUDElementsArray);

	// update pointer on minimap
	updatePointerPosition(mapX, mapY, sceneX, sceneZ, pathCameraX, pathCameraZ);
};
/**
 * Positions the minimap pointer from the camera's location in the scene,
 * drops a red marker at the first valid position (the start line) and
 * counts one lap each time the pointer re-enters the marker's map cell.
 */
function updatePointerPosition(mapX, mapY, sceneX, sceneZ, pathCameraX, pathCameraZ) {
	pointerMesh.position.x = mapX/3.6 + getPointerXPosition(mapX, sceneX, pathCameraX) *1.45;
	pointerMesh.position.y = -mapY/12.6 + getPointerYPosition(mapY, sceneZ, pathCameraZ) *1.20;
	pointerMesh.updateMatrix();
	pointerMesh.updateMatrixWorld();

	// set the red point (start) once the pointer has a valid position
	if(!gotStartingPoint && !isNaN(pointerMesh.position.x)) {
		redPointerMesh.position.x = pointerMesh.position.x;
		redPointerMesh.position.y = pointerMesh.position.y;
		redPointerMesh.updateMatrix();
		redPointerMesh.updateMatrixWorld();
		gotStartingPoint = true;
	}

	// Is the pointer in the same (integer-floored) map cell as the start marker?
	var atStart = Math.floor(pointerMesh.position.x) == Math.floor(redPointerMesh.position.x) &&
	              Math.floor(pointerMesh.position.y) == Math.floor(redPointerMesh.position.y);

	// Arm the lap counter only once the pointer has left the start cell.
	if (gotStartingPoint && !atStart)
		hasMoved = true;

	if (!isNaN(pointerMesh.position.x) && hasMoved && atStart) {
		numLap++;
		// BUGFIX: disarm until the pointer leaves the start cell again.
		// Previously hasMoved was never reset, so a "lap" was counted on
		// every single frame spent near the start marker.
		hasMoved = false;

		var textCanvas = document.createElement('canvas');
		var context = textCanvas.getContext('2d');
		context.font = "Bold 30px Arial";
		context.fillStyle = "rgba(255, 0, 0, 1)";
		context.fillText("LAP number: " +numLap, 0, 50);

		// canvas contents will be used for a texture
		var textTexture = new THREE.Texture(textCanvas)
		textTexture.needsUpdate = true;

		var HUDSampleMaterial = new THREE.MeshBasicMaterial({ map: textTexture });
		HUDSampleMaterial.transparent = true
		HUDSampleMaterial.opacity = 1;

		LapCounterMesh.material = HUDSampleMaterial;
	}
}
/**
 * Projects the camera's scene X coordinate onto the minimap X axis by
 * solving the proportion pointerX : mapX = pathCameraX : sceneX.
 */
function getPointerXPosition(mapX, sceneX, pathCameraX) {
	var pointerX = (pathCameraX * mapX) / sceneX;
	return pointerX;
}
/**
 * Projects the camera's scene Z coordinate onto the minimap Y axis by
 * solving the proportion pointerY : mapY = pathCameraZ : sceneZ.
 */
function getPointerYPosition(mapY, sceneZ, pathCameraZ) {
	var pointerY = (pathCameraZ * mapY) / sceneZ;
	return pointerY;
}
/**
 * Fades every HUD element in or out to match the controlGUI.HUDenabled
 * toggle, updating the global HUDisVisible flag once the fade completes.
 *
 * BUGFIX: the loop indices `i` and `j` were previously undeclared, which
 * leaked them into the global scope (and throws in strict mode); they are
 * now declared locally. The fade logic itself is unchanged.
 *
 * @param {Array} HUDElementsArray - materials whose opacity is animated
 */
function updateHUDVisibility(HUDElementsArray) {
	for (var i = 0; i < HUDElementsArray.length; ++i) {
		// HUD fade out
		if(controlGUI.HUDenabled == false && HUDisVisible) {
			if(HUDElementsArray[i].opacity != 0)
				HUDElementsArray[i].opacity = HUDElementsArray[i].opacity - 0.1;
			for(var j = 0; j < HUDElementsArray.length; j++) {
				if(HUDElementsArray[j].opacity < 0.1) {
					HUDisVisible = false;
					HUDElementsArray[i].opacity = 0;
				}
				else
					HUDisVisible = true;
			}
		}
		// HUD fade in
		else if (controlGUI.HUDenabled == true && !HUDisVisible) {
			if(HUDElementsArray[i].opacity != 1)
				HUDElementsArray[i].opacity = HUDElementsArray[i].opacity + 0.1;
			for(var j = 0; j < HUDElementsArray.length; j++) {
				if(HUDElementsArray[j].opacity > 0.9) {
					HUDisVisible = true;
					HUDElementsArray[i].opacity = 1;
				}
				else
					HUDisVisible = false;
			}
		}
	}
}
| improved path counter logic
| public/js/HUD.js | improved path counter logic | <ide><path>ublic/js/HUD.js
<ide>
<ide> var HUDElementsArray;
<ide>
<del>var gotStartingPoint = false, hasMoved;
<add>var gotStartingPoint = false, hasMoved = false;
<ide> var numLap = 0;
<ide>
<ide> function HUD(HUDScene, HUDisVisible, oculusEnabled) {
<ide> var context = textCanvas.getContext('2d');
<ide> context.font = "Bold 30px Arial";
<ide> context.fillStyle = "rgba(255, 0, 0, 1)";
<del> context.fillText('LAP number: 1', 0, 50);
<add> context.fillText("LAP number: " +numLap, 0, 50);
<ide>
<ide> // canvas contents will be used for a texture
<ide> var textTexture = new THREE.Texture(textCanvas)
<ide>
<ide> // load a sample texture
<ide> //var texture = THREE.ImageUtils.loadTexture("textures/ui.png");
<del>
<add>
<ide> var HUDSampleMaterial = new THREE.MeshBasicMaterial({ map: textTexture });
<ide> HUDSampleMaterial.transparent = true
<ide> HUDSampleMaterial.opacity = 1;
<ide> gotStartingPoint = true;
<ide> }
<ide>
<del> if(gotStartingPoint && pointerMesh.position.x != redPointerMesh.position.x)
<del> hasMoved = true;
<del>
<del> if( (!isNaN(pointerMesh.position.x) && hasMoved) && Math.floor(pointerMesh.position.x) == Math.floor(redPointerMesh.position.x) && Math.floor(pointerMesh.position.y) == Math.floor(redPointerMesh.position.y)) {
<add> var lapCounterPrecision = 2;
<add>
<add> if( (gotStartingPoint) &&
<add> ( (Math.floor(pointerMesh.position.x) <= Math.floor(redPointerMesh.position.x)-lapCounterPrecision) || Math.floor(pointerMesh.position.x) >= Math.floor(redPointerMesh.position.x)+lapCounterPrecision ) &&
<add> ( (Math.floor(pointerMesh.position.y) <= Math.floor(redPointerMesh.position.y)-lapCounterPrecision) || Math.floor(pointerMesh.position.y) >= Math.floor(redPointerMesh.position.y)+lapCounterPrecision ) ) {
<add> hasMoved = true;
<add> }
<add>
<add> if( (!isNaN(pointerMesh.position.x) && hasMoved) &&
<add> ( (Math.floor(redPointerMesh.position.x) >= Math.floor(pointerMesh.position.x)-lapCounterPrecision) && Math.floor(redPointerMesh.position.x) <= Math.floor(pointerMesh.position.x)+lapCounterPrecision ) &&
<add> ( (Math.floor(redPointerMesh.position.y) >= Math.floor(pointerMesh.position.y)-lapCounterPrecision) && Math.floor(redPointerMesh.position.y) <= Math.floor(pointerMesh.position.y)+lapCounterPrecision ) ) {
<add>
<ide> numLap++;
<del> console.log(numLap);
<add>
<add> // not exited the 'lap counter range' yet
<add> hasMoved = false;
<ide>
<ide> var textCanvas = document.createElement('canvas');
<ide> var context = textCanvas.getContext('2d');
<ide>
<ide> LapCounterMesh.material = HUDSampleMaterial;
<ide> }
<del>
<del> //console.log("pointer: " +pointerMesh.position.x);
<del>
<del>
<ide> }
<ide>
<ide> function getPointerXPosition(mapX, sceneX, pathCameraX) { |
|
JavaScript | mit | 59efbb8d54ab53b9569bb1d76dba45e0751dd916 | 0 | vutran/dext-core-utils | const { spawn } = require('child_process');
const rimraf = require('rimraf');
const npmName = require('npm-name');
const {
ERR_MODULE_NOT_FOUND,
ERR_MODULE_INSTALLED,
ERR_MODULE_NOT_INSTALLED,
ERR_THEME_ALREADY_ACTIVE,
} = require('../errors');
const Conf = require('../utils/conf');
const { downloadPackage } = require('../utils/download');
const config = new Conf();
/**
 * Checks if the plugin/package exists on npm.
 *
 * @param {String} plugin - The name of the plugin/package
 * @return {Promise} - Rejects with ERR_MODULE_NOT_FOUND if the
 *   plugin/package doesn't exist on npm
 */
const checkOnNpm = plugin => new Promise((resolve, reject) => {
  npmName(plugin).then(available => {
    // npm-name resolves `true` when the name is still AVAILABLE,
    // i.e. no such package has been published.
    if (available) {
      // BUGFIX: throwing inside .then() only rejected the inner chain and
      // left this promise pending forever; reject it explicitly instead.
      reject(new Error(ERR_MODULE_NOT_FOUND));
      return;
    }
    resolve();
  });
});
/**
 * Installs a plugin/package and saves it to the given directory.
 *
 * @param {String} plugin - The name of the plugin/package
 * @param {String} outputDir - The directory to install the plugin/package
 * @return {Promise} - Rejects if the plugin is already installed, doesn't
 *   exist on npm, fails to download, or fails to npm-install
 */
const install = (plugin, outputDir) => new Promise((resolve, reject) => {
  const plugins = config.get('plugins') || [];
  if (plugins.indexOf(plugin) > -1) {
    // synchronous throw inside the executor rejects the promise
    throw new Error(ERR_MODULE_INSTALLED);
  }
  checkOnNpm(plugin)
    // download, install, and update configs
    .then(() => downloadPackage(plugin, outputDir))
    .then(output => {
      const installProcess = spawn('npm', ['install', '--prefix', output]);
      installProcess.on('close', (code) => {
        if (code) {
          // BUGFIX: previously a non-zero exit code left the promise
          // pending forever; reject so callers can react.
          reject(new Error(`npm install exited with code ${code}`));
          return;
        }
        plugins.push(plugin);
        config.set('plugins', plugins);
        resolve();
      });
    })
    // BUGFIX: propagate download/npm-check failures to the caller
    .catch(reject);
});
/**
 * Uninstalls a plugin/package from the given source directory.
 *
 * @param {String} plugin - The name of the plugin/package
 * @param {String} srcDir - The source directory of the plugin/package
 * @return {Promise} - Rejects if the plugin is not installed or the
 *   directory cannot be removed
 */
const uninstall = (plugin, srcDir) => new Promise((resolve, reject) => {
  const plugins = config.get('plugins') || [];
  if (!plugins || !plugins.length) {
    throw new Error(ERR_MODULE_NOT_INSTALLED);
  }
  if (plugins.indexOf(plugin) === -1) {
    throw new Error(ERR_MODULE_NOT_INSTALLED);
  }
  // removes the directory
  rimraf(srcDir, err => {
    if (err) {
      // BUGFIX: `throw` inside this async callback was an uncaught
      // exception and the promise never settled; reject instead.
      reject(err);
      return;
    }
    plugins.splice(plugins.indexOf(plugin), 1);
    config.set('plugins', plugins);
    resolve();
  });
});
/**
 * Switches the active theme.
 *
 * @param {String} theme - The name of the theme
 * @return {Promise} - Rejects if the theme is already active or not installed
 */
const setTheme = theme => new Promise(resolve => {
  const activeTheme = config.get('theme');
  const installed = config.get('plugins') || [];
  if (activeTheme === theme) {
    throw new Error(ERR_THEME_ALREADY_ACTIVE);
  }
  // the theme must be an installed plugin
  if (!installed.length || installed.indexOf(theme) === -1) {
    throw new Error(ERR_MODULE_NOT_INSTALLED);
  }
  config.set('theme', theme);
  resolve();
});
/**
 * Retrieves the name of the active theme.
 *
 * @return {Promise} - Resolves with the theme name ('' when none is set)
 */
const getTheme = () => new Promise(resolve => {
  resolve(config.get('theme') || '');
});
/**
 * Retrieves the current configuration store.
 *
 * @return {Promise} - Resolves with the current configuration object
 */
const getConfig = () => Promise.resolve(config.store);
// Public API surface of the package-manager module.
module.exports = {
  checkOnNpm,
  install,
  uninstall,
  setTheme,
  getTheme,
  getConfig,
};
| src/api/index.js | const { spawn } = require('child_process');
const rimraf = require('rimraf');
const npmName = require('npm-name');
const {
ERR_MODULE_NOT_FOUND,
ERR_MODULE_INSTALLED,
ERR_MODULE_NOT_INSTALLED,
ERR_THEME_ALREADY_ACTIVE,
} = require('../errors');
const Conf = require('../utils/conf');
const { downloadPackage } = require('../utils/download');
const config = new Conf();
/**
* Checks if the plugin/package exists on npm
*
* @param {String} plugin - The name of the plugin/package
* @return {Promise} - Throws an error if the plugin/package doesn't exist
*/
const checkOnNpm = plugin => new Promise(resolve => {
npmName(plugin).then(available => {
if (available) {
throw new Error(ERR_MODULE_NOT_FOUND);
}
resolve();
});
});
/**
* Installs a plugin/package and saves it to the given directory
*
* @param {String} plugin - The name of the plugin/package
* @param {String} outputDir - The directory to install the plugin/package
* @return {Promise}
*/
const install = (plugin, outputDir) => new Promise(resolve => {
const plugins = config.get('plugins') || [];
if (plugins.indexOf(plugin) > -1) {
throw new Error(ERR_MODULE_INSTALLED);
}
checkOnNpm(plugin).then(() => {
// download, install, and update configs
downloadPackage(plugin, outputDir).then(output => {
const installProcess = spawn('npm', ['install', '--prefix', output]);
installProcess.on('close', (code) => {
if (!code) {
plugins.push(plugin);
config.set('plugins', plugins);
resolve();
}
});
});
});
});
/**
* Uninstalls a plugin/package from the given source directory
*
* @param {String} plugin - The name of the plugin/package
* @param {String} srcDir - The source directory of the plugin/package
* @return {Promise}
*/
const uninstall = (plugin, srcDir) => new Promise(resolve => {
const plugins = config.get('plugins') || [];
if (!plugins || !plugins.length) {
throw new Error(ERR_MODULE_NOT_INSTALLED);
}
if (plugins.indexOf(plugin) === -1) {
throw new Error(ERR_MODULE_NOT_INSTALLED);
}
// removes the directory
const pluginDir = srcDir;
rimraf(pluginDir, err => {
if (err) {
throw new Error(err);
}
plugins.splice(plugins.indexOf(plugin), 1);
config.set('plugins', plugins);
resolve();
});
});
/**
* Switches your current theme
*
* @param {String} theme - The name of the theme
* @return {Promise}
*/
const setTheme = theme => new Promise(resolve => {
const currentTheme = config.get('theme');
const plugins = config.get('plugins') || [];
if (currentTheme === theme) {
throw new Error(ERR_THEME_ALREADY_ACTIVE);
}
if (!plugins || !plugins.length) {
throw new Error(ERR_MODULE_NOT_INSTALLED);
}
if (plugins.indexOf(theme) === -1) {
throw new Error(ERR_MODULE_NOT_INSTALLED);
}
config.set('theme', theme);
resolve();
});
/**
* Retrieve the current theme
*
* @return {String} - The current name of the theme
*/
const getTheme = () => new Promise(resolve => {
const currentTheme = config.get('theme');
resolve(currentTheme || '');
});
module.exports = {
checkOnNpm,
install,
uninstall,
setTheme,
getTheme,
};
| added getConfig()
| src/api/index.js | added getConfig() | <ide><path>rc/api/index.js
<ide> resolve(currentTheme || '');
<ide> });
<ide>
<add>/**
<add> * Retrieve the current config
<add> *
<add> * @return {Object} - The current configuration
<add> */
<add>const getConfig = () => new Promise(resolve => {
<add> resolve(config.store);
<add>});
<add>
<ide> module.exports = {
<ide> checkOnNpm,
<ide> install,
<ide> uninstall,
<ide> setTheme,
<ide> getTheme,
<add> getConfig,
<ide> }; |
|
Java | apache-2.0 | b64fc60c678993d24b2575a33ec5ccdcd8ec07cf | 0 | grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation | package com.navigation.reactnative;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.ActivityOptions;
import android.content.Intent;
import android.content.res.TypedArray;
import android.os.Bundle;
import android.util.Pair;
import android.view.View;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableArray;
import java.util.HashMap;
import java.util.HashSet;
/**
 * React Native native module that drives scene navigation by starting and
 * finishing Android activities, one activity per navigation crumb.
 */
public class NavigationModule extends ReactContextBaseJavaModule {
    /** Intents for every crumb navigated so far, keyed by crumb index. */
    private final HashMap<Integer, Intent> mIntents = new HashMap<>();
    // Default open/close activity transition animations resolved from the
    // current theme; used when no custom animation name is supplied.
    private final int activityOpenEnterAnimationId;
    private final int activityOpenExitAnimationId;
    private final int activityCloseEnterAnimationId;
    private final int activityCloseExitAnimationId;

    public NavigationModule(ReactApplicationContext reactContext) {
        super(reactContext);
        // Resolve the theme's window animation style, then pull the four
        // default activity open/close animation resource ids out of it.
        TypedArray activityStyle = getReactApplicationContext().getTheme().obtainStyledAttributes(new int[] {android.R.attr.windowAnimationStyle});
        int windowAnimationStyleResId = activityStyle.getResourceId(0, 0);
        activityStyle.recycle();

        activityStyle = getReactApplicationContext().getTheme().obtainStyledAttributes(windowAnimationStyleResId, new int[] {
            android.R.attr.activityOpenEnterAnimation, android.R.attr.activityOpenExitAnimation,
            android.R.attr.activityCloseEnterAnimation, android.R.attr.activityCloseExitAnimation
        });
        activityOpenEnterAnimationId = activityStyle.getResourceId(0, 0);
        activityOpenExitAnimationId = activityStyle.getResourceId(1, 0);
        activityCloseEnterAnimationId = activityStyle.getResourceId(2, 0);
        activityCloseExitAnimationId = activityStyle.getResourceId(3, 0);
        activityStyle.recycle();
    }

    @Override
    public String getName() {
        return "NavigationModule";
    }

    /**
     * Synchronises the Android activity stack with the requested crumb:
     * navigates back when {@code crumb} is below the current crumb and
     * starts new activities when it is above, applying the requested (or
     * the theme-default) transition animations.
     */
    @SuppressLint("NewApi")
    @ReactMethod
    public void render(int crumb, int tab, ReadableArray titles, String appKey, String enterAnim, String exitAnim) {
        final Activity currentActivity = getCurrentActivity();
        if (mIntents.size() == 0) {
            // Seed crumb 0 with the activity we were launched from.
            mIntents.put(0, currentActivity.getIntent());
        }
        int currentCrumb = mIntents.size() - 1;
        if (crumb < currentCrumb) {
            final Intent intent = mIntents.get(crumb);
            // Drop the intents for every crumb we are navigating away from.
            for(int i = crumb + 1; i <= currentCrumb; i++) {
                mIntents.remove(i);
            }
            final int enter = this.getAnimationResourceId(enterAnim, this.activityCloseEnterAnimationId);
            final int exit = this.getAnimationResourceId(exitAnim, this.activityCloseExitAnimationId);
            final boolean backOne = currentCrumb - crumb == 1;
            currentActivity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if (backOne)
                        // Going back one step: finish with the return transition.
                        currentActivity.finishAfterTransition();
                    else
                        currentActivity.navigateUpTo(intent);
                    currentActivity.overridePendingTransition(enter, exit);
                }
            });
        }
        if (crumb > currentCrumb) {
            final Intent[] intents = new Intent[crumb - currentCrumb];
            for(int i = 0; i < crumb - currentCrumb; i++) {
                int nextCrumb = currentCrumb + i + 1;
                // Alternate between the two scene activity classes so that
                // consecutive crumbs are hosted by distinct activities.
                Class<?> scene = nextCrumb % 2 == 0 ? SceneActivity.class : AlternateSceneActivity.class;
                Intent intent = new Intent(getReactApplicationContext(), scene);
                intent.putExtra(SceneActivity.CRUMB, nextCrumb);
                intent.putExtra(SceneActivity.APP_KEY, appKey);
                mIntents.put(nextCrumb, intent);
                intents[i] = intent;
            }
            final int enter = this.getAnimationResourceId(enterAnim, this.activityOpenEnterAnimationId);
            final int exit = this.getAnimationResourceId(exitAnim, this.activityOpenExitAnimationId);
            final boolean forwardOne = crumb - currentCrumb == 1;
            currentActivity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    // Shared element transitions only make sense for a
                    // single forward step.
                    Pair[] sharedElements = forwardOne ? getSharedElements() : null;
                    if (sharedElements != null) {
                        Bundle bundle = ActivityOptions.makeSceneTransitionAnimation(currentActivity, sharedElements).toBundle();
                        currentActivity.startActivity(intents[0], bundle);
                    } else {
                        currentActivity.startActivities(intents);
                    }
                    currentActivity.overridePendingTransition(enter, exit);
                }
            });
        }
    }

    /**
     * Resolves a named animation resource, falling back to the given
     * theme-default id when {@code animationName} is null.
     */
    private int getAnimationResourceId(String animationName, int defaultId) {
        if (animationName == null)
            return defaultId;
        String packageName = getReactApplicationContext().getPackageName();
        return getReactApplicationContext().getResources().getIdentifier(animationName, "anim", packageName);
    }

    /**
     * Collects the shared element views registered on the root view's tag
     * into (view, transitionName) pairs, or returns null when none exist.
     */
    @SuppressLint("NewApi")
    @SuppressWarnings("unchecked") // NOTE(review): assumes the tag is set as a HashSet<View> elsewhere — confirm against the shared-element registration code
    private Pair[] getSharedElements() {
        View rootView = getCurrentActivity().findViewById(android.R.id.content).getRootView();
        HashSet<View> sharedElements = (HashSet<View>) rootView.getTag(R.id.sharedElements);
        if (sharedElements == null)
            return null;
        Pair[] sharedElementPairs = new Pair[sharedElements.size()];
        int size = 0;
        for(View sharedElement : sharedElements) {
            sharedElementPairs[size] = Pair.create(sharedElement, sharedElement.getTransitionName());
            size++;
        }
        return sharedElementPairs;
    }
}
| NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/NavigationModule.java | package com.navigation.reactnative;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.ActivityOptions;
import android.content.Intent;
import android.content.res.TypedArray;
import android.os.Bundle;
import android.util.Pair;
import android.view.View;
import com.facebook.react.bridge.ReactApplicationContext;
import com.facebook.react.bridge.ReactContextBaseJavaModule;
import com.facebook.react.bridge.ReactMethod;
import com.facebook.react.bridge.ReadableArray;
import java.util.HashMap;
import java.util.HashSet;
public class NavigationModule extends ReactContextBaseJavaModule {
private HashMap<Integer, Intent> mIntents = new HashMap<>();
private int activityOpenEnterAnimationId;
private int activityOpenExitAnimationId;
private int activityCloseEnterAnimationId;
private int activityCloseExitAnimationId;
public NavigationModule(ReactApplicationContext reactContext) {
super(reactContext);
TypedArray activityStyle = getReactApplicationContext().getTheme().obtainStyledAttributes(new int[] {android.R.attr.windowAnimationStyle});
int windowAnimationStyleResId = activityStyle.getResourceId(0, 0);
activityStyle.recycle();
activityStyle = getReactApplicationContext().getTheme().obtainStyledAttributes(windowAnimationStyleResId, new int[] {
android.R.attr.activityOpenEnterAnimation, android.R.attr.activityOpenExitAnimation,
android.R.attr.activityCloseEnterAnimation, android.R.attr.activityCloseExitAnimation
});
activityOpenEnterAnimationId = activityStyle.getResourceId(0, 0);
activityOpenExitAnimationId = activityStyle.getResourceId(1, 0);
activityCloseEnterAnimationId = activityStyle.getResourceId(2, 0);
activityCloseExitAnimationId = activityStyle.getResourceId(3, 0);
activityStyle.recycle();
}
@Override
public String getName() {
return "NavigationModule";
}
@SuppressLint("NewApi")
@ReactMethod
public void render(int crumb, int tab, ReadableArray titles, String appKey, String enterAnim, String exitAnim) {
final Activity currentActivity = getCurrentActivity();
if (mIntents.size() == 0) {
mIntents.put(0, currentActivity.getIntent());
}
int currentCrumb = mIntents.size() - 1;
if (crumb < currentCrumb) {
final Intent intent = mIntents.get(crumb);
for(int i = crumb + 1; i <= currentCrumb; i++) {
mIntents.remove(i);
}
final int enter = this.getAnimationResourceId(enterAnim, this.activityCloseEnterAnimationId);
final int exit = this.getAnimationResourceId(exitAnim, this.activityCloseExitAnimationId);
currentActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
if (currentCrumb - crumb == 1)
currentActivity.finishAfterTransition();
else
currentActivity.navigateUpTo(intent);
currentActivity.overridePendingTransition(enter, exit);
}
});
}
if (crumb > currentCrumb) {
final Intent[] intents = new Intent[crumb - currentCrumb];
for(int i = 0; i < crumb - currentCrumb; i++) {
int nextCrumb = currentCrumb + i + 1;
Class scene = nextCrumb % 2 == 0 ? SceneActivity.class : AlternateSceneActivity.class;
Intent intent = new Intent(getReactApplicationContext(), scene);
intent.putExtra(SceneActivity.CRUMB, nextCrumb);
intent.putExtra(SceneActivity.APP_KEY, appKey);
mIntents.put(nextCrumb, intent);
intents[i] = intent;
}
final int enter = this.getAnimationResourceId(enterAnim, this.activityOpenEnterAnimationId);
final int exit = this.getAnimationResourceId(exitAnim, this.activityOpenExitAnimationId);
currentActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
Pair[] sharedElements = crumb - currentCrumb == 1 ? getSharedElements() : null;
if (sharedElements != null) {
Bundle bundle = ActivityOptions.makeSceneTransitionAnimation(currentActivity, sharedElements).toBundle();
currentActivity.startActivity(intents[0], bundle);
} else {
currentActivity.startActivities(intents);
}
currentActivity.overridePendingTransition(enter, exit);
}
});
}
}
private int getAnimationResourceId(String animationName, int defaultId) {
if (animationName == null)
return defaultId;
String packageName = getReactApplicationContext().getPackageName();
return getReactApplicationContext().getResources().getIdentifier(animationName, "anim", packageName);
}
@SuppressLint("NewApi")
private Pair[] getSharedElements() {
View rootView = getCurrentActivity().findViewById(android.R.id.content);
HashSet<View> sharedElements = (HashSet<View>) rootView.getTag(R.id.sharedElements);
if (sharedElements == null)
return null;
Pair[] sharedElementPairs = new Pair[sharedElements.size()];
int size = 0;
for(View sharedElement : sharedElements) {
sharedElementPairs[size] = Pair.create(sharedElement, sharedElement.getTransitionName());
size++;
}
return sharedElementPairs;
}
}
| Used final so inner class can reference
Also got root view instead of content view so it matches the shared element manager
| NavigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/NavigationModule.java | Used final so inner class can reference | <ide><path>avigationReactNative/src/android/app/src/main/java/com/navigation/reactnative/NavigationModule.java
<ide> }
<ide> final int enter = this.getAnimationResourceId(enterAnim, this.activityCloseEnterAnimationId);
<ide> final int exit = this.getAnimationResourceId(exitAnim, this.activityCloseExitAnimationId);
<add> final boolean backOne = currentCrumb - crumb == 1;
<ide> currentActivity.runOnUiThread(new Runnable() {
<ide> @Override
<ide> public void run() {
<del> if (currentCrumb - crumb == 1)
<add> if (backOne)
<ide> currentActivity.finishAfterTransition();
<ide> else
<ide> currentActivity.navigateUpTo(intent);
<ide> }
<ide> final int enter = this.getAnimationResourceId(enterAnim, this.activityOpenEnterAnimationId);
<ide> final int exit = this.getAnimationResourceId(exitAnim, this.activityOpenExitAnimationId);
<add> final boolean forwardOne = crumb - currentCrumb == 1;
<ide> currentActivity.runOnUiThread(new Runnable() {
<ide> @Override
<ide> public void run() {
<del> Pair[] sharedElements = crumb - currentCrumb == 1 ? getSharedElements() : null;
<add> Pair[] sharedElements = forwardOne ? getSharedElements() : null;
<ide> if (sharedElements != null) {
<ide> Bundle bundle = ActivityOptions.makeSceneTransitionAnimation(currentActivity, sharedElements).toBundle();
<ide> currentActivity.startActivity(intents[0], bundle);
<ide>
<ide> @SuppressLint("NewApi")
<ide> private Pair[] getSharedElements() {
<del> View rootView = getCurrentActivity().findViewById(android.R.id.content);
<add> View rootView = getCurrentActivity().findViewById(android.R.id.content).getRootView();
<ide> HashSet<View> sharedElements = (HashSet<View>) rootView.getTag(R.id.sharedElements);
<ide> if (sharedElements == null)
<ide> return null; |
|
Java | agpl-3.0 | 215587ee8ca9978dc201466d1a077bb0ab9d7b09 | 0 | aikuma/aikuma,aikuma/aikuma,hleeldc/aikuma,aikuma/aikuma,lisaslyis/aikuma,hleeldc/aikuma,lisaslyis/aikuma,lisaslyis/aikuma,hleeldc/aikuma,hleeldc/aikuma,lisaslyis/aikuma,aikuma/aikuma,lisaslyis/aikuma,hleeldc/aikuma,aikuma/aikuma | package au.edu.unimelb.boldapp;
import android.content.res.Resources;
import android.util.Log;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.UUID;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* Class to contain our (minimal number of) global variables.
*
* @author Oliver Adams <[email protected]>
* @author Florian Hanke <[email protected]>
*/
/**
 * Class to contain our (minimal number of) global variables.
 *
 * @author Oliver Adams <oliver.adams@gmail.com>
 * @author Florian Hanke <florian.hanke@gmail.com>
 */
public abstract class GlobalState {

	/**
	 * The user that is currently selected to be the author of new recordings
	 * and respeakings
	 */
	private static User currentUser;

	/** A list of all the users. */
	private static List<User> users;

	/** A map from UUIDs to users. */
	private static HashMap<UUID, User> userMap;

	/** A list of all the recordings. */
	private static List<Recording> recordings;

	/** A map from UUIDs to recordings. */
	private static HashMap<UUID, Recording> recordingMap;

	/**
	 * currentUser accessor
	 */
	public static User getCurrentUser() {
		return currentUser;
	}

	/**
	 * currentUser mutator
	 */
	public static void setCurrentUser(User currentUser) {
		GlobalState.currentUser = currentUser;
	}

	/**
	 * users accessor
	 */
	public static List<User> getUsers() {
		return GlobalState.users;
	}

	/**
	 * users mutator; also rebuilds the UUID-to-User map.
	 */
	public static void setUsers(List<User> users) {
		GlobalState.users = users;
		HashMap<UUID, User> userMap = new HashMap<UUID, User>();
		for (User user : users) {
			userMap.put(user.getUUID(), user);
		}
		GlobalState.userMap = userMap;
	}

	/**
	 * userMap accessor
	 */
	public static HashMap<UUID, User> getUserMap() {
		return GlobalState.userMap;
	}

	/**
	 * recordings mutator; also rebuilds the UUID-to-Recording map.
	 */
	public static void setRecordings(List<Recording> recordings) {
		GlobalState.recordings = recordings;
		HashMap<UUID, Recording> recordingMap = new HashMap<UUID, Recording>();
		for (Recording recording : recordings) {
			recordingMap.put(recording.getUUID(), recording);
		}
		GlobalState.recordingMap = recordingMap;
	}

	/**
	 * default recordings accessor
	 */
	public static List<Recording> getRecordings() {
		return GlobalState.recordings;
	}

	/**
	 * recordings accessor; sorts the shared list in place before returning it.
	 *
	 * @param	sortBy	String with values either "alphabetical" or "date"
	 * indicating how the caller wants the recordings sorted.
	 */
	public static List<Recording> getRecordings(String sortBy) {
		Collections.sort(GlobalState.recordings, new RecordingComparator(sortBy));
		return GlobalState.recordings;
	}

	/**
	 * recordingMap accessor
	 */
	public static HashMap<UUID, Recording> getRecordingMap() {
		return GlobalState.recordingMap;
	}

	/**
	 * Loads the users from the bold directory.
	 */
	public static void loadUsers() {
		setUsers(FileIO.readUsers());
	}

	/**
	 * Loads the recordings from the bold directory.
	 */
	public static void loadRecordings() {
		setRecordings(FileIO.readRecordings());
	}

	/**
	 * The mapping from language names to codes.
	 *
	 * BUGFIX: declared volatile because it is written by the background
	 * loader thread and read by callers; without it the old spin loop in
	 * getLangCodeMap had no visibility guarantee and could hang forever.
	 */
	private static volatile Map langCodeMap;

	/**
	 * langCodeMap accessor. Kicks off loading if necessary and waits for
	 * the loader thread to finish.
	 *
	 * @return the map, or null if loading failed (the previous busy-wait
	 * loop would spin forever in that case).
	 */
	public static Map getLangCodeMap(Resources resources) {
		if (langCodeMap == null) {
			if (GlobalState.loadLangCodesThread == null ||
					!GlobalState.loadLangCodesThread.isAlive()) {
				loadLangCodeMap(resources);
			}
			try {
				// BUGFIX: join the loader thread instead of busy-spinning
				// on the field, which burned CPU and lacked a memory barrier.
				GlobalState.loadLangCodesThread.join();
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
			}
		}
		return langCodeMap;
	}

	/**
	 * langCodeMap mutator
	 */
	public static void setLangCodeMap(Map langCodeMap) {
		GlobalState.langCodeMap = langCodeMap;
	}

	// Background thread that loads (and caches) the language code map.
	private static Thread loadLangCodesThread;

	/**
	 * Loads the language code map on a background thread, using a
	 * serialized cache file when one exists and writing one otherwise.
	 *
	 * @param	resources	resources so that the langCodes can be retrieved
	 * from the text file if necessary.
	 */
	public static void loadLangCodeMap(final Resources resources) {
		GlobalState.loadLangCodesThread = new Thread(new Runnable() {
			@Override
			public void run() {
				try {
					File mapFile =
							new File(FileIO.getAppRootPath(), "lang_codes");
					if (mapFile.exists()) {
						// Fast path: deserialize the cached map.
						FileInputStream fis = new FileInputStream(mapFile);
						ObjectInputStream ois = new ObjectInputStream(fis);
						try {
							GlobalState.setLangCodeMap((Map) ois.readObject());
						} finally {
							// BUGFIX: streams were previously never closed.
							ois.close();
						}
					} else {
						// Slow path: parse the resource file, then cache it.
						Map langCodeMap =
								FileIO.readLangCodes(resources);
						GlobalState.setLangCodeMap(langCodeMap);
						FileOutputStream fos = new FileOutputStream(mapFile);
						ObjectOutputStream oos = new ObjectOutputStream(fos);
						try {
							oos.writeObject(langCodeMap);
						} finally {
							// BUGFIX: streams were previously never closed.
							oos.close();
						}
					}
				} catch (IOException e) {
					// BUGFIX: was silently swallowed ("This is bad.").
					Log.e("GlobalState", "Failed to load language codes", e);
				} catch (ClassNotFoundException e) {
					// BUGFIX: was silently swallowed ("This is bad.").
					Log.e("GlobalState", "Corrupt language code cache", e);
				}
			}
		});
		GlobalState.loadLangCodesThread.start();
	}
}
| BOLDApp/src/au/edu/unimelb/boldapp/GlobalState.java | package au.edu.unimelb.boldapp;
import android.content.res.Resources;
import android.util.Log;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.UUID;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* Class to contain our (minimal number of) global variables.
*
* @author Oliver Adams <[email protected]>
* @author Florian Hanke <[email protected]>
*/
public abstract class GlobalState {

    /**
     * The user that is currently selected to be the author of new recordings
     * and respeakings.
     */
    private static User currentUser;

    /** A list of all the users. */
    private static List<User> users;

    /** A map from UUIDs to users; rebuilt whenever {@link #setUsers} runs. */
    private static HashMap<UUID, User> userMap;

    /** A list of all the recordings. */
    private static List<Recording> recordings;

    /** A map from UUIDs to recordings; rebuilt whenever {@link #setRecordings} runs. */
    private static HashMap<UUID, Recording> recordingMap;

    /**
     * The mapping from language names to codes. Written by the loader thread
     * and read by callers, hence volatile for cross-thread visibility.
     */
    private static volatile Map langCodeMap;

    /** Background thread that populates {@link #langCodeMap}. */
    private static Thread loadLangCodesThread;

    /**
     * currentUser accessor.
     *
     * @return the currently selected user, or null if none has been set.
     */
    public static User getCurrentUser() {
        return currentUser;
    }

    /**
     * currentUser mutator.
     */
    public static void setCurrentUser(User currentUser) {
        GlobalState.currentUser = currentUser;
    }

    /**
     * users accessor.
     */
    public static List<User> getUsers() {
        return GlobalState.users;
    }

    /**
     * users mutator; also rebuilds the UUID-to-user map so lookups stay
     * consistent with the list.
     */
    public static void setUsers(List<User> users) {
        GlobalState.users = users;
        HashMap<UUID, User> map = new HashMap<UUID, User>();
        for (User user : users) {
            map.put(user.getUUID(), user);
        }
        GlobalState.userMap = map;
    }

    /**
     * userMap accessor.
     */
    public static HashMap<UUID, User> getUserMap() {
        return GlobalState.userMap;
    }

    /**
     * recordings mutator; also rebuilds the UUID-to-recording map.
     */
    public static void setRecordings(List<Recording> recordings) {
        GlobalState.recordings = recordings;
        HashMap<UUID, Recording> map = new HashMap<UUID, Recording>();
        for (Recording recording : recordings) {
            map.put(recording.getUUID(), recording);
        }
        GlobalState.recordingMap = map;
    }

    /**
     * Default recordings accessor (no sorting applied).
     */
    public static List<Recording> getRecordings() {
        return GlobalState.recordings;
    }

    /**
     * recordings accessor that sorts the shared list in place before
     * returning it.
     *
     * @param sortBy String with values either "alphabetical" or "date"
     *               indicating how the caller wants the recordings sorted.
     */
    public static List<Recording> getRecordings(String sortBy) {
        Collections.sort(GlobalState.recordings, new RecordingComparator(sortBy));
        return GlobalState.recordings;
    }

    /**
     * recordingMap accessor.
     */
    public static HashMap<UUID, Recording> getRecordingMap() {
        return GlobalState.recordingMap;
    }

    /**
     * Loads the users from the bold directory.
     *
     * @return false if reading the users failed; true otherwise.
     */
    public static boolean loadUsers() {
        try {
            setUsers(FileIO.readUsers());
        } catch (IOException e) {
            return false;
        }
        return true;
    }

    /**
     * Loads the recordings from the bold directory.
     *
     * @return false if reading the recordings failed; true otherwise.
     */
    public static boolean loadRecordings() {
        try {
            setRecordings(FileIO.readRecordings());
        } catch (IOException e) {
            return false;
        }
        return true;
    }

    /**
     * langCodeMap accessor. Starts the loader thread if necessary and then
     * blocks until it finishes (joining instead of busy-waiting, which the
     * previous implementation did and which pegged a CPU core).
     *
     * @param resources resources so the codes can be read from the bundled
     *                  text file on a cache miss.
     * @return the language-code map, or null if loading failed.
     */
    public static Map getLangCodeMap(Resources resources) {
        if (langCodeMap == null) {
            if (loadLangCodesThread == null || !loadLangCodesThread.isAlive()) {
                loadLangCodeMap(resources);
            }
            try {
                loadLangCodesThread.join();
            } catch (InterruptedException e) {
                // Preserve the interrupt status for callers further up.
                Thread.currentThread().interrupt();
            }
        }
        return langCodeMap;
    }

    /**
     * langCodeMap mutator.
     */
    public static void setLangCodeMap(Map langCodeMap) {
        GlobalState.langCodeMap = langCodeMap;
    }

    /**
     * Loads the language code map on a background thread. On first use the
     * map is parsed from the bundled resource and cached to a file; later
     * runs deserialize the cached file instead.
     *
     * @param resources resources so that the langCodes can be retrieved
     *                  from the text file if necessary.
     */
    public static void loadLangCodeMap(final Resources resources) {
        GlobalState.loadLangCodesThread = new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    File mapFile =
                            new File(FileIO.getAppRootPath(), "lang_codes");
                    if (mapFile.exists()) {
                        readLangCodeCache(mapFile);
                    } else {
                        Map langCodeMap = FileIO.readLangCodes(resources);
                        GlobalState.setLangCodeMap(langCodeMap);
                        writeLangCodeCache(mapFile, langCodeMap);
                    }
                } catch (IOException e) {
                    // Loading failed; langCodeMap stays null and callers of
                    // getLangCodeMap() will see null after the join.
                } catch (ClassNotFoundException e) {
                    // Corrupt cache file; langCodeMap stays null.
                }
            }
        });
        GlobalState.loadLangCodesThread.start();
    }

    /**
     * Reads the cached language-code map, always closing the stream
     * (the previous implementation leaked it).
     */
    private static void readLangCodeCache(File mapFile)
            throws IOException, ClassNotFoundException {
        ObjectInputStream ois = null;
        try {
            ois = new ObjectInputStream(new FileInputStream(mapFile));
            GlobalState.setLangCodeMap((Map) ois.readObject());
        } finally {
            if (ois != null) {
                ois.close();
            }
        }
    }

    /**
     * Serializes the language-code map to the cache file, always closing
     * the stream.
     */
    private static void writeLangCodeCache(File mapFile, Map langCodeMap)
            throws IOException {
        ObjectOutputStream oos = null;
        try {
            oos = new ObjectOutputStream(new FileOutputStream(mapFile));
            oos.writeObject(langCodeMap);
        } finally {
            if (oos != null) {
                oos.close();
            }
        }
    }
}
| now that readRecordings and readUsers cannot throw exceptions, removed the try and catch in loadUsers and loadRecordings, and changed the signatures to void
| BOLDApp/src/au/edu/unimelb/boldapp/GlobalState.java | now that readRecordings and readUsers cannot throw exceptions, removed the try and catch in loadUsers and loadRecordings, and changed the signatures to void | <ide><path>OLDApp/src/au/edu/unimelb/boldapp/GlobalState.java
<ide> /**
<ide> * Loads the users from the bold directory.
<ide> */
<del> public static boolean loadUsers() {
<del> try {
<del> setUsers(FileIO.readUsers());
<del> } catch (IOException e) {
<del> return false;
<del> }
<del> return true;
<add> public static void loadUsers() {
<add> setUsers(FileIO.readUsers());
<ide> }
<ide>
<ide> /**
<ide> * Loads the users from the bold directory.
<ide> */
<del> public static boolean loadRecordings() {
<del> try {
<del> setRecordings(FileIO.readRecordings());
<del> } catch (IOException e) {
<del> return false;
<del> }
<del> return true;
<add> public static void loadRecordings() {
<add> setRecordings(FileIO.readRecordings());
<ide> }
<ide>
<ide> /** |
|
JavaScript | apache-2.0 | 77ae15525c2581c32e6b19860fa43583a6dcc8c6 | 0 | samsweco/KosterIOS,samsweco/KosterIOS | Ti.include("geoFunctions.js");
Ti.include("mapFunctions.js");
$.tabs.open();
Alloy.CFG.tabs = $.tabs;
exports.toInteractive = toInteractive;
var infoVisible = false;
//-----------------------------------------------------------
// Metoder för navigeringen
//-----------------------------------------------------------
function toMap() {
try {
var mapWind = Alloy.createController('map').getView();
$.mapWin.add(mapWind);
var mapwinTitle = Ti.UI.createLabel({
font : {
fontSize : '16dp',
fontFamily : 'Raleway-Medium'
},
text : 'Karta'
});
$.mapWin.titleControl = mapwinTitle;
} catch(e) {
newError("Något gick fel när sidan skulle laddas, prova igen!", "Förstasidan");
}
}
function toInteractive() {
try {
var interactive = Alloy.createController('interactive').getView();
$.interactiveWin.add(interactive);
var interacwinTitle = Ti.UI.createLabel({
font : {
fontSize : '16dp',
fontFamily : 'Raleway-Medium'
},
text : 'Bokstavsjakt'
});
$.interactiveWin.titleControl = interacwinTitle;
} catch(e) {
newError("Något gick fel när sidan skulle laddas, prova igen!", "Förstasidan");
}
}
function toTrails() {
try {
var trails = Alloy.createController('trails').getView();
$.hikeWin.add(trails);
var hikewinTitle = Ti.UI.createLabel({
font : {
fontSize : '16dp',
fontFamily : 'Raleway-Medium'
},
text : 'Vandringsleder'
});
$.hikeWin.titleControl = hikewinTitle;
} catch(e) {
newError("Något gick fel när sidan skulle laddas, prova igen!", "Förstasidan");
}
}
function toInfo() {
try {
var info = Alloy.createController('infoList').getView();
$.infoWin.add(info);
var infowinTitle = Ti.UI.createLabel({
font : {
fontSize : '16dp',
fontFamily : 'Raleway-Medium'
},
text : 'Information'
});
$.infoWin.titleControl = infowinTitle;
} catch(e) {
newError("Något gick fel när sidan skulle laddas, prova igen!", "Förstasidan");
}
}
//-----------------------------------------------------------
// Null'ar varje fönster när man trycker på en annan tab,
// för att minska risk för minnesläcka
//-----------------------------------------------------------
// Nulling each window reference on blur lets the controller be garbage
// collected, reducing the risk of memory leaks when switching tabs.
$.mapWin.addEventListener('blur', function() {
	$.mapWin = null;
	// clears the map
	reloadMap();
	Alloy.Globals.reloadMapMenu();
});
$.interactiveWin.addEventListener('blur', function() {
	$.interactiveWin = null;
});
$.hikeWin.addEventListener('blur', function() {
	$.hikeWin = null;
});
$.infoWin.addEventListener('blur', function() {
	$.infoWin = null;
});
$.koster.addEventListener('blur', function() {
	$.koster = null;
});
//-----------------------------------------------------------
// Öppnar och stänger menyn på "stora kartan"
//-----------------------------------------------------------
// Toggles the menu on the "big map" view.
// NOTE(review): menuMapVisible is not declared in this file — presumably a
// global set elsewhere; verify before relying on its initial value.
function showMapMenu() {
	if (menuMapVisible) {
		Alloy.Globals.closeMenu();
	} else {
		Alloy.Globals.openMenu();
	}
	menuMapVisible = !menuMapVisible;
}
//-----------------------------------------------------------
// Skickar till båtleden från startsidan
//-----------------------------------------------------------
// Opens the trail-detail view for the boat trip between Strömstad and
// Koster, using a hard-coded dataset describing the route.
function openBoat(){
	var boatTrip = {
		id : 8,
		title : 'Båtresan',
		length : 10,
		infoTxt : 'Välkommen på båtturen mellan Strömstad och Koster. Turen är cirka 10 km lång och tar mellan 30 och 60 minuter. Under resan kommer du få lite information om Kosterhavet och livet där.',
		area : 'Strömstad-Koster',
		zoomlat : '58.936458',
		zoomlon : '11.172279',
		color : 'boat'
	};
	var detailView = Alloy.createController("trailDetail", boatTrip).getView();
	Alloy.CFG.tabs.activeTab.open(detailView);
}
//-----------------------------------------------------------
// Visar infoWidget
//-----------------------------------------------------------
// Toggles the info widget: expands it to 75% height when hidden,
// collapses it to zero height when visible.
function showInfo(){
	if (infoVisible) {
		$.widgetInfo.hide();
		$.widgetInfo.height = 0;
	} else {
		$.widgetInfo.show();
		$.widgetInfo.height = '75%';
	}
	infoVisible = !infoVisible;
}
| app/controllers/index.js | Ti.include("geoFunctions.js");
Ti.include("mapFunctions.js");
$.tabs.open();
Alloy.CFG.tabs = $.tabs;
exports.toInteractive = toInteractive;
var infoVisible = false;
//-----------------------------------------------------------
// Metoder för navigeringen
//-----------------------------------------------------------
function toMap() {
try {
var mapWind = Alloy.createController('map').getView();
$.mapWin.add(mapWind);
var mapwinTitle = Ti.UI.createLabel({
font : {
fontSize : '16dp',
fontFamily : 'Raleway-Medium'
},
text : 'Karta'
});
$.mapWin.titleControl = mapwinTitle;
} catch(e) {
newError("Något gick fel när sidan skulle laddas, prova igen!", "Förstasidan");
}
}
function toInteractive() {
try {
var interactive = Alloy.createController('interactive').getView();
$.interactiveWin.add(interactive);
var interacwinTitle = Ti.UI.createLabel({
font : {
fontSize : '16dp',
fontFamily : 'Raleway-Medium'
},
text : 'Bokstavsjakt'
});
$.interactiveWin.titleControl = interacwinTitle;
} catch(e) {
newError("Något gick fel när sidan skulle laddas, prova igen!", "Förstasidan");
}
}
function toTrails() {
try {
var trails = Alloy.createController('trails').getView();
$.hikeWin.add(trails);
var hikewinTitle = Ti.UI.createLabel({
font : {
fontSize : '16dp',
fontFamily : 'Raleway-Medium'
},
text : 'Vandringsleder'
});
$.hikeWin.titleControl = hikewinTitle;
} catch(e) {
newError("Något gick fel när sidan skulle laddas, prova igen!", "Förstasidan");
}
}
function toInfo() {
try {
var info = Alloy.createController('infoList').getView();
$.infoWin.add(info);
var infowinTitle = Ti.UI.createLabel({
font : {
fontSize : '16dp',
fontFamily : 'Raleway-Medium'
},
text : 'Information'
});
$.infoWin.titleControl = infowinTitle;
} catch(e) {
newError("Något gick fel när sidan skulle laddas, prova igen!", "Förstasidan");
}
}
//-----------------------------------------------------------
// Null'ar varje fönster när man trycker på en annan tab,
// för att minska risk för minnesläcka
//-----------------------------------------------------------
$.mapWin.addEventListener('blur', function() {
$.mapWin = null;
// rensar kartan
reloadMap();
Alloy.Globals.reloadMapMenu();
});
$.interactiveWin.addEventListener('blur', function() {
$.interactiveWin = null;
});
$.hikeWin.addEventListener('blur', function() {
$.hikeWin = null;
});
$.infoWin.addEventListener('blur', function() {
$.infoWin = null;
});
$.koster.addEventListener('blur', function() {
$.koster = null;
});
//-----------------------------------------------------------
// Öppnar och stänger menyn på "stora kartan"
//-----------------------------------------------------------
function showMapMenu() {
if(!menuMapVisible){
Alloy.Globals.openMenu();
menuMapVisible = true;
}else{
Alloy.Globals.closeMenu();
menuMapVisible = false;
}
}
//-----------------------------------------------------------
// Skickar till båtleden från startsidan
//-----------------------------------------------------------
function openBoat(){
var args = {
id : 8,
title : 'Båtresan',
length : 10,
infoTxt : 'Välkommen på båtturen mellan Strömstad och Koster. Turen är cirka 10 km lång och tar mellan 30 och 60 minuter. Under resan kommer du få lite information om Kosterhavet och livet där.',
area : 'Strömstad-Koster',
zoomlat : '58.936458',
zoomlon : '11.172279',
color : 'boat'
};
var trailDetail = Alloy.createController("trailDetail", args).getView();
Alloy.CFG.tabs.activeTab.open(trailDetail);
}
//-----------------------------------------------------------
// Visar infoWidget
//-----------------------------------------------------------
function showInfo(){
if(!infoVisible){
$.widgetInfo.show();
$.widgetInfo.height = '80%';
infoVisible = true;
} else {
$.widgetInfo.hide();
$.widgetInfo.height = 0;
infoVisible = false;
}
}
| index
| app/controllers/index.js | index | <ide><path>pp/controllers/index.js
<ide> function showInfo(){
<ide> if(!infoVisible){
<ide> $.widgetInfo.show();
<del> $.widgetInfo.height = '80%';
<add> $.widgetInfo.height = '75%';
<ide> infoVisible = true;
<ide> } else {
<ide> $.widgetInfo.hide(); |
|
Java | apache-2.0 | d85eabbcb3e508182b366dd615acba6eb3753ef1 | 0 | rapidoid/rapidoid,rapidoid/rapidoid,rapidoid/rapidoid,rapidoid/rapidoid | package org.rapidoid.json;
import java.io.OutputStream;
import java.util.Map;
import org.rapidoid.util.U;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
/*
* #%L
* rapidoid-json
* %%
* Copyright (C) 2014 Nikolche Mihajlovski
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
 * Thin static facade over a shared Jackson {@link ObjectMapper} for JSON
 * serialization and parsing. All checked Jackson exceptions are re-thrown
 * as unchecked, now with a contextual message (previously the cause was
 * wrapped with no message at all).
 */
public class JSON {

    /** Shared, thread-safe mapper; configured once at class load. */
    private static final ObjectMapper MAPPER = mapper();

    private static ObjectMapper mapper() {
        ObjectMapper mapper = new ObjectMapper();
        // Tolerate JSON attributes that have no counterpart in the target class.
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
        return mapper;
    }

    /**
     * Serializes the given value to a JSON string.
     */
    public static String stringify(Object value) {
        try {
            return MAPPER.writeValueAsString(value);
        } catch (Exception e) {
            throw new RuntimeException("Cannot stringify value as JSON!", e);
        }
    }

    /**
     * Serializes the given value to JSON, injecting extra top-level
     * attributes. Extra values are stored as strings.
     *
     * @param extras
     *            extra JSON attributes in format (key1, value1, key2, value2...)
     * @throws IllegalArgumentException
     *             if the extras are not an even number of elements, if a key
     *             is not a String, or if the value does not serialize to a
     *             JSON object.
     */
    public static String stringifyWithExtras(Object value, Object... extras) {
        if (extras.length % 2 != 0) {
            throw new IllegalArgumentException(
                    "Expected even number of extras (key1, value1, key2, value2...), but found: " + extras.length);
        }
        try {
            JsonNode node = MAPPER.valueToTree(value);

            if (!(node instanceof ObjectNode)) {
                // Consistent with the length check above: bad arguments get
                // IllegalArgumentException (still a RuntimeException, so
                // existing catch blocks keep working).
                throw new IllegalArgumentException("Cannot add extra attributes on a non-object value: " + value);
            }

            ObjectNode obj = (ObjectNode) node;

            int extrasN = extras.length / 2;
            for (int i = 0; i < extrasN; i++) {
                Object key = extras[2 * i];
                if (key instanceof String) {
                    obj.put((String) key, String.valueOf(extras[2 * i + 1]));
                } else {
                    throw new IllegalArgumentException("Expected extra key of type String, but found: " + key);
                }
            }

            return MAPPER.writeValueAsString(node);
        } catch (IllegalArgumentException e) {
            throw e;
        } catch (Exception e) {
            throw new RuntimeException("Cannot stringify value with extras as JSON!", e);
        }
    }

    /**
     * Serializes the given value as JSON directly to the output stream.
     */
    public static void stringify(Object value, OutputStream out) {
        try {
            MAPPER.writeValue(out, value);
        } catch (Exception e) {
            throw new RuntimeException("Cannot stringify value as JSON to the output stream!", e);
        }
    }

    /**
     * Parses the JSON string into an instance of the given type.
     * Logs the offending input before re-throwing, to aid debugging.
     */
    public static <T> T parse(String json, Class<T> valueType) {
        try {
            return MAPPER.readValue(json, valueType);
        } catch (Exception e) {
            U.error("Cannot parse JSON!", "json", json, "error", e);
            throw new RuntimeException("Cannot parse JSON!", e);
        }
    }

    /**
     * Parses the JSON string into a Map.
     */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> parseMap(String json) {
        return parse(json, Map.class);
    }

    /**
     * Exercises both code paths once so later calls don't pay Jackson's
     * first-use initialization cost.
     */
    public static void warmup() {
        JSON.stringify(123);
        JSON.parse("{}", Map.class);
    }
}
| rapidoid-json/src/main/java/org/rapidoid/json/JSON.java | package org.rapidoid.json;
import java.io.OutputStream;
import java.util.Map;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
/*
* #%L
* rapidoid-json
* %%
* Copyright (C) 2014 Nikolche Mihajlovski
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public class JSON {
private static final ObjectMapper MAPPER = mapper();
private static ObjectMapper mapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
return mapper;
}
public static String stringify(Object value) {
try {
return MAPPER.writeValueAsString(value);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* @param extras
* extra JSON attributes in format (key1, value1, key2, value2...)
*/
public static String stringifyWithExtras(Object value, Object... extras) {
if (extras.length % 2 != 0) {
throw new IllegalArgumentException(
"Expected even number of extras (key1, value1, key2, value2...), but found: " + extras.length);
}
try {
JsonNode node = MAPPER.valueToTree(value);
if (!(node instanceof ObjectNode)) {
throw new RuntimeException("Cannot add extra attributes on a non-object value: " + value);
}
ObjectNode obj = (ObjectNode) node;
int extrasN = extras.length / 2;
for (int i = 0; i < extrasN; i++) {
Object key = extras[2 * i];
if (key instanceof String) {
obj.put((String) key, String.valueOf(extras[2 * i + 1]));
} else {
throw new RuntimeException("Expected extra key of type String, but found: " + key);
}
}
return MAPPER.writeValueAsString(node);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static void stringify(Object value, OutputStream out) {
try {
MAPPER.writeValue(out, value);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
public static <T> T parse(String json, Class<T> valueType) {
try {
return MAPPER.readValue(json, valueType);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@SuppressWarnings("unchecked")
public static Map<String, Object> parseMap(String json) {
return parse(json, Map.class);
}
public static void warmup() {
JSON.stringify(123);
JSON.parse("{}", Map.class);
}
}
| Improved JSON parsing error handling.
| rapidoid-json/src/main/java/org/rapidoid/json/JSON.java | Improved JSON parsing error handling. | <ide><path>apidoid-json/src/main/java/org/rapidoid/json/JSON.java
<ide>
<ide> import java.io.OutputStream;
<ide> import java.util.Map;
<add>
<add>import org.rapidoid.util.U;
<ide>
<ide> import com.fasterxml.jackson.databind.DeserializationFeature;
<ide> import com.fasterxml.jackson.databind.JsonNode;
<ide> try {
<ide> return MAPPER.readValue(json, valueType);
<ide> } catch (Exception e) {
<add> U.error("Cannot parse JSON!", "json", json, "error", e);
<ide> throw new RuntimeException(e);
<ide> }
<ide> } |
|
Java | mit | 3b84af6a0dc113490854f9f61eaffc7f8ed8c9f1 | 0 | Adven27/Exam,Adven27/Exam,Adven27/Exam,Adven27/Exam,Adven27/Exam | package com.adven.concordion.extensions.exam.configurators;
import com.adven.concordion.extensions.exam.ExamExtension;
import com.codeborne.selenide.Configuration;
import com.codeborne.selenide.WebDriverRunner;
import io.github.bonigarcia.wdm.ChromeDriverManager;
import io.github.bonigarcia.wdm.FirefoxDriverManager;
import io.github.bonigarcia.wdm.InternetExplorerDriverManager;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.remote.DesiredCapabilities;
import static java.util.Collections.singletonMap;
import static org.openqa.selenium.chrome.ChromeOptions.CAPABILITY;
public class WebDriverCfg {
private static boolean webDriverInited = false;
private final ExamExtension extension;
private Long timeout;
private String browser;
private String baseUrl;
private String version;
private boolean headless;
public WebDriverCfg(ExamExtension extension) {
this.extension = extension;
}
private static void setUp(ExamExtension extension,
Long timeout,
String browser,
String version,
String baseUrl,
boolean headless) {
if (!webDriverInited) {
if (timeout != null) {
Configuration.timeout = timeout;
}
if (baseUrl != null) {
Configuration.baseUrl = baseUrl;
}
if (browser == null) {
browser = WebDriverRunner.CHROME;
}
Configuration.browser = browser;
switch (browser) {
case WebDriverRunner.FIREFOX:
FirefoxDriverManager.getInstance().version(version).setup();
break;
case WebDriverRunner.INTERNET_EXPLORER:
InternetExplorerDriverManager.getInstance().version(version).setup();
break;
default:
ChromeDriverManager.getInstance().version(version).setup();
if (headless) {
setHeadlessChromeOptions(extension);
}
}
webDriverInited = true;
}
}
private static void setHeadlessChromeOptions(ExamExtension extension) {
final ChromeOptions opt = new ChromeOptions();
opt.addArguments(
"no-sandbox", "headless", "disable-gpu", "disable-extensions", "window-size=1366,768");
extension.webDriverCapabilities(new DesiredCapabilities(singletonMap(CAPABILITY, opt)));
}
public WebDriverCfg timeout(long timeout) {
this.timeout = timeout;
return this;
}
public WebDriverCfg baseUrl(String baseUrl) {
this.baseUrl = baseUrl;
return this;
}
public WebDriverCfg version(String version) {
this.version = version;
return this;
}
public WebDriverCfg browser(String browser) {
this.browser = browser;
return this;
}
//FIXME Only chrome is supported
public WebDriverCfg headless() {
this.headless = true;
return this;
}
public ExamExtension end() {
setUp(extension, timeout, browser, version, baseUrl, headless);
return extension;
}
} | src/main/java/com/adven/concordion/extensions/exam/configurators/WebDriverCfg.java | package com.adven.concordion.extensions.exam.configurators;
import com.adven.concordion.extensions.exam.ExamExtension;
import com.codeborne.selenide.Configuration;
import com.codeborne.selenide.WebDriverRunner;
import io.github.bonigarcia.wdm.ChromeDriverManager;
import io.github.bonigarcia.wdm.FirefoxDriverManager;
import io.github.bonigarcia.wdm.InternetExplorerDriverManager;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.remote.DesiredCapabilities;
import static java.util.Collections.singletonMap;
import static org.openqa.selenium.chrome.ChromeOptions.CAPABILITY;
public class WebDriverCfg {
private static boolean webDriverInited = false;
private final ExamExtension extension;
private Long timeout;
private String browser;
private String baseUrl;
private String version;
private boolean headless;
public WebDriverCfg(ExamExtension extension) {
this.extension = extension;
}
private static void setUp(ExamExtension extension,
Long timeout,
String browser,
String version,
String baseUrl,
boolean headless) {
if (!webDriverInited) {
if (timeout != null) {
Configuration.timeout = timeout;
}
if (baseUrl != null) {
Configuration.baseUrl = baseUrl;
}
if (browser == null) {
browser = WebDriverRunner.CHROME;
}
Configuration.browser = browser;
switch (browser) {
case WebDriverRunner.FIREFOX:
FirefoxDriverManager.getInstance().version(version).setup();
break;
case WebDriverRunner.INTERNET_EXPLORER:
InternetExplorerDriverManager.getInstance().version(version).setup();
break;
default:
ChromeDriverManager.getInstance().version(version).setup();
if (headless) {
setHeadlessChromeOptions(extension);
}
}
webDriverInited = true;
}
}
private static void setHeadlessChromeOptions(ExamExtension extension) {
final ChromeOptions opt = new ChromeOptions();
opt.addArguments(
"no-sandbox", "headless", "disable-gpu", "disable-extensions", "window-size=1366x768");
extension.webDriverCapabilities(new DesiredCapabilities(singletonMap(CAPABILITY, opt)));
}
public WebDriverCfg timeout(long timeout) {
this.timeout = timeout;
return this;
}
public WebDriverCfg baseUrl(String baseUrl) {
this.baseUrl = baseUrl;
return this;
}
public WebDriverCfg version(String version) {
this.version = version;
return this;
}
public WebDriverCfg browser(String browser) {
this.browser = browser;
return this;
}
//FIXME Only chrome is supported
public WebDriverCfg headless() {
this.headless = true;
return this;
}
public ExamExtension end() {
setUp(extension, timeout, browser, version, baseUrl, headless);
return extension;
}
} | smallfix
| src/main/java/com/adven/concordion/extensions/exam/configurators/WebDriverCfg.java | smallfix | <ide><path>rc/main/java/com/adven/concordion/extensions/exam/configurators/WebDriverCfg.java
<ide> private static void setHeadlessChromeOptions(ExamExtension extension) {
<ide> final ChromeOptions opt = new ChromeOptions();
<ide> opt.addArguments(
<del> "no-sandbox", "headless", "disable-gpu", "disable-extensions", "window-size=1366x768");
<add> "no-sandbox", "headless", "disable-gpu", "disable-extensions", "window-size=1366,768");
<ide> extension.webDriverCapabilities(new DesiredCapabilities(singletonMap(CAPABILITY, opt)));
<ide> }
<ide> |
|
Java | apache-2.0 | 80487e8785f630e04f152a9d56fdecd73b43a9ec | 0 | anhem/urchin,anhem/urchin,anhem/urchin,anhem/urchin | package urchin.selenium;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import urchin.selenium.testutil.SeleniumTest;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphanumeric;
import static urchin.testutil.UnixUserAndGroupCleanup.GROUP_PREFIX;
import static urchin.testutil.UnixUserAndGroupCleanup.USERNAME_PREFIX;
/**
 * Selenium integration test for ACL handling: creates a group, a user and a
 * folder through the UI before the tests run, and deletes them again
 * afterwards so no state leaks between runs.
 *
 * NOTE(review): the class name breaks Java naming conventions (should be
 * UpperCamelCase, e.g. AclITCase); left unchanged since IT classes may be
 * selected by name pattern in the build.
 */
public class aclITCase extends SeleniumTest {

    @ClassRule
    public static TemporaryFolder temporaryFolder = new TemporaryFolder();

    // Timestamp-suffixed names keep parallel/repeated runs from colliding.
    private static String groupName;
    private static String username;
    private static String folderName;
    private static String folderPath;

    /** Creates the group, user and folder fixtures through the UI. */
    @BeforeClass
    public static void setUp() {
        groupName = GROUP_PREFIX + System.currentTimeMillis();
        username = USERNAME_PREFIX + System.currentTimeMillis();
        folderName = "test-" + System.currentTimeMillis();
        folderPath = temporaryFolder.getRoot().getAbsolutePath() + "/" + folderName;

        NEW_GROUP.goTo()
                .fillGroupName(groupName)
                .clickOnCreateGroup();

        GROUPS.verifyGroupNameListed(groupName);

        NEW_USER.goTo()
                .fillUsername(username)
                .fillPassword(randomAlphanumeric(10))
                .clickOnCreateUser();

        USERS.verifyUsernameListed(username);

        NEW_FOLDER.goTo()
                .fillFolderPath(folderPath)
                .clickOnCreateFolder();

        FOLDERS.verifyFolderListed(folderName);
    }

    /** Removes the user, group and folder created in setUp. */
    @AfterClass
    public static void tearDown() {
        USERS.goTo()
                .clickOnUsername(username);

        EDIT_USER.verifyAtView()
                .clickOnDeleteUser();

        GROUPS.goTo()
                .clickOnGroupName(groupName);

        EDIT_GROUP.verifyAtView()
                .clickOnDeleteGroup();

        FOLDERS.goTo()
                .clickOnFolder(folderName);

        EDIT_FOLDER.verifyAtView()
                .clickOnDeleteFolder();
    }

    @Test
    public void creatingAndDeletingFolder() {
        FOLDERS.goTo()
                .clickOnFolder(folderName);
    }
}
| src/test/java/urchin/selenium/aclITCase.java | package urchin.selenium;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import urchin.selenium.testutil.SeleniumTest;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphanumeric;
import static urchin.testutil.UnixUserAndGroupCleanup.GROUP_PREFIX;
import static urchin.testutil.UnixUserAndGroupCleanup.USERNAME_PREFIX;
public class aclITCase extends SeleniumTest {
@ClassRule
public static TemporaryFolder temporaryFolder = new TemporaryFolder();
private static String groupName;
private static String username;
private static String folderName;
private static String folderPath;
@BeforeClass
public static void setUp() {
groupName = GROUP_PREFIX + System.currentTimeMillis();
username = USERNAME_PREFIX + System.currentTimeMillis();
folderName = "test-" + System.currentTimeMillis();
folderPath = temporaryFolder.getRoot().getAbsolutePath() + "/" + folderName;
NEW_GROUP.goTo()
.fillGroupName(groupName)
.clickOnCreateGroup();
GROUPS.verifyGroupNameListed(groupName);
NEW_USER.goTo()
.fillUsername(username)
.fillPassword(randomAlphanumeric(10))
.clickOnCreateUser();
USERS.verifyUsernameListed(username);
NEW_FOLDER.goTo()
.fillFolderPath(folderPath)
.clickOnCreateFolder();
FOLDERS.verifyFolderListed(folderName);
}
@Test
public void creatingAndDeletingFolder() {
FOLDERS.goTo()
.clickOnFolder(folderName);
}
}
| cleaning up after test
| src/test/java/urchin/selenium/aclITCase.java | cleaning up after test | <ide><path>rc/test/java/urchin/selenium/aclITCase.java
<ide> package urchin.selenium;
<ide>
<add>import org.junit.AfterClass;
<ide> import org.junit.BeforeClass;
<ide> import org.junit.ClassRule;
<ide> import org.junit.Test;
<ide> FOLDERS.verifyFolderListed(folderName);
<ide> }
<ide>
<add> @AfterClass
<add> public static void tearDown() {
<add> USERS.goTo()
<add> .clickOnUsername(username);
<add>
<add> EDIT_USER.verifyAtView()
<add> .clickOnDeleteUser();
<add>
<add> GROUPS.goTo()
<add> .clickOnGroupName(groupName);
<add>
<add> EDIT_GROUP.verifyAtView()
<add> .clickOnDeleteGroup();
<add>
<add> FOLDERS.goTo()
<add> .clickOnFolder(folderName);
<add>
<add> EDIT_FOLDER.verifyAtView()
<add> .clickOnDeleteFolder();
<add> }
<add>
<ide> @Test
<ide> public void creatingAndDeletingFolder() {
<ide> |
|
Java | apache-2.0 | 21b67bfc7536c0f822cecfc60b38216f7cf50fdf | 0 | database-rider/database-rider,database-rider/database-rider,database-rider/database-rider,database-rider/database-rider | package com.github.database.rider.cdi;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.List;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import org.apache.deltaspike.testcontrol.api.junit.CdiTestRunner;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.github.database.rider.cdi.api.DBUnitInterceptor;
import com.github.database.rider.cdi.model.Tweet;
import com.github.database.rider.cdi.model.User;
import com.github.database.rider.core.api.configuration.DBUnit;
import com.github.database.rider.core.api.dataset.DataSet;
import com.github.database.rider.core.api.dataset.SeedStrategy;
// Integration test verifying that with mergeDataSets=true, class-level and
// method-level @DataSet definitions (datasets, statements and scripts) are
// combined rather than the method-level one replacing the class-level one.
@DBUnitInterceptor
@DBUnit(mergeDataSets = true)
@RunWith(CdiTestRunner.class)
@DataSet(value = "yml/tweet.yml", executeScriptsAfter = "addUser.sql", executeStatementsBefore = {"DELETE FROM FOLLOWER WHERE 1=1", "DELETE FROM TWEET WHERE 1=1", "DELETE FROM USER WHERE 1=1", "INSERT INTO USER VALUES (8,'user8')"})
public class MergeDataSetsCDIIt {

    @Inject
    EntityManager em;

    @Test
    @DataSet(value = "yml/usersWithoutTweets.yml", executeScriptsAfter = "tweets.sql", executeStatementsBefore = "INSERT INTO USER VALUES (9,'user9')", strategy = SeedStrategy.INSERT)
    public void shouldMergeDataSetsFromClassAndMethod() {
        List<User> users = em.createQuery("select u from User u").getResultList(); //2 users from user.yml plus 1 from class level 'executeStatementsBefore' and 1 user from method level 'executeStatementsBefore'
        assertThat(users).isNotNull().isNotEmpty().hasSize(4);
        User user = (User) em.createQuery("select u from User u where u.id = 9").getSingleResult();//statement before
        assertThat(user).isNotNull();
        assertThat(user.getId()).isEqualTo(9);
        user = (User) em.createQuery("select u from User u where u.id = 1").getSingleResult();
        assertThat(user.getTweets()).isNotEmpty(); //tweets comes from class level annotation merged with method level
        assertThat(user.getTweets().get(0).getContent()).isEqualTo("dbunit rules again!");
    }

    // Runs after the test method but before DBUnit's after-scripts are
    // asserted away: checks that merged executeScriptsAfter from both the
    // class level (addUser.sql) and method level (tweets.sql) were applied.
    @After
    public void afterTest() {
        em.createQuery("select t from Tweet t").getResultList();
        User user = (User) em.createQuery("select u from User u where u.id = 10").getSingleResult();//scripts after
        assertThat(user).isNotNull();
        assertThat(user.getId()).isEqualTo(10);
        Tweet tweet = (Tweet) em.createQuery("select t from Tweet t where t.id = 10").getSingleResult();//scripts after
        assertThat(tweet).isNotNull();
        assertThat(tweet.getId()).isEqualTo("10");
    }
}
| rider-cdi/src/test/java/com/github/database/rider/cdi/MergeDataSetsCDIIt.java | package com.github.database.rider.cdi;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.List;
import javax.inject.Inject;
import javax.persistence.EntityManager;
import org.apache.deltaspike.testcontrol.api.junit.CdiTestRunner;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.github.database.rider.cdi.api.DBUnitInterceptor;
import com.github.database.rider.cdi.model.Tweet;
import com.github.database.rider.cdi.model.User;
import com.github.database.rider.core.api.configuration.DBUnit;
import com.github.database.rider.core.api.dataset.DataSet;
import com.github.database.rider.core.api.dataset.SeedStrategy;
@DBUnitInterceptor
@DBUnit(mergeDataSets = true)
@RunWith(CdiTestRunner.class)
@DataSet(value = "yml/tweet.yml", executeScriptsAfter = "addUser.sql", executeStatementsBefore = {"DELETE FROM USER WHERE 1=1", "INSERT INTO USER VALUES (8,'user8')"})
public class MergeDataSetsCDIIt {
@Inject
EntityManager em;
@Test
@DataSet(value = "yml/usersWithoutTweets.yml", executeScriptsAfter = "tweets.sql", executeStatementsBefore = "INSERT INTO USER VALUES (9,'user9')", strategy = SeedStrategy.INSERT)
public void shouldMergeDataSetsFromClassAndMethod() {
List<User> users = em.createQuery("select u from User u").getResultList(); //2 users from user.yml plus 1 from class level 'executeStatementsBefore' and 1 user from method level 'executeStatementsBefore'
assertThat(users).isNotNull().isNotEmpty().hasSize(4);
User user = (User) em.createQuery("select u from User u where u.id = 9").getSingleResult();//statement before
assertThat(user).isNotNull();
assertThat(user.getId()).isEqualTo(9);
user = (User) em.createQuery("select u from User u where u.id = 1").getSingleResult();
assertThat(user.getTweets()).isNotEmpty(); //tweets comes from class level annotation merged with method level
assertThat(user.getTweets().get(0).getContent()).isEqualTo("dbunit rules again!");
}
@After
public void afterTest() {
em.createQuery("select t from Tweet t").getResultList();
User user = (User) em.createQuery("select u from User u where u.id = 10").getSingleResult();//scripts after
assertThat(user).isNotNull();
assertThat(user.getId()).isEqualTo(10);
Tweet tweet = (Tweet) em.createQuery("select t from Tweet t where t.id = 10").getSingleResult();//scripts after
assertThat(tweet).isNotNull();
assertThat(tweet.getId()).isEqualTo("10");
}
}
| fixes test which fails depending on the order of execution
| rider-cdi/src/test/java/com/github/database/rider/cdi/MergeDataSetsCDIIt.java | fixes test which fails depending on the order of execution | <ide><path>ider-cdi/src/test/java/com/github/database/rider/cdi/MergeDataSetsCDIIt.java
<ide> @DBUnitInterceptor
<ide> @DBUnit(mergeDataSets = true)
<ide> @RunWith(CdiTestRunner.class)
<del>@DataSet(value = "yml/tweet.yml", executeScriptsAfter = "addUser.sql", executeStatementsBefore = {"DELETE FROM USER WHERE 1=1", "INSERT INTO USER VALUES (8,'user8')"})
<add>@DataSet(value = "yml/tweet.yml", executeScriptsAfter = "addUser.sql", executeStatementsBefore = {"DELETE FROM FOLLOWER WHERE 1=1", "DELETE FROM TWEET WHERE 1=1", "DELETE FROM USER WHERE 1=1", "INSERT INTO USER VALUES (8,'user8')"})
<ide> public class MergeDataSetsCDIIt {
<ide>
<ide> @Inject |
|
JavaScript | cc0-1.0 | 305bed8dff9139d5a56da9218dc172b7ffe2c201 | 0 | sleitner/hmda-explorer,m3brown/hmda-explorer,sleitner/hmda-explorer,kave/hmda-explorer,sleitner/hmda-explorer,m3brown/hmda-explorer,kave/hmda-explorer,kave/hmda-explorer,m3brown/hmda-explorer | var PDP = (function ( pdp ) {
'use strict';
// The Query Object
// ----------------
// The `query` object is used to cache filter values and store methods
// that manipulate filter values.
var query = {};
// If debug is set to true, dummy data will be used.
query.debug = false;
// Set a default format for the data download.
query.format = query.debug ? 'json' : 'jsonp?$callback=';
// Set a default endpoint for AJAX requests.
query.endpoint = query.debug ? 'static/js/static_data/' : 'https://api.consumerfinance.gov/data/hmda/';
// Seconds to wait on a response from the API before giving up.
query.secondsToWait = 300;
// Whether or not they want codes in their downloaded file.
query.codes = false;
// `query`'s `params` stores filter values.
query.params = {};
// The `reset` method empties the `params` object.
// If a preset is passed, some defaults will be set.
query.reset = function( preset ) {
// The year is selected independent of the preset, because
// of this we have to ensure it doesn't get overwritten.
var years = $('.field.as_of_year select').val() || [2012];
switch( preset ) {
// All originated mortgages.
case 'originations':
this.params = {
as_of_year: {
values: years,
comparator: '='
},
action_taken: {
values: [1],
comparator: '='
}
};
break;
// First-lien, owner-occupied, 1-4 family homes (including manufactured homes).
case 'common':
this.params = {
as_of_year: {
values: years,
comparator: '='
},
property_type: {
values: [1,2],
comparator: '='
},
owner_occupancy: {
values: [1],
comparator: '='
},
lien_status: {
values: [1],
comparator: '='
},
action_taken: {
values: [1],
comparator: '='
}
};
break;
// Default to an empty state with whatever years they've selected.
case 'all':
this.params = {
as_of_year: {
values: years,
comparator: '='
}
};
break;
// Empty everything.
case 'clear':
this.params = {};
break;
// Default to an empty state with whatever years they've selected.
default:
this.params = {
as_of_year: {
values: [2012],
comparator: '='
}
};
break;
}
return this;
};
// The `updateAll` method runs through all the filter field values the user has selected
// and stores them in the `params` object.
query.updateAll = function( options ) {
var fields,
opts = options || {};
switch( opts.source ) {
case 'url':
fields = pdp.app.getUrlValues();
break;
// State is stored in a cookie. Removing this functionality for now because
// it's confusing the UX team. I'd rather work on other stuff than explain
// to them how this works. Winter is coming.
// case 'session':
// fields = pdp.query.getCookie();
// break;
default:
fields = pdp.form.getFields();
}
this.reset('clear');
// Iterate over all the filter field values and push them into `query.params`.
function _processField( field ) {
if ( field.name && field.values ) {
// Initalize an empty param object if need be.
if ( typeof query.params[ field.name ] === 'undefined' ) {
query.params[ field.name ] = {
values: [],
comparator: '='
};
}
// If the value is a string from a text box we don't want to iterate
// over it because it will be split up the characters.
if ( field.type === 'text' ) {
query.params[ field.name ].values = [ field.values ];
} else {
_.forEach( field.values, function( val, name ){
var values = query.params[ field.name ].values;
// Only push the value if it's not already in there.
if ( !_.contains( values, val ) ) {
values.push( val );
}
});
}
query.params[ field.name ].comparator = field.comparator;
}
}
_.forEach( fields, _processField );
pdp.observer.emitEvent( 'params:updated' );
};
// The `setCookie` method stores the param object in a cookie.
query.setCookie = function() {
$.cookie( '_hmda', pdp.form.getFields(), { expires: 1 } );
};
// The `getCookie` method retrives the param object from a cookie.
query.getCookie = function() {
return $.cookie( '_hmda' );
};
// The `generateUrlHash` method builds and returns a URL hash from `query`'s `params`.
query.generateUrlHash = function() {
var hash,
hashParams = [];
// Loop through params, stringify them and push them into the temp array.
function buildHashParam( param, name ) {
if ( !param.values[0] ) {
return;
}
// If it's not a number, add quotes around the params.
hashParams.push( name + param.comparator + param.values.join(',') );
}
_.forEach( query.params, buildHashParam );
hash = '!/' + hashParams.join('&') + '§ion=' + pdp.app.currentSection;
return hash;
};
query.removeSelectParam = function (params) {
//using a copy of the params means that the select obj
//is still available on query.params for share url generation
var paramsCopy = $.extend(true, {}, params);
try {
delete paramsCopy.clauses.where.select;
} catch (e) {
//nested property doesn't exist
}
delete paramsCopy.select;
return paramsCopy;
};
// The `generateApiUrl` method builds and returns a Qu URL from `query`'s `params`.
query.generateApiUrl = function( format, codes, params ) {
var url,
apiCallParams = params || this.params,
showCodes = codes || this.codes,
downloadFormat = format || this.format;
//remove 'select' from params so it won't be added to where clause
apiCallParams = query.removeSelectParam(apiCallParams);
// Set a base url to append params to
url = this.endpoint + 'slice/hmda_lar.' + downloadFormat + '?';
if ( !showCodes ) {
apiCallParams = {
clauses: {
where: apiCallParams,
select: ['action_taken_name','agency_abbr','agency_name','applicant_ethnicity_name','applicant_race_name_1','applicant_race_name_2','applicant_race_name_3','applicant_race_name_4','applicant_race_name_5','applicant_sex_name','application_date_indicator','as_of_year','census_tract_number','co_applicant_ethnicity_name','co_applicant_race_name_1','co_applicant_race_name_2','co_applicant_race_name_3','co_applicant_race_name_4','co_applicant_race_name_5','co_applicant_sex_name','county_name','denial_reason_name_1','denial_reason_name_2','denial_reason_name_3','edit_status_name','hoepa_status_name','lien_status_name','loan_purpose_name','loan_type_name','msamd_name','owner_occupancy_name','preapproval_name','property_type_name','purchaser_type_name','respondent_id','sequence_number','state_abbr','state_name','applicant_income_000s','hud_median_family_income','loan_amount_000s','number_of_1_to_4_family_units','number_of_owner_occupied_units','minority_population','population','rate_spread','tract_to_msamd_income']
}
};
}
// fetch, compile queries
url += this._buildApiQuery( apiCallParams );
return url;
};
// builds the query string to append to api url
// arg: params, object. if you only need a 'where' clause, passing in
// query.params will do just fine
// if you need to use the 'select' and 'group' clauses, pass an object
// that has a property 'clauses' with an array of objects that correspond to
// each clause. example:
// params = {
// clauses: {
// select: ['var_one', 'var_two'],
// group: ['var_one', 'var_two']
// }
// }
query._buildApiQuery = function( params ) {
var url = '', key;
if ( params.hasOwnProperty('clauses') ) {
for ( key in params.clauses ) {
if ( params.clauses.hasOwnProperty( key ) ) {
url += this._buildClause[key]( params.clauses[key] );
}
}
} else {
url = this._buildClause.where( params );
}
return url;
};
// methods correspond to each type of clause that the API takes
// builds part of api call query string pertaining to clause
query._buildClause = {
// Convert each param to a proper [`$where` clause](http://cfpb.github.io/qu/articles/queries.html#where_in_detail).
where: function( params ) {
var _params = {},
queryVals = [],
locVals = [],
locGroup = {}, //Create a group that finds state / counties with similar numbers.
where;
// In order to compensate for enumerated location fields (state_code-1, county_name-1, etc.)
// we have to go through and consolidate all enumerated params into unified objects.
_.forEach( params, function( param, paramName ) {
//joiner used to allow for congifurable AND/OR statements in query
param.joiner = ' AND ';
if ( !param.values || !param.values[0] ) {
return;
}
var consolidatedName, groupName;
// If the parameter is an enumerated (state-code-1) field then
if ( paramName.match(/\-\d+$/) ) {
// If this is a special case with county, state, or census tract
// then they needs to be grouped together as an object for appropriate query creation
if ( paramName.indexOf('state_code') > -1 || paramName.indexOf('county_code') > -1 || paramName.indexOf('census_tract_number') > -1 ){
// If a number exists, create a location group to bring city, state, and census tract together
groupName = paramName.slice(-2);
// Initialize an empty location group if necessary
if( typeof locGroup[ groupName ] === 'undefined' ){
locGroup[ groupName ] = {
stateValue: '',
countyValues: [],
censusValues: [],
comparator: '=',
joiner: ' OR '
};
}
// Loop through each location group parameter and push it to the appropriate object
_.forEach( param.values, function( value ){
if ( paramName.indexOf('state_code') > -1 ){
locGroup[ groupName ].stateValue = value;
} else if ( paramName.indexOf('county_code') > -1 ){
locGroup[ groupName ].countyValues.push( value );
} else if ( paramName.indexOf('census_tract_number') > -1 ){
locGroup[ groupName ].censusValues.push( '"' + value + '"' );
}
});
// If not state, county, census, then create a consolidated parameter (ie 'msamd')
} else {
// Initalize an empty param object if need be.
consolidatedName = paramName.replace(/\-\d+$/, '');
if ( typeof _params[ consolidatedName ] === 'undefined' ) {
_params[ consolidatedName ] = {
values: [],
comparator: '=',
joiner: ' AND '
};
}
_.forEach( param.values, function( value ){
_params[ consolidatedName ].values.push( value );
});
}
// If the parameter is NOT an enumerated field, push it to _params
} else {
_params[ paramName ] = param;
}
});
// We can now get back to business and generate that WERECLAWS
_.forEach( _params, function( param, paramName ) {
var paramVals;
// Strip `-min/max` from the end of the param. This is mainly done for the loan_amount_000s fields.
paramName = paramName.replace( /\-(min|max)$/, '' );
paramVals = this._formatComparisonValues( param, paramName );
// If calling msamd, then it needs to be joined to location data with "OR" and placed at end of array
if( paramName.indexOf('msamd') > -1 ){
param.joiner = ' OR ';
locVals.push( paramVals );
locVals.push( param.joiner );
// Otherwise, push the parameter to the queryVals array which is joined first (below)
} else {
queryVals.push( paramVals );
}
}.bind( this ));
// For each location group, iterate through and create valid, grouped query string
_.forEach( locGroup, function(i, val){
var queryStr = '', item = locGroup[val];
if( item.stateValue === '' ){
} else if( item.countyValues.length === 0 && item.censusValues.length === 0 ){
queryStr += 'state_code=' + item.stateValue;
locVals.push(queryStr);
locVals.push(item.joiner);
} else if( item.countyValues.length === 0 && item.censusValues.length > 0 ){
queryStr += '(state_code=' + item.stateValue + ' AND census_tract_number IN (' + item.censusValues + '))';
locVals.push(queryStr);
locVals.push(item.joiner);
} else if( item.censusValues.length === 0 ){
queryStr += '(state_code=' + item.stateValue + ' AND county_code IN (' + item.countyValues.toString() + '))';
locVals.push(queryStr);
locVals.push(item.joiner);
} else {
queryStr += '(state_code=' + item.stateValue + ' AND county_code IN (' + item.countyValues.toString() + ') AND census_tract_number IN (' + item.censusValues.toString() + '))';
locVals.push(queryStr);
locVals.push(item.joiner);
}
});
locVals.pop(); //Get rid of the last joiner / operator - not needed.
// Add each queryVals parameter and their joiner string to where variable
if( queryVals.length > 0 ){
where = queryVals.join(' AND ');
// If location information selected, then join that to the existing query
if( locVals.length > 0 ){
where += ' AND (' + locVals.join('') + ')';
}
} else {
// If no queryvals exist, set to empty and append any location filter without AND
where = '';
if( locVals.length > 0 ){
where += '(' + locVals.join('') + ')';
}
}
// A REGEX used to be here that substituted in certain scenarios - this was not sufficient for the use case.
// Encode for URIs and replace spaces with plus signs.
return '&$where=' + encodeURI( where ).replace( /%20/g, '+' );
},
// select and group clauses are formatted the same way sans name
select: function( param ) {
return '&$select=' + this._listVals( param );
},
group: function( param ) {
return '&$group=' + this._listVals( param ) + '&$orderBy=' + this._listVals( _.clone( param ).reverse() );
},
// formats single value clauses
// returns string of comma-delimited values
_listVals: function( param ) {
var i = param.length,
str = '';
while( i-- ) {
if ( typeof param[i] !== 'undefined' ) {
str += param[i];
// if this is not the last value, add comma
if ( i > 0 ) {
str += ',';
}
}
}
return str;
},
// formats api call values that have a comparison operator
_formatComparisonValues: function( param, paramName ){
var paramVal,
paramVals = [];
// If there's only value for the param, meaning they only selected one item or
// it's a radio button that only allows once value, add the stringified
// param to the `queryVals` array.
if ( param.values.length === 1 ) {
// Rate spread specialness
if ( param.values[0] == 'null' ) {
paramVal = paramName + param.comparator + '""';
// Strings
} else if ( isNaN( param.values[0] ) || paramName === 'msamd' || paramName === 'respondent_id' || paramName === 'census_tract_number' ) {
paramVal = paramName + param.comparator + '"' + param.values[0] + '"';
// Numbers
} else {
paramVal = paramName + param.comparator + param.values[0];
}
return paramVal;
// If there are multiple values for a single parameter, we iterate over them and
// put an `OR` operator between them. We then then [group them](http://cfpb.github.io/qu/articles/queries.html#boolean_operators)
// with parens and add the grouping to the `params` array.
} else {
_.forEach( param.values, function( val, key ){
if ( isNaN( val ) || paramName === 'msamd' || paramName === 'respondent_id' || paramName === 'census_tract_number' ) {
paramVals.push( '"' + val + '"' );
} else {
paramVals.push( val );
}
});
return paramName + ' IN (' + paramVals + ')';
}
}
};
// The `fetch` method requests and returns JSON from Qu matching the
// user's filter and grouping clauses
query.fetch = function() {
};
// Export the public API.
pdp.query = query;
return pdp;
}( PDP || {} ));
| src/static/js/modules/query.js | var PDP = (function ( pdp ) {
'use strict';
// The Query Object
// ----------------
// The `query` object is used to cache filter values and store methods
// that manipulate filter values.
var query = {};
// If debug is set to true, dummy data will be used.
query.debug = false;
// Set a default format for the data download.
query.format = query.debug ? 'json' : 'jsonp?$callback=';
// Set a default endpoint for AJAX requests.
query.endpoint = query.debug ? 'static/js/static_data/' : 'https://api.consumerfinance.gov/data/hmda/';
// Seconds to wait on a response from the API before giving up.
query.secondsToWait = 300;
// Whether or not they want codes in their downloaded file.
query.codes = false;
// `query`'s `params` stores filter values.
query.params = {};
// The `reset` method empties the `params` object.
// If a preset is passed, some defaults will be set.
query.reset = function( preset ) {
// The year is selected independent of the preset, because
// of this we have to ensure it doesn't get overwritten.
var years = $('.field.as_of_year select').val() || [2012];
switch( preset ) {
// All originated mortgages.
case 'originations':
this.params = {
as_of_year: {
values: years,
comparator: '='
},
action_taken: {
values: [1],
comparator: '='
}
};
break;
// First-lien, owner-occupied, 1-4 family homes (including manufactured homes).
case 'common':
this.params = {
as_of_year: {
values: years,
comparator: '='
},
property_type: {
values: [1,2],
comparator: '='
},
owner_occupancy: {
values: [1],
comparator: '='
},
lien_status: {
values: [1],
comparator: '='
},
action_taken: {
values: [1],
comparator: '='
}
};
break;
// Default to an empty state with whatever years they've selected.
case 'all':
this.params = {
as_of_year: {
values: years,
comparator: '='
}
};
break;
// Empty everything.
case 'clear':
this.params = {};
break;
// Default to an empty state with whatever years they've selected.
default:
this.params = {
as_of_year: {
values: [2012],
comparator: '='
}
};
break;
}
return this;
};
// The `updateAll` method runs through all the filter field values the user has selected
// and stores them in the `params` object.
query.updateAll = function( options ) {
var fields,
opts = options || {};
switch( opts.source ) {
case 'url':
fields = pdp.app.getUrlValues();
break;
// State is stored in a cookie. Removing this functionality for now because
// it's confusing the UX team. I'd rather work on other stuff than explain
// to them how this works. Winter is coming.
// case 'session':
// fields = pdp.query.getCookie();
// break;
default:
fields = pdp.form.getFields();
}
this.reset('clear');
// Iterate over all the filter field values and push them into `query.params`.
function _processField( field ) {
if ( field.name && field.values ) {
// Initalize an empty param object if need be.
if ( typeof query.params[ field.name ] === 'undefined' ) {
query.params[ field.name ] = {
values: [],
comparator: '='
};
}
// If the value is a string from a text box we don't want to iterate
// over it because it will be split up the characters.
if ( field.type === 'text' ) {
query.params[ field.name ].values = [ field.values ];
} else {
_.forEach( field.values, function( val, name ){
var values = query.params[ field.name ].values;
// Only push the value if it's not already in there.
if ( !_.contains( values, val ) ) {
values.push( val );
}
});
}
query.params[ field.name ].comparator = field.comparator;
}
}
_.forEach( fields, _processField );
pdp.observer.emitEvent( 'params:updated' );
};
// The `setCookie` method stores the param object in a cookie.
query.setCookie = function() {
$.cookie( '_hmda', pdp.form.getFields(), { expires: 1 } );
};
// The `getCookie` method retrives the param object from a cookie.
query.getCookie = function() {
return $.cookie( '_hmda' );
};
// The `generateUrlHash` method builds and returns a URL hash from `query`'s `params`.
query.generateUrlHash = function() {
var hash,
hashParams = [];
// Loop through params, stringify them and push them into the temp array.
function buildHashParam( param, name ) {
if ( !param.values[0] ) {
return;
}
// If it's not a number, add quotes around the params.
hashParams.push( name + param.comparator + param.values.join(',') );
}
_.forEach( query.params, buildHashParam );
hash = '!/' + hashParams.join('&') + '§ion=' + pdp.app.currentSection;
return hash;
};
query.removeSelectParam = function (params) {
//using a copy of the params means that the select obj
//is still available on query.params for share url generation
var paramsCopy = $.extend(true, {}, params);
try {
delete paramsCopy.clauses.where.select;
} catch (e) {
//nested property doesn't exist
}
delete paramsCopy.select;
return paramsCopy;
};
// The `generateApiUrl` method builds and returns a Qu URL from `query`'s `params`.
query.generateApiUrl = function( format, codes, params ) {
var url,
apiCallParams = params || this.params,
showCodes = codes || this.codes,
downloadFormat = format || this.format;
//remove 'select' from params so it won't be added to where clause
apiCallParams = query.removeSelectParam(apiCallParams);
// Set a base url to append params to
url = this.endpoint + 'slice/hmda_lar.' + downloadFormat + '?';
if ( !showCodes ) {
apiCallParams = {
clauses: {
where: apiCallParams,
select: ['action_taken_name','agency_abbr','agency_name','applicant_ethnicity_name','applicant_race_name_1','applicant_race_name_2','applicant_race_name_3','applicant_race_name_4','applicant_race_name_5','applicant_sex_name','application_date_indicator','as_of_year','census_tract_number','co_applicant_ethnicity_name','co_applicant_race_name_1','co_applicant_race_name_2','co_applicant_race_name_3','co_applicant_race_name_4','co_applicant_race_name_5','co_applicant_sex_name','county_name','denial_reason_name_1','denial_reason_name_2','denial_reason_name_3','edit_status_name','hoepa_status_name','lien_status_name','loan_purpose_name','loan_type_name','msamd_name','owner_occupancy_name','preapproval_name','property_type_name','purchaser_type_name','respondent_id','sequence_number','state_abbr','state_name','applicant_income_000s','hud_median_family_income','loan_amount_000s','number_of_1_to_4_family_units','number_of_owner_occupied_units','minority_population','population','rate_spread','tract_to_msamd_income']
}
};
}
// fetch, compile queries
url += this._buildApiQuery( apiCallParams );
return url;
};
// builds the query string to append to api url
// arg: params, object. if you only need a 'where' clause, passing in
// query.params will do just fine
// if you need to use the 'select' and 'group' clauses, pass an object
// that has a property 'clauses' with an array of objects that correspond to
// each clause. example:
// params = {
// clauses: {
// select: ['var_one', 'var_two'],
// group: ['var_one', 'var_two']
// }
// }
query._buildApiQuery = function( params ) {
var url = '', key;
if ( params.hasOwnProperty('clauses') ) {
for ( key in params.clauses ) {
if ( params.clauses.hasOwnProperty( key ) ) {
url += this._buildClause[key]( params.clauses[key] );
}
}
} else {
url = this._buildClause.where( params );
}
return url;
};
// methods correspond to each type of clause that the API takes
// builds part of api call query string pertaining to clause
query._buildClause = {
// Convert each param to a proper [`$where` clause](http://cfpb.github.io/qu/articles/queries.html#where_in_detail).
where: function( params ) {
var _params = {},
queryVals = [],
locVals = [],
locGroup = {}, //Create a group that finds state / counties with similar numbers.
where;
// In order to compensate for enumerated location fields (state_code-1, county_name-1, etc.)
// we have to go through and consolidate all enumerated params into unified objects.
_.forEach( params, function( param, paramName ) {
//joiner used to allow for congifurable AND/OR statements in query
param.joiner = ' AND ';
if ( !param.values || !param.values[0] ) {
return;
}
var consolidatedName, groupName;
// If the parameter is an enumerated (state-code-1) field then
if ( paramName.match(/\-\d+$/) ) {
// If this is a special case with county, state, or census tract
// then they needs to be grouped together as an object for appropriate query creation
if ( paramName.indexOf('state_code') > -1 || paramName.indexOf('county_code') > -1 || paramName.indexOf('census_tract_number') > -1 ){
// If a number exists, create a location group to bring city, state, and census tract together
groupName = paramName.slice(-2);
// Initialize an empty location group if necessary
if( typeof locGroup[ groupName ] === 'undefined' ){
locGroup[ groupName ] = {
stateValue: '',
countyValues: [],
censusValues: [],
comparator: '=',
joiner: ' OR '
};
}
// Loop through each location group parameter and push it to the appropriate object
_.forEach( param.values, function( value ){
if ( paramName.indexOf('state_code') > -1 ){
locGroup[ groupName ].stateValue = value;
} else if ( paramName.indexOf('county_code') > -1 ){
locGroup[ groupName ].countyValues.push( value );
} else if ( paramName.indexOf('census_tract_number') > -1 ){
locGroup[ groupName ].censusValues.push( '"' + value + '"' );
}
});
// If not state, county, census, then create a consolidated parameter (ie 'msamd')
} else {
// Initalize an empty param object if need be.
consolidatedName = paramName.replace(/\-\d+$/, '');
if ( typeof _params[ consolidatedName ] === 'undefined' ) {
_params[ consolidatedName ] = {
values: [],
comparator: '=',
joiner: ' AND '
};
}
_.forEach( param.values, function( value ){
_params[ consolidatedName ].values.push( value );
});
}
// If the parameter is NOT an enumerated field, push it to _params
} else {
_params[ paramName ] = param;
}
});
// We can now get back to business and generate that WERECLAWS
_.forEach( _params, function( param, paramName ) {
var paramVals;
// Strip `-min/max` from the end of the param. This is mainly done for the loan_amount_000s fields.
paramName = paramName.replace( /\-(min|max)$/, '' );
paramVals = this._formatComparisonValues( param, paramName );
// If calling msamd, then it needs to be joined to location data with "OR" and placed at end of array
if( paramName.indexOf('msamd') > -1 ){
param.joiner = ' OR ';
locVals.push( paramVals );
locVals.push( param.joiner );
// Otherwise, push the parameter to the queryVals array which is joined first (below)
} else {
queryVals.push( paramVals );
}
}.bind( this ));
// For each location group, iterate through and create valid, grouped query string
_.forEach( locGroup, function(i, val){
var queryStr = '', item = locGroup[val];
if( item.stateValue === '' ){
} else if( item.countyValues.length === 0 && item.censusValues.length === 0 ){
queryStr += 'state_code=' + item.stateValue;
locVals.push(queryStr);
locVals.push(item.joiner);
} else if( item.countyValues.length === 0 && item.censusValues.length > 0 ){
queryStr += '(state_code=' + item.stateValue + ' AND census_tract_number IN (' + item.censusValues + '))';
locVals.push(queryStr);
locVals.push(item.joiner);
} else if( item.censusValues.length === 0 ){
queryStr += '(state_code=' + item.stateValue + ' AND county_code IN (' + item.countyValues.toString() + '))';
locVals.push(queryStr);
locVals.push(item.joiner);
} else {
queryStr += '(state_code=' + item.stateValue + ' AND county_code IN (' + item.countyValues.toString() + ') AND census_tract_number IN (' + item.censusValues + '))';
locVals.push(queryStr);
locVals.push(item.joiner);
}
});
locVals.pop(); //Get rid of the last joiner / operator - not needed.
// Add each queryVals parameter and their joiner string to where variable
if( queryVals.length > 0 ){
where = queryVals.join(' AND ');
// If location information selected, then join that to the existing query
if( locVals.length > 0 ){
where += ' AND (' + locVals.join('') + ')';
}
} else {
// If no queryvals exist, set to empty and append any location filter without AND
where = '';
if( locVals.length > 0 ){
where += '(' + locVals.join('') + ')';
}
}
// A REGEX used to be here that substituted in certain scenarios - this was not sufficient for the use case.
// Encode for URIs and replace spaces with plus signs.
return '&$where=' + encodeURI( where ).replace( /%20/g, '+' );
},
// select and group clauses are formatted the same way sans name
select: function( param ) {
return '&$select=' + this._listVals( param );
},
group: function( param ) {
return '&$group=' + this._listVals( param ) + '&$orderBy=' + this._listVals( _.clone( param ).reverse() );
},
// formats single value clauses
// returns string of comma-delimited values
_listVals: function( param ) {
var i = param.length,
str = '';
while( i-- ) {
if ( typeof param[i] !== 'undefined' ) {
str += param[i];
// if this is not the last value, add comma
if ( i > 0 ) {
str += ',';
}
}
}
return str;
},
// formats api call values that have a comparison operator
_formatComparisonValues: function( param, paramName ){
var paramVal,
paramVals = [];
// If there's only value for the param, meaning they only selected one item or
// it's a radio button that only allows once value, add the stringified
// param to the `queryVals` array.
if ( param.values.length === 1 ) {
// Rate spread specialness
if ( param.values[0] == 'null' ) {
paramVal = paramName + param.comparator + '""';
// Strings
} else if ( isNaN( param.values[0] ) || paramName === 'msamd' || paramName === 'respondent_id' || paramName === 'census_tract_number' ) {
paramVal = paramName + param.comparator + '"' + param.values[0] + '"';
// Numbers
} else {
paramVal = paramName + param.comparator + param.values[0];
}
return paramVal;
// If there are multiple values for a single parameter, we iterate over them and
// put an `OR` operator between them. We then then [group them](http://cfpb.github.io/qu/articles/queries.html#boolean_operators)
// with parens and add the grouping to the `params` array.
} else {
_.forEach( param.values, function( val, key ){
if ( isNaN( val ) || paramName === 'msamd' || paramName === 'respondent_id' || paramName === 'census_tract_number' ) {
paramVals.push( '"' + val + '"' );
} else {
paramVals.push( val );
}
});
return paramName + ' IN (' + paramVals + ')';
}
}
};
// The `fetch` method requests and returns JSON from Qu matching the
// user's filter and grouping clauses
query.fetch = function() {
};
// Export the public API.
pdp.query = query;
return pdp;
}( PDP || {} ));
| string array fix
| src/static/js/modules/query.js | string array fix | <ide><path>rc/static/js/modules/query.js
<ide> locVals.push(queryStr);
<ide> locVals.push(item.joiner);
<ide> } else {
<del> queryStr += '(state_code=' + item.stateValue + ' AND county_code IN (' + item.countyValues.toString() + ') AND census_tract_number IN (' + item.censusValues + '))';
<add> queryStr += '(state_code=' + item.stateValue + ' AND county_code IN (' + item.countyValues.toString() + ') AND census_tract_number IN (' + item.censusValues.toString() + '))';
<ide> locVals.push(queryStr);
<ide> locVals.push(item.joiner);
<ide> } |
|
JavaScript | mit | 684bf243400e6de2fd6b89446b9ae7b5c8badcd4 | 0 | aigarsdz/olden,aigarsdz/olden | const { ipcRenderer, clipboard } = require('electron');
const path = require('path');
const packageInfo = require(path.join(__dirname, 'package.json'));
const Dexie = require('dexie');
const db = new Dexie('clipboard');
db.version(1).stores({
items: '++id, &text, favorite'
});
const vm = new Vue({
el: '#app',
data: {
clipboardContent: [],
searchResults: [],
lastClipboardItem: '',
clipboardItemCount: 0,
searchItemCount: 0,
selectionIndex: -1,
query: '',
currentPage: 0,
currentSearchPage: 0
},
methods: {
/**
* Loads clipboard data from the database in reverse order sorted by id and
* adds the results to clipboardContent. Only 9 items per page
* are loaded. The offset is calculated using currentPage.
*
* @param {Function} callback Any action that needs to be executed after
* the data is loaded.
*
* @param {Boolean} setLastItem Optional. Set to true only on initial load
* to set lastClipboardItem to the last
* value in user's clipboard. It is used to
* determine if the clipboard has chnaged.
* The default is true.
*/
loadClipboard(callback, setLastItem) {
setLastItem = setLastItem || false;
// NOTE: favorites aren't used for now.
db.items
.where('favorite').equals(1)
.reverse()
.offset(9 * this.currentPage)
.limit(9)
.sortBy('id')
.then((favorites) => {
this.favorites = favorites;
});
db.items
.where('favorite').equals(0)
.reverse()
.offset(9 * this.currentPage)
.limit(9)
.sortBy('id')
.then((items) => {
// NOTE: until favorites are fully implemented we can store only
// text values in the clipboard.
this.clipboardContent = items.map((item) => item.text);
// Store the last value from the clipboard to check if it has changed.
if (items.length > 0 && setLastItem) {
this.lastClipboardItem = items[0].text;
}
}).then(callback);
},
/**
* Navigates between pages. It basically just sets currentPage to the
* given index (0 for the first page, 1 - the second etc.) and relaods
* the clipboard.
*
* @param {Number} pageIndex
* @param {Function} callback
*
* @see {@link loadClipboard}
*/
openPage(pageIndex, callback) {
this.currentPage = pageIndex;
this.loadClipboard(callback);
},
/**
* Hides app window.
*/
hideWindow() {
ipcRenderer.send('hideWindow');
},
/**
* Deletes item from the clipboard history.
*/
deleteItem() {
if (this.selectionIndex !== -1) {
const collection = this.query.length === 0 ? 'clipboardContent' : 'searchResults';
const clipboardItem = this[collection].splice(this.selectionIndex, 1)[0];
db.items.where('text').equals(clipboardItem).delete().then((count) => {
this.selectionIndex = -1;
this.currentPage = 0;
this.currentSearchPage = 0;
if (this.query.length > 0) {
this.searchClipboard(this.query);
} else {
this.loadClipboard();
}
});
}
},
/**
* Takes an item from the clipboard collection and moves it to the top of
* the list.
*/
copyItem() {
const collection = this.query.length === 0 ? 'clipboardContent' : 'searchResults';
const clipboardItem = this[collection].splice(this.selectionIndex, 1)[0];
// Issue #9. If we select the first item we need to nullify the last
// clipboard item stored in memory, otherwise it will just disappear form
// clipboard history.
if (this.selectionIndex === 0) {
this.lastClipboardItem = null;
}
db.items
.where('text').equals(clipboardItem)
.delete()
.then((count) => {
this.clipboardItemCount -= count;
// Navigate back to the first page because the selected item is now
// at the very top of the list.
this.openPage(0, () => {
clipboard.writeText(clipboardItem);
this.selectionIndex = -1;
this.query = '';
this.currentSearchPage = 0;
this.hideWindow();
});
});
},
/**
* Checks if the app is running on MacOS and the asset points to a DMG archive
* and offers to download a MacOS update if the comdisions are met.
*
* @param {String} updateUrl
*/
offerMacOSUpdate(updateUrl) {
if (process.platform === 'darwin' && updateUrl.indexOf('.dmg') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Checks if the app is running on 64bit Windows and the asset points to a
* 64bit ZIP archive and offers to download a 64bit Windows update if the
* comdisions are met.
*
* @param {String} updateUrl
*/
offerWin64Update(updateUrl) {
if (process.platform === 'win32' && process.env.PROCESSOR_ARCHITECTURE === 'AMD64' &&
updateUrl.indexOf('win32-x64') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Checks if the app is running on 32bit Windows and the asset points to a
* 32bit ZIP archive and offers to download a 32bit Windows update if the
* comdisions are met.
*
* @param {String} updateUrl
*/
offerWin32Update(updateUrl) {
if (process.platform === 'win32' && process.env.PROCESSOR_ARCHITECTURE === 'x86' &&
updateUrl.indexOf('win32-ia32') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Checks if the app is running on 32bit Linux and the asset points to a
* 32bit ZIP archive and offers to download a 32bit Linux update if the
* comdisions are met.
*
* @param {String} updateUrl
*/
offerLinux32Update(updateUrl) {
if (process.platform === 'linux' && process.env.PROCESSOR_ARCHITECTURE === 'x86' &&
updateUrl.indexOf('linux-ia32') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Checks if the app is running on 64bit Linux and the asset points to a
* 64bit ZIP archive and offers to download a 64bit Linux update if the
* comdisions are met.
*
* @param {String} updateUrl
*/
offerLinux64Update(updateUrl) {
if (process.platform === 'linux' && process.env.PROCESSOR_ARCHITECTURE === 'AMD64' &&
updateUrl.indexOf('linux-x64') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Performs a clipboard search with the given search needle.
*
* @param {String} needle
*/
searchClipboard(needle) {
db.items.where('text').startsWithIgnoreCase(needle).count((count) => {
vm.searchItemCount = count;
});
db.items
.where('text').startsWithIgnoreCase(needle)
.reverse()
.offset(9 * vm.currentSearchPage)
.limit(9)
.sortBy('id')
.then((items) => {
vm.searchResults = [];
items.forEach((item) => vm.searchResults.push(item.text));
});
},
/**
* Assigns actions to specifickeyboard events.
*/
initActionKeys() {
Mousetrap.bind('up', () => {
if (this.selectionIndex === 0) {
this.selectionIndex = this.clipboardContent.length - 1;
} else {
this.selectionIndex--;
}
});
Mousetrap.bind('right', () => {
if (this.query.length === 0) {
if ((Math.ceil(this.clipboardItemCount / 9)) > this.currentPage + 1) {
this.openPage(this.currentPage + 1);
}
} else {
this.currentSearchPage++;
this.searchClipboard(this.query);
}
});
Mousetrap.bind('down', () => {
if (this.selectionIndex == this.clipboardContent.length - 1) {
this.selectionIndex = 0;
} else {
this.selectionIndex++;
}
});
Mousetrap.bind('left', () => {
if (this.query.length === 0) {
if (this.currentPage > 0) {
this.openPage(this.currentPage - 1);
}
} else {
this.currentSearchPage--;
this.searchClipboard(this.query);
}
});
Mousetrap.bind('esc', this.hideWindow);
Mousetrap.bind('enter', this.copyItem);
if (process.platform === 'darwin') {
Mousetrap.bind('command+backspace', this.deleteItem);
} else {
Mousetrap.bind('ctrl+backspace', this.deleteItem);
}
}
},
/**
* Initializes the application.
*/
ready() {
// Query GitHub to see if a new version of Olden is available.
// TODO: implement autoupdater!
fetch('https://api.github.com/repos/aigarsdz/olden/releases/latest')
.then((response) => { return response.json() })
.then((data) => {
if (data.tag_name && data.tag_name != `v${packageInfo.version}`) {
data.assets.forEach((asset) => {
this.offerMacOSUpdate(asset.browser_download_url);
this.offerWin64Update(asset.browser_download_url);
this.offerWin32Update(asset.browser_download_url);
});
}
}).catch((err) => console.log(err));
this.initActionKeys();
db.items.where('favorite').equals(0).count((count) => {
this.clipboardItemCount = count;
});
this.loadClipboard(() => {
// NOTE: MacOS has no native interface to listen for clipboard changes,
// therefore, polling is the only option. We should do as little
// processing as possible in this function to preserve system resources.
// TODO: Windows has an interface for this purpose. We should at least
// try to integrate it in the app.
setInterval(() => {
const clipboardText = clipboard.readText();
if (clipboardText.length > 0 && clipboardText != this.lastClipboardItem) {
// Delete the item if it's already in the clipboard to avoid extra checks.
db.items.where('text').equals(clipboardText).delete()
.then((count) => {
// TODO: try to remove the item without checking if it's in the array!
if (this.clipboardContent.includes(clipboardText)) {
const clipboardItem = this.clipboardContent.splice(
this.clipboardContent.indexOf(clipboardText), 1
)[0];
} else if (this.clipboardContent.length === 9) {
this.clipboardContent.pop();
}
this.clipboardItemCount -= count;
})
.then(() => {
this.clipboardContent.unshift(clipboardText);
db.items.add({ text: clipboardText, favorite: 0 });
this.lastClipboardItem = clipboardText;
this.clipboardItemCount++;
});
}
}, 500);
}, true);
ipcRenderer.on('clearClipboardHistory', () => {
db.items.clear().then(() => {
this.lastClipboardItem = '';
this.clipboardItemCount = 0;
this.selectionIndex = -1;
this.query = '';
this.openPage(0);
});
});
ipcRenderer.on('exportClipboardHistoryAsJSON', () => {
db.items.toArray().then((items) => {
ipcRenderer.send('saveExportedData', { items: JSON.stringify(items), format: 'json' })
});
});
ipcRenderer.on('exportClipboardHistoryAsTXT', () => {
db.items.toArray().then((items) => {
ipcRenderer.send('saveExportedData', {
items: items.map((item) => item.text).join('\n'),
format: 'txt'
})
});
});
}
});
vm.$watch('query', (value) => {
if (value.length > 0) {
vm.searchClipboard(value);
} else {
vm.searchResults = [];
vm.currentSearchPage = 0;
}
}); | app/js/main.js | const { ipcRenderer, clipboard } = require('electron');
const path = require('path');
const packageInfo = require(path.join(__dirname, 'package.json'));
const Dexie = require('dexie');
const db = new Dexie('clipboard');
db.version(1).stores({
items: '++id, &text, favorite'
});
const vm = new Vue({
el: '#app',
data: {
clipboardContent: [],
searchResults: [],
lastClipboardItem: '',
clipboardItemCount: 0,
searchItemCount: 0,
selectionIndex: -1,
query: '',
currentPage: 0,
currentSearchPage: 0
},
methods: {
/**
* Loads clipboard data from the database in reverse order sorted by id and
* adds the results to clipboardContent. Only 9 items per page
* are loaded. The offset is calculated using currentPage.
*
* @param {Function} callback Any action that needs to be executed after
* the data is loaded.
*
* @param {Boolean} setLastItem Optional. Set to true only on initial load
* to set lastClipboardItem to the last
* value in user's clipboard. It is used to
* determine if the clipboard has chnaged.
* The default is true.
*/
loadClipboard(callback, setLastItem) {
setLastItem = setLastItem || false;
// NOTE: favorites aren't used for now.
db.items
.where('favorite').equals(1)
.reverse()
.offset(9 * this.currentPage)
.limit(9)
.sortBy('id')
.then((favorites) => {
this.favorites = favorites;
});
db.items
.where('favorite').equals(0)
.reverse()
.offset(9 * this.currentPage)
.limit(9)
.sortBy('id')
.then((items) => {
// NOTE: until favorites are fully implemented we can store only
// text values in the clipboard.
this.clipboardContent = items.map((item) => item.text);
// Store the last value from the clipboard to check if it has changed.
if (items.length > 0 && setLastItem) {
this.lastClipboardItem = items[0].text;
}
}).then(callback);
},
/**
* Navigates between pages. It basically just sets currentPage to the
* given index (0 for the first page, 1 - the second etc.) and relaods
* the clipboard.
*
* @param {Number} pageIndex
* @param {Function} callback
*
* @see {@link loadClipboard}
*/
openPage(pageIndex, callback) {
this.currentPage = pageIndex;
this.loadClipboard(callback);
},
/**
* Hides app window.
*/
hideWindow() {
ipcRenderer.send('hideWindow');
},
/**
* Deletes item from the clipboard history.
*/
deleteItem() {
if (this.selectionIndex !== -1) {
const collection = this.query.length === 0 ? 'clipboardContent' : 'searchResults';
const clipboardItem = this[collection].splice(this.selectionIndex, 1)[0];
db.items.where('text').equals(clipboardItem).delete().then((count) => {
this.selectionIndex = -1;
this.currentPage = 0;
this.currentSearchPage = 0;
if (this.query.length > 0) {
this.searchClipboard(this.query);
} else {
this.loadClipboard();
}
});
}
},
/**
* Takes an item from the clipboard collection and moves it to the top of
* the list.
*/
copyItem() {
const collection = this.query.length === 0 ? 'clipboardContent' : 'searchResults';
const clipboardItem = this[collection].splice(this.selectionIndex, 1)[0];
db.items
.where('text').equals(clipboardItem)
.delete()
.then((count) => {
this.clipboardItemCount -= count;
// Navigate back to the first page because the selected item is now
// at the very top of the list.
this.openPage(0, () => {
clipboard.writeText(clipboardItem);
this.selectionIndex = -1;
this.query = '';
this.currentSearchPage = 0;
this.hideWindow();
});
});
},
/**
* Checks if the app is running on MacOS and the asset points to a DMG archive
* and offers to download a MacOS update if the comdisions are met.
*
* @param {String} updateUrl
*/
offerMacOSUpdate(updateUrl) {
if (process.platform === 'darwin' && updateUrl.indexOf('.dmg') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Checks if the app is running on 64bit Windows and the asset points to a
* 64bit ZIP archive and offers to download a 64bit Windows update if the
* comdisions are met.
*
* @param {String} updateUrl
*/
offerWin64Update(updateUrl) {
if (process.platform === 'win32' && process.env.PROCESSOR_ARCHITECTURE === 'AMD64' &&
updateUrl.indexOf('win32-x64') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Checks if the app is running on 32bit Windows and the asset points to a
* 32bit ZIP archive and offers to download a 32bit Windows update if the
* comdisions are met.
*
* @param {String} updateUrl
*/
offerWin32Update(updateUrl) {
if (process.platform === 'win32' && process.env.PROCESSOR_ARCHITECTURE === 'x86' &&
updateUrl.indexOf('win32-ia32') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Checks if the app is running on 32bit Linux and the asset points to a
* 32bit ZIP archive and offers to download a 32bit Linux update if the
* comdisions are met.
*
* @param {String} updateUrl
*/
offerLinux32Update(updateUrl) {
if (process.platform === 'linux' && process.env.PROCESSOR_ARCHITECTURE === 'x86' &&
updateUrl.indexOf('linux-ia32') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Checks if the app is running on 64bit Linux and the asset points to a
* 64bit ZIP archive and offers to download a 64bit Linux update if the
* comdisions are met.
*
* @param {String} updateUrl
*/
offerLinux64Update(updateUrl) {
if (process.platform === 'linux' && process.env.PROCESSOR_ARCHITECTURE === 'AMD64' &&
updateUrl.indexOf('linux-x64') !== -1) {
ipcRenderer.send('offer-update', { url: updateUrl });
}
},
/**
* Performs a clipboard search with the given search needle.
*
* @param {String} needle
*/
searchClipboard(needle) {
db.items.where('text').startsWithIgnoreCase(needle).count((count) => {
vm.searchItemCount = count;
});
db.items
.where('text').startsWithIgnoreCase(needle)
.reverse()
.offset(9 * vm.currentSearchPage)
.limit(9)
.sortBy('id')
.then((items) => {
vm.searchResults = [];
items.forEach((item) => vm.searchResults.push(item.text));
});
},
/**
* Assigns actions to specifickeyboard events.
*/
initActionKeys() {
Mousetrap.bind('up', () => {
if (this.selectionIndex === 0) {
this.selectionIndex = this.clipboardContent.length - 1;
} else {
this.selectionIndex--;
}
});
Mousetrap.bind('right', () => {
if (this.query.length === 0) {
if ((Math.ceil(this.clipboardItemCount / 9)) > this.currentPage + 1) {
this.openPage(this.currentPage + 1);
}
} else {
this.currentSearchPage++;
this.searchClipboard(this.query);
}
});
Mousetrap.bind('down', () => {
if (this.selectionIndex == this.clipboardContent.length - 1) {
this.selectionIndex = 0;
} else {
this.selectionIndex++;
}
});
Mousetrap.bind('left', () => {
if (this.query.length === 0) {
if (this.currentPage > 0) {
this.openPage(this.currentPage - 1);
}
} else {
this.currentSearchPage--;
this.searchClipboard(this.query);
}
});
Mousetrap.bind('esc', this.hideWindow);
Mousetrap.bind('enter', this.copyItem);
if (process.platform === 'darwin') {
Mousetrap.bind('command+backspace', this.deleteItem);
} else {
Mousetrap.bind('ctrl+backspace', this.deleteItem);
}
}
},
/**
* Initializes the application.
*/
ready() {
// Query GitHub to see if a new version of Olden is available.
// TODO: implement autoupdater!
fetch('https://api.github.com/repos/aigarsdz/olden/releases/latest')
.then((response) => { return response.json() })
.then((data) => {
if (data.tag_name && data.tag_name != `v${packageInfo.version}`) {
data.assets.forEach((asset) => {
this.offerMacOSUpdate(asset.browser_download_url);
this.offerWin64Update(asset.browser_download_url);
this.offerWin32Update(asset.browser_download_url);
});
}
}).catch((err) => console.log(err));
this.initActionKeys();
db.items.where('favorite').equals(0).count((count) => {
this.clipboardItemCount = count;
});
this.loadClipboard(() => {
// NOTE: MacOS has no native interface to listen for clipboard changes,
// therefore, polling is the only option. We should do as little
// processing as possible in this function to preserve system resources.
// TODO: Windows has an interface for this purpose. We should at least
// try to integrate it in the app.
setInterval(() => {
const clipboardText = clipboard.readText();
if (clipboardText.length > 0 && clipboardText != this.lastClipboardItem) {
// Delete the item if it's already in the clipboard to avoid extra checks.
db.items.where('text').equals(clipboardText).delete()
.then((count) => {
// TODO: try to remove the item without checking if it's in the array!
if (this.clipboardContent.includes(clipboardText)) {
const clipboardItem = this.clipboardContent.splice(
this.clipboardContent.indexOf(clipboardText), 1
)[0];
} else if (this.clipboardContent.length === 9) {
this.clipboardContent.pop();
}
this.clipboardItemCount -= count;
})
.then(() => {
this.clipboardContent.unshift(clipboardText);
db.items.add({ text: clipboardText, favorite: 0 });
this.lastClipboardItem = clipboardText;
this.clipboardItemCount++;
});
}
}, 500);
}, true);
ipcRenderer.on('clearClipboardHistory', () => {
db.items.clear().then(() => {
this.lastClipboardItem = '';
this.clipboardItemCount = 0;
this.selectionIndex = -1;
this.query = '';
this.openPage(0);
});
});
ipcRenderer.on('exportClipboardHistoryAsJSON', () => {
db.items.toArray().then((items) => {
ipcRenderer.send('saveExportedData', { items: JSON.stringify(items), format: 'json' })
});
});
ipcRenderer.on('exportClipboardHistoryAsTXT', () => {
db.items.toArray().then((items) => {
ipcRenderer.send('saveExportedData', {
items: items.map((item) => item.text).join('\n'),
format: 'txt'
})
});
});
}
});
vm.$watch('query', (value) => {
if (value.length > 0) {
vm.searchClipboard(value);
} else {
vm.searchResults = [];
vm.currentSearchPage = 0;
}
}); | Fixes disappearing of the first item
| app/js/main.js | Fixes disappearing of the first item | <ide><path>pp/js/main.js
<ide> copyItem() {
<ide> const collection = this.query.length === 0 ? 'clipboardContent' : 'searchResults';
<ide> const clipboardItem = this[collection].splice(this.selectionIndex, 1)[0];
<add>
<add> // Issue #9. If we select the first item we need to nullify the last
<add> // clipboard item stored in memory, otherwise it will just disappear form
<add> // clipboard history.
<add> if (this.selectionIndex === 0) {
<add> this.lastClipboardItem = null;
<add> }
<ide>
<ide> db.items
<ide> .where('text').equals(clipboardItem) |
|
Java | mit | 8afc5509fb69a107bf507a9d81dc354b208bbcfe | 0 | meraki-analytics/Orianna,robrua/Orianna | package com.merakianalytics.orianna.types.core.summoner;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.joda.time.DateTime;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.merakianalytics.orianna.Orianna;
import com.merakianalytics.orianna.types.common.Platform;
import com.merakianalytics.orianna.types.common.Queue;
import com.merakianalytics.orianna.types.common.Region;
import com.merakianalytics.orianna.types.common.Season;
import com.merakianalytics.orianna.types.common.Tier;
import com.merakianalytics.orianna.types.core.GhostObject;
import com.merakianalytics.orianna.types.core.championmastery.ChampionMasteries;
import com.merakianalytics.orianna.types.core.championmastery.ChampionMastery;
import com.merakianalytics.orianna.types.core.championmastery.ChampionMasteryScore;
import com.merakianalytics.orianna.types.core.league.League;
import com.merakianalytics.orianna.types.core.league.LeaguePosition;
import com.merakianalytics.orianna.types.core.league.LeaguePositions;
import com.merakianalytics.orianna.types.core.league.Leagues;
import com.merakianalytics.orianna.types.core.match.Match;
import com.merakianalytics.orianna.types.core.match.MatchHistory;
import com.merakianalytics.orianna.types.core.match.Participant;
import com.merakianalytics.orianna.types.core.searchable.Searchable;
import com.merakianalytics.orianna.types.core.searchable.SearchableList;
import com.merakianalytics.orianna.types.core.spectator.CurrentGame;
import com.merakianalytics.orianna.types.core.staticdata.Champion;
import com.merakianalytics.orianna.types.core.staticdata.ProfileIcon;
import com.merakianalytics.orianna.types.core.thirdpartycode.VerificationString;
public class Summoner extends GhostObject<com.merakianalytics.orianna.types.data.summoner.Summoner> {
public static class Builder {
private Long id, accountId;
private String name;
private Platform platform;
private Builder(final long id, final boolean isSummonerId) {
if(isSummonerId) {
this.id = id;
} else {
accountId = id;
}
}
private Builder(final String name) {
this.name = name;
}
public Summoner get() {
if(name == null && id == null && accountId == null) {
throw new IllegalStateException("Must set an ID, account ID, or name for the Summoner!");
}
if(platform == null) {
platform = Orianna.getSettings().getDefaultPlatform();
if(platform == null) {
throw new IllegalStateException(
"No platform/region was set! Must either set a default platform/region with Orianna.setDefaultPlatform or Orianna.setDefaultRegion, or include a platform/region with the request!");
}
}
final ImmutableMap.Builder<String, Object> builder = ImmutableMap.<String, Object> builder().put("platform", platform);
if(id != null) {
builder.put("id", id);
} else if(accountId != null) {
builder.put("accountId", accountId);
} else {
builder.put("name", name);
}
return Orianna.getSettings().getPipeline().get(Summoner.class, builder.build());
}
public Builder withPlatform(final Platform platform) {
this.platform = platform;
return this;
}
public Builder withRegion(final Region region) {
platform = region.getPlatform();
return this;
}
}
private static final long serialVersionUID = 4280855397190856618L;
public static final String SUMMONER_LOAD_GROUP = "summoner";
public static Builder named(final String name) {
return new Builder(name);
}
public static Builder withAccountId(final long accountId) {
return new Builder(accountId, false);
}
public static Builder withId(final long id) {
return new Builder(id, true);
}
private final Supplier<ProfileIcon> profileIcon = Suppliers.memoize(new Supplier<ProfileIcon>() {
@Override
public ProfileIcon get() {
load(SUMMONER_LOAD_GROUP);
return ProfileIcon.withId(coreData.getProfileIconId()).withPlatform(Platform.withTag(coreData.getPlatform())).get();
}
});
public Summoner(final com.merakianalytics.orianna.types.data.summoner.Summoner coreData) {
super(coreData, 1);
}
@Searchable(long.class)
public long getAccountId() {
if(coreData.getAccountId() == 0L) {
load(SUMMONER_LOAD_GROUP);
}
return coreData.getAccountId();
}
public ChampionMasteries getChampionMasteries() {
return ChampionMasteries.forSummoner(this).get();
}
public SearchableList<ChampionMastery> getChampionMasteries(final Champion... champions) {
return ChampionMasteries.forSummoner(this).withChampions(champions).get();
}
public SearchableList<ChampionMastery> getChampionMasteries(final Iterable<Champion> champions) {
return ChampionMasteries.forSummoner(this).withChampions(champions).get();
}
public ChampionMastery getChampionMastery(final Champion champion) {
return ChampionMastery.forSummoner(this).withChampion(champion).get();
}
public ChampionMasteryScore getChampionMasteryScore() {
return ChampionMasteryScore.forSummoner(this).get();
}
public CurrentGame getCurrentGame() {
return CurrentGame.forSummoner(this).get();
}
public Tier getHighestTier(final Season season) {
final MatchHistory one = MatchHistory.forSummoner(this).withSeasons(season).withStartIndex(0).withEndIndex(1).get();
if(!one.isEmpty()) {
final Match match = one.get(0);
final Participant summoner = match.getParticipants().get(0);
return summoner.getHighestTierInSeason();
}
return null;
}
@Searchable(long.class)
public long getId() {
if(coreData.getId() == 0L) {
load(SUMMONER_LOAD_GROUP);
}
return coreData.getId();
}
public League getLeague(final Queue queue) {
if(!Queue.RANKED.contains(queue)) {
throw new IllegalArgumentException("Can only get league for ranked queues!");
}
final LeaguePositions positions = LeaguePositions.forSummoner(this).get();
for(final LeaguePosition position : positions) {
if(queue == position.getQueue()) {
return League.withId(position.getLeagueId()).withPlatform(Platform.withTag(coreData.getPlatform())).get();
}
}
return null;
}
public LeaguePosition getLeaguePosition(final Queue queue) {
if(!Queue.RANKED.contains(queue)) {
throw new IllegalArgumentException("Can only get league positions for ranked queues!");
}
final LeaguePositions positions = LeaguePositions.forSummoner(this).get();
for(final LeaguePosition position : positions) {
if(queue == position.getQueue()) {
return position;
}
}
return null;
}
public LeaguePositions getLeaguePositions() {
return LeaguePositions.forSummoner(this).get();
}
public SearchableList<League> getLeagues() {
final LeaguePositions positions = LeaguePositions.forSummoner(this).get();
final List<String> ids = new ArrayList<>(positions.size());
for(final LeaguePosition position : positions) {
ids.add(position.getLeagueId());
}
return Leagues.withIds(ids).withPlatform(Platform.withTag(coreData.getPlatform())).get();
}
public int getLevel() {
load(SUMMONER_LOAD_GROUP);
return coreData.getLevel();
}
@Override
protected List<String> getLoadGroups() {
return Arrays.asList(new String[] {
SUMMONER_LOAD_GROUP
});
}
@Searchable(String.class)
public String getName() {
if(coreData.getName() == null) {
load(SUMMONER_LOAD_GROUP);
}
return coreData.getName();
}
public Platform getPlatform() {
return Platform.withTag(coreData.getPlatform());
}
public ProfileIcon getProfileIcon() {
return profileIcon.get();
}
public DateTime getUpdated() {
load(SUMMONER_LOAD_GROUP);
return coreData.getUpdated();
}
public VerificationString getVerificationString() {
return VerificationString.forSummoner(this).get();
}
public boolean isInGame() {
return CurrentGame.forSummoner(this).get().exists();
}
@Override
protected void loadCoreData(final String group) {
ImmutableMap.Builder<String, Object> builder;
switch(group) {
case SUMMONER_LOAD_GROUP:
builder = ImmutableMap.builder();
if(coreData.getId() != 0) {
builder.put("id", coreData.getId());
}
if(coreData.getAccountId() != 0) {
builder.put("accountId", coreData.getAccountId());
}
if(coreData.getName() != null) {
builder.put("name", coreData.getName());
}
if(coreData.getPlatform() != null) {
builder.put("platform", Platform.withTag(coreData.getPlatform()));
}
coreData = Orianna.getSettings().getPipeline().get(com.merakianalytics.orianna.types.data.summoner.Summoner.class, builder.build());
break;
default:
break;
}
}
public MatchHistory.Builder matchHistory() {
return MatchHistory.forSummoner(this);
}
}
| orianna/src/main/java/com/merakianalytics/orianna/types/core/summoner/Summoner.java | package com.merakianalytics.orianna.types.core.summoner;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.joda.time.DateTime;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableMap;
import com.merakianalytics.orianna.Orianna;
import com.merakianalytics.orianna.types.common.Platform;
import com.merakianalytics.orianna.types.common.Queue;
import com.merakianalytics.orianna.types.common.Region;
import com.merakianalytics.orianna.types.common.Season;
import com.merakianalytics.orianna.types.common.Tier;
import com.merakianalytics.orianna.types.core.GhostObject;
import com.merakianalytics.orianna.types.core.championmastery.ChampionMasteries;
import com.merakianalytics.orianna.types.core.championmastery.ChampionMastery;
import com.merakianalytics.orianna.types.core.championmastery.ChampionMasteryScore;
import com.merakianalytics.orianna.types.core.league.League;
import com.merakianalytics.orianna.types.core.league.LeaguePosition;
import com.merakianalytics.orianna.types.core.league.LeaguePositions;
import com.merakianalytics.orianna.types.core.league.Leagues;
import com.merakianalytics.orianna.types.core.match.Match;
import com.merakianalytics.orianna.types.core.match.MatchHistory;
import com.merakianalytics.orianna.types.core.match.Participant;
import com.merakianalytics.orianna.types.core.searchable.Searchable;
import com.merakianalytics.orianna.types.core.searchable.SearchableList;
import com.merakianalytics.orianna.types.core.spectator.CurrentGame;
import com.merakianalytics.orianna.types.core.staticdata.Champion;
import com.merakianalytics.orianna.types.core.staticdata.ProfileIcon;
import com.merakianalytics.orianna.types.core.thirdpartycode.VerificationString;
public class Summoner extends GhostObject<com.merakianalytics.orianna.types.data.summoner.Summoner> {
public static class Builder {
private Long id, accountId;
private String name;
private Platform platform;
private Builder(final long id, final boolean isSummonerId) {
if(isSummonerId) {
this.id = id;
} else {
accountId = id;
}
}
private Builder(final String name) {
this.name = name;
}
public Summoner get() {
if(name == null && id == null && accountId == null) {
throw new IllegalStateException("Must set an ID, account ID, or name for the Summoner!");
}
if(platform == null) {
platform = Orianna.getSettings().getDefaultPlatform();
if(platform == null) {
throw new IllegalStateException(
"No platform/region was set! Must either set a default platform/region with Orianna.setDefaultPlatform or Orianna.setDefaultRegion, or include a platform/region with the request!");
}
}
final ImmutableMap.Builder<String, Object> builder = ImmutableMap.<String, Object> builder().put("platform", platform);
if(id != null) {
builder.put("id", id);
} else if(accountId != null) {
builder.put("accountId", accountId);
} else {
builder.put("name", name);
}
return Orianna.getSettings().getPipeline().get(Summoner.class, builder.build());
}
public Builder withPlatform(final Platform platform) {
this.platform = platform;
return this;
}
public Builder withRegion(final Region region) {
platform = region.getPlatform();
return this;
}
}
private static final long serialVersionUID = 4280855397190856618L;
public static final String SUMMONER_LOAD_GROUP = "summoner";
public static Builder named(final String name) {
return new Builder(name);
}
public static Builder withAccountId(final long accountId) {
return new Builder(accountId, false);
}
public static Builder withId(final long id) {
return new Builder(id, true);
}
private final Supplier<ProfileIcon> profileIcon = Suppliers.memoize(new Supplier<ProfileIcon>() {
@Override
public ProfileIcon get() {
load(SUMMONER_LOAD_GROUP);
return ProfileIcon.withId(coreData.getProfileIconId()).withPlatform(Platform.withTag(coreData.getPlatform())).get();
}
});
public Summoner(final com.merakianalytics.orianna.types.data.summoner.Summoner coreData) {
super(coreData, 1);
}
@Searchable(long.class)
public long getAccountId() {
if(coreData.getAccountId() == 0L) {
load(SUMMONER_LOAD_GROUP);
}
return coreData.getAccountId();
}
public ChampionMasteries getChampionMasteries() {
return ChampionMasteries.forSummoner(this).get();
}
public SearchableList<ChampionMastery> getChampionMasteries(final Champion... champions) {
return ChampionMasteries.forSummoner(this).withChampions(champions).get();
}
public SearchableList<ChampionMastery> getChampionMasteries(final Iterable<Champion> champions) {
return ChampionMasteries.forSummoner(this).withChampions(champions).get();
}
public ChampionMastery getChampionMastery(final Champion champion) {
return ChampionMastery.forSummoner(this).withChampion(champion).get();
}
public ChampionMasteryScore getChampionMasteryScore() {
return ChampionMasteryScore.forSummoner(this).get();
}
public CurrentGame getCurrentGame() {
return CurrentGame.forSummoner(this).get();
}
public Tier getHighestTier(final Season season) {
final MatchHistory one = MatchHistory.forSummoner(this).withSeasons(season).withStartIndex(0).withEndIndex(1).get();
if(!one.isEmpty()) {
final Match match = one.get(0);
final Participant summoner = match.getParticipants().get(0);
return summoner.getHighestTierInSeason();
}
return null;
}
@Searchable(long.class)
public long getId() {
if(coreData.getId() == 0L) {
load(SUMMONER_LOAD_GROUP);
}
return coreData.getId();
}
public League getLeague(final Queue queue) {
final LeaguePositions positions = LeaguePositions.forSummoner(this).get();
for(final LeaguePosition position : positions) {
if(queue == position.getQueue()) {
return League.withId(position.getLeagueId()).withPlatform(Platform.withTag(coreData.getPlatform())).get();
}
}
return null;
}
public LeaguePosition getLeaguePosition(final Queue queue) {
final LeaguePositions positions = LeaguePositions.forSummoner(this).get();
for(final LeaguePosition position : positions) {
if(queue == position.getQueue()) {
return position;
}
}
return null;
}
public LeaguePositions getLeaguePositions() {
return LeaguePositions.forSummoner(this).get();
}
public SearchableList<League> getLeagues() {
final LeaguePositions positions = LeaguePositions.forSummoner(this).get();
final List<String> ids = new ArrayList<>(positions.size());
for(final LeaguePosition position : positions) {
ids.add(position.getLeagueId());
}
return Leagues.withIds(ids).withPlatform(Platform.withTag(coreData.getPlatform())).get();
}
public int getLevel() {
load(SUMMONER_LOAD_GROUP);
return coreData.getLevel();
}
@Override
protected List<String> getLoadGroups() {
return Arrays.asList(new String[] {
SUMMONER_LOAD_GROUP
});
}
@Searchable(String.class)
public String getName() {
if(coreData.getName() == null) {
load(SUMMONER_LOAD_GROUP);
}
return coreData.getName();
}
public Platform getPlatform() {
return Platform.withTag(coreData.getPlatform());
}
public ProfileIcon getProfileIcon() {
return profileIcon.get();
}
public DateTime getUpdated() {
load(SUMMONER_LOAD_GROUP);
return coreData.getUpdated();
}
public VerificationString getVerificationString() {
return VerificationString.forSummoner(this).get();
}
public boolean isInGame() {
return CurrentGame.forSummoner(this).get().exists();
}
@Override
protected void loadCoreData(final String group) {
ImmutableMap.Builder<String, Object> builder;
switch(group) {
case SUMMONER_LOAD_GROUP:
builder = ImmutableMap.builder();
if(coreData.getId() != 0) {
builder.put("id", coreData.getId());
}
if(coreData.getAccountId() != 0) {
builder.put("accountId", coreData.getAccountId());
}
if(coreData.getName() != null) {
builder.put("name", coreData.getName());
}
if(coreData.getPlatform() != null) {
builder.put("platform", Platform.withTag(coreData.getPlatform()));
}
coreData = Orianna.getSettings().getPipeline().get(com.merakianalytics.orianna.types.data.summoner.Summoner.class, builder.build());
break;
default:
break;
}
}
public MatchHistory.Builder matchHistory() {
return MatchHistory.forSummoner(this);
}
}
| only allow getting league/league position for ranked queues | orianna/src/main/java/com/merakianalytics/orianna/types/core/summoner/Summoner.java | only allow getting league/league position for ranked queues | <ide><path>rianna/src/main/java/com/merakianalytics/orianna/types/core/summoner/Summoner.java
<ide> }
<ide>
<ide> public League getLeague(final Queue queue) {
<add> if(!Queue.RANKED.contains(queue)) {
<add> throw new IllegalArgumentException("Can only get league for ranked queues!");
<add> }
<add>
<ide> final LeaguePositions positions = LeaguePositions.forSummoner(this).get();
<ide> for(final LeaguePosition position : positions) {
<ide> if(queue == position.getQueue()) {
<ide> }
<ide>
<ide> public LeaguePosition getLeaguePosition(final Queue queue) {
<add> if(!Queue.RANKED.contains(queue)) {
<add> throw new IllegalArgumentException("Can only get league positions for ranked queues!");
<add> }
<add>
<ide> final LeaguePositions positions = LeaguePositions.forSummoner(this).get();
<ide> for(final LeaguePosition position : positions) {
<ide> if(queue == position.getQueue()) { |
|
Java | mit | e550bce7e0505faabc9832b1a4cc79ca91ec9561 | 0 | betato/GameDisplay | package com.betato.gamedisplay;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import javax.swing.JFrame;
/**
* An extended JFrame for use with a GameWindow. Supports setting content size
* and cursor hiding.
*/
public class Frame extends JFrame {
private static final long serialVersionUID = 1L;
/**
* Creates a new Frame.
*
* @param gameWindow
* The gameWindow to add inside the frame
* @param title
* The window title to display
* @param width
* The frame width
* @param height
* The frame height
* @param useContentSize
* If true, the inside of the frame will be set to the specified
* dimensions, if false, the outside will be set
* @param resizable
* Whether or not the Frame will be manually resizable
* @param hidecursor
* Whether or not to hide the mouse cursor over the Frame
*
*/
public Frame(GameWindow gameWindow, String title, int width, int height, boolean useContentSize, boolean resizable,
boolean hideCursor) {
add(gameWindow);
setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
setTitle(title);
//
JFrame jFrame = new JFrame("");
jFrame.setVisible(true);
//
if (useContentSize) {
setContentSize(width, height);
} else {
setSize(width, height);
}
setResizable(resizable);
if (hideCursor) {
setCursorHidden(true);
}
}
/**
* Creates a new Frame with fixed size and shown cursor.
*
* @param gameWindow
* The gameWindow to add inside the frame
* @param title
* The window title to display
* @param width
* The frame width
* @param height
* The frame height
* @param useContentSize
* If true, the inside of the frame will be set to the specified
* dimensions, if false, the outside will be set
*/
public Frame(GameWindow gameWindow, String title, int width, int height, boolean useContentSize) {
this(gameWindow, title, width, height, useContentSize, false, false);
}
/**
* Sets the mouse cursor to be invisible
*
* @param hideCursor
* Whether or not to use an invisible cursor
*/
public void setCursorHidden(boolean hideCursor) {
if (hideCursor) {
// Create a new blank cursor
BufferedImage cursorImg = new BufferedImage(16, 16, BufferedImage.TYPE_INT_ARGB);
Cursor blankCursor = Toolkit.getDefaultToolkit().createCustomCursor(cursorImg, new Point(0, 0),
"blank cursor");
// Set the cursor
getContentPane().setCursor(blankCursor);
} else {
// Set default cursor
getContentPane().setCursor(Cursor.getDefaultCursor());
}
}
/**
* Sets the size of the frame based on the specified inner GameWindow size
*
* @param width
* The interior frame width
* @param height
* The interior frame height
*/
public void setContentSize(int width, int height) {
// Set content size and pack
getContentPane().setPreferredSize(new Dimension(width, height));
pack();
}
}
| src/com/betato/gamedisplay/Frame.java | package com.betato.gamedisplay;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import javax.swing.JFrame;
public class Frame extends JFrame{
private static final long serialVersionUID = 1L;
public Frame(GameWindow gameWindow, String title, int width, int height, boolean useContentSize, boolean resizable, boolean hideCursor) {
add(gameWindow);
setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
setTitle(title);
if (useContentSize) {
setContentSize(width, height);
} else {
setSize(width, height);
}
setResizable(resizable);
if (hideCursor) {
setCursorHidden(true);
}
}
public Frame(GameWindow gameWindow, String title, int width, int height, boolean useContentSize) {
this(gameWindow, title, width, height, useContentSize, false, false);
}
public void setCursorHidden(boolean hideCursor){
if (hideCursor) {
// Create a new blank cursor
BufferedImage cursorImg = new BufferedImage(16, 16, BufferedImage.TYPE_INT_ARGB);
Cursor blankCursor = Toolkit.getDefaultToolkit().createCustomCursor(
cursorImg, new Point(0, 0), "blank cursor");
// Set the cursor
getContentPane().setCursor(blankCursor);
} else {
// Set default cursor
getContentPane().setCursor(Cursor.getDefaultCursor());
}
}
public void setContentSize(int width, int height) {
// Set content size and pack
getContentPane().setPreferredSize(new Dimension(width, height));
pack();
}
}
| Add Javadoc comments to Frame
| src/com/betato/gamedisplay/Frame.java | Add Javadoc comments to Frame | <ide><path>rc/com/betato/gamedisplay/Frame.java
<ide>
<ide> import javax.swing.JFrame;
<ide>
<del>public class Frame extends JFrame{
<add>/**
<add> * An extended JFrame for use with a GameWindow. Supports setting content size
<add> * and cursor hiding.
<add> */
<add>public class Frame extends JFrame {
<ide> private static final long serialVersionUID = 1L;
<del>
<del> public Frame(GameWindow gameWindow, String title, int width, int height, boolean useContentSize, boolean resizable, boolean hideCursor) {
<add>
<add> /**
<add> * Creates a new Frame.
<add> *
<add> * @param gameWindow
<add> * The gameWindow to add inside the frame
<add> * @param title
<add> * The window title to display
<add> * @param width
<add> * The frame width
<add> * @param height
<add> * The frame height
<add> * @param useContentSize
<add> * If true, the inside of the frame will be set to the specified
<add> * dimensions, if false, the outside will be set
<add> * @param resizable
<add> * Whether or not the Frame will be manually resizable
<add> * @param hidecursor
<add> * Whether or not to hide the mouse cursor over the Frame
<add> *
<add> */
<add> public Frame(GameWindow gameWindow, String title, int width, int height, boolean useContentSize, boolean resizable,
<add> boolean hideCursor) {
<ide> add(gameWindow);
<ide> setDefaultCloseOperation(JFrame.DO_NOTHING_ON_CLOSE);
<ide> setTitle(title);
<del>
<add> //
<add> JFrame jFrame = new JFrame("");
<add> jFrame.setVisible(true);
<add> //
<ide> if (useContentSize) {
<ide> setContentSize(width, height);
<ide> } else {
<ide> setSize(width, height);
<ide> }
<del>
<add>
<ide> setResizable(resizable);
<del>
<add>
<ide> if (hideCursor) {
<ide> setCursorHidden(true);
<ide> }
<ide> }
<del>
<add>
<add> /**
<add> * Creates a new Frame with fixed size and shown cursor.
<add> *
<add> * @param gameWindow
<add> * The gameWindow to add inside the frame
<add> * @param title
<add> * The window title to display
<add> * @param width
<add> * The frame width
<add> * @param height
<add> * The frame height
<add> * @param useContentSize
<add> * If true, the inside of the frame will be set to the specified
<add> * dimensions, if false, the outside will be set
<add> */
<ide> public Frame(GameWindow gameWindow, String title, int width, int height, boolean useContentSize) {
<ide> this(gameWindow, title, width, height, useContentSize, false, false);
<ide> }
<del>
<del> public void setCursorHidden(boolean hideCursor){
<del> if (hideCursor) {
<add>
<add> /**
<add> * Sets the mouse cursor to be invisible
<add> *
<add> * @param hideCursor
<add> * Whether or not to use an invisible cursor
<add> */
<add> public void setCursorHidden(boolean hideCursor) {
<add> if (hideCursor) {
<ide> // Create a new blank cursor
<ide> BufferedImage cursorImg = new BufferedImage(16, 16, BufferedImage.TYPE_INT_ARGB);
<del> Cursor blankCursor = Toolkit.getDefaultToolkit().createCustomCursor(
<del> cursorImg, new Point(0, 0), "blank cursor");
<add> Cursor blankCursor = Toolkit.getDefaultToolkit().createCustomCursor(cursorImg, new Point(0, 0),
<add> "blank cursor");
<ide> // Set the cursor
<ide> getContentPane().setCursor(blankCursor);
<ide> } else {
<ide> getContentPane().setCursor(Cursor.getDefaultCursor());
<ide> }
<ide> }
<del>
<add>
<add> /**
<add> * Sets the size of the frame based on the specified inner GameWindow size
<add> *
<add> * @param width
<add> * The interior frame width
<add> * @param height
<add> * The interior frame height
<add> */
<ide> public void setContentSize(int width, int height) {
<ide> // Set content size and pack
<ide> getContentPane().setPreferredSize(new Dimension(width, height)); |
|
Java | lgpl-2.1 | a5adf967ccebcc570804ca0b6d1920479afd81fb | 0 | johnscancella/spotbugs,spotbugs/spotbugs,sewe/spotbugs,sewe/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,sewe/spotbugs,KengoTODA/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs | /*
* FindBugs - Find bugs in Java programs
* Copyright (C) 2003-2006, University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs;
import java.io.PrintStream;
import java.util.Iterator;
/**
* Base class for BugReporters which provides convenient formatting
* and reporting of warnings and analysis errors.
*
* <p>
* "TextUIBugReporter" is a bit of a misnomer, since this class
* is useful in GUIs, too.
* </p>
*
* @author David Hovemeyer
*/
public abstract class TextUIBugReporter extends AbstractBugReporter {
private boolean reportStackTrace;
private boolean useLongBugCodes = false;
private boolean reportHistory = false;
private boolean reportUserDesignations = false;
static final String OTHER_CATEGORY_ABBREV = "X";
protected PrintStream outputStream = System.out;
public TextUIBugReporter() {
reportStackTrace = true;
}
/**
* Set the PrintStream to write bug output to.
*
* @param outputStream the PrintStream to write bug output to
*/
public void setOutputStream(PrintStream outputStream) {
this.outputStream = outputStream;
}
/**
* Set whether or not stack traces should be reported in error output.
*
* @param reportStackTrace true if stack traces should be reported, false if not
*/
public void setReportStackTrace(boolean reportStackTrace) {
this.reportStackTrace = reportStackTrace;
}
/**
* Print bug in one-line format.
*
* @param bugInstance the bug to print
*/
protected void printBug(BugInstance bugInstance) {
switch (bugInstance.getPriority()) {
case Detector.EXP_PRIORITY:
outputStream.print("E ");
break;
case Detector.LOW_PRIORITY:
outputStream.print("L ");
break;
case Detector.NORMAL_PRIORITY:
outputStream.print("M ");
break;
case Detector.HIGH_PRIORITY:
outputStream.print("H ");
break;
}
BugPattern pattern = bugInstance.getBugPattern();
if (pattern != null) {
String categoryAbbrev = null;
BugCategory bcat = I18N.instance().getBugCategory(pattern.getCategory());
if (bcat != null) categoryAbbrev = bcat.getAbbrev();
if (categoryAbbrev == null) categoryAbbrev = OTHER_CATEGORY_ABBREV;
outputStream.print(categoryAbbrev);
outputStream.print(" ");
}
if (useLongBugCodes) {
outputStream.print(bugInstance.getType());
outputStream.print(" ");
}
if (reportUserDesignations) {
outputStream.print(bugInstance.getUserDesignationKey());
outputStream.print(" ");
}
if (reportHistory) {
long first = bugInstance.getFirstVersion();
long last = bugInstance.getLastVersion();
outputStream.print(first);
outputStream.print(" ");
outputStream.print(last);
outputStream.print(" ");
}
SourceLineAnnotation line =
bugInstance.getPrimarySourceLineAnnotation();
if (line == null)
outputStream.println(bugInstance.getMessage());
else
outputStream.println(bugInstance.getMessage()
+ " " + line.toString());
}
private boolean analysisErrors;
private boolean missingClasses;
@Override
public void reportQueuedErrors() {
analysisErrors = missingClasses = false;
super.reportQueuedErrors();
}
@Override
public void reportAnalysisError(AnalysisError error) {
if (!analysisErrors) {
emitLine("The following errors occurred during analysis:");
analysisErrors = true;
}
emitLine("\t" + error.getMessage());
if (error.getExceptionMessage() != null) {
emitLine("\t\t" + error.getExceptionMessage());
if (reportStackTrace) {
String[] stackTrace = error.getStackTrace();
if (stackTrace != null) {
for (String aStackTrace : stackTrace) {
emitLine("\t\t\tAt " + aStackTrace);
}
}
}
}
}
@Override
public void reportMissingClass(String message) {
if (!missingClasses) {
emitLine("The following classes needed for analysis were missing:");
missingClasses = true;
}
emitLine("\t" + message);
}
/**
* Emit one line of the error message report.
* By default, error messages are printed to System.err.
* Subclasses may override.
*
* @param line one line of the error report
*/
protected void emitLine(String line) {
line = line.replaceAll("\t", " ");
System.err.println(line);
}
public boolean getUseLongBugCodes() {
return useLongBugCodes;
}
public void setReportHistory(boolean reportHistory) {
this.reportHistory = reportHistory;
}
public void setUseLongBugCodes(boolean useLongBugCodes) {
this.useLongBugCodes = useLongBugCodes;
}
public void setReportUserDesignations(boolean reportUserDesignations) {
this.reportUserDesignations = reportUserDesignations;
}
/* (non-Javadoc)
* @see edu.umd.cs.findbugs.BugReporter#getRealBugReporter()
*/
public BugReporter getRealBugReporter() {
return this;
}
/**
* For debugging: check a BugInstance to make sure it
* is valid.
*
* @param bugInstance the BugInstance to check
*/
protected void checkBugInstance(BugInstance bugInstance) {
for (Iterator<BugAnnotation> i = bugInstance.annotationIterator(); i.hasNext();) {
BugAnnotation bugAnnotation = i.next();
if (bugAnnotation instanceof PackageMemberAnnotation) {
PackageMemberAnnotation pkgMember = (PackageMemberAnnotation) bugAnnotation;
if (pkgMember.getSourceLines() == null) {
throw new IllegalStateException("Package member " + pkgMember +
" reported without source lines!");
}
}
}
}
}
// vim:ts=4
| findbugs/src/java/edu/umd/cs/findbugs/TextUIBugReporter.java | /*
* FindBugs - Find bugs in Java programs
* Copyright (C) 2003-2006, University of Maryland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package edu.umd.cs.findbugs;
import java.io.PrintStream;
import java.util.Iterator;
/**
* Base class for BugReporters which provides convenient formatting
* and reporting of warnings and analysis errors.
*
* <p>
* "TextUIBugReporter" is a bit of a misnomer, since this class
* is useful in GUIs, too.
* </p>
*
* @author David Hovemeyer
*/
public abstract class TextUIBugReporter extends AbstractBugReporter {
private boolean reportStackTrace;
private boolean useLongBugCodes = false;
private boolean reportHistory = false;
private boolean reportUserDesignations = false;
static final String OTHER_CATEGORY_ABBREV = "X";
protected PrintStream outputStream = System.out;
public TextUIBugReporter() {
reportStackTrace = true;
}
/**
* Set the PrintStream to write bug output to.
*
* @param outputStream the PrintStream to write bug output to
*/
public void setOutputStream(PrintStream outputStream) {
this.outputStream = outputStream;
}
/**
* Set whether or not stack traces should be reported in error output.
*
* @param reportStackTrace true if stack traces should be reported, false if not
*/
public void setReportStackTrace(boolean reportStackTrace) {
this.reportStackTrace = reportStackTrace;
}
/**
* Print bug in one-line format.
*
* @param bugInstance the bug to print
*/
protected void printBug(BugInstance bugInstance) {
switch (bugInstance.getPriority()) {
case Detector.EXP_PRIORITY:
outputStream.print("E ");
break;
case Detector.LOW_PRIORITY:
outputStream.print("L ");
break;
case Detector.NORMAL_PRIORITY:
outputStream.print("M ");
break;
case Detector.HIGH_PRIORITY:
outputStream.print("H ");
break;
}
BugPattern pattern = bugInstance.getBugPattern();
if (pattern != null) {
String categoryAbbrev = null;
BugCategory bcat = I18N.instance().getBugCategory(pattern.getCategory());
if (bcat != null) categoryAbbrev = bcat.getAbbrev();
if (categoryAbbrev == null) categoryAbbrev = OTHER_CATEGORY_ABBREV;
outputStream.print(categoryAbbrev);
outputStream.print(" ");
}
if (useLongBugCodes) {
outputStream.print(bugInstance.getType());
outputStream.print(" ");
}
if (reportUserDesignations) {
outputStream.print(bugInstance.getUserDesignationKey());
outputStream.print(" ");
}
if (reportHistory) {
long first = bugInstance.getFirstVersion();
long last = bugInstance.getLastVersion();
outputStream.print(first);
outputStream.print(" ");
outputStream.print(last);
outputStream.print(" ");
}
SourceLineAnnotation line =
bugInstance.getPrimarySourceLineAnnotation();
if (line == null)
outputStream.println(bugInstance.getMessage());
else
outputStream.println(bugInstance.getMessage()
+ " " + line.toString());
}
private boolean analysisErrors;
private boolean missingClasses;
@Override
public void reportQueuedErrors() {
analysisErrors = missingClasses = false;
super.reportQueuedErrors();
}
@Override
public void reportAnalysisError(AnalysisError error) {
if (!analysisErrors) {
emitLine("The following errors occurred during analysis:");
analysisErrors = true;
}
emitLine("\t" + error.getMessage());
if (error.getExceptionMessage() != null) {
emitLine("\t\t" + error.getExceptionMessage());
if (reportStackTrace) {
String[] stackTrace = error.getStackTrace();
if (stackTrace != null) {
for (String aStackTrace : stackTrace) {
emitLine("\t\t\tAt " + aStackTrace);
}
}
}
}
}
@Override
public void reportMissingClass(String message) {
if (!missingClasses) {
emitLine("The following classes needed for analysis were missing:");
missingClasses = true;
}
emitLine("\t" + message);
}
/**
* Emit one line of the error message report.
* By default, error messages are printed to System.err.
* Subclasses may override.
*
* @param line one line of the error report
*/
protected void emitLine(String line) {
line = line.replaceAll("\t", " ");
System.err.println(line);
}
public boolean getUseLongBugCodes() {
return useLongBugCodes;
}
public void setReportHistory(boolean reportHistory) {
this.reportHistory = reportHistory;
}
public void setUseLongBugCodes(boolean useLongBugCodes) {
this.useLongBugCodes = useLongBugCodes;
}
public void setReportUserDesignations(boolean reportUserDesignations) {
this.reportUserDesignations = reportUserDesignations;
}
/* (non-Javadoc)
* @see edu.umd.cs.findbugs.BugReporter#getRealBugReporter()
*/
public BugReporter getRealBugReporter() {
return this;
}
/**
* For debugging: check a BugInstance to make sure it
* is valid.
*
* @param bugInstance the BugInstance to check
*/
protected void checkBugInstance(BugInstance bugInstance) {
for (Iterator<BugAnnotation> i = bugInstance.annotationIterator(); i.hasNext();) {
BugAnnotation bugAnnotation = i.next();
if (bugAnnotation instanceof BugAnnotationWithSourceLines) {
BugAnnotationWithSourceLines pkgMember = (BugAnnotationWithSourceLines) bugAnnotation;
if (pkgMember.getSourceLines() == null) {
throw new IllegalStateException("Package member " + pkgMember +
" reported without source lines!");
}
}
}
}
}
// vim:ts=4
| refactoring fix
git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@9612 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
| findbugs/src/java/edu/umd/cs/findbugs/TextUIBugReporter.java | refactoring fix | <ide><path>indbugs/src/java/edu/umd/cs/findbugs/TextUIBugReporter.java
<ide> protected void checkBugInstance(BugInstance bugInstance) {
<ide> for (Iterator<BugAnnotation> i = bugInstance.annotationIterator(); i.hasNext();) {
<ide> BugAnnotation bugAnnotation = i.next();
<del> if (bugAnnotation instanceof BugAnnotationWithSourceLines) {
<del> BugAnnotationWithSourceLines pkgMember = (BugAnnotationWithSourceLines) bugAnnotation;
<add> if (bugAnnotation instanceof PackageMemberAnnotation) {
<add> PackageMemberAnnotation pkgMember = (PackageMemberAnnotation) bugAnnotation;
<ide> if (pkgMember.getSourceLines() == null) {
<ide> throw new IllegalStateException("Package member " + pkgMember +
<ide> " reported without source lines!"); |
|
Java | apache-2.0 | 0f0c2c735512421045330f9323a1f960c00bbdd5 | 0 | jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim,jaamsim/jaamsim | /*
* JaamSim Discrete Event Simulation
* Copyright (C) 2014 Ausenco Engineering Canada Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package com.jaamsim.input;
import java.util.ArrayList;
import com.jaamsim.basicsim.ObjectType;
import com.jaamsim.units.AngleUnit;
import com.jaamsim.units.DimensionlessUnit;
import com.jaamsim.units.Unit;
public class ExpParser {
public interface UnOpFunc {
public ExpResult apply(ParseContext context, ExpResult val) throws ExpError;
}
public interface BinOpFunc {
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError;
}
public interface CallableFunc {
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError;
}
public static class UnitData {
double scaleFactor;
Class<? extends Unit> unitType;
}
public interface ParseContext {
public UnitData getUnitByName(String name);
public Class<? extends Unit> multUnitTypes(Class<? extends Unit> a, Class<? extends Unit> b);
public Class<? extends Unit> divUnitTypes(Class<? extends Unit> num, Class<? extends Unit> denom);
}
public interface EvalContext {
public ExpResult getVariableValue(String[] names) throws ExpError;
public boolean eagerEval();
}
private interface ExpressionWalker {
public void visit(ExpNode exp) throws ExpError;
public ExpNode updateRef(ExpNode exp) throws ExpError;
}
////////////////////////////////////////////////////////////////////
// Expression types
public static class Expression {
public final String source;
private final ArrayList<Thread> executingThreads = new ArrayList<>();
private ExpNode rootNode;
public Expression(String source) {
this.source = source;
}
public ExpResult evaluate(EvalContext ec) throws ExpError {
synchronized(executingThreads) {
if (executingThreads.contains(Thread.currentThread())) {
throw new ExpError(null, 0, "Expression recursion detected for expression: %s", source);
}
executingThreads.add(Thread.currentThread());
}
ExpResult res = null;
try {
res = rootNode.evaluate(ec);
} finally {
synchronized(executingThreads) {
executingThreads.remove(Thread.currentThread());
}
}
return res;
}
void setRootNode(ExpNode node) {
rootNode = node;
}
}
private abstract static class ExpNode {
public final ParseContext context;
public final Expression exp;
public final int tokenPos;
public abstract ExpResult evaluate(EvalContext ec) throws ExpError;
public ExpNode(ParseContext context, Expression exp, int pos) {
this.context = context;
this.tokenPos = pos;
this.exp = exp;
}
abstract void walk(ExpressionWalker w) throws ExpError;
}
private static class Constant extends ExpNode {
public ExpResult val;
public Constant(ParseContext context, ExpResult val, Expression exp, int pos) {
super(context, exp, pos);
this.val = val;
}
@Override
public ExpResult evaluate(EvalContext ec) {
return val;
}
@Override
void walk(ExpressionWalker w) throws ExpError {
w.visit(this);
}
}
public static class Variable extends ExpNode {
private String[] vals;
public Variable(ParseContext context, String[] vals, Expression exp, int pos) {
super(context, exp, pos);
this.vals = vals;
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
return ec.getVariableValue(vals);
}
@Override
void walk(ExpressionWalker w) throws ExpError {
w.visit(this);
}
}
private static class UnaryOp extends ExpNode {
public ExpNode subExp;
private UnOpFunc func;
UnaryOp(ParseContext context, ExpNode subExp, UnOpFunc func, Expression exp, int pos) {
super(context, exp, pos);
this.subExp = subExp;
this.func = func;
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
return func.apply(context, subExp.evaluate(ec));
}
@Override
void walk(ExpressionWalker w) throws ExpError {
subExp.walk(w);
subExp = w.updateRef(subExp);
w.visit(this);
}
}
private static class BinaryOp extends ExpNode {
public ExpNode lSubExp;
public ExpNode rSubExp;
public ExpResult lConstVal;
public ExpResult rConstVal;
private final BinOpFunc func;
BinaryOp(ParseContext context, ExpNode lSubExp, ExpNode rSubExp, BinOpFunc func, Expression exp, int pos) {
super(context, exp, pos);
this.lSubExp = lSubExp;
this.rSubExp = rSubExp;
this.func = func;
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
ExpResult lRes = lConstVal != null ? lConstVal : lSubExp.evaluate(ec);
ExpResult rRes = rConstVal != null ? rConstVal : rSubExp.evaluate(ec);
return func.apply(context, lRes, rRes, exp.source, tokenPos);
}
@Override
void walk(ExpressionWalker w) throws ExpError {
lSubExp.walk(w);
rSubExp.walk(w);
lSubExp = w.updateRef(lSubExp);
rSubExp = w.updateRef(rSubExp);
w.visit(this);
}
}
// Expression-tree node for the ternary conditional operator (cond ? t : f).
// The const* fields are filled in by ConstOptimizer for constant sub-expressions.
public static class Conditional extends ExpNode {
private ExpNode condExp;
private ExpNode trueExp;
private ExpNode falseExp;
private ExpResult constCondRes;
private ExpResult constTrueRes;
private ExpResult constFalseRes;
public Conditional(ParseContext context, ExpNode c, ExpNode t, ExpNode f, Expression exp, int pos) {
super(context, exp, pos);
condExp = c;
trueExp = t;
falseExp =f;
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
// The evaluation context decides whether both branches must be evaluated
// (eager) or only the selected one (lazy, normal short-circuit behaviour).
if (ec.eagerEval())
return eagerEval(ec);
else
return lazyEval(ec);
}
private ExpResult lazyEval(EvalContext ec) throws ExpError {
// Only the branch selected by the condition is evaluated; a condition
// value of exactly 0 selects the false branch, anything else the true one.
ExpResult condRes = constCondRes != null ? constCondRes : condExp.evaluate(ec);
if (condRes.value == 0)
return constFalseRes != null ? constFalseRes : falseExp.evaluate(ec);
else
return constTrueRes != null ? constTrueRes : trueExp.evaluate(ec);
}
private ExpResult eagerEval(EvalContext ec) throws ExpError {
// Both branches are evaluated unconditionally (side effects of both occur),
// then the condition picks which result to return.
ExpResult condRes = constCondRes != null ? constCondRes : condExp.evaluate(ec);
ExpResult trueRes = constTrueRes != null ? constTrueRes : trueExp.evaluate(ec);
ExpResult falseRes = constFalseRes != null ? constFalseRes : falseExp.evaluate(ec);
if (condRes.value == 0)
return falseRes;
else
return trueRes;
}
@Override
void walk(ExpressionWalker w) throws ExpError {
condExp.walk(w);
trueExp.walk(w);
falseExp.walk(w);
condExp = w.updateRef(condExp);
trueExp = w.updateRef(trueExp);
falseExp = w.updateRef(falseExp);
w.visit(this);
}
}
// Expression-tree node for a function call such as max(a, b).
// constResults is a list parallel to args: a non-null entry is the
// pre-computed value of a constant argument (filled in by ConstOptimizer).
public static class FuncCall extends ExpNode {
private ArrayList<ExpNode> args;
private ArrayList<ExpResult> constResults;
private CallableFunc function;
public FuncCall(ParseContext context, CallableFunc function, ArrayList<ExpNode> args, Expression exp, int pos) {
super(context, exp, pos);
this.function = function;
this.args = args;
// Start with every slot null, meaning "not a constant".
constResults = new ArrayList<>(args.size());
for (int i = 0; i < args.size(); ++i) {
constResults.add(null);
}
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
// Evaluate all arguments (or reuse cached constants), then dispatch.
ExpResult[] argVals = new ExpResult[args.size()];
for (int i = 0; i < args.size(); ++i) {
ExpResult constArg = constResults.get(i);
argVals[i] = constArg != null ? constArg : args.get(i).evaluate(ec);
}
return function.call(context, argVals, exp.source, tokenPos);
}
@Override
void walk(ExpressionWalker w) throws ExpError {
// Walk all arguments first, then let the walker replace each, then visit.
for (int i = 0; i < args.size(); ++i) {
args.get(i).walk(w);
}
for (int i = 0; i < args.size(); ++i) {
args.set(i, w.updateRef(args.get(i)));
}
w.visit(this);
}
}
// Result of parsing an assignment statement: the dotted destination
// identifier chain and the expression whose value will be assigned to it.
public static class Assignment {
public String[] destination;
public Expression value;
}
///////////////////////////////////////////////////////////
// Entries for user definable operators and functions
// Registry record for a unary operator: its symbol, implementation, and
// the binding power used by the precedence-climbing parser.
private static class UnaryOpEntry {
public String symbol;
public UnOpFunc function;
public double bindingPower;
}
// Registry record for a binary operator; rAssoc marks right-associative
// operators (currently only '^').
private static class BinaryOpEntry {
public String symbol;
public BinOpFunc function;
public double bindingPower;
public boolean rAssoc;
}
// Registry record for a named function; a negative numMinArgs/numMaxArgs
// means that bound is not enforced (used for variadic functions).
private static class FunctionEntry {
public String name;
public CallableFunc function;
public int numMinArgs;
public int numMaxArgs;
}
private static ArrayList<UnaryOpEntry> unaryOps = new ArrayList<>();
private static ArrayList<BinaryOpEntry> binaryOps = new ArrayList<>();
private static ArrayList<FunctionEntry> functions = new ArrayList<>();
/**
 * Register a unary operator under the given symbol with the given binding power.
 */
private static void addUnaryOp(String symbol, double bindPower, UnOpFunc func) {
	UnaryOpEntry entry = new UnaryOpEntry();
	entry.bindingPower = bindPower;
	entry.function = func;
	entry.symbol = symbol;
	unaryOps.add(entry);
}
/**
 * Register a binary operator under the given symbol; rAssoc selects
 * right-associative grouping (e.g. exponentiation).
 */
private static void addBinaryOp(String symbol, double bindPower, boolean rAssoc, BinOpFunc func) {
	BinaryOpEntry entry = new BinaryOpEntry();
	entry.bindingPower = bindPower;
	entry.rAssoc = rAssoc;
	entry.function = func;
	entry.symbol = symbol;
	binaryOps.add(entry);
}
/**
 * Register a named function with the given arity bounds; a negative bound
 * means that limit is not enforced.
 */
private static void addFunction(String name, int numMinArgs, int numMaxArgs, CallableFunc func) {
	FunctionEntry entry = new FunctionEntry();
	entry.numMinArgs = numMinArgs;
	entry.numMaxArgs = numMaxArgs;
	entry.function = func;
	entry.name = name;
	functions.add(entry);
}
/**
 * Look up a registered unary operator by symbol.
 * @return the matching entry, or null if the symbol is not a unary operator
 */
private static UnaryOpEntry getUnaryOp(String symbol) {
	for (int i = 0; i < unaryOps.size(); i++) {
		UnaryOpEntry entry = unaryOps.get(i);
		if (entry.symbol.equals(symbol))
			return entry;
	}
	return null;
}
/**
 * Look up a registered binary operator by symbol.
 * @return the matching entry, or null if the symbol is not a binary operator
 */
private static BinaryOpEntry getBinaryOp(String symbol) {
	for (int i = 0; i < binaryOps.size(); i++) {
		BinaryOpEntry entry = binaryOps.get(i);
		if (entry.symbol.equals(symbol))
			return entry;
	}
	return null;
}
/**
 * Look up a registered function by name.
 * @return the matching entry, or null if no function has that name
 */
private static FunctionEntry getFunctionEntry(String funcName) {
	for (int i = 0; i < functions.size(); i++) {
		FunctionEntry entry = functions.get(i);
		if (entry.name.equals(funcName))
			return entry;
	}
	return null;
}
////////////////////////////////////////////////////////
// Statically initialize the operators and functions
// Registers every built-in operator and function exactly once at class-load
// time. Binding powers establish precedence: ||(6) < &&(8) < equality(10)
// < relational(12) < additive(20) < multiplicative(30) < power(40) < unary(50).
static {
///////////////////////////////////////////////////
// Unary Operators
addUnaryOp("-", 50, new UnOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult val){
return new ExpResult(-val.value, val.unitType);
}
});
addUnaryOp("+", 50, new UnOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult val){
return new ExpResult(val.value, val.unitType);
}
});
addUnaryOp("!", 50, new UnOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult val){
// Logical not: any non-zero value is treated as true.
return new ExpResult(val.value == 0 ? 1 : 0, DimensionlessUnit.class);
}
});
///////////////////////////////////////////////////
// Binary operators
addBinaryOp("+", 20, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value + rval.value, lval.unitType);
}
});
addBinaryOp("-", 20, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source,int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value - rval.value, lval.unitType);
}
});
addBinaryOp("*", 30, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
// The parse context decides what unit type a product has (may be null
// if the combination is not registered).
Class<? extends Unit> newType = context.multUnitTypes(lval.unitType, rval.unitType);
if (newType == null) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value * rval.value, newType);
}
});
addBinaryOp("/", 30, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
Class<? extends Unit> newType = context.divUnitTypes(lval.unitType, rval.unitType);
if (newType == null) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value / rval.value, newType);
}
});
// Exponentiation is the only right-associative operator (rAssoc = true)
// and only accepts dimensionless operands.
addBinaryOp("^", 40, true, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != DimensionlessUnit.class ||
rval.unitType != DimensionlessUnit.class) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(Math.pow(lval.value, rval.value), DimensionlessUnit.class);
}
});
addBinaryOp("%", 30, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value % rval.value, lval.unitType);
}
});
addBinaryOp("==", 10, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value == rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("!=", 10, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value != rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
// NOTE: && and || do not check unit types; both operands are always
// evaluated before this function is called, so there is no short-circuit
// at the operator level.
addBinaryOp("&&", 8, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos){
return new ExpResult((lval.value!=0) && (rval.value!=0) ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("||", 6, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos){
return new ExpResult((lval.value!=0) || (rval.value!=0) ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("<", 12, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value < rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("<=", 12, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value <= rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp(">", 12, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value > rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp(">=", 12, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value >= rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
////////////////////////////////////////////////////
// Functions
// max/min/indexOf* take two or more arguments (-1 max = unlimited); all
// arguments must share the unit type of the first.
addFunction("max", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
for (int i = 1; i < args.length; ++ i) {
if (args[0].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[0].unitType, args[i].unitType));
}
ExpResult res = args[0];
for (int i = 1; i < args.length; ++ i) {
if (args[i].value > res.value)
res = args[i];
}
return res;
}
});
addFunction("min", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
for (int i = 1; i < args.length; ++ i) {
if (args[0].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[0].unitType, args[i].unitType));
}
ExpResult res = args[0];
for (int i = 1; i < args.length; ++ i) {
if (args[i].value < res.value)
res = args[i];
}
return res;
}
});
addFunction("abs", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) {
return new ExpResult(Math.abs(args[0].value), args[0].unitType);
}
});
// indexOfMin/indexOfMax return a 1-based index (dimensionless).
addFunction("indexOfMin", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
for (int i = 1; i < args.length; ++ i) {
if (args[0].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[0].unitType, args[i].unitType));
}
ExpResult res = args[0];
int index = 0;
for (int i = 1; i < args.length; ++ i) {
if (args[i].value < res.value) {
res = args[i];
index = i;
}
}
return new ExpResult(index + 1, DimensionlessUnit.class);
}
});
addFunction("indexOfMax", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
for (int i = 1; i < args.length; ++ i) {
if (args[0].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[0].unitType, args[i].unitType));
}
ExpResult res = args[0];
int index = 0;
for (int i = 1; i < args.length; ++ i) {
if (args[i].value > res.value) {
res = args[i];
index = i;
}
}
return new ExpResult(index + 1, DimensionlessUnit.class);
}
});
// choose(k, v1, v2, ...) selects the k-th value (1-based); k must be
// dimensionless and all values must share a unit type.
addFunction("choose", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
for (int i = 2; i < args.length; ++ i) {
if (args[1].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[1].unitType, args[i].unitType));
}
int k = (int) args[0].value;
if (k < 1 || k >= args.length)
throw new ExpError(source, pos,
String.format("Invalid index: %s. Index must be between 1 and %s.", k, args.length-1));
return new ExpResult(args[k].value, args[k].unitType);
}
});
///////////////////////////////////////////////////
// Mathematical Constants
addFunction("E", 0, 0, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
return new ExpResult(Math.E, DimensionlessUnit.class);
}
});
addFunction("PI", 0, 0, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
return new ExpResult(Math.PI, DimensionlessUnit.class);
}
});
///////////////////////////////////////////////////
// Trigonometric Functions
// These accept either a dimensionless number or an angle.
addFunction("sin", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class && args[0].unitType != AngleUnit.class)
throw new ExpError(source, pos, getInvalidTrigUnitString(args[0].unitType))
return new ExpResult(Math.sin(args[0].value), DimensionlessUnit.class);
}
});
addFunction("cos", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class && args[0].unitType != AngleUnit.class)
throw new ExpError(source, pos, getInvalidTrigUnitString(args[0].unitType));
return new ExpResult(Math.cos(args[0].value), DimensionlessUnit.class);
}
});
addFunction("tan", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class && args[0].unitType != AngleUnit.class)
throw new ExpError(source, pos, getInvalidTrigUnitString(args[0].unitType));
return new ExpResult(Math.tan(args[0].value), DimensionlessUnit.class);
}
});
///////////////////////////////////////////////////
// Inverse Trigonometric Functions
// These take a dimensionless input and return an angle.
addFunction("asin", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.asin(args[0].value), AngleUnit.class);
}
});
addFunction("acos", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.acos(args[0].value), AngleUnit.class);
}
});
addFunction("atan", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.atan(args[0].value), AngleUnit.class);
}
});
addFunction("atan2", 2, 2, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
if (args[1].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[1].unitType, DimensionlessUnit.class));
return new ExpResult(Math.atan2(args[0].value, args[1].value), AngleUnit.class);
}
});
///////////////////////////////////////////////////
// Exponential Functions
addFunction("exp", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.exp(args[0].value), DimensionlessUnit.class);
}
});
addFunction("ln", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.log(args[0].value), DimensionlessUnit.class);
}
});
// log is base-10; ln above is the natural logarithm.
addFunction("log", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.log10(args[0].value), DimensionlessUnit.class);
}
});
}
/**
 * Return the display name for a unit class, or "Unknown Unit" when the
 * class has no registered ObjectType.
 */
private static String unitToString(Class<? extends Unit> unit) {
	ObjectType type = ObjectType.getObjectTypeForClass(unit);
	return (type != null) ? type.getName() : "Unknown Unit";
}
/**
 * Build the error message reported when two operands have incompatible units.
 */
private static String getUnitMismatchString(Class<? extends Unit> u0, Class<? extends Unit> u1) {
	return String.format("Unit mismatch: '%s' and '%s' are not compatible",
			unitToString(u0), unitToString(u1));
}
/**
 * Build the error message reported when a trigonometric function receives
 * an input that is neither dimensionless nor an angle.
 */
private static String getInvalidTrigUnitString(Class<? extends Unit> u0) {
	return String.format("Invalid unit: %s. The input to a trigonometric function must be dimensionless or an angle.",
			unitToString(u0));
}
/**
 * Build the error message reported when an argument has the wrong unit type.
 * A required type of DimensionlessUnit gets a simpler message.
 */
private static String getInvalidUnitString(Class<? extends Unit> u0, Class<? extends Unit> u1) {
	String actual = unitToString(u0);
	if (u1 == DimensionlessUnit.class)
		return String.format("Invalid unit: %s. A dimensionless number is required.", actual);
	return String.format("Invalid unit: %s. Units of %s are required.", actual, unitToString(u1));
}
/**
* A utility class to make dealing with a list of tokens easier
*
*/
// Cursor over a tokenized input string, supporting peek/next/expect.
private static class TokenList {
ArrayList<ExpTokenizer.Token> tokens;
int pos;
TokenList(ArrayList<ExpTokenizer.Token> tokens) {
this.tokens = tokens;
this.pos = 0;
}
// Consume the next token, which must have the given type and value;
// otherwise throw an ExpError pointing at the offending position.
public void expect(int type, String val, String source) throws ExpError {
if (pos == tokens.size()) {
throw new ExpError(source, source.length(), String.format("Expected \"%s\", past the end of input", val))
}
ExpTokenizer.Token nextTok = tokens.get(pos);
if (nextTok.type != type || !nextTok.value.equals(val)) {
throw new ExpError(source, nextTok.pos, String.format("Expected \"%s\", got \"%s\"", val, nextTok.value));
}
pos++;
}
// Consume and return the next token, or null at end of input.
public ExpTokenizer.Token next() {
if (pos >= tokens.size()) {
return null;
}
return tokens.get(pos++);
}
// Return the next token without consuming it, or null at end of input.
public ExpTokenizer.Token peek() {
if (pos >= tokens.size()) {
return null;
}
return tokens.get(pos);
}
}
// Post-parse tree walker that performs constant folding: caches the values
// of constant sub-expressions inside their parents, and (in updateRef)
// replaces wholly-constant unary/binary nodes with Constant nodes.
private static class ConstOptimizer implements ExpressionWalker {
@Override
public void visit(ExpNode exp) throws ExpError {
// Note: Below we are passing 'null' as an EvalContext, this is not typically
// acceptable, but is 'safe enough' when we know the expression is a constant
if (exp instanceof BinaryOp) {
BinaryOp bo = (BinaryOp)exp;
if (bo.lSubExp instanceof Constant) {
// Just the left is a constant, store it in the binop
bo.lConstVal = bo.lSubExp.evaluate(null);
}
if (bo.rSubExp instanceof Constant) {
// Just the right is a constant, store it in the binop
bo.rConstVal = bo.rSubExp.evaluate(null);
}
}
if (exp instanceof Conditional) {
Conditional cond = (Conditional)exp;
if (cond.condExp instanceof Constant) {
cond.constCondRes = cond.condExp.evaluate(null);
}
if (cond.trueExp instanceof Constant) {
cond.constTrueRes = cond.trueExp.evaluate(null);
}
if (cond.falseExp instanceof Constant) {
cond.constFalseRes = cond.falseExp.evaluate(null);
}
}
if (exp instanceof FuncCall) {
FuncCall fc = (FuncCall)exp;
for (int i = 0; i < fc.args.size(); ++i) {
if (fc.args.get(i) instanceof Constant) {
fc.constResults.set(i, fc.args.get(i).evaluate(null));
}
}
}
}
/**
 * Give a node a chance to swap itself out with a different subtree.
 */
@Override
public ExpNode updateRef(ExpNode origNode) throws ExpError {
// NOTE(review): only UnaryOp and BinaryOp are folded into Constant nodes
// here; a fully-constant Conditional or FuncCall keeps its cached values
// but is not replaced — presumably intentional, verify if extending.
if (origNode instanceof UnaryOp) {
UnaryOp uo = (UnaryOp)origNode;
if (uo.subExp instanceof Constant) {
// This is an unary operation on a constant, we can replace it with a constant
ExpResult val = uo.evaluate(null);
return new Constant(uo.context, val, origNode.exp, uo.tokenPos);
}
}
if (origNode instanceof BinaryOp) {
BinaryOp bo = (BinaryOp)origNode;
if ((bo.lSubExp instanceof Constant) && (bo.rSubExp instanceof Constant)) {
// both sub expressions are constants, so replace the binop with a constant
ExpResult val = bo.evaluate(null);
return new Constant(bo.context, val, origNode.exp, bo.tokenPos);
}
}
return origNode;
}
}
private static ConstOptimizer CONST_OP = new ConstOptimizer();
/**
* The main entry point to the expression parsing system, will either return a valid
* expression that can be evaluated, or throw an error.
*/
/**
 * The main entry point to the expression parsing system, will either return a valid
 * expression that can be evaluated, or throw an error.
 * Tokenizes the input, parses it with binding power 0, verifies that all
 * tokens were consumed, then runs the constant-folding optimizer.
 */
public static Expression parseExpression(ParseContext context, String input) throws ExpError {
ArrayList<ExpTokenizer.Token> ts;
ts = ExpTokenizer.tokenize(input);
TokenList tokens = new TokenList(ts);
Expression ret = new Expression(input);
ExpNode expNode = parseExp(context, tokens, 0, ret);
// Make sure we've parsed all the tokens
ExpTokenizer.Token peeked = tokens.peek();
if (peeked != null) {
throw new ExpError(input, peeked.pos, "Unexpected additional values");
}
expNode.walk(CONST_OP);
expNode = CONST_OP.updateRef(expNode); // Finally, give the entire expression a chance to optimize itself into a constant
ret.setRootNode(expNode);
return ret;
}
// Precedence-climbing (Pratt) parser core: parse an opening expression, then
// repeatedly bind binary operators whose binding power exceeds bindPower.
// The conditional (?:) operator only binds at the top level (bindPower == 0).
private static ExpNode parseExp(ParseContext context, TokenList tokens, double bindPower, Expression exp) throws ExpError {
ExpNode lhs = parseOpeningExp(context, tokens, bindPower, exp);
// Now peek for a binary op to modify this expression
while (true) {
ExpTokenizer.Token peeked = tokens.peek();
if (peeked == null || peeked.type != ExpTokenizer.SYM_TYPE) {
break;
}
BinaryOpEntry binOp = getBinaryOp(peeked.value);
if (binOp != null && binOp.bindingPower > bindPower) {
// The next token is a binary op and powerful enough to bind us
lhs = handleBinOp(context, tokens, lhs, binOp, exp, peeked.pos);
continue;
}
// Specific check for binding the conditional (?:) operator
if (peeked.value.equals("?") && bindPower == 0) {
lhs = handleConditional(context, tokens, lhs, exp, peeked.pos);
continue;
}
break;
}
// We have bound as many operators as we can, return it
return lhs;
}
// Parse the right-hand operand of a binary operator and combine it with lhs.
private static ExpNode handleBinOp(ParseContext context, TokenList tokens, ExpNode lhs, BinaryOpEntry binOp, Expression exp, int pos) throws ExpError {
tokens.next(); // Consume the operator
// For right associative operators, we weaken the binding power a bit at application time (but not testing time)
double assocMod = binOp.rAssoc ? -0.5 : 0;
ExpNode rhs = parseExp(context, tokens, binOp.bindingPower + assocMod, exp);
return new BinaryOp(context, lhs, rhs, binOp.function, exp, pos);
}
// Parse the remainder of a ternary conditional: lhs is the already-parsed
// condition; consume '?', the true branch, ':', and the false branch.
private static ExpNode handleConditional(ParseContext context, TokenList tokens, ExpNode lhs, Expression exp, int pos) throws ExpError {
tokens.next(); // Consume the '?'
ExpNode trueExp = parseExp(context, tokens, 0, exp);
tokens.expect(ExpTokenizer.SYM_TYPE, ":", exp.source);
ExpNode falseExp = parseExp(context, tokens , 0, exp);
return new Conditional(context, lhs, trueExp, falseExp, exp, pos);
}
/**
 * Parse an assignment statement of the form "identifier.chain = expression".
 * The right-hand side is parsed, constant-folded, and stored as an Expression.
 *
 * @param context unit/function resolution context for the RHS expression
 * @param input the full assignment source string
 * @return the parsed destination chain and value expression
 * @throws ExpError if the input is not a well-formed assignment
 */
public static Assignment parseAssignment(ParseContext context, String input) throws ExpError {
	ArrayList<ExpTokenizer.Token> ts;
	ts = ExpTokenizer.tokenize(input);

	TokenList tokens = new TokenList(ts);

	ExpTokenizer.Token nextTok = tokens.next();
	if (nextTok == null || (nextTok.type != ExpTokenizer.SQ_TYPE &&
			!nextTok.value.equals("this"))) {
		throw new ExpError(input, 0, "Assignments must start with an identifier");
	}

	String[] destination = parseIdentifier(nextTok, tokens, new Expression(input));

	nextTok = tokens.next();
	if (nextTok == null) {
		// Bug fix: the original dereferenced nextTok.pos here, throwing a
		// NullPointerException instead of a useful parse error when the
		// input ended right after the destination identifier.
		throw new ExpError(input, input.length(), "Expected '=' in assignment");
	}
	if (nextTok.type != ExpTokenizer.SYM_TYPE || !nextTok.value.equals("=")) {
		throw new ExpError(input, nextTok.pos, "Expected '=' in assignment");
	}

	Assignment ret = new Assignment();
	ret.destination = destination;
	ret.value = new Expression(input);

	ExpNode expNode = parseExp(context, tokens, 0, ret.value);

	expNode.walk(CONST_OP);
	expNode = CONST_OP.updateRef(expNode); // Finally, give the entire expression a chance to optimize itself into a constant

	ret.value.setRootNode(expNode);

	return ret;
}
// The first half of expression parsing, parse a simple expression based on the next token
private static ExpNode parseOpeningExp(ParseContext context, TokenList tokens, double bindPower, Expression exp) throws ExpError{
ExpTokenizer.Token nextTok = tokens.next(); // consume the first token
if (nextTok == null) {
throw new ExpError(exp.source, exp.source.length(), "Unexpected end of string");
}
if (nextTok.type == ExpTokenizer.NUM_TYPE) {
return parseConstant(context, nextTok.value, tokens, exp, nextTok.pos);
}
if (nextTok.type == ExpTokenizer.VAR_TYPE &&
!nextTok.value.equals("this")) {
return parseFuncCall(context, nextTok.value, tokens, exp, nextTok.pos);
}
if (nextTok.type == ExpTokenizer.SQ_TYPE ||
nextTok.value.equals("this")) {
String[] vals = parseIdentifier(nextTok, tokens, exp);
return new Variable(context, vals, exp, nextTok.pos);
}
// The next token must be a symbol
// handle parenthesis
if (nextTok.value.equals("(")) {
ExpNode expNode = parseExp(context, tokens, 0, exp);
tokens.expect(ExpTokenizer.SYM_TYPE, ")", exp.source); // Expect the closing paren
return expNode;
}
UnaryOpEntry oe = getUnaryOp(nextTok.value);
if (oe != null) {
ExpNode expNode = parseExp(context, tokens, oe.bindingPower, exp);
return new UnaryOp(context, expNode, oe.function, exp, nextTok.pos);
}
// We're all out of tricks here, this is an unknown expression
throw new ExpError(exp.source, nextTok.pos, "Can not parse expression");
}
// Build a Constant node from a numeric token, applying the scale factor and
// unit type of an optional square-quoted unit token that follows it.
private static ExpNode parseConstant(ParseContext context, String constant, TokenList tokens, Expression exp, int pos) throws ExpError {
double mult = 1;
Class<? extends Unit> ut = DimensionlessUnit.class;
ExpTokenizer.Token peeked = tokens.peek();
if (peeked != null && peeked.type == ExpTokenizer.SQ_TYPE) {
// This constant is followed by a square quoted token, it must be the unit
tokens.next(); // Consume unit token
UnitData unit = context.getUnitByName(peeked.value);
if (unit == null) {
throw new ExpError(exp.source, peeked.pos, "Unknown unit: %s", peeked.value);
}
mult = unit.scaleFactor;
ut = unit.unitType;
}
// Store the value scaled into internal (SI-scaled) units.
return new Constant(context, new ExpResult(Double.parseDouble(constant)*mult, ut), exp, pos);
}
/**
 * Parse a function call: the function name has already been consumed; this
 * consumes the parenthesized, comma-separated argument list and validates
 * the argument count against the registered FunctionEntry.
 *
 * Fixes two typos in user-facing error messages:
 * "arguement list" -> "argument list" and "Uknown function" -> "Unknown function".
 *
 * @throws ExpError on malformed argument lists, unknown functions, or bad arity
 */
private static ExpNode parseFuncCall(ParseContext context, String funcName, TokenList tokens, Expression exp, int pos) throws ExpError {

	tokens.expect(ExpTokenizer.SYM_TYPE, "(", exp.source);
	ArrayList<ExpNode> arguments = new ArrayList<>();

	ExpTokenizer.Token peeked = tokens.peek();
	if (peeked == null) {
		throw new ExpError(exp.source, exp.source.length(), "Unexpected end of input in argument list");
	}
	boolean isEmpty = false;
	if (peeked.value.equals(")")) {
		// Special case with empty argument list
		isEmpty = true;
		tokens.next(); // Consume closing parens
	}

	while (!isEmpty) {
		ExpNode nextArg = parseExp(context, tokens, 0, exp);
		arguments.add(nextArg);

		ExpTokenizer.Token nextTok = tokens.next();
		if (nextTok == null) {
			throw new ExpError(exp.source, exp.source.length(), "Unexpected end of input in argument list.");
		}
		if (nextTok.value.equals(")")) {
			break;
		}
		if (nextTok.value.equals(",")) {
			continue;
		}

		// Unexpected token
		throw new ExpError(exp.source, nextTok.pos, "Unexpected token in argument list");
	}

	FunctionEntry fe = getFunctionEntry(funcName);
	if (fe == null) {
		throw new ExpError(exp.source, pos, "Unknown function: \"%s\"", funcName);
	}

	// A negative bound means that limit is not enforced (variadic functions).
	if (fe.numMinArgs >= 0 && arguments.size() < fe.numMinArgs){
		throw new ExpError(exp.source, pos, "Function \"%s\" expects at least %d arguments. %d provided.",
				funcName, fe.numMinArgs, arguments.size());
	}

	if (fe.numMaxArgs >= 0 && arguments.size() > fe.numMaxArgs){
		throw new ExpError(exp.source, pos, "Function \"%s\" expects at most %d arguments. %d provided.",
				funcName, fe.numMaxArgs, arguments.size());
	}

	return new FuncCall(context, fe.function, arguments, exp, pos);
}
/**
 * Parse a dotted identifier chain (name.sub.subSub ...) into its component
 * names. The first name token has already been consumed by the caller.
 *
 * @return the chain of interned names, in source order
 * @throws ExpError if a '.' is not followed by an identifier
 */
private static String[] parseIdentifier(ExpTokenizer.Token firstName, TokenList tokens, Expression exp) throws ExpError {
	ArrayList<String> vals = new ArrayList<>();
	vals.add(firstName.value.intern());
	while (true) {
		ExpTokenizer.Token peeked = tokens.peek();
		if (peeked == null || peeked.type != ExpTokenizer.SYM_TYPE || !peeked.value.equals(".")) {
			break;
		}
		// Next token is a '.' so parse another name
		tokens.next(); // consume
		ExpTokenizer.Token nextName = tokens.next();
		if (nextName == null || nextName.type != ExpTokenizer.VAR_TYPE) {
			throw new ExpError(exp.source, peeked.pos, "Expected Identifier after '.'");
		}
		vals.add(nextName.value.intern());
	}

	// Idiomatic list-to-array conversion (replaces the manual copy loop).
	return vals.toArray(new String[vals.size()]);
}
}
/*
* JaamSim Discrete Event Simulation
* Copyright (C) 2014 Ausenco Engineering Canada Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
package com.jaamsim.input;
import java.util.ArrayList;
import com.jaamsim.basicsim.ObjectType;
import com.jaamsim.units.AngleUnit;
import com.jaamsim.units.DimensionlessUnit;
import com.jaamsim.units.Unit;
public class ExpParser {
// Implementation of a unary operator: applies the operator to a single value.
public interface UnOpFunc {
public ExpResult apply(ParseContext context, ExpResult val) throws ExpError;
}
// Implementation of a binary operator; source/pos identify the operator's
// location in the input string for error reporting.
public interface BinOpFunc {
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError;
}
// Implementation of a named function; args are the already-evaluated
// argument values, source/pos locate the call for error reporting.
public interface CallableFunc {
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError;
}
// A named unit resolved by the ParseContext: the multiplier into internal
// values and the unit-type class it belongs to.
public static class UnitData {
double scaleFactor;
Class<? extends Unit> unitType;
}
// Parse-time services: resolving unit names and combining unit types for
// multiplication and division. Returning null signals an unknown combination.
public interface ParseContext {
public UnitData getUnitByName(String name);
public Class<? extends Unit> multUnitTypes(Class<? extends Unit> a, Class<? extends Unit> b);
public Class<? extends Unit> divUnitTypes(Class<? extends Unit> num, Class<? extends Unit> denom);
}
// Evaluation-time services: variable lookup, and whether conditionals must
// evaluate both branches (eager) instead of only the selected one.
public interface EvalContext {
public ExpResult getVariableValue(String[] names) throws ExpError;
public boolean eagerEval();
}
// Visitor over the expression tree: visit() observes a node after its
// children; updateRef() may return a replacement subtree for a node.
private interface ExpressionWalker {
public void visit(ExpNode exp) throws ExpError;
public ExpNode updateRef(ExpNode exp) throws ExpError;
}
////////////////////////////////////////////////////////////////////
// Expression types
// A compiled, evaluatable expression. Tracks which threads are currently
// evaluating it so that recursive self-reference is detected and reported
// rather than overflowing the stack.
public static class Expression {
public final String source;
private final ArrayList<Thread> executingThreads = new ArrayList<>();
private ExpNode rootNode;
public Expression(String source) {
this.source = source;
}
public ExpResult evaluate(EvalContext ec) throws ExpError {
// Register the current thread; a second entry by the same thread means
// this expression (directly or indirectly) references itself.
synchronized(executingThreads) {
if (executingThreads.contains(Thread.currentThread())) {
throw new ExpError(null, 0, "Expression recursion detected for expression: %s", source);
}
executingThreads.add(Thread.currentThread());
}
ExpResult res = null;
try {
res = rootNode.evaluate(ec);
} finally {
// Always deregister, even when evaluation throws.
synchronized(executingThreads) {
executingThreads.remove(Thread.currentThread());
}
}
return res;
}
void setRootNode(ExpNode node) {
rootNode = node;
}
}
/**
 * Base class for all expression tree nodes. Each node records the parse
 * context, the owning Expression, and its token position for error messages.
 */
private abstract static class ExpNode {
public final ParseContext context;
public final Expression exp;
public final int tokenPos;
public abstract ExpResult evaluate(EvalContext ec) throws ExpError;
public ExpNode(ParseContext context, Expression exp, int pos) {
this.context = context;
this.tokenPos = pos;
this.exp = exp;
}
// Visit this node's children (depth first), then the node itself.
abstract void walk(ExpressionWalker w) throws ExpError;
}
/** A literal constant node; evaluation simply returns the stored value. */
private static class Constant extends ExpNode {
public ExpResult val;
public Constant(ParseContext context, ExpResult val, Expression exp, int pos) {
super(context, exp, pos);
this.val = val;
}
@Override
public ExpResult evaluate(EvalContext ec) {
return val;
}
@Override
void walk(ExpressionWalker w) throws ExpError {
w.visit(this);
}
}
/**
 * A variable reference node; 'vals' is the dotted name chain (e.g.
 * ["this", "Output"]) resolved by the EvalContext at evaluation time.
 */
public static class Variable extends ExpNode {
private String[] vals;
public Variable(ParseContext context, String[] vals, Expression exp, int pos) {
super(context, exp, pos);
this.vals = vals;
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
return ec.getVariableValue(vals);
}
@Override
void walk(ExpressionWalker w) throws ExpError {
w.visit(this);
}
}
/** A unary operator applied to a single sub-expression. */
private static class UnaryOp extends ExpNode {
public ExpNode subExp;
private UnOpFunc func;
UnaryOp(ParseContext context, ExpNode subExp, UnOpFunc func, Expression exp, int pos) {
super(context, exp, pos);
this.subExp = subExp;
this.func = func;
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
return func.apply(context, subExp.evaluate(ec));
}
@Override
void walk(ExpressionWalker w) throws ExpError {
// Walk the child first, then allow the walker to replace it (e.g. constant folding).
subExp.walk(w);
subExp = w.updateRef(subExp);
w.visit(this);
}
}
/**
 * A binary operator with two sub-expressions. lConstVal/rConstVal are filled
 * in by the ConstOptimizer when a side is a compile-time constant, so the
 * cached result is used instead of re-evaluating that side.
 */
private static class BinaryOp extends ExpNode {
public ExpNode lSubExp;
public ExpNode rSubExp;
public ExpResult lConstVal;
public ExpResult rConstVal;
private final BinOpFunc func;
BinaryOp(ParseContext context, ExpNode lSubExp, ExpNode rSubExp, BinOpFunc func, Expression exp, int pos) {
super(context, exp, pos);
this.lSubExp = lSubExp;
this.rSubExp = rSubExp;
this.func = func;
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
// Note: both operands are always evaluated (no short-circuiting).
ExpResult lRes = lConstVal != null ? lConstVal : lSubExp.evaluate(ec);
ExpResult rRes = rConstVal != null ? rConstVal : rSubExp.evaluate(ec);
return func.apply(context, lRes, rRes, exp.source, tokenPos);
}
@Override
void walk(ExpressionWalker w) throws ExpError {
lSubExp.walk(w);
rSubExp.walk(w);
lSubExp = w.updateRef(lSubExp);
rSubExp = w.updateRef(rSubExp);
w.visit(this);
}
}
/**
 * The ternary conditional operator (cond ? true : false). The EvalContext
 * chooses between lazy evaluation (only the taken branch is evaluated) and
 * eager evaluation (all three sub-expressions are evaluated every time).
 * The const* fields are caches filled in by the ConstOptimizer.
 */
public static class Conditional extends ExpNode {
private ExpNode condExp;
private ExpNode trueExp;
private ExpNode falseExp;
private ExpResult constCondRes;
private ExpResult constTrueRes;
private ExpResult constFalseRes;
public Conditional(ParseContext context, ExpNode c, ExpNode t, ExpNode f, Expression exp, int pos) {
super(context, exp, pos);
condExp = c;
trueExp = t;
falseExp =f;
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
if (ec.eagerEval())
return eagerEval(ec);
else
return lazyEval(ec);
}
// Evaluate only the branch selected by the condition (condition value 0 is false).
private ExpResult lazyEval(EvalContext ec) throws ExpError {
ExpResult condRes = constCondRes != null ? constCondRes : condExp.evaluate(ec);
if (condRes.value == 0)
return constFalseRes != null ? constFalseRes : falseExp.evaluate(ec);
else
return constTrueRes != null ? constTrueRes : trueExp.evaluate(ec);
}
// Evaluate both branches unconditionally, then pick one.
private ExpResult eagerEval(EvalContext ec) throws ExpError {
ExpResult condRes = constCondRes != null ? constCondRes : condExp.evaluate(ec);
ExpResult trueRes = constTrueRes != null ? constTrueRes : trueExp.evaluate(ec);
ExpResult falseRes = constFalseRes != null ? constFalseRes : falseExp.evaluate(ec);
if (condRes.value == 0)
return falseRes;
else
return trueRes;
}
@Override
void walk(ExpressionWalker w) throws ExpError {
condExp.walk(w);
trueExp.walk(w);
falseExp.walk(w);
condExp = w.updateRef(condExp);
trueExp = w.updateRef(trueExp);
falseExp = w.updateRef(falseExp);
w.visit(this);
}
}
/**
 * A call to a registered function. constResults caches per-argument constant
 * values (filled in by the ConstOptimizer); a null slot means that argument
 * must be evaluated at call time.
 */
public static class FuncCall extends ExpNode {
private ArrayList<ExpNode> args;
private ArrayList<ExpResult> constResults;
private CallableFunc function;
public FuncCall(ParseContext context, CallableFunc function, ArrayList<ExpNode> args, Expression exp, int pos) {
super(context, exp, pos);
this.function = function;
this.args = args;
// Pre-fill with nulls so constResults is index-aligned with args.
constResults = new ArrayList<>(args.size());
for (int i = 0; i < args.size(); ++i) {
constResults.add(null);
}
}
@Override
public ExpResult evaluate(EvalContext ec) throws ExpError {
ExpResult[] argVals = new ExpResult[args.size()];
for (int i = 0; i < args.size(); ++i) {
ExpResult constArg = constResults.get(i);
argVals[i] = constArg != null ? constArg : args.get(i).evaluate(ec);
}
return function.call(context, argVals, exp.source, tokenPos);
}
@Override
void walk(ExpressionWalker w) throws ExpError {
for (int i = 0; i < args.size(); ++i) {
args.get(i).walk(w);
}
for (int i = 0; i < args.size(); ++i) {
args.set(i, w.updateRef(args.get(i)));
}
w.visit(this);
}
}
/** A parsed assignment: a dotted destination identifier and the value expression. */
public static class Assignment {
public String[] destination;
public Expression value;
}
///////////////////////////////////////////////////////////
// Entries for user definable operators and functions
// Registry entry for a unary operator: its symbol, implementation, and binding power.
private static class UnaryOpEntry {
public String symbol;
public UnOpFunc function;
public double bindingPower;
}
// Registry entry for a binary operator; rAssoc marks right-associative operators (e.g. '^').
private static class BinaryOpEntry {
public String symbol;
public BinOpFunc function;
public double bindingPower;
public boolean rAssoc;
}
// Registry entry for a named function; numMaxArgs of -1 means unlimited arguments.
private static class FunctionEntry {
public String name;
public CallableFunc function;
public int numMinArgs;
public int numMaxArgs;
}
// Global operator/function tables, populated once in the static initializer below.
private static ArrayList<UnaryOpEntry> unaryOps = new ArrayList<>();
private static ArrayList<BinaryOpEntry> binaryOps = new ArrayList<>();
private static ArrayList<FunctionEntry> functions = new ArrayList<>();
// Registers a unary operator under the given symbol and binding power.
private static void addUnaryOp(String symbol, double bindPower, UnOpFunc func) {
UnaryOpEntry oe = new UnaryOpEntry();
oe.symbol = symbol;
oe.function = func;
oe.bindingPower = bindPower;
unaryOps.add(oe);
}
// Registers a binary operator; rAssoc selects right associativity.
private static void addBinaryOp(String symbol, double bindPower, boolean rAssoc, BinOpFunc func) {
BinaryOpEntry oe = new BinaryOpEntry();
oe.symbol = symbol;
oe.function = func;
oe.bindingPower = bindPower;
oe.rAssoc = rAssoc;
binaryOps.add(oe);
}
// Registers a named function; use numMaxArgs = -1 for a variadic function.
private static void addFunction(String name, int numMinArgs, int numMaxArgs, CallableFunc func) {
FunctionEntry fe = new FunctionEntry();
fe.name = name;
fe.function = func;
fe.numMinArgs = numMinArgs;
fe.numMaxArgs = numMaxArgs;
functions.add(fe);
}
/**
 * Looks up the unary operator registered under the given symbol.
 * @return the matching entry, or null if none is registered
 */
private static UnaryOpEntry getUnaryOp(String symbol) {
for (int i = 0; i < unaryOps.size(); i++) {
UnaryOpEntry entry = unaryOps.get(i);
if (entry.symbol.equals(symbol))
return entry;
}
return null;
}
/**
 * Looks up the binary operator registered under the given symbol.
 * @return the matching entry, or null if none is registered
 */
private static BinaryOpEntry getBinaryOp(String symbol) {
for (int i = 0; i < binaryOps.size(); i++) {
BinaryOpEntry entry = binaryOps.get(i);
if (entry.symbol.equals(symbol))
return entry;
}
return null;
}
/**
 * Looks up the function registered under the given name.
 * @return the matching entry, or null if none is registered
 */
private static FunctionEntry getFunctionEntry(String funcName) {
for (int i = 0; i < functions.size(); i++) {
FunctionEntry entry = functions.get(i);
if (entry.name.equals(funcName))
return entry;
}
return null;
}
////////////////////////////////////////////////////////
// Statically initialize the operators and functions
// Populates the operator and function tables exactly once at class load.
// Note: because BinaryOp evaluates both operands before calling the operator,
// '&&' and '||' below do NOT short-circuit.
static {
///////////////////////////////////////////////////
// Unary Operators
addUnaryOp("-", 50, new UnOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult val){
return new ExpResult(-val.value, val.unitType);
}
});
addUnaryOp("+", 50, new UnOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult val){
return new ExpResult(val.value, val.unitType);
}
});
// Logical not: any non-zero value is treated as true.
addUnaryOp("!", 50, new UnOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult val){
return new ExpResult(val.value == 0 ? 1 : 0, DimensionlessUnit.class);
}
});
///////////////////////////////////////////////////
// Binary operators
// '+' and '-' require both operands to share the same unit type.
addBinaryOp("+", 20, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value + rval.value, lval.unitType);
}
});
addBinaryOp("-", 20, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source,int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value - rval.value, lval.unitType);
}
});
// '*' and '/' derive the result unit type via the ParseContext unit algebra.
addBinaryOp("*", 30, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
Class<? extends Unit> newType = context.multUnitTypes(lval.unitType, rval.unitType);
if (newType == null) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value * rval.value, newType);
}
});
addBinaryOp("/", 30, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
Class<? extends Unit> newType = context.divUnitTypes(lval.unitType, rval.unitType);
if (newType == null) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value / rval.value, newType);
}
});
// '^' is right-associative and only defined for dimensionless operands.
addBinaryOp("^", 40, true, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != DimensionlessUnit.class ||
rval.unitType != DimensionlessUnit.class) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(Math.pow(lval.value, rval.value), DimensionlessUnit.class);
}
});
addBinaryOp("%", 30, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value % rval.value, lval.unitType);
}
});
// Comparisons return a dimensionless 1 (true) or 0 (false).
addBinaryOp("==", 10, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value == rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("!=", 10, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value != rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("&&", 8, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos){
return new ExpResult((lval.value!=0) && (rval.value!=0) ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("||", 6, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos){
return new ExpResult((lval.value!=0) || (rval.value!=0) ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("<", 12, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value < rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp("<=", 12, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value <= rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp(">", 12, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value > rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
addBinaryOp(">=", 12, false, new BinOpFunc() {
@Override
public ExpResult apply(ParseContext context, ExpResult lval, ExpResult rval, String source, int pos) throws ExpError {
if (lval.unitType != rval.unitType) {
throw new ExpError(source, pos, getUnitMismatchString(lval.unitType, rval.unitType));
}
return new ExpResult(lval.value >= rval.value ? 1 : 0, DimensionlessUnit.class);
}
});
////////////////////////////////////////////////////
// Functions
// max/min accept two or more arguments, all of the same unit type.
addFunction("max", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
for (int i = 1; i < args.length; ++ i) {
if (args[0].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[0].unitType, args[i].unitType));
}
ExpResult res = args[0];
for (int i = 1; i < args.length; ++ i) {
if (args[i].value > res.value)
res = args[i];
}
return res;
}
});
addFunction("min", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
for (int i = 1; i < args.length; ++ i) {
if (args[0].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[0].unitType, args[i].unitType));
}
ExpResult res = args[0];
for (int i = 1; i < args.length; ++ i) {
if (args[i].value < res.value)
res = args[i];
}
return res;
}
});
addFunction("abs", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) {
return new ExpResult(Math.abs(args[0].value), args[0].unitType);
}
});
// indexOfMin/indexOfMax return a 1-based index of the extreme argument.
addFunction("indexOfMin", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
for (int i = 1; i < args.length; ++ i) {
if (args[0].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[0].unitType, args[i].unitType));
}
ExpResult res = args[0];
int index = 0;
for (int i = 1; i < args.length; ++ i) {
if (args[i].value < res.value) {
res = args[i];
index = i;
}
}
return new ExpResult(index + 1, DimensionlessUnit.class);
}
});
addFunction("indexOfMax", 2, -1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
for (int i = 1; i < args.length; ++ i) {
if (args[0].unitType != args[i].unitType)
throw new ExpError(source, pos, getUnitMismatchString(args[0].unitType, args[i].unitType));
}
ExpResult res = args[0];
int index = 0;
for (int i = 1; i < args.length; ++ i) {
if (args[i].value > res.value) {
res = args[i];
index = i;
}
}
return new ExpResult(index + 1, DimensionlessUnit.class);
}
});
///////////////////////////////////////////////////
// Mathematical Constants
addFunction("E", 0, 0, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
return new ExpResult(Math.E, DimensionlessUnit.class);
}
});
addFunction("PI", 0, 0, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
return new ExpResult(Math.PI, DimensionlessUnit.class);
}
});
///////////////////////////////////////////////////
// Trigonometric Functions
// Trig functions accept a dimensionless value or an angle.
addFunction("sin", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class && args[0].unitType != AngleUnit.class)
throw new ExpError(source, pos, getInvalidTrigUnitString(args[0].unitType));
return new ExpResult(Math.sin(args[0].value), DimensionlessUnit.class);
}
});
addFunction("cos", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class && args[0].unitType != AngleUnit.class)
throw new ExpError(source, pos, getInvalidTrigUnitString(args[0].unitType));
return new ExpResult(Math.cos(args[0].value), DimensionlessUnit.class);
}
});
addFunction("tan", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class && args[0].unitType != AngleUnit.class)
throw new ExpError(source, pos, getInvalidTrigUnitString(args[0].unitType));
return new ExpResult(Math.tan(args[0].value), DimensionlessUnit.class);
}
});
///////////////////////////////////////////////////
// Inverse Trigonometric Functions
// Inverse trig functions take a dimensionless input and return an angle.
addFunction("asin", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.asin(args[0].value), AngleUnit.class);
}
});
addFunction("acos", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.acos(args[0].value), AngleUnit.class);
}
});
addFunction("atan", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.atan(args[0].value), AngleUnit.class);
}
});
addFunction("atan2", 2, 2, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
if (args[1].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[1].unitType, DimensionlessUnit.class));
return new ExpResult(Math.atan2(args[0].value, args[1].value), AngleUnit.class);
}
});
///////////////////////////////////////////////////
// Exponential Functions
addFunction("exp", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.exp(args[0].value), DimensionlessUnit.class);
}
});
// 'ln' is the natural logarithm; 'log' is base 10.
addFunction("ln", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.log(args[0].value), DimensionlessUnit.class);
}
});
addFunction("log", 1, 1, new CallableFunc() {
@Override
public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
if (args[0].unitType != DimensionlessUnit.class)
throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
return new ExpResult(Math.log10(args[0].value), DimensionlessUnit.class);
}
});
}
/** Returns the display name of a unit type class, or "Unknown Unit" if unregistered. */
private static String unitToString(Class<? extends Unit> unit) {
ObjectType type = ObjectType.getObjectTypeForClass(unit);
return (type != null) ? type.getName() : "Unknown Unit";
}
/** Builds the error text for two incompatible unit types. */
private static String getUnitMismatchString(Class<? extends Unit> u0, Class<? extends Unit> u1) {
return String.format("Unit mismatch: '%s' and '%s' are not compatible",
unitToString(u0), unitToString(u1));
}
/** Builds the error text for a trigonometric function given a non-angle, non-dimensionless input. */
private static String getInvalidTrigUnitString(Class<? extends Unit> u0) {
return String.format("Invalid unit: %s. The input to a trigonometric function must be dimensionless or an angle.",
unitToString(u0));
}
/** Builds the error text when u0 was provided but units of u1 are required. */
private static String getInvalidUnitString(Class<? extends Unit> u0, Class<? extends Unit> u1) {
String s0 = unitToString(u0);
if (u1 == DimensionlessUnit.class)
return String.format("Invalid unit: %s. A dimensionless number is required.", s0);
return String.format("Invalid unit: %s. Units of %s are required.", s0, unitToString(u1));
}
/**
* A utility class to make dealing with a list of tokens easier
*
*/
/**
 * A utility class to make dealing with a list of tokens easier.
 * Maintains a cursor ('pos') into the token list; next() consumes,
 * peek() does not.
 */
private static class TokenList {
ArrayList<ExpTokenizer.Token> tokens;
int pos;
TokenList(ArrayList<ExpTokenizer.Token> tokens) {
this.tokens = tokens;
this.pos = 0;
}
/**
 * Consumes the next token and verifies it has the given type and value.
 * @throws ExpError if the token stream is exhausted or the token differs
 */
public void expect(int type, String val, String source) throws ExpError {
if (pos == tokens.size()) {
throw new ExpError(source, source.length(), String.format("Expected \"%s\", past the end of input", val));
}
ExpTokenizer.Token nextTok = tokens.get(pos);
if (nextTok.type != type || !nextTok.value.equals(val)) {
throw new ExpError(source, nextTok.pos, String.format("Expected \"%s\", got \"%s\"", val, nextTok.value));
}
pos++;
}
// Consumes and returns the next token, or null at end of input.
public ExpTokenizer.Token next() {
if (pos >= tokens.size()) {
return null;
}
return tokens.get(pos++);
}
// Returns the next token without consuming it, or null at end of input.
public ExpTokenizer.Token peek() {
if (pos >= tokens.size()) {
return null;
}
return tokens.get(pos);
}
}
/**
 * Constant-folding pass run over a freshly parsed expression tree.
 * visit() caches constant operands inside composite nodes; updateRef()
 * replaces entire constant subtrees with Constant nodes.
 */
private static class ConstOptimizer implements ExpressionWalker {
@Override
public void visit(ExpNode exp) throws ExpError {
// Note: Below we are passing 'null' as an EvalContext, this is not typically
// acceptable, but is 'safe enough' when we know the expression is a constant
if (exp instanceof BinaryOp) {
BinaryOp bo = (BinaryOp)exp;
if (bo.lSubExp instanceof Constant) {
// Just the left is a constant, store it in the binop
bo.lConstVal = bo.lSubExp.evaluate(null);
}
if (bo.rSubExp instanceof Constant) {
// Just the right is a constant, store it in the binop
bo.rConstVal = bo.rSubExp.evaluate(null);
}
}
if (exp instanceof Conditional) {
Conditional cond = (Conditional)exp;
if (cond.condExp instanceof Constant) {
cond.constCondRes = cond.condExp.evaluate(null);
}
if (cond.trueExp instanceof Constant) {
cond.constTrueRes = cond.trueExp.evaluate(null);
}
if (cond.falseExp instanceof Constant) {
cond.constFalseRes = cond.falseExp.evaluate(null);
}
}
if (exp instanceof FuncCall) {
FuncCall fc = (FuncCall)exp;
for (int i = 0; i < fc.args.size(); ++i) {
if (fc.args.get(i) instanceof Constant) {
fc.constResults.set(i, fc.args.get(i).evaluate(null));
}
}
}
}
/**
 * Give a node a chance to swap itself out with a different subtree.
 */
@Override
public ExpNode updateRef(ExpNode origNode) throws ExpError {
if (origNode instanceof UnaryOp) {
UnaryOp uo = (UnaryOp)origNode;
if (uo.subExp instanceof Constant) {
// This is an unary operation on a constant, we can replace it with a constant
ExpResult val = uo.evaluate(null);
return new Constant(uo.context, val, origNode.exp, uo.tokenPos);
}
}
if (origNode instanceof BinaryOp) {
BinaryOp bo = (BinaryOp)origNode;
if ((bo.lSubExp instanceof Constant) && (bo.rSubExp instanceof Constant)) {
// both sub expressions are constants, so replace the binop with a constant
ExpResult val = bo.evaluate(null);
return new Constant(bo.context, val, origNode.exp, bo.tokenPos);
}
}
return origNode;
}
}
// Shared, stateless optimizer instance used by parseExpression/parseAssignment.
private static ConstOptimizer CONST_OP = new ConstOptimizer();
/**
* The main entry point to the expression parsing system, will either return a valid
* expression that can be evaluated, or throw an error.
*/
/**
 * The main entry point to the expression parsing system, will either return a valid
 * expression that can be evaluated, or throw an error.
 * @param context unit lookup and unit-type algebra for the parse
 * @param input the expression source text
 * @throws ExpError on any tokenize or parse failure
 */
public static Expression parseExpression(ParseContext context, String input) throws ExpError {
ArrayList<ExpTokenizer.Token> ts;
ts = ExpTokenizer.tokenize(input);
TokenList tokens = new TokenList(ts);
Expression ret = new Expression(input);
ExpNode expNode = parseExp(context, tokens, 0, ret);
// Make sure we've parsed all the tokens
ExpTokenizer.Token peeked = tokens.peek();
if (peeked != null) {
throw new ExpError(input, peeked.pos, "Unexpected additional values");
}
// Constant-fold the tree before installing it as the root.
expNode.walk(CONST_OP);
expNode = CONST_OP.updateRef(expNode); // Finally, give the entire expression a chance to optimize itself into a constant
ret.setRootNode(expNode);
return ret;
}
/**
 * Precedence-climbing parser: parses an opening sub-expression, then keeps
 * binding binary operators whose binding power exceeds 'bindPower'.
 * The conditional (?:) operator is only bound at the top level (bindPower 0).
 */
private static ExpNode parseExp(ParseContext context, TokenList tokens, double bindPower, Expression exp) throws ExpError {
ExpNode lhs = parseOpeningExp(context, tokens, bindPower, exp);
// Now peek for a binary op to modify this expression
while (true) {
ExpTokenizer.Token peeked = tokens.peek();
if (peeked == null || peeked.type != ExpTokenizer.SYM_TYPE) {
break;
}
BinaryOpEntry binOp = getBinaryOp(peeked.value);
if (binOp != null && binOp.bindingPower > bindPower) {
// The next token is a binary op and powerful enough to bind us
lhs = handleBinOp(context, tokens, lhs, binOp, exp, peeked.pos);
continue;
}
// Specific check for binding the conditional (?:) operator
if (peeked.value.equals("?") && bindPower == 0) {
lhs = handleConditional(context, tokens, lhs, exp, peeked.pos);
continue;
}
break;
}
// We have bound as many operators as we can, return it
return lhs;
}
/**
 * Consumes the operator token and parses its right-hand side, producing a
 * BinaryOp node with 'lhs' as the left operand.
 */
private static ExpNode handleBinOp(ParseContext context, TokenList tokens, ExpNode lhs, BinaryOpEntry binOp, Expression exp, int pos) throws ExpError {
tokens.next(); // Consume the operator
// For right associative operators, we weaken the binding power a bit at application time (but not testing time)
double assocMod = binOp.rAssoc ? -0.5 : 0;
ExpNode rhs = parseExp(context, tokens, binOp.bindingPower + assocMod, exp);
return new BinaryOp(context, lhs, rhs, binOp.function, exp, pos);
}
/**
 * Parses the remainder of a conditional "lhs ? trueExp : falseExp" after the
 * '?' has been peeked; 'lhs' becomes the condition.
 */
private static ExpNode handleConditional(ParseContext context, TokenList tokens, ExpNode lhs, Expression exp, int pos) throws ExpError {
tokens.next(); // Consume the '?'
ExpNode trueExp = parseExp(context, tokens, 0, exp);
tokens.expect(ExpTokenizer.SYM_TYPE, ":", exp.source);
ExpNode falseExp = parseExp(context, tokens , 0, exp);
return new Conditional(context, lhs, trueExp, falseExp, exp, pos);
}
/**
 * Parses an assignment of the form "identifier = expression" and returns the
 * destination name chain together with the (constant-folded) value expression.
 * @throws ExpError on any tokenize or parse failure
 */
public static Assignment parseAssignment(ParseContext context, String input) throws ExpError {
ArrayList<ExpTokenizer.Token> ts;
ts = ExpTokenizer.tokenize(input);
TokenList tokens = new TokenList(ts);
ExpTokenizer.Token nextTok = tokens.next();
if (nextTok == null || (nextTok.type != ExpTokenizer.SQ_TYPE &&
!nextTok.value.equals("this"))) {
throw new ExpError(input, 0, "Assignments must start with an identifier");
}
String[] destination = parseIdentifier(nextTok, tokens, new Expression(input));
nextTok = tokens.next();
// Bug fix: the original threw with 'nextTok.pos' even when nextTok was null,
// raising a NullPointerException instead of the intended parse error when the
// input ended after the identifier.
if (nextTok == null) {
throw new ExpError(input, input.length(), "Expected '=' in assignment");
}
if (nextTok.type != ExpTokenizer.SYM_TYPE || !nextTok.value.equals("=")) {
throw new ExpError(input, nextTok.pos, "Expected '=' in assignment");
}
Assignment ret = new Assignment();
ret.destination = destination;
ret.value = new Expression(input);
ExpNode expNode = parseExp(context, tokens, 0, ret.value);
// Constant-fold the value expression, same as parseExpression() does.
expNode.walk(CONST_OP);
expNode = CONST_OP.updateRef(expNode); // Finally, give the entire expression a chance to optimize itself into a constant
ret.value.setRootNode(expNode);
return ret;
}
// The first half of expression parsing, parse a simple expression based on the next token
private static ExpNode parseOpeningExp(ParseContext context, TokenList tokens, double bindPower, Expression exp) throws ExpError{
ExpTokenizer.Token nextTok = tokens.next(); // consume the first token
if (nextTok == null) {
throw new ExpError(exp.source, exp.source.length(), "Unexpected end of string");
}
if (nextTok.type == ExpTokenizer.NUM_TYPE) {
return parseConstant(context, nextTok.value, tokens, exp, nextTok.pos);
}
if (nextTok.type == ExpTokenizer.VAR_TYPE &&
!nextTok.value.equals("this")) {
return parseFuncCall(context, nextTok.value, tokens, exp, nextTok.pos);
}
if (nextTok.type == ExpTokenizer.SQ_TYPE ||
nextTok.value.equals("this")) {
String[] vals = parseIdentifier(nextTok, tokens, exp);
return new Variable(context, vals, exp, nextTok.pos);
}
// The next token must be a symbol
// handle parenthesis
if (nextTok.value.equals("(")) {
ExpNode expNode = parseExp(context, tokens, 0, exp);
tokens.expect(ExpTokenizer.SYM_TYPE, ")", exp.source); // Expect the closing paren
return expNode;
}
UnaryOpEntry oe = getUnaryOp(nextTok.value);
if (oe != null) {
ExpNode expNode = parseExp(context, tokens, oe.bindingPower, exp);
return new UnaryOp(context, expNode, oe.function, exp, nextTok.pos);
}
// We're all out of tricks here, this is an unknown expression
throw new ExpError(exp.source, nextTok.pos, "Can not parse expression");
}
/**
 * Builds a Constant node from a numeric token, applying a unit's scale factor
 * and type if the number is immediately followed by a square-quoted unit name.
 */
private static ExpNode parseConstant(ParseContext context, String constant, TokenList tokens, Expression exp, int pos) throws ExpError {
double mult = 1;
Class<? extends Unit> ut = DimensionlessUnit.class;
ExpTokenizer.Token peeked = tokens.peek();
if (peeked != null && peeked.type == ExpTokenizer.SQ_TYPE) {
// This constant is followed by a square quoted token, it must be the unit
tokens.next(); // Consume unit token
UnitData unit = context.getUnitByName(peeked.value);
if (unit == null) {
throw new ExpError(exp.source, peeked.pos, "Unknown unit: %s", peeked.value);
}
mult = unit.scaleFactor;
ut = unit.unitType;
}
return new Constant(context, new ExpResult(Double.parseDouble(constant)*mult, ut), exp, pos);
}
/**
 * Parses a function call "name(arg, arg, ...)" after the name token has been
 * consumed, validates the argument count against the registry, and returns a
 * FuncCall node.
 * @throws ExpError for an unknown function, wrong argument count, or malformed list
 */
private static ExpNode parseFuncCall(ParseContext context, String funcName, TokenList tokens, Expression exp, int pos) throws ExpError {
tokens.expect(ExpTokenizer.SYM_TYPE, "(", exp.source);
ArrayList<ExpNode> arguments = new ArrayList<>();
ExpTokenizer.Token peeked = tokens.peek();
if (peeked == null) {
throw new ExpError(exp.source, exp.source.length(), "Unexpected end of input in argument list");
}
boolean isEmpty = false;
if (peeked.value.equals(")")) {
// Special case with empty argument list
isEmpty = true;
tokens.next(); // Consume closing parens
}
while (!isEmpty) {
ExpNode nextArg = parseExp(context, tokens, 0, exp);
arguments.add(nextArg);
ExpTokenizer.Token nextTok = tokens.next();
if (nextTok == null) {
throw new ExpError(exp.source, exp.source.length(), "Unexpected end of input in argument list.");
}
if (nextTok.value.equals(")")) {
break;
}
if (nextTok.value.equals(",")) {
continue;
}
// Unexpected token (typo fix: was "arguement list")
throw new ExpError(exp.source, nextTok.pos, "Unexpected token in argument list");
}
FunctionEntry fe = getFunctionEntry(funcName);
if (fe == null) {
// Typo fix: was "Uknown function"
throw new ExpError(exp.source, pos, "Unknown function: \"%s\"", funcName);
}
// numMinArgs/numMaxArgs of -1 mean "no limit" and are skipped here.
if (fe.numMinArgs >= 0 && arguments.size() < fe.numMinArgs){
throw new ExpError(exp.source, pos, "Function \"%s\" expects at least %d arguments. %d provided.",
funcName, fe.numMinArgs, arguments.size());
}
if (fe.numMaxArgs >= 0 && arguments.size() > fe.numMaxArgs){
throw new ExpError(exp.source, pos, "Function \"%s\" expects at most %d arguments. %d provided.",
funcName, fe.numMaxArgs, arguments.size());
}
return new FuncCall(context, fe.function, arguments, exp, pos);
}
/**
 * Parses a dot-separated identifier chain (e.g. {@code a.b.c}) into its parts.
 * @param firstName token for the first name segment (already consumed by the caller)
 * @param tokens token stream; any following '.'-identifier pairs are consumed
 * @param exp expression being parsed (used for error reporting)
 * @return the interned name segments, in source order
 * @throws ExpError if a '.' is not followed by an identifier token
 */
private static String[] parseIdentifier(ExpTokenizer.Token firstName, TokenList tokens, Expression exp) throws ExpError {
	ArrayList<String> vals = new ArrayList<>();
	vals.add(firstName.value.intern());
	while (true) {
		ExpTokenizer.Token peeked = tokens.peek();
		if (peeked == null || peeked.type != ExpTokenizer.SYM_TYPE || !peeked.value.equals(".")) {
			break;
		}
		// Next token is a '.' so parse another name
		tokens.next(); // consume
		ExpTokenizer.Token nextName = tokens.next();
		if (nextName == null || nextName.type != ExpTokenizer.VAR_TYPE) {
			throw new ExpError(exp.source, peeked.pos, "Expected Identifier after '.'");
		}
		vals.add(nextName.value.intern());
	}
	// Use List.toArray instead of a hand-rolled element-by-element copy loop
	return vals.toArray(new String[vals.size()]);
}
}
| JS: add choose function to expressions
Signed-off-by: Harry King <[email protected]>
Signed-off-by: Stephen Wong <[email protected]>
| src/main/java/com/jaamsim/input/ExpParser.java | JS: add choose function to expressions | <ide><path>rc/main/java/com/jaamsim/input/ExpParser.java
<ide> }
<ide> });
<ide>
<add> addFunction("choose", 2, -1, new CallableFunc() {
<add> @Override
<add> public ExpResult call(ParseContext context, ExpResult[] args, String source, int pos) throws ExpError {
<add> if (args[0].unitType != DimensionlessUnit.class)
<add> throw new ExpError(source, pos, getInvalidUnitString(args[0].unitType, DimensionlessUnit.class));
<add>
<add> for (int i = 2; i < args.length; ++ i) {
<add> if (args[1].unitType != args[i].unitType)
<add> throw new ExpError(source, pos, getUnitMismatchString(args[1].unitType, args[i].unitType));
<add> }
<add>
<add> int k = (int) args[0].value;
<add> if (k < 1 || k >= args.length)
<add> throw new ExpError(source, pos,
<add> String.format("Invalid index: %s. Index must be between 1 and %s.", k, args.length-1));
<add>
<add> return new ExpResult(args[k].value, args[k].unitType);
<add> }
<add> });
<add>
<ide> ///////////////////////////////////////////////////
<ide> // Mathematical Constants
<ide> addFunction("E", 0, 0, new CallableFunc() { |
|
Java | bsd-3-clause | 2d618a18f4b2ad14cc17b15d155320a088f89691 | 0 | ksclarke/basex,vincentml/basex,BaseXdb/basex,dimitarp/basex,joansmith/basex,JensErat/basex,deshmnnit04/basex,ksclarke/basex,ksclarke/basex,dimitarp/basex,ksclarke/basex,ksclarke/basex,vincentml/basex,vincentml/basex,vincentml/basex,dimitarp/basex,deshmnnit04/basex,drmacro/basex,BaseXdb/basex,JensErat/basex,dimitarp/basex,BaseXdb/basex,drmacro/basex,joansmith/basex,JensErat/basex,deshmnnit04/basex,joansmith/basex,BaseXdb/basex,drmacro/basex,vincentml/basex,deshmnnit04/basex,deshmnnit04/basex,BaseXdb/basex,ksclarke/basex,JensErat/basex,drmacro/basex,BaseXdb/basex,joansmith/basex,dimitarp/basex,JensErat/basex,BaseXdb/basex,ksclarke/basex,ksclarke/basex,BaseXdb/basex,drmacro/basex,joansmith/basex,joansmith/basex,deshmnnit04/basex,dimitarp/basex,dimitarp/basex,joansmith/basex,dimitarp/basex,drmacro/basex,deshmnnit04/basex,joansmith/basex,deshmnnit04/basex,ksclarke/basex,joansmith/basex,ksclarke/basex,JensErat/basex,ksclarke/basex,JensErat/basex,ksclarke/basex,BaseXdb/basex,joansmith/basex,deshmnnit04/basex,joansmith/basex,dimitarp/basex,vincentml/basex,JensErat/basex,JensErat/basex,dimitarp/basex,deshmnnit04/basex,drmacro/basex,vincentml/basex,deshmnnit04/basex,vincentml/basex,deshmnnit04/basex,JensErat/basex,BaseXdb/basex,joansmith/basex,vincentml/basex,BaseXdb/basex,drmacro/basex,vincentml/basex,drmacro/basex,drmacro/basex,dimitarp/basex,JensErat/basex,vincentml/basex,JensErat/basex,dimitarp/basex,vincentml/basex,BaseXdb/basex,drmacro/basex,drmacro/basex | package org.basex.util.options;
import static java.lang.Integer.*;
import static org.basex.core.Prop.*;
import static org.basex.util.Token.*;
import java.io.*;
import java.lang.reflect.*;
import java.util.*;
import java.util.Map.Entry;
import org.basex.core.*;
import org.basex.io.*;
import org.basex.util.*;
import org.basex.util.list.*;
/**
* This class provides methods for accessing, reading and storing options.
* Options (name/value pairs) may either be instances of the {@link Option} class.
* If an instance of this class contains no pre-defined options, assigned options will
* be added as free options.
*
* @author BaseX Team 2005-13, BSD License
* @author Christian Gruen
*/
public class Options implements Iterable<Option<?>> {
/** Comment in configuration file. */
private static final String PROPUSER = "# Local Options";
/** Map with option names and definition. */
protected final TreeMap<String, Option<?>> options = new TreeMap<String, Option<?>>();
/** Map with option names and values. */
private final TreeMap<String, Object> values = new TreeMap<String, Object>();
/** Free option definitions. */
private final HashMap<String, String> free = new HashMap<String, String>();
/** Options, cached from an input file. */
private final StringBuilder user = new StringBuilder();
/** Options file. */
private IOFile file;
/**
* Default constructor.
*/
public Options() {
this(null);
}
/**
* Constructor with options file.
* @param opts options file
*/
protected Options(final IOFile opts) {
init();
if(opts != null) read(opts);
// overwrite initialized options with system properties
setSystem();
}
/**
* Initializes all options.
*/
private void init() {
try {
for(final Option<?> opt : options(getClass())) {
if(opt instanceof Comment) continue;
final String name = opt.name();
values.put(name, opt.value());
options.put(name, opt);
}
} catch(final Exception ex) {
Util.notexpected(ex);
}
}
/**
* Writes the options to disk.
*/
public final synchronized void write() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(file.file()));
boolean first = true;
for(final Option<?> opt : options(getClass())) {
final String name = opt.name();
if(opt instanceof Comment) {
if(!first) bw.write(NL);
bw.write("# " + name + NL);
} else if(opt instanceof NumbersOption) {
final int[] ints = get((NumbersOption) opt);
final int is = ints == null ? 0 : ints.length;
for(int i = 0; i < is; ++i) bw.write(name + i + " = " + ints[i] + NL);
} else if(opt instanceof StringsOption) {
final String[] strings = get((StringsOption) opt);
final int ss = strings == null ? 0 : strings.length;
bw.write(name + " = " + ss + NL);
for(int i = 0; i < ss; ++i) bw.write(name + (i + 1) + " = " + strings[i] + NL);
} else {
bw.write(name + " = " + get(opt) + NL);
}
first = false;
}
bw.write(NL + PROPUSER + NL);
bw.write(user.toString());
} catch(final Exception ex) {
Util.errln("% could not be written.", file);
Util.debug(ex);
} finally {
if(bw != null) try { bw.close(); } catch(final IOException ignored) { }
}
}
/**
* Returns the option with the specified name.
* @param name name of the option
* @return value (may be {@code null})
*/
public final synchronized Option<?> option(final String name) {
return options.get(name);
}
/**
* Returns the value of the specified option.
* @param option option
* @return value (may be {@code null})
*/
public final synchronized Object get(final Option<?> option) {
return values.get(option.name());
}
/**
* Sets an option to a value without checking its type.
* @param option option
* @param value value to be assigned
*/
public final synchronized void put(final Option<?> option, final Object value) {
values.put(option.name(), value);
}
/**
* Checks if a value was set for the specified option.
* @param option option
* @return result of check
*/
public final synchronized boolean contains(final Option<?> option) {
return get(option) != null;
}
/**
* Returns the requested string.
* @param option option to be found
* @return value
*/
public final synchronized String get(final StringOption option) {
return (String) get((Option<?>) option);
}
/**
* Returns the requested number.
* @param option option to be found
* @return value
*/
public final synchronized Integer get(final NumberOption option) {
return (Integer) get((Option<?>) option);
}
/**
* Returns the requested boolean.
* @param option option to be found
* @return value
*/
public final synchronized Boolean get(final BooleanOption option) {
return (Boolean) get((Option<?>) option);
}
/**
* Returns the requested string array.
* @param option option to be found
* @return value
*/
public final synchronized String[] get(final StringsOption option) {
final String[] v = (String[]) get((Option<?>) option);
return v == null ? null : v.clone();
}
/**
* Returns the requested integer array.
* @param option option to be found
* @return value
*/
public final synchronized int[] get(final NumbersOption option) {
final int[] v = (int[]) get((Option<?>) option);
return v == null ? null : v.clone();
}
/**
* Returns the requested options.
* @param option option to be found
* @param <O> options
* @return value
*/
@SuppressWarnings("unchecked")
public final synchronized <O extends Options> O get(final OptionsOption<O> option) {
final O o = (O) get((Option<?>) option);
if(o == null) return null;
try {
final O n = ((Class<O>) o.getClass()).newInstance();
n.parse(o.toString());
return n;
} catch(final Exception ex) {
throw Util.notexpected(ex);
}
}
/**
* Returns the requested enum value.
* @param option option to be found
* @param <V> enumeration value
* @return value
*/
@SuppressWarnings("unchecked")
public final synchronized <V extends Enum<V>> V get(final EnumOption<V> option) {
return (V) get((Option<?>) option);
}
/**
* Sets the string value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final StringOption option, final String value) {
put(option, value);
}
/**
* Sets the integer value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final NumberOption option, final int value) {
put(option, value);
}
/**
* Sets the boolean value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final BooleanOption option, final boolean value) {
put(option, value);
}
/**
* Sets the string array value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final StringsOption option, final String[] value) {
put(option, value);
}
/**
* Sets the integer array value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final NumbersOption option, final int[] value) {
put(option, value);
}
/**
* Sets the options of an option.
* @param option option to be set
* @param value value to be set
* @param <O> options
*/
public final synchronized <O extends Options> void set(final OptionsOption<O> option,
final O value) {
put(option, value);
}
/**
* Sets the enumeration of an option.
* @param option option to be set
* @param value value to be set
* @param <V> enumeration value
*/
public final synchronized <V extends Enum<V>> void set(final EnumOption<V> option,
final Enum<V> value) {
put(option, value);
}
/**
* Sets the enumeration of an option.
* @param option option to be set
* @param value string value, which will be converted to an enum value or {@code null}
* @param <V> enumeration value
*/
public final synchronized <V extends Enum<V>> void set(final EnumOption<V> option,
final String value) {
put(option, option.get(value));
}
/**
* Assigns a value after casting it to the correct type. If the option is unknown,
* it will be added as free option.
* @param name name of option
* @param val value
* @throws BaseXException database exception
*/
public synchronized void assign(final String name, final String val) throws BaseXException {
if(options.isEmpty()) {
free.put(name, val);
} else {
assign(name, val, -1);
}
}
/**
* Returns all name/value pairs without pre-defined option.
* @return options
*/
public final synchronized HashMap<String, String> free() {
return free;
}
/**
* Returns an error string for an unknown option.
* @param name name of option
* @return error string
*/
public final synchronized String error(final String name) {
final String sim = similar(name);
return Util.info(sim != null ? Text.UNKNOWN_OPT_SIMILAR_X_X : Text.UNKNOWN_OPTION_X, name, sim);
}
/**
* Inverts the boolean value of an option.
* @param option option
* @return new value
*/
public final synchronized boolean invert(final BooleanOption option) {
final boolean val = !get(option);
set(option, val);
return val;
}
/**
* Scans the system properties and initializes the database options.
* All properties starting with {@code org.basex.} will be assigned as options.
*/
public final void setSystem() {
// collect parameters that start with "org.basex."
final StringList sl = new StringList();
for(final Object key : System.getProperties().keySet()) {
final String k = key.toString();
if(k.startsWith(DBPREFIX)) sl.add(k);
}
// assign properties
for(final String key : sl) {
final String v = System.getProperty(key);
try {
assign(key.substring(DBPREFIX.length()).toUpperCase(Locale.ENGLISH), v);
} catch(final BaseXException ignore) { /* may belong to another Options instance */ }
}
}
@Override
public final synchronized Iterator<Option<?>> iterator() {
return options.values().iterator();
}
@Override
public final synchronized String toString() {
// only those options are listed the value of which differs from default value
final StringBuilder sb = new StringBuilder();
for(final Entry<String, Object> e : values.entrySet()) {
final String name = e.getKey();
final Object value = e.getValue();
if(value == null) continue;
final StringList sl = new StringList();
final Object value2 = options.get(name).value();
if(value instanceof String[]) {
for(final String s : (String[]) value) sl.add(s);
} else if(value instanceof int[]) {
for(final int s : (int[]) value) sl.add(Integer.toString(s));
} else if(value instanceof Options) {
final String s = value.toString();
if(value2 == null || !s.equals(value2.toString())) sl.add(s);
} else if(!value.equals(value2)) {
sl.add(value.toString());
}
for(final String s : sl) {
if(sb.length() != 0) sb.append(',');
sb.append(name).append('=').append(s.replace(",", ",,"));
}
}
return sb.toString();
}
// STATIC METHODS =====================================================================
/**
* Returns a system property. If necessary, the key will be converted to lower-case
* and prefixed with the {@link Prop#DBPREFIX} string.
* @param option option
* @return value, or empty string
*/
public static String getSystem(final Option<?> option) {
String name = option.name().toLowerCase(Locale.ENGLISH);
if(!name.startsWith(DBPREFIX)) name = DBPREFIX + name;
final String v = System.getProperty(name);
return v == null ? "" : v;
}
/**
* Sets a system property if it has not been set before.
* @param option option
* @param val value
*/
public static void setSystem(final Option<?> option, final Object val) {
setSystem(option.name(), val);
}
/**
* Sets a system property if it has not been set before. If necessary, the key will
* be converted to lower-case and prefixed with the {@link Prop#DBPREFIX} string.
* @param key key
* @param val value
*/
public static void setSystem(final String key, final Object val) {
final String name = key.indexOf('.') == -1 ? DBPREFIX + key.toLowerCase(Locale.ENGLISH) : key;
if(System.getProperty(name) == null) System.setProperty(name, val.toString());
}
/**
* Returns all options from the specified class.
* @param clz options class
* @return option instances
* @throws IllegalAccessException exception
*/
public static Option<?>[] options(final Class<? extends Options> clz)
throws IllegalAccessException {
final ArrayList<Option<?>> opts = new ArrayList<Option<?>>();
for(final Field f : clz.getFields()) {
if(!Modifier.isStatic(f.getModifiers())) continue;
final Object obj = f.get(null);
if(obj instanceof Option) opts.add((Option<?>) obj);
}
return opts.toArray(new Option[opts.size()]);
}
/**
 * Returns a message listing the values that are allowed for an option.
 * @param option option
 * @param all allowed values
 * @return message string
 */
public static String allowed(final Option<?> option, final Object... all) {
  final TokenBuilder list = new TokenBuilder();
  for(final Object value : all) {
    // separate entries with commas (no separator before the first entry)
    if(!list.isEmpty()) list.add(",");
    list.add(value.toString());
  }
  return Util.info(Text.OPT_ONEOF, option.name(), list);
}
// PRIVATE METHODS ====================================================================
/**
 * Parses an option string and sets the options accordingly.
 * The string is a comma-separated list of {@code key=value} pairs; a literal
 * comma inside a value is escaped by doubling it ({@code ,,}).
 * NOTE(review): despite sitting under the "PRIVATE METHODS" banner, this
 * method is public and is called from outside the class.
 * @param string options string
 * @throws BaseXException database exception
 */
public synchronized void parse(final String string) throws BaseXException {
  final int sl = string.length();
  int i = 0;
  while(i < sl) {
    // key runs up to the next '='; if none is found, the rest is the key
    int k = string.indexOf('=', i);
    if(k == -1) k = sl;
    final String key = string.substring(i, k).trim();
    final StringBuilder val = new StringBuilder();
    i = k;
    while(++i < sl) {
      final char ch = string.charAt(i);
      // a single ',' terminates the value; a doubled ',,' is an escaped comma
      // (the inner ++i consumes the second ',' of an escape pair)
      if(ch == ',' && (++i == sl || string.charAt(i) != ',')) break;
      val.append(ch);
    }
    assign(key, val.toString());
  }
}
/**
* Reads the configuration file and initializes the options.
* The file is located in the project home directory.
* @param opts options file
*/
private synchronized void read(final IOFile opts) {
file = opts;
final StringList read = new StringList();
final StringList errs = new StringList();
final boolean exists = file.exists();
if(exists) {
BufferedReader br = null;
try {
br = new BufferedReader(new FileReader(file.file()));
boolean local = false;
for(String line; (line = br.readLine()) != null;) {
line = line.trim();
// start of local options
if(line.equals(PROPUSER)) {
local = true;
continue;
}
if(local) user.append(line).append(NL);
if(line.isEmpty() || line.charAt(0) == '#') continue;
final int d = line.indexOf('=');
if(d < 0) {
errs.add("line \"" + line + "\" ignored.");
continue;
}
final String val = line.substring(d + 1).trim();
String name = line.substring(0, d).trim();
// extract numeric value in key
int num = 0;
final int ss = name.length();
for(int s = 0; s < ss; ++s) {
if(Character.isDigit(name.charAt(s))) {
num = toInt(name.substring(s));
name = name.substring(0, s);
break;
}
}
if(local) {
// cache local options as system properties
setSystem(name, val);
} else {
try {
assign(name, val, num);
read.add(name);
} catch(final BaseXException ex) {
errs.add(ex.getMessage());
}
}
}
} catch(final IOException ex) {
errs.add("file could not be parsed.");
Util.errln(ex);
} finally {
if(br != null) try { br.close(); } catch(final IOException ignored) { }
}
}
// check if all mandatory files have been read
boolean ok = true;
if(errs.isEmpty()) {
try {
for(final Option<?> opt : options(getClass())) {
if(ok && !(opt instanceof Comment)) ok = read.contains(opt.name());
}
} catch(final IllegalAccessException ex) {
Util.notexpected(ex);
}
}
if(!ok || !exists || !errs.isEmpty()) {
write();
errs.add("writing new configuration file.");
for(final String s : errs) Util.errln(file + ": " + s);
}
}
/**
 * Assigns the specified name and value, converting the value to the
 * declared type of the option.
 * @param name name of option
 * @param val value of option
 * @param num index for array-valued options; {@code -1} appends a new entry
 * @throws BaseXException database exception
 */
private synchronized void assign(final String name, final String val, final int num)
    throws BaseXException {

  final Option<?> option = options.get(name);
  if(option == null) {
    throw new BaseXException(error(name));
  } else if(option instanceof BooleanOption) {
    final boolean v;
    if(val == null || val.isEmpty()) {
      // an empty value toggles the current setting
      final Boolean b = get((BooleanOption) option);
      if(b == null) throw new BaseXException(Text.OPT_BOOLEAN, option.name());
      v = !b;
    } else {
      v = Util.yes(val);
      if(!v && !Util.no(val)) throw new BaseXException(Text.OPT_BOOLEAN, option.name());
    }
    put(option, v);
  } else if(option instanceof NumberOption) {
    final int v = toInt(val);
    if(v == MIN_VALUE) throw new BaseXException(Text.OPT_NUMBER, option.name());
    put(option, v);
  } else if(option instanceof StringOption) {
    put(option, val);
  } else if(option instanceof EnumOption) {
    final EnumOption<?> eo = (EnumOption<?>) option;
    final Object v = eo.get(val);
    if(v == null) throw new BaseXException(allowed(option, (Object[]) eo.values()));
    put(option, v);
  } else if(option instanceof OptionsOption) {
    final Options o = ((OptionsOption<?>) option).newInstance();
    o.parse(val);
    put(option, o);
  } else if(option instanceof NumbersOption) {
    final int v = toInt(val);
    if(v == MIN_VALUE) throw new BaseXException(Text.OPT_NUMBER, option.name());
    final int[] ii = (int[]) get(option);
    if(num == -1) {
      // append the value to the (possibly still unassigned) array
      final IntList il = new IntList((ii == null ? 0 : ii.length) + 1);
      if(ii != null) for(final int i : ii) il.add(i);
      il.add(v);
      put(option, il.toArray());
    } else {
      // fix: guard against a null array before dereferencing its length;
      // previously an indexed assignment to an unset option threw an NPE
      // instead of the proper offset error
      if(ii == null || num < 0 || num >= ii.length)
        throw new BaseXException(Text.OPT_OFFSET, option.name());
      ii[num] = v;
    }
  } else if(option instanceof StringsOption) {
    final String[] ss = (String[]) get(option);
    if(num == -1) {
      // append the value to the (possibly still unassigned) array
      final StringList sl = new StringList((ss == null ? 0 : ss.length) + 1);
      if(ss != null) for(final String s : ss) sl.add(s);
      sl.add(val);
      put(option, sl.toArray());
    } else if(num == 0) {
      // index 0 resets the array to the given size
      final int v = toInt(val);
      if(v == MIN_VALUE) throw new BaseXException(Text.OPT_NUMBER, option.name());
      values.put(name, new String[v]);
    } else {
      // fix: guard against a null array before dereferencing its length;
      // previously an indexed assignment to an unset option threw an NPE
      // instead of the proper offset error
      if(ss == null || num <= 0 || num > ss.length)
        throw new BaseXException(Text.OPT_OFFSET, option.name());
      ss[num - 1] = val;
    }
  }
}
/**
 * Looks up an option name that is similar to the supplied name.
 * @param name name to be found
 * @return first similar option name, or {@code null} if none matches
 */
private String similar(final String name) {
  final Levenshtein lev = new Levenshtein();
  final byte[] token = token(name);
  String match = null;
  for(final String candidate : options.keySet()) {
    if(lev.similar(token, token(candidate))) {
      match = candidate;
      break;
    }
  }
  return match;
}
}
| basex-core/src/main/java/org/basex/util/options/Options.java | package org.basex.util.options;
import static java.lang.Integer.*;
import static org.basex.core.Prop.*;
import static org.basex.util.Token.*;
import java.io.*;
import java.lang.reflect.*;
import java.util.*;
import java.util.Map.Entry;
import org.basex.core.*;
import org.basex.io.*;
import org.basex.util.*;
import org.basex.util.list.*;
/**
* This class provides methods for accessing, reading and storing options.
* Options (name/value pairs) may either be instances of the {@link Option} class.
* If an instance of this class contains no pre-defined options, assigned options will
* be added as free options.
*
* @author BaseX Team 2005-13, BSD License
* @author Christian Gruen
*/
public class Options implements Iterable<Option<?>> {
/** Comment in configuration file. */
private static final String PROPUSER = "# Local Options";
/** Map with option names and definition. */
protected final TreeMap<String, Option<?>> options = new TreeMap<String, Option<?>>();
/** Map with option names and values. */
private final TreeMap<String, Object> values = new TreeMap<String, Object>();
/** Free option definitions. */
private final HashMap<String, String> free = new HashMap<String, String>();
/** Options, cached from an input file. */
private final StringBuilder user = new StringBuilder();
/** Options file. */
private IOFile file;
/**
* Default constructor.
*/
public Options() {
init();
}
/**
* Constructor with options string.
* @param opts options strings
* @throws BaseXException database exception
*/
protected Options(final String opts) throws BaseXException {
this();
parse(opts);
}
/**
* Constructor with options file.
* @param opts options file
*/
protected Options(final IOFile opts) {
this();
if(opts != null) read(opts);
// overwrite initialized options with system properties
setSystem();
}
/**
* Initializes all options.
*/
private void init() {
try {
for(final Option<?> opt : options(getClass())) {
if(opt instanceof Comment) continue;
final String name = opt.name();
values.put(name, opt.value());
options.put(name, opt);
}
} catch(final Exception ex) {
Util.notexpected(ex);
}
}
/**
* Writes the options to disk.
*/
public final synchronized void write() {
BufferedWriter bw = null;
try {
bw = new BufferedWriter(new FileWriter(file.file()));
boolean first = true;
for(final Option<?> opt : options(getClass())) {
final String name = opt.name();
if(opt instanceof Comment) {
if(!first) bw.write(NL);
bw.write("# " + name + NL);
} else if(opt instanceof NumbersOption) {
final int[] ints = get((NumbersOption) opt);
final int is = ints == null ? 0 : ints.length;
for(int i = 0; i < is; ++i) bw.write(name + i + " = " + ints[i] + NL);
} else if(opt instanceof StringsOption) {
final String[] strings = get((StringsOption) opt);
final int ss = strings == null ? 0 : strings.length;
bw.write(name + " = " + ss + NL);
for(int i = 0; i < ss; ++i) bw.write(name + (i + 1) + " = " + strings[i] + NL);
} else {
bw.write(name + " = " + get(opt) + NL);
}
first = false;
}
bw.write(NL + PROPUSER + NL);
bw.write(user.toString());
} catch(final Exception ex) {
Util.errln("% could not be written.", file);
Util.debug(ex);
} finally {
if(bw != null) try { bw.close(); } catch(final IOException ignored) { }
}
}
/**
* Returns the option with the specified name.
* @param name name of the option
* @return value (may be {@code null})
*/
public final synchronized Option<?> option(final String name) {
return options.get(name);
}
/**
* Returns the value of the specified option.
* @param option option
* @return value (may be {@code null})
*/
public final synchronized Object get(final Option<?> option) {
return values.get(option.name());
}
/**
* Sets an option to a value without checking its type.
* @param option option
* @param value value to be assigned
*/
public final synchronized void put(final Option<?> option, final Object value) {
values.put(option.name(), value);
}
/**
* Checks if a value was set for the specified option.
* @param option option
* @return result of check
*/
public final synchronized boolean contains(final Option<?> option) {
return get(option) != null;
}
/**
* Returns the requested string.
* @param option option to be found
* @return value
*/
public final synchronized String get(final StringOption option) {
return (String) get((Option<?>) option);
}
/**
* Returns the requested number.
* @param option option to be found
* @return value
*/
public final synchronized Integer get(final NumberOption option) {
return (Integer) get((Option<?>) option);
}
/**
* Returns the requested boolean.
* @param option option to be found
* @return value
*/
public final synchronized Boolean get(final BooleanOption option) {
return (Boolean) get((Option<?>) option);
}
/**
* Returns the requested string array.
* @param option option to be found
* @return value
*/
public final synchronized String[] get(final StringsOption option) {
final String[] v = (String[]) get((Option<?>) option);
return v == null ? null : v.clone();
}
/**
* Returns the requested integer array.
* @param option option to be found
* @return value
*/
public final synchronized int[] get(final NumbersOption option) {
final int[] v = (int[]) get((Option<?>) option);
return v == null ? null : v.clone();
}
/**
* Returns the requested options.
* @param option option to be found
* @param <O> options
* @return value
*/
@SuppressWarnings("unchecked")
public final synchronized <O extends Options> O get(final OptionsOption<O> option) {
final O o = (O) get((Option<?>) option);
if(o == null) return null;
try {
final O n = ((Class<O>) o.getClass()).newInstance();
n.parse(o.toString());
return n;
} catch(final Exception ex) {
throw Util.notexpected(ex);
}
}
/**
* Returns the requested enum value.
* @param option option to be found
* @param <V> enumeration value
* @return value
*/
@SuppressWarnings("unchecked")
public final synchronized <V extends Enum<V>> V get(final EnumOption<V> option) {
return (V) get((Option<?>) option);
}
/**
* Sets the string value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final StringOption option, final String value) {
put(option, value);
}
/**
* Sets the integer value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final NumberOption option, final int value) {
put(option, value);
}
/**
* Sets the boolean value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final BooleanOption option, final boolean value) {
put(option, value);
}
/**
* Sets the string array value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final StringsOption option, final String[] value) {
put(option, value);
}
/**
* Sets the integer array value of an option.
* @param option option to be set
* @param value value to be written
*/
public final synchronized void set(final NumbersOption option, final int[] value) {
put(option, value);
}
/**
* Sets the options of an option.
* @param option option to be set
* @param value value to be set
* @param <O> options
*/
public final synchronized <O extends Options> void set(final OptionsOption<O> option,
final O value) {
put(option, value);
}
/**
* Sets the enumeration of an option.
* @param option option to be set
* @param value value to be set
* @param <V> enumeration value
*/
public final synchronized <V extends Enum<V>> void set(final EnumOption<V> option,
final Enum<V> value) {
put(option, value);
}
/**
* Sets the enumeration of an option.
* @param option option to be set
* @param value string value, which will be converted to an enum value or {@code null}
* @param <V> enumeration value
*/
public final synchronized <V extends Enum<V>> void set(final EnumOption<V> option,
final String value) {
put(option, option.get(value));
}
/**
* Assigns a value after casting it to the correct type. If the option is unknown,
* it will be added as free option.
* @param name name of option
* @param val value
* @throws BaseXException database exception
*/
public synchronized void assign(final String name, final String val) throws BaseXException {
if(options.isEmpty()) {
free.put(name, val);
} else {
assign(name, val, -1);
}
}
/**
* Returns all name/value pairs without pre-defined option.
* @return options
*/
public final synchronized HashMap<String, String> free() {
return free;
}
/**
* Returns an error string for an unknown option.
* @param name name of option
* @return error string
*/
public final synchronized String error(final String name) {
final String sim = similar(name);
return Util.info(sim != null ? Text.UNKNOWN_OPT_SIMILAR_X_X : Text.UNKNOWN_OPTION_X, name, sim);
}
/**
* Inverts the boolean value of an option.
* @param option option
* @return new value
*/
public final synchronized boolean invert(final BooleanOption option) {
final boolean val = !get(option);
set(option, val);
return val;
}
/**
* Scans the system properties and initializes the database options.
* All properties starting with {@code org.basex.} will be assigned as options.
*/
public final void setSystem() {
// collect parameters that start with "org.basex."
final StringList sl = new StringList();
for(final Object key : System.getProperties().keySet()) {
final String k = key.toString();
if(k.startsWith(DBPREFIX)) sl.add(k);
}
// assign properties
for(final String key : sl) {
final String v = System.getProperty(key);
try {
assign(key.substring(DBPREFIX.length()).toUpperCase(Locale.ENGLISH), v);
} catch(final BaseXException ignore) { /* may belong to another Options instance */ }
}
}
@Override
public final synchronized Iterator<Option<?>> iterator() {
return options.values().iterator();
}
@Override
public final synchronized String toString() {
// only those options are listed the value of which differs from default value
final StringBuilder sb = new StringBuilder();
for(final Entry<String, Object> e : values.entrySet()) {
final String name = e.getKey();
final Object value = e.getValue();
if(value == null) continue;
final StringList sl = new StringList();
final Object value2 = options.get(name).value();
if(value instanceof String[]) {
for(final String s : (String[]) value) sl.add(s);
} else if(value instanceof int[]) {
for(final int s : (int[]) value) sl.add(Integer.toString(s));
} else if(value instanceof Options) {
final String s = value.toString();
if(value2 == null || !s.equals(value2.toString())) sl.add(s);
} else if(!value.equals(value2)) {
sl.add(value.toString());
}
for(final String s : sl) {
if(sb.length() != 0) sb.append(',');
sb.append(name).append('=').append(s.replace(",", ",,"));
}
}
return sb.toString();
}
// STATIC METHODS =====================================================================
/**
* Returns a system property. If necessary, the key will be converted to lower-case
* and prefixed with the {@link Prop#DBPREFIX} string.
* @param option option
* @return value, or empty string
*/
public static String getSystem(final Option<?> option) {
String name = option.name().toLowerCase(Locale.ENGLISH);
if(!name.startsWith(DBPREFIX)) name = DBPREFIX + name;
final String v = System.getProperty(name);
return v == null ? "" : v;
}
  /**
   * Sets a system property if it has not been set before.
   * @param option option whose name is used as property key
   * @param val value to store (its string representation)
   */
  public static void setSystem(final Option<?> option, final Object val) {
    // delegates to the string-based variant, which normalizes the key
    setSystem(option.name(), val);
  }
/**
* Sets a system property if it has not been set before. If necessary, the key will
* be converted to lower-case and prefixed with the {@link Prop#DBPREFIX} string.
* @param key key
* @param val value
*/
public static void setSystem(final String key, final Object val) {
final String name = key.indexOf('.') == -1 ? DBPREFIX + key.toLowerCase(Locale.ENGLISH) : key;
if(System.getProperty(name) == null) System.setProperty(name, val.toString());
}
/**
* Returns all options from the specified class.
* @param clz options class
* @return option instances
* @throws IllegalAccessException exception
*/
public static Option<?>[] options(final Class<? extends Options> clz)
throws IllegalAccessException {
final ArrayList<Option<?>> opts = new ArrayList<Option<?>>();
for(final Field f : clz.getFields()) {
if(!Modifier.isStatic(f.getModifiers())) continue;
final Object obj = f.get(null);
if(obj instanceof Option) opts.add((Option<?>) obj);
}
return opts.toArray(new Option[opts.size()]);
}
/**
* Returns a list of allowed keys.
* @param option option
* @param all allowed values
* @return exception
*/
public static String allowed(final Option<?> option, final Object... all) {
final TokenBuilder vals = new TokenBuilder();
for(final Object a : all) vals.add(vals.isEmpty() ? "" : ",").add(a.toString());
return Util.info(Text.OPT_ONEOF, option.name(), vals);
}
// PRIVATE METHODS ====================================================================
  /**
   * Parses an option string and sets the options accordingly.
   * The string is a comma-separated list of {@code KEY=VALUE} pairs; a doubled
   * comma ({@code ,,}) inside a value stands for a literal comma (the inverse
   * of the encoding applied by {@link #toString()}).
   * @param string options string
   * @throws BaseXException database exception
   */
  public synchronized void parse(final String string) throws BaseXException {
    final int sl = string.length();
    int i = 0;
    while(i < sl) {
      // key: everything up to the next '=' (or to the end of the string)
      int k = string.indexOf('=', i);
      if(k == -1) k = sl;
      final String key = string.substring(i, k).trim();
      // value: everything up to the next single comma; ",," is unescaped to ","
      final StringBuilder val = new StringBuilder();
      i = k;
      while(++i < sl) {
        final char ch = string.charAt(i);
        if(ch == ',' && (++i == sl || string.charAt(i) != ',')) break;
        val.append(ch);
      }
      assign(key, val.toString());
    }
  }
  /**
   * Reads the configuration file and initializes the options.
   * The file is located in the project home directory.
   * Parse errors are collected and reported; if the file is missing,
   * incomplete or erroneous, a fresh configuration file is written.
   * @param opts options file
   */
  private synchronized void read(final IOFile opts) {
    file = opts;
    // names of all options that were successfully assigned from the file
    final StringList read = new StringList();
    // error messages collected while parsing
    final StringList errs = new StringList();
    final boolean exists = file.exists();
    if(exists) {
      BufferedReader br = null;
      try {
        // NOTE(review): FileReader uses the platform-default charset — confirm
        // that configuration files are always written with the same charset
        br = new BufferedReader(new FileReader(file.file()));
        boolean local = false;
        for(String line; (line = br.readLine()) != null;) {
          line = line.trim();
          // start of local options: everything below this marker is user-specific
          if(line.equals(PROPUSER)) {
            local = true;
            continue;
          }
          // keep a verbatim copy of the local section
          if(local) user.append(line).append(NL);
          // skip blank lines and comment lines
          if(line.isEmpty() || line.charAt(0) == '#') continue;
          final int d = line.indexOf('=');
          if(d < 0) {
            errs.add("line \"" + line + "\" ignored.");
            continue;
          }
          final String val = line.substring(d + 1).trim();
          String name = line.substring(0, d).trim();
          // extract numeric value in key (digits at the end of the key are
          // split off and passed to assign() as an array offset)
          int num = 0;
          final int ss = name.length();
          for(int s = 0; s < ss; ++s) {
            if(Character.isDigit(name.charAt(s))) {
              num = toInt(name.substring(s));
              name = name.substring(0, s);
              break;
            }
          }
          if(local) {
            // cache local options as system properties
            setSystem(name, val);
          } else {
            try {
              assign(name, val, num);
              read.add(name);
            } catch(final BaseXException ex) {
              errs.add(ex.getMessage());
            }
          }
        }
      } catch(final IOException ex) {
        errs.add("file could not be parsed.");
        Util.errln(ex);
      } finally {
        if(br != null) try { br.close(); } catch(final IOException ignored) { }
      }
    }
    // check if all mandatory options have been read
    boolean ok = true;
    if(errs.isEmpty()) {
      try {
        for(final Option<?> opt : options(getClass())) {
          if(ok && !(opt instanceof Comment)) ok = read.contains(opt.name());
        }
      } catch(final IllegalAccessException ex) {
        Util.notexpected(ex);
      }
    }
    // rewrite the configuration file if it was missing, incomplete or erroneous
    if(!ok || !exists || !errs.isEmpty()) {
      write();
      errs.add("writing new configuration file.");
      for(final String s : errs) Util.errln(file + ": " + s);
    }
  }
  /**
   * Assigns the specified name and value.
   * @param name name of option
   * @param val value of option
   * @param num number (optional): {@code -1} appends to an array-valued option;
   *   for numbers options, a 0-based index of the entry to overwrite; for
   *   strings options, {@code 0} resizes the array and positive numbers
   *   address entries with a 1-based index
   * @throws BaseXException database exception
   */
  private synchronized void assign(final String name, final String val, final int num)
      throws BaseXException {
    final Option<?> option = options.get(name);
    if(option == null) {
      throw new BaseXException(error(name));
    } else if(option instanceof BooleanOption) {
      final boolean v;
      if(val == null || val.isEmpty()) {
        // no value supplied: invert the current value
        final Boolean b = get((BooleanOption) option);
        if(b == null) throw new BaseXException(Text.OPT_BOOLEAN, option.name());
        v = !b;
      } else {
        v = Util.yes(val);
        // reject strings that are neither a "yes" nor a "no" token
        if(!v && !Util.no(val)) throw new BaseXException(Text.OPT_BOOLEAN, option.name());
      }
      put(option, v);
    } else if(option instanceof NumberOption) {
      final int v = toInt(val);
      if(v == MIN_VALUE) throw new BaseXException(Text.OPT_NUMBER, option.name());
      put(option, v);
    } else if(option instanceof StringOption) {
      put(option, val);
    } else if(option instanceof EnumOption) {
      final EnumOption<?> eo = (EnumOption<?>) option;
      final Object v = eo.get(val);
      if(v == null) throw new BaseXException(allowed(option, (Object[]) eo.values()));
      put(option, v);
    } else if(option instanceof OptionsOption) {
      // nested options: parse the value recursively
      final Options o = ((OptionsOption<?>) option).newInstance();
      o.parse(val);
      put(option, o);
    } else if(option instanceof NumbersOption) {
      final int v = toInt(val);
      if(v == MIN_VALUE) throw new BaseXException(Text.OPT_NUMBER, option.name());
      int[] ii = (int[]) get(option);
      if(num == -1) {
        // append the parsed number to the existing array
        if(ii == null) ii = new int[0];
        final IntList il = new IntList(ii.length + 1);
        for(final int i : ii) il.add(i);
        il.add(v);
        put(option, il.toArray());
      } else {
        // overwrite the entry at the specified (0-based) offset
        if(num < 0 || num >= ii.length) throw new BaseXException(Text.OPT_OFFSET, option.name());
        ii[num] = v;
      }
    } else if(option instanceof StringsOption) {
      String[] ss = (String[]) get(option);
      if(num == -1) {
        // append the value to the existing array
        if(ss == null) ss = new String[0];
        final StringList sl = new StringList(ss.length + 1);
        for(final String s : ss) sl.add(s);
        sl.add(val);
        put(option, sl.toArray());
      } else if(num == 0) {
        // number 0: interpret the value as the new array size
        final int v = toInt(val);
        if(v == MIN_VALUE) throw new BaseXException(Text.OPT_NUMBER, option.name());
        values.put(name, new String[v]);
      } else {
        // overwrite the entry at the specified (1-based) offset
        if(num <= 0 || num > ss.length) throw new BaseXException(Text.OPT_OFFSET, option.name());
        ss[num - 1] = val;
      }
    }
  }
/**
* Returns an option name similar to the specified string, or {@code null}.
* @param name name to be found
* @return similar name
*/
private String similar(final String name) {
final byte[] nm = token(name);
final Levenshtein ls = new Levenshtein();
for(final String opts : options.keySet()) {
if(ls.similar(nm, token(opts))) return opts;
}
return null;
}
}
| [FIX] Options: adopt options defined in .basex configuration file | basex-core/src/main/java/org/basex/util/options/Options.java | [FIX] Options: adopt options defined in .basex configuration file | <ide><path>asex-core/src/main/java/org/basex/util/options/Options.java
<ide> * Default constructor.
<ide> */
<ide> public Options() {
<del> init();
<del> }
<del>
<del> /**
<del> * Constructor with options string.
<del> * @param opts options strings
<del> * @throws BaseXException database exception
<del> */
<del> protected Options(final String opts) throws BaseXException {
<del> this();
<del> parse(opts);
<add> this(null);
<ide> }
<ide>
<ide> /**
<ide> * @param opts options file
<ide> */
<ide> protected Options(final IOFile opts) {
<del> this();
<add> init();
<ide> if(opts != null) read(opts);
<ide> // overwrite initialized options with system properties
<ide> setSystem(); |
|
Java | apache-2.0 | c3c209659bd34b2cd03169189b57780c5c319040 | 0 | krlohnes/tinkerpop,RedSeal-co/incubator-tinkerpop,n-tran/incubator-tinkerpop,edgarRd/incubator-tinkerpop,newkek/incubator-tinkerpop,robertdale/tinkerpop,krlohnes/tinkerpop,PommeVerte/incubator-tinkerpop,apache/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,BrynCooke/incubator-tinkerpop,n-tran/incubator-tinkerpop,jorgebay/tinkerpop,edgarRd/incubator-tinkerpop,krlohnes/tinkerpop,dalaro/incubator-tinkerpop,krlohnes/tinkerpop,robertdale/tinkerpop,BrynCooke/incubator-tinkerpop,dalaro/incubator-tinkerpop,RussellSpitzer/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,rmagen/incubator-tinkerpop,rmagen/incubator-tinkerpop,artem-aliev/tinkerpop,vtslab/incubator-tinkerpop,newkek/incubator-tinkerpop,artem-aliev/tinkerpop,RussellSpitzer/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,Lab41/tinkerpop3,PommeVerte/incubator-tinkerpop,apache/tinkerpop,BrynCooke/incubator-tinkerpop,apache/incubator-tinkerpop,pluradj/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,jorgebay/tinkerpop,PommeVerte/incubator-tinkerpop,Lab41/tinkerpop3,mike-tr-adamson/incubator-tinkerpop,robertdale/tinkerpop,artem-aliev/tinkerpop,apache/incubator-tinkerpop,pluradj/incubator-tinkerpop,edgarRd/incubator-tinkerpop,apache/tinkerpop,n-tran/incubator-tinkerpop,samiunn/incubator-tinkerpop,apache/tinkerpop,jorgebay/tinkerpop,mpollmeier/tinkerpop3,mpollmeier/tinkerpop3,velo/incubator-tinkerpop,RedSeal-co/incubator-tinkerpop,samiunn/incubator-tinkerpop,vtslab/incubator-tinkerpop,robertdale/tinkerpop,dalaro/incubator-tinkerpop,mike-tr-adamson/incubator-tinkerpop,apache/tinkerpop,pluradj/incubator-tinkerpop,newkek/incubator-tinkerpop,gdelafosse/incubator-tinkerpop,artem-aliev/tinkerpop,jorgebay/tinkerpop,apache/tinkerpop,artem-aliev/tinkerpop,RussellSpitzer/incubator-tinkerpop,rmagen/incubator-tinkerpop,apache/tinkerpop,krlohnes/tinkerpop,velo/incubator-tinkerpop,vtslab/incubator-tinkerpop,apache/tinkerpop,samiunn/incubator-tinkerpop,v
elo/incubator-tinkerpop,robertdale/tinkerpop | package com.tinkerpop.gremlin.structure.util.detached;
import com.tinkerpop.gremlin.AbstractGremlinTest;
import com.tinkerpop.gremlin.FeatureRequirement;
import com.tinkerpop.gremlin.FeatureRequirementSet;
import com.tinkerpop.gremlin.LoadGraphWith;
import com.tinkerpop.gremlin.structure.Graph;
import com.tinkerpop.gremlin.structure.VertexProperty;
import com.tinkerpop.gremlin.structure.Vertex;
import com.tinkerpop.gremlin.structure.util.ElementHelper;
import com.tinkerpop.gremlin.util.StreamFactory;
import org.junit.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
public class DetachedVertexTest extends AbstractGremlinTest {
    // detach(null) must be rejected
    @Test(expected = IllegalArgumentException.class)
    public void shouldNotConstructWithNullElement() {
        DetachedVertex.detach(null);
    }
    // detaching an already-detached vertex must be rejected
    @Test(expected = IllegalArgumentException.class)
    @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
    public void shouldNotConstructWithSomethingAlreadyDetached() {
        final Vertex v = g.addVertex();
        DetachedVertex.detach(DetachedVertex.detach(v));
    }
    // todo: need "the crew"
    @Test
    @org.junit.Ignore
    public void shouldConstructDetachedVertex() {
        //assertEquals("1", this.detachedVertex.id());
        //assertEquals("l", this.detachedVertex.label());
    }
    // two detachments of the same vertex must be equal
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldEvaluateToEqual() {
        assertTrue(DetachedVertex.detach(g.v(convertToVertexId("marko"))).equals(DetachedVertex.detach(g.v(convertToVertexId("marko")))));
    }
    // hashCode must be consistent with equals for two detachments of the same vertex
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldHaveSameHashCode() {
        assertEquals(DetachedVertex.detach(g.v(convertToVertexId("marko"))).hashCode(), DetachedVertex.detach(g.v(convertToVertexId("marko"))).hashCode());
    }
    // detachments of two different vertices (different ids) must not be equal
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    @FeatureRequirementSet(FeatureRequirementSet.Package.SIMPLE)
    @FeatureRequirement(featureClass = Graph.Features.EdgePropertyFeatures.class, feature = Graph.Features.EdgePropertyFeatures.FEATURE_DOUBLE_VALUES)
    public void shouldNotEvaluateToEqualDifferentId() {
        final Vertex v = g.addVertex("name", "marko", "age", 29);
        assertFalse(DetachedVertex.detach(v).equals(DetachedVertex.detach(g.v(convertToVertexId("marko")))));
    }
    // a DetachedVertex built from raw id/label/property maps must expose those parts
    @Test
    public void shouldConstructDetachedVertexFromParts() {
        final Map<String,Object> properties = new HashMap<>();
        final Map<String,Object> propX1 = new HashMap<>();
        propX1.put("value", "a");
        propX1.put("id", 123);
        propX1.put("label", VertexProperty.DEFAULT_LABEL);
        final Map<String,Object> propX2 = new HashMap<>();
        propX2.put("value", "c");
        propX2.put("id", 124);
        propX2.put("label", VertexProperty.DEFAULT_LABEL);
        properties.put("x", Arrays.asList(propX1, propX2));
        final Map<String,Object> hiddens = new HashMap<>();
        final Map<String,Object> propY1 = new HashMap<>();
        propY1.put("value", "b");
        propY1.put("id", 125);
        propY1.put("label", VertexProperty.DEFAULT_LABEL);
        final Map<String,Object> propY2 = new HashMap<>();
        propY2.put("value", "d");
        propY2.put("id", 126);
        propY2.put("label", VertexProperty.DEFAULT_LABEL);
        hiddens.put(Graph.Key.hide("y"), Arrays.asList(propY1, propY2));
        final DetachedVertex dv = new DetachedVertex(1, "test", properties, hiddens);
        assertEquals(1, dv.id());
        assertEquals("test", dv.label());
        // regular properties: both entries of "x" present, no meta-properties
        final List<VertexProperty> propertyX = StreamFactory.stream(dv.iterators().properties("x")).collect(Collectors.toList());
        assertEquals(2, propertyX.size());
        assertTrue(propertyX.stream().allMatch(p ->
                p.label().equals(VertexProperty.DEFAULT_LABEL)
                        && (p.id().equals(123) || p.id().equals(124))
                        && (p.value().equals("a") || p.value().equals("c"))
                        && !p.iterators().properties().hasNext()
                        && !p.iterators().hiddens().hasNext()));
        // hidden properties: both entries of "y" present, no meta-properties
        final List<VertexProperty> propertyY = StreamFactory.stream(dv.iterators().hiddens("y")).collect(Collectors.toList());
        assertEquals(2, propertyY.size());
        assertTrue(propertyY.stream().allMatch(p ->
                p.label().equals(VertexProperty.DEFAULT_LABEL)
                        && (p.id().equals(125) || p.id().equals(126))
                        && (p.value().equals("b") || p.value().equals("d"))
                        && !p.iterators().properties().hasNext()
                        && !p.iterators().hiddens().hasNext()));
    }
    // as above, but one vertex property carries its own (meta-)properties and hiddens
    @Test
    public void shouldConstructDetachedVertexFromPartsWithPropertiesOnProperties() {
        final Map<String,Object> properties = new HashMap<>();
        final Map<String,Object> propX1 = new HashMap<>();
        propX1.put("value", "a");
        propX1.put("id", 123);
        propX1.put("label", VertexProperty.DEFAULT_LABEL);
        propX1.put("properties", ElementHelper.asMap("propX1a", "a", "propX11", 1, "same", 123.01d, "extra", "something"));
        propX1.put("hidden", ElementHelper.asMap(Graph.Key.hide("propX1ha"), "ha", Graph.Key.hide("propX1h1"), 11, Graph.Key.hide("same"), 321.01d));
        final Map<String,Object> propX2 = new HashMap<>();
        propX2.put("value", "c");
        propX2.put("id", 124);
        propX2.put("label", VertexProperty.DEFAULT_LABEL);
        properties.put("x", Arrays.asList(propX1, propX2));
        final Map<String,Object> hiddens = new HashMap<>();
        final Map<String,Object> propY1 = new HashMap<>();
        propY1.put("value", "b");
        propY1.put("id", 125);
        propY1.put("label", VertexProperty.DEFAULT_LABEL);
        final Map<String,Object> propY2 = new HashMap<>();
        propY2.put("value", "d");
        propY2.put("id", 126);
        propY2.put("label", VertexProperty.DEFAULT_LABEL);
        hiddens.put(Graph.Key.hide("y"), Arrays.asList(propY1, propY2));
        final DetachedVertex dv = new DetachedVertex(1, "test", properties, hiddens);
        assertEquals(1, dv.id());
        assertEquals("test", dv.label());
        final List<VertexProperty> propertyX = StreamFactory.stream(dv.iterators().properties("x")).collect(Collectors.toList());
        assertEquals(2, propertyX.size());
        assertTrue(propertyX.stream().allMatch(p ->
                p.label().equals(VertexProperty.DEFAULT_LABEL)
                        && (p.id().equals(123) || p.id().equals(124))
                        && (p.value().equals("a") || p.value().equals("c"))));
        // there should be only one with properties on properties
        final VertexProperty propertyOnProperty = propertyX.stream().filter(p -> p.iterators().properties().hasNext()).findFirst().get();
        assertEquals("a", propertyOnProperty.iterators().properties("propX1a").next().value());
        assertEquals(1, propertyOnProperty.iterators().properties("propX11").next().value());
        assertEquals(123.01d, propertyOnProperty.iterators().properties("same").next().value());
        assertEquals("something", propertyOnProperty.iterators().properties("extra").next().value());
        assertEquals(4, StreamFactory.stream(propertyOnProperty.iterators().properties()).count());
        assertEquals("ha", propertyOnProperty.iterators().hiddens("propX1ha").next().value());
        assertEquals(11, propertyOnProperty.iterators().hiddens("propX1h1").next().value());
        assertEquals(321.01d, propertyOnProperty.iterators().hiddens("same").next().value());
        assertEquals(3, StreamFactory.stream(propertyOnProperty.iterators().hiddens()).count());
        final List<VertexProperty> propertyY = StreamFactory.stream(dv.iterators().hiddens("y")).collect(Collectors.toList());
        assertEquals(2, propertyY.size());
        assertTrue(propertyY.stream().allMatch(p ->
                p.label().equals(VertexProperty.DEFAULT_LABEL)
                        && (p.id().equals(125) || p.id().equals(126))
                        && (p.value().equals("b") || p.value().equals("d"))
                        && !p.iterators().properties().hasNext()
                        && !p.iterators().hiddens().hasNext()));
    }
    // a detached vertex is read-only: no edge creation
    @Test(expected = UnsupportedOperationException.class)
    @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
    public void shouldNotAllowAddEdge() {
        final Vertex v = g.addVertex();
        final DetachedVertex detachedVertex = DetachedVertex.detach(v);
        detachedVertex.addEdge("test", null);
    }
    // a detached vertex is read-only: no property mutation
    @Test(expected = UnsupportedOperationException.class)
    @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
    public void shouldNotAllowSetProperty() {
        final Vertex v = g.addVertex();
        final DetachedVertex detachedVertex = DetachedVertex.detach(v);
        detachedVertex.property("test", "test");
    }
    // a detached vertex is read-only: no removal
    @Test(expected = UnsupportedOperationException.class)
    @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
    public void shouldNotAllowRemove() {
        final Vertex v = g.addVertex();
        final DetachedVertex detachedVertex = DetachedVertex.detach(v);
        detachedVertex.remove();
    }
    // a detached vertex cannot start a traversal
    @Test(expected = UnsupportedOperationException.class)
    @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
    public void shouldNotTraverse() {
        final Vertex v = g.addVertex();
        final DetachedVertex detachedVertex = DetachedVertex.detach(v);
        detachedVertex.start();
    }
    // todo: need the crew
    @Test(expected = IllegalStateException.class)
    @org.junit.Ignore
    public void shouldNotBeAbleToCallPropertyIfThereAreMultipleProperties() {
        /*
        final Map<String,Object> properties = new HashMap<>();
        final IoVertexProperty propX1 = new IoVertexProperty();
        propX1.value = "a";
        propX1.id = 123;
        propX1.label = VertexProperty.DEFAULT_LABEL;
        final IoVertexProperty propX2 = new IoVertexProperty();
        propX2.value = "c";
        propX2.id = 124;
        propX2.label = VertexProperty.DEFAULT_LABEL;
        properties.put("x", Arrays.asList(propX1, propX2));
        final Map<String,Object> hiddens = new HashMap<>();
        final DetachedVertex dv = new DetachedVertex(1, "test", properties, hiddens);
        dv.property("x");
        */
    }
    // property(key) works when exactly one property exists for the key
    @Test
    @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
    public void shouldBeAbleToCallPropertyIfThereIsASingleProperty() {
        final DetachedVertex dv = DetachedVertex.detach(g.v(convertToVertexId("marko")));
        assertEquals("marko", dv.property("name").value());
        assertEquals(29, dv.property("age").value());
    }
}
| gremlin-test/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedVertexTest.java | package com.tinkerpop.gremlin.structure.util.detached;
import com.tinkerpop.gremlin.AbstractGremlinTest;
import com.tinkerpop.gremlin.structure.Graph;
import com.tinkerpop.gremlin.structure.VertexProperty;
import com.tinkerpop.gremlin.structure.Vertex;
import com.tinkerpop.gremlin.structure.util.ElementHelper;
import com.tinkerpop.gremlin.util.StreamFactory;
import org.junit.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
// Tests for DetachedVertex: rejected constructions, part-based construction,
// and read-only behavior of the detached representation.
public class DetachedVertexTest extends AbstractGremlinTest {
    // todo: clean up all detached tests
    // detach(null) must be rejected
    @Test(expected = IllegalArgumentException.class)
    public void shouldNotConstructWithNullElement() {
        DetachedVertex.detach(null);
    }
    // detaching an already-detached vertex must be rejected
    @Test(expected = IllegalArgumentException.class)
    public void shouldNotConstructWithSomethingAlreadyDetached() {
        final Vertex v = g.addVertex();
        DetachedVertex.detach(DetachedVertex.detach(v));
    }
    @Test
    @org.junit.Ignore
    public void shouldConstructDetachedVertex() {
        //assertEquals("1", this.detachedVertex.id());
        //assertEquals("l", this.detachedVertex.label());
    }
    @Test
    @org.junit.Ignore
    public void shouldEvaluateToEqual() {
        // assertTrue(detachedVertex1.equals(this.detachedVertex));
    }
    @Test
    @org.junit.Ignore
    public void shouldNotEvaluateToEqualDifferentId() {
        //assertFalse(detachedVertex1.equals(this.detachedVertex));
    }
    // a DetachedVertex built from raw id/label/property maps must expose those parts
    @Test
    public void shouldConstructDetachedVertexFromParts() {
        final Map<String,Object> properties = new HashMap<>();
        final Map<String,Object> propX1 = new HashMap<>();
        propX1.put("value", "a");
        propX1.put("id", 123);
        propX1.put("label", VertexProperty.DEFAULT_LABEL);
        final Map<String,Object> propX2 = new HashMap<>();
        propX2.put("value", "c");
        propX2.put("id", 124);
        propX2.put("label", VertexProperty.DEFAULT_LABEL);
        properties.put("x", Arrays.asList(propX1, propX2));
        final Map<String,Object> hiddens = new HashMap<>();
        final Map<String,Object> propY1 = new HashMap<>();
        propY1.put("value", "b");
        propY1.put("id", 125);
        propY1.put("label", VertexProperty.DEFAULT_LABEL);
        final Map<String,Object> propY2 = new HashMap<>();
        propY2.put("value", "d");
        propY2.put("id", 126);
        propY2.put("label", VertexProperty.DEFAULT_LABEL);
        hiddens.put(Graph.Key.hide("y"), Arrays.asList(propY1, propY2));
        final DetachedVertex dv = new DetachedVertex(1, "test", properties, hiddens);
        assertEquals(1, dv.id());
        assertEquals("test", dv.label());
        final List<VertexProperty> propertyX = StreamFactory.stream(dv.iterators().properties("x")).collect(Collectors.toList());
        assertEquals(2, propertyX.size());
        assertTrue(propertyX.stream().allMatch(p ->
                p.label().equals(VertexProperty.DEFAULT_LABEL)
                        && (p.id().equals(123) || p.id().equals(124))
                        && (p.value().equals("a") || p.value().equals("c"))
                        && !p.iterators().properties().hasNext()
                        && !p.iterators().hiddens().hasNext()));
        final List<VertexProperty> propertyY = StreamFactory.stream(dv.iterators().hiddens("y")).collect(Collectors.toList());
        assertEquals(2, propertyY.size());
        assertTrue(propertyY.stream().allMatch(p ->
                p.label().equals(VertexProperty.DEFAULT_LABEL)
                        && (p.id().equals(125) || p.id().equals(126))
                        && (p.value().equals("b") || p.value().equals("d"))
                        && !p.iterators().properties().hasNext()
                        && !p.iterators().hiddens().hasNext()));
    }
    // as above, but one vertex property carries its own (meta-)properties and hiddens
    @Test
    public void shouldConstructDetachedVertexFromPartsWithPropertiesOnProperties() {
        final Map<String,Object> properties = new HashMap<>();
        final Map<String,Object> propX1 = new HashMap<>();
        propX1.put("value", "a");
        propX1.put("id", 123);
        propX1.put("label", VertexProperty.DEFAULT_LABEL);
        propX1.put("properties", ElementHelper.asMap("propX1a", "a", "propX11", 1, "same", 123.01d, "extra", "something"));
        propX1.put("hidden", ElementHelper.asMap(Graph.Key.hide("propX1ha"), "ha", Graph.Key.hide("propX1h1"), 11, Graph.Key.hide("same"), 321.01d));
        final Map<String,Object> propX2 = new HashMap<>();
        propX2.put("value", "c");
        propX2.put("id", 124);
        propX2.put("label", VertexProperty.DEFAULT_LABEL);
        properties.put("x", Arrays.asList(propX1, propX2));
        final Map<String,Object> hiddens = new HashMap<>();
        final Map<String,Object> propY1 = new HashMap<>();
        propY1.put("value", "b");
        propY1.put("id", 125);
        propY1.put("label", VertexProperty.DEFAULT_LABEL);
        final Map<String,Object> propY2 = new HashMap<>();
        propY2.put("value", "d");
        propY2.put("id", 126);
        propY2.put("label", VertexProperty.DEFAULT_LABEL);
        hiddens.put(Graph.Key.hide("y"), Arrays.asList(propY1, propY2));
        final DetachedVertex dv = new DetachedVertex(1, "test", properties, hiddens);
        assertEquals(1, dv.id());
        assertEquals("test", dv.label());
        final List<VertexProperty> propertyX = StreamFactory.stream(dv.iterators().properties("x")).collect(Collectors.toList());
        assertEquals(2, propertyX.size());
        assertTrue(propertyX.stream().allMatch(p ->
                p.label().equals(VertexProperty.DEFAULT_LABEL)
                        && (p.id().equals(123) || p.id().equals(124))
                        && (p.value().equals("a") || p.value().equals("c"))));
        // there should be only one with properties on properties
        final VertexProperty propertyOnProperty = propertyX.stream().filter(p -> p.iterators().properties().hasNext()).findFirst().get();
        assertEquals("a", propertyOnProperty.iterators().properties("propX1a").next().value());
        assertEquals(1, propertyOnProperty.iterators().properties("propX11").next().value());
        assertEquals(123.01d, propertyOnProperty.iterators().properties("same").next().value());
        assertEquals("something", propertyOnProperty.iterators().properties("extra").next().value());
        assertEquals(4, StreamFactory.stream(propertyOnProperty.iterators().properties()).count());
        assertEquals("ha", propertyOnProperty.iterators().hiddens("propX1ha").next().value());
        assertEquals(11, propertyOnProperty.iterators().hiddens("propX1h1").next().value());
        assertEquals(321.01d, propertyOnProperty.iterators().hiddens("same").next().value());
        assertEquals(3, StreamFactory.stream(propertyOnProperty.iterators().hiddens()).count());
        final List<VertexProperty> propertyY = StreamFactory.stream(dv.iterators().hiddens("y")).collect(Collectors.toList());
        assertEquals(2, propertyY.size());
        assertTrue(propertyY.stream().allMatch(p ->
                p.label().equals(VertexProperty.DEFAULT_LABEL)
                        && (p.id().equals(125) || p.id().equals(126))
                        && (p.value().equals("b") || p.value().equals("d"))
                        && !p.iterators().properties().hasNext()
                        && !p.iterators().hiddens().hasNext()));
    }
    // a detached vertex is read-only: no edge creation
    @Test(expected = UnsupportedOperationException.class)
    public void shouldNotAllowAddEdge() {
        final Vertex v = g.addVertex();
        final DetachedVertex detachedVertex = DetachedVertex.detach(v);
        detachedVertex.addEdge("test", null);
    }
    // a detached vertex is read-only: no property mutation
    @Test(expected = UnsupportedOperationException.class)
    public void shouldNotAllowSetProperty() {
        final Vertex v = g.addVertex();
        final DetachedVertex detachedVertex = DetachedVertex.detach(v);
        detachedVertex.property("test", "test");
    }
    // a detached vertex is read-only: no removal
    @Test(expected = UnsupportedOperationException.class)
    public void shouldNotAllowRemove() {
        final Vertex v = g.addVertex();
        final DetachedVertex detachedVertex = DetachedVertex.detach(v);
        detachedVertex.remove();
    }
    // a detached vertex cannot start a traversal
    @Test(expected = UnsupportedOperationException.class)
    public void shouldNotTraverse() {
        final Vertex v = g.addVertex();
        final DetachedVertex detachedVertex = DetachedVertex.detach(v);
        detachedVertex.start();
    }
    @Test(expected = IllegalStateException.class)
    @org.junit.Ignore
    public void shouldNotBeAbleToCallPropertyIfThereAreMultipleProperties() {
        /*
        final Map<String,Object> properties = new HashMap<>();
        final IoVertexProperty propX1 = new IoVertexProperty();
        propX1.value = "a";
        propX1.id = 123;
        propX1.label = VertexProperty.DEFAULT_LABEL;
        final IoVertexProperty propX2 = new IoVertexProperty();
        propX2.value = "c";
        propX2.id = 124;
        propX2.label = VertexProperty.DEFAULT_LABEL;
        properties.put("x", Arrays.asList(propX1, propX2));
        final Map<String,Object> hiddens = new HashMap<>();
        final DetachedVertex dv = new DetachedVertex(1, "test", properties, hiddens);
        dv.property("x");
        */
    }
    @Test
    @org.junit.Ignore
    public void shouldBeAbleToCallPropertyIfThereIsASingleProperty() {
        /*
        final Map<String,Object> properties = new HashMap<>();
        final IoVertexProperty propX1 = new IoVertexProperty();
        propX1.value = "a";
        propX1.id = 123;
        propX1.label = VertexProperty.DEFAULT_LABEL;
        properties.put("x", Arrays.asList(propX1));
        final Map<String,Object> hiddens = new HashMap<>();
        final DetachedVertex dv = new DetachedVertex(1, "test", properties, hiddens);
        assertEquals("a", dv.property("x").value());
        */
    }
}
| Turn back on a few more Detached class tests.
| gremlin-test/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedVertexTest.java | Turn back on a few more Detached class tests. | <ide><path>remlin-test/src/main/java/com/tinkerpop/gremlin/structure/util/detached/DetachedVertexTest.java
<ide> package com.tinkerpop.gremlin.structure.util.detached;
<ide>
<ide> import com.tinkerpop.gremlin.AbstractGremlinTest;
<add>import com.tinkerpop.gremlin.FeatureRequirement;
<add>import com.tinkerpop.gremlin.FeatureRequirementSet;
<add>import com.tinkerpop.gremlin.LoadGraphWith;
<ide> import com.tinkerpop.gremlin.structure.Graph;
<ide> import com.tinkerpop.gremlin.structure.VertexProperty;
<ide> import com.tinkerpop.gremlin.structure.Vertex;
<ide> import java.util.stream.Collectors;
<ide>
<ide> import static org.junit.Assert.assertEquals;
<add>import static org.junit.Assert.assertFalse;
<ide> import static org.junit.Assert.assertTrue;
<ide>
<ide> /**
<ide> */
<ide> public class DetachedVertexTest extends AbstractGremlinTest {
<ide>
<del> // todo: clean up all detached tests
<del>
<ide> @Test(expected = IllegalArgumentException.class)
<ide> public void shouldNotConstructWithNullElement() {
<ide> DetachedVertex.detach(null);
<ide> }
<ide>
<ide> @Test(expected = IllegalArgumentException.class)
<add> @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
<ide> public void shouldNotConstructWithSomethingAlreadyDetached() {
<ide> final Vertex v = g.addVertex();
<ide> DetachedVertex.detach(DetachedVertex.detach(v));
<ide> }
<add>
<add> // todo: need "the crew"
<ide>
<ide> @Test
<ide> @org.junit.Ignore
<ide> }
<ide>
<ide> @Test
<del> @org.junit.Ignore
<add> @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
<ide> public void shouldEvaluateToEqual() {
<del> // assertTrue(detachedVertex1.equals(this.detachedVertex));
<del> }
<del>
<del> @Test
<del> @org.junit.Ignore
<add> assertTrue(DetachedVertex.detach(g.v(convertToVertexId("marko"))).equals(DetachedVertex.detach(g.v(convertToVertexId("marko")))));
<add> }
<add>
<add> @Test
<add> @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
<add> public void shouldHaveSameHashCode() {
<add> assertEquals(DetachedVertex.detach(g.v(convertToVertexId("marko"))).hashCode(), DetachedVertex.detach(g.v(convertToVertexId("marko"))).hashCode());
<add> }
<add>
<add> @Test
<add> @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
<add> @FeatureRequirementSet(FeatureRequirementSet.Package.SIMPLE)
<add> @FeatureRequirement(featureClass = Graph.Features.EdgePropertyFeatures.class, feature = Graph.Features.EdgePropertyFeatures.FEATURE_DOUBLE_VALUES)
<ide> public void shouldNotEvaluateToEqualDifferentId() {
<del> //assertFalse(detachedVertex1.equals(this.detachedVertex));
<add> final Vertex v = g.addVertex("name", "marko", "age", 29);
<add> assertFalse(DetachedVertex.detach(v).equals(DetachedVertex.detach(g.v(convertToVertexId("marko")))));
<ide> }
<ide>
<ide> @Test
<ide>
<ide>
<ide> @Test(expected = UnsupportedOperationException.class)
<add> @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
<ide> public void shouldNotAllowAddEdge() {
<ide> final Vertex v = g.addVertex();
<ide> final DetachedVertex detachedVertex = DetachedVertex.detach(v);
<ide> }
<ide>
<ide> @Test(expected = UnsupportedOperationException.class)
<add> @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
<ide> public void shouldNotAllowSetProperty() {
<ide> final Vertex v = g.addVertex();
<ide> final DetachedVertex detachedVertex = DetachedVertex.detach(v);
<ide> }
<ide>
<ide> @Test(expected = UnsupportedOperationException.class)
<add> @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
<ide> public void shouldNotAllowRemove() {
<ide> final Vertex v = g.addVertex();
<ide> final DetachedVertex detachedVertex = DetachedVertex.detach(v);
<ide> }
<ide>
<ide> @Test(expected = UnsupportedOperationException.class)
<add> @FeatureRequirementSet(FeatureRequirementSet.Package.VERTICES_ONLY)
<ide> public void shouldNotTraverse() {
<ide> final Vertex v = g.addVertex();
<ide> final DetachedVertex detachedVertex = DetachedVertex.detach(v);
<ide> detachedVertex.start();
<ide> }
<add>
<add> // todo: need the crew
<ide>
<ide> @Test(expected = IllegalStateException.class)
<ide> @org.junit.Ignore
<ide> }
<ide>
<ide> @Test
<del> @org.junit.Ignore
<add> @LoadGraphWith(LoadGraphWith.GraphData.MODERN)
<ide> public void shouldBeAbleToCallPropertyIfThereIsASingleProperty() {
<del> /*
<del> final Map<String,Object> properties = new HashMap<>();
<del> final IoVertexProperty propX1 = new IoVertexProperty();
<del> propX1.value = "a";
<del> propX1.id = 123;
<del> propX1.label = VertexProperty.DEFAULT_LABEL;
<del> properties.put("x", Arrays.asList(propX1));
<del>
<del> final Map<String,Object> hiddens = new HashMap<>();
<del> final DetachedVertex dv = new DetachedVertex(1, "test", properties, hiddens);
<del> assertEquals("a", dv.property("x").value());
<del> */
<add> final DetachedVertex dv = DetachedVertex.detach(g.v(convertToVertexId("marko")));
<add> assertEquals("marko", dv.property("name").value());
<add> assertEquals(29, dv.property("age").value());
<ide> }
<ide> } |
|
Java | bsd-3-clause | 6a5d66dd2c70c7a28a6c934384b3a8dd6cd77a7a | 0 | muloem/xins,muloem/xins,muloem/xins | /*
* $Id$
*/
package org.xins.server;
import java.util.List;
import org.xins.util.MandatoryArgumentChecker;
import org.xins.util.text.ParseException;
/**
* Access rule list.
*
* <h3>Descriptor format</h3>
*
* <p>An access rule list <em>descriptor</em>, a character string, can be
* converted to produce an {@link AccessRuleList} object. A valid descriptor
* consists of a list of access rule descriptors (see class
* {@link AccessRule}), separated by semi-colon characters (<code>';'</code>).
* Optionally, the rules can have any amount of whitespace (space-, tab-,
* newline- and carriage return-characters), before and after them. The last
* descriptor cannot end with a semi-colon.
*
* <h3>Descriptor examples</h3>
*
* <p>An example of an access rule list descriptor is:
*
* <blockquote><code>allow 194.134.168.213/32 *;
* <br>deny 194.134.168.213/24 _*;
* <br>allow 194.134.168.213/24 *;
* <br>deny 0.0.0.0/0 *</code></blockquote>
*
* <p>The above access control list grants access to the IP address
* 194.134.168.213 to access all functions. Then in the second rule it denies
* access to all IP addresses in the range 194.134.168.0 to 194.134.168.255 to
* all functions that start with an underscore (<code>'_'</code>). Then it
* allows access for those IP addresses to all other functions, and finally
* all other IP addresses are denied access to any of the functions.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:[email protected]">[email protected]</a>)
*/
public final class AccessRuleList {

   //-------------------------------------------------------------------------
   // Class fields
   //-------------------------------------------------------------------------

   //-------------------------------------------------------------------------
   // Class functions
   //-------------------------------------------------------------------------

   /**
    * Parses the specified character string to construct a new
    * <code>AccessRuleList</code> object.
    *
    * @param descriptor
    *    the access rule list descriptor, the character string to parse,
    *    cannot be <code>null</code>.
    *
    * @return
    *    an {@link AccessRuleList} instance, never <code>null</code>.
    *
    * @throws IllegalArgumentException
    *    if <code>descriptor == null</code>.
    *
    * @throws ParseException
    *    if there was a parsing error.
    */
   public static final AccessRuleList parseAccessRuleList(String descriptor)
   throws IllegalArgumentException, ParseException {

      // Check preconditions
      MandatoryArgumentChecker.check("descriptor", descriptor);

      // TODO: Split the descriptor on semi-colons, trim the surrounding
      //       whitespace and parse each individual rule (see AccessRule).
      //       Until that is implemented this method violates its documented
      //       "never null" contract by returning null.
      return null; // TODO
   }


   //-------------------------------------------------------------------------
   // Constructors
   //-------------------------------------------------------------------------

   /**
    * Creates a new <code>AccessRuleList</code> object.
    *
    * @param rules
    *    the list of rules ({@link AccessRule} objects), cannot be
    *    <code>null</code>.
    *
    * @throws IllegalArgumentException
    *    if <code>rules == null</code>.
    */
   public AccessRuleList(List rules)
   throws IllegalArgumentException {

      // Check preconditions
      MandatoryArgumentChecker.check("rules", rules);

      _rules = rules;
   }


   //-------------------------------------------------------------------------
   // Fields
   //-------------------------------------------------------------------------

   /**
    * The list of rules. Assigned once in the constructor, never
    * <code>null</code>.
    */
   private final List _rules;


   //-------------------------------------------------------------------------
   // Methods
   //-------------------------------------------------------------------------
}
| src/java-server-framework/org/xins/server/AccessRuleList.java | /*
* $Id$
*/
package org.xins.server;
import java.util.List;
import org.xins.util.MandatoryArgumentChecker;
import org.xins.util.text.ParseException;
/**
* Access rule list. This class can take a character string to produce an
* {@link AccessRuleList} object from it.
*
* <h3>Examples</h3>
*
* <p>An example of an access rule list descriptor is:
*
* <blockquote><code>allow 194.134.168.213/32 *;
* <br>deny 194.134.168.213/24 _*;
* <br>allow 194.134.168.213/24 *;
* <br>deny 0.0.0.0/0 *</code></blockquote>
*
* <p>The above access control list grants access to the IP address
* 194.134.168.213 to access all functions. Then in the second rule it denies
* access to all IP addresses in the range 194.134.168.0 to 194.134.168.255 to
* all functions that start with an underscore (<code>'_'</code>). Then it
* allows access for those IP addresses to all other functions, and finally
* all other IP addresses are denied access to any of the functions.
*
* @version $Revision$ $Date$
* @author Ernst de Haan (<a href="mailto:[email protected]">[email protected]</a>)
*/
public final class AccessRuleList
extends Object {
//-------------------------------------------------------------------------
// Class fields
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
// Class functions
//-------------------------------------------------------------------------
/**
* Parses the specified character string to construct a new
* <code>AccessRuleList</code> object.
*
* @param descriptor
* the access rule list descriptor, the character string to parse,
* cannot be <code>null</code>.
*
* @return
* an {@link AccessRuleList} instance, never <code>null</code>.
*
* @throws IllegalArgumentException
* if <code>descriptor == null</code>.
*
* @throws ParseException
* if there was a parsing error.
*/
public static final AccessRuleList parseAccessRuleList(String descriptor)
throws IllegalArgumentException, ParseException {
// Check preconditions
MandatoryArgumentChecker.check("descriptor", descriptor);
return null; // TODO
}
//-------------------------------------------------------------------------
// Constructors
//-------------------------------------------------------------------------
/**
* Creates a new <code>AccessRuleList</code> object.
*
* @param rules
* the list of rules ({@link AccessRule} objects), cannot be
* <code>null</code>.
*
* @throws IllegalArgumentException
* if <code>rules == null</code>.
*/
public AccessRuleList(List rules)
throws IllegalArgumentException {
// Check preconditions
MandatoryArgumentChecker.check("rules", rules);
_rules = rules;
}
//-------------------------------------------------------------------------
// Fields
//-------------------------------------------------------------------------
/**
* The list of rules. Cannot be <code>null</code>.
*/
private List _rules;
//-------------------------------------------------------------------------
// Methods
//-------------------------------------------------------------------------
}
| Improved documentation.
| src/java-server-framework/org/xins/server/AccessRuleList.java | Improved documentation. | <ide><path>rc/java-server-framework/org/xins/server/AccessRuleList.java
<ide> import org.xins.util.text.ParseException;
<ide>
<ide> /**
<del> * Access rule list. This class can take a character string to produce an
<del> * {@link AccessRuleList} object from it.
<add> * Access rule list.
<ide> *
<del> * <h3>Examples</h3>
<add> * <h3>Descriptor format</h3>
<add> *
<add> * <p>An access rule list <em>descriptor</em>, a character string, can be
<add> * converted to produce an {@link AccessRuleList} object. A valid descriptor
<add> * consists of a list of access rule descriptors (see class
<add> * {@link AccessRule}), separated by semi-colon characters (<code>';'</code>).
<add> * Optionally, the rules can have any amount of whitespace (space-, tab-,
<add> * newline- and carriage return-characters), before and after them. The last
<add> * descriptor cannot end with a semi-colon.
<add> *
<add> * <h3>Descriptor examples</h3>
<ide> *
<ide> * <p>An example of an access rule list descriptor is:
<ide> * |
|
Java | apache-2.0 | f577dace72a2c671ff2c9c9928c6f3e585bbc9ac | 0 | magnetsystems/message-samples-android | package com.magnet.wru;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.location.Location;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.Circle;
import com.google.android.gms.maps.model.CircleOptions;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.magnet.mmx.client.api.MMX;
import com.magnet.mmx.client.api.MMXMessage;
import com.magnet.mmx.client.api.MMXUser;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.EventListener;
import java.util.HashMap;
import java.util.List;
/**
 * Map screen for the WRU ("where are you?") sample application.
 *
 * <p>Shows the current location of every member of the joined group as a map
 * marker with an accuracy circle, refreshes those markers as location
 * messages arrive over MMX, and hosts a collapsible message-list panel for
 * group chat.  The message-list toggle button doubles as a status indicator:
 * green when connected or when new messages arrive, red when login is
 * required, gray while the list is open.
 */
public class MapActivity extends AppCompatActivity implements OnMapReadyCallback {
  private static final String TAG = MapActivity.class.getSimpleName();

  // Delay before the initial MMX login-state check; gives the client time to
  // finish connecting after the activity starts.
  private static final long LOGIN_CHECK_DELAY_MS = 3000;

  private WRU mWru = null;
  private GoogleMap mMap = null;
  private TextView mTextTop = null;
  private SupportMapFragment mMapFragment = null;
  private MessageListFragment mMessageListFragment = null;
  private ImageButton mToggleMessageList = null;

  // Only ever used from the UI thread (runOnUiThread / menu handling);
  // DateFormat itself is not thread-safe.
  private DateFormat mFormatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);

  // Used for the delayed login-state check; kept as a field so the pending
  // callback can be cancelled in onDestroy() instead of firing against (and
  // leaking) a destroyed activity.
  private final Handler mHandler = new Handler();

  // Tints the toggle button green when a message arrives while the list is closed.
  private MyMessageStore.OnChangeListener mMessageListListener = new MyMessageStore.OnChangeListener() {
    public void onChange() {
      if (!mMessageListOpen) {
        runOnUiThread(new Runnable() {
          public void run() {
            mToggleMessageList.setBackgroundColor(getResources().getColor(android.R.color.holo_green_light));
          }
        });
      }
    }
  };

  // Repositions the sender's marker whenever a location update is received.
  private WRU.OnLocationReceivedListener mListener = new WRU.OnLocationReceivedListener() {
    public void onLocationReceived(MMXUser user, WRU.LocationTime locationTime) {
      updateLocationMarkers(locationTime);
    }
  };

  // Keeps the toggle button's color in sync with the MMX connection state.
  private MMX.EventListener mEventListener = new MMX.EventListener() {
    @Override
    public boolean onMessageReceived(MMXMessage mmxMessage) {
      // Incoming messages are handled elsewhere (MyMessageStore).
      return false;
    }

    public boolean onLoginRequired(MMX.LoginReason reason) {
      updateLoginState();
      return false;
    }
  };

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_map);
    mWru = WRU.getInstance(this);
    mWru.registerOnLocationReceivedListener(mListener);
    mTextTop = (TextView) findViewById(R.id.text_top);
    mTextTop.setText("group key: " + mWru.getJoinedTopicKey() + ", passphrase: " + mWru.getJoinedTopicPassphrase());
    mToggleMessageList = (ImageButton) findViewById(R.id.toggle_message_list);

    //setup map
    mMapFragment = (SupportMapFragment) getSupportFragmentManager()
            .findFragmentById(R.id.map);
    mMapFragment.getMapAsync(this);

    mMessageListFragment = (MessageListFragment) getSupportFragmentManager()
            .findFragmentById(R.id.fragment_message_list);

    MyMessageStore.registerListener(mMessageListListener);
    MMX.registerListener(mEventListener);
    // Check the login state once the MMX client has had a chance to connect.
    mHandler.postDelayed(new Runnable() {
      public void run() {
        updateLoginState();
      }
    }, LOGIN_CHECK_DELAY_MS);
  }

  protected void onResume() {
    updateLocationMarkers(null);
    // While this activity is visible, status-bar notifications are redundant.
    MyMessageStore.sSuppressNotification = true;
    super.onResume();
  }

  protected void onPause() {
    MyMessageStore.sSuppressNotification = false;
    super.onPause();
  }

  protected void onDestroy() {
    // Cancel the pending login-state check so it cannot run against a
    // destroyed activity.
    mHandler.removeCallbacksAndMessages(null);
    mWru.unregisterOnLocationReceivedListener(mListener);
    MyMessageStore.unregisterListener(mMessageListListener);
    MMX.unregisterListener(mEventListener);
    super.onDestroy();
  }

  /**
   * Called when the map is ready: enables the my-location layer and the
   * traffic overlay, then centers the camera on the last known device
   * location.  The location lookup runs off the UI thread because waiting
   * for the Google API client may block.
   */
  @Override
  public void onMapReady(GoogleMap map) {
    mMap = map;
    mMap.setMyLocationEnabled(true);
    mMap.setTrafficEnabled(true);
    AsyncTask.execute(new Runnable() {
      public void run() {
        GoogleApiClient googleApiClient = mWru.waitForGoogleApi();
        Log.d(TAG, "onMapReady(): requesting last location");
        Location result = LocationServices.FusedLocationApi
                .getLastLocation(googleApiClient);
        if (result != null) {
          final LatLng myloc = new LatLng(result.getLatitude(),
                  result.getLongitude());
          runOnUiThread(new Runnable() {
            public void run() {
              mMap.animateCamera(CameraUpdateFactory.newLatLngZoom(myloc, 16));
            }
          });
        }
      }
    });
    updateLocationMarkers(null);
  }

  // Marker and accuracy circle per username; accessed on the UI thread only.
  private HashMap<String, Marker> mMarkerMap = new HashMap<>();
  private HashMap<String, Circle> mCircleMap = new HashMap<>();

  /**
   * Refreshes the marker for the specified location update, or for all known
   * users when <code>locationTime</code> is <code>null</code>.
   */
  private void updateLocationMarkers(final WRU.LocationTime locationTime) {
    if (locationTime == null) {
      for (WRU.LocationTime entry : mWru.getLocationTimes().values()) {
        updateMarker(entry);
      }
    } else {
      updateMarker(locationTime);
    }
  }

  /**
   * Creates or moves the marker (and its accuracy circle) for one user.  The
   * local user is skipped because the map's my-location layer already shows
   * the device position.  Marker opacity fades with the age of the fix.
   */
  private void updateMarker(final WRU.LocationTime locationTime) {
    runOnUiThread(new Runnable() {
      public void run() {
        if (mMap == null) {
          Log.w(TAG, "updateMarker(): map is not ready yet, ignoring");
          return;
        }
        if (!locationTime.username.equalsIgnoreCase(mWru.getUsername())) {
          Marker marker = mMarkerMap.get(locationTime.username);
          Circle circle = mCircleMap.get(locationTime.username);
          LatLng loc = new LatLng(locationTime.location.getLat(), locationTime.location.getLng());
          StringBuilder snippet = new StringBuilder()
                  .append("+/-").append(locationTime.location.getAccuracy()).append("m, fix: ")
                  .append(mFormatter.format(new Date(locationTime.locationTimestamp)))
                  .append(", rcv: ")
                  .append(mFormatter.format(new Date(locationTime.timestamp)));
          if (marker == null) {
            String label = locationTime.username;
            marker = mMap.addMarker(new MarkerOptions().position(loc).title(label).snippet(snippet.toString()));
            mMarkerMap.put(locationTime.username, marker);
            //draw the accuracy circle around the new marker
            mCircleMap.put(locationTime.username, mMap.addCircle(
                    new CircleOptions()
                            .center(marker.getPosition())
                            .radius(locationTime.location.getAccuracy())
                            .strokeColor(0xffff0000)
                            .strokeWidth(1.0f)
                            .fillColor(0x44ff0000)));
          } else {
            marker.setSnippet(snippet.toString());
            marker.setPosition(loc);
            if (marker.isInfoWindowShown()) {
              //refresh the marker info window
              marker.hideInfoWindow();
              marker.showInfoWindow();
            }
            circle.setCenter(marker.getPosition());
            circle.setRadius(locationTime.location.getAccuracy());
          }
          marker.setAlpha(getTimeBasedAlpha(locationTime.locationTimestamp));
        }
      }
    });
  }

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    // Inflate the menu; this adds items to the action bar if it is present.
    getMenuInflater().inflate(R.menu.menu_map, menu);
    // Reflect the current event-log state in the enable/disable menu item.
    MenuItem item = menu.findItem(R.id.action_set_enabled);
    EventLog eventLog = EventLog.getInstance(this);
    if (eventLog.isEnabled()) {
      item.setTitle(R.string.action_disable);
    } else {
      item.setTitle(R.string.action_enable);
    }
    return true;
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    int id = item.getItemId();
    if (id == R.id.action_settings) {
      new AlertDialog.Builder(this)
              .setTitle(R.string.action_settings)
              .setMessage("YOU ARE: " + mWru.getUsername() + "\nGROUP KEY: " + mWru.getJoinedTopicKey() +
                      "\nPASSPHRASE: " + mWru.getJoinedTopicPassphrase())
              .show();
      return true;
    }
    if (id == R.id.action_request_updates) {
      mWru.requestLocationUpdates();
      return true;
    }
    if (id == R.id.action_leave) {
      mWru.leaveTopic();
      Intent intent = new Intent(this, MainActivity.class);
      intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
      startActivity(intent);
      this.finish();
      return true;
    }
    if (id == R.id.action_find) {
      // Show a case-insensitively sorted list of tracked users; selecting one
      // zooms the camera to frame that user plus (if known) the local position.
      final String[] users = new String[mMarkerMap.size()];
      mMarkerMap.keySet().toArray(users);
      ArrayList<String> tmpList = new ArrayList<>();
      Collections.addAll(tmpList, users);
      Collections.sort(tmpList, new SortIgnoreCase());
      tmpList.toArray(users);
      new AlertDialog.Builder(this)
              .setTitle(R.string.action_find)
              .setItems(users, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                  String user = users[which];
                  Marker marker = mMarkerMap.get(user);
                  marker.showInfoWindow();
                  LatLngBounds.Builder builder = new LatLngBounds.Builder()
                          .include(marker.getPosition());
                  Location myLoc = mMap.getMyLocation();
                  if (myLoc != null) {
                    builder.include(new LatLng(mMap.getMyLocation().getLatitude(),
                            mMap.getMyLocation().getLongitude()));
                  }
                  mMap.animateCamera(CameraUpdateFactory.newLatLngBounds(builder.build(), 40));
                  dialog.dismiss();
                }
              })
              .show();
      // NOTE(review): no "return true" here, so handling falls through to
      // super.onOptionsItemSelected() -- confirm this is intentional.
    }
    if (id == R.id.action_clear_log) {
      EventLog.getInstance(this).clear();
    }
    if (id == R.id.action_view_log) {
      // Show the most recent 50 event-log entries; tapping one shows its full text.
      List<EventLog.Event> events = EventLog.getInstance(this).listEvents(null, 50);
      final String[] eventStrs = new String[events.size()];
      for (int i = 0; i < events.size(); i++) {
        EventLog.Event event = events.get(i);
        eventStrs[i] = mFormatter.format(event.timestamp) + " " + event.type + ": " + event.text;
      }
      new AlertDialog.Builder(this)
              .setTitle(R.string.action_view_log)
              .setItems(eventStrs, new DialogInterface.OnClickListener() {
                public void onClick(DialogInterface dialog, int which) {
                  new AlertDialog.Builder(MapActivity.this)
                          .setMessage(eventStrs[which])
                          .show();
                }
              })
              .show();
    }
    if (id == R.id.action_set_enabled) {
      EventLog eventLog = EventLog.getInstance(this);
      eventLog.setEnabled(!eventLog.isEnabled());
      item.setTitle(eventLog.isEnabled() ? R.string.action_disable : R.string.action_enable);
      Toast.makeText(this, "Event log is " + (eventLog.isEnabled() ? "enabled" : "disabled"), Toast.LENGTH_SHORT).show();
    }
    return super.onOptionsItemSelected(item);
  }

  private static final float MARKER_MIN_ALPHA = 0.2f;
  private static final float MILLIS_IN_HOUR = 60 * 60 * 1000f;

  /**
   * Maps the age of a location fix to a marker alpha: a fresh fix is fully
   * opaque and opacity decreases linearly over one hour, clamped at
   * {@link #MARKER_MIN_ALPHA} for fixes an hour old or older.
   */
  private float getTimeBasedAlpha(long locationTime) {
    long age = System.currentTimeMillis() - locationTime;
    float calculatedFloat = (MILLIS_IN_HOUR - age) / MILLIS_IN_HOUR;
    Log.d(TAG, "getTimeBasedAlpha(): calculated float = " + String.valueOf(calculatedFloat));
    return calculatedFloat < MARKER_MIN_ALPHA ? MARKER_MIN_ALPHA : calculatedFloat;
  }

  /** Case-insensitive String comparator used to sort the user list. */
  public class SortIgnoreCase implements Comparator<Object> {
    public int compare(Object o1, Object o2) {
      String s1 = (String) o1;
      String s2 = (String) o2;
      return s1.toLowerCase().compareTo(s2.toLowerCase());
    }
  }

  /** Forwards clicks from the message-list panel to its fragment. */
  public void doFragmentClick(View view) {
    mMessageListFragment.doFragmentClick(view);
  }

  private boolean mMessageListOpen = false;

  /**
   * Opens or closes the message-list panel by adjusting its layout weight,
   * hiding the soft keyboard when the panel is being closed.
   */
  public synchronized void toggleMessageList(View view) {
    float newWeight;
    if (mMessageListOpen) {
      //hide soft keyboard
      InputMethodManager imm = (InputMethodManager) getSystemService(INPUT_METHOD_SERVICE);
      imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
      newWeight = 0;
    } else {
      mToggleMessageList.setBackgroundColor(getResources().getColor(android.R.color.darker_gray));
      newWeight = 9;
    }
    mMessageListFragment.getView()
            .setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, 0, newWeight));
    mMessageListOpen = !mMessageListOpen;
  }

  /** Colors the message-list toggle button green when logged in, red otherwise. */
  private void updateLoginState() {
    runOnUiThread(new Runnable() {
      public void run() {
        if (MMX.getCurrentUser() != null) {
          //logged in
          mToggleMessageList.setBackgroundColor(getResources().getColor(android.R.color.holo_green_light));
        } else {
          //not logged in
          mToggleMessageList.setBackgroundColor(getResources().getColor(android.R.color.holo_red_dark));
        }
      }
    });
  }
}
| WRU/app/src/main/java/com/magnet/wru/MapActivity.java | package com.magnet.wru;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.location.Location;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.location.LocationServices;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.Circle;
import com.google.android.gms.maps.model.CircleOptions;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.magnet.mmx.client.api.MMXUser;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
public class MapActivity extends AppCompatActivity implements OnMapReadyCallback {
private static final String TAG = MapActivity.class.getSimpleName();
private WRU mWru = null;
private GoogleMap mMap = null;
private TextView mTextTop = null;
private SupportMapFragment mMapFragment = null;
private MessageListFragment mMessageListFragment = null;
private ImageButton mToggleMessageList = null;
private DateFormat mFormatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);
private MyMessageStore.OnChangeListener mMessageListListener = new MyMessageStore.OnChangeListener() {
public void onChange() {
if (!mMessageListOpen) {
runOnUiThread(new Runnable() {
public void run() {
mToggleMessageList.setBackgroundColor(getResources().getColor(android.R.color.holo_green_light));
}
});
}
}
};
private WRU.OnLocationReceivedListener mListener = new WRU.OnLocationReceivedListener() {
public void onLocationReceived(MMXUser user, WRU.LocationTime locationTime) {
updateLocationMarkers(locationTime);
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_map);
mWru = WRU.getInstance(this);
mWru.registerOnLocationReceivedListener(mListener);
mTextTop = (TextView) findViewById(R.id.text_top);
mTextTop.setText("group key: " + mWru.getJoinedTopicKey() + ", passphrase: " + mWru.getJoinedTopicPassphrase());
mToggleMessageList = (ImageButton) findViewById(R.id.toggle_message_list);
//setup map
mMapFragment = (SupportMapFragment) getSupportFragmentManager()
.findFragmentById(R.id.map);
mMapFragment.getMapAsync(this);
mMessageListFragment = (MessageListFragment) getSupportFragmentManager()
.findFragmentById(R.id.fragment_message_list);
MyMessageStore.registerListener(mMessageListListener);
}
protected void onResume() {
updateLocationMarkers(null);
MyMessageStore.sSuppressNotification = true;
super.onResume();
}
protected void onPause() {
MyMessageStore.sSuppressNotification = false;
super.onPause();
}
protected void onDestroy() {
mWru.unregisterOnLocationReceivedListener(mListener);
MyMessageStore.unregisterListener(mMessageListListener);
super.onDestroy();
}
@Override
public void onMapReady(GoogleMap map) {
mMap = map;
mMap.setMyLocationEnabled(true);
mMap.setTrafficEnabled(true);
AsyncTask.execute(new Runnable() {
public void run() {
GoogleApiClient googleApiClient = mWru.waitForGoogleApi();
Log.d(TAG, "onMapReady(): requesting last location");
Location result = LocationServices.FusedLocationApi
.getLastLocation(googleApiClient);
if (result != null) {
final LatLng myloc = new LatLng(result.getLatitude(),
result.getLongitude());
runOnUiThread(new Runnable() {
public void run() {
mMap.animateCamera(CameraUpdateFactory.newLatLngZoom(myloc, 16));
}
});
}
}
});
updateLocationMarkers(null);
}
private HashMap<String, Marker> mMarkerMap = new HashMap<>();
private HashMap<String, Circle> mCircleMap = new HashMap<>();
private void updateLocationMarkers(final WRU.LocationTime locationTime) {
if (locationTime == null) {
for (WRU.LocationTime entry : mWru.getLocationTimes().values()) {
updateMarker(entry);
}
} else {
updateMarker(locationTime);
}
}
private void updateMarker(final WRU.LocationTime locationTime) {
runOnUiThread(new Runnable() {
public void run() {
if (mMap == null) {
Log.w(TAG, "updateMarker(): map is not ready yet, ignoring");
return;
}
if (!locationTime.username.equalsIgnoreCase(mWru.getUsername())) {
Marker marker = mMarkerMap.get(locationTime.username);
Circle circle = mCircleMap.get(locationTime.username);
LatLng loc = new LatLng(locationTime.location.getLat(), locationTime.location.getLng());
StringBuilder snippet = new StringBuilder()
.append("+/-").append(locationTime.location.getAccuracy()).append("m, fix: ")
.append(mFormatter.format(new Date(locationTime.locationTimestamp)))
.append(", rcv: ")
.append(mFormatter.format(new Date(locationTime.timestamp)));
if (marker == null) {
String label = locationTime.username;
marker = mMap.addMarker(new MarkerOptions().position(loc).title(label).snippet(snippet.toString()));
mMarkerMap.put(locationTime.username, marker);
//draw the circle
mCircleMap.put(locationTime.username, mMap.addCircle(
new CircleOptions()
.center(marker.getPosition())
.radius(locationTime.location.getAccuracy())
.strokeColor(0xffff0000)
.strokeWidth(1.0f)
.fillColor(0x44ff0000)));
} else {
marker.setSnippet(snippet.toString());
marker.setPosition(loc);
if (marker.isInfoWindowShown()) {
//refresh the marker info window
marker.hideInfoWindow();
marker.showInfoWindow();
}
circle.setCenter(marker.getPosition());
circle.setRadius(locationTime.location.getAccuracy());
}
marker.setAlpha(getTimeBasedAlpha(locationTime.locationTimestamp));
}
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_map, menu);
MenuItem item = menu.findItem(R.id.action_set_enabled);
EventLog eventLog = EventLog.getInstance(this);
if (eventLog.isEnabled()) {
item.setTitle(R.string.action_disable);
} else {
item.setTitle(R.string.action_enable);
}
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
AlertDialog dialog = new AlertDialog.Builder(this)
.setTitle(R.string.action_settings)
.setMessage("YOU ARE: " + mWru.getUsername() + "\nGROUP KEY: " + mWru.getJoinedTopicKey() +
"\nPASSPHRASE: " + mWru.getJoinedTopicPassphrase())
.show();
return true;
}
if (id == R.id.action_request_updates) {
mWru.requestLocationUpdates();
return true;
}
if (id == R.id.action_leave) {
mWru.leaveTopic();
Intent intent = new Intent(this, MainActivity.class);
intent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP);
startActivity(intent);
this.finish();
return true;
}
if (id == R.id.action_find) {
final String[] users = new String[mMarkerMap.size()];
mMarkerMap.keySet().toArray(users);
ArrayList<String> tmpList = new ArrayList<>();
Collections.addAll(tmpList, users);
Collections.sort(tmpList, new SortIgnoreCase());
tmpList.toArray(users);
final AlertDialog dialog = new AlertDialog.Builder(this)
.setTitle(R.string.action_find)
.setItems(users, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
String user = users[which];
Marker marker = mMarkerMap.get(user);
marker.showInfoWindow();
LatLngBounds.Builder builder = new LatLngBounds.Builder()
.include(marker.getPosition());
Location myLoc = mMap.getMyLocation();
if (myLoc != null) {
builder.include(new LatLng(mMap.getMyLocation().getLatitude(),
mMap.getMyLocation().getLongitude()));
}
mMap.animateCamera(CameraUpdateFactory.newLatLngBounds(builder.build(), 40));
dialog.dismiss();
}
})
.show();
}
if (id == R.id.action_clear_log) {
EventLog.getInstance(this).clear();
}
if (id == R.id.action_view_log) {
List<EventLog.Event> events = EventLog.getInstance(this).listEvents(null, 50);
final String[] eventStrs = new String[events.size()];
for (int i=0; i<events.size(); i++) {
EventLog.Event event = events.get(i);
eventStrs[i] = mFormatter.format(event.timestamp) + " " + event.type + ": " + event.text;
}
final AlertDialog dialog = new AlertDialog.Builder(this)
.setTitle(R.string.action_view_log)
.setItems(eventStrs, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
AlertDialog messageDialog = new AlertDialog.Builder(MapActivity.this)
.setMessage(eventStrs[which])
.show();
}
})
.show();
}
if (id == R.id.action_set_enabled) {
EventLog eventLog = EventLog.getInstance(this);
eventLog.setEnabled(!eventLog.isEnabled());
item.setTitle(eventLog.isEnabled() ? R.string.action_disable : R.string.action_enable);
Toast.makeText(this, "Event log is " + (eventLog.isEnabled() ? "enabled" : "disabled"), Toast.LENGTH_SHORT).show();
}
return super.onOptionsItemSelected(item);
}
private static final float MARKER_MIN_ALPHA = 0.2f;
private static final float MILLIS_IN_HOUR = 60 * 60 * 1000f;
private float getTimeBasedAlpha(long locationTime) {
// we will use a 1 hr window for opacity meaning at 1hr age,
// the marker should be transparent (or at least min alpha)
long age = System.currentTimeMillis() - locationTime;
float calculatedFloat = (MILLIS_IN_HOUR - age) / MILLIS_IN_HOUR;
Log.d(TAG, "getTimeBasedAlpha(): calculated float = " + String.valueOf(calculatedFloat));
return calculatedFloat < MARKER_MIN_ALPHA ? MARKER_MIN_ALPHA : calculatedFloat;
}
public class SortIgnoreCase implements Comparator<Object> {
public int compare(Object o1, Object o2) {
String s1 = (String) o1;
String s2 = (String) o2;
return s1.toLowerCase().compareTo(s2.toLowerCase());
}
}
public void doFragmentClick(View view) {
mMessageListFragment.doFragmentClick(view);
}
    // whether the message list pane is currently expanded
    private boolean mMessageListOpen = false;

    /**
     * Toggle the message list pane open/closed by swapping its layout weight
     * (9 = open, 0 = collapsed). Closing also hides the soft keyboard.
     * NOTE(review): the toggle button background is set when opening but never
     * restored when closing - confirm whether that is intended.
     */
    public synchronized void toggleMessageList(View view) {
        float newWeight;
        if (mMessageListOpen) {
            //hide soft keyboard
            InputMethodManager imm = (InputMethodManager)getSystemService(INPUT_METHOD_SERVICE);
            imm.hideSoftInputFromWindow(view.getWindowToken(), 0);
            newWeight = 0;
        } else {
            mToggleMessageList.setBackgroundColor(getResources().getColor(android.R.color.darker_gray));
            newWeight = 9;
        }
        mMessageListFragment.getView()
                .setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, 0, newWeight));
        mMessageListOpen = !mMessageListOpen;
    }
}
| Add connection status colors.
| WRU/app/src/main/java/com/magnet/wru/MapActivity.java | Add connection status colors. | <ide><path>RU/app/src/main/java/com/magnet/wru/MapActivity.java
<ide> import com.google.android.gms.maps.model.LatLngBounds;
<ide> import com.google.android.gms.maps.model.Marker;
<ide> import com.google.android.gms.maps.model.MarkerOptions;
<add>import com.magnet.mmx.client.api.MMX;
<add>import com.magnet.mmx.client.api.MMXMessage;
<ide> import com.magnet.mmx.client.api.MMXUser;
<ide>
<ide> import java.text.DateFormat;
<ide> import java.util.Collections;
<ide> import java.util.Comparator;
<ide> import java.util.Date;
<add>import java.util.EventListener;
<ide> import java.util.HashMap;
<ide> import java.util.List;
<ide>
<ide> }
<ide> };
<ide>
<add> private MMX.EventListener mEventListener = new MMX.EventListener() {
<add> @Override
<add> public boolean onMessageReceived(MMXMessage mmxMessage) {
<add> return false;
<add> }
<add>
<add> public boolean onLoginRequired(MMX.LoginReason reason) {
<add> updateLoginState();
<add> return false;
<add> }
<add> };
<add>
<ide> @Override
<ide> protected void onCreate(Bundle savedInstanceState) {
<ide> super.onCreate(savedInstanceState);
<ide> .findFragmentById(R.id.fragment_message_list);
<ide>
<ide> MyMessageStore.registerListener(mMessageListListener);
<add> MMX.registerListener(mEventListener);
<add> Handler handler = new Handler();
<add> handler.postDelayed(new Runnable() {
<add> public void run() {
<add> updateLoginState();
<add> }
<add> }, 3000);
<ide> }
<ide>
<ide> protected void onResume() {
<ide> protected void onDestroy() {
<ide> mWru.unregisterOnLocationReceivedListener(mListener);
<ide> MyMessageStore.unregisterListener(mMessageListListener);
<add> MMX.unregisterListener(mEventListener);
<ide> super.onDestroy();
<ide> }
<ide>
<ide> .setLayoutParams(new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, 0, newWeight));
<ide> mMessageListOpen = !mMessageListOpen;
<ide> }
<add>
<add> private void updateLoginState() {
<add> runOnUiThread(new Runnable() {
<add> public void run() {
<add> if (MMX.getCurrentUser() != null) {
<add> //logged in
<add> mToggleMessageList.setBackgroundColor(getResources().getColor(android.R.color.holo_green_light));
<add> } else {
<add> //not logged in
<add> mToggleMessageList.setBackgroundColor(getResources().getColor(android.R.color.holo_red_dark));
<add> }
<add> }
<add> });
<add> }
<ide> } |
|
JavaScript | agpl-3.0 | 51fdd4c46266fef927251a11c6704904715aecbe | 0 | tlevine/rhizi,ozFri/rhizi,cben/rhizi,ozFri/rhizi,cben/rhizi,Rhizi/rhizi,yuvadm/rhizi,tlevine/rhizi,Rhizi/rhizi,ozFri/rhizi,shlomif/rhizi,cben/rhizi,tlevine/rhizi,shlomif/rhizi,Rhizi/rhizi,yuvadm/rhizi,cben/rhizi,yuvadm/rhizi,shlomif/rhizi,yuvadm/rhizi,tlevine/rhizi,ozFri/rhizi,Rhizi/rhizi,shlomif/rhizi | "use strict"
define(['underscore', 'Bacon', 'consts', 'util', 'model/core', 'model/util', 'model/diff', 'rz_api_backend', 'rz_api_mesh', 'history', 'model/types'],
function (_, Bacon, consts, util, model_core, model_util, model_diff, rz_api_backend, rz_api_mesh, history, model_types) {
// aliases
var all_attributes = model_types.all_attributes;
var debug = false;
function Graph(spec) {
var id_to_node_map,
id_to_link_map,
id_to_link_id_set,
diffBus = new Bacon.Bus(),
activityBus = new Bacon.Bus(),
cached_links,
invalidate_links = true,
cached_nodes,
invalidate_nodes = true,
temporary = spec.temporary,
base = spec.base,
server_pending_objects = [],
filtered_types = {}; // set of node types not to show
this.temporary = temporary;
this.base = base;
util.assert(temporary !== undefined && base !== undefined, "missing inputs");
clear();
// All operations done on the graph. When the server is used (i.e. always) this
// bus contains the server events, not the user events (most of the time the same just with delay).
this.diffBus = diffBus;
// Same as diffBus, but includes operations done in the past, loaded when
// the document is loaded (commit log)
this.activityBus = activityBus;
var links_forEach = function (f) {
for (var link_key in id_to_link_map) {
f(id_to_link_map[link_key]);
}
}
var nodes_forEach = function (f) {
for (var node_key in id_to_node_map) {
f(id_to_node_map[node_key]);
}
}
function key_count(obj) {
return _.keys(obj).length;
}
var degree = function (node) {
return key_count(id_to_link_id_set[node.id]);
}
this.degree = degree;
/**
* add node if no previous node is present whose id equals that of the node being added
*
* @return node if node was actually added
*/
// FIXME: is this a good idea? i.e. changing API based on constructor? just in dynamic city
if (temporary) {
this.addTempNode = function(spec) {
return this.__addNode(spec);
}
}
var nodes_to_touched_links = function (node_id_set) {
var touched_links = [];
node_id_set.forEach(function (n_id) {
var n = id_to_node_map[n_id];
links_forEach(function (link) {
if ((link['__src'].equals(n)) || (link['__dst'].equals(n))) { // compare by id
touched_links.push(link);
}
});
});
console.dir(touched_links);
return touched_links.map(function(l){ return l.id; });
}
/**
*
* @param a topo_diff but that might be missing a few things, sanitize it first.
* all sanitation should be idempotent, but probably isn't.
*
* NOTE: currently this function transmits only. Later we want to optimistically
* first commit and then transmit.
*/
var commit_and_tx_diff__topo = function (topo_diff) {
util.assert(temporary === false, "cannot be temporary");
$.merge(topo_diff.link_id_set_rm, nodes_to_touched_links(topo_diff.node_id_set_rm));
topo_diff.node_set_add = topo_diff.node_set_add
.filter(function(n) {
util.assert(undefined !== n.id, "undefined id in node in topo diff");
util.assert(undefined === server_pending_objects[n.id], "cache full at id");
return find_node__by_id(n.id) === null;
})
.map(function(n_spec) {
server_pending_objects[n_spec.id] = n_spec;
return model_util.adapt_format_write_node(n_spec);
});
topo_diff.link_set_add = topo_diff.link_set_add.map(function(l_spec) {
util.assert(l_spec.id !== undefined, "undefined id in link in topo diff");
if (l_spec.source === undefined) {
l_spec.source = l_spec.__src;
}
if (l_spec.target === undefined) {
l_spec.target = l_spec.__dst;
}
if (l_spec.name == undefined) {
// cannot have a zero length name, using name as label in neo4j
l_spec.name = 'is';
}
if (l_spec.__type === undefined) {
l_spec.__type = l_spec.name;
}
server_pending_objects[l_spec.id] = l_spec;
return model_util.adapt_format_write_link(l_spec);
});
// filter already existing nodes now, after we conveniently used them
// for name_to_node map
topo_diff.node_set_add = topo_diff.node_set_add.filter(function(n) {
return !hasNodeByName(n.name);
});
rz_api_backend.commit_diff__topo(topo_diff, __commit_diff_ajax__topo);
}
this.commit_and_tx_diff__topo = commit_and_tx_diff__topo;
/**
* Inner implementation
*
* @param notify whether or not a presenter notification will be sent, default = true
*/
function __addNode(spec) {
var existing_node,
node;
if (undefined == spec.id) {
existing_node = find_node__by_name(spec.name)
if (existing_node){
return existing_node;
} else {
node = model_core.create_node__set_random_id(spec);
if (debug) {
console.log('__addNode: stamping node id: ' + node.id + ', name: \'' + node.name + '\'');
}
}
} else {
node = model_core.create_node_from_spec(spec);
}
existing_node = find_node__by_id(node.id);
if (existing_node) {
console.warn('__addNode: id collision: existing-node.id: \'' + existing_node.id);
return existing_node;
}
util.assert(undefined != node.id, '__addNode: node id missing');
_node_add_helper(node);
if (debug) {
console.log('__addNode: node added: id: ' + node.id + ' state ' + node.state);
}
return node;
}
this.__addNode = __addNode;
var _node_remove_helper = function (node_id) {
util.assert(node_id, "missing node id");
delete id_to_node_map[node_id];
delete id_to_link_id_set[node_id];
invalidate_nodes = true;
}
var _node_add_helper = function (node) {
util.assert(node.id, "missing node id");
id_to_node_map[node.id] = node;
id_to_link_id_set[node.id] = [];
invalidate_nodes = true;
}
var _link_remove_helper = function (link_id) {
var link = id_to_link_map[link_id],
src_id = link.__src.id,
dst_id = link.__dst.id;
util.assert(link_id, "missing link id");
util.assert(link, "non existent link");
delete id_to_link_id_set[src_id][dst_id];
delete id_to_link_id_set[dst_id][src_id];
delete id_to_link_map[link_id];
util.assert(id_to_link_map[link_id] === undefined, "delete failed?!");
invalidate_links = true;
}
var _link_add_helper = function (link) {
var src_id = link.__src.id,
dst_id = link.__dst.id;
util.assert(link.id, "missing link id");
id_to_link_map[link.id] = link;
// link's nodes may not belong to this graph, check first - we add them if required to the id_to_link_id_set only
if (id_to_link_id_set[src_id] === undefined) {
id_to_link_id_set[src_id] = [];
}
if (id_to_link_id_set[dst_id] === undefined) {
id_to_link_id_set[dst_id] = [];
}
id_to_link_id_set[src_id][dst_id] = 1;
id_to_link_id_set[dst_id][src_id] = 1;
invalidate_links = true;
}
var _remove_node_set = function(node_id_set) {
node_id_set.forEach(function (id) {
if (undefined === id_to_node_map[id]) {
console.log("warning: server returned an id we don't have " + id);
return;
}
_node_remove_helper(id);
console.log('_remove_node_set: ' + id);
});
}
this._remove_node_set = _remove_node_set;
function calc_neighbours() {
return _.reduce(get_links(), function(d, link) {
d[link.__src.id].src.push(link);
d[link.__dst.id].dst.push(link);
return d;
}, _.object(_.map(get_nodes(), "id"),
get_nodes().map(function (n) {
return {node: n, src: [], dst: []};
})
));
}
/**
* Visitation constants for neighbourhood and shortest paths computation.
*/
var kind_exit = 1,
kind_enter = 2,
kind_selected = 4;
function kind_to_string(kind) {
switch (kind) {
case kind_exit: return 'exit';
case kind_enter: return 'enter';
case kind_selected: return 'selected';
default:
// TODO: add css for both
return 'exit';
}
}
function BFS(node_id) {
var neighbours = calc_neighbours(),
queue = [node_id],
start_id,
node_ids = get_node_ids(),
V = _.object(node_ids, _.map(node_ids, function (id) {
return {node_id: id, distance: Infinity, prev: {}};
})),
ret = {};
V[node_id].distance = 0;
while ((start_id = queue.shift()) !== undefined) {
var src_ids = _.pluck(_.pluck(neighbours[start_id].src, "__dst"), "id"),
dst_ids = _.pluck(_.pluck(neighbours[start_id].dst, "__src"), "id"),
n_ids = src_ids.concat(dst_ids);
_.each(n_ids, function(next_id) {
var distance = V[start_id].distance + 1;
if (V[next_id].distance >= distance) {
V[next_id].distance = distance;
V[next_id].prev[start_id] = true;
queue.push(next_id);
}
});
}
_.each(_.keys(V), function (k) {
if (V[k].distance !== Infinity) {
ret[k] = V[k];
}
});
return ret;
}
this.BFS = BFS;
/**
* pairs_symmetric
*
* cb will be called for every pair in the input list but only in the order
* lower_index, maybe_higher_index
* where lower_index <= maybe_higher_index (i.e. diagonal is covered).
*
* i.e. for |list| = N, (N + 1) * N / 2 calls are made
*/
function pairs_symmetric(list, cb) {
var i, j, N = list.length;
for (i = 0 ; i < N; ++i) {
for (j = i; j < N ; ++j) {
cb(list[i], list[j]);
}
}
}
/**
* @sources - list of nodes
*
* returns all nodes in the shortest paths between all sources.
*
* returns same dictionary as neighbourhood.
*/
function shortest_paths(sources) {
function make_status(node, distance, prev_nodes) {
return {node: node, distances: distance || 0, prev_nodes: prev_nodes || {}};
}
var ids = _.pluck(sources, 'id'),
bfs = _.object(ids, _.map(ids, BFS)),
nodes = {};
function append_paths(bfs, start_id) {
var queue = [bfs[start_id]],
next,
next_id;
while ((next = queue.shift()) !== undefined) {
next_id = next.node_id;
if (nodes[next_id] === undefined) {
nodes[next_id] = {node_id: next_id, sources: {}};
}
_.each(_.keys(next.prev), function (p) {
nodes[next_id].sources[p] = true;
queue.push(bfs[p]);
});
}
}
pairs_symmetric(ids, function (one, two) {
if (bfs[one][two].distance === Infinity) {
return;
}
append_paths(bfs[one], two);
});
return {
'nodes': _.values(nodes),
'links': []
};
}
this.shortest_paths = shortest_paths;
/**
*
* neighbourhood
*
* @start - list of starting nodes
* @d - radius of neighbours
*
* NOTE: Doesn't handle inter graph links
* NOTE: return doesn't include original nodes
*
* @return - {
* 'nodes': [{
* node: node,
* kind: kind,
* sources: {node_id: true}
* }]
* 'links: [{link: link, kind: kind}]
* }
*
* kind: exit/enter
*
* TODO: implement for d !== 1
*
*/
this.neighbourhood = function(start, d) {
var ret = {'nodes':[], 'links':[]};
function addNode(node) {
if (start.filter(function (n) { return n.id == node.id; }).length == 1) {
return;
}
ret.nodes.push(node);
}
function get_name(node) {
// XXX: using lowercase name comparison instead of id because nodes may be stale
return node.name.toLowerCase();
}
if (start === undefined) {
console.log('neighbourhood: bug: called with undefined node');
return;
}
if (d > 1) {
console.log('neighbourhood: bug: not implemented for d == ' + d);
}
if (d === 0) {
// 0 depth is empty group of nodes and links
return ret;
}
d = d || 1;
if (start.length === undefined) {
console.log('neighbourhood: expected array');
return ret;
}
function make_status(kind, node) {
return {node: node, kind: kind, links: [], depth: Infinity, sources: {}};
}
var nodes = get_nodes(),
links = get_links(),
neighbours = calc_neighbours(),
visited = _.object(_.map(start, get_name),
_.map(start, _.partial(make_status, kind_selected)));
function visit(source, link, getter, kind, depth) {
var node = getter(link),
name = get_name(node),
data = visited[name];
if (data === undefined) {
data = visited[name] = make_status(0, node);
}
data.kind |= kind;
data.links.push({link: link, kind: kind});
data.depth = Math.min(data.depth, depth);
data.sources[source.id] = true;
return data;
}
_.each(start, function (node) {
var N = neighbours[node.id];
_.each(N.src, function (link) {
visit(node, link, function (link) { return link.__dst; }, kind_enter);
});
_.each(N.dst, function (link) {
visit(node, link, function (link) { return link.__src; }, kind_exit);
});
});
_.values(visited).forEach(function (data) {
var node = data.node,
kind = data.kind,
links = data.links;
if ((kind & kind_selected) === kind_selected) {
return;
}
ret.nodes.push({type: kind_to_string(kind), node: node, sources: data.sources});
_.each(links, function (data) {
ret.links.push({link: data.link, kind: kind_to_string(kind)});
});
});
return ret;
}
/* compareSubset:
* state: one of the optional states that defines a subgraph
* new_nodes: array of objects with name
* new_links: array of length two arrays [source_name, target_name]
* returns: true if current and new graph are homomorphic up to
* a single node id change. false otherwise
*/
this.compareSubset = function(state, new_nodes, new_links) {
var state_nodes = find_nodes__by_state(state);
var state_links = find_links__by_state(state).map(function(link) {
return [link.__src.name, link.__dst.name];
}).sort();
var k;
var state_source, state_target, new_source, new_target;
var changed_nodes;
var verbose = false; // XXX should be global.
var set_old_name, set_new_name;
new_nodes.map(function (f) {
if (!f.name) {
console.log('missing name on node. node follows');
console.log(f);
}
});
new_nodes.sort();
new_links.sort();
if (new_nodes.length != state_nodes.length || new_links.length != state_links.length) {
if (verbose) {
console.log('not same size: new/old ' + new_nodes.length + ' / ' + state_nodes.length + '; ' +
new_links.length + ' / ' + state_links.length);
}
return {graph_same: false};
}
changed_nodes = util.set_diff(util.set_from_array(state_nodes.map(function(d) { return d.name; })),
util.set_from_array(new_nodes.map(function (f) { return f.name; })));
// we allow any number of changed nodes as long as we it is 1 or 2 :)
if (changed_nodes.a_b.length > 2) {
if (verbose) {
console.log('changed too many nodes');
console.log(changed_nodes);
}
return {graph_same: false};
}
set_old_name = util.set_from_array(changed_nodes.a_b);
set_new_name = util.set_from_array(changed_nodes.b_a);
for (k = 0 ; k < state_links.length ; ++k) {
state_source = state_links[k][0];
state_target = state_links[k][1];
new_source = new_links[k][0];
new_target = new_links[k][1];
if ((state_source !== new_source &&
!(state_source in set_old_name && new_source in set_new_name))
||
(state_target !== new_target &&
!(state_target in set_old_name && new_target in set_new_name))) {
if (verbose) {
console.log('not same link: ' +
state_source + '->' + state_target + ' != ' +
new_source + '->' + new_target);
console.log('state_source === new_source: ' + String(state_source === new_source));
console.log('state_target === new_target: ' + String(state_target === new_target));
console.log(set_old_name);
console.log(set_new_name);
}
return {graph_same: false};
}
}
return {graph_same: true, old_name: changed_nodes.a_b, new_name: changed_nodes.b_a};
}
function __addLink(link) {
var trimmed_name = link.name.trim();
util.assert(link instanceof model_core.Link);
if (link.name.length != trimmed_name.length) {
console.log('bug: __addLink with name containing spaces - removing before sending to server');
}
link.name = trimmed_name;
var existing_link = findLink(link.__src.id, link.__dst.id, link.name);
if (undefined == existing_link) {
_link_add_helper(link);
} else {
existing_link.name = link.name;
existing_link.state = link.state;
}
}
// FIXME: good idea to add API based on constructor parameter?
if (temporary) {
this.addTempLink = function (link) {
return __addLink(link);
}
}
this.update_link = function(link, new_link_spec, on_success, on_error) {
util.assert(link instanceof model_core.Link);
// TODO - fake api for client only (debug, demo, ui work)
if (!rz_config.backend_enabled) return;
var attr_diff = model_diff.new_attr_diff();
for (var key in new_link_spec) {
attr_diff.add_link_attr_write(link.id, key, new_link_spec[key]);
}
var on_ajax_success = function(attr_diff_spec) {
var attr_diff = model_util.adapt_format_read_diff__attr(attr_diff_spec);
var id_to_link_map = attr_diff.id_to_link_map
var l_id = link.id; // original node id
util.assert(id_to_link_map && id_to_link_map[l_id], "bad return value from ajax");
var ret_link = id_to_link_map[l_id];
for (var key in ret_link['__attr_write']){
link[key] = ret_link['__attr_write'][key];
}
for (var key in ret_link['__attr_remove']){
delete link[key];
}
// TODO: handle NAK: add problem emblem to link
if (on_success !== undefined) {
on_success();
}
diffBus.push(attr_diff);
};
var on_ajax_error = function(){
console.log('error with commit to server: danger robinson!');
};
rz_api_backend.commit_diff__attr(attr_diff, on_ajax_success, on_ajax_error);
}
this.update_node = function(node, new_node_spec) {
util.assert(node instanceof model_core.Node);
// TODO - fake api for client only (debug, demo, ui work)
if (!rz_config.backend_enabled) return;
if (new_node_spec.name !== undefined && node.name != new_node_spec.name){
/*
* handle name update collision: suggest removal first
*/
var n_eq_name = find_node__by_name(new_node_spec.name);
if (null !== n_eq_name && n_eq_name !== node) {
if (window.confirm('really merge ' + node.name + ' into ' + n_eq_name.name + '?')) {
return nodes__merge([n_eq_name.id, node.id]);
} else {
return; // do nothing
}
}
node['name'] = new_node_spec['name']; // [!] may still fail due to server NAK
}
var attr_diff = model_diff.new_attr_diff();
for (var key in new_node_spec) {
attr_diff.add_node_attr_write(node.id, key, new_node_spec[key]);
}
var on_ajax_success = function(attr_diff_spec){
var attr_diff = model_util.adapt_format_read_diff__attr(attr_diff_spec);
var id_to_node_map = attr_diff.id_to_node_map
var n_id = node.id; // original node id
util.assert(id_to_node_map && id_to_node_map[n_id], "bad return value from ajax");
var ret_node = id_to_node_map[n_id];
for (var key in ret_node['__attr_write']){
node[key] = ret_node['__attr_write'][key];
}
for (var key in ret_node['__attr_remove']){
delete node[key];
}
diffBus.push(attr_diff);
};
var on_ajax_error = function(){
console.log('error with commit to server: danger robinson!');
};
rz_api_backend.commit_diff__attr(attr_diff, on_ajax_success, on_ajax_error);
}
var update_node = this.update_node;
this.editNameByName = function(old_name, new_name) {
var node = find_node__by_name(old_name);
if (node === undefined) {
console.log('editNameByName: error: cannot find node with name ' + old_name);
return;
}
return this.editName(node.id, new_name);
}
this.editName = function(id, new_name) {
var n_eq_name = find_node__by_name(new_name);
var n_eq_id = find_node__by_id(id);
var acceptReplace=true;
if (n_eq_id === undefined) {
return;
}
if (n_eq_id.name == new_name) {
return;
}
util.assert(temporary, 'editName should now only be used on temporary graphs');
util.assert(n_eq_name === undefined);
n_eq_id.name = new_name;
}
/**
* editType:
*
* @return true if type changed
*/
this.editType = function(id, newtype) {
return this._editProperty(id, 'type', newtype);
}
function new_attr_diff_prop_value(id, prop, value)
{
var diff = model_diff.new_attr_diff();
diff.add_node_attr_write(id, prop, value);
return diff;
}
this._editProperty = function(id, prop, value) {
var n = find_node__by_id(id),
local = find_node__by_id(id, false);
if ((n === undefined)) {
return false;
}
if (local === null) {
return base._editProperty(id, prop, value);
}
if (temporary) {
n[prop] = value;
diffBus.push(new_attr_diff_prop_value(id, prop, value));
} else {
// FIXME: should not do a server roundtrip, should keep this data local
// and part of the temporary graph, and send it on user enter in a single commit.
// The current implementation is just a quick way to get sorta the same outcome.
// it misses atomicity (since we create a commit for every tab click on an existing node),
// and responsiveness (since there is a roundtrip to the server and it isn't client side)
var props = {};
props[prop] = value;
update_node(n, props);
}
return true;
}
this.links__delete = function(link_ids) {
var topo_diff = model_diff.new_topo_diff({link_id_set_rm: link_ids});
this.commit_and_tx_diff__topo(topo_diff);
}
this.nodes__delete = function(node_ids) {
var topo_diff = model_diff.new_topo_diff({node_id_set_rm: node_ids});
this.commit_and_tx_diff__topo(topo_diff);
}
/**
* links the first node in the list to the rest of the list.
*/
var nodes__link_fan = function(node_ids) {
util.assert(node_ids.length > 1); // strictly speaking we can also treat 1 as correct usage
var src_id = node_ids[0],
src_node = find_node__by_id(src_id),
added_links = node_ids.slice(1).map(function (tgt_id) {
return model_core.create_link__set_random_id(src_node, find_node__by_id(tgt_id),
{name: consts.EMPTY_LINK_NAME});
});
commit_and_tx_diff__topo(model_diff.new_topo_diff({link_set_add: added_links}));
};
this.nodes__link_fan = nodes__link_fan;
var nodes__merge = function(node_ids) {
util.assert(node_ids.length > 1); // strictly speaking we can also treat 1 as correct usage
var merged = _.rest(node_ids);
var merge_node_id = node_ids[0];
var merge_node = find_node__by_id(merge_node_id);
var topo_diff;
util.assert(merge_node != null);
var added_links = _.flatten(_.map(merged, function (node_id) {
var src_links = find_link__by_src_id(node_id)
.filter(function (src_link) { return src_link.__dst.id !== merge_node_id; })
.map(function (src_link) {
return model_core.create_link__set_random_id(merge_node, src_link.__dst, {
name: src_link.name,
});
});
var dst_links = find_link__by_dst_id(node_id)
.filter(function (dst_link) { return dst_link.__src.id !== merge_node_id; })
.map(function (dst_link) {
return model_core.create_link__set_random_id(dst_link.__src, merge_node, {
name: dst_link.name,
});
});
return _.union(src_links, dst_links);
}));
topo_diff = model_diff.new_topo_diff({
link_set_add: added_links,
node_id_set_rm: merged});
commit_and_tx_diff__topo(topo_diff);
};
this.nodes__merge = nodes__merge;
var _remove_link_set = function(link_id_set) {
link_id_set.forEach(function (id) {
var link = id_to_link_map[id];
if (undefined === link) {
console.log("warning: server returned an id we don't have " + id);
return;
}
_link_remove_helper(id);
console.log('_remove_link_set: ' + id);
});
}
this._remove_link_set = _remove_link_set;
this.nodes_rm = function(state) {
var node_ids = get_nodes().filter(function (n) { return n.state == state; })
.map(function (n) { return n.id; }),
topo_diff = model_diff.new_topo_diff({
node_id_set_rm : node_ids,
});
this.commit_and_tx_diff__topo(topo_diff);
}
var findLink = function(src_id, dst_id, name) {
var link_key, link;
for (link_key in id_to_link_map) {
link = id_to_link_map[link_key];
if (link.__src.id === src_id && link.__dst.id === dst_id) {
return link;
}
}
}
var find_links__by_nodes = function(nodes) {
var ids = util.set_from_array(_.pluck(nodes, "id"));
return get_links().filter(function (link) {
return ids[link.__src.id] && ids[link.__dst.id];
});
}
this.find_links__by_nodes = find_links__by_nodes;
var find_links__by_state = function(state) {
var foundLinks = [];
links_forEach(function (link) {
if (link.state == state) {
foundLinks.push(link);
}
});
return foundLinks;
}
var compareNames = function(name1, name2) {
return name1.toLowerCase() === name2.toLowerCase();
};
var hasNodeByName = function(name, state) {
return get_nodes().filter(function (n) {
return compareNames(n.name, name) && (undefined === state || n.state === state);
}).length > 0;
}
this.hasNodeByName = hasNodeByName;
/**
* return node whose id matches the given id or undefined if no node was found
*/
var find_node__by_id = function(id, recursive) {
// default to recursion
recursive = recursive === undefined ? true : recursive;
if (recursive && base) {
var base_node = base.find_node__by_id(id, recursive);
if (base_node) {
return base_node;
}
}
return id_to_node_map[id] || null;
}
this.find_node__by_id = find_node__by_id;
var find_nodes__by_id = function(ids, recursive) {
return _.map(ids, function (id) { return find_node__by_id(id, recursive); });
}
this.find_nodes__by_id = find_nodes__by_id;
/**
* @param filter: must return true in order for node to be included in the returned set
*/
var find_nodes__by_filter = function(filter) {
var ret = [];
nodes.map(function(n){
if (true == filter(n)){
ret.push(n);
}
});
return ret;
}
/**
* @param id unique id of link
* @return Link with given id
*/
var find_link__by_id = function(id, recursive) {
// default to recursion
recursive = recursive === undefined ? true : recursive;
if (recursive && base) {
var base_link = base.find_link__by_id(id, recursive);
if (base_link) {
return base_link;
}
}
return id_to_link_map[id] || null;
}
this.find_link__by_id = find_link__by_id;
/**
* @param id of source node
* @return array of links whose source node is id
* FIXME: none O(E) implementation (used by merge)
*/
var find_link__by_src_id = function(src_id) {
return _.filter(get_links(), function (link) { return link.__src.id == src_id; });
}
this.find_link__by_src_id = find_link__by_src_id;
/**
* @param id of destination node
* @return array of links whose destination node is id
* FIXME: none O(E) implementation (used by merge)
*/
var find_link__by_dst_id = function(dst_id) {
return _.filter(get_links(), function (link) { return link.__dst.id == dst_id; });
}
this.find_link__by_dst_id = find_link__by_dst_id;
var find_node__by_name = function(name, recursive) {
// default to recursion
recursive = recursive === undefined ? true : recursive;
if (recursive && base) {
var node = base.find_node__by_name(name, true);
if (node !== null) {
return node;
}
}
for (var k in id_to_node_map) {
if (compareNames(id_to_node_map[k].name, name)) {
return id_to_node_map[k];
}
}
return null;
}
this.find_node__by_name = find_node__by_name;
var find_nodes__by_state = function(state) {
var foundNodes = [];
nodes_forEach(function (node) {
if (node.state === state) {
foundNodes.push(node);
}
});
return foundNodes;
}
function clear(push_diff) {
push_diff = push_diff === undefined ? true : push_diff;
id_to_node_map = {};
id_to_link_map = {};
id_to_link_id_set = {};
invalidate_links = true;
invalidate_nodes = true;
if (push_diff) {
diffBus.push({}); // FIXME: better value
}
}
this.clear = clear;
function empty() {
// FIXME: O(|nodes|+|links|)
return get_nodes().length == 0 && get_links().length == 0;
}
this.empty = empty;
// @ajax-trans
this.commit_diff_set = function (diff_set) {
function on_success(data){
console.log('commit_diff_set:on_success: TODO impl');
}
rz_api_mesh.broadcast_possible_next_diff_block(diff_set);
}
function on_backend__node_add(n_spec) {
n_spec = model_util.adapt_format_read_node(n_spec);
util.assert(undefined != n_spec.id, 'load_from_backend: n_spec missing id');
return n_spec;
}
function on_backend__link_add(l_spec) {
var src_id = l_spec.__src_id,
dst_id = l_spec.__dst_id,
l_ptr = model_util.adapt_format_read_link_ptr(l_spec);
util.assert(undefined !== l_ptr.id, 'load_from_backend: l_ptr missing id');
util.assert(undefined !== src_id, 'load_from_backend: link missing __src_id');
util.assert(undefined !== dst_id, 'load_from_backend: link missing __dst_id');
// cleanup & reuse as link_spec
delete l_ptr.__src_id;
delete l_ptr.__dst_id;
var link_spec = l_ptr;
link_spec.__src = find_node__by_id(src_id);
link_spec.__dst = find_node__by_id(dst_id);
if (null === link_spec.__src) {
util.log_error("src_id not found: " + src_id);
return null;
}
if (null === link_spec.__dst) {
util.log_error("dst_id not found: " + dst_id);
return null;
}
return link_spec;
}
function __commit_diff_ajax__clone(clone) {
var topo = clone[0],
commits = clone[1].reverse(), // [!] received in new to old order, need them reversed
node_specs = topo.node_set_add.map(on_backend__node_add),
nodes = _add_node_set(node_specs),
link_specs = topo.link_set_add.map(on_backend__link_add).filter(function (link_spec) {
return link_spec !== null;
}),
links = _add_link_set(link_specs);
commits.forEach(function (commit) {
activityBus.push(commit);
});
diffBus.push({node_set_add: nodes, link_set_add: links});
}
function __commit_diff_ajax__topo(diff) {
diff.node_set_add = diff.node_id_set_add.map(_get_server_pending);
diff.link_set_add = diff.link_id_set_add.map(_get_server_pending);
commit_diff__topo(diff);
}
function _get_server_pending(id) {
// FIXME: should track cache
var spec;
util.assert(undefined !== server_pending_objects[id]);
spec = server_pending_objects[id];
delete server_pending_objects[id];
return spec;
}
function _add_node_set(node_specs) {
return node_specs.map(function (node_spec) {
__addNode(node_spec);
});
}
function _add_link_set(link_specs) {
return link_specs.map(function (link_spec) {
// resolve link ptr
var src = (undefined !== link_spec.__src && null !== link_spec.__src &&
(find_node__by_id(link_spec.__src.id) || link_spec.__src)) ||
find_node__by_id(link_spec.__src_id),
dst = (undefined !== link_spec.__dst && null !== link_spec.__dst &&
(find_node__by_id(link_spec.__dst.id) || link_spec.__dst)) ||
find_node__by_id(link_spec.__dst_id),
link = model_core.create_link_from_spec(src, dst, link_spec);
__addLink(link);
});
}
/*
* Inputs are specs, not raw - after adaptation from the on wire format.
*
* FIXME: use a different object? different properties in the same object?
*/
function commit_diff__topo(diff) {
_add_node_set(diff.node_set_add);
_add_link_set(diff.link_set_add);
// done under protest
_remove_link_set(diff.link_id_set_rm);
_remove_node_set(diff.node_id_set_rm);
diffBus.push(diff);
}
this.commit_diff__topo = commit_diff__topo;
/**
 * Apply a Attr_Diff:
 * - commit diff to the local graph instanse
 * - emit a diffBus event
 *
 * This function should not trigger remote transmission of diff object
 */
function commit_diff__attr(attr_diff) {
    util.assert(model_diff.is_attr_diff(attr_diff), 'commit_diff__attr: argument type != Attr_Diff');
    // process nodes
    attr_diff.for_each_node(function(n_id, n_attr_diff) {
        var node = id_to_node_map[n_id];
        // unknown ids are tolerated (e.g. diff for a node this client
        // never loaded) - log and skip rather than fail
        if (undefined == node) {
            console.warn('commit_diff__attr: incoming attr diff for non-existing node, discarding');
            return;
        }
        // apply attr writes: node
        var count_w = 0;
        for (var attr_key in n_attr_diff['__attr_write']) {
            var attr_value = n_attr_diff['__attr_write'][attr_key];
            node[attr_key] = attr_value; // write each new attr update
            count_w = count_w + 1;
        };
        // apply attr removals: node
        var count_d = 0;
        for (var attr_key in n_attr_diff['__attr_remove']) {
            delete node[attr_key]; // apply each attr removal
            count_d = count_d + 1;
        };
        console.log('commit_diff__attr: n_id: \'' + n_id + '\', write-count: ' + count_w + ', rm-count: ' + count_d);
    });
    // process links - same write/remove scheme as nodes above
    attr_diff.for_each_link(function(l_id, n_attr_diff) {
        var link = id_to_link_map[l_id];
        if (undefined == link) {
            console.warn('commit_diff__attr: incoming attr diff for non-existing link, discarding');
            return;
        }
        // apply attr writes: link
        var count_w = 0;
        for (var attr_key in n_attr_diff['__attr_write']) {
            var attr_value = n_attr_diff['__attr_write'][attr_key];
            link[attr_key] = attr_value; // write each new attr update
            count_w = count_w + 1;
        };
        // apply attr removals: link
        var count_d = 0;
        for (var attr_key in n_attr_diff['__attr_remove']) {
            delete link[attr_key]; // apply each attr removal
            count_d = count_d + 1;
        };
        console.log('commit_diff__attr: l_id: \'' + l_id + '\', write-count: ' + count_w + ', rm-count: ' + count_d);
    });
    diffBus.push(attr_diff);
}
this.commit_diff__attr = commit_diff__attr;
/**
 * Perform the initial document load from the backend.
 *
 * @param on_success: optional; invoked after the cloned document has been
 *                    committed locally (used by MVP presentors to trigger UI update)
 * @param on_error: forwarded verbatim to the ajax layer
 */
// @ajax-trans
function load_from_backend(on_success, on_error) {
    rz_api_backend.rzdoc_clone(function (clone) {
        __commit_diff_ajax__clone(clone);
        if (undefined != on_success) {
            on_success();
        }
    }, on_error);
}
this.load_from_backend = load_from_backend;
/**
 * Build a Topo_Diff from externally supplied node/link dicts (e.g. a JSON
 * import): every node gets a freshly minted random id, and link endpoints
 * are remapped from the external ids to the new ones.
 *
 * Fix: the two bookkeeping statements after node creation were joined by
 * an accidental comma operator (`... = node.id,`); now separate statements.
 */
var new_topo_diff__from_nodes_links = function (nodes, links) {
    var diff = model_diff.new_topo_diff(),
        node_by_id = {},
        old_id_to_new_id = {};
    diff.node_set_add = nodes.map(function(ext_spec) {
        var node_spec = {
                // fall back to the external id when no name is given
                name: ext_spec.name ? ext_spec.name : ext_spec.id,
                type: ext_spec.type,
                x: ext_spec.x,
                y: ext_spec.y,
            },
            node;
        all_attributes.forEach(function (attr) {
            node_spec[attr] = ext_spec[attr];
        });
        // revive serialized dates, when present
        if (ext_spec.start) {
            node_spec.start = new Date(ext_spec.start);
        }
        if (ext_spec.end) {
            node_spec.end = new Date(ext_spec.end);
        }
        node = model_core.create_node__set_random_id(node_spec);
        old_id_to_new_id[ext_spec.id] = node.id;
        node_by_id[node.id] = node;
        return node;
    });
    diff.link_set_add = links.map(function (link_spec) {
        var src = node_by_id[old_id_to_new_id[link_spec.__src]],
            dst = node_by_id[old_id_to_new_id[link_spec.__dst]],
            link = model_core.create_link__set_random_id(src, dst, {
                name: link_spec.name,
                state: 'perm', // FIXME: this is meaningless now with graph separation
            });
        link.__src_id = src.id;
        link.__dst_id = dst.id;
        return link;
    });
    return diff;
}
this.new_topo_diff__from_nodes_links = new_topo_diff__from_nodes_links;
// Import a serialized document (see save_to_json) and merge it into the
// graph via a single committed+transmitted topo diff.
this.load_from_json = function(json) {
    var parsed = JSON.parse(json);
    if (parsed === null) {
        console.log('load callback: no data to load');
        return;
    }
    // FIXME: prompt for replace/merge; now defaulting to merge
    var diff = new_topo_diff__from_nodes_links(parsed.nodes, parsed.links);
    commit_and_tx_diff__topo(diff);
}
// Serialize the visible graph to a JSON string: nodes carry id, position
// and every model attribute; links carry endpoint ids plus name.
this.save_to_json = function() {
    var out = {"nodes": [], "links": []};
    get_nodes().forEach(function (node) {
        var entry = {id: node.id, x: node.x, y: node.y};
        all_attributes.forEach(function (attr) {
            entry[attr] = node[attr];
        });
        out.nodes.push(entry);
    });
    get_links().forEach(function (link) {
        out.links.push({
            "__src": link.__src.id,
            "__dst": link.__dst.id,
            "name": link.name
        });
    });
    return JSON.stringify(out);
}
// Attach a user to this graph and create its undo/redo History,
// wired to the svg zoom group element.
this.set_user = function(user) {
    var elem = $('svg g.zoom')[0];
    this.user = user;
    this.history = new history.History(this.user, this, elem);
}
// Wipe the undo/redo history; a no-op when set_user was never called
// (i.e. this.history does not exist yet).
function clear_history() {
    if (undefined !== this.history) {
        this.history.clear();
    }
}
this.clear_history = clear_history;
// Cached accessor for the node list, excluding nodes whose type is in
// filtered_types; the cache is rebuilt lazily after invalidation.
var get_nodes = function() {
    if (invalidate_nodes || undefined === cached_nodes) {
        cached_nodes = _.filter(id_to_node_map, function (node) {
            return undefined === filtered_types[node.type];
        });
        invalidate_nodes = false;
    }
    return cached_nodes;
};
this.nodes = get_nodes;
// All node ids, including nodes hidden by the type filter.
var get_node_ids = function() {
    return Object.keys(id_to_node_map);
}
this.get_node_ids = get_node_ids;
// Cached accessor for the link list; a link is hidden whenever either of
// its endpoints has a filtered-out type.
var get_links = function() {
    if (invalidate_links || undefined === cached_links) {
        cached_links = _.filter(id_to_link_map, function (link) {
            return undefined === filtered_types[link.__src.type] &&
                   undefined === filtered_types[link.__dst.type];
        });
        invalidate_links = false;
    }
    return cached_links;
};
this.links = get_links;
// Select nodes/links by predicate. Either visitor may be omitted (its
// result set is then empty); with neither given, returns undefined.
this.find__by_visitors = function(node_visitor, link_visitor) {
    if (!node_visitor && !link_visitor) {
        return;
    }
    return {
        nodes: node_visitor ? get_nodes().filter(node_visitor) : [],
        links: link_visitor ? get_links().filter(link_visitor) : [],
    };
}
// Flag every node whose name matches one of `names` (case-insensitive)
// as 'related', after clearing any previous marking.
function markRelated(names) {
    removeRelated();
    nodes_forEach(function (node) {
        var matched = names.some(function (name) {
            return compareNames(node.name, name);
        });
        if (matched) {
            node.state = 'related';
        }
    });
}
this.markRelated = markRelated;
// Revert all 'related'-marked nodes back to the 'perm' state.
// FIXME: related should use separate variable, not overload 'state' (bad bad bad)
function removeRelated() {
    nodes_forEach(function (node) {
        if ('related' === node.state) {
            node.state = 'perm';
        }
    });
}
this.removeRelated = removeRelated;
// Replace the node-type filter set and invalidate both caches so the
// next nodes()/links() call rebuilds them against the new filter.
this.node__set_filtered_types = function (new_filtered_types) {
    filtered_types = new_filtered_types;
    invalidate_nodes = true;
    invalidate_links = true;
}
}
// A model item is a node iff it lacks a link's __src endpoint field.
function is_node(item)
{
    return undefined === item.__src;
}
// A model item is a link iff it carries a __src endpoint field.
function is_link(item)
{
    return undefined !== item.__src;
}
return {
Graph: Graph,
is_node: is_node,
is_link: is_link,
};
});
| src/client/model/graph.js | "use strict"
define(['underscore', 'Bacon', 'consts', 'util', 'model/core', 'model/util', 'model/diff', 'rz_api_backend', 'rz_api_mesh', 'history', 'model/types'],
function (_, Bacon, consts, util, model_core, model_util, model_diff, rz_api_backend, rz_api_mesh, history, model_types) {
// aliases
var all_attributes = model_types.all_attributes;
// module-wide switch for verbose tracing (used by __addNode logging)
var debug = false;
function Graph(spec) {
// Private per-instance state. id_to_link_id_set maps node id ->
// {neighbour id: 1}, maintained by the link add/remove helpers and used
// by degree(). server_pending_objects caches specs keyed by id between
// transmit and server ack (see commit_and_tx_diff__topo).
var id_to_node_map,
    id_to_link_map,
    id_to_link_id_set,
    diffBus = new Bacon.Bus(),
    activityBus = new Bacon.Bus(),
    cached_links,
    invalidate_links = true,
    cached_nodes,
    invalidate_nodes = true,
    temporary = spec.temporary,
    base = spec.base,
    server_pending_objects = [],
    filtered_types = {}; // set of node types not to show
this.temporary = temporary;
this.base = base;
util.assert(temporary !== undefined && base !== undefined, "missing inputs");
clear();
// All operations done on the graph. When the server is used (i.e. always) this
// bus contains the server events, not the user events (most of the time the same just with delay).
this.diffBus = diffBus;
// Same as diffBus, but includes operations done in the past, loaded when
// the document is loaded (commit log)
this.activityBus = activityBus;
// Apply f to every link in the graph (unfiltered).
var links_forEach = function (f) {
    Object.keys(id_to_link_map).forEach(function (key) {
        f(id_to_link_map[key]);
    });
}
// Apply f to every node in the graph (unfiltered).
var nodes_forEach = function (f) {
    Object.keys(id_to_node_map).forEach(function (key) {
        f(id_to_node_map[key]);
    });
}
function key_count(obj) {
return _.keys(obj).length;
}
var degree = function (node) {
return key_count(id_to_link_id_set[node.id]);
}
this.degree = degree;
/**
 * add node if no previous node is present whose id equals that of the node being added
 *
 * @return node if node was actually added
 */
// FIXME: is this a good idea? i.e. changing API based on constructor? just in dynamic city
// addTempNode only exists on graphs constructed with spec.temporary == true
if (temporary) {
    this.addTempNode = function(spec) {
        return this.__addNode(spec);
    }
}
// Collect the ids of all links having at least one endpoint in
// node_id_set (used to cascade link removal when nodes are deleted).
// Fix: removed a leftover `console.dir(touched_links)` debug dump that
// fired on every node removal.
// FIXME: O(|node_id_set| * |links|) scan.
var nodes_to_touched_links = function (node_id_set) {
    var touched_links = [];
    node_id_set.forEach(function (n_id) {
        var n = id_to_node_map[n_id];
        links_forEach(function (link) {
            if ((link['__src'].equals(n)) || (link['__dst'].equals(n))) { // compare by id
                touched_links.push(link);
            }
        });
    });
    return touched_links.map(function (l) { return l.id; });
}
/**
 *
 * @param a topo_diff but that might be missing a few things, sanitize it first.
 * all sanitation should be idempotent, but probably isn't.
 *
 * NOTE: currently this function transmits only. Later we want to optimistically
 * first commit and then transmit.
 */
var commit_and_tx_diff__topo = function (topo_diff) {
    util.assert(temporary === false, "cannot be temporary");
    // deleting a node implicitly deletes all links touching it
    $.merge(topo_diff.link_id_set_rm, nodes_to_touched_links(topo_diff.node_id_set_rm));
    topo_diff.node_set_add = topo_diff.node_set_add
        .filter(function(n) {
            util.assert(undefined !== n.id, "undefined id in node in topo diff");
            util.assert(undefined === server_pending_objects[n.id], "cache full at id");
            // only nodes not already present locally are sent
            return find_node__by_id(n.id) === null;
        })
        .map(function(n_spec) {
            // remember the spec until the server acks with its id
            server_pending_objects[n_spec.id] = n_spec;
            return model_util.adapt_format_write_node(n_spec);
        });
    topo_diff.link_set_add = topo_diff.link_set_add.map(function(l_spec) {
        util.assert(l_spec.id !== undefined, "undefined id in link in topo diff");
        if (l_spec.source === undefined) {
            l_spec.source = l_spec.__src;
        }
        if (l_spec.target === undefined) {
            l_spec.target = l_spec.__dst;
        }
        if (l_spec.name == undefined) {
            // cannot have a zero length name, using name as label in neo4j
            l_spec.name = 'is';
        }
        if (l_spec.__type === undefined) {
            l_spec.__type = l_spec.name;
        }
        server_pending_objects[l_spec.id] = l_spec;
        return model_util.adapt_format_write_link(l_spec);
    });
    // filter already existing nodes now, after we conveniently used them
    // for name_to_node map
    // NOTE(review): no name_to_node map is built in this function -
    // confirm this comment still reflects reality
    topo_diff.node_set_add = topo_diff.node_set_add.filter(function(n) {
        return !hasNodeByName(n.name);
    });
    var graph_on_error = function(error) {
        console.log('error:');
        console.dir(error);
    }
    // server ack is applied locally via __commit_diff_ajax__topo
    rz_api_backend.commit_diff__topo(topo_diff, __commit_diff_ajax__topo, graph_on_error);
}
this.commit_and_tx_diff__topo = commit_and_tx_diff__topo;
/**
 * Inner implementation: add a node to the local maps.
 *
 * Spec without an id: reuse an existing node of the same name when found,
 * otherwise mint a random id. Spec with an id: id collisions return the
 * existing node unchanged.
 *
 * @param notify whether or not a presenter notification will be sent, default = true
 * @return the added (or pre-existing) node
 */
function __addNode(spec) {
    var existing_node,
        node;
    if (undefined == spec.id) {
        existing_node = find_node__by_name(spec.name)
        if (existing_node){
            return existing_node;
        } else {
            node = model_core.create_node__set_random_id(spec);
            if (debug) {
                console.log('__addNode: stamping node id: ' + node.id + ', name: \'' + node.name + '\'');
            }
        }
    } else {
        node = model_core.create_node_from_spec(spec);
    }
    existing_node = find_node__by_id(node.id);
    if (existing_node) {
        console.warn('__addNode: id collision: existing-node.id: \'' + existing_node.id);
        return existing_node;
    }
    util.assert(undefined != node.id, '__addNode: node id missing');
    _node_add_helper(node);
    if (debug) {
        console.log('__addNode: node added: id: ' + node.id + ' state ' + node.state);
    }
    return node;
}
this.__addNode = __addNode;
// Drop a node and its neighbour set from the internal maps and
// invalidate the node cache. Does NOT touch id_to_link_map.
var _node_remove_helper = function (node_id) {
    util.assert(node_id, "missing node id");
    delete id_to_node_map[node_id];
    delete id_to_link_id_set[node_id];
    invalidate_nodes = true;
}
// Register a node in the internal maps with an empty neighbour set and
// invalidate the node cache.
var _node_add_helper = function (node) {
    util.assert(node.id, "missing node id");
    id_to_node_map[node.id] = node;
    id_to_link_id_set[node.id] = [];
    invalidate_nodes = true;
}
// Drop a link from the internal maps and the endpoint neighbour sets.
// NOTE: assumes id_to_link_id_set entries for both endpoints still exist;
// node removal must therefore happen after link removal.
var _link_remove_helper = function (link_id) {
    var link = id_to_link_map[link_id],
        src_id = link.__src.id,
        dst_id = link.__dst.id;
    util.assert(link_id, "missing link id");
    util.assert(link, "non existent link");
    delete id_to_link_id_set[src_id][dst_id];
    delete id_to_link_id_set[dst_id][src_id];
    delete id_to_link_map[link_id];
    util.assert(id_to_link_map[link_id] === undefined, "delete failed?!");
    invalidate_links = true;
}
// Register a link and mark each endpoint as the other's neighbour.
var _link_add_helper = function (link) {
    var src_id = link.__src.id,
        dst_id = link.__dst.id;
    util.assert(link.id, "missing link id");
    id_to_link_map[link.id] = link;
    // link's nodes may not belong to this graph, check first - we add them if required to the id_to_link_id_set only
    if (id_to_link_id_set[src_id] === undefined) {
        id_to_link_id_set[src_id] = [];
    }
    if (id_to_link_id_set[dst_id] === undefined) {
        id_to_link_id_set[dst_id] = [];
    }
    id_to_link_id_set[src_id][dst_id] = 1;
    id_to_link_id_set[dst_id][src_id] = 1;
    invalidate_links = true;
}
// Remove each node in the id set; ids the client does not know about are
// logged and skipped rather than treated as errors.
var _remove_node_set = function(node_id_set) {
    node_id_set.forEach(function (id) {
        if (undefined !== id_to_node_map[id]) {
            _node_remove_helper(id);
            console.log('_remove_node_set: ' + id);
        } else {
            console.log("warning: server returned an id we don't have " + id);
        }
    });
}
this._remove_node_set = _remove_node_set;
// Build {node id: {node: n, src: [links out of n], dst: [links into n]}}
// for all visible nodes/links. The "id" string is an underscore property
// iteratee (equivalent to _.pluck).
function calc_neighbours() {
    return _.reduce(get_links(), function(d, link) {
        d[link.__src.id].src.push(link);
        d[link.__dst.id].dst.push(link);
        return d;
    }, _.object(_.map(get_nodes(), "id"),
        get_nodes().map(function (n) {
            return {node: n, src: [], dst: []};
        })
    ));
}
/**
 * Visitation constants for neighbourhood and shortest paths computation.
 * Bit flags: a visited node may combine several kinds.
 */
var kind_exit = 1,
    kind_enter = 2,
    kind_selected = 4;
// Map a kind value to its css class name; anything unrecognised
// (including kind_exit itself) falls back to 'exit'.
function kind_to_string(kind) {
    if (kind === kind_enter) {
        return 'enter';
    }
    if (kind === kind_selected) {
        return 'selected';
    }
    // TODO: add css for both
    return 'exit';
}
// Breadth-first search from node_id over the undirected link structure.
// Returns {node_id: {node_id, distance, prev}} for every reachable node,
// where prev is the set of predecessor ids on shortest paths.
function BFS(node_id) {
    var neighbours = calc_neighbours(),
        queue = [node_id],
        start_id,
        node_ids = get_node_ids(),
        V = _.object(node_ids, _.map(node_ids, function (id) {
            return {node_id: id, distance: Infinity, prev: {}};
        })),
        ret = {};
    V[node_id].distance = 0;
    while ((start_id = queue.shift()) !== undefined) {
        // neighbours in both link directions
        var src_ids = _.pluck(_.pluck(neighbours[start_id].src, "__dst"), "id"),
            dst_ids = _.pluck(_.pluck(neighbours[start_id].dst, "__src"), "id"),
            n_ids = src_ids.concat(dst_ids);
        _.each(n_ids, function(next_id) {
            var distance = V[start_id].distance + 1;
            // >= (not >) so that ALL equal-length predecessors are
            // recorded in prev, at the cost of re-enqueueing
            if (V[next_id].distance >= distance) {
                V[next_id].distance = distance;
                V[next_id].prev[start_id] = true;
                queue.push(next_id);
            }
        });
    }
    // keep only reachable nodes
    _.each(_.keys(V), function (k) {
        if (V[k].distance !== Infinity) {
            ret[k] = V[k];
        }
    });
    return ret;
}
this.BFS = BFS;
/**
 * pairs_symmetric
 *
 * Invokes cb(list[i], list[j]) for every 0 <= i <= j < list.length,
 * i.e. lower_index always comes first and the diagonal is included:
 * (N + 1) * N / 2 calls for |list| = N.
 */
function pairs_symmetric(list, cb) {
    var n = list.length;
    for (var first = 0; first < n; ++first) {
        for (var second = first; second < n; ++second) {
            cb(list[first], list[second]);
        }
    }
}
/**
 * @sources - list of nodes
 *
 * returns all nodes in the shortest paths between all sources.
 *
 * returns same dictionary as neighbourhood.
 */
function shortest_paths(sources) {
    // NOTE(review): make_status is defined but never used in this
    // function - looks like dead code, confirm before removing
    function make_status(node, distance, prev_nodes) {
        return {node: node, distances: distance || 0, prev_nodes: prev_nodes || {}};
    }
    var ids = _.pluck(sources, 'id'),
        // one BFS result per source node, keyed by source id
        bfs = _.object(ids, _.map(ids, BFS)),
        nodes = {};
    // walk the BFS predecessor DAG from start_id back towards the BFS
    // root, accumulating every node on some shortest path
    function append_paths(bfs, start_id) {
        var queue = [bfs[start_id]],
            next,
            next_id;
        while ((next = queue.shift()) !== undefined) {
            next_id = next.node_id;
            if (nodes[next_id] === undefined) {
                nodes[next_id] = {node_id: next_id, sources: {}};
            }
            _.each(_.keys(next.prev), function (p) {
                nodes[next_id].sources[p] = true;
                queue.push(bfs[p]);
            });
        }
    }
    pairs_symmetric(ids, function (one, two) {
        // skip disconnected source pairs
        if (bfs[one][two].distance === Infinity) {
            return;
        }
        append_paths(bfs[one], two);
    });
    return {
        'nodes': _.values(nodes),
        'links': []
    };
}
this.shortest_paths = shortest_paths;
/**
 *
 * neighbourhood
 *
 * @start - list of starting nodes
 * @d - radius of neighbours
 *
 * NOTE: Doesn't handle inter graph links
 * NOTE: return doesn't include original nodes
 *
 * @return - {
 *  'nodes': [{
 *     node: node,
 *     kind: kind,
 *     sources: {node_id: true}
 *   }]
 *  'links: [{link: link, kind: kind}]
 *  }
 *
 * kind: exit/enter
 *
 * TODO: implement for d !== 1
 *
 */
this.neighbourhood = function(start, d) {
    var ret = {'nodes':[], 'links':[]};
    // NOTE(review): addNode is defined but the code below pushes onto
    // ret.nodes directly - confirm whether this helper is dead code
    function addNode(node) {
        if (start.filter(function (n) { return n.id == node.id; }).length == 1) {
            return;
        }
        ret.nodes.push(node);
    }
    function get_name(node) {
        // XXX: using lowercase name comparison instead of id because nodes may be stale
        return node.name.toLowerCase();
    }
    if (start === undefined) {
        console.log('neighbourhood: bug: called with undefined node');
        return;
    }
    if (d > 1) {
        console.log('neighbourhood: bug: not implemented for d == ' + d);
    }
    if (d === 0) {
        // 0 depth is empty group of nodes and links
        return ret;
    }
    d = d || 1;
    if (start.length === undefined) {
        console.log('neighbourhood: expected array');
        return ret;
    }
    function make_status(kind, node) {
        return {node: node, kind: kind, links: [], depth: Infinity, sources: {}};
    }
    var nodes = get_nodes(),
        links = get_links(),
        neighbours = calc_neighbours(),
        // seed the visited map with the selected nodes, keyed by lowercase name
        visited = _.object(_.map(start, get_name),
                           _.map(start, _.partial(make_status, kind_selected)));
    // record that `node` reaches a neighbour through `link`; kind is
    // OR-ed in since a node may be both an enter and an exit neighbour.
    // NOTE(review): callers below pass no `depth` argument, so
    // Math.min(data.depth, undefined) yields NaN - confirm depth is unused
    function visit(source, link, getter, kind, depth) {
        var node = getter(link),
            name = get_name(node),
            data = visited[name];
        if (data === undefined) {
            data = visited[name] = make_status(0, node);
        }
        data.kind |= kind;
        data.links.push({link: link, kind: kind});
        data.depth = Math.min(data.depth, depth);
        data.sources[source.id] = true;
        return data;
    }
    _.each(start, function (node) {
        var N = neighbours[node.id];
        _.each(N.src, function (link) {
            visit(node, link, function (link) { return link.__dst; }, kind_enter);
        });
        _.each(N.dst, function (link) {
            visit(node, link, function (link) { return link.__src; }, kind_exit);
        });
    });
    // emit every visited node except the originally selected ones
    _.values(visited).forEach(function (data) {
        var node = data.node,
            kind = data.kind,
            links = data.links;
        if ((kind & kind_selected) === kind_selected) {
            return;
        }
        ret.nodes.push({type: kind_to_string(kind), node: node, sources: data.sources});
        _.each(links, function (data) {
            ret.links.push({link: data.link, kind: kind_to_string(kind)});
        });
    });
    return ret;
}
/* compareSubset:
 * state: one of the optional states that defines a subgraph
 * new_nodes: array of objects with name
 * new_links: array of length two arrays [source_name, target_name]
 * returns: true if current and new graph are homomorphic up to
 *          a single node id change. false otherwise
 */
this.compareSubset = function(state, new_nodes, new_links) {
    // links are compared as sorted [src name, dst name] pairs
    var state_nodes = find_nodes__by_state(state);
    var state_links = find_links__by_state(state).map(function(link) {
        return [link.__src.name, link.__dst.name];
    }).sort();
    var k;
    var state_source, state_target, new_source, new_target;
    var changed_nodes;
    var verbose = false; // XXX should be global.
    var set_old_name, set_new_name;
    new_nodes.map(function (f) {
        if (!f.name) {
            console.log('missing name on node. node follows');
            console.log(f);
        }
    });
    new_nodes.sort();
    new_links.sort();
    // quick reject: differing cardinality can never be homomorphic
    if (new_nodes.length != state_nodes.length || new_links.length != state_links.length) {
        if (verbose) {
            console.log('not same size: new/old ' + new_nodes.length + ' / ' + state_nodes.length + '; ' +
                        new_links.length + ' / ' + state_links.length);
        }
        return {graph_same: false};
    }
    changed_nodes = util.set_diff(util.set_from_array(state_nodes.map(function(d) { return d.name; })),
                                  util.set_from_array(new_nodes.map(function (f) { return f.name; })));
    // we allow any number of changed nodes as long as we it is 1 or 2 :)
    if (changed_nodes.a_b.length > 2) {
        if (verbose) {
            console.log('changed too many nodes');
            console.log(changed_nodes);
        }
        return {graph_same: false};
    }
    set_old_name = util.set_from_array(changed_nodes.a_b);
    set_new_name = util.set_from_array(changed_nodes.b_a);
    // pairwise link comparison: endpoints must match exactly, or differ
    // only via the old-name -> new-name substitution computed above
    for (k = 0 ; k < state_links.length ; ++k) {
        state_source = state_links[k][0];
        state_target = state_links[k][1];
        new_source = new_links[k][0];
        new_target = new_links[k][1];
        if ((state_source !== new_source &&
             !(state_source in set_old_name && new_source in set_new_name))
            ||
            (state_target !== new_target &&
             !(state_target in set_old_name && new_target in set_new_name))) {
            if (verbose) {
                console.log('not same link: ' +
                            state_source + '->' + state_target + ' != ' +
                            new_source + '->' + new_target);
                console.log('state_source === new_source: ' + String(state_source === new_source));
                console.log('state_target === new_target: ' + String(state_target === new_target));
                console.log(set_old_name);
                console.log(set_new_name);
            }
            return {graph_same: false};
        }
    }
    return {graph_same: true, old_name: changed_nodes.a_b, new_name: changed_nodes.b_a};
}
// Register a link after trimming its name; if a link already exists
// between the same endpoints (per findLink) its name/state are updated
// in place instead of adding a duplicate.
function __addLink(link) {
    var trimmed_name = link.name.trim();
    util.assert(link instanceof model_core.Link);
    if (link.name.length != trimmed_name.length) {
        console.log('bug: __addLink with name containing spaces - removing before sending to server');
    }
    link.name = trimmed_name;
    var existing_link = findLink(link.__src.id, link.__dst.id, link.name);
    if (undefined == existing_link) {
        _link_add_helper(link);
    } else {
        existing_link.name = link.name;
        existing_link.state = link.state;
    }
}
// FIXME: good idea to add API based on constructor parameter?
// addTempLink only exists on graphs constructed with spec.temporary == true
if (temporary) {
    this.addTempLink = function (link) {
        return __addLink(link);
    }
}
// Send an attribute diff for `link` to the backend and, on ack, apply the
// server-approved writes/removals onto the local link object.
// NOTE(review): the on_error parameter is accepted but the ajax error
// path uses the local on_ajax_error instead - confirm intended
this.update_link = function(link, new_link_spec, on_success, on_error) {
    util.assert(link instanceof model_core.Link);
    // TODO - fake api for client only (debug, demo, ui work)
    if (!rz_config.backend_enabled) return;
    var attr_diff = model_diff.new_attr_diff();
    for (var key in new_link_spec) {
        attr_diff.add_link_attr_write(link.id, key, new_link_spec[key]);
    }
    var on_ajax_success = function(attr_diff_spec) {
        var attr_diff = model_util.adapt_format_read_diff__attr(attr_diff_spec);
        var id_to_link_map = attr_diff.id_to_link_map
        var l_id = link.id; // original node id
        util.assert(id_to_link_map && id_to_link_map[l_id], "bad return value from ajax");
        var ret_link = id_to_link_map[l_id];
        // apply exactly what the server acknowledged
        for (var key in ret_link['__attr_write']){
            link[key] = ret_link['__attr_write'][key];
        }
        for (var key in ret_link['__attr_remove']){
            delete link[key];
        }
        // TODO: handle NAK: add problem emblem to link
        if (on_success !== undefined) {
            on_success();
        }
        diffBus.push(attr_diff);
    };
    var on_ajax_error = function(){
        console.log('error with commit to server: danger robinson!');
    };
    rz_api_backend.commit_diff__attr(attr_diff, on_ajax_success, on_ajax_error);
}
// Send an attribute diff for `node` to the backend and, on ack, apply the
// server-approved writes/removals locally. A rename that collides with an
// existing node's name offers a merge instead.
this.update_node = function(node, new_node_spec) {
    util.assert(node instanceof model_core.Node);
    // TODO - fake api for client only (debug, demo, ui work)
    if (!rz_config.backend_enabled) return;
    if (new_node_spec.name !== undefined && node.name != new_node_spec.name){
        /*
         * handle name update collision: suggest removal first
         */
        var n_eq_name = find_node__by_name(new_node_spec.name);
        if (null !== n_eq_name && n_eq_name !== node) {
            if (window.confirm('really merge ' + node.name + ' into ' + n_eq_name.name + '?')) {
                return nodes__merge([n_eq_name.id, node.id]);
            } else {
                return; // do nothing
            }
        }
        node['name'] = new_node_spec['name']; // [!] may still fail due to server NAK
    }
    var attr_diff = model_diff.new_attr_diff();
    for (var key in new_node_spec) {
        attr_diff.add_node_attr_write(node.id, key, new_node_spec[key]);
    }
    var on_ajax_success = function(attr_diff_spec){
        var attr_diff = model_util.adapt_format_read_diff__attr(attr_diff_spec);
        var id_to_node_map = attr_diff.id_to_node_map
        var n_id = node.id; // original node id
        util.assert(id_to_node_map && id_to_node_map[n_id], "bad return value from ajax");
        var ret_node = id_to_node_map[n_id];
        // apply exactly what the server acknowledged
        for (var key in ret_node['__attr_write']){
            node[key] = ret_node['__attr_write'][key];
        }
        for (var key in ret_node['__attr_remove']){
            delete node[key];
        }
        diffBus.push(attr_diff);
    };
    var on_ajax_error = function(){
        console.log('error with commit to server: danger robinson!');
    };
    rz_api_backend.commit_diff__attr(attr_diff, on_ajax_success, on_ajax_error);
}
var update_node = this.update_node;
// Rename the node currently named old_name to new_name.
// Bug fix: find_node__by_name returns null (not undefined) on a miss, so
// the original `=== undefined` check never fired and a missing node fell
// through into editName; test with == null to cover both.
this.editNameByName = function(old_name, new_name) {
    var node = find_node__by_name(old_name);
    if (node == null) {
        console.log('editNameByName: error: cannot find node with name ' + old_name);
        return;
    }
    return this.editName(node.id, new_name);
}
// Rename node `id` to new_name; only legal on temporary graphs and only
// when no other node already carries the new name.
// Bug fix: the finders return null (not undefined) on a miss, so the
// original `=== undefined` checks never fired and the collision assert
// compared against the wrong sentinel; use == null to cover both.
// Also dropped the unused `acceptReplace` local.
this.editName = function(id, new_name) {
    var n_eq_name = find_node__by_name(new_name);
    var n_eq_id = find_node__by_id(id);
    if (n_eq_id == null) {
        return;
    }
    if (n_eq_id.name == new_name) {
        return;
    }
    util.assert(temporary, 'editName should now only be used on temporary graphs');
    util.assert(n_eq_name == null);
    n_eq_id.name = new_name;
}
/**
 * editType:
 *
 * Set the 'type' attribute of node `id` via the generic property editor.
 *
 * @return true if type changed
 */
this.editType = function(id, newtype) {
    return this._editProperty(id, 'type', newtype);
}
// Build an Attr_Diff carrying a single node attribute write.
function new_attr_diff_prop_value(id, prop, value)
{
    var diff = model_diff.new_attr_diff();
    diff.add_node_attr_write(id, prop, value);
    return diff;
}
// Set a single property on node `id`. Delegates to the base graph when
// the node only exists there; on temporary graphs the write is local,
// otherwise it is committed via a server round trip.
// Bug fix: find_node__by_id returns null (not undefined) on a miss, so
// the original `n === undefined` guard never fired; use == null.
// @return true when the edit was applied (or delegated), false otherwise
this._editProperty = function(id, prop, value) {
    var n = find_node__by_id(id),
        local = find_node__by_id(id, false);
    if (n == null) {
        return false;
    }
    if (local === null) {
        // node known only via the base graph - delegate
        return base._editProperty(id, prop, value);
    }
    if (temporary) {
        n[prop] = value;
        diffBus.push(new_attr_diff_prop_value(id, prop, value));
    } else {
        // FIXME: should not do a server roundtrip, should keep this data local
        // and part of the temporary graph, and send it on user enter in a single commit.
        // The current implementation is just a quick way to get sorta the same outcome.
        // it misses atomicity (since we create a commit for every tab click on an existing node),
        // and responsiveness (since there is a roundtrip to the server and it isn't client side)
        var props = {};
        props[prop] = value;
        update_node(n, props);
    }
    return true;
}
// Delete a set of links by id; expressed as a topo diff and round-tripped
// through the server.
this.links__delete = function(link_ids) {
    this.commit_and_tx_diff__topo(model_diff.new_topo_diff({link_id_set_rm: link_ids}));
}
// Delete a set of nodes by id (touched links are cascaded by
// commit_and_tx_diff__topo).
this.nodes__delete = function(node_ids) {
    this.commit_and_tx_diff__topo(model_diff.new_topo_diff({node_id_set_rm: node_ids}));
}
/**
 * links the first node in the list to the rest of the list.
 */
var nodes__link_fan = function(node_ids) {
    util.assert(node_ids.length > 1); // strictly speaking we can also treat 1 as correct usage
    var src_id = node_ids[0],
        src_node = find_node__by_id(src_id),
        // one new link src -> target per remaining id, with the
        // placeholder empty-link name
        added_links = node_ids.slice(1).map(function (tgt_id) {
            return model_core.create_link__set_random_id(src_node, find_node__by_id(tgt_id),
                                                         {name: consts.EMPTY_LINK_NAME});
        });
    commit_and_tx_diff__topo(model_diff.new_topo_diff({link_set_add: added_links}));
};
this.nodes__link_fan = nodes__link_fan;
// Merge all nodes in node_ids into the first one: every link touching a
// merged node is recreated against the surviving node (self-links to the
// survivor are skipped), then the merged nodes are removed - all in a
// single committed topo diff.
var nodes__merge = function(node_ids) {
    util.assert(node_ids.length > 1); // strictly speaking we can also treat 1 as correct usage
    var merged = _.rest(node_ids);
    var merge_node_id = node_ids[0];
    var merge_node = find_node__by_id(merge_node_id);
    var topo_diff;
    util.assert(merge_node != null);
    var added_links = _.flatten(_.map(merged, function (node_id) {
        // outgoing links of the merged node, re-rooted at the survivor
        var src_links = find_link__by_src_id(node_id)
            .filter(function (src_link) { return src_link.__dst.id !== merge_node_id; })
            .map(function (src_link) {
                return model_core.create_link__set_random_id(merge_node, src_link.__dst, {
                    name: src_link.name,
                });
            });
        // incoming links of the merged node, re-targeted at the survivor
        var dst_links = find_link__by_dst_id(node_id)
            .filter(function (dst_link) { return dst_link.__src.id !== merge_node_id; })
            .map(function (dst_link) {
                return model_core.create_link__set_random_id(dst_link.__src, merge_node, {
                    name: dst_link.name,
                });
            });
        return _.union(src_links, dst_links);
    }));
    topo_diff = model_diff.new_topo_diff({
        link_set_add: added_links,
        node_id_set_rm: merged});
    commit_and_tx_diff__topo(topo_diff);
};
this.nodes__merge = nodes__merge;
// Remove each link in the id set; ids the client does not know about are
// logged and skipped rather than treated as errors.
var _remove_link_set = function(link_id_set) {
    link_id_set.forEach(function (id) {
        if (undefined !== id_to_link_map[id]) {
            _link_remove_helper(id);
            console.log('_remove_link_set: ' + id);
        } else {
            console.log("warning: server returned an id we don't have " + id);
        }
    });
}
this._remove_link_set = _remove_link_set;
// Delete every visible node currently in the given state, as one
// committed+transmitted topo diff.
this.nodes_rm = function(state) {
    var node_ids = [];
    get_nodes().forEach(function (n) {
        if (n.state == state) {
            node_ids.push(n.id);
        }
    });
    this.commit_and_tx_diff__topo(model_diff.new_topo_diff({
        node_id_set_rm: node_ids,
    }));
}
// Return the first link from src_id to dst_id, or undefined when none.
// NOTE(review): the `name` parameter is accepted but never used in the
// comparison, so any link between the endpoints matches regardless of
// name - confirm whether per-name lookup was intended (see __addLink).
var findLink = function(src_id, dst_id, name) {
    var link_key, link;
    for (link_key in id_to_link_map) {
        link = id_to_link_map[link_key];
        if (link.__src.id === src_id && link.__dst.id === dst_id) {
            return link;
        }
    }
}
// All visible links with BOTH endpoints inside the given node set.
var find_links__by_nodes = function(nodes) {
    var ids = util.set_from_array(nodes.map(function (n) { return n.id; }));
    return get_links().filter(function (link) {
        return ids[link.__src.id] && ids[link.__dst.id];
    });
}
this.find_links__by_nodes = find_links__by_nodes;
// All links (unfiltered) currently in the given state.
var find_links__by_state = function(state) {
    var found = [];
    links_forEach(function (link) {
        if (state == link.state) {
            found.push(link);
        }
    });
    return found;
}
// Case-insensitive node-name equality.
var compareNames = function(first, second) {
    return first.toLowerCase() === second.toLowerCase();
};
// True when a visible node with the given name (case-insensitive) exists;
// when `state` is supplied the node must also be in that state.
var hasNodeByName = function(name, state) {
    return get_nodes().some(function (n) {
        return compareNames(n.name, name) && (undefined === state || n.state === state);
    });
}
this.hasNodeByName = hasNodeByName;
/**
 * return node whose id matches the given id or undefined if no node was found
 * NOTE(review): actually returns null (not undefined) on a miss - callers
 * should compare against null or use == null
 */
var find_node__by_id = function(id, recursive) {
    // default to recursion
    recursive = recursive === undefined ? true : recursive;
    // base graph is consulted first when recursing
    if (recursive && base) {
        var base_node = base.find_node__by_id(id, recursive);
        if (base_node) {
            return base_node;
        }
    }
    return id_to_node_map[id] || null;
}
this.find_node__by_id = find_node__by_id;
// Vectorized variant: one lookup per id, nulls included for misses.
var find_nodes__by_id = function(ids, recursive) {
    return _.map(ids, function (id) { return find_node__by_id(id, recursive); });
}
this.find_nodes__by_id = find_nodes__by_id;
/**
 * @param filter: must return true in order for node to be included in the returned set
 */
// Bug fix: the original iterated a `nodes` variable that is not declared
// anywhere in this scope (ReferenceError when called); iterate the node
// accessor instead.
var find_nodes__by_filter = function(filter) {
    return get_nodes().filter(function (n) {
        return true == filter(n);
    });
}
/**
 * @param id unique id of link
 * @return Link with given id; null when not found (base graph is
 *         consulted first when recursing)
 */
var find_link__by_id = function(id, recursive) {
    // default to recursion
    recursive = recursive === undefined ? true : recursive;
    if (recursive && base) {
        var base_link = base.find_link__by_id(id, recursive);
        if (base_link) {
            return base_link;
        }
    }
    return id_to_link_map[id] || null;
}
this.find_link__by_id = find_link__by_id;
/**
 * @param id of source node
 * @return array of links whose source node is id
 * FIXME: none O(E) implementation (used by merge)
 */
var find_link__by_src_id = function(src_id) {
    return get_links().filter(function (link) {
        return link.__src.id == src_id;
    });
}
this.find_link__by_src_id = find_link__by_src_id;
/**
 * @param id of destination node
 * @return array of links whose destination node is id
 * FIXME: none O(E) implementation (used by merge)
 */
var find_link__by_dst_id = function(dst_id) {
    return get_links().filter(function (link) {
        return link.__dst.id == dst_id;
    });
}
this.find_link__by_dst_id = find_link__by_dst_id;
// Case-insensitive node lookup by name; the base graph is consulted
// first when recursing. Returns null when not found.
var find_node__by_name = function(name, recursive) {
    // default to recursion
    recursive = recursive === undefined ? true : recursive;
    if (recursive && base) {
        var node = base.find_node__by_name(name, true);
        if (node !== null) {
            return node;
        }
    }
    for (var k in id_to_node_map) {
        if (compareNames(id_to_node_map[k].name, name)) {
            return id_to_node_map[k];
        }
    }
    return null;
}
this.find_node__by_name = find_node__by_name;
// All nodes (unfiltered) currently in the given state.
var find_nodes__by_state = function(state) {
    var found = [];
    nodes_forEach(function (node) {
        if (state === node.state) {
            found.push(node);
        }
    });
    return found;
}
// Reset the whole graph state; publishes an (empty) diff unless
// push_diff === false.
function clear(push_diff) {
    if (push_diff === undefined) {
        push_diff = true;
    }
    id_to_node_map = {};
    id_to_link_map = {};
    id_to_link_id_set = {};
    invalidate_links = true;
    invalidate_nodes = true;
    if (push_diff) {
        diffBus.push({}); // FIXME: better value
    }
}
this.clear = clear;
// True when the visible graph has neither nodes nor links.
function empty() {
    // FIXME: O(|nodes|+|links|)
    return 0 == get_nodes().length && 0 == get_links().length;
}
this.empty = empty;
// @ajax-trans
// Broadcast a diff set over the mesh API.
// NOTE(review): on_success is defined but never passed to the broadcast
// call - confirm whether the mesh API accepts a callback
this.commit_diff_set = function (diff_set) {
    function on_success(data){
        console.log('commit_diff_set:on_success: TODO impl');
    }
    rz_api_mesh.broadcast_possible_next_diff_block(diff_set);
}
/**
 * Adapt a node record received from the backend into a local node spec.
 * @return the adapted spec (asserted to carry an id)
 */
function on_backend__node_add(n_spec) {
    n_spec = model_util.adapt_format_read_node(n_spec);
    util.assert(undefined != n_spec.id, 'load_from_backend: n_spec missing id');
    return n_spec;
}
/**
 * Adapt a link record received from the backend into a local link spec,
 * resolving the endpoint ids (__src_id / __dst_id) to live node objects.
 * @return the link spec, or null when an endpoint node is unknown
 */
function on_backend__link_add(l_spec) {
    // capture the endpoint ids before they are deleted from the pointer below
    var src_id = l_spec.__src_id,
        dst_id = l_spec.__dst_id,
        l_ptr = model_util.adapt_format_read_link_ptr(l_spec);
    util.assert(undefined !== l_ptr.id, 'load_from_backend: l_ptr missing id');
    util.assert(undefined !== src_id, 'load_from_backend: link missing __src_id');
    util.assert(undefined !== dst_id, 'load_from_backend: link missing __dst_id');
    // cleanup & reuse as link_spec
    delete l_ptr.__src_id;
    delete l_ptr.__dst_id;
    var link_spec = l_ptr;
    link_spec.__src = find_node__by_id(src_id);
    link_spec.__dst = find_node__by_id(dst_id);
    // unresolved endpoints are logged and make the whole link unusable
    if (null === link_spec.__src) {
        util.log_error("src_id not found: " + src_id);
        return null;
    }
    if (null === link_spec.__dst) {
        util.log_error("dst_id not found: " + dst_id);
        return null;
    }
    return link_spec;
}
/**
 * Apply a full document clone received from the backend: add its nodes and
 * links to the local graph, replay its commits on activityBus, then
 * announce the additions on diffBus.
 * @param clone [topo, commits] pair as delivered by rzdoc_clone
 */
function __commit_diff_ajax__clone(clone) {
    var topo = clone[0],
        commits = clone[1].reverse(), // [!] received in new to old order, need them reversed
        node_specs = topo.node_set_add.map(on_backend__node_add),
        nodes = _add_node_set(node_specs),
        // links whose endpoints could not be resolved come back null; drop them
        link_specs = topo.link_set_add.map(on_backend__link_add).filter(function (link_spec) {
            return link_spec !== null;
        }),
        links = _add_link_set(link_specs);
    commits.forEach(function (commit) {
        activityBus.push(commit);
    });
    diffBus.push({node_set_add: nodes, link_set_add: links});
}
/**
 * Resolve a topo diff that arrived as id sets: materialize the pending
 * node/link specs for each id, then commit the diff locally.
 */
function __commit_diff_ajax__topo(diff) {
    diff.node_set_add = diff.node_id_set_add.map(_get_server_pending);
    diff.link_set_add = diff.link_id_set_add.map(_get_server_pending);
    commit_diff__topo(diff);
}
/**
 * Take (and remove) the spec parked in server_pending_objects for id.
 * Asserts that a spec is actually pending for that id.
 */
function _get_server_pending(id) {
    // FIXME: should track cache
    util.assert(undefined !== server_pending_objects[id]);
    var spec = server_pending_objects[id];
    delete server_pending_objects[id];
    return spec;
}
/**
 * Add each node spec to the graph.
 * Bug fix: the map callback previously returned nothing, so callers
 * (e.g. __commit_diff_ajax__clone) received an array of undefined; the
 * value produced by __addNode is now propagated.
 * @return array of __addNode results, one per spec
 */
function _add_node_set(node_specs) {
    return node_specs.map(function (node_spec) {
        return __addNode(node_spec);
    });
}
/**
 * Create a link object for each spec (resolving endpoint pointers to
 * live nodes where possible) and add it to the graph.
 * Bug fix: the created link is now returned from the map callback, so
 * callers receive the actual links instead of an array of undefined.
 * @return array of created links, one per spec
 */
function _add_link_set(link_specs) {
    return link_specs.map(function (link_spec) {
        // resolve link ptr: prefer a live node with the same id as the
        // embedded endpoint object, fall back to the raw endpoint id
        var src = (undefined !== link_spec.__src && null !== link_spec.__src &&
                   (find_node__by_id(link_spec.__src.id) || link_spec.__src)) ||
                   find_node__by_id(link_spec.__src_id),
            dst = (undefined !== link_spec.__dst && null !== link_spec.__dst &&
                   (find_node__by_id(link_spec.__dst.id) || link_spec.__dst)) ||
                   find_node__by_id(link_spec.__dst_id),
            link = model_core.create_link_from_spec(src, dst, link_spec);
        __addLink(link);
        return link;
    });
}
/*
 * Inputs are specs, not raw - after adaptation from the on wire format.
 *
 * FIXME: use a different object? different properties in the same object?
 */
/**
 * Apply a topology diff (node/link additions and removals) to the local
 * graph, then announce it on diffBus. Additions run before removals, and
 * links are removed before nodes.
 */
function commit_diff__topo(diff) {
    _add_node_set(diff.node_set_add);
    _add_link_set(diff.link_set_add);
    // done under protest
    _remove_link_set(diff.link_id_set_rm);
    _remove_node_set(diff.node_id_set_rm);
    diffBus.push(diff);
}
this.commit_diff__topo = commit_diff__topo;
/**
 * Apply an Attr_Diff:
 * - commit diff to the local graph instance
 * - emit a diffBus event
 *
 * This function should not trigger remote transmission of the diff object.
 * The previously duplicated node/link attribute-application code has been
 * factored into one helper; log messages are unchanged.
 */
function commit_diff__attr(attr_diff) {
    util.assert(model_diff.is_attr_diff(attr_diff), 'commit_diff__attr: argument type != Attr_Diff');

    // Apply one per-object entry: attribute writes first, then removals,
    // reporting how many of each were performed.
    function apply_entry(target, entry) {
        var count_w = 0,
            count_d = 0,
            attr_key;
        for (attr_key in entry['__attr_write']) {
            target[attr_key] = entry['__attr_write'][attr_key]; // write each new attr update
            count_w = count_w + 1;
        }
        for (attr_key in entry['__attr_remove']) {
            delete target[attr_key]; // apply each attr removal
            count_d = count_d + 1;
        }
        return {writes: count_w, removals: count_d};
    }

    // process nodes
    attr_diff.for_each_node(function(n_id, n_attr_diff) {
        var node = id_to_node_map[n_id];
        if (undefined == node) {
            console.warn('commit_diff__attr: incoming attr diff for non-existing node, discarding');
            return;
        }
        var counts = apply_entry(node, n_attr_diff);
        console.log('commit_diff__attr: n_id: \'' + n_id + '\', write-count: ' + counts.writes + ', rm-count: ' + counts.removals);
    });

    // process links
    attr_diff.for_each_link(function(l_id, n_attr_diff) {
        var link = id_to_link_map[l_id];
        if (undefined == link) {
            console.warn('commit_diff__attr: incoming attr diff for non-existing link, discarding');
            return;
        }
        var counts = apply_entry(link, n_attr_diff);
        console.log('commit_diff__attr: l_id: \'' + l_id + '\', write-count: ' + counts.writes + ', rm-count: ' + counts.removals);
    });

    diffBus.push(attr_diff);
}
this.commit_diff__attr = commit_diff__attr;
/**
 * Perform the initial document load from the backend.
 *
 * @param on_success optional; used by MVP presenters to trigger a UI update
 *                   after the received clone has been applied locally
 * @param on_error   forwarded as-is to the backend call
 */
// @ajax-trans
function load_from_backend(on_success, on_error) {
    var on_success_wrapper = function (clone) {
        __commit_diff_ajax__clone(clone);
        if (undefined != on_success) {
            on_success();
        }
    };
    rz_api_backend.rzdoc_clone(on_success_wrapper, on_error);
}
this.load_from_backend = load_from_backend;
/**
 * Build a Topo_Diff adding the given externally-supplied nodes and links
 * (e.g. parsed from a JSON export), assigning every node a fresh random id
 * and remapping link endpoints accordingly.
 * @param nodes array of external node specs (id, name, type, x, y, ...)
 * @param links array of external link specs (__src/__dst hold OLD node ids)
 * @return a topo diff ready to be committed
 */
var new_topo_diff__from_nodes_links = function (nodes, links) {
    var diff,
        node_by_id = {}, // new id -> created node
        old_id_to_new_id = {}; // external id -> newly assigned id
    diff = model_diff.new_topo_diff();
    diff.node_set_add = nodes.map(function(ext_spec) {
        var node_spec = {
            name: ext_spec.name ? ext_spec.name : ext_spec.id,
            type: ext_spec.type,
            x: ext_spec.x,
            y: ext_spec.y,
        },
        node;
        // copy the shared attribute set verbatim from the external spec
        all_attributes.forEach(function (attr) {
            node_spec[attr] = ext_spec[attr];
        });
        // revive date-valued fields
        if (ext_spec.start) {
            node_spec.start = new Date(ext_spec.start);
        }
        if (ext_spec.end) {
            node_spec.end = new Date(ext_spec.end);
        }
        node = model_core.create_node__set_random_id(node_spec);
        // NOTE(review): the comma operator below chains two assignments in
        // one statement -- works, but a semicolon was probably intended.
        old_id_to_new_id[ext_spec.id] = node.id,
        node_by_id[node.id] = node;
        return node;
    });
    diff.link_set_add = links.map(function (link_spec) {
        var src = node_by_id[old_id_to_new_id[link_spec.__src]],
            dst = node_by_id[old_id_to_new_id[link_spec.__dst]],
            link = model_core.create_link__set_random_id(src, dst, {
                name: link_spec.name,
                state: 'perm', // FIXME: this is meaningless now with graph separation
            });
        link.__src_id = src.id;
        link.__dst_id = dst.id;
        return link;
    });
    return diff;
}
this.new_topo_diff__from_nodes_links = new_topo_diff__from_nodes_links;
/**
 * Merge a JSON document (as produced by save_to_json) into the graph.
 */
this.load_from_json = function(json) {
    var data = JSON.parse(json);
    if (data == null) {
        console.log('load callback: no data to load');
        return;
    }
    // FIXME: prompt for replace/merge; now defaulting to merge
    var diff = new_topo_diff__from_nodes_links(data.nodes, data.links);
    commit_and_tx_diff__topo(diff);
}
/**
 * Serialize the currently visible nodes and links to a JSON string.
 * Nodes carry id/x/y plus the shared attribute set; links carry their
 * endpoint ids and name.
 */
this.save_to_json = function() {
    var node_dicts = get_nodes().map(function (node) {
        var node_dict = {id: node.id, x: node.x, y: node.y};
        all_attributes.forEach(function (attr) {
            node_dict[attr] = node[attr];
        });
        return node_dict;
    });
    var link_dicts = get_links().map(function (link) {
        return {
            "__src": link.__src.id,
            "__dst": link.__dst.id,
            "name": link.name
        };
    });
    return JSON.stringify({"nodes": node_dicts, "links": link_dicts});
}
/**
 * Associate a user with this graph and (re)create its History tracker,
 * bound to the SVG zoom group element.
 * NOTE(review): assumes an 'svg g.zoom' element is already in the DOM --
 * confirm the call order against the view setup.
 */
this.set_user = function(user) {
    var elem = $('svg g.zoom')[0];
    this.user = user;
    this.history = new history.History(this.user, this, elem);
}
/**
 * Clear the history tracker, if one was created via set_user.
 * NOTE(review): relies on being invoked as a method (graph.clear_history())
 * so that 'this' is the graph instance.
 */
function clear_history() {
    if (this.history !== undefined) {
        this.history.clear();
    }
}
this.clear_history = clear_history;
/**
 * Nodes currently visible, i.e. those whose type is not filtered out.
 * The result is cached; the cache is rebuilt lazily after invalidation.
 */
var get_nodes = function() {
    if (cached_nodes === undefined || invalidate_nodes) {
        cached_nodes = _.filter(id_to_node_map, function (node, node_id) {
            return filtered_types[node.type] === undefined;
        });
        invalidate_nodes = false;
    }
    return cached_nodes;
};
this.nodes = get_nodes;
/**
 * All node ids, unfiltered.
 * NOTE(review): unlike get_nodes(), this ignores filtered_types -- ids of
 * filtered-out nodes are included. Confirm callers expect that.
 */
var get_node_ids = function() {
    return _.keys(id_to_node_map);
}
this.get_node_ids = get_node_ids;
/**
 * Links currently visible: a link is hidden when either endpoint's node
 * type is filtered out. Cached like get_nodes().
 */
var get_links = function() {
    if (cached_links === undefined || invalidate_links) {
        cached_links = _.filter(id_to_link_map, function (link, link_id) {
            return filtered_types[link.__src.type] === undefined &&
                   filtered_types[link.__dst.type] === undefined;
        });
        invalidate_links = false;
    }
    return cached_links;
};
this.links = get_links;
/**
 * Select visible nodes/links accepted by the given predicate visitors.
 * @param node_visitor optional predicate over nodes
 * @param link_visitor optional predicate over links
 * @return {nodes, links} with the accepted items (an omitted visitor
 *         selects nothing), or undefined when both visitors are omitted
 */
this.find__by_visitors = function(node_visitor, link_visitor) {
    // fixed: bail out before touching the node/link caches when there is
    // nothing to select
    if (!node_visitor && !link_visitor) {
        return;
    }
    var selected_nodes = node_visitor ? get_nodes().filter(node_visitor) : [];
    var selected_links = link_visitor ? get_links().filter(link_visitor) : [];
    return {nodes: selected_nodes, links: selected_links};
}
/**
 * Mark every node whose name matches one of the given names as 'related',
 * clearing any previous 'related' marks first.
 */
function markRelated(names) {
    removeRelated();
    nodes_forEach(function (node) {
        var matches = names.some(function (name) {
            return compareNames(node.name, name);
        });
        if (matches) {
            node.state = 'related';
        }
    });
}
this.markRelated = markRelated;
/**
 * Revert all 'related' nodes back to the 'perm' state.
 * FIXME: related should use separate variable, not overload 'state' (bad bad bad)
 */
function removeRelated() {
    nodes_forEach(function (node) {
        if (node.state === 'related') {
            node.state = 'perm';
        }
    });
}
this.removeRelated = removeRelated;
/**
 * Replace the node-type filter map and invalidate both caches so the next
 * get_nodes()/get_links() call recomputes visibility.
 * @param new_filtered_types map whose keys are the node types to hide
 */
this.node__set_filtered_types = function (new_filtered_types) {
    filtered_types = new_filtered_types;
    invalidate_links = true;
    invalidate_nodes = true;
}
}
/**
 * An item is considered a node when it carries no '__src' endpoint.
 */
function is_node(item) {
    return !is_link(item);
}
/**
 * An item is considered a link when it carries a '__src' endpoint.
 */
function is_link(item) {
    return item.__src !== undefined;
}
return {
Graph: Graph,
is_node: is_node,
is_link: is_link,
};
});
| graph.js: commit_and_tx_diff__topo: remove on_error which duplicates default on_error behavior
| src/client/model/graph.js | graph.js: commit_and_tx_diff__topo: remove on_error which duplicates default on_error behavior | <ide><path>rc/client/model/graph.js
<ide> return !hasNodeByName(n.name);
<ide> });
<ide>
<del> var graph_on_error = function(error) {
<del> console.log('error:');
<del> console.dir(error);
<del> }
<del> rz_api_backend.commit_diff__topo(topo_diff, __commit_diff_ajax__topo, graph_on_error);
<add> rz_api_backend.commit_diff__topo(topo_diff, __commit_diff_ajax__topo);
<ide> }
<ide> this.commit_and_tx_diff__topo = commit_and_tx_diff__topo;
<ide> |
|
JavaScript | mit | 07f4accb62ddbe2d5b129d378ac6044e301bc156 | 0 | JasonMFry/calculator,JasonMFry/calculator | {
let tape = [];
// TODO make AC functional, don't disable AC here, instruct user to press AC in validateNumber()
const disableButtons = () => {
const buttons = document.getElementsByTagName('button');
for (const button of buttons) {
button.disabled = true;
}
};
const validationHelpers = {
errors: ['Error', 'Undefined'],
validateNumber: (number) => {
if (number.toPrecision().length > 10) {
disableButtons();
tape = ['='];
return validationHelpers.errors[0];
} else if (number === Infinity) {
disableButtons();
tape = ['='];
return validationHelpers.errors[1];
}
return number;
},
// on page load, dataset.processed is null. When user first presses a digits
// button, responsePane will show the digit that was pressed, and then
// dataset.processed becomes false. When the user presses an operations
// button, dataset.processed becomes true so that the next digits button
// press will display the digit instead of appending the digit to the
// display.
validateDigitsInput: (responsePane, elemText) => {
let responsePaneText = responsePane.innerText;
if (responsePane.dataset.processed === 'false') {
responsePaneText += elemText;
return responsePaneText;
}
return elemText;
},
validateOperationsInput: (responsePane) => {
const operators = ['+', '-', '×', '÷', '='];
if (responsePane.dataset.processed !== 'true') {
tape.push(Number(responsePane.innerText));
}
// if user inputs 2 operators in a row, overwrite the first one
if (operators.includes(tape[tape.length - 1])) {
tape.pop();
}
},
validateOutput: (responsePaneText) => {
if (validationHelpers.errors.includes(tape[0])) {
return tape[0];
}
// reset tape after user presses '='
if (tape.length === 1) {
return tape.pop();
}
// always display the last number in the tape to the user
for (let i = tape.length; i >= 0; i -= 1) {
if (typeof tape[i] === 'number') {
return tape[i];
}
}
return responsePaneText;
},
};
const math = {
'+': (x, y) => {
const sum = x + y;
return validationHelpers.validateNumber(sum);
},
'-': (x, y) => {
const sum = x - y;
return validationHelpers.validateNumber(sum);
},
'÷': (x, y) => {
const product = x / y;
return validationHelpers.validateNumber(product);
},
'×': (x, y) => {
const product = x * y;
return validationHelpers.validateNumber(product);
},
doMath: () => {
const numbersToEvaluate = tape.splice(-4, 3);
const x = numbersToEvaluate[0];
const y = numbersToEvaluate[2];
const operator = numbersToEvaluate[1];
const answer = math[operator](x, y);
tape.splice(-1, 0, answer);
},
evaluateTape: () => {
const addition = ['+', '-'];
const multiplication = ['×', '÷'];
const firstOperator = tape[1];
const secondOperator = tape[3];
const thirdOperator = tape[5];
// N is any number, m is division or multiplication, and a is addition or
// subtraction. There are a total of 5 possible scenarios, as listed below.
// This accounts for scenarios [N, m, N, m], [N, m, N, a], and [N, a, N, a]
if (multiplication.includes(firstOperator) || addition.includes(secondOperator)) {
math.doMath();
}
if (thirdOperator) {
// This accounts for scenario [N, a, N, m, N, m]
if (multiplication.includes(thirdOperator)) {
math.doMath();
}
// This accounts for scenario [N, a, N, m, N, a]
if (addition.includes(thirdOperator)) {
math.doMath();
math.doMath();
}
}
},
};
const addDigitsListener = (digitButtons, responsePane) => {
digitButtons.addEventListener('click', (e) => {
const elemText = e.target.innerText;
responsePane.innerText = validationHelpers.validateDigitsInput(responsePane, elemText);
responsePane.dataset.processed = false;
});
};
const addOperationsListener = (operationButtons, responsePane) => {
operationButtons.addEventListener('click', (e) => {
validationHelpers.validateOperationsInput(responsePane);
responsePane.dataset.processed = true;
tape.push(e.target.innerText);
if (tape.length === 4 || tape.length === 6) {
math.evaluateTape();
}
if (tape[tape.length - 1] === '=') {
while (tape.length > 2) {
math.doMath();
}
tape.pop();
responsePane.removeAttribute('data-processed');
}
responsePane.innerText = validationHelpers.validateOutput(responsePane.innerText);
});
};
const addEventsToButtons = () => {
const responsePane = document.getElementById('response-pane');
const operationButtons = document.getElementById('operation-buttons');
const digitButtons = document.getElementById('digit-buttons');
addDigitsListener(digitButtons, responsePane);
addOperationsListener(operationButtons, responsePane);
};
document.addEventListener('DOMContentLoaded', addEventsToButtons());
}
| index.js | {
const tape = [];
// TODO make AC functional, don't disable AC here, instruct user to press AC in validateNumber()
const disableButtons = () => {
const buttons = document.getElementsByTagName('button');
for (const button of buttons) {
button.disabled = true;
}
};
const validationHelpers = {
errors: ['Error', 'Undefined'],
validateNumber: (number) => {
if (number.toPrecision().length > 10) {
disableButtons();
return validationHelpers.errors[0];
} else if (number === Infinity) {
disableButtons();
return validationHelpers.errors[1];
}
return number;
},
// on page load, dataset.processed is null. When user first presses a digits
// button, responsePane will show the digit that was pressed, and then
// dataset.processed becomes false. When the user presses an operations
// button, dataset.processed becomes true so that the next digits button
// press will display the digit instead of appending the digit to the
// display.
validateDigitsInput: (responsePane, elemText) => {
let responsePaneText = responsePane.innerText;
if (responsePane.dataset.processed === 'false') {
responsePaneText += elemText;
return responsePaneText;
}
return elemText;
},
validateOperationsInput: (responsePane) => {
const operators = ['+', '-', '×', '÷', '='];
if (responsePane.dataset.processed !== 'true') {
tape.push(Number(responsePane.innerText));
}
// if user inputs 2 operators in a row, overwrite the first one
if (operators.includes(tape[tape.length - 1])) {
tape.pop();
}
},
validateOutput: (responsePaneText) => {
if (validationHelpers.errors.includes(tape[0])) {
return tape[0];
}
// reset tape after user presses '='
if (tape.length === 1) {
return tape.pop();
}
// always display the last number in the tape to the user
for (let i = tape.length; i >= 0; i -= 1) {
if (typeof tape[i] === 'number') {
return tape[i];
}
}
return responsePaneText;
},
};
const math = {
'+': (x, y) => {
const sum = x + y;
return validationHelpers.validateNumber(sum);
},
'-': (x, y) => {
const sum = x - y;
return validationHelpers.validateNumber(sum);
},
'÷': (x, y) => {
const product = x / y;
return validationHelpers.validateNumber(product);
},
'×': (x, y) => {
const product = x * y;
return validationHelpers.validateNumber(product);
},
doMath: () => {
const numbersToEvaluate = tape.splice(-4, 3);
const x = numbersToEvaluate[0];
const y = numbersToEvaluate[2];
const operator = numbersToEvaluate[1];
const answer = math[operator](x, y);
tape.splice(-1, 0, answer);
},
evaluateTape: () => {
const addition = ['+', '-'];
const multiplication = ['×', '÷'];
const firstOperator = tape[1];
const secondOperator = tape[3];
const thirdOperator = tape[5];
// N is any number, m is division or multiplication, and a is addition or
// subtraction. There are a total of 5 possible scenarios, as listed below.
// This accounts for scenarios [N, m, N, m], [N, m, N, a], and [N, a, N, a]
if (multiplication.includes(firstOperator) || addition.includes(secondOperator)) {
math.doMath();
}
if (thirdOperator) {
// This accounts for scenario [N, a, N, m, N, m]
if (multiplication.includes(thirdOperator)) {
math.doMath();
}
// This accounts for scenario [N, a, N, m, N, a]
if (addition.includes(thirdOperator)) {
math.doMath();
math.doMath();
}
}
},
};
const addDigitsListener = (digitButtons, responsePane) => {
digitButtons.addEventListener('click', (e) => {
const elemText = e.target.innerText;
responsePane.innerText = validationHelpers.validateDigitsInput(responsePane, elemText);
responsePane.dataset.processed = false;
});
};
const addOperationsListener = (operationButtons, responsePane) => {
operationButtons.addEventListener('click', (e) => {
validationHelpers.validateOperationsInput(responsePane);
responsePane.dataset.processed = true;
tape.push(e.target.innerText);
if (tape.length === 4 || tape.length === 6) {
math.evaluateTape();
}
if (tape[tape.length - 1] === '=') {
while (tape.length > 2) {
math.doMath();
}
tape.pop();
responsePane.removeAttribute('data-processed');
}
responsePane.innerText = validationHelpers.validateOutput(responsePane.innerText);
});
};
const addEventsToButtons = () => {
const responsePane = document.getElementById('response-pane');
const operationButtons = document.getElementById('operation-buttons');
const digitButtons = document.getElementById('digit-buttons');
addDigitsListener(digitButtons, responsePane);
addOperationsListener(operationButtons, responsePane);
};
document.addEventListener('DOMContentLoaded', addEventsToButtons());
}
| Fix issue where an irrational number would freeze the calculator
| index.js | Fix issue where an irrational number would freeze the calculator | <ide><path>ndex.js
<ide> {
<del> const tape = [];
<add> let tape = [];
<ide>
<ide> // TODO make AC functional, don't disable AC here, instruct user to press AC in validateNumber()
<ide> const disableButtons = () => {
<ide> validateNumber: (number) => {
<ide> if (number.toPrecision().length > 10) {
<ide> disableButtons();
<add> tape = ['='];
<ide> return validationHelpers.errors[0];
<ide> } else if (number === Infinity) {
<ide> disableButtons();
<add> tape = ['='];
<ide> return validationHelpers.errors[1];
<ide> }
<ide> return number; |
|
Java | agpl-3.0 | f0f97d2c336fd1542f8ed5fe219fab01fdbb53f0 | 0 | DJmaxZPL4Y/Photon-Server | /*
* Copyright (c) 2016 MCPhoton <http://mcphoton.org> and contributors.
*
* This file is part of the Photon API <https://github.com/mcphoton/Photon-API>.
*
* The Photon API is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The Photon API is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.mcphoton.item;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Objects;
import org.mcphoton.Photon;
/**
* A stack of items.
*
* @author TheElectronWill
*/
public class ItemStack {
	// Type of the stacked items -- presumably never null, TODO confirm.
	protected ItemType type;
	// Maximum capacity, current item count, and damage value of the stack.
	protected int maxSize, size, damage;
	/**
	 * Creates a stack.
	 *
	 * @param type    the item type
	 * @param maxSize maximum number of items the stack may hold
	 * @param size    initial number of items
	 * @param damage  initial damage value
	 *                NOTE(review): arguments are not validated (e.g. size may
	 *                exceed maxSize) -- confirm callers guarantee sane values.
	 */
	public ItemStack(ItemType type, int maxSize, int size, int damage) {
		this.type = type;
		this.maxSize = maxSize;
		this.size = size;
		this.damage = damage;
	}
	/**
	 * Creates an empty stack (size 0, damage 0) with the default max size 64.
	 */
	public ItemStack(ItemType type) {
		this(type, 64, 0, 0);
	}
/**
* @return true if the stack is empty.
*/
boolean isEmpty() {
return size == 0;
}
/**
* @return true if the stack is full, ie if its size is equal to its max size.
*/
boolean isFull() {
return size == maxSize;
}
/**
* @return the stack's size.
*/
int getSize() {
return size;
}
/**
* Adds the given value to the current size value.
*
* @param delta the value to add
*/
void addSize(int delta) {
size += delta;
}
/**
* Sets the stack's size.
*/
void setSize(int size) {
this.size = size;
}
/**
* @return the max stack's size.
*/
int getMaxSize() {
return maxSize;
}
/**
* @return the item's type.
*/
ItemType getType() {
return type;
}
/**
* @return the stack's damage.
*/
int getDamage() {
return damage;
}
/**
* Adds the given value to the current damage value.
*
* @param delta the value to add
*/
void addDamage(int delta) {
this.damage += delta;
}
/**
* Sets the stack's damage.
*/
void setDamage(int damage) {
this.damage = damage;
}
public static ItemStack readFrom(ByteBuffer buff) throws IOException {
int typeId = buff.getShort();
if (typeId == -1) {
return new ItemStack(Photon.getGameRegistry().getRegisteredItem(0));
} else {
int size = buff.get();
int damage = buff.getShort();
//TODO read NBT data like enchantments
return new ItemStack(Photon.getGameRegistry().getRegisteredItem(typeId), 64, size,
damage);
}
}
@Override
public String toString() {
return "ItemStack{"
+ "type="
+ type
+ ", maxSize="
+ maxSize
+ ", size="
+ size
+ ", damage="
+ damage
+ '}';
}
@Override
public int hashCode() {
int hash = 3;
hash = 47 * hash + Objects.hashCode(this.type);
hash = 47 * hash + this.maxSize;
hash = 47 * hash + this.size;
hash = 47 * hash + this.damage;
return hash;
}
	/**
	 * Two stacks are equal when they have the same size, item type and max
	 * size. NOTE(review): damage is NOT part of equality, so stacks
	 * differing only in damage compare equal -- confirm this is intended
	 * and keep hashCode() consistent with whatever is decided.
	 */
	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		}
		if (obj instanceof ItemStack) {
			ItemStack other = (ItemStack)obj;
			return size == other.size && type.equals(other.type) && maxSize == other.maxSize;
		}
		return false;
	}
} | api/src/main/java/org/mcphoton/item/ItemStack.java | /*
* Copyright (c) 2016 MCPhoton <http://mcphoton.org> and contributors.
*
* This file is part of the Photon API <https://github.com/mcphoton/Photon-API>.
*
* The Photon API is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* The Photon API is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.mcphoton.item;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Objects;
import org.mcphoton.Photon;
/**
* A stack of items.
*
* @author TheElectronWill
*/
public class ItemStack {
protected ItemType type;
protected int maxSize, size, damage;
public ItemStack(ItemType type, int maxSize, int size, int damage) {
this.type = type;
this.maxSize = maxSize;
this.size = size;
this.damage = damage;
}
public ItemStack(ItemType type) {
this(type, 64, 0, 0);
}
/**
* @return true if the stack is empty.
*/
boolean isEmpty() {
return size == 0;
}
/**
* @return true if the stack is full, ie if its size is equal to its max size.
*/
boolean isFull() {
return size == maxSize;
}
/**
* @return the stack's size.
*/
int getSize() {
return size;
}
/**
* Adds the given value to the current size value.
*
* @param delta the value to add
*/
void addSize(int delta) {
size += delta;
}
/**
* Sets the stack's size.
*/
void setSize(int size) {
this.size = size;
}
/**
* @return the max stack's size.
*/
int getMaxSize() {
return maxSize;
}
/**
* @return the item's type.
*/
ItemType getType() {
return type;
}
/**
* @return the stack's damage.
*/
int getDamage() {
return damage;
}
/**
* Adds the given value to the current damage value.
*
* @param delta the value to add
*/
void addDamage(int delta) {
this.damage += delta;
}
/**
* Sets the stack's damage.
*/
void setDamage(int damage) {
this.damage = damage;
}
public static ItemStack readFrom(ByteBuffer buff) throws IOException {
int typeId = buff.getShort();
if (typeId == -1) {
return new ItemStack(Photon.getGameRegistry().getRegisteredItem(0));
} else {
int size = buff.get();
int damage = buff.getShort();
//TODO read NBT data like enchantments
return new ItemStack(Photon.getGameRegistry().getRegisteredItem(typeId), 64, size, damage);
}
}
@Override
public String toString() {
return "ItemStack{" + "type=" + type + ", maxSize=" + maxSize + ", size=" + size + ", damage=" + damage + '}';
}
@Override
public int hashCode() {
int hash = 3;
hash = 47 * hash + Objects.hashCode(this.type);
hash = 47 * hash + this.maxSize;
hash = 47 * hash + this.size;
hash = 47 * hash + this.damage;
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof ItemStack) {
ItemStack other = (ItemStack) obj;
return size == other.size && type.equals(other.type) && maxSize == other.maxSize;
}
return false;
}
}
| Format ItemStack
| api/src/main/java/org/mcphoton/item/ItemStack.java | Format ItemStack | <ide><path>pi/src/main/java/org/mcphoton/item/ItemStack.java
<ide> * @author TheElectronWill
<ide> */
<ide> public class ItemStack {
<del>
<ide> protected ItemType type;
<ide> protected int maxSize, size, damage;
<ide>
<ide> int size = buff.get();
<ide> int damage = buff.getShort();
<ide> //TODO read NBT data like enchantments
<del> return new ItemStack(Photon.getGameRegistry().getRegisteredItem(typeId), 64, size, damage);
<add> return new ItemStack(Photon.getGameRegistry().getRegisteredItem(typeId), 64, size,
<add> damage);
<ide> }
<ide> }
<ide>
<ide> @Override
<ide> public String toString() {
<del> return "ItemStack{" + "type=" + type + ", maxSize=" + maxSize + ", size=" + size + ", damage=" + damage + '}';
<add> return "ItemStack{"
<add> + "type="
<add> + type
<add> + ", maxSize="
<add> + maxSize
<add> + ", size="
<add> + size
<add> + ", damage="
<add> + damage
<add> + '}';
<ide> }
<ide>
<ide> @Override
<ide> return true;
<ide> }
<ide> if (obj instanceof ItemStack) {
<del> ItemStack other = (ItemStack) obj;
<add> ItemStack other = (ItemStack)obj;
<ide> return size == other.size && type.equals(other.type) && maxSize == other.maxSize;
<ide> }
<ide> return false;
<ide> }
<del>
<ide> } |
|
Java | apache-2.0 | 4cde02ea26ed6b1cdcb7d97db9f143d1eb6ba2b0 | 0 | 0xf1f0/QuakeReport | package com.example.android.quakereport;
import android.content.AsyncTaskLoader;
import android.content.Context;
import android.util.Log;
import java.util.List;
/**
* Created by Lenovo on 7/6/2017.
*/
/*
    Loads a list of earthquakes by using an AsyncTaskLoader to perform the
    network request to the destination (USGS) url.
 */
public class EarthquakeLoader extends AsyncTaskLoader<List<Earthquake>>
{
    /** Tag for log messages. */
    private static final String LOG_TAG = EarthquakeLoader.class.getSimpleName();

    /** Query URL; may be null, in which case no request is performed. */
    private final String mUrl;

    /**
     * @param context context used by the loader framework
     * @param url     USGS query url to fetch earthquake data from
     */
    public EarthquakeLoader(Context context, String url)
    {
        super(context);
        this.mUrl = url;
    }

    /** Triggers loadInBackground() whenever the loader is started. */
    @Override
    protected void onStartLoading()
    {
        forceLoad();
    }

    /**
     * Runs the earthquake data request on a background thread.
     *
     * @return the fetched earthquakes, or null when the url is missing or
     *         the request failed
     */
    @Override
    public List<Earthquake> loadInBackground()
    {
        // Don't perform the request if there is no URL to query.
        if (mUrl == null)
        {
            return null;
        }
        List<Earthquake> earthquakes = null;
        try
        {
            // Perform the HTTP request for earthquake data and process the response.
            earthquakes = QueryUtils.fetchEarthquakeData(mUrl);
        }
        catch (SecurityException e)
        {
            Log.e(LOG_TAG, e.getMessage(), e);
        }
        return earthquakes;
    }
}
| app/src/main/java/com/example/android/quakereport/EarthquakeLoader.java | package com.example.android.quakereport;
import android.content.AsyncTaskLoader;
import android.content.Context;
import android.util.Log;
import java.util.List;
/**
* Created by Lenovo on 7/6/2017.
*/
public class EarthquakeLoader extends AsyncTaskLoader<List<Earthquake>>
{
final static String LOG_TAG = EarthquakeLoader.class.getSimpleName();
private String mUrl;
//TODO: Extract the earthquake data from the USGS url
public EarthquakeLoader(Context context, String url)
{
super(context);
this.mUrl = url;
}
@Override
protected void onStartLoading()
{
forceLoad();
}
@Override
public List<Earthquake> loadInBackground()
{
List<Earthquake> result = null;
// Don't perform the request if there are no URLs, or the first URL
if(getUrl().length() < 1)
return null;
try
{
// Perform the HTTP request for earthquake data and process the response.
result = QueryUtils.fetchEarthquakeData(getUrl());
}catch (SecurityException e)
{
Log.e(LOG_TAG, e.getMessage(), e);
}
return result;
}
//@return the url of the earthquake data to the displayed
public String getUrl()
{
return mUrl;
}
}
| Replace AsyncTask with AsyncTaskLoader and do some clean up.
| app/src/main/java/com/example/android/quakereport/EarthquakeLoader.java | Replace AsyncTask with AsyncTaskLoader and do some clean up. | <ide><path>pp/src/main/java/com/example/android/quakereport/EarthquakeLoader.java
<ide> * Created by Lenovo on 7/6/2017.
<ide> */
<ide>
<add>/*
<add> Loads a list of earthquakes by using an AsyncTaskLoader to perform the
<add> network request to the destination(USGS) url.
<add> */
<ide> public class EarthquakeLoader extends AsyncTaskLoader<List<Earthquake>>
<ide> {
<del> final static String LOG_TAG = EarthquakeLoader.class.getSimpleName();
<add> /* Tag for log messages */
<add> private final static String LOG_TAG = EarthquakeLoader.class.getSimpleName();
<add>
<add> /* Query URL */
<ide> private String mUrl;
<ide>
<ide> //TODO: Extract the earthquake data from the USGS url
<ide> forceLoad();
<ide> }
<ide>
<add> /*
<add> * Run the earthquake data request in the background
<add> */
<ide> @Override
<ide> public List<Earthquake> loadInBackground()
<ide> {
<del> List<Earthquake> result = null;
<add> List<Earthquake> earthquakes = null;
<ide>
<ide> // Don't perform the request if there are no URLs, or the first URL
<del> if(getUrl().length() < 1)
<add> if(mUrl == null)
<ide> return null;
<ide> try
<ide> {
<del> // Perform the HTTP request for earthquake data and process the response.
<del> result = QueryUtils.fetchEarthquakeData(getUrl());
<add> /* Perform the HTTP request for earthquake data and process the response. */
<add> earthquakes = QueryUtils.fetchEarthquakeData(mUrl);
<ide>
<ide> }catch (SecurityException e)
<ide> {
<ide> Log.e(LOG_TAG, e.getMessage(), e);
<ide> }
<del> return result;
<del> }
<del>
<del> //@return the url of the earthquake data to the displayed
<del> public String getUrl()
<del> {
<del> return mUrl;
<add> return earthquakes;
<ide> }
<ide> } |
|
Java | mit | c67bb07d50e81194d27426f68ec42f916c3342ec | 0 | vtsukur/spring-rest-black-market,vtsukur/spring-rest-black-market,vtsukur/spring-rest-black-market | package org.vtsukur.spring.rest.market.util;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.datatype.jsr310.JSR310Module;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.rest.core.config.RepositoryRestConfiguration;
import org.springframework.data.rest.webmvc.config.RepositoryRestMvcConfiguration;
import org.springframework.stereotype.Component;
import org.vtsukur.spring.rest.market.domain.core.ad.Ad;
import org.vtsukur.spring.rest.market.domain.core.user.User;
import org.vtsukur.spring.rest.market.infrastructure.SecurityUtils;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDateTime;
import java.util.Random;
/**
 * Seeds the application with demo data on startup — a fixed "admin" user, a
 * fixed "stable" user, and a batch of randomly generated users each with one
 * published {@link Ad} — and declares a few framework beans (a
 * {@link CommandLineRunner} that triggers the load, the Jackson JSR-310
 * module, and a Spring Data REST configuration tweak).
 *
 * @author Khyber Sen
 */
@Component
public class ApplicationConfiguration {

    /** Ukrainian mobile operator prefixes used when generating phone numbers. */
    private static final Integer[] MOBILE_OPERATOR_CODES = new Integer[] {
            39,
            50,
            63,
            66,
            67,
            68,
            93,
            95,
            96,
            97,
            98,
            99
    };

    // NOTE(review): currently unused — nextDistrict() draws from
    // LVIV_DISTRICTS even though generated ads are located in "Киев".
    // Confirm which district list is intended before removing either.
    private static final String[] KYIV_DISTRICTS = new String[] {
            "Голосеево",
            "Дарница",
            "Деснянский",
            "Днепровский",
            "Оболонь",
            "Печерск",
            "Подол",
            "Святошино",
            "Соломенский",
            "Шевченковский"
    };

    private static final String[] LVIV_DISTRICTS = new String[] {
            "Шевченківський",
            "Личаківський",
            "Сихівський",
            "Франківський",
            "Залізничний",
            "Личаківський"
    };

    /** Random ad comments; the empty string yields ads without a comment. */
    private static final String[] COMMENTS = new String[] {
            "",
            "целиком",
            "можно частями",
            "малыш, я подъеду"
    };

    /** Maximum gap, in minutes, between consecutive generated publish times. */
    public static final int PUBLISHING_TIME_MAX_DIFF = 4;

    /**
     * Single shared RNG for all generators. The previous implementation
     * created a new {@link Random} per call, which is wasteful and can yield
     * poorly distributed values when seeded in rapid succession.
     */
    private static final Random RANDOM = new Random();

    @Autowired
    private CrudRepository<User, Long> userRepository;

    @Autowired
    private CrudRepository<Ad, Long> adRepository;

    /** When true, only the fixed users are created — no random ads. */
    private boolean stableUsersOnly;

    /**
     * Populates the repositories. Runs under a temporary "system" admin
     * authentication so the persistence layer's security checks pass.
     * Publish times walk forward from (now - PUBLISHING_TIME_MAX_DIFF * 100)
     * minutes so all generated ads appear published in the past.
     */
    public void load() {
        int amount = 100;
        LocalDateTime now = LocalDateTime.now();
        final LocalDateTime publishedAt = now.minusMinutes(PUBLISHING_TIME_MAX_DIFF * amount);
        SecurityUtils.run("system", "system", new String[]{"ROLE_ADMIN"}, () -> {
            setupAdmin(publishedAt.minusMinutes(10));
            setupStableUser(publishedAt);
            if (!stableUsersOnly) {
                LocalDateTime at = publishedAt;
                for (int i = 0; i < amount; ++i) {
                    User user = nextUser();
                    userRepository.save(user);
                    Ad ad = nextAd(user, at);
                    adRepository.save(ad);
                    // chain publish times so each ad is newer than the last
                    at = ad.getPublishedAt();
                }
            }
        });
    }

    /** Creates the fixed admin user and, unless stableUsersOnly, one large BUY ad. */
    private void setupAdmin(LocalDateTime publishedAt) {
        User admin = new User();
        admin.setPhoneNumber("hontareva");
        userRepository.save(admin);

        if (!stableUsersOnly) {
            Ad ad = new Ad();
            ad.setType(Ad.Type.BUY);
            ad.setAmount(BigInteger.valueOf(100000000));
            ad.setCurrency(Ad.Currency.USD);
            ad.setRate(nextRate(ad.getCurrency(), ad.getType()));
            ad.setUser(admin);
            ad.setStatus(Ad.Status.PUBLISHED);
            ad.setPublishedAt(publishedAt);
            ad.setLocation(new Ad.Location("Киев", "Печерск"));
            ad.setComment("играем по крупному");
            adRepository.save(ad);
        }
    }

    /** Creates the fixed "stable" user and, unless stableUsersOnly, one BUY ad. */
    private void setupStableUser(LocalDateTime publishedAt) {
        User user = new User();
        user.setPhoneNumber("0681854104");
        userRepository.save(user);

        if (!stableUsersOnly) {
            Ad ad = new Ad();
            ad.setType(Ad.Type.BUY);
            ad.setAmount(BigInteger.valueOf(4000));
            ad.setCurrency(Ad.Currency.USD);
            ad.setRate(nextRate(ad.getCurrency(), ad.getType()));
            ad.setUser(user);
            ad.setStatus(Ad.Status.PUBLISHED);
            ad.setPublishedAt(publishedAt);
            ad.setLocation(new Ad.Location("Киев", "Соломенка"));
            ad.setComment("нужна валюта срочно, зарплата \"горит\", могу подъехать!");
            adRepository.save(ad);
        }
    }

    /** @return a new user with a random Ukrainian mobile phone number */
    private static User nextUser() {
        User user = new User();
        user.setPhoneNumber(nextPhoneNumber());
        return user;
    }

    /**
     * Builds one random published ad for the given user.
     *
     * @param user        owner of the ad
     * @param publishedAt publish time of the previous ad; the new ad is
     *                    published 1..PUBLISHING_TIME_MAX_DIFF-1 minutes later
     */
    private static Ad nextAd(User user, LocalDateTime publishedAt) {
        Ad ad = new Ad();
        Ad.Type type = nextType();
        ad.setType(type);
        Ad.Currency currency = nextCurrency();
        ad.setCurrency(currency);
        ad.setAmount(nextAmount());
        ad.setRate(nextRate(currency, type));
        ad.setUser(user);
        ad.setLocation(new Ad.Location("Киев", nextDistrict()));
        ad.setComment(nextComments());
        ad.setStatus(Ad.Status.PUBLISHED);
        ad.setPublishedAt(nextPublishingTime(publishedAt));
        return ad;
    }

    /** @return "0" + operator code + 7 random digits, zero-padded */
    private static String nextPhoneNumber() {
        return String.format("0%d%07d", nextMobileOperatorCode(), nextInt(10000000));
    }

    private static int nextMobileOperatorCode() {
        return nextRandomFromArray(MOBILE_OPERATOR_CODES);
    }

    private static Ad.Type nextType() {
        return nextRandomFromArray(Ad.Type.values());
    }

    /** @return a random amount in {100, 200, ..., 10000} */
    private static BigInteger nextAmount() {
        return BigInteger.valueOf(nextInt(100) * 100 + 100);
    }

    private static Ad.Currency nextCurrency() {
        return nextRandomFromArray(Ad.Currency.values());
    }

    private static BigDecimal nextRate(Ad.Currency currency, Ad.Type type) {
        return avgRate(currency, type);
    }

    /**
     * Hard-coded average market rates per currency and ad type.
     * NOTE(review): these are snapshot values and go stale; consider moving
     * them to configuration.
     */
    private static BigDecimal avgRate(Ad.Currency currency, Ad.Type type) {
        return (currency == Ad.Currency.USD ?
                BigDecimal.valueOf(type == Ad.Type.BUY ? 21.58 : 22.18) :
                BigDecimal.valueOf(type == Ad.Type.BUY ? 24.2 : 24.67)
        );
    }

    private static String nextDistrict() {
        return nextRandomFromArray(LVIV_DISTRICTS);
    }

    private static String nextComments() {
        return nextRandomFromArray(COMMENTS);
    }

    /** @return previous + a random 1..PUBLISHING_TIME_MAX_DIFF-1 minutes */
    private static LocalDateTime nextPublishingTime(LocalDateTime previous) {
        return previous.plusMinutes(nextInt(PUBLISHING_TIME_MAX_DIFF - 1) + 1);
    }

    private static <T> T nextRandomFromArray(T[] array) {
        return array[nextInt(array.length)];
    }

    /** Uniform random int in [0, bound), drawn from the shared RNG. */
    private static int nextInt(int bound) {
        return RANDOM.nextInt(bound);
    }

    /**
     * Configures whether {@link #load()} creates only the fixed users.
     *
     * @return this, for chaining
     */
    public ApplicationConfiguration minimalSet(boolean stableUsersOnly) {
        this.stableUsersOnly = stableUsersOnly;
        return this;
    }

    /** Triggers the full demo-data load once the application has started. */
    @Bean
    CommandLineRunner commandLineRunner(ApplicationConfiguration dataLoader) {
        return (o) -> dataLoader.minimalSet(false).load();
    }

    /** Registers Jackson support for java.time (JSR-310) types. */
    @Bean
    public Module newJSR310Module() {
        return new JSR310Module();
    }

    /** Exposes entity ids for {@link Ad} in Spring Data REST responses. */
    @Configuration
    public static class CustomRepositoryRestMvcConfiguration extends RepositoryRestMvcConfiguration {

        @Override
        protected void configureRepositoryRestConfiguration(RepositoryRestConfiguration config) {
            config.exposeIdsFor(Ad.class);
        }

    }

}
| src/main/java/org/vtsukur/spring/rest/market/util/ApplicationConfiguration.java | package org.vtsukur.spring.rest.market.util;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.datatype.jsr310.JSR310Module;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.rest.core.config.RepositoryRestConfiguration;
import org.springframework.data.rest.webmvc.config.RepositoryRestMvcConfiguration;
import org.springframework.stereotype.Component;
import org.vtsukur.spring.rest.market.domain.core.ad.Ad;
import org.vtsukur.spring.rest.market.domain.core.user.User;
import org.vtsukur.spring.rest.market.infrastructure.SecurityUtils;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDateTime;
import java.util.Random;
/**
* @author volodymyr.tsukur
*/
@Component
public class ApplicationConfiguration {
private static final Integer[] MOBILE_OPERATOR_CODES = new Integer[] {
39,
50,
63,
66,
67,
68,
93,
95,
96,
97,
98,
99
};
private static final String[] KYIV_DISTRICTS = new String[] {
"Голосеево",
"Дарница",
"Деснянский",
"Днепровский",
"Оболонь",
"Печерск",
"Подол",
"Святошино",
"Соломенский",
"Шевченковский"
};
private static final String[] LVIV_DISTRICTS = new String[] {
"Шевченківський",
"Личаківський",
"Сихівський",
"Франківський",
"Залізничний",
"Личаківський"
};
private static final String[] COMMENTS = new String[] {
"",
"целиком",
"можно частями",
"малыш, я подъеду"
};
public static final int PUBLISHING_TIME_MAX_DIFF = 4;
@Autowired
private CrudRepository<User, Long> userRepository;
@Autowired
private CrudRepository<Ad, Long> adRepository;
private boolean stableUsersOnly;
public void load() {
int amount = 100;
LocalDateTime now = LocalDateTime.now();
final LocalDateTime publishedAt = now.minusMinutes(PUBLISHING_TIME_MAX_DIFF * amount);
SecurityUtils.run("system", "system", new String[]{"ROLE_ADMIN"}, () -> {
setupAdmin(publishedAt.minusMinutes(10));
setupStableUser(publishedAt);
if (!stableUsersOnly) {
LocalDateTime at = publishedAt;
for (int i = 0; i < amount; ++i) {
User user = nextUser();
userRepository.save(user);
Ad ad = nextAd(user, at);
adRepository.save(ad);
at = ad.getPublishedAt();
}
}
});
}
private void setupAdmin(LocalDateTime publishedAt) {
User admin = new User();
admin.setPhoneNumber("hontareva");
userRepository.save(admin);
if (!stableUsersOnly) {
Ad ad = new Ad();
ad.setType(Ad.Type.BUY);
ad.setAmount(BigInteger.valueOf(100000000));
ad.setCurrency(Ad.Currency.USD);
ad.setRate(nextRate(ad.getCurrency(), ad.getType()));
ad.setUser(admin);
ad.setStatus(Ad.Status.PUBLISHED);
ad.setPublishedAt(publishedAt);
ad.setLocation(new Ad.Location("Киев", "Печерск"));
ad.setComment("играем по крупному");
adRepository.save(ad);
}
}
private void setupStableUser(LocalDateTime publishedAt) {
User user = new User();
user.setPhoneNumber("0681854104");
userRepository.save(user);
if (!stableUsersOnly) {
Ad ad = new Ad();
ad.setType(Ad.Type.BUY);
ad.setAmount(BigInteger.valueOf(4000));
ad.setCurrency(Ad.Currency.USD);
ad.setRate(nextRate(ad.getCurrency(), ad.getType()));
ad.setUser(user);
ad.setStatus(Ad.Status.PUBLISHED);
ad.setPublishedAt(publishedAt);
ad.setLocation(new Ad.Location("Киев", "Соломенка"));
ad.setComment("нужна валюта срочно, зарплата \"горит\", могу подъехать!");
adRepository.save(ad);
}
}
private static User nextUser() {
User user = new User();
user.setPhoneNumber(nextPhoneNumber());
return user;
}
private static Ad nextAd(User user, LocalDateTime publishedAt) {
Ad ad = new Ad();
Ad.Type type = nextType();
ad.setType(type);
Ad.Currency currency = nextCurrency();
ad.setCurrency(currency);
ad.setAmount(nextAmount());
ad.setRate(nextRate(currency, type));
ad.setUser(user);
ad.setLocation(new Ad.Location("Киев", nextDistrict()));
ad.setComment(nextComments());
ad.setStatus(Ad.Status.PUBLISHED);
ad.setPublishedAt(nextPublishingTime(publishedAt));
return ad;
}
private static String nextPhoneNumber() {
return String.format("0%d%07d", nextMobileOperatorCode(), nextInt(10000000));
}
private static int nextMobileOperatorCode() {
return nextRandomFromArray(MOBILE_OPERATOR_CODES);
}
private static Ad.Type nextType() {
return nextRandomFromArray(Ad.Type.values());
}
private static BigInteger nextAmount() {
return BigInteger.valueOf(nextInt(100) * 100 + 100);
}
private static Ad.Currency nextCurrency() {
return nextRandomFromArray(Ad.Currency.values());
}
private static BigDecimal nextRate(Ad.Currency currency, Ad.Type type) {
return avgRate(currency, type);
}
private static BigDecimal avgRate(Ad.Currency currency, Ad.Type type) {
return (currency == Ad.Currency.USD ?
BigDecimal.valueOf(type == Ad.Type.BUY ? 21.81 : 22) :
BigDecimal.valueOf(type == Ad.Type.BUY ? 24.24 : 24.44)
);
}
private static String nextDistrict() {
return nextRandomFromArray(LVIV_DISTRICTS);
}
private static String nextComments() {
return nextRandomFromArray(COMMENTS);
}
private static LocalDateTime nextPublishingTime(LocalDateTime previous) {
return previous.plusMinutes(nextInt(PUBLISHING_TIME_MAX_DIFF - 1) + 1);
}
private static <T> T nextRandomFromArray(T[] array) {
return array[nextInt(array.length)];
}
private static int nextInt(int bound) {
return new Random().nextInt(bound);
}
public ApplicationConfiguration minimalSet(boolean stableUsersOnly) {
this.stableUsersOnly = stableUsersOnly;
return this;
}
@Bean
CommandLineRunner commandLineRunner(ApplicationConfiguration dataLoader) {
return (o) -> dataLoader.minimalSet(false).load();
}
@Bean
public Module newJSR310Module() {
return new JSR310Module();
}
@Configuration
public static class CustomRepositoryRestMvcConfiguration extends RepositoryRestMvcConfiguration {
@Override
protected void configureRepositoryRestConfiguration(RepositoryRestConfiguration config) {
config.exposeIdsFor(Ad.class);
}
}
}
| Actualizing rates.
| src/main/java/org/vtsukur/spring/rest/market/util/ApplicationConfiguration.java | Actualizing rates. | <ide><path>rc/main/java/org/vtsukur/spring/rest/market/util/ApplicationConfiguration.java
<ide>
<ide> private static BigDecimal avgRate(Ad.Currency currency, Ad.Type type) {
<ide> return (currency == Ad.Currency.USD ?
<del> BigDecimal.valueOf(type == Ad.Type.BUY ? 21.81 : 22) :
<del> BigDecimal.valueOf(type == Ad.Type.BUY ? 24.24 : 24.44)
<add> BigDecimal.valueOf(type == Ad.Type.BUY ? 21.58 : 22.18) :
<add> BigDecimal.valueOf(type == Ad.Type.BUY ? 24.2 : 24.67)
<ide> );
<ide> }
<ide> |
|
Java | apache-2.0 | 14eff02838fdfc971dbebdbf235b0f1ab6c47931 | 0 | lgathy/ddd-toolbox | package com.doctusoft.ddd.idsequence;
import com.doctusoft.ddd.model.EntityKey;
import com.doctusoft.ddd.persistence.GenericPersistence;
import lombok.RequiredArgsConstructor;
import javax.inject.Inject;
import java.math.BigInteger;
import static java.util.Objects.*;
/**
 * Default {@link IdSequenceManager} implementation backed by
 * {@link GenericPersistence}.
 */
@RequiredArgsConstructor(onConstructor = @__(@Inject))
public class IdSequenceManagerImpl implements IdSequenceManager {
    
    private final GenericPersistence persistence;
    
    /**
     * Allocates {@code count} consecutive ids from the sequence identified by
     * {@code key}. The sequence entity is loaded for update
     * ({@code requireForUpdate}) so concurrent allocations cannot hand out
     * overlapping ranges.
     *
     * @param key   key of the {@link IdSequence} entity to allocate from
     * @param count number of ids to allocate; must be positive
     * @return the inclusive range of newly allocated ids
     * @throws IllegalArgumentException if {@code count} is not positive or the
     *         allocation would exceed the sequence's configured maximum value
     * @throws ArithmeticException if the new upper bound overflows a long
     */
    public <T extends IdSequence> IdAllocationRange allocate(EntityKey<T> key, long count) {
        requireNonNull(key);
        if (count <= 0) {
            throw new IllegalArgumentException("count=" + count + " for key: " + key);
        }
        IdSequence idSequence = persistence.requireForUpdate(key);
        long lastValue = idSequence.getLastValue();
        // Math.addExact replaces the former BigInteger round-trip: both throw
        // ArithmeticException on long overflow, but this avoids the allocations.
        long lowerBound = Math.addExact(lastValue, 1L);
        long upperBound = Math.addExact(lastValue, count);
        long maxValue = idSequence.getMaxValue();
        if (upperBound > maxValue) {
            throw new IllegalArgumentException("maxValue exceeded: " + upperBound + " > " + maxValue + " for key: " + key);
        }
        IdAllocationRange idRange = IdAllocationRange.fromTo(lowerBound, upperBound);
        idSequence.setLastValue(upperBound);
        persistence.update(idSequence);
        return idRange;
    }
    
}
| ddd-idsequence/src/main/java/com/doctusoft/ddd/idsequence/IdSequenceManagerImpl.java | package com.doctusoft.ddd.idsequence;
import com.doctusoft.ddd.model.EntityKey;
import com.doctusoft.ddd.persistence.GenericPersistence;
import lombok.RequiredArgsConstructor;
import javax.inject.Inject;
import java.math.BigInteger;
import static java.util.Objects.*;
@RequiredArgsConstructor(onConstructor = @__(@Inject))
public class IdSequenceManagerImpl implements IdSequenceManager {
private final GenericPersistence persistence;
public <T extends IdSequence> IdAllocationRange allocate(EntityKey<T> key, long count) {
requireNonNull(key);
if (count <= 0) {
throw new IllegalArgumentException("count=" + count + " for key: " + key);
}
IdSequence idSequence = persistence.require(key);
BigInteger last = BigInteger.valueOf(idSequence.getLastValue());
long lowerBound = last.add(BigInteger.ONE).longValueExact();
long upperBound = last.add(BigInteger.valueOf(count)).longValueExact();
long maxValue = idSequence.getMaxValue();
if (upperBound > maxValue) {
throw new IllegalArgumentException("maxValue exceeded: " + upperBound + " > " + maxValue + " for key: " + key);
}
IdAllocationRange idRange = IdAllocationRange.fromTo(lowerBound, upperBound);
idSequence.setLastValue(upperBound);
persistence.update(idSequence);
return idRange;
}
}
| We need to lock the idSequence for update upon id allocation
| ddd-idsequence/src/main/java/com/doctusoft/ddd/idsequence/IdSequenceManagerImpl.java | We need to lock the idSequence for update upon id allocation | <ide><path>dd-idsequence/src/main/java/com/doctusoft/ddd/idsequence/IdSequenceManagerImpl.java
<ide> if (count <= 0) {
<ide> throw new IllegalArgumentException("count=" + count + " for key: " + key);
<ide> }
<del> IdSequence idSequence = persistence.require(key);
<add> IdSequence idSequence = persistence.requireForUpdate(key);
<ide> BigInteger last = BigInteger.valueOf(idSequence.getLastValue());
<ide> long lowerBound = last.add(BigInteger.ONE).longValueExact();
<ide> long upperBound = last.add(BigInteger.valueOf(count)).longValueExact(); |
|
Java | apache-2.0 | aa6ed4491026591fab4414ea2341a84a2c00997a | 0 | kkysen/QuickTrip,kkysen/QuickTrip | package io.github.kkysen.quicktrip.app;
import java.io.IOException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import lombok.Getter;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Node;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.DatePicker;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.layout.GridPane;
//TODO dynamic pane edit to fxgraph

/**
 * Search-screen view: a {@link GridPane} form (loaded from
 * {@code view/SearchScreenView.fxml}) with origin/start-date inputs, a
 * dynamically growing list of destination rows, people/budget fields and the
 * action buttons. Work in progress — large parts of the programmatic layout
 * code are kept commented out below while the FXML migration is ongoing.
 *
 * @author Khyber Sen
 */
@Getter
public class SearchView {
    
    // Root pane; the inline instance is replaced by the FXML-loaded one
    // in the constructor.
    @FXML private GridPane grid = new GridPane();
    // Row-level helper wrapping `grid`.
    // NOTE(review): `rows` is bound to the inline GridPane above, but the
    // constructor later reassigns `grid` to the FXML-loaded pane — confirm
    // `rows` still targets the pane actually shown.
    private final GridRows rows = new GridRows(grid);
    
    @FXML private TextField origin;
    @FXML private DatePicker startDate;
    
    // The single default destination row (destNum == 0).
    private DestField dest;
    @FXML private Button moreDestsBtn;
    @FXML private WholeNumberField numDests;
    // All destination rows currently in the grid, in display order.
    private final List<DestField> destFields = new ArrayList<>();
    
    @FXML private WholeNumberField numPeople;
    @FXML private WholeNumberField budget;
    
    @FXML private Button searchBtn;
    @FXML private Button backBtn;
    @FXML private Button resetBtn;
    @FXML private Button lastSearchBtn;
    
    // Today's date as ISO text, captured once at construction.
    @FXML private final String now = LocalDate.now().toString();
    
    /**
     * model and view: one destination row (label + address field + number of
     * days field). Constructing an instance registers it in
     * {@link SearchView#destFields}.
     *
     * @author Khyber Sen
     */
    public class DestField implements Model, Nodes {
        
        private static final int MAX_NUM_DAYS = 365;
        
        private final int destNum;
        private final Label label;
        private final TextField address;
        private final WholeNumberField numDays;
        
        /**
         * @param destNum 1-based index shown in the label; 0 means the single
         *                unnumbered default destination
         */
        public DestField(final int destNum) {
            this.destNum = destNum;
            String labelText = "Destination";
            // if destNum = 0, don't add destNum to label
            if (destNum != 0) {
                labelText += " " + destNum;
            }
            label = new Label(labelText);
            address = new TextField();
            numDays = new WholeNumberField(MAX_NUM_DAYS);
            destFields.add(this);
        }
        
        /** Appends this row's three nodes as a new grid row. */
        public void addToGrid() {
            rows.add(label, address, numDays);
        }
        
        @Override
        public Node[] toNodeArray() {
            return new Node[] {label, address, numDays};
        }
        
        /**
         * makes an error dialog if the address does not exist
         * @throws EmptyInputError if there's no address
         * 
         * @throws InputError if the address doesn't exist
         */
        @Validation
        private boolean validateAddress() throws AddressInputError, EmptyInputError {
            return AddressInputError.validate(address.getText());
        }
        
        /** Validates the number-of-days field against [0, MAX_NUM_DAYS]. */
        @Validation
        private boolean validateNumDays() throws WholeNumberInputError, EmptyInputError {
            WholeNumberInputError.validate(numDays.getText(), "Number of Days", MAX_NUM_DAYS);
            return true;
        }
        
        /**
         * serializes this into a Json Pojo
         * should be called after {@link #validate()}
         * 
         * @see #validate()
         * 
         * @return Json Pojo NoDateDestination for serialization
         */
        public NoDateDestination toNoDateDestination() {
            return new NoDateDestination(address.getText(), Integer.parseInt(numDays.getText()));
        }
        
        @Override
        public String toString() {
            return "DestField [destNum=" + destNum + ", label=" + label + ", address=" + address
                    + ", numDays=" + numDays + "]";
        }
        
    }
    
    // Programmatic grid styling; currently unused (see constructor) while the
    // layout comes from FXML.
    private void setupGrid() {
        grid.setAlignment(Pos.CENTER);
        grid.setHgap(10);
        grid.setVgap(10);
        grid.setPadding(new Insets(25, 25, 25, 25));
    }
    
    // Appends a "<label> <text field>" row and returns the field.
    private TextField addLabeledInputField(final String name) {
        final Label label = new Label(name);
        final TextField text = new TextField();
        rows.add(label, text);
        return text;
    }
    
    // NOTE(review): `columnIndex` is ignored and the method has no callers in
    // this class — likely leftover scaffolding.
    private Button createButton(final String name, final int columnIndex) {
        final Button btn = new Button(name);
        return btn;
    }
    
    /**
     * Rebuilds the destination rows so exactly {@code numDests} are shown.
     * Current strategy ("simplified but slower"): remove all existing rows,
     * then re-add either the single default row (numDests == 1) or freshly
     * numbered rows 1..numDests. The commented-out code below is an
     * incremental add/remove variant kept for reference.
     */
    public void setNumDestinations(final int numDests) {
        //        final int numToAdd = numDests - destFields.size();
        //        if (numToAdd == 0) {
        //            return;
        //        }
        //        final boolean adding = numToAdd > 0;
        //        
        //        final DestField lastDest = destFields.get(destFields.size() - 1);
        //        final int fromRowIndex = GridPane.getRowIndex(lastDest.address);
        //        if (adding) {
        //            for (int i = 0; i < numToAdd; i++) {
        //                rows.addNodes(i + fromRowIndex, destFields.get(i));
        //            }
        //        } else {
        //            final int numToRemove = - numToAdd;
        //            rows.removeRange(fromRowIndex - numToRemove, fromRowIndex);
        //        }
        
        //        final int numToRemove = destFields.size() - numDests;
        //        if (numToRemove > 0) {
        //            final int fromRowIndex = GridPane
        //                    .getRowIndex(destFields.get(destFields.size() - numToRemove).address);
        //            rows.removeRange(fromRowIndex, fromRowIndex + numToRemove);
        //            if (destFields.size() == 1) {
        //                destFields.set(0, dest);
        //                rows.add(fromRowIndex, dest.toNodeArray());
        //            }
        //            return;
        //        }
        //        
        //        final DestField lastDest = destFields.get(destFields.size() - 1);
        //        final int lastDestNum = lastDest.destNum;
        //        final int fromRowIndex = GridPane.getRowIndex(lastDest.address);
        //        int numToAdd = -numToRemove;
        //        if (destFields.size() == 1) {
        //            destFields.remove(0);
        //            numToAdd++;
        //        }
        //        final List<Node[]> destFieldsToAdd = new ArrayList<>(numToAdd);
        //        for (int i = 0; i < numToAdd; i++) {
        //            destFieldsToAdd.add(new DestField(i + lastDestNum).toNodeArray());
        //        }
        //        rows.addAll(fromRowIndex, destFieldsToAdd);
        
        // simplified but slower
        final int fromRowIndex = GridPane.getRowIndex(destFields.get(0).address);
        rows.removeRange(fromRowIndex, fromRowIndex + destFields.size());
        destFields.clear();
        if (numDests == 1) {
            rows.add(fromRowIndex, dest.toNodeArray());
            destFields.add(dest);
        } else {
            //final List<Node[]> destFieldNodes = new ArrayList<>(numDests);
            for (int i = 0; i < numDests; i++) {
                // new DestField(...) also re-registers itself in destFields
                rows.add(fromRowIndex + i, new DestField(i + 1).toNodeArray());
                //destFieldNodes.add(new DestField(i + 1).toNodeArray());
            }
            //rows.addAll(fromRowIndex, destFieldNodes);
        }
    }
    
    // Shows a resizable, blocking error dialog with the given content.
    private void alert(final Object o) {
        final Alert alert = new Alert(AlertType.ERROR, o.toString());
        alert.setResizable(true);
        alert.showAndWait();
    }
    
    private void alert() {
        alert("");
    }
    
    // Appends a "<label> <whole-number field bounded by max>" row.
    private WholeNumberField addWholeNumberField(final String name, final long max) {
        final Label label = new Label(name);
        final WholeNumberField input = new WholeNumberField(max);
        rows.add(label, input);
        return input;
    }
    
    private WholeNumberField addWholeNumberField(final String name) {
        return addWholeNumberField(name, Long.MAX_VALUE);
    }
    
    /**
     * Resets the form to its initial state.
     * NOTE(review): this replaces the @FXML-injected controls with fresh,
     * unattached instances instead of clearing them — the new instances are
     * never added to the scene graph, so the on-screen controls keep their
     * old values. Confirm whether clearing the injected controls was intended.
     */
    public void reset() {
        origin = new TextField();
        moreDestsBtn = new Button();
        numDests = new WholeNumberField();
        numPeople = new WholeNumberField();
        budget = new WholeNumberField();
        searchBtn = new Button();
        backBtn = new Button();
        resetBtn = new Button();
        lastSearchBtn = new Button();
        rows.clear();
        //origin = addLabeledInputField("Origin");
        //        startDate = addLabeledInputField("Start Date");
        //        rowIndex++;
        //final Label startDateLabel = new Label("Start Date");
        //startDate = new DatePicker(LocalDate.now());
        //rows.add(startDateLabel, startDate);
        dest = new DestField(0);
        dest.addToGrid();
        /*moreDestsBtn = new Button("Number of Destinations");
        numDests = new WholeNumberField(23);
        rows.add(moreDestsBtn, numDests);*/
        //numDests = addButtonedInputField("Number of Destinations", event -> makeMoreDests());
        /*numPeople = addWholeNumberField("Number of People");
        budget = addWholeNumberField("Budget");
        searchBtn = new Button("Search");
        rows.add(searchBtn);
        resetBtn = new Button("Reset");
        rows.add(resetBtn);
        backBtn = new Button("Back");
        rows.add(backBtn);
        lastSearchBtn = new Button("Last Search");
        rows.add(lastSearchBtn);*/
    }
    
    /**
     * Loads the FXML layout, wires {@link SearchController} as its controller,
     * defaults the start date to today, and initializes the form via
     * {@link #reset()}. An {@link IOException} from the loader is only logged
     * to stderr, leaving {@code grid} as the empty inline pane.
     */
    public SearchView() {
        FXMLLoader loader = new FXMLLoader();
        loader.setLocation(getClass().getResource("view/SearchScreenView.fxml"));
        loader.setController(SearchController.class);
        try {
            grid = loader.load();
        } catch (IOException e) {
            e.printStackTrace();
        }
        ((DatePicker)grid.lookup("#startDate")).setValue(LocalDate.now());
        //setupGrid();
        reset();
    }
    
}
| src/main/java/io/github/kkysen/quicktrip/app/SearchView.java | package io.github.kkysen.quicktrip.app;
import java.io.IOException;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import lombok.Getter;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Node;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.DatePicker;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.layout.GridPane;
/**
*
*
* @author Khyber Sen
*/
@Getter
public class SearchView {
@FXML private GridPane grid = new GridPane();
private final GridRows rows = new GridRows(grid);
@FXML private TextField origin;
@FXML private DatePicker startDate;
private DestField dest;
@FXML private Button moreDestsBtn;
@FXML private WholeNumberField numDests;
private final List<DestField> destFields = new ArrayList<>();
@FXML private WholeNumberField numPeople;
@FXML private WholeNumberField budget;
@FXML private Button searchBtn;
@FXML private Button backBtn;
@FXML private Button resetBtn;
@FXML private Button lastSearchBtn;
@FXML private final String now = LocalDate.now().toString();
/**
* model and view
*
* @author Khyber Sen
*/
public class DestField implements Model, Nodes {
private static final int MAX_NUM_DAYS = 365;
private final int destNum;
private final Label label;
private final TextField address;
private final WholeNumberField numDays;
public DestField(final int destNum) {
this.destNum = destNum;
String labelText = "Destination";
// if destNum = 0, don't add destNum to label
if (destNum != 0) {
labelText += " " + destNum;
}
label = new Label(labelText);
address = new TextField();
numDays = new WholeNumberField(MAX_NUM_DAYS);
destFields.add(this);
}
public void addToGrid() {
rows.add(label, address, numDays);
}
@Override
public Node[] toNodeArray() {
return new Node[] {label, address, numDays};
}
/**
* makes an error dialog if the address does not exist
* @throws EmptyInputError if there's no address
*
* @throws InputError if the address doesn't exist
*/
@Validation
private boolean validateAddress() throws AddressInputError, EmptyInputError {
return AddressInputError.validate(address.getText());
}
@Validation
private boolean validateNumDays() throws WholeNumberInputError, EmptyInputError {
WholeNumberInputError.validate(numDays.getText(), "Number of Days", MAX_NUM_DAYS);
return true;
}
/**
* serializes this into a Json Pojo
* should be called after {@link #validate()}
*
* @see #validate()
*
* @return Json Pojo NoDateDestination for serialization
*/
public NoDateDestination toNoDateDestination() {
return new NoDateDestination(address.getText(), Integer.parseInt(numDays.getText()));
}
@Override
public String toString() {
return "DestField [destNum=" + destNum + ", label=" + label + ", address=" + address
+ ", numDays=" + numDays + "]";
}
}
private void setupGrid() {
grid.setAlignment(Pos.CENTER);
grid.setHgap(10);
grid.setVgap(10);
grid.setPadding(new Insets(25, 25, 25, 25));
}
private TextField addLabeledInputField(final String name) {
final Label label = new Label(name);
final TextField text = new TextField();
rows.add(label, text);
return text;
}
private Button createButton(final String name, final int columnIndex) {
final Button btn = new Button(name);
return btn;
}
public void setNumDestinations(final int numDests) {
// final int numToAdd = numDests - destFields.size();
// if (numToAdd == 0) {
// return;
// }
// final boolean adding = numToAdd > 0;
//
// final DestField lastDest = destFields.get(destFields.size() - 1);
// final int fromRowIndex = GridPane.getRowIndex(lastDest.address);
// if (adding) {
// for (int i = 0; i < numToAdd; i++) {
// rows.addNodes(i + fromRowIndex, destFields.get(i));
// }
// } else {
// final int numToRemove = - numToAdd;
// rows.removeRange(fromRowIndex - numToRemove, fromRowIndex);
// }
// final int numToRemove = destFields.size() - numDests;
// if (numToRemove > 0) {
// final int fromRowIndex = GridPane
// .getRowIndex(destFields.get(destFields.size() - numToRemove).address);
// rows.removeRange(fromRowIndex, fromRowIndex + numToRemove);
// if (destFields.size() == 1) {
// destFields.set(0, dest);
// rows.add(fromRowIndex, dest.toNodeArray());
// }
// return;
// }
//
// final DestField lastDest = destFields.get(destFields.size() - 1);
// final int lastDestNum = lastDest.destNum;
// final int fromRowIndex = GridPane.getRowIndex(lastDest.address);
// int numToAdd = -numToRemove;
// if (destFields.size() == 1) {
// destFields.remove(0);
// numToAdd++;
// }
// final List<Node[]> destFieldsToAdd = new ArrayList<>(numToAdd);
// for (int i = 0; i < numToAdd; i++) {
// destFieldsToAdd.add(new DestField(i + lastDestNum).toNodeArray());
// }
// rows.addAll(fromRowIndex, destFieldsToAdd);
// simplified but slower
final int fromRowIndex = GridPane.getRowIndex(destFields.get(0).address);
rows.removeRange(fromRowIndex, fromRowIndex + destFields.size());
destFields.clear();
if (numDests == 1) {
rows.add(fromRowIndex, dest.toNodeArray());
destFields.add(dest);
} else {
//final List<Node[]> destFieldNodes = new ArrayList<>(numDests);
for (int i = 0; i < numDests; i++) {
rows.add(fromRowIndex + i, new DestField(i + 1).toNodeArray());
//destFieldNodes.add(new DestField(i + 1).toNodeArray());
}
//rows.addAll(fromRowIndex, destFieldNodes);
}
}
private void alert(final Object o) {
final Alert alert = new Alert(AlertType.ERROR, o.toString());
alert.setResizable(true);
alert.showAndWait();
}
private void alert() {
alert("");
}
private WholeNumberField addWholeNumberField(final String name, final long max) {
final Label label = new Label(name);
final WholeNumberField input = new WholeNumberField(max);
rows.add(label, input);
return input;
}
private WholeNumberField addWholeNumberField(final String name) {
return addWholeNumberField(name, Long.MAX_VALUE);
}
public void reset() {
origin = new TextField();
moreDestsBtn = new Button();
numDests = new WholeNumberField();
numPeople = new WholeNumberField();
budget = new WholeNumberField();
searchBtn = new Button();
backBtn = new Button();
resetBtn = new Button();
lastSearchBtn = new Button();
rows.clear();
//origin = addLabeledInputField("Origin");
// startDate = addLabeledInputField("Start Date");
// rowIndex++;
//final Label startDateLabel = new Label("Start Date");
//startDate = new DatePicker(LocalDate.now());
//rows.add(startDateLabel, startDate);
dest = new DestField(0);
dest.addToGrid();
/*moreDestsBtn = new Button("Number of Destinations");
numDests = new WholeNumberField(23);
rows.add(moreDestsBtn, numDests);*/
//numDests = addButtonedInputField("Number of Destinations", event -> makeMoreDests());
/*numPeople = addWholeNumberField("Number of People");
budget = addWholeNumberField("Budget");
searchBtn = new Button("Search");
rows.add(searchBtn);
resetBtn = new Button("Reset");
rows.add(resetBtn);
backBtn = new Button("Back");
rows.add(backBtn);
lastSearchBtn = new Button("Last Search");
rows.add(lastSearchBtn);*/
}
public SearchView() {
FXMLLoader loader = new FXMLLoader();
loader.setLocation(getClass().getResource("view/SearchScreenView.fxml"));
loader.setController(SearchController.class);
try {
grid = loader.load();
} catch (IOException e) {
e.printStackTrace();
}
((DatePicker)grid.lookup("#startDate")).setValue(LocalDate.now());
//setupGrid();
reset();
}
}
| Added TODO for myself
| src/main/java/io/github/kkysen/quicktrip/app/SearchView.java | Added TODO for myself | <ide><path>rc/main/java/io/github/kkysen/quicktrip/app/SearchView.java
<ide> import javafx.scene.control.Label;
<ide> import javafx.scene.control.TextField;
<ide> import javafx.scene.layout.GridPane;
<add>
<add>//TODO dynamic pane edit to fxgraph
<ide>
<ide> /**
<ide> * |
|
Java | apache-2.0 | bedb7de45b2ce1ce3895afcc70a21425d8785a19 | 0 | jajakobyly/rustidea,jajakobyly/rustidea | /*
* Copyright 2015 Marek Kaput
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rustidea.psi.types;
import org.rustidea.stubs.types.*;
public interface RsStubElementTypes {
RsAttributeElementType ATTRIBUTE = RsAttributeElementType.INSTANCE;
RsAttributeItemElementType ATTRIBUTE_ITEM = RsAttributeItemElementType.INSTANCE;
RsAttributeItemListElementType ATTRIBUTE_ITEM_LIST = RsAttributeItemListElementType.INSTANCE;
RsDocElementType DOC = RsDocElementType.INSTANCE;
RsLifetimeElementType LIFETIME = RsLifetimeElementType.INSTANCE;
RsLifetimeTypeParameterElementType LIFETIME_TYPE_PARAMETER = RsLifetimeTypeParameterElementType.INSTANCE;
RsPathComponentElementType PATH_COMPONENT = RsPathComponentElementType.INSTANCE;
RsPathElementType PATH = RsPathElementType.INSTANCE;
RsTypeParameterElementType TYPE_PARAMETER = RsTypeParameterElementType.INSTANCE;
RsTypeParameterListElementType TYPE_PARAMETER_LIST = RsTypeParameterListElementType.INSTANCE;
RsWhereClauseElementType WHERE_CLAUSE = RsWhereClauseElementType.INSTANCE;
}
| src/org/rustidea/psi/types/RsStubElementTypes.java | /*
* Copyright 2015 Marek Kaput
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rustidea.psi.types;
import org.rustidea.stubs.types.*;
public interface RsStubElementTypes {
RsLifetimeElementType LIFETIME = RsLifetimeElementType.INSTANCE;
RsTypeParameterElementType TYPE_PARAMETER = RsTypeParameterElementType.INSTANCE;
RsLifetimeTypeParameterElementType LIFETIME_TYPE_PARAMETER = RsLifetimeTypeParameterElementType.INSTANCE;
RsTypeParameterListElementType TYPE_PARAMETER_LIST = RsTypeParameterListElementType.INSTANCE;
RsWhereClauseElementType WHERE_CLAUSE = RsWhereClauseElementType.INSTANCE;
RsPathElementType PATH = RsPathElementType.INSTANCE;
RsPathComponentElementType PATH_COMPONENT = RsPathComponentElementType.INSTANCE;
RsDocElementType DOC = RsDocElementType.INSTANCE;
RsAttributeElementType ATTRIBUTE = RsAttributeElementType.INSTANCE;
RsAttributeItemElementType ATTRIBUTE_ITEM = RsAttributeItemElementType.INSTANCE;
RsAttributeItemListElementType ATTRIBUTE_ITEM_LIST = RsAttributeItemListElementType.INSTANCE;
}
| Sort stub element type definitions
| src/org/rustidea/psi/types/RsStubElementTypes.java | Sort stub element type definitions | <ide><path>rc/org/rustidea/psi/types/RsStubElementTypes.java
<ide> import org.rustidea.stubs.types.*;
<ide>
<ide> public interface RsStubElementTypes {
<del> RsLifetimeElementType LIFETIME = RsLifetimeElementType.INSTANCE;
<del> RsTypeParameterElementType TYPE_PARAMETER = RsTypeParameterElementType.INSTANCE;
<del> RsLifetimeTypeParameterElementType LIFETIME_TYPE_PARAMETER = RsLifetimeTypeParameterElementType.INSTANCE;
<del> RsTypeParameterListElementType TYPE_PARAMETER_LIST = RsTypeParameterListElementType.INSTANCE;
<del> RsWhereClauseElementType WHERE_CLAUSE = RsWhereClauseElementType.INSTANCE;
<del> RsPathElementType PATH = RsPathElementType.INSTANCE;
<del> RsPathComponentElementType PATH_COMPONENT = RsPathComponentElementType.INSTANCE;
<del> RsDocElementType DOC = RsDocElementType.INSTANCE;
<ide> RsAttributeElementType ATTRIBUTE = RsAttributeElementType.INSTANCE;
<ide> RsAttributeItemElementType ATTRIBUTE_ITEM = RsAttributeItemElementType.INSTANCE;
<ide> RsAttributeItemListElementType ATTRIBUTE_ITEM_LIST = RsAttributeItemListElementType.INSTANCE;
<add> RsDocElementType DOC = RsDocElementType.INSTANCE;
<add> RsLifetimeElementType LIFETIME = RsLifetimeElementType.INSTANCE;
<add> RsLifetimeTypeParameterElementType LIFETIME_TYPE_PARAMETER = RsLifetimeTypeParameterElementType.INSTANCE;
<add> RsPathComponentElementType PATH_COMPONENT = RsPathComponentElementType.INSTANCE;
<add> RsPathElementType PATH = RsPathElementType.INSTANCE;
<add> RsTypeParameterElementType TYPE_PARAMETER = RsTypeParameterElementType.INSTANCE;
<add> RsTypeParameterListElementType TYPE_PARAMETER_LIST = RsTypeParameterListElementType.INSTANCE;
<add> RsWhereClauseElementType WHERE_CLAUSE = RsWhereClauseElementType.INSTANCE;
<ide> } |
|
Java | mit | 8f740bc08e03d06ca34406726d7092ddcde950ef | 0 | MarkEWaite/git-plugin,jenkinsci/git-plugin,jenkinsci/git-plugin,MarkEWaite/git-plugin,jenkinsci/git-plugin,MarkEWaite/git-plugin,MarkEWaite/git-plugin,jenkinsci/git-plugin,martinda/git-plugin,martinda/git-plugin,martinda/git-plugin | package hudson.plugins.git.browser;
import hudson.EnvVars;
import hudson.model.Item;
import hudson.model.Job;
import hudson.model.TaskListener;
import hudson.plugins.git.GitChangeSet;
import hudson.plugins.git.GitChangeSet.Path;
import hudson.scm.RepositoryBrowser;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import java.io.IOException;
import java.net.IDN;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.Objects;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import edu.umd.cs.findbugs.annotations.CheckForNull;
public abstract class GitRepositoryBrowser extends RepositoryBrowser<GitChangeSet> {
private /* mostly final */ String url;
private static final Logger LOGGER = Logger.getLogger(GitRepositoryBrowser.class.getName());
@Deprecated
protected GitRepositoryBrowser() {
}
protected GitRepositoryBrowser(String repourl) {
this.url = repourl;
}
public final String getRepoUrl() {
return url;
}
public final URL getUrl() throws IOException {
String u = url;
StaplerRequest req = Stapler.getCurrentRequest();
if (req != null) {
Job job = req.findAncestorObject(Job.class);
if (job != null) {
EnvVars env;
try {
env = job.getEnvironment(null, TaskListener.NULL);
} catch (InterruptedException e) {
throw new IOException("Failed to retrieve job environment", e);
}
u = env.expand(url);
}
}
if (getNormalizeUrl()) {
return normalizeToEndWithSlash(new URL(u));
}
else {
return new URL(u);
}
}
/**
* Determines the link to the diff between the version
* in the specified revision of {@link hudson.plugins.git.GitChangeSet.Path} to its previous version.
*
* @param path affected file path
* @return
* null if the browser doesn't have any URL for diff.
* @throws IOException on input or output error
*/
public abstract URL getDiffLink(GitChangeSet.Path path) throws IOException;
/**
* Determines the link to a single file under Git.
* This page should display all the past revisions of this file, etc.
*
* @param path affected file path
* @return
* null if the browser doesn't have any suitable URL.
* @throws IOException on input or output error
* @throws URISyntaxException on URI syntax error
*/
public abstract URL getFileLink(GitChangeSet.Path path) throws IOException, URISyntaxException;
/**
* Determines the link to the given change set ID (SHA).
*
* @return the URL to the change set or {@code null} if this repository browser doesn't have any meaningful URL for
* a change set
*/
@CheckForNull
public URL getChangeSetLink(final String commitId) throws IOException {
if (!StringUtils.isBlank(commitId)) {
return getChangeSetLink(new CommitChangeSet(commitId));
}
return null;
}
/**
* Determines whether a URL should be normalized
* Overridden in the rare case where it shouldn't
*
* @return True if the URL should be normalized
*/
protected boolean getNormalizeUrl() {
return true;
}
/**
* Calculate the index of the given path in a
* sorted list of affected files
*
* @param path affected file path
* @return The index in the lexicographical sorted filelist
* @throws IOException on input or output error
*/
protected int getIndexOfPath(Path path) throws IOException {
final String pathAsString = path.getPath();
final GitChangeSet changeSet = path.getChangeSet();
int i = 0;
for (String affected : changeSet.getAffectedPaths())
{
if (affected.compareTo(pathAsString) < 0)
i++;
}
return i;
}
public static URL encodeURL(URL url) throws IOException {
try {
return new URI(url.getProtocol(), url.getUserInfo(), IDN.toASCII(url.getHost()), url.getPort(), url.getPath(), url.getQuery(), url.getRef()).toURL();
} catch (URISyntaxException e) {
throw new IOException(e);
}
}
protected static boolean initialChecksAndReturnOk(Item project, String cleanUrl){
if (cleanUrl == null) {
return true;
}
if (project == null || !project.hasPermission(Item.CONFIGURE)) {
return true;
}
if (cleanUrl.contains("$")) {
// set by variable, can't validate
return true;
}
return false;
}
/* Top level domains that should always be considered valid */
private static final Pattern SUFFIXES = Pattern.compile(".*[.](corp|home|local|localnet)$");
/* Browser URL validation of remote/local urls */
protected static boolean validateUrl(String url) throws URISyntaxException {
try {
URL urlToValidate = new URL(url);
String hostname = urlToValidate.getHost();
if (hostname == null) {
LOGGER.log(Level.FINE, "Invalid hostname validating URL {0}", url);
return false;
}
if (SUFFIXES.matcher(hostname).matches()) {
return true;
}
if (InetAddress.getByName(hostname) == null) {
LOGGER.log(Level.FINE, "Host unknown validating URL {0}", url);
return false;
}
} catch (MalformedURLException ex) {
LOGGER.log(Level.FINE, "Malformed URL exception validating URL " + url, ex);
return false;
} catch (UnknownHostException ex) {
LOGGER.log(Level.FINE, "Unknown host exception validating URL " + url, ex);
return false;
}
return true;
}
/**
* Used to obtain a repository link to a Git commit ID (SHA hash).
*/
private static class CommitChangeSet extends GitChangeSet {
private final String id;
CommitChangeSet(final String id) {
super(Collections.emptyList(), false);
this.id = id;
}
@Override
public String getId() {
return id;
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
CommitChangeSet that = (CommitChangeSet) o;
return Objects.equals(id, that.id);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), id);
}
}
private static final long serialVersionUID = 1L;
}
| src/main/java/hudson/plugins/git/browser/GitRepositoryBrowser.java | package hudson.plugins.git.browser;
import hudson.EnvVars;
import hudson.model.Item;
import hudson.model.Job;
import hudson.model.TaskListener;
import hudson.plugins.git.GitChangeSet;
import hudson.plugins.git.GitChangeSet.Path;
import hudson.scm.RepositoryBrowser;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import java.io.IOException;
import java.net.IDN;
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.Collections;
import java.util.Objects;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import edu.umd.cs.findbugs.annotations.CheckForNull;
public abstract class GitRepositoryBrowser extends RepositoryBrowser<GitChangeSet> {
private /* mostly final */ String url;
private static final Logger LOGGER = Logger.getLogger(GitRepositoryBrowser.class.getName());
@Deprecated
protected GitRepositoryBrowser() {
}
protected GitRepositoryBrowser(String repourl) {
this.url = repourl;
}
public final String getRepoUrl() {
return url;
}
public final URL getUrl() throws IOException {
String u = url;
StaplerRequest req = Stapler.getCurrentRequest();
if (req != null) {
Job job = req.findAncestorObject(Job.class);
if (job != null) {
EnvVars env;
try {
env = job.getEnvironment(null, TaskListener.NULL);
} catch (InterruptedException e) {
throw new IOException("Failed to retrieve job environment", e);
}
u = env.expand(url);
}
}
if (getNormalizeUrl()) {
return normalizeToEndWithSlash(new URL(u));
}
else {
return new URL(u);
}
}
/**
* Determines the link to the diff between the version
* in the specified revision of {@link hudson.plugins.git.GitChangeSet.Path} to its previous version.
*
* @param path affected file path
* @return
* null if the browser doesn't have any URL for diff.
* @throws IOException on input or output error
*/
public abstract URL getDiffLink(GitChangeSet.Path path) throws IOException;
/**
* Determines the link to a single file under Git.
* This page should display all the past revisions of this file, etc.
*
* @param path affected file path
* @return
* null if the browser doesn't have any suitable URL.
* @throws IOException on input or output error
* @throws URISyntaxException on URI syntax error
*/
public abstract URL getFileLink(GitChangeSet.Path path) throws IOException, URISyntaxException;
/**
* Determines the link to the given change set ID (SHA).
*
* @return the URL to the change set or {@code null} if this repository browser doesn't have any meaningful URL for
* a change set
*/
@CheckForNull
public URL getChangeSetLink(final String commitId) throws IOException {
if (!StringUtils.isBlank(commitId)) {
return getChangeSetLink(new CommitChangeSet(commitId));
}
return null;
}
/**
* Determines whether a URL should be normalized
* Overridden in the rare case where it shouldn't
*
* @return True if the URL should be normalized
*/
protected boolean getNormalizeUrl() {
return true;
}
/**
* Calculate the index of the given path in a
* sorted list of affected files
*
* @param path affected file path
* @return The index in the lexicographical sorted filelist
* @throws IOException on input or output error
*/
protected int getIndexOfPath(Path path) throws IOException {
final String pathAsString = path.getPath();
final GitChangeSet changeSet = path.getChangeSet();
int i = 0;
for (String affected : changeSet.getAffectedPaths())
{
if (affected.compareTo(pathAsString) < 0)
i++;
}
return i;
}
public static URL encodeURL(URL url) throws IOException {
try {
return new URI(url.getProtocol(), url.getUserInfo(), IDN.toASCII(url.getHost()), url.getPort(), url.getPath(), url.getQuery(), url.getRef()).toURL();
} catch (URISyntaxException e) {
throw new IOException(e);
}
}
protected static boolean initialChecksAndReturnOk(Item project, String cleanUrl){
if (cleanUrl == null) {
return true;
}
if (project == null || !project.hasPermission(Item.CONFIGURE)) {
return true;
}
if (cleanUrl.contains("$")) {
// set by variable, can't validate
return true;
}
return false;
}
/* Top level domains that should always be considered valid */
private static final Pattern SUFFIXES = Pattern.compile(".*[.](corp|home|local|localnet)$");
/* Browser URL validation of remote/local urls */
protected static boolean validateUrl(String url) throws URISyntaxException {
try {
URL urlToValidate = new URL(url);
String hostname = urlToValidate.getHost();
if (hostname == null) {
LOGGER.log(Level.FINE, "Invalid hostname validating URL {0}", url);
return false;
}
if (SUFFIXES.matcher(hostname).matches()) {
return true;
}
if (InetAddress.getByName(hostname) == null) {
LOGGER.log(Level.FINE, "Host unknown validating URL {0}", url);
return false;
}
} catch (MalformedURLException ex) {
LOGGER.log(Level.FINE, "Malformed URL exception validating URL " + url, ex);
return false;
} catch (UnknownHostException ex) {
LOGGER.log(Level.FINE, "Unknown host exception validating URL " + url, ex);
return false;
}
return true;
}
/**
* Used to obtain a repository link to a Git commit ID (SHA hash).
*/
private static class CommitChangeSet extends GitChangeSet {
private final String id;
CommitChangeSet(final String id) {
super(Collections.emptyList(), false);
this.id = id;
}
@Override
public String getId() {
return id;
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
CommitChangeSet that = (CommitChangeSet) o;
return Objects.equals(id, that.id);
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), id);
}
}
private static final long serialVersionUID = 1L;
}
| Remove some whitespace changes.
| src/main/java/hudson/plugins/git/browser/GitRepositoryBrowser.java | Remove some whitespace changes. | <ide><path>rc/main/java/hudson/plugins/git/browser/GitRepositoryBrowser.java
<ide> return true;
<ide> }
<ide> if (cleanUrl.contains("$")) {
<del> // set by variable, can't validate
<add> // set by variable, can't validate
<ide> return true;
<ide> }
<ide> return false; |
|
JavaScript | apache-2.0 | 18a5bfba9b611af49b9f9a1bb4eb880f90ca777b | 0 | BSWANG/denverdino.github.io,aduermael/docker.github.io,LuisBosquez/docker.github.io,JimGalasyn/docker.github.io,saiberz/kitematic,LuisBosquez/docker.github.io,JimGalasyn/docker.github.io,joaofnfernandes/docker.github.io,rillig/docker.github.io,LuisBosquez/docker.github.io,docker/kitematic,anweiss/docker.github.io,bdwill/docker.github.io,shin-/docker.github.io,menglingwei/denverdino.github.io,alexisbellido/docker.github.io,moxiegirl/kitematic,danix800/docker.github.io,docker/kitematic,anweiss/docker.github.io,shubheksha/docker.github.io,rillig/docker.github.io,phiroict/docker,gdevillele/docker.github.io,shubheksha/docker.github.io,gdevillele/docker.github.io,alexisbellido/docker.github.io,docker-zh/docker.github.io,menglingwei/denverdino.github.io,thaJeztah/docker.github.io,kelemen/kitematic,aduermael/docker.github.io,hypriot/kitematic,shin-/docker.github.io,troy0820/docker.github.io,denverdino/denverdino.github.io,gdevillele/docker.github.io,rillig/docker.github.io,bdwill/docker.github.io,daaru00/kitematic,aduermael/docker.github.io,denverdino/docker.github.io,zedtux/kitematic,denverdino/docker.github.io,docker/docker.github.io,joeuo/docker.github.io,thaJeztah/docker.github.io,londoncalling/docker.github.io,kitematic/kitematic,joaofnfernandes/docker.github.io,BSWANG/denverdino.github.io,alexisbellido/docker.github.io,LuisBosquez/docker.github.io,johnstep/docker.github.io,menglingwei/denverdino.github.io,londoncalling/docker.github.io,rlugojr/kitematic,docker-zh/docker.github.io,denverdino/denverdino.github.io,ridethepony/kitematic,phiroict/docker,johnstep/docker.github.io,shubheksha/docker.github.io,anweiss/docker.github.io,moxiegirl/kitematic,troy0820/docker.github.io,jzwlqx/denverdino.github.io,jzwlqx/denverdino.github.io,jzwlqx/denverdino.github.io,phiroict/docker,zedtux/kitematic,JimGalasyn/docker.github.io,sanderboom/kitematic,jzwlqx/denverdino.github.io,aduermael/docker.github.io,johnstep
/docker.github.io,LuisBosquez/docker.github.io,daaru00/kitematic,menglingwei/denverdino.github.io,docker-zh/docker.github.io,sanscontext/docker.github.io,docker/kitematic,johnstep/docker.github.io,rillig/docker.github.io,docker/kitematic,JimGalasyn/docker.github.io,kitematic/kitematic,shin-/docker.github.io,hypriot/kitematic,londoncalling/docker.github.io,JimGalasyn/docker.github.io,zedtux/kitematic,docker/docker.github.io,phiroict/docker,sanscontext/docker.github.io,BSWANG/denverdino.github.io,alexisbellido/docker.github.io,dongjoon-hyun/kitematic,thaJeztah/docker.github.io,joaofnfernandes/docker.github.io,menglingwei/denverdino.github.io,denverdino/denverdino.github.io,daaru00/kitematic,sanscontext/docker.github.io,docker/docker.github.io,shin-/docker.github.io,kelemen/kitematic,ridethepony/kitematic,troy0820/docker.github.io,BSWANG/denverdino.github.io,joeuo/docker.github.io,anweiss/docker.github.io,gdevillele/docker.github.io,rlugojr/kitematic,denverdino/docker.github.io,sanscontext/docker.github.io,denverdino/denverdino.github.io,dongjoon-hyun/kitematic,saiberz/kitematic,denverdino/docker.github.io,docker-zh/docker.github.io,rlugojr/kitematic,thaJeztah/docker.github.io,sanscontext/docker.github.io,alexisbellido/docker.github.io,kitematic/kitematic,denverdino/denverdino.github.io,denverdino/docker.github.io,docker-zh/docker.github.io,danix800/docker.github.io,bdwill/docker.github.io,hypriot/kitematic,sanderboom/kitematic,joeuo/docker.github.io,bdwill/docker.github.io,bdwill/docker.github.io,londoncalling/docker.github.io,BSWANG/denverdino.github.io,johnstep/docker.github.io,shubheksha/docker.github.io,danix800/docker.github.io,sanderboom/kitematic,joaofnfernandes/docker.github.io,gdevillele/docker.github.io,docker/docker.github.io,shin-/docker.github.io,londoncalling/docker.github.io,joaofnfernandes/docker.github.io,saiberz/kitematic,moxiegirl/kitematic,ridethepony/kitematic,danix800/docker.github.io,dongjoon-hyun/kitematic,jzwlqx/denverdino.github.io,docker/doc
ker.github.io,shubheksha/docker.github.io,troy0820/docker.github.io,thaJeztah/docker.github.io,kelemen/kitematic,anweiss/docker.github.io,phiroict/docker,joeuo/docker.github.io,joeuo/docker.github.io | import remote from 'remote';
import shell from 'shell';
import router from './router';
import util from './utils/Util';
import metrics from './utils/MetricsUtil';
import machine from './utils/DockerMachineUtil';
import docker from './utils/DockerUtil';
var app = remote.require('app');
// main.js
var MenuTemplate = function () {
return [
{
label: 'Kitematic',
submenu: [
{
label: 'About Kitematic',
enabled: !!docker.host,
click: function () {
metrics.track('Opened About', {
from: 'menu'
});
router.get().transitionTo('about');
}
},
{
type: 'separator'
},
{
label: 'Preferences',
accelerator: util.CommandOrCtrl() + '+,',
enabled: !!docker.host,
click: function () {
metrics.track('Opened Preferences', {
from: 'menu'
});
router.get().transitionTo('preferences');
}
},
{
type: 'separator'
},
{
type: 'separator'
},
{
label: 'Hide Kitematic',
accelerator: util.CommandOrCtrl() + '+H',
selector: 'hide:'
},
{
label: 'Hide Others',
accelerator: util.CommandOrCtrl() + '+Shift+H',
selector: 'hideOtherApplications:'
},
{
label: 'Show All',
selector: 'unhideAllApplications:'
},
{
type: 'separator'
},
{
label: 'Quit',
accelerator: util.CommandOrCtrl() + '+Q',
click: function() {
app.quit();
}
}
]
},
{
label: 'File',
submenu: [
{
type: 'separator'
},
{
label: 'Open Docker Command Line Terminal',
accelerator: util.CommandOrCtrl() + '+Shift+T',
enabled: !!docker.host,
click: function() {
metrics.track('Opened Docker Terminal', {
from: 'menu'
});
machine.dockerTerminal();
}
}
]
},
{
label: 'Edit',
submenu: [
{
label: 'Undo',
accelerator: util.CommandOrCtrl() + '+Z',
selector: 'undo:'
},
{
label: 'Redo',
accelerator: 'Shift+' + util.CommandOrCtrl() + '+Z',
selector: 'redo:'
},
{
type: 'separator'
},
{
label: 'Cut',
accelerator: util.CommandOrCtrl() + '+X',
selector: 'cut:'
},
{
label: 'Copy',
accelerator: util.CommandOrCtrl() + '+C',
selector: 'copy:'
},
{
label: 'Paste',
accelerator: util.CommandOrCtrl() + '+V',
selector: 'paste:'
},
{
label: 'Select All',
accelerator: util.CommandOrCtrl() + '+A',
selector: 'selectAll:'
}
]
},
{
label: 'View',
submenu: [
{
label: 'Toggle Chromium Developer Tools',
accelerator: 'Alt+' + util.CommandOrCtrl() + '+I',
click: function() { remote.getCurrentWindow().toggleDevTools(); }
}
]
},
{
label: 'Window',
submenu: [
{
label: 'Minimize',
accelerator: util.CommandOrCtrl() + '+M',
selector: 'performMiniaturize:'
},
{
label: 'Close',
accelerator: util.CommandOrCtrl() + '+W',
click: function () {
remote.getCurrentWindow().hide();
}
},
{
type: 'separator'
},
{
label: 'Bring All to Front',
selector: 'arrangeInFront:'
},
{
type: 'separator'
},
{
label: 'Kitematic',
accelerator: 'Cmd+0',
click: function () {
remote.getCurrentWindow().show();
}
},
]
},
{
label: 'Help',
submenu: [
{
label: 'Report Issue or Suggest Feedback',
click: function () {
metrics.track('Opened Issue Reporter', {
from: 'menu'
});
shell.openExternal('https://github.com/kitematic/kitematic/issues/new');
}
}
]
}
];
};
module.exports = MenuTemplate;
| src/menutemplate.js | import remote from 'remote';
import shell from 'shell';
import router from './router';
import util from './utils/Util';
import metrics from './utils/MetricsUtil';
import machine from './utils/DockerMachineUtil';
import docker from './utils/DockerUtil';
var app = remote.require('app');
// main.js
var MenuTemplate = function () {
return [
{
label: 'Kitematic',
submenu: [
{
label: 'About Kitematic',
enabled: !!docker.host,
click: function () {
metrics.track('Opened About', {
from: 'menu'
});
router.get().transitionTo('about');
}
},
{
type: 'separator'
},
{
label: 'Preferences',
accelerator: util.CommandOrCtrl() + '+,',
enabled: !!docker.host,
click: function () {
metrics.track('Opened Preferences', {
from: 'menu'
});
router.get().transitionTo('preferences');
}
},
{
type: 'separator'
},
{
type: 'separator'
},
{
label: 'Hide Kitematic',
accelerator: util.CommandOrCtrl() + '+H',
selector: 'hide:'
},
{
label: 'Hide Others',
accelerator: util.CommandOrCtrl() + '+Shift+H',
selector: 'hideOtherApplications:'
},
{
label: 'Show All',
selector: 'unhideAllApplications:'
},
{
type: 'separator'
},
{
label: 'Quit',
accelerator: util.CommandOrCtrl() + '+Q',
click: function() {
app.quit();
}
}
]
},
{
label: 'File',
submenu: [
{
type: 'separator'
},
{
label: 'Open Docker Command Line Terminal',
accelerator: util.CommandOrCtrl() + '+Shift+T',
enabled: !!docker.host,
click: function() {
metrics.track('Opened Docker Terminal', {
from: 'menu'
});
machine.dockerTerminal();
}
}
]
},
{
label: 'Edit',
submenu: [
{
label: 'Undo',
accelerator: util.CommandOrCtrl() + '+Z',
selector: 'undo:'
},
{
label: 'Redo',
accelerator: 'Shift+' + util.CommandOrCtrl() + '+Z',
selector: 'redo:'
},
{
type: 'separator'
},
{
label: 'Cut',
accelerator: util.CommandOrCtrl() + '+X',
selector: 'cut:'
},
{
label: 'Copy',
accelerator: util.CommandOrCtrl() + '+C',
selector: 'copy:'
},
{
label: 'Paste',
accelerator: util.CommandOrCtrl() + '+V',
selector: 'paste:'
},
{
label: 'Select All',
accelerator: util.CommandOrCtrl() + '+A',
selector: 'selectAll:'
}
]
},
{
label: 'View',
submenu: [
{
label: 'Toggle Chromium Developer Tools',
accelerator: 'Alt+' + util.CommandOrCtrl() + '+I',
click: function() { remote.getCurrentWindow().toggleDevTools(); }
}
]
},
{
label: 'Window',
submenu: [
{
label: 'Minimize',
accelerator: util.CommandOrCtrl() + '+M',
selector: 'performMiniaturize:'
},
{
label: 'Close',
accelerator: util.CommandOrCtrl() + '+W',
click: function () {
remote.getCurrentWindow().hide();
}
},
{
type: 'separator'
},
{
label: 'Bring All to Front',
selector: 'arrangeInFront:'
}
]
},
{
label: 'Help',
submenu: [
{
label: 'Report Issue or Suggest Feedback',
click: function () {
metrics.track('Opened Issue Reporter', {
from: 'menu'
});
shell.openExternal('https://github.com/kitematic/kitematic/issues/new');
}
}
]
}
];
};
module.exports = MenuTemplate;
| Fix #702 : Cmd+0 reopen Kitematic window in Mac
Signed-off-by: Mika Andrianarijaona <[email protected]>
| src/menutemplate.js | Fix #702 : Cmd+0 reopen Kitematic window in Mac | <ide><path>rc/menutemplate.js
<ide> {
<ide> label: 'Bring All to Front',
<ide> selector: 'arrangeInFront:'
<del> }
<add> },
<add> {
<add> type: 'separator'
<add> },
<add> {
<add> label: 'Kitematic',
<add> accelerator: 'Cmd+0',
<add> click: function () {
<add> remote.getCurrentWindow().show();
<add> }
<add> },
<ide> ]
<ide> },
<ide> { |
|
Java | apache-2.0 | 1e0194c75d5096de8c8557b3f64f9547470f3a09 | 0 | Khyzad/PID-webservice,Khyzad/PID-webservice,HawaiiStateDigitalArchives/PID-webservice,HawaiiStateDigitalArchives/PID-webservice | package com.hida.model;
import static com.hida.model.IdGenerator.Rng;
import static com.hida.model.IdGenerator.Logger;
import java.util.Set;
import java.util.TreeSet;
/**
* An Id Generator that creates Pids primarily based on tokenType and
* rootLength.
*
* @author lruffin
*/
public class AutoIdGenerator extends IdGenerator {
/**
* Designates what characters are contained in the id's root. There are 7
* types of token maps, each describing a range of possible characters in
* the id. This range is further affected by the variable SansVowel.
*
* <pre>
* DIGIT: Digit values only.
* LOWER_ALPHABET: Lowercase letters only.
* UPPER_ALPHABET: Uppercase letters only.
* MIXED_ALPHABET: Lowercase and Uppercase letters only.
* LOWER_ALPHABET_EXTENDED: Digit values and Lowercase letters only.
* UPPER_ALPHABET_EXTENDED: Digits and Uppercase letters only
* MIXED_ALPHABET_EXTENDED: All characters specified by previous tokens
* </pre>
*
*/
private TokenType TokenType;
/**
* Designates the length of the id's root.
*/
private int RootLength;
/**
* Default constructor. Aside from TokenType, there are no restrictions
* placed on the parameters and can be used however one sees fit.
*
* @param prefix A sequence of characters that appear in the beginning of
* PIDs
* @param sansVowel Dictates whether or not vowels are allowed
* @param tokenType An enum used to configure PIDS
* @param rootLength Designates the length of the id's root
*/
public AutoIdGenerator(String prefix, boolean sansVowel, TokenType tokenType, int rootLength) {
super(prefix, sansVowel);
this.TokenType = tokenType;
this.RootLength = rootLength;
}
/**
* Creates Pids without regard to a natural order.
*
* @param amount The number of PIDs to be created
* @return A set of Pids
*/
@Override
public Set<Pid> randomMint(long amount) {
// checks to see if its possible to produce or add requested amount of
long total = calculatePermutations();
if (total < amount) {
throw new NotEnoughPermutationsException(total, amount);
}
// generate ids
String map = TokenType.getCharacters();
Set<Pid> tempIdList = new TreeSet<>();
for (int i = 0; i < amount; i++) {
int[] tempIdBaseMap = new int[RootLength];
for (int j = 0; j < RootLength; j++) {
tempIdBaseMap[j] = Rng.nextInt(map.length());
}
Pid currentId = new AutoId(Prefix, tempIdBaseMap, map);
Logger.trace("Generated Auto Random ID: " + currentId);
while (tempIdList.contains(currentId)) {
currentId.incrementId();
}
tempIdList.add(currentId);
}
return tempIdList;
}
/**
* Creates Pids in ascending order
*
* @param amount The number of PIDs to be created
* @return A set of Pids
*/
@Override
public Set<Pid> sequentialMint(long amount) {
// checks to see if its possible to produce or add requested amount of
long total = calculatePermutations();
if (total < amount) {
throw new NotEnoughPermutationsException(total, amount);
}
// generate ids
String map = TokenType.getCharacters();
Set<Pid> idSet = new TreeSet<>();
int[] previousIdBaseMap = new int[RootLength];
AutoId currentId = new AutoId(Prefix, previousIdBaseMap, map);
for (int i = 0; i < amount; i++) {
AutoId nextId = new AutoId(currentId);
idSet.add(currentId);
Logger.trace("Generated Auto Sequential ID: " + currentId);
nextId.incrementId();
currentId = new AutoId(nextId);
}
return idSet;
}
/**
* This method calculates and returns the total possible number of
* permutations using the values given in the constructor.
*
* @return number of permutations
*/
@Override
public long calculatePermutations() {
// get the base of each character
int base = TokenType.getCharacters().length();
// raise it to the power of how ever long the rootLength is
return ((long) Math.pow(base, RootLength));
}
/**
* Increments a value of a PID. If the maximum limit is reached the values
* will wrap around.
*
* @param pid The pid to increment
*/
@Override
public void incrementPid(Pid pid) {
int range = TokenType.getCharacters().length() - 1;
boolean overflow = true;
// increment the values in a pid's basemap
for (int k = 0; k <= range && overflow; k++) {
// record value of current index
int value = pid.getBaseMap()[range - k];
// if the last value is reached then wrap around
if (value == range) {
pid.getBaseMap()[range - k] = 0;
}
// otherwise increment the value at the current index and break the loop
else {
pid.getBaseMap()[range - k]++;
overflow = false;
}
}
// assign a new name to the Pid based on its base map
assignName(pid);
}
/**
* Creates and sets a new name for a pid based on its indices contained in
* the BaseMap and the characters in the TokenType. This should be called
* whenever the values in a Pid's BaseMap has been changed.
*
* @param pid The pid that needs a new name.
*/
@Override
protected void assignName(Pid pid) {
String Name = "";
for (int i = 0; i < pid.getBaseMap().length; i++) {
Name += TokenType.getCharacters().charAt(pid.getBaseMap()[i]);
}
pid.setName(this.getPrefix() + Name);
}
/* getters and setters */
public TokenType getTokenType() {
return TokenType;
}
public void setTokenType(TokenType TokenType) {
this.TokenType = TokenType;
}
public int getRootLength() {
return RootLength;
}
public void setRootLength(int RootLength) {
this.RootLength = RootLength;
}
}
| Minter/src/main/java/com/hida/model/AutoIdGenerator.java | package com.hida.model;
import static com.hida.model.IdGenerator.Rng;
import static com.hida.model.IdGenerator.Logger;
import java.util.Set;
import java.util.TreeSet;
/**
* An Id Generator that creates Pids primarily based on tokenType and
* rootLength.
*
* @author lruffin
*/
public class AutoIdGenerator extends IdGenerator {
/**
* Designates what characters are contained in the id's root. There are 7
* types of token maps, each describing a range of possible characters in
* the id. This range is further affected by the variable SansVowel.
*
* <pre>
* DIGIT: Digit values only.
* LOWER_ALPHABET: Lowercase letters only.
* UPPER_ALPHABET: Uppercase letters only.
* MIXED_ALPHABET: Lowercase and Uppercase letters only.
* LOWER_ALPHABET_EXTENDED: Digit values and Lowercase letters only.
* UPPER_ALPHABET_EXTENDED: Digits and Uppercase letters only
* MIXED_ALPHABET_EXTENDED: All characters specified by previous tokens
* </pre>
*
*/
private TokenType TokenType;
/**
* Designates the length of the id's root.
*/
private int RootLength;
/**
* Default constructor. Aside from TokenType, there are no restrictions
* placed on the parameters and can be used however one sees fit.
*
* @param prefix A sequence of characters that appear in the beginning of
* PIDs
* @param sansVowel Dictates whether or not vowels are allowed
* @param tokenType An enum used to configure PIDS
* @param rootLength Designates the length of the id's root
*/
public AutoIdGenerator(String prefix, boolean sansVowel, TokenType tokenType, int rootLength) {
super(prefix, sansVowel);
this.TokenType = tokenType;
this.RootLength = rootLength;
}
/**
* Creates Pids without regard to a natural order.
*
* @param amount The number of PIDs to be created
* @return A set of Pids
*/
@Override
public Set<Pid> randomMint(long amount) {
// checks to see if its possible to produce or add requested amount of
long total = calculatePermutations();
if (total < amount) {
throw new NotEnoughPermutationsException(total, amount);
}
// generate ids
String map = TokenType.getCharacters();
Set<Pid> tempIdList = new TreeSet<>();
for (int i = 0; i < amount; i++) {
int[] tempIdBaseMap = new int[RootLength];
for (int j = 0; j < RootLength; j++) {
tempIdBaseMap[j] = Rng.nextInt(map.length());
}
Pid currentId = new AutoId(Prefix, tempIdBaseMap, map);
Logger.trace("Generated Auto Random ID: " + currentId);
while (tempIdList.contains(currentId)) {
currentId.incrementId();
}
tempIdList.add(currentId);
}
return tempIdList;
}
/**
* Creates Pids in ascending order
*
* @param amount The number of PIDs to be created
* @return A set of Pids
*/
@Override
public Set<Pid> sequentialMint(long amount) {
// checks to see if its possible to produce or add requested amount of
long total = calculatePermutations();
if (total < amount) {
throw new NotEnoughPermutationsException(total, amount);
}
// generate ids
String map = TokenType.getCharacters();
Set<Pid> idSet = new TreeSet<>();
int[] previousIdBaseMap = new int[RootLength];
AutoId currentId = new AutoId(Prefix, previousIdBaseMap, map);
for (int i = 0; i < amount; i++) {
AutoId nextId = new AutoId(currentId);
idSet.add(currentId);
Logger.trace("Generated Auto Sequential ID: " + currentId);
nextId.incrementId();
currentId = new AutoId(nextId);
}
return idSet;
}
/**
* This method calculates and returns the total possible number of
* permutations using the values given in the constructor.
*
* @return number of permutations
*/
@Override
public long calculatePermutations() {
// get the base of each character
int base = 0;
switch (TokenType) {
case DIGIT:
base = 10;
break;
case LOWER_ALPHABET:
case UPPER_ALPHABET:
base = (SansVowel) ? 20 : 26;
break;
case MIXED_ALPHABET:
base = (SansVowel) ? 40 : 52;
break;
case LOWER_ALPHABET_EXTENDED:
case UPPER_ALPHABET_EXTENDED:
base = (SansVowel) ? 30 : 36;
break;
case MIXED_ALPHABET_EXTENDED:
base = (SansVowel) ? 50 : 62;
break;
}
// raise it to the power of how ever long the rootLength is
return ((long) Math.pow(base, RootLength));
}
/**
* Increments a value of a PID. If the maximum limit is reached the values
* will wrap around.
*
* @param pid The pid to increment
*/
@Override
public void incrementPid(Pid pid) {
int range = TokenType.getCharacters().length() - 1;
boolean overflow = true;
// increment the values in a pid's basemap
for (int k = 0; k <= range && overflow; k++) {
// record value of current index
int value = pid.getBaseMap()[range - k];
// if the last value is reached then wrap around
if (value == range) {
pid.getBaseMap()[range - k] = 0;
}
// otherwise increment the value at the current index and break the loop
else {
pid.getBaseMap()[range - k]++;
overflow = false;
}
}
// assign a new name to the Pid based on its base map
assignName(pid);
}
/**
* Creates and sets a new name for a pid based on its indices contained in
* the BaseMap and the characters in the TokenType. This should be called
* whenever the values in a Pid's BaseMap has been changed.
*
* @param pid The pid that needs a new name.
*/
@Override
protected void assignName(Pid pid) {
String Name = "";
for (int i = 0; i < pid.getBaseMap().length; i++) {
Name += TokenType.getCharacters().charAt(pid.getBaseMap()[i]);
}
pid.setName(this.getPrefix() + Name);
}
/* getters and setters */
public TokenType getTokenType() {
return TokenType;
}
public void setTokenType(TokenType TokenType) {
this.TokenType = TokenType;
}
public int getRootLength() {
return RootLength;
}
public void setRootLength(int RootLength) {
this.RootLength = RootLength;
}
}
| Correct AutoIdGenerator's calculatePermutations
Since enums now have a String field directly associated with them we
can logically say that the base is equal to the lengths of their
Strings.
| Minter/src/main/java/com/hida/model/AutoIdGenerator.java | Correct AutoIdGenerator's calculatePermutations | <ide><path>inter/src/main/java/com/hida/model/AutoIdGenerator.java
<ide> @Override
<ide> public long calculatePermutations() {
<ide> // get the base of each character
<del> int base = 0;
<del> switch (TokenType) {
<del> case DIGIT:
<del> base = 10;
<del> break;
<del> case LOWER_ALPHABET:
<del> case UPPER_ALPHABET:
<del> base = (SansVowel) ? 20 : 26;
<del> break;
<del> case MIXED_ALPHABET:
<del> base = (SansVowel) ? 40 : 52;
<del> break;
<del> case LOWER_ALPHABET_EXTENDED:
<del> case UPPER_ALPHABET_EXTENDED:
<del> base = (SansVowel) ? 30 : 36;
<del> break;
<del> case MIXED_ALPHABET_EXTENDED:
<del> base = (SansVowel) ? 50 : 62;
<del> break;
<del> }
<add> int base = TokenType.getCharacters().length();
<ide>
<ide> // raise it to the power of how ever long the rootLength is
<ide> return ((long) Math.pow(base, RootLength)); |
|
Java | apache-2.0 | 5dc2654882da9f6831ec59ca2c2649b320335677 | 0 | caskdata/coopr,caskdata/coopr,cdapio/coopr,cdapio/coopr,cdapio/coopr,caskdata/coopr,cdapio/coopr,quantiply-fork/coopr,quantiply-fork/coopr,quantiply-fork/coopr,caskdata/coopr,caskdata/coopr,quantiply-fork/coopr,cdapio/coopr | /**
* Copyright 2012-2014, Continuuity, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.continuuity.test.input;
import com.continuuity.loom.codec.json.guice.CodecModules;
import com.continuuity.test.Constants;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.JsonSyntaxException;
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Set;
/**
* Read cluster files.
*/
public class ClusterReader {
private static final Logger LOG = LoggerFactory.getLogger(ClusterReader.class);
private static final String URI = "/v1/loom/clusters/00000028";
private static final List<String> KEYS = ImmutableList.of("00000139", "00000138", "00000135");
private static final String CLUSTER_ID = "00000139";
private final Gson gson;
public ClusterReader() {
Injector injector = Guice.createInjector(new CodecModules().getModule());
gson = injector.getInstance(Gson.class);
}
public JsonObject getCluster() throws Exception {
try {
JsonObject clusterDefinition = gson.fromJson(readCluster(Constants.CLUSTERDEF_FILE_NAME), JsonObject.class);
return clusterDefinition.get(CLUSTER_ID).getAsJsonObject();
} catch (JsonSyntaxException e) {
LOG.error("Got exception while parsing JSON: ", e);
}
return null;
}
public JsonObject getCreateCluster() throws Exception {
try {
JsonObject cluster = gson.fromJson(readCluster(Constants.CLUSTER_CREATE_FILE_NAME), JsonObject.class);
return cluster;
} catch (JsonSyntaxException e) {
LOG.error("Got exception while parsing JSON: ", e);
}
return null;
}
public Set<TestCluster> getClusters(String status) throws Exception {
Set<TestCluster> testClusters = Sets.newHashSet();
try {
JsonObject clusters = gson.fromJson(readCluster(Constants.CLUSTERS_FILE_NAME), JsonObject.class);
for (String key : KEYS) {
TestCluster cluster = parseCluster(clusters.get(key).getAsJsonObject(), status);
if (cluster != null) {
testClusters.add(cluster);
}
}
return testClusters;
} catch (JsonSyntaxException e) {
LOG.error("Got exception while parsing JSON ", e);
}
return null;
}
private TestCluster parseCluster(JsonObject json, String status) {
if (status.equalsIgnoreCase(json.get("status").getAsString())) {
// Lop off milliseconds.
long ts = 1000 * (json.get("createTime").getAsLong() / 1000);
return new TestCluster(json.get("name").getAsString(), json.get("id").getAsString(),
ts, json.get("clusterTemplate").getAsString(),
Integer.parseInt(json.get("numNodes").getAsString()));
}
return null;
}
private String readCluster(String fileName) throws IOException {
BufferedReader br = new BufferedReader(new FileReader(fileName));
StringBuilder sb = new StringBuilder();
String currentline;
while ((currentline = br.readLine()) != null) {
sb.append(currentline);
}
return sb.toString();
}
}
| integration-testing/src/test/java/com/continuuity/test/input/ClusterReader.java | /**
* Copyright 2012-2014, Continuuity, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.continuuity.test.input;
import com.continuuity.loom.codec.json.guice.CodecModules;
import com.continuuity.test.Constants;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.JsonSyntaxException;
import com.google.inject.Guice;
import com.google.inject.Injector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Set;
/**
* Read cluster files.
*/
public class ClusterReader {
private static final Logger LOG = LoggerFactory.getLogger(ClusterReader.class);
private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private static final String URI = "/v1/loom/clusters/00000028";
private static final List<String> KEYS = ImmutableList.of("00000139", "00000138", "00000135");
private static final String CLUSTER_ID = "00000139";
private final Gson gson;
public ClusterReader() {
Injector injector = Guice.createInjector(new CodecModules().getModule());
gson = injector.getInstance(Gson.class);
}
public JsonObject getCluster() throws Exception {
try {
JsonObject clusterDefinition = gson.fromJson(readCluster(Constants.CLUSTERDEF_FILE_NAME), JsonObject.class);
return clusterDefinition.get(CLUSTER_ID).getAsJsonObject();
} catch (JsonSyntaxException e) {
LOG.error("Got exception while parsing JSON: ", e);
}
return null;
}
public JsonObject getCreateCluster() throws Exception {
try {
JsonObject cluster = gson.fromJson(readCluster(Constants.CLUSTER_CREATE_FILE_NAME), JsonObject.class);
return cluster;
} catch (JsonSyntaxException e) {
LOG.error("Got exception while parsing JSON: ", e);
}
return null;
}
public Set<TestCluster> getClusters(String status) throws Exception {
Set<TestCluster> testClusters = Sets.newHashSet();
try {
JsonObject clusters = gson.fromJson(readCluster(Constants.CLUSTERS_FILE_NAME), JsonObject.class);
for (String key : KEYS) {
TestCluster cluster = parseCluster(clusters.get(key).getAsJsonObject(), status);
if (cluster != null) {
testClusters.add(cluster);
}
}
return testClusters;
} catch (JsonSyntaxException e) {
LOG.error("Got exception while parsing JSON ", e);
}
return null;
}
private TestCluster parseCluster(JsonObject json, String status) {
if (status.equalsIgnoreCase(json.get("status").getAsString())) {
// Convert GMT to PST. Lop off milliseconds.
long ts = 1000 * (json.get("createTime").getAsLong() / 1000);
return new TestCluster(json.get("name").getAsString(), json.get("id").getAsString(),
ts, json.get("clusterTemplate").getAsString(),
Integer.parseInt(json.get("numNodes").getAsString()));
}
return null;
}
private String readCluster(String fileName) throws IOException {
BufferedReader br = new BufferedReader(new FileReader(fileName));
StringBuilder sb = new StringBuilder();
String currentline;
while ((currentline = br.readLine()) != null) {
sb.append(currentline);
}
return sb.toString();
}
}
| fix comment and remove unused line.
| integration-testing/src/test/java/com/continuuity/test/input/ClusterReader.java | fix comment and remove unused line. | <ide><path>ntegration-testing/src/test/java/com/continuuity/test/input/ClusterReader.java
<ide> import java.io.FileReader;
<ide> import java.io.IOException;
<ide> import java.text.SimpleDateFormat;
<del>import java.util.Date;
<ide> import java.util.List;
<ide> import java.util.Set;
<ide>
<ide> public class ClusterReader {
<ide> private static final Logger LOG = LoggerFactory.getLogger(ClusterReader.class);
<ide>
<del> private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
<ide> private static final String URI = "/v1/loom/clusters/00000028";
<ide> private static final List<String> KEYS = ImmutableList.of("00000139", "00000138", "00000135");
<ide> private static final String CLUSTER_ID = "00000139";
<ide>
<ide> private TestCluster parseCluster(JsonObject json, String status) {
<ide> if (status.equalsIgnoreCase(json.get("status").getAsString())) {
<del> // Convert GMT to PST. Lop off milliseconds.
<add> // Lop off milliseconds.
<ide> long ts = 1000 * (json.get("createTime").getAsLong() / 1000);
<ide>
<ide> return new TestCluster(json.get("name").getAsString(), json.get("id").getAsString(), |
|
Java | apache-2.0 | f20c333900514b7ac919ab02e7f67828dbabde17 | 0 | WilliamZapata/alluxio,madanadit/alluxio,Alluxio/alluxio,riversand963/alluxio,bf8086/alluxio,madanadit/alluxio,Reidddddd/mo-alluxio,riversand963/alluxio,maboelhassan/alluxio,PasaLab/tachyon,jswudi/alluxio,jsimsa/alluxio,Reidddddd/mo-alluxio,Reidddddd/alluxio,bf8086/alluxio,bf8086/alluxio,madanadit/alluxio,calvinjia/tachyon,maobaolong/alluxio,Alluxio/alluxio,uronce-cc/alluxio,ShailShah/alluxio,wwjiang007/alluxio,calvinjia/tachyon,calvinjia/tachyon,riversand963/alluxio,bf8086/alluxio,calvinjia/tachyon,wwjiang007/alluxio,uronce-cc/alluxio,uronce-cc/alluxio,PasaLab/tachyon,ChangerYoung/alluxio,yuluo-ding/alluxio,Alluxio/alluxio,PasaLab/tachyon,apc999/alluxio,riversand963/alluxio,calvinjia/tachyon,Reidddddd/mo-alluxio,aaudiber/alluxio,maboelhassan/alluxio,ShailShah/alluxio,ShailShah/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,Alluxio/alluxio,EvilMcJerkface/alluxio,madanadit/alluxio,yuluo-ding/alluxio,ChangerYoung/alluxio,aaudiber/alluxio,bf8086/alluxio,bf8086/alluxio,jswudi/alluxio,PasaLab/tachyon,Alluxio/alluxio,jswudi/alluxio,wwjiang007/alluxio,madanadit/alluxio,jsimsa/alluxio,maobaolong/alluxio,EvilMcJerkface/alluxio,Reidddddd/alluxio,maboelhassan/alluxio,jsimsa/alluxio,aaudiber/alluxio,maboelhassan/alluxio,maboelhassan/alluxio,apc999/alluxio,calvinjia/tachyon,wwjiang007/alluxio,bf8086/alluxio,Alluxio/alluxio,PasaLab/tachyon,Alluxio/alluxio,WilliamZapata/alluxio,jswudi/alluxio,bf8086/alluxio,ShailShah/alluxio,jsimsa/alluxio,EvilMcJerkface/alluxio,EvilMcJerkface/alluxio,maobaolong/alluxio,yuluo-ding/alluxio,Reidddddd/alluxio,jsimsa/alluxio,maobaolong/alluxio,madanadit/alluxio,WilliamZapata/alluxio,apc999/alluxio,wwjiang007/alluxio,uronce-cc/alluxio,calvinjia/tachyon,Reidddddd/mo-alluxio,Alluxio/alluxio,WilliamZapata/alluxio,wwjiang007/alluxio,PasaLab/tachyon,apc999/alluxio,apc999/alluxio,madanadit/alluxio,jswudi/alluxio,jswudi/alluxio,Reidddddd/alluxio,Reidddddd/mo-alluxio,maobaolong/alluxio,Al
luxio/alluxio,aaudiber/alluxio,WilliamZapata/alluxio,maboelhassan/alluxio,ChangerYoung/alluxio,EvilMcJerkface/alluxio,aaudiber/alluxio,Reidddddd/alluxio,PasaLab/tachyon,riversand963/alluxio,yuluo-ding/alluxio,jsimsa/alluxio,apc999/alluxio,Reidddddd/alluxio,maobaolong/alluxio,WilliamZapata/alluxio,Reidddddd/mo-alluxio,madanadit/alluxio,aaudiber/alluxio,riversand963/alluxio,apc999/alluxio,uronce-cc/alluxio,yuluo-ding/alluxio,Reidddddd/alluxio,calvinjia/tachyon,maobaolong/alluxio,uronce-cc/alluxio,maobaolong/alluxio,ShailShah/alluxio,maobaolong/alluxio,maboelhassan/alluxio,maobaolong/alluxio,aaudiber/alluxio,ChangerYoung/alluxio,yuluo-ding/alluxio,wwjiang007/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,ChangerYoung/alluxio,wwjiang007/alluxio,ShailShah/alluxio,ChangerYoung/alluxio,wwjiang007/alluxio,Alluxio/alluxio | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.hadoop;
import alluxio.CommonTestUtils;
import alluxio.Configuration;
import alluxio.ConfigurationTestUtils;
import alluxio.Constants;
import alluxio.client.ClientContext;
import alluxio.client.block.BlockStoreContext;
import alluxio.client.file.FileSystemContext;
import alluxio.client.file.FileSystemMasterClient;
import alluxio.client.lineage.LineageContext;
import alluxio.client.util.ClientTestUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.MockClassLoader;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
/**
* Unit tests for {@link FileSystem}.
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest({FileSystemContext.class, FileSystemMasterClient.class, UserGroupInformation.class})
/*
* [ALLUXIO-1384] Tell PowerMock to defer the loading of javax.security classes to the system
* classloader in order to avoid linkage error when running this test with CDH.
* See https://code.google.com/p/powermock/wiki/FAQ.
*/
@PowerMockIgnore("javax.security.*")
/**
* Tests for {@link AbstractFileSystem}.
*/
public class AbstractFileSystemTest {
private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);
private FileSystemContext mMockFileSystemContext;
/**
* Sets up the configuration before a test runs.
*/
@Before
public void before() throws Exception {
mockUserGroupInformation();
mockMasterClient();
if (isHadoop1x()) {
LOG.debug("Running Alluxio FS tests against hadoop 1x");
} else if (isHadoop2x()) {
LOG.debug("Running Alluxio FS tests against hadoop 2x");
} else {
LOG.warn("Running Alluxio FS tests against untargeted Hadoop version: " + getHadoopVersion());
}
}
@After
public void after() {
ConfigurationTestUtils.resetConfiguration();
ClientTestUtils.resetClient();
}
private ClassLoader getClassLoader(Class<?> clazz) {
// Power Mock makes this hard, so try to hack it
ClassLoader cl = clazz.getClassLoader();
if (cl instanceof MockClassLoader) {
cl = cl.getParent();
}
return cl;
}
private String getHadoopVersion() {
try {
final URL url = getSourcePath(org.apache.hadoop.fs.FileSystem.class);
final File path = new File(url.toURI());
final String[] splits = path.getName().split("-");
final String last = splits[splits.length - 1];
return last.substring(0, last.lastIndexOf("."));
} catch (URISyntaxException e) {
throw new AssertionError(e);
}
}
private URL getSourcePath(Class<?> clazz) {
try {
clazz = getClassLoader(clazz).loadClass(clazz.getName());
return clazz.getProtectionDomain().getCodeSource().getLocation();
} catch (ClassNotFoundException e) {
throw new AssertionError("Unable to find class " + clazz.getName());
}
}
/**
* Ensures that Hadoop loads {@link FaultTolerantFileSystem} when configured.
*/
@Test
public void hadoopShouldLoadFaultTolerantFileSystemWhenConfiguredTest() throws Exception {
final org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
if (isHadoop1x()) {
conf.set("fs." + Constants.SCHEME_FT + ".impl", FaultTolerantFileSystem.class.getName());
}
// when
final URI uri = URI.create(Constants.HEADER_FT + "localhost:19998/tmp/path.txt");
Configuration.set(Constants.MASTER_HOSTNAME, uri.getHost());
Configuration.set(Constants.MASTER_RPC_PORT, Integer.toString(uri.getPort()));
Configuration.set(Constants.ZOOKEEPER_ENABLED, "true");
final org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(uri, conf);
Assert.assertTrue(fs instanceof FaultTolerantFileSystem);
PowerMockito.verifyStatic();
alluxio.client.file.FileSystem.Factory.get();
ClientTestUtils.resetClient();
}
/**
* Ensures that Hadoop loads the Alluxio file system when configured.
*/
@Test
public void hadoopShouldLoadFileSystemWhenConfiguredTest() throws Exception {
final org.apache.hadoop.conf.Configuration conf = getConf();
// when
final URI uri = URI.create(Constants.HEADER + "localhost:19998/tmp/path.txt");
Configuration.set(Constants.MASTER_HOSTNAME, uri.getHost());
Configuration.set(Constants.MASTER_RPC_PORT, Integer.toString(uri.getPort()));
Configuration.set(Constants.ZOOKEEPER_ENABLED, "false");
final org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(uri, conf);
Assert.assertTrue(fs instanceof FileSystem);
PowerMockito.verifyStatic();
alluxio.client.file.FileSystem.Factory.get();
ClientTestUtils.resetClient();
}
/**
* Ensures that FileNotFoundException is thrown when listStatus is called on a nonexistent path.
*/
@Test
public void throwFileNotFoundExceptionWhenListStatusNonExistingTest() throws Exception {
final org.apache.hadoop.conf.Configuration conf = getConf();
conf.set(Constants.MASTER_HOSTNAME, "localhost");
conf.set(Constants.MASTER_RPC_PORT, "19998");
ClientContext.init();
org.apache.hadoop.fs.FileSystem fs = FileSystem.get(conf);
StringBuilder path = new StringBuilder("/ALLUXIO-2036.");
path.append(System.currentTimeMillis()).append(".txt");
try {
fs.listStatus(new Path(path.toString()));
Assert.fail("Listing the status of a nonexistent file should throw an exception");
} catch (FileNotFoundException e) {
// This exception is expected
} finally {
fs.close();
}
}
/**
* Tests that initializing the {@link AbstractFileSystem} will reinitialize contexts to pick up
* changes to the master address.
*/
@Test
public void resetContextTest() throws Exception {
// Change to otherhost:410
URI uri = URI.create(Constants.HEADER + "otherhost:410/");
org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(uri, getConf());
// Make sure all contexts are using the new address
InetSocketAddress newAddress = new InetSocketAddress("otherhost", 410);
Assert.assertEquals(newAddress, ClientContext.getMasterAddress());
Assert.assertEquals(newAddress, CommonTestUtils.getInternalState(BlockStoreContext.INSTANCE,
"mBlockMasterClientPool", "mMasterAddress"));
// Once from calling FileSystem.get
Mockito.verify(mMockFileSystemContext).reset();
Assert.assertEquals(newAddress, CommonTestUtils.getInternalState(LineageContext.INSTANCE,
"mLineageMasterClientPool", "mMasterAddress"));
}
/**
* Verifies that the initialize method is only called once even when there are many concurrent
* initializers during the initialization phase.
*/
@Test
public void concurrentInitializeTest() throws Exception {
final List<Thread> threads = new ArrayList<>();
final org.apache.hadoop.conf.Configuration conf = getConf();
for (int i = 0; i < 100; i++) {
final int id = i;
Thread t = new Thread(new Runnable() {
@Override
public void run() {
URI uri = URI.create(Constants.HEADER + "randomhost" + id + ":410/");
try {
org.apache.hadoop.fs.FileSystem.get(uri, conf);
} catch (IOException e) {
Assert.fail();
}
}
});
threads.add(t);
}
for (Thread t : threads) {
t.start();
}
for (Thread t : threads) {
t.join();
}
Mockito.verify(mMockFileSystemContext).reset();
}
private boolean isHadoop1x() {
return getHadoopVersion().startsWith("1");
}
private boolean isHadoop2x() {
return getHadoopVersion().startsWith("2");
}
private org.apache.hadoop.conf.Configuration getConf() throws Exception {
org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
if (isHadoop1x()) {
conf.set("fs." + Constants.SCHEME + ".impl", FileSystem.class.getName());
}
return conf;
}
private void mockMasterClient() {
PowerMockito.mockStatic(FileSystemContext.class);
mMockFileSystemContext = PowerMockito.mock(FileSystemContext.class);
FileSystemMasterClient mockMaster =
PowerMockito.mock(FileSystemMasterClient.class);
Whitebox.setInternalState(FileSystemContext.class, "INSTANCE", mMockFileSystemContext);
Mockito.when(mMockFileSystemContext.acquireMasterClient()).thenReturn(mockMaster);
}
private void mockUserGroupInformation() throws IOException {
// need to mock out since FileSystem.get calls UGI, which some times has issues on some systems
PowerMockito.mockStatic(UserGroupInformation.class);
final UserGroupInformation ugi = Mockito.mock(UserGroupInformation.class);
Mockito.when(UserGroupInformation.getCurrentUser()).thenReturn(ugi);
}
}
| core/client/src/test/java/alluxio/hadoop/AbstractFileSystemTest.java | /*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.hadoop;
import alluxio.CommonTestUtils;
import alluxio.Configuration;
import alluxio.ConfigurationTestUtils;
import alluxio.Constants;
import alluxio.client.ClientContext;
import alluxio.client.block.BlockStoreContext;
import alluxio.client.file.FileSystemContext;
import alluxio.client.file.FileSystemMasterClient;
import alluxio.client.lineage.LineageContext;
import alluxio.client.util.ClientTestUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.MockClassLoader;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.powermock.reflect.Whitebox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
/**
* Unit tests for {@link FileSystem}.
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest({FileSystemContext.class, FileSystemMasterClient.class, UserGroupInformation.class})
/*
* [ALLUXIO-1384] Tell PowerMock to defer the loading of javax.security classes to the system
* classloader in order to avoid linkage error when running this test with CDH.
* See https://code.google.com/p/powermock/wiki/FAQ.
*/
@PowerMockIgnore("javax.security.*")
/**
* Tests for {@link AbstractFileSystem}.
*/
public class AbstractFileSystemTest {
private static final Logger LOG = LoggerFactory.getLogger(Constants.LOGGER_TYPE);
private FileSystemContext mMockFileSystemContext;
/**
* Sets up the configuration before a test runs.
*/
@Before
public void before() throws Exception {
mockUserGroupInformation();
mockMasterClient();
if (isHadoop1x()) {
LOG.debug("Running Alluxio FS tests against hadoop 1x");
} else if (isHadoop2x()) {
LOG.debug("Running Alluxio FS tests against hadoop 2x");
} else {
LOG.warn("Running Alluxio FS tests against untargeted Hadoop version: " + getHadoopVersion());
}
}
@After
public void after() {
ConfigurationTestUtils.resetConfiguration();
ClientTestUtils.resetClient();
}
private ClassLoader getClassLoader(Class<?> clazz) {
// Power Mock makes this hard, so try to hack it
ClassLoader cl = clazz.getClassLoader();
if (cl instanceof MockClassLoader) {
cl = cl.getParent();
}
return cl;
}
private String getHadoopVersion() {
try {
final URL url = getSourcePath(org.apache.hadoop.fs.FileSystem.class);
final File path = new File(url.toURI());
final String[] splits = path.getName().split("-");
final String last = splits[splits.length - 1];
return last.substring(0, last.lastIndexOf("."));
} catch (URISyntaxException e) {
throw new AssertionError(e);
}
}
private URL getSourcePath(Class<?> clazz) {
try {
clazz = getClassLoader(clazz).loadClass(clazz.getName());
return clazz.getProtectionDomain().getCodeSource().getLocation();
} catch (ClassNotFoundException e) {
throw new AssertionError("Unable to find class " + clazz.getName());
}
}
/**
* Ensures that Hadoop loads {@link FaultTolerantFileSystem} when configured.
*/
@Test
public void hadoopShouldLoadFaultTolerantFileSystemWhenConfiguredTest() throws Exception {
final org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
if (isHadoop1x()) {
conf.set("fs." + Constants.SCHEME_FT + ".impl", FaultTolerantFileSystem.class.getName());
}
// when
final URI uri = URI.create(Constants.HEADER_FT + "localhost:19998/tmp/path.txt");
Configuration.set(Constants.MASTER_HOSTNAME, uri.getHost());
Configuration.set(Constants.MASTER_RPC_PORT, Integer.toString(uri.getPort()));
Configuration.set(Constants.ZOOKEEPER_ENABLED, "true");
final org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(uri, conf);
Assert.assertTrue(fs instanceof FaultTolerantFileSystem);
PowerMockito.verifyStatic();
alluxio.client.file.FileSystem.Factory.get();
ClientTestUtils.resetClient();
}
/**
* Ensures that Hadoop loads the Alluxio file system when configured.
*/
@Test
public void hadoopShouldLoadFileSystemWhenConfiguredTest() throws Exception {
final org.apache.hadoop.conf.Configuration conf = getConf();
// when
final URI uri = URI.create(Constants.HEADER + "localhost:19998/tmp/path.txt");
Configuration.set(Constants.MASTER_HOSTNAME, uri.getHost());
Configuration.set(Constants.MASTER_RPC_PORT, Integer.toString(uri.getPort()));
Configuration.set(Constants.ZOOKEEPER_ENABLED, "false");
final org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(uri, conf);
Assert.assertTrue(fs instanceof FileSystem);
PowerMockito.verifyStatic();
alluxio.client.file.FileSystem.Factory.get();
ClientTestUtils.resetClient();
}
/**
* Tests that initializing the {@link AbstractFileSystem} will reinitialize contexts to pick up
* changes to the master address.
*/
@Test
public void resetContextTest() throws Exception {
// Change to otherhost:410
URI uri = URI.create(Constants.HEADER + "otherhost:410/");
org.apache.hadoop.fs.FileSystem fs = org.apache.hadoop.fs.FileSystem.get(uri, getConf());
// Make sure all contexts are using the new address
InetSocketAddress newAddress = new InetSocketAddress("otherhost", 410);
Assert.assertEquals(newAddress, ClientContext.getMasterAddress());
Assert.assertEquals(newAddress, CommonTestUtils.getInternalState(BlockStoreContext.INSTANCE,
"mBlockMasterClientPool", "mMasterAddress"));
// Once from calling FileSystem.get
Mockito.verify(mMockFileSystemContext).reset();
Assert.assertEquals(newAddress, CommonTestUtils.getInternalState(LineageContext.INSTANCE,
"mLineageMasterClientPool", "mMasterAddress"));
}
/**
* Verifies that the initialize method is only called once even when there are many concurrent
* initializers during the initialization phase.
*/
@Test
public void concurrentInitializeTest() throws Exception {
final List<Thread> threads = new ArrayList<>();
final org.apache.hadoop.conf.Configuration conf = getConf();
for (int i = 0; i < 100; i++) {
final int id = i;
Thread t = new Thread(new Runnable() {
@Override
public void run() {
URI uri = URI.create(Constants.HEADER + "randomhost" + id + ":410/");
try {
org.apache.hadoop.fs.FileSystem.get(uri, conf);
} catch (IOException e) {
Assert.fail();
}
}
});
threads.add(t);
}
for (Thread t : threads) {
t.start();
}
for (Thread t : threads) {
t.join();
}
Mockito.verify(mMockFileSystemContext).reset();
}
private boolean isHadoop1x() {
return getHadoopVersion().startsWith("1");
}
private boolean isHadoop2x() {
return getHadoopVersion().startsWith("2");
}
private org.apache.hadoop.conf.Configuration getConf() throws Exception {
org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
if (isHadoop1x()) {
conf.set("fs." + Constants.SCHEME + ".impl", FileSystem.class.getName());
}
return conf;
}
private void mockMasterClient() {
PowerMockito.mockStatic(FileSystemContext.class);
mMockFileSystemContext = PowerMockito.mock(FileSystemContext.class);
FileSystemMasterClient mockMaster =
PowerMockito.mock(FileSystemMasterClient.class);
Whitebox.setInternalState(FileSystemContext.class, "INSTANCE", mMockFileSystemContext);
Mockito.when(mMockFileSystemContext.acquireMasterClient()).thenReturn(mockMaster);
}
private void mockUserGroupInformation() throws IOException {
// need to mock out since FileSystem.get calls UGI, which some times has issues on some systems
PowerMockito.mockStatic(UserGroupInformation.class);
final UserGroupInformation ugi = Mockito.mock(UserGroupInformation.class);
Mockito.when(UserGroupInformation.getCurrentUser()).thenReturn(ugi);
}
}
| fix ALLUXIO-2036
| core/client/src/test/java/alluxio/hadoop/AbstractFileSystemTest.java | fix ALLUXIO-2036 | <ide><path>ore/client/src/test/java/alluxio/hadoop/AbstractFileSystemTest.java
<ide> import alluxio.client.lineage.LineageContext;
<ide> import alluxio.client.util.ClientTestUtils;
<ide>
<add>import org.apache.hadoop.fs.Path;
<ide> import org.apache.hadoop.security.UserGroupInformation;
<ide> import org.junit.After;
<ide> import org.junit.Assert;
<ide> import org.slf4j.LoggerFactory;
<ide>
<ide> import java.io.File;
<add>import java.io.FileNotFoundException;
<ide> import java.io.IOException;
<ide> import java.net.InetSocketAddress;
<ide> import java.net.URI;
<ide> PowerMockito.verifyStatic();
<ide> alluxio.client.file.FileSystem.Factory.get();
<ide> ClientTestUtils.resetClient();
<add> }
<add>
<add> /**
<add> * Ensures that FileNotFoundException is thrown when listStatus is called on a nonexistent path.
<add> */
<add> @Test
<add> public void throwFileNotFoundExceptionWhenListStatusNonExistingTest() throws Exception {
<add> final org.apache.hadoop.conf.Configuration conf = getConf();
<add> conf.set(Constants.MASTER_HOSTNAME, "localhost");
<add> conf.set(Constants.MASTER_RPC_PORT, "19998");
<add> ClientContext.init();
<add> org.apache.hadoop.fs.FileSystem fs = FileSystem.get(conf);
<add> StringBuilder path = new StringBuilder("/ALLUXIO-2036.");
<add> path.append(System.currentTimeMillis()).append(".txt");
<add> try {
<add> fs.listStatus(new Path(path.toString()));
<add> Assert.fail("Listing the status of a nonexistent file should throw an exception");
<add> } catch (FileNotFoundException e) {
<add> // This exception is expected
<add> } finally {
<add> fs.close();
<add> }
<ide> }
<ide>
<ide> /** |
|
Java | mit | af4cf07be7cdf1266e3b4fc301ab71f95de33abe | 0 | Madura/SahanaInventoryManager,Madura/SahanaInventoryManager | home/madura/Documents/myprojects/SahanaInventoryManager/src/main/java/com/sahana/inventory/user/configuration/AppConfiguration.java | package com.sahana.inventory.user.configuration;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewResolverRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
import org.springframework.web.servlet.view.JstlView;
@Configuration
@EnableWebMvc
@ComponentScan(basePackages = "com.sahana.inventory.user")
public class AppConfiguration extends WebMvcConfigurerAdapter{
@Override
public void configureViewResolvers(ViewResolverRegistry registry) {
InternalResourceViewResolver viewResolver = new InternalResourceViewResolver();
viewResolver.setViewClass(JstlView.class);
viewResolver.setPrefix("/WEB-INF/views/");
viewResolver.setSuffix(".jsp");
registry.viewResolver(viewResolver);
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
registry.addResourceHandler("/static/**").addResourceLocations("/static/");
}
}
| Delete AppConfiguration.java | home/madura/Documents/myprojects/SahanaInventoryManager/src/main/java/com/sahana/inventory/user/configuration/AppConfiguration.java | Delete AppConfiguration.java | <ide><path>ome/madura/Documents/myprojects/SahanaInventoryManager/src/main/java/com/sahana/inventory/user/configuration/AppConfiguration.java
<del>package com.sahana.inventory.user.configuration;
<del>
<del>import org.springframework.context.annotation.ComponentScan;
<del>import org.springframework.context.annotation.Configuration;
<del>import org.springframework.web.servlet.config.annotation.EnableWebMvc;
<del>import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
<del>import org.springframework.web.servlet.config.annotation.ViewResolverRegistry;
<del>import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
<del>import org.springframework.web.servlet.view.InternalResourceViewResolver;
<del>import org.springframework.web.servlet.view.JstlView;
<del>
<del>@Configuration
<del>@EnableWebMvc
<del>@ComponentScan(basePackages = "com.sahana.inventory.user")
<del>public class AppConfiguration extends WebMvcConfigurerAdapter{
<del>
<del> @Override
<del> public void configureViewResolvers(ViewResolverRegistry registry) {
<del> InternalResourceViewResolver viewResolver = new InternalResourceViewResolver();
<del> viewResolver.setViewClass(JstlView.class);
<del> viewResolver.setPrefix("/WEB-INF/views/");
<del> viewResolver.setSuffix(".jsp");
<del> registry.viewResolver(viewResolver);
<del> }
<del>
<del> @Override
<del> public void addResourceHandlers(ResourceHandlerRegistry registry) {
<del> registry.addResourceHandler("/static/**").addResourceLocations("/static/");
<del> }
<del>
<del>} |
||
JavaScript | agpl-3.0 | 549917cafd5d6653dc565bb3d26ff42c9f636828 | 0 | golflima/hoplaJS,golflima/hoplaJS,golflima/hoplaJS,golflima/hoplaJS | /*!
* _ _ _ ____
* | |__ ___ _ __ | | __ _ | / ___|
* | '_ \ / _ \| '_ \| |/ _` |_ | \___ \
* | | | | (_) | |_) | | (_| | |_| |___) |
* |_| |_|\___/| .__/|_|\__,_|\___/|____/
* |_|
*
* This file is part of hoplaJS.
* See: <https://github.com/golflima/hoplaJS>.
*
* Copyright (C) 2017 Jérémy Walther <[email protected]>.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
* Otherwise, see: <https://www.gnu.org/licenses/agpl-3.0>.
*/
function base64_encode(text) {
return window.btoa(unescape(encodeURIComponent(text))).replace('+', '-').replace('/', '_').replace('=', '');
}
$(document).ready(function(){
// Init 'Generate the HoplaJS URLs for this script !' button ...
$('.onclick-generate').click(function() {
var javascript = $('#javascript').val();
var dependencies = $('#dependencies').val();
var htmlBody = $('#htmlBody').val();
$.ajax({
type: "POST",
url: baseUrl + '/api/encode',
data: {
javascript: javascript,
dependencies: dependencies,
htmlBody: htmlBody,
},
success: function(data) {
$('#urlEdit').val(data.baseUrl + '/edit/' + data.data);
$('#urlRaw').val(data.baseUrl + '/raw/' + data.data);
$('#urlRun').val(data.baseUrl + '/run/' + data.data);
$('.urlHash').html('— hash: ' + data.hash);
var urlSize = (data.baseUrl + '/run/' + data.data).length;
$('#urlSize').html(urlSize);
$('#urlSize').removeClass('bg-success bg-warning bg-danger');
if (urlSize <= 2048) {
// URL length below 2048 are OK on every browsers
// but over 2048 it won't work with MS IE
$('#urlSize').addClass('bg-success');
$('#urlSizeProgress1').css('width', urlSize * 100 / 2048 + '%');
$('#urlSizeProgress2').css('width', '0%');
$('#urlSizeProgress3').css('width', '0%');
} else if (urlSize <= 8000) {
// URL length over 8000 won't work on Android, and will be blocked by Apache by default
$('#urlSize').addClass('bg-warning');
$('#urlSizeProgress1').css('width', 2047 * 100 / 8000 + '%');
$('#urlSizeProgress2').css('width', (urlSize - 2048) * 100 / 8000 + '%');
$('#urlSizeProgress3').css('width', '0%');
}
else {
// URL length over 32779 won't work on Google Chrome
// It seems Firefox and Safari are able to handle URL length over 65535
$('#urlSize').addClass('bg-danger');
$('#urlSizeProgress1').css('width', 2047 * 100 / 32779 + '%');
$('#urlSizeProgress2').css('width', (8000 - 2048) * 100 / 32779 + '%');
$('#urlSizeProgress3').css('width', (urlSize - 2048 - 8000) * 100 / 32779 + '%');
}
},
dataType: 'json'
});
});
// Init 'Copy' buttons ...
$('.onclick-copy').click(function() {
var toCopy = $(this).parent().parent().find('input:last').attr('id');
document.getElementById(toCopy).select();
var copied;
try {
copied = document.execCommand('copy');
} catch (ex) {
copied = false;
}
if (copied) {
$(this)
.removeClass('btn-primary').addClass('btn-success')
.delay(1000)
.removeClass('btn-success').addClass('btn-primary');
} else {
$(this)
.removeClass('btn-primary').addClass('btn-danger')
.delay(1000)
.removeClass('btn-danger').addClass('btn-primary');
}
});
// Init 'Test' buttons ...
$('.onclick-test').click(function() {
var url = $(this).parent().parent().find('input:last').val();
var opened = window.open(url, '_blank');
if (opened) {
opened.focus();
} else {
alert('Please allow popups for this website.');
}
});
// Init ToolBox - Proxy
$('#proxyUrlRaw').change(function() {
$('#proxyUrl').val(baseUrl + '/api/proxy/' + base64_encode($('#proxyUrlRaw').val()) +
($('#proxyUrlContentType').val() != '' ? '/' + base64_encode($('#proxyUrlContentType').val()) : ''));
});
$('#proxyUrlContentType').change(function() {
$('#proxyUrlRaw').change();
});
}); | web/js/site.js | /*!
* _ _ _ ____
* | |__ ___ _ __ | | __ _ | / ___|
* | '_ \ / _ \| '_ \| |/ _` |_ | \___ \
* | | | | (_) | |_) | | (_| | |_| |___) |
* |_| |_|\___/| .__/|_|\__,_|\___/|____/
* |_|
*
* This file is part of hoplaJS.
* See: <https://github.com/golflima/hoplaJS>.
*
* Copyright (C) 2017 Jérémy Walther <[email protected]>.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
* Otherwise, see: <https://www.gnu.org/licenses/agpl-3.0>.
*/
function base64_encode(text) {
return window.btoa(unescape(encodeURIComponent(text))).replace('+', '-').replace('/', '_').replace('=', '');
}
$(document).ready(function(){
// Init 'Generate the HoplaJS URLs for this script !' button ...
$('.onclick-generate').click(function() {
var javascript = $('#javascript').val();
var dependencies = $('#dependencies').val();
var htmlBody = $('#htmlBody').val();
$.ajax({
type: "POST",
url: baseUrl + '/api/encode',
data: {
javascript: javascript,
dependencies: dependencies,
htmlBody: htmlBody,
},
success: function(data) {
$('#urlEdit').val(data.baseUrl + '/edit/' + data.data);
$('#urlRaw').val(data.baseUrl + '/raw/' + data.data);
$('#urlRun').val(data.baseUrl + '/run/' + data.data);
$('.urlHash').html('— hash: ' + data.hash);
var urlSize = (data.baseUrl + '/run/' + data.data).length;
$('#urlSize').html(urlSize);
$('#urlSize').removeClass('bg-success bg-warning bg-danger');
if (urlSize <= 2048) {
// URL length below 2048 are OK on every browsers
// but over 2048 it won't work with MS IE
$('#urlSize').addClass('bg-success');
$('#urlSizeProgress1').css('width', urlSize * 100 / 2048 + '%');
$('#urlSizeProgress2').css('width', '0%');
$('#urlSizeProgress3').css('width', '0%');
} else if (urlSize <= 8192) {
// URL length over 8192 won't work on Android
$('#urlSize').addClass('bg-warning');
$('#urlSizeProgress1').css('width', 2047 * 100 / 8192 + '%');
$('#urlSizeProgress2').css('width', (urlSize - 2048) * 100 / 8192 + '%');
$('#urlSizeProgress3').css('width', '0%');
}
else {
// URL length over 32779 won't work on Google Chrome
// It seems Firefox and Safari are able to handle URL length over 65535
$('#urlSize').addClass('bg-danger');
$('#urlSizeProgress1').css('width', 2047 * 100 / 32779 + '%');
$('#urlSizeProgress2').css('width', (8192 - 2048) * 100 / 32779 + '%');
$('#urlSizeProgress3').css('width', (urlSize - 2048 - 8192) * 100 / 32779 + '%');
}
},
dataType: 'json'
});
});
// Init 'Copy' buttons ...
$('.onclick-copy').click(function() {
var toCopy = $(this).parent().parent().find('input:last').attr('id');
document.getElementById(toCopy).select();
var copied;
try {
copied = document.execCommand('copy');
} catch (ex) {
copied = false;
}
if (copied) {
$(this)
.removeClass('btn-primary').addClass('btn-success')
.delay(1000)
.removeClass('btn-success').addClass('btn-primary');
} else {
$(this)
.removeClass('btn-primary').addClass('btn-danger')
.delay(1000)
.removeClass('btn-danger').addClass('btn-primary');
}
});
// Init 'Test' buttons ...
$('.onclick-test').click(function() {
var url = $(this).parent().parent().find('input:last').val();
var opened = window.open(url, '_blank');
if (opened) {
opened.focus();
} else {
alert('Please allow popups for this website.');
}
});
// Init ToolBox - Proxy
$('#proxyUrlRaw').change(function() {
$('#proxyUrl').val(baseUrl + '/api/proxy/' + base64_encode($('#proxyUrlRaw').val()) +
($('#proxyUrlContentType').val() != '' ? '/' + base64_encode($('#proxyUrlContentType').val()) : ''));
});
$('#proxyUrlContentType').change(function() {
$('#proxyUrlRaw').change();
});
}); | Change URL size limits
| web/js/site.js | Change URL size limits | <ide><path>eb/js/site.js
<ide> $('#urlSizeProgress1').css('width', urlSize * 100 / 2048 + '%');
<ide> $('#urlSizeProgress2').css('width', '0%');
<ide> $('#urlSizeProgress3').css('width', '0%');
<del> } else if (urlSize <= 8192) {
<del> // URL length over 8192 won't work on Android
<add> } else if (urlSize <= 8000) {
<add> // URL length over 8000 won't work on Android, and will be blocked by Apache by default
<ide> $('#urlSize').addClass('bg-warning');
<del> $('#urlSizeProgress1').css('width', 2047 * 100 / 8192 + '%');
<del> $('#urlSizeProgress2').css('width', (urlSize - 2048) * 100 / 8192 + '%');
<add> $('#urlSizeProgress1').css('width', 2047 * 100 / 8000 + '%');
<add> $('#urlSizeProgress2').css('width', (urlSize - 2048) * 100 / 8000 + '%');
<ide> $('#urlSizeProgress3').css('width', '0%');
<ide> }
<ide> else {
<ide> // It seems Firefox and Safari are able to handle URL length over 65535
<ide> $('#urlSize').addClass('bg-danger');
<ide> $('#urlSizeProgress1').css('width', 2047 * 100 / 32779 + '%');
<del> $('#urlSizeProgress2').css('width', (8192 - 2048) * 100 / 32779 + '%');
<del> $('#urlSizeProgress3').css('width', (urlSize - 2048 - 8192) * 100 / 32779 + '%');
<add> $('#urlSizeProgress2').css('width', (8000 - 2048) * 100 / 32779 + '%');
<add> $('#urlSizeProgress3').css('width', (urlSize - 2048 - 8000) * 100 / 32779 + '%');
<ide> }
<ide> },
<ide> dataType: 'json' |
|
Java | lgpl-2.1 | b88946e1d4a449cd9267dbff461aa4f15fff9dcc | 0 | ethaneldridge/vassal,ethaneldridge/vassal,ethaneldridge/vassal | /*
* $Id$
*
* Copyright (c) 2000-2003 by Rodney Kinney
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.configure;
import java.awt.Color;
import java.awt.Component;
import java.awt.Image;
import java.awt.Window;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.Icon;
import javax.swing.JPanel;
import javax.swing.KeyStroke;
import VASSAL.build.AutoConfigurable;
import VASSAL.build.Configurable;
import VASSAL.build.GameModule;
import VASSAL.tools.ErrorDialog;
import VASSAL.tools.ErrorUtils;
/**
* A Configurer for configuring Configurable components
* (Is that as redundant as it sounds?)
* Automatically builds a property editor with controls for setting all
* of the attributes of the target Configurable component
*/
public class AutoConfigurer extends Configurer
implements PropertyChangeListener {
protected JPanel p;
protected AutoConfigurable target;
protected List<Configurer> configurers = new ArrayList<Configurer>();
protected Map<String,VisibilityCondition> conditions;
public AutoConfigurer(AutoConfigurable c) {
super(null, c.getConfigureName());
target = c;
setValue(target);
target.addPropertyChangeListener(new PropertyChangeListener() {
public void propertyChange(final PropertyChangeEvent evt) {
if (Configurable.NAME_PROPERTY.equals(evt.getPropertyName())) {
setName((String) evt.getNewValue());
}
}
});
p = new JPanel();
p.setLayout(new BoxLayout(p, BoxLayout.Y_AXIS));
String[] name = c.getAttributeNames();
String[] prompt = c.getAttributeDescriptions();
Class<?>[] type = c.getAttributeTypes();
int n = Math.min(name.length, Math.min(prompt.length, type.length));
for (int i = 0; i < n; ++i) {
if (type[i] == null) {
continue;
}
Configurer config;
config = createConfigurer(type[i], name[i], prompt[i], target);
if (config != null) {
config.addPropertyChangeListener(this);
config.setValue(target.getAttributeValueString(name[i]));
Box box = Box.createHorizontalBox();
box.add(config.getControls());
box.add(Box.createHorizontalGlue());
p.add(box);
configurers.add(config);
}
setVisibility(name[i],c.getAttributeVisibility(name[i]));
}
}
public static Configurer createConfigurer(Class<?> type,
String key,
String prompt,
AutoConfigurable target) {
Configurer config = null;
if (String.class.isAssignableFrom(type)) {
config = new StringConfigurer(key, prompt);
}
else if (Integer.class.isAssignableFrom(type)) {
config = new IntConfigurer(key, prompt);
}
else if (Double.class.isAssignableFrom(type)) {
config = new DoubleConfigurer(key, prompt);
}
else if (Boolean.class.isAssignableFrom(type)) {
config = new BooleanConfigurer(key, prompt);
}
else if (Image.class.isAssignableFrom(type)) {
config = new ImageConfigurer(key, prompt,
GameModule.getGameModule().getArchiveWriter());
}
else if (Color.class.isAssignableFrom(type)) {
config = new ColorConfigurer(key, prompt);
}
else if (KeyStroke.class.isAssignableFrom(type)) {
config = new HotKeyConfigurer(key, prompt);
}
else if (File.class.isAssignableFrom(type)) {
config = new FileConfigurer(key, prompt,
GameModule.getGameModule().getArchiveWriter());
}
else if (String[].class.isAssignableFrom(type)) {
config = new StringArrayConfigurer(key, prompt);
}
else if (Icon.class.isAssignableFrom(type)) {
config = new IconConfigurer(key,prompt,null);
}
else if (PropertyExpression.class.isAssignableFrom(type)) {
config = new PropertyExpressionConfigurer(key, prompt);
}
else if (StringEnum.class.isAssignableFrom(type)) {
StringEnum se = null;
try {
se = (StringEnum) type.getConstructor().newInstance();
}
catch (Throwable t) {
ErrorUtils.handleNewInstanceFailure(t);
config = new StringConfigurer(key, prompt);
}
if (se != null) {
final String[] validValues = se.getValidValues(target);
config = new StringEnumConfigurer(key, prompt, validValues);
}
}
else if (ConfigurerFactory.class.isAssignableFrom(type)) {
ConfigurerFactory cf = null;
try {
cf = (ConfigurerFactory) type.getConstructor().newInstance();
}
catch (Throwable t) {
ErrorUtils.handleNewInstanceFailure(t);
}
if (cf != null) {
config = cf.getConfigurer(target, key, prompt);
}
}
else {
throw new IllegalArgumentException("Invalid class " + type.getName());
}
return config;
}
public void reset() {
String[] s = target.getAttributeNames();
for (int i=0;i<s.length;++i) {
Configurer config = getConfigurer(s[i]);
if (config != null) {
config.setValue(target.getAttributeValueString(s[i]));
}
}
}
public String getValueString() {
return target.getConfigureName();
}
public void setValue(String s) {
throw new UnsupportedOperationException(
"Can't set Configurable from String");
}
public Component getControls() {
return p;
}
public void propertyChange(final PropertyChangeEvent evt) {
target.setAttribute(evt.getPropertyName(), evt.getNewValue());
checkVisibility();
}
public void setVisibility(String attribute, VisibilityCondition c) {
if (c != null) {
if (conditions == null) {
conditions = new HashMap<String,VisibilityCondition>();
}
conditions.put(attribute, c);
checkVisibility();
}
}
protected void checkVisibility() {
boolean visChanged = false;
if (conditions != null) {
for (Configurer c : configurers) {
VisibilityCondition cond = conditions.get(c.getKey());
if (cond != null) {
if (c.getControls().isVisible() != cond.shouldBeVisible()) {
visChanged = true;
c.getControls().setVisible(cond.shouldBeVisible());
}
}
}
// Only repack the configurer if an item visiblity has changed.
if (visChanged && p.getTopLevelAncestor() instanceof Window) {
((Window) p.getTopLevelAncestor()).pack();
}
}
}
public Configurer getConfigurer(String attribute) {
for (Configurer c : configurers) {
if (attribute.equals(c.getKey())) {
return c;
}
}
return null;
}
}
| src/VASSAL/configure/AutoConfigurer.java | /*
* $Id$
*
* Copyright (c) 2000-2003 by Rodney Kinney
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License (LGPL) as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, copies are available
* at http://www.opensource.org.
*/
package VASSAL.configure;
import java.awt.Color;
import java.awt.Component;
import java.awt.Image;
import java.awt.Window;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.swing.Box;
import javax.swing.Icon;
import javax.swing.JPanel;
import javax.swing.KeyStroke;
import VASSAL.build.AutoConfigurable;
import VASSAL.build.Configurable;
import VASSAL.build.GameModule;
import VASSAL.tools.ErrorDialog;
/**
* A Configurer for configuring Configurable components
* (Is that as redundant as it sounds?)
* Automatically builds a property editor with controls for setting all
* of the attributes of the target Configurable component
*/
public class AutoConfigurer extends Configurer
implements PropertyChangeListener {
protected JPanel p;
protected AutoConfigurable target;
protected List<Configurer> configurers = new ArrayList<Configurer>();
protected Map<String,VisibilityCondition> conditions;
public AutoConfigurer(AutoConfigurable c) {
super(null, c.getConfigureName());
target = c;
setValue(target);
target.addPropertyChangeListener(new PropertyChangeListener() {
public void propertyChange(final PropertyChangeEvent evt) {
if (Configurable.NAME_PROPERTY.equals(evt.getPropertyName())) {
setName((String) evt.getNewValue());
}
}
});
p = new JPanel();
p.setLayout(new javax.swing.BoxLayout(p, javax.swing.BoxLayout.Y_AXIS));
String[] name = c.getAttributeNames();
String[] prompt = c.getAttributeDescriptions();
Class<?>[] type = c.getAttributeTypes();
int n = Math.min(name.length, Math.min(prompt.length, type.length));
for (int i = 0; i < n; ++i) {
if (type[i] == null) {
continue;
}
Configurer config;
config = createConfigurer(type[i], name[i], prompt[i], target);
if (config != null) {
config.addPropertyChangeListener(this);
config.setValue(target.getAttributeValueString(name[i]));
Box box = Box.createHorizontalBox();
box.add(config.getControls());
box.add(Box.createHorizontalGlue());
p.add(box);
configurers.add(config);
}
setVisibility(name[i],c.getAttributeVisibility(name[i]));
}
}
public static Configurer createConfigurer(Class type,
String key,
String prompt,
AutoConfigurable target) {
Configurer config = null;
if (String.class.isAssignableFrom(type)) {
config = new StringConfigurer(key, prompt);
}
else if (Integer.class.isAssignableFrom(type)) {
config = new IntConfigurer(key, prompt);
}
else if (Double.class.isAssignableFrom(type)) {
config = new DoubleConfigurer(key, prompt);
}
else if (Boolean.class.isAssignableFrom(type)) {
config = new BooleanConfigurer(key, prompt);
}
else if (Image.class.isAssignableFrom(type)) {
config = new ImageConfigurer(key, prompt,
GameModule.getGameModule().getArchiveWriter());
}
else if (Color.class.isAssignableFrom(type)) {
config = new ColorConfigurer(key, prompt);
}
else if (KeyStroke.class.isAssignableFrom(type)) {
config = new HotKeyConfigurer(key, prompt);
}
else if (java.io.File.class.isAssignableFrom(type)) {
config = new FileConfigurer(key, prompt,
GameModule.getGameModule().getArchiveWriter());
}
else if (String[].class.isAssignableFrom(type)) {
config = new StringArrayConfigurer(key, prompt);
}
else if (Icon.class.isAssignableFrom(type)) {
config = new IconConfigurer(key,prompt,null);
}
else if (PropertyExpression.class.isAssignableFrom(type)) {
config = new PropertyExpressionConfigurer(key,prompt);
}
else if (StringEnum.class.isAssignableFrom(type)) {
try {
final String[] validValues =
((StringEnum) type.newInstance()).getValidValues(target);
config = new StringEnumConfigurer(key, prompt, validValues);
}
catch (IllegalAccessException e) {
ErrorDialog.bug(e);
config = new StringConfigurer(key, prompt);
}
catch (InstantiationException e) {
ErrorDialog.bug(e);
config = new StringConfigurer(key, prompt);
}
}
else if (ConfigurerFactory.class.isAssignableFrom(type)) {
try {
final ConfigurerFactory f = (ConfigurerFactory) type.newInstance();
config = f.getConfigurer(target, key,prompt);
}
catch (IllegalAccessException e) {
// FIXME: do this, or throw?
ErrorDialog.bug(e);
}
catch (InstantiationException e) {
ErrorDialog.bug(e);
}
}
else {
throw new IllegalArgumentException("Invalid class " + type.getName());
}
return config;
}
public void reset() {
String[] s = target.getAttributeNames();
for (int i=0;i<s.length;++i) {
Configurer config = getConfigurer(s[i]);
if (config != null) {
config.setValue(target.getAttributeValueString(s[i]));
}
}
}
public String getValueString() {
return target.getConfigureName();
}
public void setValue(String s) {
throw new UnsupportedOperationException(
"Can't set Configurable from String");
}
public Component getControls() {
return p;
}
public void propertyChange(final PropertyChangeEvent evt) {
target.setAttribute(evt.getPropertyName(), evt.getNewValue());
checkVisibility();
}
public void setVisibility(String attribute, VisibilityCondition c) {
if (c != null) {
if (conditions == null) {
conditions = new HashMap<String,VisibilityCondition>();
}
conditions.put(attribute, c);
checkVisibility();
}
}
protected void checkVisibility() {
boolean visChanged = false;
if (conditions != null) {
for (Configurer c : configurers) {
VisibilityCondition cond = conditions.get(c.getKey());
if (cond != null) {
if (c.getControls().isVisible() != cond.shouldBeVisible()) {
visChanged = true;
c.getControls().setVisible(cond.shouldBeVisible());
}
}
}
// Only repack the configurer if an item visiblity has changed.
if (visChanged && p.getTopLevelAncestor() instanceof Window) {
((Window) p.getTopLevelAncestor()).pack();
}
}
}
public Configurer getConfigurer(String attribute) {
for (Configurer c : configurers) {
if (attribute.equals(c.getKey())) {
return c;
}
}
return null;
}
}
| Use Class.getConstructor().newInstance() instead of Class.newInstance() to trap errors thrown by ctor.
git-svn-id: 3948e88432e1a59e1cb2bc472f8f957688004121@3926 67b53d14-2c14-4ace-a08f-0dab2b34000c
| src/VASSAL/configure/AutoConfigurer.java | Use Class.getConstructor().newInstance() instead of Class.newInstance() to trap errors thrown by ctor. | <ide><path>rc/VASSAL/configure/AutoConfigurer.java
<ide> import java.awt.Window;
<ide> import java.beans.PropertyChangeEvent;
<ide> import java.beans.PropertyChangeListener;
<add>import java.io.File;
<ide> import java.util.ArrayList;
<ide> import java.util.HashMap;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import javax.swing.Box;
<add>import javax.swing.BoxLayout;
<ide> import javax.swing.Icon;
<ide> import javax.swing.JPanel;
<ide> import javax.swing.KeyStroke;
<ide> import VASSAL.build.Configurable;
<ide> import VASSAL.build.GameModule;
<ide> import VASSAL.tools.ErrorDialog;
<add>import VASSAL.tools.ErrorUtils;
<ide>
<ide> /**
<ide> * A Configurer for configuring Configurable components
<ide> });
<ide>
<ide> p = new JPanel();
<del> p.setLayout(new javax.swing.BoxLayout(p, javax.swing.BoxLayout.Y_AXIS));
<add> p.setLayout(new BoxLayout(p, BoxLayout.Y_AXIS));
<ide>
<ide> String[] name = c.getAttributeNames();
<ide> String[] prompt = c.getAttributeDescriptions();
<ide> }
<ide> }
<ide>
<del> public static Configurer createConfigurer(Class type,
<add> public static Configurer createConfigurer(Class<?> type,
<ide> String key,
<ide> String prompt,
<ide> AutoConfigurable target) {
<ide> else if (KeyStroke.class.isAssignableFrom(type)) {
<ide> config = new HotKeyConfigurer(key, prompt);
<ide> }
<del> else if (java.io.File.class.isAssignableFrom(type)) {
<add> else if (File.class.isAssignableFrom(type)) {
<ide> config = new FileConfigurer(key, prompt,
<ide> GameModule.getGameModule().getArchiveWriter());
<ide> }
<ide> config = new IconConfigurer(key,prompt,null);
<ide> }
<ide> else if (PropertyExpression.class.isAssignableFrom(type)) {
<del> config = new PropertyExpressionConfigurer(key,prompt);
<add> config = new PropertyExpressionConfigurer(key, prompt);
<ide> }
<ide> else if (StringEnum.class.isAssignableFrom(type)) {
<add> StringEnum se = null;
<ide> try {
<del> final String[] validValues =
<del> ((StringEnum) type.newInstance()).getValidValues(target);
<add> se = (StringEnum) type.getConstructor().newInstance();
<add> }
<add> catch (Throwable t) {
<add> ErrorUtils.handleNewInstanceFailure(t);
<add> config = new StringConfigurer(key, prompt);
<add> }
<add>
<add> if (se != null) {
<add> final String[] validValues = se.getValidValues(target);
<ide> config = new StringEnumConfigurer(key, prompt, validValues);
<ide> }
<del> catch (IllegalAccessException e) {
<del> ErrorDialog.bug(e);
<del> config = new StringConfigurer(key, prompt);
<del> }
<del> catch (InstantiationException e) {
<del> ErrorDialog.bug(e);
<del> config = new StringConfigurer(key, prompt);
<del> }
<ide> }
<ide> else if (ConfigurerFactory.class.isAssignableFrom(type)) {
<add> ConfigurerFactory cf = null;
<ide> try {
<del> final ConfigurerFactory f = (ConfigurerFactory) type.newInstance();
<del> config = f.getConfigurer(target, key,prompt);
<del> }
<del> catch (IllegalAccessException e) {
<del>// FIXME: do this, or throw?
<del> ErrorDialog.bug(e);
<del> }
<del> catch (InstantiationException e) {
<del> ErrorDialog.bug(e);
<add> cf = (ConfigurerFactory) type.getConstructor().newInstance();
<add> }
<add> catch (Throwable t) {
<add> ErrorUtils.handleNewInstanceFailure(t);
<add> }
<add>
<add> if (cf != null) {
<add> config = cf.getConfigurer(target, key, prompt);
<ide> }
<ide> }
<ide> else { |
|
Java | apache-2.0 | de2beeeeeacfe21993eaf89191a30298b8ea9680 | 0 | psoreide/bnd,mcculls/bnd,lostiniceland/bnd,joansmith/bnd,magnet/bnd,joansmith/bnd,xtracoder/bnd,mcculls/bnd,mcculls/bnd,xtracoder/bnd,lostiniceland/bnd,GEBIT/bnd,GEBIT/bnd,magnet/bnd,psoreide/bnd,lostiniceland/bnd,psoreide/bnd,magnet/bnd | package aQute.lib.io;
import java.io.*;
import java.net.*;
import java.nio.*;
import java.util.*;
public class IO {
public static void copy(InputStream in, OutputStream out) throws IOException {
DataOutputStream dos = new DataOutputStream(out);
copy(in, (DataOutput) dos);
}
public static void copy(InputStream in, DataOutput out) throws IOException {
byte[] buffer = new byte[10000];
try {
int size = in.read(buffer);
while (size > 0) {
out.write(buffer, 0, size);
size = in.read(buffer);
}
} finally {
in.close();
}
}
public static void copy(InputStream in, ByteBuffer bb) throws IOException {
byte[] buffer = new byte[10000];
try {
int size = in.read(buffer);
while (size > 0) {
bb.put(buffer, 0, size);
size = in.read(buffer);
}
} finally {
in.close();
}
}
public static void copy(File a, File b) throws IOException {
if (a.isFile()) {
FileOutputStream out = new FileOutputStream(b);
try {
copy(new FileInputStream(a), out);
} finally {
out.close();
}
} else if (a.isDirectory()) {
b.mkdirs();
if (!b.isDirectory())
throw new IllegalArgumentException(
"target directory for a directory must be a directory: " + b);
File subs[] = a.listFiles();
for (File sub : subs) {
copy(sub, new File(b, sub.getName()));
}
} else
throw new FileNotFoundException("During copy: " + a.toString());
}
public static void copy(InputStream a, File b) throws IOException {
FileOutputStream out = new FileOutputStream(b);
try {
copy(a, out);
} finally {
out.close();
}
}
public static void copy(File a, OutputStream b) throws IOException {
copy(new FileInputStream(a), b);
}
public static String collect(File a, String encoding) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
copy(a, out);
return new String(out.toByteArray(), encoding);
}
public static String collect(URL a, String encoding) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
copy(a.openStream(), out);
return new String(out.toByteArray(), encoding);
}
public static String collect(URL a) throws IOException {
return collect(a, "UTF-8");
}
public static String collect(File a) throws IOException {
return collect(a, "UTF-8");
}
public static String collect(String a) throws IOException {
return collect(new File(a), "UTF-8");
}
public static String collect(InputStream a, String encoding) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
copy(a, out);
return new String(out.toByteArray(), encoding);
}
public static String collect(InputStream a) throws IOException {
return collect(a, "UTF-8");
}
public static String collect(Reader a) throws IOException {
StringWriter sw = new StringWriter();
char[] buffer = new char[10000];
int size = a.read(buffer);
while (size > 0) {
sw.write(buffer, 0, size);
size = a.read(buffer);
}
return sw.toString();
}
public static File getFile(File base, String file) {
File f = new File(file);
if (f.isAbsolute())
return f;
int n;
f = base.getAbsoluteFile();
while ((n = file.indexOf('/')) > 0) {
String first = file.substring(0, n);
file = file.substring(n + 1);
if (first.equals(".."))
f = f.getParentFile();
else
f = new File(f, first);
}
if (file.equals(".."))
return f.getParentFile();
else
return new File(f, file).getAbsoluteFile();
}
public static void delete(File f) {
f = f.getAbsoluteFile();
if (f.getParentFile() == null)
throw new IllegalArgumentException("Cannot recursively delete root for safety reasons");
if (f.isDirectory()) {
File[] subs = f.listFiles();
for (File sub : subs)
delete(sub);
}
f.delete();
}
public static long drain(InputStream in) throws IOException {
long result = 0;
byte[] buffer = new byte[10000];
try {
int size = in.read(buffer);
while (size > 0) {
result += size;
size = in.read(buffer);
}
} finally {
in.close();
}
return result;
}
public void copy(Collection<?> c, OutputStream out) {
PrintStream ps = new PrintStream(out);
for (Object o : c) {
ps.println(o);
}
ps.flush();
}
public static Throwable close(Closeable in) {
try {
in.close();
return null;
} catch (Throwable e) {
return e;
}
}
public static URL toURL(String s, File base) throws MalformedURLException {
int n = s.indexOf(':');
if (n > 0 && n < 10) {
// is url
return new URL(s);
}
return getFile(base, s).toURI().toURL();
}
public static void store(Object o, File out) throws IOException {
store(o, out, "UTF-8");
}
public static void store(Object o, File out, String encoding) throws IOException {
FileOutputStream fout = new FileOutputStream(out);
try {
store(o, fout, encoding);
} finally {
fout.close();
}
}
public static void store(Object o, OutputStream fout) throws UnsupportedEncodingException,
IOException {
store(o, fout, "UTF-8");
}
public static void store(Object o, OutputStream fout, String encoding)
throws UnsupportedEncodingException, IOException {
String s;
if (o == null)
s = "";
else
s = o.toString();
try {
fout.write(s.getBytes(encoding));
} finally {
fout.close();
}
}
public static InputStream stream(String s) {
try {
return new ByteArrayInputStream(s.getBytes("UTF-8"));
} catch(Exception e) {
// Ignore
return null;
}
}
public static InputStream stream(String s, String encoding) throws UnsupportedEncodingException {
return new ByteArrayInputStream(s.getBytes(encoding));
}
public static InputStream stream(File s) throws FileNotFoundException {
return new FileInputStream(s);
}
public static InputStream stream(URL s) throws IOException {
return s.openStream();
}
public static Reader reader(String s) {
return new StringReader(s);
}
}
| aQute.libg/src/aQute/lib/io/IO.java | package aQute.lib.io;
import java.io.*;
import java.net.*;
import java.nio.*;
import java.util.*;
public class IO {
public static void copy(InputStream in, OutputStream out) throws IOException {
DataOutputStream dos = new DataOutputStream(out);
copy(in, (DataOutput) dos);
}
public static void copy(InputStream in, DataOutput out) throws IOException {
byte[] buffer = new byte[10000];
try {
int size = in.read(buffer);
while (size > 0) {
out.write(buffer, 0, size);
size = in.read(buffer);
}
} finally {
in.close();
}
}
public static void copy(InputStream in, ByteBuffer bb) throws IOException {
byte[] buffer = new byte[10000];
try {
int size = in.read(buffer);
while (size > 0) {
bb.put(buffer, 0, size);
size = in.read(buffer);
}
} finally {
in.close();
}
}
public static void copy(File a, File b) throws IOException {
if (a.isFile()) {
FileOutputStream out = new FileOutputStream(b);
try {
copy(new FileInputStream(a), out);
} finally {
out.close();
}
} else if (a.isDirectory()) {
b.mkdirs();
if (!b.isDirectory())
throw new IllegalArgumentException(
"target directory for a directory must be a directory: " + b);
File subs[] = a.listFiles();
for (File sub : subs) {
copy(sub, new File(b, sub.getName()));
}
} else
throw new FileNotFoundException("During copy: " + a.toString());
}
public static void copy(InputStream a, File b) throws IOException {
FileOutputStream out = new FileOutputStream(b);
try {
copy(a, out);
} finally {
out.close();
}
}
public static void copy(File a, OutputStream b) throws IOException {
copy(new FileInputStream(a), b);
}
public static String collect(File a, String encoding) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
copy(a, out);
return new String(out.toByteArray(), encoding);
}
public static String collect(URL a, String encoding) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
copy(a.openStream(), out);
return new String(out.toByteArray(), encoding);
}
public static String collect(File a) throws IOException {
return collect(a, "UTF-8");
}
public static String collect(InputStream a, String encoding) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
copy(a, out);
return new String(out.toByteArray(), encoding);
}
public static String collect(InputStream a) throws IOException {
return collect(a, "UTF-8");
}
public static String collect(Reader a) throws IOException {
StringWriter sw = new StringWriter();
char[] buffer = new char[10000];
int size = a.read(buffer);
while (size > 0) {
sw.write(buffer, 0, size);
size = a.read(buffer);
}
return sw.toString();
}
public static File getFile(File base, String file) {
File f = new File(file);
if (f.isAbsolute())
return f;
int n;
f = base.getAbsoluteFile();
while ((n = file.indexOf('/')) > 0) {
String first = file.substring(0, n);
file = file.substring(n + 1);
if (first.equals(".."))
f = f.getParentFile();
else
f = new File(f, first);
}
if (file.equals(".."))
return f.getParentFile();
else
return new File(f, file).getAbsoluteFile();
}
public static void delete(File f) {
f = f.getAbsoluteFile();
if (f.getParentFile() == null)
throw new IllegalArgumentException("Cannot recursively delete root for safety reasons");
if (f.isDirectory()) {
File[] subs = f.listFiles();
for (File sub : subs)
delete(sub);
}
f.delete();
}
public static long drain(InputStream in) throws IOException {
long result = 0;
byte[] buffer = new byte[10000];
try {
int size = in.read(buffer);
while (size > 0) {
result+=size;
size = in.read(buffer);
}
} finally {
in.close();
}
return result;
}
public void copy(Collection<?> c, OutputStream out) {
PrintStream ps = new PrintStream(out);
for (Object o : c) {
ps.println(o);
}
ps.flush();
}
public static Throwable close(Closeable in) {
try {
in.close();
return null;
} catch (Throwable e) {
return e;
}
}
public static URL toURL(String s, File base) throws MalformedURLException {
int n = s.indexOf(':');
if (n > 0 && n < 10) {
// is url
return new URL(s);
}
return getFile(base, s).toURI().toURL();
}
public static void store(Object o, File out) throws IOException {
store(o, out, "UTF-8");
}
public static void store(Object o, File out, String encoding) throws IOException {
FileOutputStream fout = new FileOutputStream(out);
try {
store(o, fout, encoding);
} finally {
fout.close();
}
}
public static void store(Object o, OutputStream fout) throws UnsupportedEncodingException, IOException {
store(o, fout, "UTF-8");
}
public static void store(Object o, OutputStream fout, String encoding)
throws UnsupportedEncodingException, IOException {
String s;
if (o == null)
s = "";
else
s = o.toString();
try {
fout.write(s.getBytes(encoding));
} finally {
fout.close();
}
}
}
| Added more convenience methods | aQute.libg/src/aQute/lib/io/IO.java | Added more convenience methods | <ide><path>Qute.libg/src/aQute/lib/io/IO.java
<ide> return new String(out.toByteArray(), encoding);
<ide> }
<ide>
<add> public static String collect(URL a) throws IOException {
<add> return collect(a, "UTF-8");
<add> }
<add>
<ide> public static String collect(File a) throws IOException {
<ide> return collect(a, "UTF-8");
<add> }
<add>
<add> public static String collect(String a) throws IOException {
<add> return collect(new File(a), "UTF-8");
<ide> }
<ide>
<ide> public static String collect(InputStream a, String encoding) throws IOException {
<ide> try {
<ide> int size = in.read(buffer);
<ide> while (size > 0) {
<del> result+=size;
<add> result += size;
<ide> size = in.read(buffer);
<ide> }
<ide> } finally {
<ide> }
<ide> }
<ide>
<del> public static void store(Object o, OutputStream fout) throws UnsupportedEncodingException, IOException {
<add> public static void store(Object o, OutputStream fout) throws UnsupportedEncodingException,
<add> IOException {
<ide> store(o, fout, "UTF-8");
<ide> }
<ide>
<ide> }
<ide> }
<ide>
<add> public static InputStream stream(String s) {
<add> try {
<add> return new ByteArrayInputStream(s.getBytes("UTF-8"));
<add> } catch(Exception e) {
<add> // Ignore
<add> return null;
<add> }
<add> }
<add>
<add> public static InputStream stream(String s, String encoding) throws UnsupportedEncodingException {
<add> return new ByteArrayInputStream(s.getBytes(encoding));
<add> }
<add>
<add> public static InputStream stream(File s) throws FileNotFoundException {
<add> return new FileInputStream(s);
<add> }
<add>
<add> public static InputStream stream(URL s) throws IOException {
<add> return s.openStream();
<add> }
<add>
<add>
<add> public static Reader reader(String s) {
<add> return new StringReader(s);
<add> }
<add>
<ide> } |
|
Java | agpl-3.0 | eeacd099a343ec0f03f73a3f4c2dcce21660c9a9 | 0 | opensourceBIM/BIMserver,opensourceBIM/BIMserver,opensourceBIM/BIMserver | package org.bimserver.webservices.impl;
/******************************************************************************
* Copyright (C) 2009-2019 BIMserver.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see {@literal<http://www.gnu.org/licenses/>}.
*****************************************************************************/
import org.bimserver.database.DatabaseSession;
import org.bimserver.database.OldQuery;
import org.bimserver.database.OperationType;
import org.bimserver.database.actions.BimDatabaseAction;
import org.bimserver.database.actions.ChangePasswordDatabaseAction;
import org.bimserver.database.actions.LoginDatabaseAction;
import org.bimserver.database.actions.LoginUserTokenDatabaseAction;
import org.bimserver.database.actions.RequestPasswordChangeDatabaseAction;
import org.bimserver.database.actions.ValidateUserDatabaseAction;
import org.bimserver.interfaces.objects.SAccessMethod;
import org.bimserver.interfaces.objects.SUser;
import org.bimserver.models.store.User;
import org.bimserver.shared.exceptions.ServerException;
import org.bimserver.shared.exceptions.UserException;
import org.bimserver.shared.interfaces.AuthInterface;
import org.bimserver.shared.interfaces.ServiceInterface;
import org.bimserver.webservices.ServiceMap;
public class AuthServiceImpl extends GenericServiceImpl implements AuthInterface {
public AuthServiceImpl(ServiceMap serviceMap) {
super(serviceMap);
}
@Override
public String login(String username, String password) throws ServerException, UserException {
DatabaseSession session = getBimServer().getDatabase().createSession(OperationType.POSSIBLY_WRITE);
try {
LoginDatabaseAction loginDatabaseAction = new LoginDatabaseAction(getBimServer(), session, getServiceMap(), super.getInternalAccessMethod(), username, password);
return session.executeAndCommitAction(loginDatabaseAction);
} catch (Exception e) {
return handleException(e);
} finally {
session.close();
}
}
@Override
public void logout() throws UserException {
requireAuthenticationAndRunningServer();
setAuthorization(null);
}
@Override
public Boolean isLoggedIn() {
return getAuthorization() != null;
}
@Override
public SAccessMethod getAccessMethod() {
return SAccessMethod.valueOf(getInternalAccessMethod().getName());
}
@Override
public String loginUserToken(String token) throws ServerException, UserException {
DatabaseSession session = getBimServer().getDatabase().createSession(OperationType.POSSIBLY_WRITE);
try {
LoginUserTokenDatabaseAction loginDatabaseAction = new LoginUserTokenDatabaseAction(getBimServer(), session, getServiceMap(), super.getInternalAccessMethod(), token);
return session.executeAndCommitAction(loginDatabaseAction);
} catch (Exception e) {
return handleException(e);
} finally {
session.close();
}
}
@Override
public SUser getLoggedInUser() throws ServerException, UserException {
requireAuthenticationAndRunningServer();
return getServiceMap().get(ServiceInterface.class).getUserByUoid(getAuthorization().getUoid());
}
@Override
public Boolean changePassword(Long uoid, String oldPassword, String newPassword) throws ServerException, UserException {
requireRealUserAuthentication();
DatabaseSession session = getBimServer().getDatabase().createSession();
try {
BimDatabaseAction<Boolean> action = new ChangePasswordDatabaseAction(getBimServer(), session, getInternalAccessMethod(), uoid, oldPassword, newPassword, getAuthorization());
return session.executeAndCommitAction(action);
} catch (Exception e) {
return handleException(e);
} finally {
session.close();
}
}
@Override
public void requestPasswordChange(String username, String resetUrl, Boolean includeSiteAddress) throws ServerException, UserException {
// No authentication required because you should be able to do this wihout logging in...
DatabaseSession session = getBimServer().getDatabase().createSession();
try {
BimDatabaseAction<Void> action = new RequestPasswordChangeDatabaseAction(session, getInternalAccessMethod(), getBimServer(), username, resetUrl, includeSiteAddress);
session.executeAndCommitAction(action);
} catch (Exception e) {
handleException(e);
} finally {
session.close();
}
}
@Override
public SUser validateAccount(Long uoid, String token, String password) throws ServerException, UserException {
DatabaseSession session = getBimServer().getDatabase().createSession();
try {
BimDatabaseAction<User> action = new ValidateUserDatabaseAction(session, getInternalAccessMethod(), uoid, token, password);
return getBimServer().getSConverter().convertToSObject(session.executeAndCommitAction(action));
} catch (Exception e) {
handleException(e);
} finally {
session.close();
}
return null;
}
@Override
public void setHash(Long uoid, byte[] hash, byte[] salt) throws ServerException, UserException {
requireAdminAuthentication();
DatabaseSession session = getBimServer().getDatabase().createSession();
try {
User user = session.get(uoid, OldQuery.getDefault());
user.setPasswordHash(hash);
user.setPasswordSalt(salt);
session.commit();
} catch (Exception e) {
handleException(e);
} finally {
session.close();
}
}
//
// @Override
// public String createToken(Integer validitySeconds) throws UserException, ServerException {
// Authorization authorization = null;
// SUser currentUser = getCurrentUser();
// if (currentUser.getUserType() == SUserType.ADMIN) {
// authorization = new AdminAuthorization(validitySeconds, TimeUnit.SECONDS);
// } else {
// authorization = new UserAuthorization(validitySeconds, TimeUnit.SECONDS);
// }
// authorization.setUoid(currentUser.getOid());
// String asHexToken = authorization.asHexToken(getBimServer().getEncryptionKey());
// return asHexToken;
// }
} | BimServer/src/org/bimserver/webservices/impl/AuthServiceImpl.java | package org.bimserver.webservices.impl;
/******************************************************************************
* Copyright (C) 2009-2019 BIMserver.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see {@literal<http://www.gnu.org/licenses/>}.
*****************************************************************************/
import org.bimserver.database.DatabaseSession;
import org.bimserver.database.OldQuery;
import org.bimserver.database.OperationType;
import org.bimserver.database.actions.BimDatabaseAction;
import org.bimserver.database.actions.ChangePasswordDatabaseAction;
import org.bimserver.database.actions.LoginDatabaseAction;
import org.bimserver.database.actions.LoginUserTokenDatabaseAction;
import org.bimserver.database.actions.RequestPasswordChangeDatabaseAction;
import org.bimserver.database.actions.ValidateUserDatabaseAction;
import org.bimserver.interfaces.objects.SAccessMethod;
import org.bimserver.interfaces.objects.SUser;
import org.bimserver.models.store.User;
import org.bimserver.shared.exceptions.ServerException;
import org.bimserver.shared.exceptions.UserException;
import org.bimserver.shared.interfaces.AuthInterface;
import org.bimserver.shared.interfaces.ServiceInterface;
import org.bimserver.webservices.ServiceMap;
public class AuthServiceImpl extends GenericServiceImpl implements AuthInterface {
public AuthServiceImpl(ServiceMap serviceMap) {
super(serviceMap);
}
@Override
public String login(String username, String password) throws ServerException, UserException {
DatabaseSession session = getBimServer().getDatabase().createSession(OperationType.POSSIBLY_WRITE);
try {
LoginDatabaseAction loginDatabaseAction = new LoginDatabaseAction(getBimServer(), session, getServiceMap(), super.getInternalAccessMethod(), username, password);
return session.executeAndCommitAction(loginDatabaseAction);
} catch (Exception e) {
return handleException(e);
} finally {
session.close();
}
}
@Override
public void logout() throws UserException {
requireAuthenticationAndRunningServer();
setAuthorization(null);
}
@Override
public Boolean isLoggedIn() {
return getAuthorization() != null;
}
@Override
public SAccessMethod getAccessMethod() {
return SAccessMethod.valueOf(getInternalAccessMethod().getName());
}
@Override
public String loginUserToken(String token) throws ServerException, UserException {
DatabaseSession session = getBimServer().getDatabase().createSession(OperationType.POSSIBLY_WRITE);
try {
LoginUserTokenDatabaseAction loginDatabaseAction = new LoginUserTokenDatabaseAction(getBimServer(), session, getServiceMap(), super.getInternalAccessMethod(), token);
return session.executeAndCommitAction(loginDatabaseAction);
} catch (Exception e) {
return handleException(e);
} finally {
session.close();
}
}
@Override
public SUser getLoggedInUser() throws ServerException, UserException {
requireAuthenticationAndRunningServer();
return getServiceMap().get(ServiceInterface.class).getUserByUoid(getAuthorization().getUoid());
}
@Override
public Boolean changePassword(Long uoid, String oldPassword, String newPassword) throws ServerException, UserException {
requireRealUserAuthentication();
DatabaseSession session = getBimServer().getDatabase().createSession();
try {
BimDatabaseAction<Boolean> action = new ChangePasswordDatabaseAction(getBimServer(), session, getInternalAccessMethod(), uoid, oldPassword, newPassword, getAuthorization());
return session.executeAndCommitAction(action);
} catch (Exception e) {
return handleException(e);
} finally {
session.close();
}
}
@Override
public void requestPasswordChange(String username, String resetUrl, Boolean includeSiteAddress) throws ServerException, UserException {
// No authentication required because you should be able to do this wihout logging in...
DatabaseSession session = getBimServer().getDatabase().createSession();
try {
BimDatabaseAction<Void> action = new RequestPasswordChangeDatabaseAction(session, getInternalAccessMethod(), getBimServer(), username, resetUrl, includeSiteAddress);
session.executeAndCommitAction(action);
} catch (Exception e) {
handleException(e);
} finally {
session.close();
}
}
@Override
public SUser validateAccount(Long uoid, String token, String password) throws ServerException, UserException {
DatabaseSession session = getBimServer().getDatabase().createReadOnlySession();
try {
BimDatabaseAction<User> action = new ValidateUserDatabaseAction(session, getInternalAccessMethod(), uoid, token, password);
return getBimServer().getSConverter().convertToSObject(session.executeAndCommitAction(action));
} catch (Exception e) {
handleException(e);
} finally {
session.close();
}
return null;
}
@Override
public void setHash(Long uoid, byte[] hash, byte[] salt) throws ServerException, UserException {
requireAdminAuthentication();
DatabaseSession session = getBimServer().getDatabase().createSession();
try {
User user = session.get(uoid, OldQuery.getDefault());
user.setPasswordHash(hash);
user.setPasswordSalt(salt);
session.commit();
} catch (Exception e) {
handleException(e);
} finally {
session.close();
}
}
//
// @Override
// public String createToken(Integer validitySeconds) throws UserException, ServerException {
// Authorization authorization = null;
// SUser currentUser = getCurrentUser();
// if (currentUser.getUserType() == SUserType.ADMIN) {
// authorization = new AdminAuthorization(validitySeconds, TimeUnit.SECONDS);
// } else {
// authorization = new UserAuthorization(validitySeconds, TimeUnit.SECONDS);
// }
// authorization.setUoid(currentUser.getOid());
// String asHexToken = authorization.asHexToken(getBimServer().getEncryptionKey());
// return asHexToken;
// }
} | validateAccount needs non-readonly session | BimServer/src/org/bimserver/webservices/impl/AuthServiceImpl.java | validateAccount needs non-readonly session | <ide><path>imServer/src/org/bimserver/webservices/impl/AuthServiceImpl.java
<ide>
<ide> @Override
<ide> public SUser validateAccount(Long uoid, String token, String password) throws ServerException, UserException {
<del> DatabaseSession session = getBimServer().getDatabase().createReadOnlySession();
<add> DatabaseSession session = getBimServer().getDatabase().createSession();
<ide> try {
<ide> BimDatabaseAction<User> action = new ValidateUserDatabaseAction(session, getInternalAccessMethod(), uoid, token, password);
<ide> return getBimServer().getSConverter().convertToSObject(session.executeAndCommitAction(action)); |
|
Java | apache-2.0 | b25fb653b866a8d252ab98c70b42e69bb79b5ef5 | 0 | CloudSlang/score,CloudSlang/score | /*
* Copyright © 2014-2017 EntIT Software LLC, a Micro Focus company (L.P.)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.runtime.impl.python;
import io.cloudslang.runtime.api.python.PythonEvaluationResult;
import io.cloudslang.runtime.api.python.PythonExecutionResult;
import io.cloudslang.runtime.impl.Executor;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.python.core.Py;
import org.python.core.PyBoolean;
import org.python.core.PyClass;
import org.python.core.PyException;
import org.python.core.PyFile;
import org.python.core.PyFunction;
import org.python.core.PyModule;
import org.python.core.PyObject;
import org.python.core.PyReflectedFunction;
import org.python.core.PyString;
import org.python.core.PyStringMap;
import org.python.core.PySystemState;
import org.python.core.PyType;
import org.python.util.PythonInterpreter;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Created by Genadi Rabinovich, [email protected] on 05/05/2016.
*/
public class PythonExecutor implements Executor {
public static final String THREADED_MODULES_ISSUE = "No module named";
private static final Logger logger = Logger.getLogger(PythonExecutor.class);
private static final String TRUE = "true";
private static final String FALSE = "false";
private static final PythonInterpreter GLOBAL_INTERPRETER = new ThreadSafePythonInterpreter(null);
/**
* There is an issue in loaded environment - existing python module not found in PySystem.modules.table
* although it exists in the table.
* Meanwhile we execute retries ans will open an issue to the jython.org
*/
public static final int RETRIES_NUMBER_ON_THREADED_ISSUE = 3;
public static final int MAX_LENGTH = Integer.getInteger("input.error.max.length", 1000);
static {
//here to avoid jython preferring io.cloudslang package over python io package
GLOBAL_INTERPRETER.exec("import io");
}
private final PythonInterpreter interpreter;
private final Lock allocationLock = new ReentrantLock();
private int allocations = 0;
//Executor marked to be actuallyClosed. Executor may be still in use thus we don't close it immediately
private boolean markedClosed = false;
//Executor was finally actuallyClosed
private boolean actuallyClosed = false;
private final Set<String> dependencies;
public PythonExecutor() {
this(Collections.<String>emptySet());
}
public PythonExecutor(Set<String> dependencies) {
this.dependencies = dependencies;
interpreter = initInterpreter(dependencies);
}
protected PythonInterpreter initInterpreter(Set<String> dependencies) {
logger.info("Creating python interpreter with [" + dependencies.size() + "] dependencies [" + dependencies + "]");
if(!dependencies.isEmpty()) {
PySystemState systemState = new PySystemState();
for (String dependency: dependencies) {
systemState.path.append(new PyString(dependency));
}
return new ThreadSafePythonInterpreter(systemState);
}
return GLOBAL_INTERPRETER;
}
//we need this method to be synchronized so we will not have multiple scripts run in parallel on the same context
public PythonExecutionResult exec(String script, Map<String, Serializable> callArguments) {
checkValidInterpreter();
initInterpreter();
prepareInterpreterContext(callArguments);
Exception originException = null;
for(int i = 0; i < RETRIES_NUMBER_ON_THREADED_ISSUE; i++) {
try {
return exec(script);
} catch (Exception e) {
if(!isThreadsRelatedModuleIssue(e)) {
throw new RuntimeException("Error executing python script: " + e, e);
}
if(originException == null) {
originException = e;
}
}
}
throw new RuntimeException("Error executing python script: " + originException, originException);
}
private boolean isThreadsRelatedModuleIssue(Exception e) {
if (e instanceof PyException) {
PyException pyException = (PyException) e;
String message = pyException.value.toString();
return message.contains(THREADED_MODULES_ISSUE);
}
return false;
}
private PythonExecutionResult exec(String script) {
interpreter.exec(script);
Iterator<PyObject> localsIterator = interpreter.getLocals().asIterable().iterator();
Map<String, Serializable> returnValue = new HashMap<>();
while (localsIterator.hasNext()) {
String key = localsIterator.next().asString();
PyObject value = interpreter.get(key);
if (keyIsExcluded(key, value)) {
continue;
}
Serializable javaValue = resolveJythonObjectToJavaExec(value, key);
returnValue.put(key, javaValue);
}
return new PythonExecutionResult(returnValue);
}
private Map<String, Serializable> getPythonLocals() {
Map<String, Serializable> result = new HashMap<>();
if(interpreter.getLocals() != null) {
for (PyObject pyObject : interpreter.getLocals().asIterable()) {
String key = pyObject.asString();
PyObject value = interpreter.get(key);
if (keyIsExcluded(key, value)) {
continue;
}
result.put(key, value);
}
}
return result;
}
public PythonEvaluationResult eval(String prepareEnvironmentScript, String expr, Map<String, Serializable> context) {
checkValidInterpreter();
try {
initInterpreter();
prepareInterpreterContext(context);
return new PythonEvaluationResult(eval(prepareEnvironmentScript, expr), getPythonLocals());
} catch (PyException exception) {
throw new RuntimeException("Error in running script expression: '" +
getTruncatedExpression(expr) + "',\n\tException is: " +
handleExceptionSpecialCases(exception.value.toString()), exception);
} catch (Exception exception) {
throw new RuntimeException("Error in running script expression: '" +
getTruncatedExpression(expr) + "',\n\tException is: " +
handleExceptionSpecialCases(exception.getMessage()), exception);
}
}
private String getTruncatedExpression(String expr) {
return expr.length() > MAX_LENGTH ? expr.substring(0, MAX_LENGTH) + "..." : expr;
}
private String handleExceptionSpecialCases(String message) {
String processedMessage = message;
if (StringUtils.isNotEmpty(message) && message.contains("get_sp") && message.contains("not defined")) {
processedMessage = message + ". Make sure to use correct syntax for the function: get_sp('fully.qualified.name', optional_default_value).";
}
return processedMessage;
}
private void checkValidInterpreter() {
if(isClosed()) {
throw new RuntimeException("Trying to execute script on already closed python interpreter");
}
}
protected Serializable eval(String prepareEnvironmentScript, String script) {
if (interpreter.get(TRUE) == null) {
interpreter.set(TRUE, Boolean.TRUE);
}
if (interpreter.get(FALSE) == null) {
interpreter.set(FALSE, Boolean.FALSE);
}
if(prepareEnvironmentScript != null && !prepareEnvironmentScript.isEmpty()) {
interpreter.exec(prepareEnvironmentScript);
}
PyObject evalResultAsPyObject = interpreter.eval(script);
Serializable evalResult;
evalResult = resolveJythonObjectToJavaEval(evalResultAsPyObject, script);
return evalResult;
}
@Override
public void allocate() {
allocationLock.lock();
try {
allocations++;
} finally {
allocationLock.unlock();
}
}
@Override
public void release() {
allocationLock.lock();
try {
allocations--;
if(markedClosed && (allocations == 0)) {
close();
}
} finally {
allocationLock.unlock();
}
}
@Override
public void close() {
allocationLock.lock();
try {
markedClosed = true;
if ((interpreter != GLOBAL_INTERPRETER) && (allocations == 0)) {
logger.info("Removing LRU python executor for dependencies [" + dependencies + "]");
try {interpreter.close();} catch (Throwable e) {}
actuallyClosed = true;
}
} finally {
allocationLock.unlock();
}
}
public boolean isClosed() {
return actuallyClosed;
}
private void initInterpreter() {
interpreter.setLocals(new PyStringMap());
}
private void prepareInterpreterContext(Map<String, Serializable> context) {
for (Map.Entry<String, Serializable> entry : context.entrySet()) {
interpreter.set(entry.getKey(), entry.getValue());
}
}
private Serializable resolveJythonObjectToJavaExec(PyObject value, String key) {
String errorMessage =
"Non-serializable values are not allowed in the output context of a Python script:\n" +
"\tConversion failed for '" + key + "' (" + value + "),\n" +
"\tThe error can be solved by removing the variable from the context in the script: e.g. 'del " + key + "'.\n";
return resolveJythonObjectToJava(value, errorMessage);
}
private Serializable resolveJythonObjectToJavaEval(PyObject value, String expression) {
String errorMessage =
"Evaluation result for a Python expression should be serializable:\n" +
"\tConversion failed for '" + expression + "' (" + value + ").\n";
return resolveJythonObjectToJava(value, errorMessage);
}
private Serializable resolveJythonObjectToJava(PyObject value, String errorMessage) {
if (value == null) {
return null;
}
if (value instanceof PyBoolean) {
PyBoolean pyBoolean = (PyBoolean) value;
return pyBoolean.getBooleanValue();
}
try {
return Py.tojava(value, Serializable.class);
} catch (PyException e) {
PyObject typeObject = e.type;
if (typeObject instanceof PyType) {
PyType type = (PyType) typeObject;
String typeName = type.getName();
if ("TypeError".equals(typeName)) {
throw new RuntimeException(errorMessage, e);
}
}
throw e;
}
}
private boolean keyIsExcluded(String key, PyObject value) {
return (key.startsWith("__") && key.endsWith("__")) ||
value instanceof PyFile ||
value instanceof PyModule ||
value instanceof PyFunction ||
value instanceof PySystemState ||
value instanceof PyClass ||
value instanceof PyType ||
value instanceof PyReflectedFunction;
}
private static class ThreadSafePythonInterpreter extends PythonInterpreter {
ThreadSafePythonInterpreter() {
this(null);
}
ThreadSafePythonInterpreter(PySystemState systemState) {
super(null, systemState, true);
}
}
}
| runtime-management/runtime-management-impl/src/main/java/io/cloudslang/runtime/impl/python/PythonExecutor.java | /*
* Copyright © 2014-2017 EntIT Software LLC, a Micro Focus company (L.P.)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cloudslang.runtime.impl.python;
import io.cloudslang.runtime.api.python.PythonEvaluationResult;
import io.cloudslang.runtime.api.python.PythonExecutionResult;
import io.cloudslang.runtime.impl.Executor;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.python.core.*;
import org.python.util.PythonInterpreter;
import java.io.Serializable;
import java.util.*;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Created by Genadi Rabinovich, [email protected] on 05/05/2016.
*/
public class PythonExecutor implements Executor {
public static final String THREADED_MODULES_ISSUE = "No module named";
private static final Logger logger = Logger.getLogger(PythonExecutor.class);
private static final String TRUE = "true";
private static final String FALSE = "false";
private static final PythonInterpreter GLOBAL_INTERPRETER = new ThreadSafePythonInterpreter(null);
/**
* There is an issue in loaded environment - existing python module not found in PySystem.modules.table
* although it exists in the table.
* Meanwhile we execute retries ans will open an issue to the jython.org
*/
public static final int RETRIES_NUMBER_ON_THREADED_ISSUE = 3;
public static final int MAX_LENGTH = Integer.getInteger("input.error.max.length", 1000);
static {
//here to avoid jython preferring io.cloudslang package over python io package
GLOBAL_INTERPRETER.exec("import io");
}
private final PythonInterpreter interpreter;
private final Lock allocationLock = new ReentrantLock();
private int allocations = 0;
//Executor marked to be actuallyClosed. Executor may be still in use thus we don't close it immediately
private boolean markedClosed = false;
//Executor was finally actuallyClosed
private boolean actuallyClosed = false;
private final Set<String> dependencies;
public PythonExecutor() {
this(Collections.<String>emptySet());
}
public PythonExecutor(Set<String> dependencies) {
this.dependencies = dependencies;
interpreter = initInterpreter(dependencies);
}
protected PythonInterpreter initInterpreter(Set<String> dependencies) {
logger.info("Creating python interpreter with [" + dependencies.size() + "] dependencies [" + dependencies + "]");
if(!dependencies.isEmpty()) {
PySystemState systemState = new PySystemState();
for (String dependency: dependencies) {
systemState.path.append(new PyString(dependency));
}
return new ThreadSafePythonInterpreter(systemState);
}
return GLOBAL_INTERPRETER;
}
//we need this method to be synchronized so we will not have multiple scripts run in parallel on the same context
public PythonExecutionResult exec(String script, Map<String, Serializable> callArguments) {
checkValidInterpreter();
initInterpreter();
prepareInterpreterContext(callArguments);
Exception originException = null;
for(int i = 0; i < RETRIES_NUMBER_ON_THREADED_ISSUE; i++) {
try {
return exec(script);
} catch (Exception e) {
if(!isThreadsRelatedModuleIssue(e)) {
throw new RuntimeException("Error executing python script: " + e, e);
}
if(originException == null) {
originException = e;
}
}
}
throw new RuntimeException("Error executing python script: " + originException, originException);
}
private boolean isThreadsRelatedModuleIssue(Exception e) {
if (e instanceof PyException) {
PyException pyException = (PyException) e;
String message = pyException.value.toString();
return message.contains(THREADED_MODULES_ISSUE);
}
return false;
}
private PythonExecutionResult exec(String script) {
interpreter.exec(script);
Iterator<PyObject> localsIterator = interpreter.getLocals().asIterable().iterator();
Map<String, Serializable> returnValue = new HashMap<>();
while (localsIterator.hasNext()) {
String key = localsIterator.next().asString();
PyObject value = interpreter.get(key);
if (keyIsExcluded(key, value)) {
continue;
}
Serializable javaValue = resolveJythonObjectToJavaExec(value, key);
returnValue.put(key, javaValue);
}
return new PythonExecutionResult(returnValue);
}
private Map<String, Serializable> getPythonLocals() {
Map<String, Serializable> result = new HashMap<>();
if(interpreter.getLocals() != null) {
for (PyObject pyObject : interpreter.getLocals().asIterable()) {
String key = pyObject.asString();
PyObject value = interpreter.get(key);
if (keyIsExcluded(key, value)) {
continue;
}
result.put(key, value);
}
}
return result;
}
public PythonEvaluationResult eval(String prepareEnvironmentScript, String expr, Map<String, Serializable> context) {
checkValidInterpreter();
try {
initInterpreter();
prepareInterpreterContext(context);
return new PythonEvaluationResult(eval(prepareEnvironmentScript, expr), getPythonLocals());
} catch (PyException exception) {
throw new RuntimeException("Error in running script expression: '" +
getTruncatedExpression(expr) + "',\n\tException is: " +
handleExceptionSpecialCases(exception.value.toString()), exception);
} catch (Exception exception) {
throw new RuntimeException("Error in running script expression: '" +
getTruncatedExpression(expr) + "',\n\tException is: " +
handleExceptionSpecialCases(exception.getMessage()), exception);
}
}
private String getTruncatedExpression(String expr) {
return expr.length() > MAX_LENGTH ? expr.substring(0, MAX_LENGTH) + "..." : expr;
}
private String handleExceptionSpecialCases(String message) {
String processedMessage = message;
if (StringUtils.isNotEmpty(message) && message.contains("get_sp") && message.contains("not defined")) {
processedMessage = message + ". Make sure to use correct syntax for the function: get_sp('fully.qualified.name', optional_default_value).";
}
return processedMessage;
}
private void checkValidInterpreter() {
if(isClosed()) {
throw new RuntimeException("Trying to execute script on already closed python interpreter");
}
}
protected Serializable eval(String prepareEnvironmentScript, String script) {
if (interpreter.get(TRUE) == null) {
interpreter.set(TRUE, Boolean.TRUE);
}
if (interpreter.get(FALSE) == null) {
interpreter.set(FALSE, Boolean.FALSE);
}
if(prepareEnvironmentScript != null && !prepareEnvironmentScript.isEmpty()) {
interpreter.exec(prepareEnvironmentScript);
}
PyObject evalResultAsPyObject = interpreter.eval(script);
Serializable evalResult;
evalResult = resolveJythonObjectToJavaEval(evalResultAsPyObject, script);
return evalResult;
}
@Override
public void allocate() {
allocationLock.lock();
try {
allocations++;
} finally {
allocationLock.unlock();
}
}
@Override
public void release() {
allocationLock.lock();
try {
allocations--;
if(markedClosed && (allocations == 0)) {
close();
}
} finally {
allocationLock.unlock();
}
}
@Override
public void close() {
allocationLock.lock();
try {
markedClosed = true;
if ((interpreter != GLOBAL_INTERPRETER) && (allocations == 0)) {
logger.info("Removing LRU python executor for dependencies [" + dependencies + "]");
try {interpreter.close();} catch (Throwable e) {}
actuallyClosed = true;
}
} finally {
allocationLock.unlock();
}
}
public boolean isClosed() {
return actuallyClosed;
}
private void initInterpreter() {
interpreter.setLocals(new PyStringMap());
}
private void prepareInterpreterContext(Map<String, Serializable> context) {
for (Map.Entry<String, Serializable> entry : context.entrySet()) {
interpreter.set(entry.getKey(), entry.getValue());
}
}
private Serializable resolveJythonObjectToJavaExec(PyObject value, String key) {
String errorMessage =
"Non-serializable values are not allowed in the output context of a Python script:\n" +
"\tConversion failed for '" + key + "' (" + value + "),\n" +
"\tThe error can be solved by removing the variable from the context in the script: e.g. 'del " + key + "'.\n";
return resolveJythonObjectToJava(value, errorMessage);
}
private Serializable resolveJythonObjectToJavaEval(PyObject value, String expression) {
String errorMessage =
"Evaluation result for a Python expression should be serializable:\n" +
"\tConversion failed for '" + expression + "' (" + value + ").\n";
return resolveJythonObjectToJava(value, errorMessage);
}
private Serializable resolveJythonObjectToJava(PyObject value, String errorMessage) {
if (value == null) {
return null;
}
if (value instanceof PyBoolean) {
PyBoolean pyBoolean = (PyBoolean) value;
return pyBoolean.getBooleanValue();
}
try {
return Py.tojava(value, Serializable.class);
} catch (PyException e) {
PyObject typeObject = e.type;
if (typeObject instanceof PyType) {
PyType type = (PyType) typeObject;
String typeName = type.getName();
if ("TypeError".equals(typeName)) {
throw new RuntimeException(errorMessage, e);
}
}
throw e;
}
}
private boolean keyIsExcluded(String key, PyObject value) {
return (key.startsWith("__") && key.endsWith("__")) ||
value instanceof PyFile ||
value instanceof PyModule ||
value instanceof PyFunction ||
value instanceof PySystemState ||
value instanceof PyClass;
}
private static class ThreadSafePythonInterpreter extends PythonInterpreter {
ThreadSafePythonInterpreter() {
this(null);
}
ThreadSafePythonInterpreter(PySystemState systemState) {
super(null, systemState, true);
}
}
}
| python context filtering
Signed-off-by: Levente Bonczidai <[email protected]>
| runtime-management/runtime-management-impl/src/main/java/io/cloudslang/runtime/impl/python/PythonExecutor.java | python context filtering | <ide><path>untime-management/runtime-management-impl/src/main/java/io/cloudslang/runtime/impl/python/PythonExecutor.java
<ide> import io.cloudslang.runtime.impl.Executor;
<ide> import org.apache.commons.lang.StringUtils;
<ide> import org.apache.log4j.Logger;
<del>import org.python.core.*;
<add>import org.python.core.Py;
<add>import org.python.core.PyBoolean;
<add>import org.python.core.PyClass;
<add>import org.python.core.PyException;
<add>import org.python.core.PyFile;
<add>import org.python.core.PyFunction;
<add>import org.python.core.PyModule;
<add>import org.python.core.PyObject;
<add>import org.python.core.PyReflectedFunction;
<add>import org.python.core.PyString;
<add>import org.python.core.PyStringMap;
<add>import org.python.core.PySystemState;
<add>import org.python.core.PyType;
<ide> import org.python.util.PythonInterpreter;
<ide>
<ide> import java.io.Serializable;
<del>import java.util.*;
<add>import java.util.Collections;
<add>import java.util.HashMap;
<add>import java.util.Iterator;
<add>import java.util.Map;
<add>import java.util.Set;
<ide> import java.util.concurrent.locks.Lock;
<ide> import java.util.concurrent.locks.ReentrantLock;
<ide>
<ide> value instanceof PyModule ||
<ide> value instanceof PyFunction ||
<ide> value instanceof PySystemState ||
<del> value instanceof PyClass;
<add> value instanceof PyClass ||
<add> value instanceof PyType ||
<add> value instanceof PyReflectedFunction;
<ide> }
<ide>
<ide> private static class ThreadSafePythonInterpreter extends PythonInterpreter { |
|
Java | apache-2.0 | f5d307e426ae75f24f45949bc4da9aa3197a7ae0 | 0 | C-Bish/elasticsearch,gingerwizard/elasticsearch,artnowo/elasticsearch,mjason3/elasticsearch,xuzha/elasticsearch,LeoYao/elasticsearch,i-am-Nathan/elasticsearch,avikurapati/elasticsearch,markharwood/elasticsearch,qwerty4030/elasticsearch,sneivandt/elasticsearch,davidvgalbraith/elasticsearch,mikemccand/elasticsearch,ESamir/elasticsearch,camilojd/elasticsearch,spiegela/elasticsearch,polyfractal/elasticsearch,njlawton/elasticsearch,liweinan0423/elasticsearch,davidvgalbraith/elasticsearch,winstonewert/elasticsearch,JSCooke/elasticsearch,sreeramjayan/elasticsearch,ZTE-PaaS/elasticsearch,cwurm/elasticsearch,nomoa/elasticsearch,ThiagoGarciaAlves/elasticsearch,fred84/elasticsearch,nezirus/elasticsearch,lks21c/elasticsearch,LeoYao/elasticsearch,jimczi/elasticsearch,i-am-Nathan/elasticsearch,fforbeck/elasticsearch,tebriel/elasticsearch,kalimatas/elasticsearch,yynil/elasticsearch,vroyer/elasticassandra,mjason3/elasticsearch,lks21c/elasticsearch,ivansun1010/elasticsearch,mapr/elasticsearch,mapr/elasticsearch,camilojd/elasticsearch,StefanGor/elasticsearch,awislowski/elasticsearch,cwurm/elasticsearch,yynil/elasticsearch,JackyMai/elasticsearch,rajanm/elasticsearch,gfyoung/elasticsearch,strapdata/elassandra,rlugojr/elasticsearch,gfyoung/elasticsearch,trangvh/elasticsearch,mmaracic/elasticsearch,ZTE-PaaS/elasticsearch,sneivandt/elasticsearch,uschindler/elasticsearch,uschindler/elasticsearch,dongjoon-hyun/elasticsearch,gmarz/elasticsearch,njlawton/elasticsearch,bawse/elasticsearch,palecur/elasticsearch,jchampion/elasticsearch,masaruh/elasticsearch,obourgain/elasticsearch,dpursehouse/elasticsearch,masaruh/elasticsearch,girirajsharma/elasticsearch,strapdata/elassandra5-rc,martinstuga/elasticsearch,ivansun1010/elasticsearch,nilabhsagar/elasticsearch,martinstuga/elasticsearch,henakamaMSFT/elasticsearch,JackyMai/elasticsearch,rajanm/elasticsearch,diendt/elasticsearch,mmaracic/elasticsearch,myelin/elasticsearch,ESamir/elastic
search,lks21c/elasticsearch,JervyShi/elasticsearch,jchampion/elasticsearch,mikemccand/elasticsearch,strapdata/elassandra,ZTE-PaaS/elasticsearch,gfyoung/elasticsearch,jprante/elasticsearch,s1monw/elasticsearch,Stacey-Gammon/elasticsearch,gingerwizard/elasticsearch,ricardocerq/elasticsearch,palecur/elasticsearch,camilojd/elasticsearch,rajanm/elasticsearch,glefloch/elasticsearch,jbertouch/elasticsearch,henakamaMSFT/elasticsearch,mikemccand/elasticsearch,liweinan0423/elasticsearch,zkidkid/elasticsearch,MisterAndersen/elasticsearch,ThiagoGarciaAlves/elasticsearch,uschindler/elasticsearch,nomoa/elasticsearch,jprante/elasticsearch,LeoYao/elasticsearch,sneivandt/elasticsearch,gmarz/elasticsearch,ricardocerq/elasticsearch,trangvh/elasticsearch,trangvh/elasticsearch,diendt/elasticsearch,zkidkid/elasticsearch,xuzha/elasticsearch,winstonewert/elasticsearch,nazarewk/elasticsearch,wangtuo/elasticsearch,bawse/elasticsearch,qwerty4030/elasticsearch,elasticdog/elasticsearch,girirajsharma/elasticsearch,jpountz/elasticsearch,nomoa/elasticsearch,umeshdangat/elasticsearch,jprante/elasticsearch,MisterAndersen/elasticsearch,njlawton/elasticsearch,gingerwizard/elasticsearch,markharwood/elasticsearch,vroyer/elassandra,wenpos/elasticsearch,fforbeck/elasticsearch,GlenRSmith/elasticsearch,episerver/elasticsearch,markharwood/elasticsearch,i-am-Nathan/elasticsearch,a2lin/elasticsearch,vroyer/elasticassandra,myelin/elasticsearch,GlenRSmith/elasticsearch,palecur/elasticsearch,ESamir/elasticsearch,JSCooke/elasticsearch,ivansun1010/elasticsearch,mortonsykes/elasticsearch,episerver/elasticsearch,maddin2016/elasticsearch,markwalkom/elasticsearch,martinstuga/elasticsearch,yynil/elasticsearch,dpursehouse/elasticsearch,clintongormley/elasticsearch,wuranbo/elasticsearch,artnowo/elasticsearch,jbertouch/elasticsearch,polyfractal/elasticsearch,diendt/elasticsearch,wuranbo/elasticsearch,jpountz/elasticsearch,jimczi/elasticsearch,awislowski/elasticsearch,nazarewk/elasticsearch,camilojd/elasticsearch,geidies/el
asticsearch,fernandozhu/elasticsearch,ESamir/elasticsearch,glefloch/elasticsearch,LewayneNaidoo/elasticsearch,HonzaKral/elasticsearch,JackyMai/elasticsearch,wenpos/elasticsearch,awislowski/elasticsearch,ricardocerq/elasticsearch,dongjoon-hyun/elasticsearch,Helen-Zhao/elasticsearch,gmarz/elasticsearch,Helen-Zhao/elasticsearch,mjason3/elasticsearch,zkidkid/elasticsearch,JackyMai/elasticsearch,markharwood/elasticsearch,clintongormley/elasticsearch,polyfractal/elasticsearch,JervyShi/elasticsearch,wangtuo/elasticsearch,mmaracic/elasticsearch,Shepard1212/elasticsearch,shreejay/elasticsearch,alexshadow007/elasticsearch,ThiagoGarciaAlves/elasticsearch,nknize/elasticsearch,girirajsharma/elasticsearch,StefanGor/elasticsearch,a2lin/elasticsearch,fforbeck/elasticsearch,alexshadow007/elasticsearch,nilabhsagar/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,martinstuga/elasticsearch,obourgain/elasticsearch,pozhidaevak/elasticsearch,geidies/elasticsearch,clintongormley/elasticsearch,mohit/elasticsearch,scorpionvicky/elasticsearch,GlenRSmith/elasticsearch,jbertouch/elasticsearch,sreeramjayan/elasticsearch,markwalkom/elasticsearch,coding0011/elasticsearch,gingerwizard/elasticsearch,cwurm/elasticsearch,C-Bish/elasticsearch,pozhidaevak/elasticsearch,sreeramjayan/elasticsearch,mohit/elasticsearch,C-Bish/elasticsearch,alexshadow007/elasticsearch,camilojd/elasticsearch,njlawton/elasticsearch,nomoa/elasticsearch,shreejay/elasticsearch,artnowo/elasticsearch,rajanm/elasticsearch,yanjunh/elasticsearch,davidvgalbraith/elasticsearch,yynil/elasticsearch,brandonkearby/elasticsearch,robin13/elasticsearch,henakamaMSFT/elasticsearch,brandonkearby/elasticsearch,mmaracic/elasticsearch,MaineC/elasticsearch,geidies/elasticsearch,Helen-Zhao/elasticsearch,clintongormley/elasticsearch,markwalkom/elasticsearch,zkidkid/elasticsearch,tebriel/elasticsearch,myelin/elasticsearch,wenpos/elasticsearch,scottsom/elasticsearch,obourgain/elasticsearch,mjason3/elasticsearch,martinstuga/elasticsearch,brand
onkearby/elasticsearch,JervyShi/elasticsearch,fred84/elasticsearch,HonzaKral/elasticsearch,coding0011/elasticsearch,clintongormley/elasticsearch,sreeramjayan/elasticsearch,kalimatas/elasticsearch,nazarewk/elasticsearch,maddin2016/elasticsearch,winstonewert/elasticsearch,Shepard1212/elasticsearch,strapdata/elassandra,lks21c/elasticsearch,wangtuo/elasticsearch,episerver/elasticsearch,LeoYao/elasticsearch,elasticdog/elasticsearch,scottsom/elasticsearch,scottsom/elasticsearch,mikemccand/elasticsearch,wangtuo/elasticsearch,mortonsykes/elasticsearch,xuzha/elasticsearch,robin13/elasticsearch,sneivandt/elasticsearch,alexshadow007/elasticsearch,bawse/elasticsearch,dongjoon-hyun/elasticsearch,yynil/elasticsearch,jchampion/elasticsearch,JSCooke/elasticsearch,fred84/elasticsearch,LewayneNaidoo/elasticsearch,strapdata/elassandra5-rc,scottsom/elasticsearch,nknize/elasticsearch,masaruh/elasticsearch,wenpos/elasticsearch,IanvsPoplicola/elasticsearch,geidies/elasticsearch,nilabhsagar/elasticsearch,LewayneNaidoo/elasticsearch,episerver/elasticsearch,palecur/elasticsearch,ivansun1010/elasticsearch,glefloch/elasticsearch,strapdata/elassandra5-rc,shreejay/elasticsearch,ricardocerq/elasticsearch,tebriel/elasticsearch,dpursehouse/elasticsearch,liweinan0423/elasticsearch,IanvsPoplicola/elasticsearch,naveenhooda2000/elasticsearch,trangvh/elasticsearch,nknize/elasticsearch,diendt/elasticsearch,HonzaKral/elasticsearch,maddin2016/elasticsearch,jchampion/elasticsearch,s1monw/elasticsearch,clintongormley/elasticsearch,nezirus/elasticsearch,maddin2016/elasticsearch,nilabhsagar/elasticsearch,umeshdangat/elasticsearch,jimczi/elasticsearch,avikurapati/elasticsearch,xuzha/elasticsearch,MaineC/elasticsearch,GlenRSmith/elasticsearch,C-Bish/elasticsearch,JSCooke/elasticsearch,elasticdog/elasticsearch,MisterAndersen/elasticsearch,polyfractal/elasticsearch,mikemccand/elasticsearch,mjason3/elasticsearch,ESamir/elasticsearch,nezirus/elasticsearch,kalimatas/elasticsearch,vroyer/elassandra,polyfractal/elastic
search,jimczi/elasticsearch,davidvgalbraith/elasticsearch,naveenhooda2000/elasticsearch,henakamaMSFT/elasticsearch,masaruh/elasticsearch,umeshdangat/elasticsearch,jprante/elasticsearch,jpountz/elasticsearch,artnowo/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,liweinan0423/elasticsearch,jbertouch/elasticsearch,martinstuga/elasticsearch,coding0011/elasticsearch,ESamir/elasticsearch,mohit/elasticsearch,nazarewk/elasticsearch,Helen-Zhao/elasticsearch,polyfractal/elasticsearch,shreejay/elasticsearch,MisterAndersen/elasticsearch,nilabhsagar/elasticsearch,scottsom/elasticsearch,nezirus/elasticsearch,qwerty4030/elasticsearch,scorpionvicky/elasticsearch,yanjunh/elasticsearch,scorpionvicky/elasticsearch,IanvsPoplicola/elasticsearch,kalimatas/elasticsearch,ZTE-PaaS/elasticsearch,girirajsharma/elasticsearch,ivansun1010/elasticsearch,davidvgalbraith/elasticsearch,fernandozhu/elasticsearch,rlugojr/elasticsearch,umeshdangat/elasticsearch,scorpionvicky/elasticsearch,mmaracic/elasticsearch,nomoa/elasticsearch,awislowski/elasticsearch,mapr/elasticsearch,rlugojr/elasticsearch,jchampion/elasticsearch,ThiagoGarciaAlves/elasticsearch,rajanm/elasticsearch,spiegela/elasticsearch,gfyoung/elasticsearch,fforbeck/elasticsearch,winstonewert/elasticsearch,StefanGor/elasticsearch,myelin/elasticsearch,naveenhooda2000/elasticsearch,davidvgalbraith/elasticsearch,mmaracic/elasticsearch,nezirus/elasticsearch,nknize/elasticsearch,JervyShi/elasticsearch,vroyer/elassandra,sneivandt/elasticsearch,elasticdog/elasticsearch,markharwood/elasticsearch,tebriel/elasticsearch,dpursehouse/elasticsearch,Shepard1212/elasticsearch,IanvsPoplicola/elasticsearch,JSCooke/elasticsearch,Stacey-Gammon/elasticsearch,jbertouch/elasticsearch,robin13/elasticsearch,ThiagoGarciaAlves/elasticsearch,LewayneNaidoo/elasticsearch,IanvsPoplicola/elasticsearch,Stacey-Gammon/elasticsearch,xuzha/elasticsearch,s1monw/elasticsearch,xuzha/elasticsearch,mortonsykes/elasticsearch,a2lin/elasticsearch,a2lin/elasticsearch,pozhidae
vak/elasticsearch,naveenhooda2000/elasticsearch,wangtuo/elasticsearch,gingerwizard/elasticsearch,cwurm/elasticsearch,yanjunh/elasticsearch,wuranbo/elasticsearch,diendt/elasticsearch,gmarz/elasticsearch,i-am-Nathan/elasticsearch,pozhidaevak/elasticsearch,brandonkearby/elasticsearch,mapr/elasticsearch,MaineC/elasticsearch,liweinan0423/elasticsearch,obourgain/elasticsearch,yynil/elasticsearch,coding0011/elasticsearch,gingerwizard/elasticsearch,MaineC/elasticsearch,ThiagoGarciaAlves/elasticsearch,mohit/elasticsearch,tebriel/elasticsearch,zkidkid/elasticsearch,fforbeck/elasticsearch,rajanm/elasticsearch,strapdata/elassandra,ivansun1010/elasticsearch,maddin2016/elasticsearch,wuranbo/elasticsearch,episerver/elasticsearch,palecur/elasticsearch,markwalkom/elasticsearch,spiegela/elasticsearch,a2lin/elasticsearch,i-am-Nathan/elasticsearch,JervyShi/elasticsearch,pozhidaevak/elasticsearch,mapr/elasticsearch,strapdata/elassandra5-rc,HonzaKral/elasticsearch,jbertouch/elasticsearch,strapdata/elassandra5-rc,Stacey-Gammon/elasticsearch,C-Bish/elasticsearch,fernandozhu/elasticsearch,GlenRSmith/elasticsearch,markwalkom/elasticsearch,diendt/elasticsearch,fred84/elasticsearch,nknize/elasticsearch,wenpos/elasticsearch,s1monw/elasticsearch,avikurapati/elasticsearch,gmarz/elasticsearch,jprante/elasticsearch,ricardocerq/elasticsearch,sreeramjayan/elasticsearch,rlugojr/elasticsearch,spiegela/elasticsearch,dongjoon-hyun/elasticsearch,LeoYao/elasticsearch,fernandozhu/elasticsearch,yanjunh/elasticsearch,masaruh/elasticsearch,scorpionvicky/elasticsearch,winstonewert/elasticsearch,ZTE-PaaS/elasticsearch,coding0011/elasticsearch,dpursehouse/elasticsearch,glefloch/elasticsearch,wuranbo/elasticsearch,mortonsykes/elasticsearch,awislowski/elasticsearch,jpountz/elasticsearch,qwerty4030/elasticsearch,jpountz/elasticsearch,geidies/elasticsearch,brandonkearby/elasticsearch,rlugojr/elasticsearch,JackyMai/elasticsearch,markharwood/elasticsearch,nazarewk/elasticsearch,markwalkom/elasticsearch,gingerwizard/ela
sticsearch,MisterAndersen/elasticsearch,artnowo/elasticsearch,fred84/elasticsearch,bawse/elasticsearch,camilojd/elasticsearch,bawse/elasticsearch,lks21c/elasticsearch,strapdata/elassandra,StefanGor/elasticsearch,vroyer/elasticassandra,s1monw/elasticsearch,Helen-Zhao/elasticsearch,henakamaMSFT/elasticsearch,mortonsykes/elasticsearch,fernandozhu/elasticsearch,avikurapati/elasticsearch,jpountz/elasticsearch,StefanGor/elasticsearch,trangvh/elasticsearch,tebriel/elasticsearch,yanjunh/elasticsearch,Shepard1212/elasticsearch,geidies/elasticsearch,umeshdangat/elasticsearch,jimczi/elasticsearch,myelin/elasticsearch,naveenhooda2000/elasticsearch,Stacey-Gammon/elasticsearch,cwurm/elasticsearch,girirajsharma/elasticsearch,girirajsharma/elasticsearch,obourgain/elasticsearch,Shepard1212/elasticsearch,gfyoung/elasticsearch,spiegela/elasticsearch,avikurapati/elasticsearch,LeoYao/elasticsearch,mapr/elasticsearch,njlawton/elasticsearch,kalimatas/elasticsearch,shreejay/elasticsearch,LeoYao/elasticsearch,glefloch/elasticsearch,MaineC/elasticsearch,mohit/elasticsearch,JervyShi/elasticsearch,LewayneNaidoo/elasticsearch,elasticdog/elasticsearch,qwerty4030/elasticsearch,sreeramjayan/elasticsearch,dongjoon-hyun/elasticsearch,jchampion/elasticsearch,alexshadow007/elasticsearch | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.StatusToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
/**
* Represents a single item response for an action executed as part of the bulk API. Holds the index/type/id
 * of the relevant action, and if it has failed or not (with the failure message in case it failed).
*/
public class BulkItemResponse implements Streamable, StatusToXContent {

    /**
     * The HTTP status of this item: the wrapped response's status on success,
     * the failure's status otherwise.
     */
    @Override
    public RestStatus status() {
        return failure == null ? response.status() : failure.getStatus();
    }

    /**
     * Serializes this item as a single object keyed by its operation type.
     * On success the wrapped response is rendered plus a numeric "status" field;
     * on failure the index/type/id, the numeric status and a structured "error"
     * object (built from the cause) are rendered instead.
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(opType);
        if (failure == null) {
            response.toXContent(builder, params);
            // numeric HTTP status code (e.g. 201), not the RestStatus enum name
            builder.field(Fields.STATUS, response.status().getStatus());
        } else {
            builder.field(Fields._INDEX, failure.getIndex());
            builder.field(Fields._TYPE, failure.getType());
            builder.field(Fields._ID, failure.getId());
            builder.field(Fields.STATUS, failure.getStatus().getStatus());
            builder.startObject(Fields.ERROR);
            ElasticsearchException.toXContent(builder, params, failure.getCause());
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    /** XContent field names emitted by {@link #toXContent}. */
    static final class Fields {
        static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
        static final XContentBuilderString _ID = new XContentBuilderString("_id");
        static final XContentBuilderString STATUS = new XContentBuilderString("status");
        static final XContentBuilderString ERROR = new XContentBuilderString("error");
    }

    /**
     * Represents a failure of a single bulk item.
     */
    public static class Failure {
        private final String index;
        private final String type;
        private final String id;
        private final Throwable cause;
        private final RestStatus status;

        /**
         * @param t the cause of the failure; the REST status is derived from it
         *          via {@link ExceptionsHelper#status}
         */
        public Failure(String index, String type, String id, Throwable t) {
            this.index = index;
            this.type = type;
            this.id = id;
            this.cause = t;
            this.status = ExceptionsHelper.status(t);
        }

        /**
         * The index name of the action.
         */
        public String getIndex() {
            return this.index;
        }

        /**
         * The type of the action.
         */
        public String getType() {
            return type;
        }

        /**
         * The id of the action.
         */
        public String getId() {
            return id;
        }

        /**
         * The failure message (string form of the cause).
         */
        public String getMessage() {
            return this.cause.toString();
        }

        /**
         * The rest status derived from the cause.
         */
        public RestStatus getStatus() {
            return this.status;
        }

        public Throwable getCause() {
            return cause;
        }
    }

    // position of this item within the bulk request
    private int id;
    // operation type string, e.g. "index", "create", "update" or "delete"
    private String opType;
    // successful response; null when the item failed
    private DocWriteResponse response;
    // failure details; null when the item succeeded
    private Failure failure;

    // for deserialization via readBulkItem/readFrom
    BulkItemResponse() {
    }

    /** Successful item. */
    public BulkItemResponse(int id, String opType, DocWriteResponse response) {
        this.id = id;
        this.opType = opType;
        this.response = response;
    }

    /** Failed item. */
    public BulkItemResponse(int id, String opType, Failure failure) {
        this.id = id;
        this.opType = opType;
        this.failure = failure;
    }

    /**
     * The numeric order of the item matching the same request order in the bulk request.
     */
    public int getItemId() {
        return id;
    }

    /**
     * The operation type (e.g. "index", "create", "update" or "delete").
     */
    public String getOpType() {
        return this.opType;
    }

    /**
     * The index name of the action (taken from the failure when the item failed).
     */
    public String getIndex() {
        if (failure != null) {
            return failure.getIndex();
        }
        return response.getIndex();
    }

    /**
     * The type of the action (taken from the failure when the item failed).
     */
    public String getType() {
        if (failure != null) {
            return failure.getType();
        }
        return response.getType();
    }

    /**
     * The id of the action (taken from the failure when the item failed).
     */
    public String getId() {
        if (failure != null) {
            return failure.getId();
        }
        return response.getId();
    }

    /**
     * The version of the action, or -1 when the item failed.
     */
    public long getVersion() {
        if (failure != null) {
            return -1;
        }
        return response.getVersion();
    }

    /**
     * The actual response ({@link IndexResponse} or {@link DeleteResponse}). <tt>null</tt> in
     * case of failure. Note: unchecked cast; the caller chooses T.
     */
    public <T extends DocWriteResponse> T getResponse() {
        return (T) response;
    }

    /**
     * Is this a failed execution of an operation.
     */
    public boolean isFailed() {
        return failure != null;
    }

    /**
     * The failure message, <tt>null</tt> if it did not fail.
     */
    public String getFailureMessage() {
        if (failure != null) {
            return failure.getMessage();
        }
        return null;
    }

    /**
     * The actual failure object if there was a failure.
     */
    public Failure getFailure() {
        return this.failure;
    }

    /** Deserializes one item from the stream. */
    public static BulkItemResponse readBulkItem(StreamInput in) throws IOException {
        BulkItemResponse response = new BulkItemResponse();
        response.readFrom(in);
        return response;
    }

    /**
     * Wire format: vint id, opType string, one response-type byte
     * (0=index, 1=delete, 3=update, 2=no response), the response body when present,
     * then a boolean-prefixed optional failure (index, type, optional id, throwable).
     */
    @Override
    public void readFrom(StreamInput in) throws IOException {
        id = in.readVInt();
        opType = in.readString();
        byte type = in.readByte();
        if (type == 0) {
            response = new IndexResponse();
            response.readFrom(in);
        } else if (type == 1) {
            response = new DeleteResponse();
            response.readFrom(in);
        } else if (type == 3) { // make 3 instead of 2, because 2 is already in use for 'no responses'
            response = new UpdateResponse();
            response.readFrom(in);
        }
        if (in.readBoolean()) {
            String fIndex = in.readString();
            String fType = in.readString();
            String fId = in.readOptionalString();
            Throwable throwable = in.readThrowable();
            failure = new Failure(fIndex, fType, fId, throwable);
        }
    }

    /**
     * Mirror of {@link #readFrom}: same field order and the same response-type
     * byte encoding (0=index, 1=delete, 3=update, 2=no response).
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(id);
        out.writeString(opType);
        if (response == null) {
            out.writeByte((byte) 2);
        } else {
            if (response instanceof IndexResponse) {
                out.writeByte((byte) 0);
            } else if (response instanceof DeleteResponse) {
                out.writeByte((byte) 1);
            } else if (response instanceof UpdateResponse) {
                out.writeByte((byte) 3); // make 3 instead of 2, because 2 is already in use for 'no responses'
            }
            response.writeTo(out);
        }
        if (failure == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeString(failure.getIndex());
            out.writeString(failure.getType());
            out.writeOptionalString(failure.getId());
            out.writeThrowable(failure.getCause());
        }
    }
}
| core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.StatusToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.rest.RestStatus;
import java.io.IOException;
/**
* Represents a single item response for an action executed as part of the bulk API. Holds the index/type/id
 * of the relevant action, and if it has failed or not (with the failure message in case it failed).
*/
public class BulkItemResponse implements Streamable, StatusToXContent {

    /**
     * The HTTP status of this item: the wrapped response's status on success,
     * the failure's status otherwise.
     */
    @Override
    public RestStatus status() {
        return failure == null ? response.status() : failure.getStatus();
    }

    /**
     * Serializes this item as a single object keyed by its operation type.
     * <p>
     * Fix: the "status" field must carry the numeric HTTP status code (e.g. 201),
     * not the {@link RestStatus} enum value, which would render as the status
     * <em>name</em> and break clients expecting a number.
     */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(opType);
        if (failure == null) {
            response.toXContent(builder, params);
            // was: builder.field(Fields.STATUS, response.status()) — emitted the name
            builder.field(Fields.STATUS, response.status().getStatus());
        } else {
            builder.field(Fields._INDEX, failure.getIndex());
            builder.field(Fields._TYPE, failure.getType());
            builder.field(Fields._ID, failure.getId());
            // was: builder.field(Fields.STATUS, failure.getStatus()) — emitted the name
            builder.field(Fields.STATUS, failure.getStatus().getStatus());
            builder.startObject(Fields.ERROR);
            ElasticsearchException.toXContent(builder, params, failure.getCause());
            builder.endObject();
        }
        builder.endObject();
        return builder;
    }

    /** XContent field names emitted by {@link #toXContent}. */
    static final class Fields {
        static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
        static final XContentBuilderString _ID = new XContentBuilderString("_id");
        static final XContentBuilderString STATUS = new XContentBuilderString("status");
        static final XContentBuilderString ERROR = new XContentBuilderString("error");
    }

    /**
     * Represents a failure of a single bulk item.
     */
    public static class Failure {
        private final String index;
        private final String type;
        private final String id;
        private final Throwable cause;
        private final RestStatus status;

        /**
         * @param t the cause of the failure; the REST status is derived from it
         *          via {@link ExceptionsHelper#status}
         */
        public Failure(String index, String type, String id, Throwable t) {
            this.index = index;
            this.type = type;
            this.id = id;
            this.cause = t;
            this.status = ExceptionsHelper.status(t);
        }

        /**
         * The index name of the action.
         */
        public String getIndex() {
            return this.index;
        }

        /**
         * The type of the action.
         */
        public String getType() {
            return type;
        }

        /**
         * The id of the action.
         */
        public String getId() {
            return id;
        }

        /**
         * The failure message (string form of the cause).
         */
        public String getMessage() {
            return this.cause.toString();
        }

        /**
         * The rest status derived from the cause.
         */
        public RestStatus getStatus() {
            return this.status;
        }

        public Throwable getCause() {
            return cause;
        }
    }

    // position of this item within the bulk request
    private int id;
    // operation type string, e.g. "index", "create", "update" or "delete"
    private String opType;
    // successful response; null when the item failed
    private DocWriteResponse response;
    // failure details; null when the item succeeded
    private Failure failure;

    // for deserialization via readBulkItem/readFrom
    BulkItemResponse() {
    }

    /** Successful item. */
    public BulkItemResponse(int id, String opType, DocWriteResponse response) {
        this.id = id;
        this.opType = opType;
        this.response = response;
    }

    /** Failed item. */
    public BulkItemResponse(int id, String opType, Failure failure) {
        this.id = id;
        this.opType = opType;
        this.failure = failure;
    }

    /**
     * The numeric order of the item matching the same request order in the bulk request.
     */
    public int getItemId() {
        return id;
    }

    /**
     * The operation type (e.g. "index", "create", "update" or "delete").
     */
    public String getOpType() {
        return this.opType;
    }

    /**
     * The index name of the action (taken from the failure when the item failed).
     */
    public String getIndex() {
        if (failure != null) {
            return failure.getIndex();
        }
        return response.getIndex();
    }

    /**
     * The type of the action (taken from the failure when the item failed).
     */
    public String getType() {
        if (failure != null) {
            return failure.getType();
        }
        return response.getType();
    }

    /**
     * The id of the action (taken from the failure when the item failed).
     */
    public String getId() {
        if (failure != null) {
            return failure.getId();
        }
        return response.getId();
    }

    /**
     * The version of the action, or -1 when the item failed.
     */
    public long getVersion() {
        if (failure != null) {
            return -1;
        }
        return response.getVersion();
    }

    /**
     * The actual response ({@link IndexResponse} or {@link DeleteResponse}). <tt>null</tt> in
     * case of failure. Note: unchecked cast; the caller chooses T.
     */
    public <T extends DocWriteResponse> T getResponse() {
        return (T) response;
    }

    /**
     * Is this a failed execution of an operation.
     */
    public boolean isFailed() {
        return failure != null;
    }

    /**
     * The failure message, <tt>null</tt> if it did not fail.
     */
    public String getFailureMessage() {
        if (failure != null) {
            return failure.getMessage();
        }
        return null;
    }

    /**
     * The actual failure object if there was a failure.
     */
    public Failure getFailure() {
        return this.failure;
    }

    /** Deserializes one item from the stream. */
    public static BulkItemResponse readBulkItem(StreamInput in) throws IOException {
        BulkItemResponse response = new BulkItemResponse();
        response.readFrom(in);
        return response;
    }

    /**
     * Wire format: vint id, opType string, one response-type byte
     * (0=index, 1=delete, 3=update, 2=no response), the response body when present,
     * then a boolean-prefixed optional failure (index, type, optional id, throwable).
     */
    @Override
    public void readFrom(StreamInput in) throws IOException {
        id = in.readVInt();
        opType = in.readString();
        byte type = in.readByte();
        if (type == 0) {
            response = new IndexResponse();
            response.readFrom(in);
        } else if (type == 1) {
            response = new DeleteResponse();
            response.readFrom(in);
        } else if (type == 3) { // make 3 instead of 2, because 2 is already in use for 'no responses'
            response = new UpdateResponse();
            response.readFrom(in);
        }
        if (in.readBoolean()) {
            String fIndex = in.readString();
            String fType = in.readString();
            String fId = in.readOptionalString();
            Throwable throwable = in.readThrowable();
            failure = new Failure(fIndex, fType, fId, throwable);
        }
    }

    /**
     * Mirror of {@link #readFrom}: same field order and the same response-type
     * byte encoding (0=index, 1=delete, 3=update, 2=no response).
     */
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeVInt(id);
        out.writeString(opType);
        if (response == null) {
            out.writeByte((byte) 2);
        } else {
            if (response instanceof IndexResponse) {
                out.writeByte((byte) 0);
            } else if (response instanceof DeleteResponse) {
                out.writeByte((byte) 1);
            } else if (response instanceof UpdateResponse) {
                out.writeByte((byte) 3); // make 3 instead of 2, because 2 is already in use for 'no responses'
            }
            response.writeTo(out);
        }
        if (failure == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            out.writeString(failure.getIndex());
            out.writeString(failure.getType());
            out.writeOptionalString(failure.getId());
            out.writeThrowable(failure.getCause());
        }
    }
}
| BulkItemResponse returns status code instead of status name
In commit fafeb3a, we've refactored REST response handling logic
and returned HTTP status names instead of HTTP status codes for
bulk item responses. With this commit we restore the original
behavior.
Checked with @bleskes.
| core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java | BulkItemResponse returns status code instead of status name | <ide><path>ore/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java
<ide> builder.startObject(opType);
<ide> if (failure == null) {
<ide> response.toXContent(builder, params);
<del> builder.field(Fields.STATUS, response.status());
<add> builder.field(Fields.STATUS, response.status().getStatus());
<ide> } else {
<ide> builder.field(Fields._INDEX, failure.getIndex());
<ide> builder.field(Fields._TYPE, failure.getType());
<ide> builder.field(Fields._ID, failure.getId());
<del> builder.field(Fields.STATUS, failure.getStatus());
<add> builder.field(Fields.STATUS, failure.getStatus().getStatus());
<ide> builder.startObject(Fields.ERROR);
<ide> ElasticsearchException.toXContent(builder, params, failure.getCause());
<ide> builder.endObject(); |
|
Java | apache-2.0 | 4da44b22d717b474002ca58259c7b0ef2e04ba30 | 0 | jasperhoogland/jautomata | package net.jhoogland.jautomata.semirings;
import java.util.Arrays;
/**
*
* This semiring is used by the shortest distance algorithm to determine the n shortest distances to a state. If the storePath
* field has value true, then the paths are stored that led to the n shortest distances.
*
* @author Jasper Hoogland
*
*/
public class KTropicalSemiring implements Semiring<BestPathWeights>
{
public int k;
public boolean storePath;
public KTropicalSemiring(int k)
{
this(k, true);
}
public KTropicalSemiring(int k, boolean storePath)
{
this.k = k;
this.storePath = storePath;
}
public BestPathWeights multiply(BestPathWeights x1, BestPathWeights x2)
{
PathWeight[] x = new PathWeight[k * k];
int p = 0;
for (int i = 0; i < k; i++) for (int j = 0; j < k; j++, p++)
{
x[p] = new PathWeight(storePath ? x1.pathWeights[i] : null, x1.pathWeights[i].weight + x2.pathWeights[j].weight, storePath ? x2.pathWeights[j].transition : null);
}
Arrays.sort(x);
PathWeight[] y = new PathWeight[k];
System.arraycopy(x, 0, y, 0, k);
return new BestPathWeights(y);
}
public BestPathWeights add(BestPathWeights x1, BestPathWeights x2)
{
PathWeight[] x = new PathWeight[2 * k];
System.arraycopy(x1.pathWeights, 0, x, 0, k);
System.arraycopy(x2.pathWeights, 0, x, k, k);
Arrays.sort(x);
PathWeight[] y = new PathWeight[k];
System.arraycopy(x, 0, y, 0, k);
return new BestPathWeights(y);
}
public BestPathWeights one()
{
PathWeight[] mi = new PathWeight[k];
mi[0] = new PathWeight(null, 0.0);
for (int i = 1; i < k; i++) mi[i] = new PathWeight(null, Double.POSITIVE_INFINITY);
return new BestPathWeights(mi);
}
public BestPathWeights zero()
{
PathWeight[] mi = new PathWeight[k];
for (int i = 0; i < k; i++) mi[i] = new PathWeight(null, Double.POSITIVE_INFINITY);
return new BestPathWeights(mi);
}
public boolean isIdempotent()
{
return k < 2;
}
public boolean isCommutative()
{
return ! storePath;
}
public boolean isKClosed(int k)
{
return k >= this.k - 1;
}
}
| jautomata/src/main/java/net/jhoogland/jautomata/semirings/KTropicalSemiring.java | package net.jhoogland.jautomata.semirings;
import java.util.Arrays;
/**
 * This semiring is used by the shortest distance algorithm to determine the n shortest
 * distances to a state. If the storePath field has value true, then the paths are stored
 * that led to the n shortest distances.
 * <p>
 * Cleanup: removed the invalid {@code @param <T>} javadoc tag (this class is not generic)
 * and the spurious {@code @SuppressWarnings("unchecked")} annotations (none of these
 * methods performs an unchecked operation).
 *
 * @author Jasper Hoogland
 */
public class KTropicalSemiring implements Semiring<BestPathWeights>
{
	// number of shortest-path weights tracked per semiring element
	public int k;

	// when true, combined weights keep predecessor/transition links for path reconstruction
	public boolean storePath;

	/** Creates a k-tropical semiring that keeps path links. */
	public KTropicalSemiring(int k)
	{
		this(k, true);
	}

	/**
	 * @param storePath whether predecessor/transition links are recorded on combined weights
	 */
	public KTropicalSemiring(int k, boolean storePath)
	{
		this.k = k;
		this.storePath = storePath;
	}

	/**
	 * Pairwise-adds the operands' weights (k*k candidates), sorts ascending,
	 * and keeps the k smallest.
	 */
	public BestPathWeights multiply(BestPathWeights x1, BestPathWeights x2)
	{
		PathWeight[] x = new PathWeight[k * k];
		int p = 0;
		for (int i = 0; i < k; i++) for (int j = 0; j < k; j++, p++)
		{
			x[p] = new PathWeight(storePath ? x1.pathWeights[i] : null, x1.pathWeights[i].weight + x2.pathWeights[j].weight, storePath ? x2.pathWeights[j].transition : null);
		}
		Arrays.sort(x);
		PathWeight[] y = new PathWeight[k];
		System.arraycopy(x, 0, y, 0, k);
		return new BestPathWeights(y);
	}

	/**
	 * Merges the operands' weight lists (2k candidates), sorts ascending,
	 * and keeps the k smallest.
	 */
	public BestPathWeights add(BestPathWeights x1, BestPathWeights x2)
	{
		PathWeight[] x = new PathWeight[2 * k];
		System.arraycopy(x1.pathWeights, 0, x, 0, k);
		System.arraycopy(x2.pathWeights, 0, x, k, k);
		Arrays.sort(x);
		PathWeight[] y = new PathWeight[k];
		System.arraycopy(x, 0, y, 0, k);
		return new BestPathWeights(y);
	}

	/** Multiplicative identity: best weight 0.0, remaining slots +infinity. */
	public BestPathWeights one()
	{
		PathWeight[] mi = new PathWeight[k];
		mi[0] = new PathWeight(null, 0.0);
		for (int i = 1; i < k; i++) mi[i] = new PathWeight(null, Double.POSITIVE_INFINITY);
		return new BestPathWeights(mi);
	}

	/** Additive identity: every slot +infinity (no path known). */
	public BestPathWeights zero()
	{
		PathWeight[] mi = new PathWeight[k];
		for (int i = 0; i < k; i++) mi[i] = new PathWeight(null, Double.POSITIVE_INFINITY);
		return new BestPathWeights(mi);
	}

	/** Idempotent only in the plain tropical case (k < 2). */
	public boolean isIdempotent()
	{
		return k < 2;
	}

	/** Path bookkeeping makes multiplication order-sensitive. */
	public boolean isCommutative()
	{
		return ! storePath;
	}

	public boolean isKClosed(int k)
	{
		return k >= this.k - 1;
	}
}
| Minor changes. Javadoc updated. | jautomata/src/main/java/net/jhoogland/jautomata/semirings/KTropicalSemiring.java | Minor changes. Javadoc updated. | <ide><path>automata/src/main/java/net/jhoogland/jautomata/semirings/KTropicalSemiring.java
<ide> *
<ide> * @author Jasper Hoogland
<ide> *
<del> * @param <T>
<ide> */
<ide>
<ide> public class KTropicalSemiring implements Semiring<BestPathWeights>
<ide> this.storePath = storePath;
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public BestPathWeights multiply(BestPathWeights x1, BestPathWeights x2)
<ide> {
<ide> PathWeight[] x = new PathWeight[k * k];
<ide> return new BestPathWeights(y);
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public BestPathWeights add(BestPathWeights x1, BestPathWeights x2)
<ide> {
<ide> PathWeight[] x = new PathWeight[2 * k];
<ide> return new BestPathWeights(y);
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public BestPathWeights one()
<ide> {
<ide> PathWeight[] mi = new PathWeight[k];
<ide> return new BestPathWeights(mi);
<ide> }
<ide>
<del> @SuppressWarnings("unchecked")
<ide> public BestPathWeights zero()
<ide> {
<ide> PathWeight[] mi = new PathWeight[k]; |
|
Java | agpl-3.0 | 8439e1f9b88bbb8423e9f696df0f32a4215533eb | 0 | ua-eas/kfs-devops-automation-fork,bhutchinson/kfs,ua-eas/kfs,ua-eas/kfs-devops-automation-fork,kuali/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,UniversityOfHawaii/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/will-financials,kuali/kfs,quikkian-ua-devops/will-financials,kkronenb/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials,kkronenb/kfs,ua-eas/kfs,UniversityOfHawaii/kfs,UniversityOfHawaii/kfs,UniversityOfHawaii/kfs,smith750/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/kfs,kuali/kfs,smith750/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/kfs,kuali/kfs,kkronenb/kfs,quikkian-ua-devops/kfs,kkronenb/kfs,kuali/kfs,bhutchinson/kfs,smith750/kfs,ua-eas/kfs,ua-eas/kfs-devops-automation-fork,smith750/kfs,bhutchinson/kfs,ua-eas/kfs,ua-eas/kfs,bhutchinson/kfs | /*
* Copyright 2007 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.module.labor.dao.ojb;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.ojb.broker.query.Criteria;
import org.apache.ojb.broker.query.QueryByCriteria;
import org.apache.ojb.broker.query.QueryFactory;
import org.apache.ojb.broker.query.ReportQueryByCriteria;
import org.kuali.PropertyConstants;
import org.kuali.core.bo.user.PersonPayrollId;
import org.kuali.core.bo.user.UniversalUser;
import org.kuali.core.lookup.LookupUtils;
import org.kuali.core.util.KualiDecimal;
import org.kuali.module.budget.bo.CalculatedSalaryFoundationTracker;
import org.kuali.module.gl.util.OJBUtility;
import org.kuali.module.labor.bo.AccountStatusBaseFunds;
import org.kuali.module.labor.bo.AccountStatusCurrentFunds;
import org.kuali.module.labor.dao.LaborDao;
import org.springmodules.orm.ojb.support.PersistenceBrokerDaoSupport;
/**
* This class is a facade for Labor Distribution DAO balance inquiries
*/
public class LaborDaoOjb extends PersistenceBrokerDaoSupport implements LaborDao {
private LaborDaoOjb dao;
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getCSFTrackerData(java.util.Map)
*/
public Collection getCSFTrackerData(Map fieldValues) {
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new CalculatedSalaryFoundationTracker()));
LookupUtils.applySearchResultsLimit(criteria);
QueryByCriteria query = QueryFactory.newQuery(CalculatedSalaryFoundationTracker.class, criteria);
return getPersistenceBrokerTemplate().getCollectionByQuery(query);
}
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getCSFTrackerData(java.util.Map)
*/
public Object getCSFTrackerTotal(Map fieldValues) {
final String CSF_AMOUNT = "csfAmount";
System.out.println("accountNumber:" + fieldValues.get("accountNumber"));
System.out.println("universityFiscalYear:" + fieldValues.get("universityFiscalYear"));
System.out.println("chartOfAccountsCode:" + fieldValues.get("chartOfAccountsCode"));
System.out.println("accountNumber:" + fieldValues.get("accountNumber"));
System.out.println("subAccountNumber:" + fieldValues.get("subAccountNumber"));
System.out.println("financialObjectCode:" + fieldValues.get("financialObjectCode"));
System.out.println("financialSubObjectCode:" + fieldValues.get("financialSubObjectCode"));
/*
// fieldValues.clear();
// fieldValues.put("accountNumber", "1031400");
// fieldValues.put("chartOfAccountsCode", "BL");
// fieldValues.put("universityFiscalYear", "2004");
// fieldValues.put("financialObjectCode", "5821");
*/
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new CalculatedSalaryFoundationTracker()));
System.out.println("criteria:" + criteria);
ReportQueryByCriteria query = QueryFactory.newReportQuery(CalculatedSalaryFoundationTracker.class, criteria);
List groupByList = new ArrayList();
groupByList.add(PropertyConstants.UNIVERSITY_FISCAL_YEAR);
groupByList.add(PropertyConstants.CHART_OF_ACCOUNTS_CODE);
groupByList.add(PropertyConstants.ACCOUNT_NUMBER);
groupByList.add(PropertyConstants.SUB_ACCOUNT_NUMBER);
groupByList.add(PropertyConstants.FINANCIAL_OBJECT_CODE);
groupByList.add(PropertyConstants.FINANCIAL_SUB_OBJECT_CODE);
String[] groupBy = (String[]) groupByList.toArray(new String[groupByList.size()]);
query.setAttributes(new String[] { "sum(" + CSF_AMOUNT + ")"});
query.addGroupBy(groupBy);
Object[] csf = null;
Iterator<Object[]> calculatedSalaryFoundationTracker = getPersistenceBrokerTemplate().getReportQueryIteratorByQuery(query);
while (calculatedSalaryFoundationTracker!=null && calculatedSalaryFoundationTracker.hasNext()) {
csf = calculatedSalaryFoundationTracker.next();
}
KualiDecimal csfAmount = new KualiDecimal("0.00");
if (csf != null)
csfAmount = new KualiDecimal(csf[0].toString());
System.out.println("Amount:" + csfAmount);
return csfAmount;
}
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getCurrentYearFunds(java.util.Map)
*/
public Collection getCurrentYearFunds(Map fieldValues) {
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new AccountStatusCurrentFunds()));
QueryByCriteria query = QueryFactory.newQuery(AccountStatusCurrentFunds.class, criteria);
OJBUtility.limitResultSize(query);
Collection ledgerBalances = getPersistenceBrokerTemplate().getCollectionByQuery(query);
for (Iterator iter = ledgerBalances.iterator(); iter.hasNext();) {
AccountStatusCurrentFunds currentFund = (AccountStatusCurrentFunds) ledgerBalances;
new PersonPayrollId(currentFund.getEmplid());
UniversalUser universalUser = null;
// try{
// universalUser = SpringServiceLocator.getUniversalUserService().getUniversalUser(empl);
// }catch(UserNotFoundException e){
// return LaborConstants.BalanceInquiries.UnknownPersonName;
// }
// return universalUser.getPersonName();
}
return ledgerBalances;
}
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getBaseFunds(java.util.Map)
*/
public Collection getBaseFunds(Map fieldValues) {
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new AccountStatusBaseFunds()));
QueryByCriteria query = QueryFactory.newQuery(AccountStatusBaseFunds.class, criteria);
OJBUtility.limitResultSize(query);
return getPersistenceBrokerTemplate().getCollectionByQuery(query);
}
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getCurrentFunds(java.util.Map)
*/
public Collection getCurrentFunds(Map fieldValues) {
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new AccountStatusCurrentFunds()));
QueryByCriteria query = QueryFactory.newQuery(AccountStatusCurrentFunds.class, criteria);
OJBUtility.limitResultSize(query);
return getPersistenceBrokerTemplate().getCollectionByQuery(query);
}
} | work/src/org/kuali/kfs/module/ld/dataaccess/impl/LaborDaoOjb.java | /*
* Copyright 2007 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.module.labor.dao.ojb;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.ojb.broker.query.Criteria;
import org.apache.ojb.broker.query.QueryByCriteria;
import org.apache.ojb.broker.query.QueryFactory;
import org.apache.ojb.broker.query.ReportQueryByCriteria;
import org.kuali.PropertyConstants;
import org.kuali.core.bo.user.PersonPayrollId;
import org.kuali.core.bo.user.UniversalUser;
import org.kuali.core.lookup.LookupUtils;
import org.kuali.core.util.KualiDecimal;
import org.kuali.module.budget.bo.CalculatedSalaryFoundationTracker;
import org.kuali.module.gl.util.OJBUtility;
import org.kuali.module.labor.bo.AccountStatusBaseFunds;
import org.kuali.module.labor.bo.AccountStatusCurrentFunds;
import org.kuali.module.labor.dao.LaborDao;
import org.springmodules.orm.ojb.support.PersistenceBrokerDaoSupport;
/**
* This class is a facade for Labor Distribution DAO balance inquiries
*/
public class LaborDaoOjb extends PersistenceBrokerDaoSupport implements LaborDao {
private LaborDaoOjb dao;
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getCSFTrackerData(java.util.Map)
*/
public Collection getCSFTrackerData(Map fieldValues) {
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new CalculatedSalaryFoundationTracker()));
LookupUtils.applySearchResultsLimit(criteria);
QueryByCriteria query = QueryFactory.newQuery(CalculatedSalaryFoundationTracker.class, criteria);
return getPersistenceBrokerTemplate().getCollectionByQuery(query);
}
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getCSFTrackerData(java.util.Map)
*/
public Object getCSFTrackerTotal(Map fieldValues) {
final String CSF_AMOUNT = "csfAmount";
System.out.println("accountNumber:" + fieldValues.get("accountNumber"));
System.out.println("universityFiscalYear:" + fieldValues.get("universityFiscalYear"));
System.out.println("chartOfAccountsCode:" + fieldValues.get("chartOfAccountsCode"));
System.out.println("accountNumber:" + fieldValues.get("accountNumber"));
System.out.println("subAccountNumber:" + fieldValues.get("subAccountNumber"));
System.out.println("financialObjectCode:" + fieldValues.get("financialObjectCode"));
System.out.println("financialSubObjectCode:" + fieldValues.get("financialSubObjectCode"));
/*
// fieldValues.clear();
// fieldValues.put("accountNumber", "1031400");
// fieldValues.put("chartOfAccountsCode", "BL");
// fieldValues.put("universityFiscalYear", "2004");
// fieldValues.put("financialObjectCode", "5821");
*/
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new CalculatedSalaryFoundationTracker()));
System.out.println("criteria:" + criteria);
ReportQueryByCriteria query = QueryFactory.newReportQuery(CalculatedSalaryFoundationTracker.class, criteria);
List groupByList = new ArrayList();
groupByList.add(PropertyConstants.UNIVERSITY_FISCAL_YEAR);
groupByList.add(PropertyConstants.CHART_OF_ACCOUNTS_CODE);
groupByList.add(PropertyConstants.ACCOUNT_NUMBER);
groupByList.add(PropertyConstants.SUB_ACCOUNT_NUMBER);
groupByList.add(PropertyConstants.FINANCIAL_OBJECT_CODE);
groupByList.add(PropertyConstants.FINANCIAL_SUB_OBJECT_CODE);
String[] groupBy = (String[]) groupByList.toArray(new String[groupByList.size()]);
query.setAttributes(new String[] { "sum(" + CSF_AMOUNT + ")"});
query.addGroupBy(groupBy);
Object[] csf = null;
Iterator<Object[]> calculatedSalaryFoundationTracker = getPersistenceBrokerTemplate().getReportQueryIteratorByQuery(query);
while (calculatedSalaryFoundationTracker!=null && calculatedSalaryFoundationTracker.hasNext()) {
// csf = calculatedSalaryFoundationTracker.next();
}
KualiDecimal csfAmount = new KualiDecimal("0.00");
if (csf != null)
csfAmount = new KualiDecimal(csf[0].toString());
System.out.println("Amount:" + csfAmount);
return csfAmount;
}
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getCurrentYearFunds(java.util.Map)
*/
public Collection getCurrentYearFunds(Map fieldValues) {
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new AccountStatusCurrentFunds()));
QueryByCriteria query = QueryFactory.newQuery(AccountStatusCurrentFunds.class, criteria);
OJBUtility.limitResultSize(query);
Collection ledgerBalances = getPersistenceBrokerTemplate().getCollectionByQuery(query);
for (Iterator iter = ledgerBalances.iterator(); iter.hasNext();) {
AccountStatusCurrentFunds currentFund = (AccountStatusCurrentFunds) ledgerBalances;
new PersonPayrollId(currentFund.getEmplid());
UniversalUser universalUser = null;
// try{
// universalUser = SpringServiceLocator.getUniversalUserService().getUniversalUser(empl);
// }catch(UserNotFoundException e){
// return LaborConstants.BalanceInquiries.UnknownPersonName;
// }
// return universalUser.getPersonName();
}
return ledgerBalances;
}
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getBaseFunds(java.util.Map)
*/
public Collection getBaseFunds(Map fieldValues) {
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new AccountStatusBaseFunds()));
QueryByCriteria query = QueryFactory.newQuery(AccountStatusBaseFunds.class, criteria);
OJBUtility.limitResultSize(query);
return getPersistenceBrokerTemplate().getCollectionByQuery(query);
}
/**
*
* @see org.kuali.module.labor.dao.LaborDao#getCurrentFunds(java.util.Map)
*/
public Collection getCurrentFunds(Map fieldValues) {
Criteria criteria = new Criteria();
criteria.addAndCriteria(OJBUtility.buildCriteriaFromMap(fieldValues, new AccountStatusCurrentFunds()));
QueryByCriteria query = QueryFactory.newQuery(AccountStatusCurrentFunds.class, criteria);
OJBUtility.limitResultSize(query);
return getPersistenceBrokerTemplate().getCollectionByQuery(query);
}
} | KULLAB-91: Develop the BaseFundsAccountStatus Balance Inquiry
| work/src/org/kuali/kfs/module/ld/dataaccess/impl/LaborDaoOjb.java | KULLAB-91: Develop the BaseFundsAccountStatus Balance Inquiry | <ide><path>ork/src/org/kuali/kfs/module/ld/dataaccess/impl/LaborDaoOjb.java
<ide> Object[] csf = null;
<ide> Iterator<Object[]> calculatedSalaryFoundationTracker = getPersistenceBrokerTemplate().getReportQueryIteratorByQuery(query);
<ide> while (calculatedSalaryFoundationTracker!=null && calculatedSalaryFoundationTracker.hasNext()) {
<del>// csf = calculatedSalaryFoundationTracker.next();
<add> csf = calculatedSalaryFoundationTracker.next();
<ide> }
<ide> KualiDecimal csfAmount = new KualiDecimal("0.00");
<ide> if (csf != null) |
|
JavaScript | mit | c93e0c02e3a33feb1d665711a5495fdb7c16ef57 | 0 | danferth/DJ-wp-theme,danferth/DJ-wp-theme,danferth/Thomson-wp-theme,danferth/Thomson-wp-theme,danferth/DJ-wp-theme | //grab the url
var protocol = window.location.protocol;
var hostname = window.location.hostname;
var url = protocol + "//" + hostname;
//=====distributors page=====
var distributors = angular.module('distributors', ['ngSanitize']);
distributors.controller('distController', ['$scope', '$http', '$sce', function($scope, $http, $sce){
//grab JSON data
$http.get(url+'/wp-content/themes/TIC/assets/json/distributors.json').then(function(res){
$scope.distributors = res.data;
//set defaults for single distributor view
$scope.distId = "73";
$scope.hasTel2 = true;
$scope.hasFax = true;
$scope.hasWeb = true;
$scope.hasEmail = true;
$scope.hasNotes = false;
//on click of info buttom
$scope.singleDist = function(obj){
//set id
$scope.distId = obj.target.attributes.value.value;
//check for tel2
if($scope.distributors[$scope.distId].tel2 == "" || !$scope.distributors[$scope.distId].hasOwnProperty('tel2')){
$scope.hasTel2 = false;
}else{
$scope.hasTel2 = true;
}
//check for fax
if($scope.distributors[$scope.distId].fax == "" || !$scope.distributors[$scope.distId].hasOwnProperty('fax')){
$scope.hasFax = false;
}else{
$scope.hasFax = true;
}
//check for notes (special)
if($scope.distributors[$scope.distId].special == "" || !$scope.distributors[$scope.distId].hasOwnProperty('special')){
$scope.hasNotes = false;
}else{
$scope.hasNotes = true;
}
//check for web
if($scope.distributors[$scope.distId].web == "" || !$scope.distributors[$scope.distId].hasOwnProperty('web')){
$scope.hasWeb = false;
}else{
$scope.hasWeb = true;
}
//check for email
if($scope.distributors[$scope.distId].email == "" || !$scope.distributors[$scope.distId].hasOwnProperty('email')){
$scope.hasEmail = false;
}else{
$scope.hasEmail = true;
}
};
$scope.scrollToTop = function(){
window.scroll(0,0);
};
});
//sorting default
$scope.sortType = 'company';
$scope.sortReverse = false;
$scope.filterType = "";
}]);
//=====compound compatibility=====
var compound = angular.module('compound', []);
compound.controller('compoundController', ['$scope', '$http', function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/compound.json').then(
function(rslt){
$scope.compounds = rslt.data;
});
$scope.sortReverse = false;
$scope.sortType = "drugName";
}]);
//=====chemical compatibility=====
var chemicalIndex = angular.module('chemicalIndex', []);
chemicalIndex.controller('chemicalIndexController', ['$scope', '$http',function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/chemical.json').then(function(rslt){
$scope.chemical = rslt.data;
$scope.sortType = "chemical";
$scope.sortReverse = false;
$scope.legend = function(n){
var legendSet = {
"R" : "Recommended",
"GR" : "Generally Recommended",
"LTD" : "Limited Recommendation",
"NR" : "Not Recommended",
"GNR" : "Generally Not Recommended",
"TST" : "Testing Recommended",
"ND" : "No Data Presently Available "
};
return legendSet[n];
};
});
}]);
//=====product search=====
var products = angular.module('products', []);
products.controller('productsController', ['$scope', '$http',function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/products.json').then(function(rslt){
$scope.products = rslt.data;
});
$scope.sortType = "line";
$scope.sortReverse = false;
$scope.goToProduct = function(n){
//console.log(url + "/" + n);
window.location.href = url + "/" + n;
};
}]);
//=====Plates search page=====
var plates = angular.module('platesearch', []);
plates.controller('platesearchController', ['$scope','$http', function($scope,$http){
$http.get(url+'/wp-content/themes/TIC/assets/json/plates.json').then(function(rslt){
$scope.plates = rslt.data;
});
$scope.sortType = "partNum";
$scope.sortReverse = false;
$scope.setItem = function(rslt){
$scope.set = rslt;
};
$scope.triggerOverlay = function(e){
$('.overlay').removeClass('hidden');
$('.overlay-content').addClass('animated fadeInUp');
};
}]);
plates.filter('tostring', function(){
return function(item){
var rslt = "";
for(var i = 0; i < item.length; i++){
if(i < item.length-1){
rslt += item[i] + ", ";
}else{
rslt += item[i];
}
}
return rslt;
}
});
plates.filter('yesNo', function(){
return function(item){
if(item === false || item === 1){
return "No";
}else if(item === true || item === 0){
return "Yes";
}
}
});
//=====Product pages=====
var product_page = angular.module('product_page', ['ngSanitize']);
product_page.controller('product_pageController', ['$scope', '$http', function($scope,$http){
$http.get(url+'/wp-content/themes/TIC/assets/json/products.json').then(function(rslt){
$scope.products = rslt.data;
});
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
});
//product set with attribute on <product-inquiry product="foobar"></product-inquiry>
$scope.product = "no product set!";
//select
$scope.UYF_options = [
{ "value" : "ecoli", "label" : "E. Coli" },
{ "value" : "microbial", "label" : "Microbial" },
{ "value" : "pink backtirium", "label" : "Pink Bacterium" },
{ "value" : "streptomyces" , "label" : "streptomyces" }
];
$scope.industry = { "value" : "", "label" : "no industry selected" };
//form
if(sessionStorage.getItem('fname')){
$scope.first_name = sessionStorage.getItem('fname');
}
if(sessionStorage.getItem('lname')){
$scope.last_name = sessionStorage.getItem('lname');
}
if(sessionStorage.getItem('email')){
$scope.email = sessionStorage.getItem('email');
}
if(sessionStorage.getItem('zipCode')){
$scope.zip_code = sessionStorage.getItem('zipCode');
}
$scope.setter = function(formID){
var fname = $('#'+formID+' input[name="first-name"]').val(),
lname = $('#'+formID+' input[name="last-name"]').val(),
email = $('#'+formID+' input[name="email"]').val(),
zipCode = $('#'+formID+' input[name="zip-code"]').val();
sessionStorage.setItem('fname', fname);
sessionStorage.setItem('lname', lname);
sessionStorage.setItem('email', email);
sessionStorage.setItem('zipCode', zipCode);
};
$scope.sendId = function(techId){
var newURL = url+"/tech?id="+techId;
window.open(newURL, '_blank');
};
}]);
//=========techlibrary=================
var techlibrary = angular.module('techlibrary', ['ngSanitize']);
techlibrary.controller('techlibraryController',['$scope', '$http', function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
// var productLineArray = [];
// for(var pl in $scope.techdata){
// productLineArray.push($scope.techdata[pl].subProductLine);
// }
// productLineArray = productLineArray.sort();
// $scope.productLine = productLineArray.filter(function(elem, index, self){
// return index == self.indexOf(elem);
// });
});
$scope.product = "";
$scope.sendId = function(techId){
var newURL = url+"/tech?id="+techId;
window.open(newURL, '_blank');
};
//get query for tech note
function getQueryVariable(variable){
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i=0;i<vars.length;i++) {
var pair = vars[i].split("=");
if(pair[0] == variable){
return pair[1];
}
}
return(false);
};
$scope.GETproduct = getQueryVariable('product');
if($scope.GETproduct){
$scope.product = $scope.GETproduct;
$('option[value="'+$scope.product+'"]').attr('selected', 'selected');
}
}]);
//===============techResult======================
var techResult = angular.module('techResult', ['ngSanitize']);
techResult.config(function($sceDelegateProvider) {
$sceDelegateProvider.resourceUrlWhitelist(['self']);
});
techResult.filter('trustUrl', function ($sce) {
return function(url) {
return $sce.trustAsResourceUrl(url);
};
});
techResult.controller('techResultController', ['$scope', '$http', '$filter', '$sce', function($scope, $http, $filter, $sce){
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
//get query for tech note
function getQueryVariable(variable){
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i=0;i<vars.length;i++) {
var pair = vars[i].split("=");
if(pair[0] == variable){return pair[1];}
}
return(false);
};
$scope.techQuery = getQueryVariable('id');
$scope.techNote = $filter('filter')($scope.techdata, {id: $scope.techQuery })[0];
if($scope.techNote.type === 'GI'){
$scope.pageTitle = "General Information";
}
if($scope.techNote.type === 'COMP'){
$scope.pageTitle = "Comparisons to Our Products";
}
if($scope.techNote.type === 'FAQ'){
$scope.pageTitle = "FAQ";
}
if($scope.techNote.type === 'VIDEO'){
$scope.pageTitle = $scope.techNote.title;
}
if($scope.techNote.type === 'APPNOTE'){
$scope.pageTitle = "Application Note";
}
if($scope.techNote.type === 'PW'){
$scope.pageTitle = "Published Works";
}
if($scope.techNote.linkType === "pdf"){
$scope.PDF = true;
}
if($scope.techNote.linkType === "page"){
window.location = url+"/"+$scope.techNote.link;
}
if($scope.techNote.linkType === "link"){
window.location = url+"/"+$scope.techNote.link;
}
if($scope.techNote.linkType === "mp4"){
$scope.VIDEO = true;
$scope.videoUrl = url + "/wp-content/uploads/video/videos/" + $scope.techNote.link + '.mp4';
}
});
}]);
//=====test page=====
var test = angular.module('test', ['ngSanitize']);
test.controller('testController', ['$scope', '$http', function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/products.json').then(function(rslt){
$scope.products = rslt.data;
});
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
});
$scope.welcome = "Hello, sorry but there is no test being conducted on this page at the moment. Possibly the test you were looking for has been moved to production. whatever page you are thinking was going to display is now where it should be on the site.";
}]);
//test also page
var testAlso = angular.module('testAlso',['ngSanitize']);
testAlso.controller('testAlsoController',['$scope','$http', function($scope,$http){
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
});
$scope.sortType = "index";
$scope.sortReverse = false;
$scope.key = "";
$scope.value = "";
}]);
| assets/javascript/app.js | //grab the url
var protocol = window.location.protocol;
var hostname = window.location.hostname;
var url = protocol + "//" + hostname;
//=====distributors page=====
var distributors = angular.module('distributors', ['ngSanitize']);
distributors.controller('distController', ['$scope', '$http', '$sce', function($scope, $http, $sce){
//grab JSON data
$http.get(url+'/wp-content/themes/TIC/assets/json/distributors.json').then(function(res){
$scope.distributors = res.data;
//set defaults for single distributor view
$scope.distId = "73";
$scope.hasTel2 = true;
$scope.hasFax = true;
$scope.hasWeb = true;
$scope.hasEmail = true;
$scope.hasNotes = false;
//on click of info buttom
$scope.singleDist = function(obj){
//set id
$scope.distId = obj.target.attributes.value.value;
//check for tel2
if($scope.distributors[$scope.distId].tel2 == "" || !$scope.distributors[$scope.distId].hasOwnProperty('tel2')){
$scope.hasTel2 = false;
}else{
$scope.hasTel2 = true;
}
//check for fax
if($scope.distributors[$scope.distId].fax == "" || !$scope.distributors[$scope.distId].hasOwnProperty('fax')){
$scope.hasFax = false;
}else{
$scope.hasFax = true;
}
//check for notes (special)
if($scope.distributors[$scope.distId].special == "" || !$scope.distributors[$scope.distId].hasOwnProperty('special')){
$scope.hasNotes = false;
}else{
$scope.hasNotes = true;
}
//check for web
if($scope.distributors[$scope.distId].web == "" || !$scope.distributors[$scope.distId].hasOwnProperty('web')){
$scope.hasWeb = false;
}else{
$scope.hasWeb = true;
}
//check for email
if($scope.distributors[$scope.distId].email == "" || !$scope.distributors[$scope.distId].hasOwnProperty('email')){
$scope.hasEmail = false;
}else{
$scope.hasEmail = true;
}
};
$scope.scrollToTop = function(){
window.scroll(0,0);
};
});
//sorting default
$scope.sortType = 'company';
$scope.sortReverse = false;
$scope.filterType = "";
}]);
//=====compound compatibility=====
var compound = angular.module('compound', []);
compound.controller('compoundController', ['$scope', '$http', function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/compound.json').then(
function(rslt){
$scope.compounds = rslt.data;
});
$scope.sortReverse = false;
$scope.sortType = "drugName";
}]);
//=====chemical compatibility=====
var chemicalIndex = angular.module('chemicalIndex', []);
chemicalIndex.controller('chemicalIndexController', ['$scope', '$http',function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/chemical.json').then(function(rslt){
$scope.chemical = rslt.data;
$scope.sortType = "chemical";
$scope.sortReverse = false;
$scope.legend = function(n){
var legendSet = {
"R" : "Recommended",
"GR" : "Generally Recommended",
"LTD" : "Limited Recommendation",
"NR" : "Not Recommended",
"GNR" : "Generally Not Recommended",
"TST" : "Testing Recommended",
"ND" : "No Data Presently Available "
};
return legendSet[n];
};
});
}]);
//=====product search=====
var products = angular.module('products', []);
products.controller('productsController', ['$scope', '$http',function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/products.json').then(function(rslt){
$scope.products = rslt.data;
});
$scope.sortType = "line";
$scope.sortReverse = false;
$scope.goToProduct = function(n){
//console.log(url + "/" + n);
window.location.href = url + "/" + n;
};
}]);
//=====Plates search page=====
var plates = angular.module('platesearch', []);
plates.controller('platesearchController', ['$scope','$http', function($scope,$http){
$http.get(url+'/wp-content/themes/TIC/assets/json/plates.json').then(function(rslt){
$scope.plates = rslt.data;
});
$scope.sortType = "partNum";
$scope.sortReverse = false;
$scope.setItem = function(rslt){
$scope.set = rslt;
};
$scope.triggerOverlay = function(e){
$('.overlay').removeClass('hidden');
$('.overlay-content').addClass('animated fadeInUp');
};
}]);
plates.filter('tostring', function(){
return function(item){
var rslt = "";
for(var i = 0; i < item.length; i++){
if(i < item.length-1){
rslt += item[i] + ", ";
}else{
rslt += item[i];
}
}
return rslt;
}
});
plates.filter('yesNo', function(){
return function(item){
if(item === false || item === 1){
return "No";
}else if(item === true || item === 0){
return "Yes";
}
}
});
//=====Product pages=====
var product_page = angular.module('product_page', ['ngSanitize']);
product_page.controller('product_pageController', ['$scope', '$http', function($scope,$http){
$http.get(url+'/wp-content/themes/TIC/assets/json/products.json').then(function(rslt){
$scope.products = rslt.data;
});
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
});
//product set with attribute on <product-inquiry product="foobar"></product-inquiry>
$scope.product = "no product set!";
//select
$scope.UYF_options = [
{ "value" : "ecoli", "label" : "E. Coli" },
{ "value" : "microbial", "label" : "Microbial" },
{ "value" : "pink backtirium", "label" : "Pink Bacterium" },
{ "value" : "streptomyces" , "label" : "streptomyces" }
];
$scope.industry = { "value" : "", "label" : "no industry selected" };
//form
if(sessionStorage.getItem('fname')){
$scope.first_name = sessionStorage.getItem('fname');
}
if(sessionStorage.getItem('lname')){
$scope.last_name = sessionStorage.getItem('lname');
}
if(sessionStorage.getItem('email')){
$scope.email = sessionStorage.getItem('email');
}
if(sessionStorage.getItem('zipCode')){
$scope.zip_code = sessionStorage.getItem('zipCode');
}
$scope.setter = function(formID){
var fname = $('#'+formID+' input[name="first-name"]').val(),
lname = $('#'+formID+' input[name="last-name"]').val(),
email = $('#'+formID+' input[name="email"]').val(),
zipCode = $('#'+formID+' input[name="zip-code"]').val();
sessionStorage.setItem('fname', fname);
sessionStorage.setItem('lname', lname);
sessionStorage.setItem('email', email);
sessionStorage.setItem('zipCode', zipCode);
};
$scope.sendId = function(techId){
var newURL = url+"/tech?id="+techId;
window.open(newURL, '_blank');
};
}]);
//=========techlibrary=================
var techlibrary = angular.module('techlibrary', ['ngSanitize']);
techlibrary.controller('techlibraryController',['$scope', '$http', function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
var productLineArray = [];
for(var pl in $scope.techdata){
productLineArray.push($scope.techdata[pl].subProductLine);
}
productLineArray = productLineArray.sort();
$scope.productLine = productLineArray.filter(function(elem, index, self){
return index == self.indexOf(elem);
});
});
$scope.product = "";
$scope.sendId = function(techId){
var newURL = url+"/tech?id="+techId;
window.open(newURL, '_blank');
};
//get query for tech note
function getQueryVariable(variable){
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i=0;i<vars.length;i++) {
var pair = vars[i].split("=");
if(pair[0] == variable){
return pair[1];
}
}
return(false);
};
$scope.GETproduct = getQueryVariable('product');
if($scope.GETproduct){
$scope.product = $scope.GETproduct;
}
}]);
//===============techResult======================
var techResult = angular.module('techResult', ['ngSanitize']);
techResult.config(function($sceDelegateProvider) {
$sceDelegateProvider.resourceUrlWhitelist(['self']);
});
techResult.filter('trustUrl', function ($sce) {
return function(url) {
return $sce.trustAsResourceUrl(url);
};
});
techResult.controller('techResultController', ['$scope', '$http', '$filter', '$sce', function($scope, $http, $filter, $sce){
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
//get query for tech note
function getQueryVariable(variable){
var query = window.location.search.substring(1);
var vars = query.split("&");
for (var i=0;i<vars.length;i++) {
var pair = vars[i].split("=");
if(pair[0] == variable){return pair[1];}
}
return(false);
};
$scope.techQuery = getQueryVariable('id');
$scope.techNote = $filter('filter')($scope.techdata, {id: $scope.techQuery })[0];
if($scope.techNote.type === 'GI'){
$scope.pageTitle = "General Information";
}
if($scope.techNote.type === 'COMP'){
$scope.pageTitle = "Comparisons to Our Products";
}
if($scope.techNote.type === 'FAQ'){
$scope.pageTitle = "FAQ";
}
if($scope.techNote.type === 'VIDEO'){
$scope.pageTitle = $scope.techNote.title;
}
if($scope.techNote.type === 'APPNOTE'){
$scope.pageTitle = "Application Note";
}
if($scope.techNote.type === 'PW'){
$scope.pageTitle = "Published Works";
}
if($scope.techNote.linkType === "pdf"){
$scope.PDF = true;
}
if($scope.techNote.linkType === "page"){
window.location = url+"/"+$scope.techNote.link;
}
if($scope.techNote.linkType === "link"){
window.location = url+"/"+$scope.techNote.link;
}
if($scope.techNote.linkType === "mp4"){
$scope.VIDEO = true;
$scope.videoUrl = url + "/wp-content/uploads/video/videos/" + $scope.techNote.link + '.mp4';
}
});
}]);
//=====test page=====
var test = angular.module('test', ['ngSanitize']);
test.controller('testController', ['$scope', '$http', function($scope, $http){
$http.get(url+'/wp-content/themes/TIC/assets/json/products.json').then(function(rslt){
$scope.products = rslt.data;
});
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
});
$scope.welcome = "Hello, sorry but there is no test being conducted on this page at the moment. Possibly the test you were looking for has been moved to production. whatever page you are thinking was going to display is now where it should be on the site.";
}]);
//test also page
var testAlso = angular.module('testAlso',['ngSanitize']);
testAlso.controller('testAlsoController',['$scope','$http', function($scope,$http){
$http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
$scope.techdata = rslt.data;
});
$scope.sortType = "index";
$scope.sortReverse = false;
$scope.key = "";
$scope.value = "";
}]);
| when user is directed to tech page from product the proper option is selected to no confusion on a blank select box
| assets/javascript/app.js | when user is directed to tech page from product the proper option is selected to no confusion on a blank select box | <ide><path>ssets/javascript/app.js
<ide> $http.get(url+'/wp-content/themes/TIC/assets/json/techlibrary.json').then(function(rslt){
<ide> $scope.techdata = rslt.data;
<ide>
<del>var productLineArray = [];
<del>
<del>for(var pl in $scope.techdata){
<del>productLineArray.push($scope.techdata[pl].subProductLine);
<del> }
<add>// var productLineArray = [];
<add>
<add>// for(var pl in $scope.techdata){
<add>// productLineArray.push($scope.techdata[pl].subProductLine);
<add>// }
<ide>
<del>productLineArray = productLineArray.sort();
<del>$scope.productLine = productLineArray.filter(function(elem, index, self){
<del> return index == self.indexOf(elem);
<del>});
<add>// productLineArray = productLineArray.sort();
<add>// $scope.productLine = productLineArray.filter(function(elem, index, self){
<add>// return index == self.indexOf(elem);
<add>// });
<ide>
<ide> });
<ide>
<ide> $scope.GETproduct = getQueryVariable('product');
<ide> if($scope.GETproduct){
<ide> $scope.product = $scope.GETproduct;
<add> $('option[value="'+$scope.product+'"]').attr('selected', 'selected');
<ide> }
<ide>
<ide> }]); |
|
Java | bsd-2-clause | b7fa9c6eade5a8be7869a6831555c265020abf52 | 0 | chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio,chototsu/MikuMikuStudio | /*
* Copyright (c) 2003-2005 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme.math;
import java.util.Random;
/**
* <code>FastMath</code> provides 'fast' math approximations and float equivalents of Math
* functions. These are all used as static values and functions.
*
* @author Various
* @version $Id: FastMath.java,v 1.19 2005-10-03 05:37:04 renanse Exp $
*/
final public class FastMath {
// Private constructor: FastMath is a static utility class and must never be instantiated.
private FastMath(){}
/** A "close to zero" double epsilon value for use*/
// 2^-52: the gap between 1.0 and the next representable double.
public static final double DBL_EPSILON = 2.220446049250313E-16d;
/** A "close to zero" float epsilon value for use*/
// 2^-23: the gap between 1.0f and the next representable float.
public static final float FLT_EPSILON = 1.1920928955078125E-7f;
/** The value PI as a float. */
// Derived as 4 * atan(1) and narrowed to float.
public static final float PI = (float) (4.0 * atan(1.0f));
/** The value 2PI as a float. */
public static final float TWO_PI = 2.0f * PI;
/** The value PI/2 as a float. */
public static final float HALF_PI = 0.5f * PI;
/** The value 1/PI as a float. */
public static final float INV_PI = 1.0f / PI;
/** The value 1/(2PI) as a float. */
public static final float INV_TWO_PI = 1.0f / TWO_PI;
/** A value to multiply a degree value by, to convert it to radians. */
public static final float DEG_TO_RAD = PI / 180.0f;
/** A value to multiply a radian value by, to convert it to degrees. */
public static final float RAD_TO_DEG = 180.0f / PI;
/** A precreated random object for random numbers. */
// Single shared instance, seeded once with the clock at class-load time.
public static final Random rand = new Random(System.currentTimeMillis());
/** If true, fast trig approximations are used for values such as sin/cos/tan. */
// Global switch consulted by sin/cos/tan below; defaults to exact java.lang.Math calls.
public static boolean USE_FAST_TRIG = false;
// Bit trick: a power of two is the only positive value whose binary form
// has a single set bit, so clearing its lowest set bit leaves zero.
/**
 * Tests whether an integer is a positive power of two (1, 2, 4, 8, 16, ...).
 * @param number The number to test.
 * @return True if it is a power of two.
 */
public static boolean isPowerOfTwo(int number) {
    // Zero and negative values are never powers of two.
    if (number <= 0) {
        return false;
    }
    // number & (number - 1) clears the lowest set bit; a power of two
    // has exactly one bit set, so the result must be zero.
    return (number & (number - 1)) == 0;
}
/**
 * Linearly interpolates between v0 and v1 by fraction f: (1 - f) * v0 + f * v1.
 * @param f Percent value to use (0 yields v0, 1 yields v1).
 * @param v0 Beginning value, returned when f is 0.
 * @param v1 Ending value, returned when f is 1.
 * @return An interpolation between v0 and v1.
 */
public static float LERP(float f, float v0, float v1) {
    // Weight of the starting value; the two weights always sum to 1.
    float startWeight = 1 - f;
    return startWeight * v0 + f * v1;
}
/**
 * Returns the arc cosine of a value, clamping out-of-range input.<br>
 * Special cases:
 * <ul><li>If fValue is -1 or smaller, the result is PI.
 * <li>If fValue is 1 or greater, the result is 0.</ul>
 * @param fValue The cosine value, expected in [-1, 1].
 * @return fValue's acos, in radians.
 * @see java.lang.Math#acos(double)
 */
public static float acos(float fValue) {
    // Negated comparisons keep the original NaN handling: NaN fails
    // the first comparison and therefore also yields PI.
    if (!(-1.0f < fValue)) {
        return PI;
    }
    if (!(fValue < 1.0f)) {
        return 0.0f;
    }
    return (float) Math.acos((double) fValue);
}
/**
 * Returns the arc sine of a value, clamping out-of-range input.<br>
 * Special cases:
 * <ul><li>If fValue is -1 or smaller, the result is -HALF_PI.
 * <li>If fValue is 1 or greater, the result is HALF_PI.</ul>
 * @param fValue The sine value, expected in [-1, 1].
 * @return fValue's asin, in radians.
 * @see java.lang.Math#asin(double)
 */
public static float asin(float fValue) {
    // Negated comparisons keep the original NaN handling: NaN fails
    // the first comparison and therefore also yields -HALF_PI.
    if (!(-1.0f < fValue)) {
        return -HALF_PI;
    }
    if (!(fValue < 1.0f)) {
        return HALF_PI;
    }
    return (float) Math.asin((double) fValue);
}
/**
 * Returns the arc tangent of a value, delegating to java.lang.Math.
 * @param fValue The tangent value.
 * @return fValue's atan, in radians.
 * @see java.lang.Math#atan(double)
 */
public static float atan(float fValue) {
    double result = Math.atan(fValue);
    return (float) result;
}
/**
 * Computes the angle of the point (fX, fY) from the positive x-axis;
 * a direct delegation to Math.atan2.
 * @param fY The ordinate (y coordinate).
 * @param fX The abscissa (x coordinate).
 * @return Math.atan2(fY, fX), narrowed to float.
 * @see java.lang.Math#atan2(double, double)
 */
public static float atan2(float fY, float fX) {
    double angle = Math.atan2(fY, fX);
    return (float) angle;
}
/**
 * Rounds a value up to the nearest whole number. Delegates to Math.ceil.
 * @param fValue The value.
 * @return The value rounded up, as a float.
 * @see java.lang.Math#ceil(double)
 */
public static float ceil(float fValue) {
    double rounded = Math.ceil(fValue);
    return (float) rounded;
}
/**
 * Returns the cosine of an angle. When USE_FAST_TRIG is enabled the
 * FastTrig approximation is used; otherwise java.lang.Math is called.
 * @param fValue The angle to cosine, in radians.
 * @return The cosine of fValue.
 * @see java.lang.Math#cos(double)
 */
public static float cos(float fValue) {
    return USE_FAST_TRIG
            ? FastTrig.cos(fValue)
            : (float) Math.cos((double) fValue);
}
/**
 * Raises E to the given power. Delegates to Math.exp.
 * @param fValue The exponent.
 * @return E^fValue as a float.
 * @see java.lang.Math#exp(double)
 */
public static float exp(float fValue) {
    double result = Math.exp(fValue);
    return (float) result;
}
/**
 * Returns the absolute value of a float.
 * @param fValue The value to take the absolute value of.
 * @return The absolute value.
 * @see java.lang.Math#abs(float)
 */
public static float abs(float fValue) {
    // Ternary form of the original branch. Note -0.0f compares equal to 0
    // and is returned unchanged, exactly as the branching version did.
    return (fValue < 0) ? -fValue : fValue;
}
/**
 * Rounds a value down to the nearest whole number. Delegates to Math.floor.
 * @param fValue The value to round.
 * @return The value rounded down, as a float.
 * @see java.lang.Math#floor(double)
 */
public static float floor(float fValue) {
    double rounded = Math.floor(fValue);
    return (float) rounded;
}
/**
 * Computes the inverse square root, 1/sqrt(fValue).
 * @param fValue The value to process.
 * @return 1/sqrt(fValue) as a float.
 * @see java.lang.Math#sqrt(double)
 */
public static float invSqrt(float fValue) {
    // Take the root in double precision, then invert before narrowing.
    double root = Math.sqrt(fValue);
    return (float) (1.0 / root);
}
/**
 * Returns the natural logarithm (base E) of a value. Delegates to Math.log.
 * @param fValue The value to take the log of.
 * @return The natural log of fValue.
 * @see java.lang.Math#log(double)
 */
public static float log(float fValue) {
    double result = Math.log(fValue);
    return (float) result;
}
/**
* Returns a number raised to an exponent power. fBase^fExponent
* @param fBase The base value (IE 2)
* @param fExponent The exponent value (IE 3)
* @return base raised to exponent (IE 8)
* @see java.lang.Math#pow(double, double)
*/
public static float pow(float fBase, float fExponent) {
return (float) Math.pow((double) fBase, (double) fExponent);
}
/**
* Returns sine of a value. If USE_FAST_TRIG is enabled, an approximate value is returned.
* Otherwise, a direct value is used.
* @param fValue The value to sine, in raidans.
* @return The sine of fValue.
* @see java.lang.Math#sin(double)
*/
public static float sin(float fValue) {
if (USE_FAST_TRIG)
return FastTrig.sin(fValue);
else
return (float) Math.sin((double) fValue);
}
/**
* Returns the value squared. fValue ^ 2
* @param fValue The vaule to square.
* @return The square of the given value.
*/
public static float sqr(float fValue) {
return fValue * fValue;
}
/**
* Returns the square root of a given value.
* @param fValue The value to sqrt.
* @return The square root of the given value.
* @see java.lang.Math#sqrt(double)
*/
public static float sqrt(float fValue) {
return (float) Math.sqrt((double) fValue);
}
/**
* Returns the tangent of a value. If USE_FAST_TRIG is enabled, an approximate value
* is returned. Otherwise, a direct value is used.
* @param fValue The value to tangent, in raidans.
* @return The tangent of fValue.
* @see java.lang.Math#tan(double)
*/
public static float tan(float fValue) {
if (USE_FAST_TRIG)
return FastTrig.tan(fValue);
else
return (float) Math.tan((double) fValue);
}
/**
* Returns 1 if the number is positive, -1 if the number is negative, and 0 otherwise
* @param iValue The integer to examine.
* @return The integer's sign.
*/
public static int sign(int iValue) {
if (iValue > 0) return 1;
if (iValue < 0) return -1;
return 0;
}
/**
* Returns 1 if the number is positive, -1 if the number is negative, and 0 otherwise
* @param fValue The float to examine.
* @return The float's sign.
*/
public static float sign(float fValue) {
if (fValue > 0.0f) return 1.0f;
if (fValue < 0.0f) return -1.0f;
return 0.0f;
}
/** */
public static float logGamma(float fX) {
float afCoeff[] = { +76.18009173f, -86.50532033f, +24.01409822f,
-1.231739516f, +(float) 0.120858003e-02,
-(float) 0.536382000e-05};
fX -= 1.0f;
float fTmp = fX + 5.5f;
fTmp -= (fX + 0.5f) * log(fTmp);
float fSeries = 1.0f;
for (int j = 0; j <= 5; j++) {
fX += 1.0f;
fSeries += afCoeff[j] / fX;
}
return -fTmp + log((2.50662827465f) * fSeries);
}
/** */
public static float gamma(float fX) {
return exp(logGamma(fX));
}
/** */
public static float incompleteGammaS(float fA, float fX) {
int iMaxIterations = 100;
float fTolerance = (float) 3e-07;
if (fX > 0.0f) {
float fAp = fA;
float fSum = (1.0f) / fA, fDel = fSum;
for (int i = 1; i <= iMaxIterations; i++) {
fAp += 1.0f;
fDel *= fX / fAp;
fSum += fDel;
if (abs(fDel) < abs(fSum) * fTolerance) {
float fArg = -fX + fA * log(fX) - logGamma(fA);
return fSum * exp(fArg);
}
}
}
if (fX == 0.0f) return 0.0f;
return Float.MAX_VALUE; // LogGamma not defined for x < 0
}
/** */
public static float incompleteGammaCF(float fA, float fX) {
int iMaxIterations = 100;
float fTolerance = (float) 3e-07;
float fA0 = 1.0f, fA1 = fX;
float fB0 = 0, fB1 = 1.0f;
float fGold = 0.0f, fFac = 1.0f;
for (int i = 1; i <= iMaxIterations; i++) {
float fI = (float) i;
float fImA = fI - fA;
fA0 = (fA1 + fA0 * fImA) * fFac;
fB0 = (fB1 + fB0 * fImA) * fFac;
float fItF = fI * fFac;
fA1 = fX * fA0 + fItF * fA1;
fB1 = fX * fB0 + fItF * fB1;
if (fA1 != 0.0f) {
fFac = (1.0f) / fA1;
float fG = fB1 * fFac;
if (abs((fG - fGold) / fG) < fTolerance) {
float fArg = -fX + fA * log(fX) - logGamma(fA);
return fG * exp(fArg);
}
fGold = fG;
}
}
return Float.MAX_VALUE; // numerical error if you get here
}
/** */
public static float incompleteGamma(float fA, float fX) {
if (fX < 1.0f + fA)
return incompleteGammaS(fA, fX);
else
return 1.0f - incompleteGammaCF(fA, fX);
}
/** */
public static float erf(float fX) {
return 1.0f - erfc(fX);
}
/** */
public static float erfc(float fX) {
float afCoeff[] = { -1.26551223f, +1.00002368f, +0.37409196f,
+0.09678418f, -0.18628806f, +0.27886807f, -1.13520398f,
+1.48851587f, -0.82215223f, +0.17087277f};
float fZ = abs(fX);
float fT = (1.0f) / (1.0f + (0.5f) * fZ);
float fSum = afCoeff[9];
for (int i = 9; i >= 0; i--)
fSum = fT * fSum + afCoeff[i];
float fResult = fT * exp(-fZ * fZ + fSum);
return (fX >= 0.0f ? fResult : 2.0f - fResult);
}
/** */
public static float modBessel0(float fX) {
if (fX < 0.0f) // function is even
fX = -fX;
float fT, fResult;
int i;
if (fX <= 3.75f) {
float afCoeff[] = { 1.0000000f, 3.5156229f, 3.0899424f, 1.2067492f,
0.2659732f, 0.0360768f, 0.0045813f};
fT = fX / 3.75f;
float fT2 = fT * fT;
fResult = afCoeff[6];
for (i = 5; i >= 0; i--) {
fResult *= fT2;
fResult += afCoeff[i];
}
// |error| < 1.6e-07
} else {
float afCoeff[] = { +0.39894228f, +0.01328592f, +0.00225319f,
-0.00157565f, +0.00916281f, -0.02057706f, +0.02635537f,
-0.01647633f, +0.00392377f};
fT = fX / 3.75f;
float fInvT = (1.0f) / fT;
fResult = afCoeff[8];
for (i = 7; i >= 0; i--) {
fResult *= fInvT;
fResult += afCoeff[i];
}
fResult *= exp(fX);
fResult /= sqrt(fX);
// |error| < 1.9e-07
}
return fResult;
}
/** */
public static float modBessel1(float fX) {
int iSign;
if (fX > 0.0f) {
iSign = 1;
} else if (fX < 0.0f) {
fX = -fX;
iSign = -1;
} else {
return 0.0f;
}
float fT, fResult;
int i;
if (fX <= 3.75f) {
float afCoeff[] = { 0.50000000f, 0.87890549f, 0.51498869f,
0.15084934f, 0.02658733f, 0.00301532f, 0.00032411f};
fT = fX / 3.75f;
float fT2 = fT * fT;
fResult = afCoeff[6];
for (i = 5; i >= 0; i--) {
fResult *= fT2;
fResult += afCoeff[i];
}
fResult *= fX;
// |error| < 8e-09
} else {
float afCoeff[] = { +0.39894228f, -0.03988024f, -0.00362018f,
+0.00163801f, -0.01031555f, +0.02282967f, -0.02895312f,
+0.01787654f, -0.00420059f};
fT = fX / 3.75f;
float fInvT = (1.0f) / fT;
fResult = afCoeff[8];
for (i = 7; i >= 0; i--) {
fResult *= fInvT;
fResult += afCoeff[i];
}
fResult *= exp(fX);
fResult /= sqrt(fX);
// |error| < 2.2e-07
}
fResult *= iSign;
return fResult;
}
/**
* Given 3 points in a 2d plane, this function computes if the points going from A-B-C
* are moving counter clock wise.
* @param p0 Point 0.
* @param p1 Point 1.
* @param p2 Point 2.
* @return 1 If they are CCW, -1 if they are not CCW, 0 if p2 is between p0 and p1.
*/
public static int ccw(Vector2f p0,Vector2f p1,Vector2f p2){
float dx1,dx2,dy1,dy2;
dx1=p1.x-p0.x;
dy1=p1.y-p0.y;
dx2=p2.x-p0.x;
dy2=p2.y-p0.y;
if (dx1*dy2>dy1*dx2) return 1;
if (dx1*dy2<dy1*dx2) return -1;
if ((dx1*dx2 < 0) || (dy1*dy2 <0)) return -1;
if ((dx1*dx1+dy1*dy1) < (dx2*dx2+dy2*dy2)) return 1;
return 0;
}
/**
* Test if a point is inside a triangle. 1 if the point is on the ccw side,
* -1 if the point is on the cw side, and 0 if it is on neither.
* @param t0 First point of the triangle.
* @param t1 Second point of the triangle.
* @param t2 Third point of the triangle.
* @param p The point to test.
* @return Value 1 or -1 if inside triangle, 0 otherwise.
*/
public static int pointInsideTriangle(Vector2f t0,Vector2f t1,Vector2f t2,Vector2f p){
int val1=ccw(t0,t1,p);
if (val1==0) return 1;
int val2=ccw(t1,t2,p);
if (val2==0) return 1;
if (val2!=val1) return 0;
int val3=ccw(t2,t0,p);
if (val3==0) return 1;
if (val3!=val1) return 0;
return val3;
}
/**
* Returns the determinant of a 4x4 matrix.
*/
public static float determinant(double m00,double m01,double m02,double m03, double m10,double m11,double m12,double m13,
double m20,double m21,double m22,double m23,double m30,double m31,double m32,double m33) {
double value;
value =
m03 * m12 * m21 * m30-m02 * m13 * m21 * m30-m03 * m11 * m22 * m30+m01 * m13 * m22 * m30+
m02 * m11 * m23 * m30-m01 * m12 * m23 * m30-m03 * m12 * m20 * m31+m02 * m13 * m20 * m31+
m03 * m10 * m22 * m31-m00 * m13 * m22 * m31-m02 * m10 * m23 * m31+m00 * m12 * m23 * m31+
m03 * m11 * m20 * m32-m01 * m13 * m20 * m32-m03 * m10 * m21 * m32+m00 * m13 * m21 * m32+
m01 * m10 * m23 * m32-m00 * m11 * m23 * m32-m02 * m11 * m20 * m33+m01 * m12 * m20 * m33+
m02 * m10 * m21 * m33-m00 * m12 * m21 * m33-m01 * m10 * m22 * m33+m00 * m11 * m22 * m33;
return (float) value;
}
/**
* Returns a random float between 0 and 1.
* @return A random float between 0 and 1.
*/
public static float nextRandomFloat() {
return rand.nextFloat();
}
/**
* Converts a point from spherical coordinates to cartesian and stores the
* results in the store var.
*/
public static Vector3f sphericalToCartesian(Vector3f sphereCoords,
Vector3f store) {
store.y = sphereCoords.x * FastMath.sin(sphereCoords.z);
float a = sphereCoords.x * FastMath.cos(sphereCoords.z);
store.x = a * FastMath.cos(sphereCoords.y);
store.z = a * FastMath.sin(sphereCoords.y);
return store;
}
/**
* Converts a point from cartesian coordinates to spherical and stores the
* results in the store var.
*/
public static Vector3f cartesianToSpherical(Vector3f cartCoords,
Vector3f store) {
if (cartCoords.x == 0)
cartCoords.x = FastMath.FLT_EPSILON;
store.x = FastMath
.sqrt((cartCoords.x * cartCoords.x)
+ (cartCoords.y * cartCoords.y)
+ (cartCoords.z * cartCoords.z));
store.y = FastMath.atan(cartCoords.z / cartCoords.x);
if (cartCoords.x < 0)
store.y += FastMath.PI;
store.z = FastMath.asin(cartCoords.y / store.x);
return store;
}
/**
* Takes an angle (in radians) and expresses it in terms of -2pi to 2pi.
*
* @param r -
* the angle to normalize (in radians)
* @return the normalized angle (also in radians)
*/
public static float normalizeAngle(float r) {
if (Float.isInfinite(r) || Float.isNaN(r))
return 0f;
while (r > FastMath.TWO_PI)
r -= FastMath.TWO_PI;
while (r < -FastMath.TWO_PI)
r += FastMath.TWO_PI;
return r;
}
/**
* Takes an angle (in radians) and expresses it in terms of -pi to pi.
*
* @param r -
* the angle to normalize (in radians)
* @return the normalized angle (also in radians)
*/
public static float normalizeHalfAngle(float r) {
if (Float.isInfinite(r) || Float.isNaN(r))
return 0f;
while (r > FastMath.PI)
r -= FastMath.PI;
while (r < -FastMath.PI)
r += FastMath.PI;
return r;
}
/**
* FastTrig is used to calculate quick trig functions using a lookup table.
*
* @author Erikd
* @author Jack Lindamood (javadoc only)
*/
static public class FastTrig {
/** The size of the lookup table. The bigger, the more accurate. */
public static int PRECISION = 0x100000;
private static float RAD_SLICE = TWO_PI / PRECISION, sinTable[] = null,
tanTable[] = null;
static {
RAD_SLICE = TWO_PI / PRECISION;
sinTable = new float[PRECISION];
tanTable = new float[PRECISION];
float rad = 0;
for (int i = 0; i < PRECISION; i++) {
rad = (float) i * RAD_SLICE;
sinTable[i] = (float) java.lang.Math.sin(rad);
tanTable[i] = (float) java.lang.Math.tan(rad);
}
}
private static final int radToIndex(float radians) {
return (int) ((radians / TWO_PI) * (float) PRECISION)
& (PRECISION - 1);
}
/**
* Returns the sine of a given value, by looking up it's approximation in a
* precomputed table.
* @param radians The value to sine.
* @return The approximation of the value's sine.
*/
public static float sin(float radians) {
return sinTable[radToIndex(radians)];
}
/**
* Returns the cosine of a given value, by looking up it's approximation in a
* precomputed table.
* @param radians The value to cosine.
* @return The approximation of the value's cosine.
*/
public static float cos(float radians) {
return sinTable[radToIndex(HALF_PI-radians)];
}
/**
* Returns the tangent of a given value, by looking up it's approximation in a
* precomputed table.
* @param radians The value to tan.
* @return The approximation of the value's tangent.
*/
public static float tan(float radians) {
return tanTable[radToIndex(radians)];
}
}
}
| src/com/jme/math/FastMath.java | /*
* Copyright (c) 2003-2005 jMonkeyEngine
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of 'jMonkeyEngine' nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.jme.math;
import java.util.Random;
/**
* <code>FastMath</code> provides 'fast' math approximations and float equivalents of Math
* functions. These are all used as static values and functions.
*
* @author Various
* @version $Id: FastMath.java,v 1.18 2005-09-20 19:33:37 renanse Exp $
*/
final public class FastMath {
private FastMath(){}
/** A "close to zero" double epsilon value for use*/
public static final double DBL_EPSILON = 2.220446049250313E-16d;
/** A "close to zero" float epsilon value for use*/
public static final float FLT_EPSILON = 1.1920928955078125E-7f;
/** The value PI as a float. */
public static final float PI = (float) (4.0 * atan(1.0f));
/** The value 2PI as a float. */
public static final float TWO_PI = 2.0f * PI;
/** The value PI/2 as a float. */
public static final float HALF_PI = 0.5f * PI;
/** The value 1/PI as a float. */
public static final float INV_PI = 1.0f / PI;
/** The value 1/(2PI) as a float. */
public static final float INV_TWO_PI = 1.0f / TWO_PI;
/** A value to multiply a degree value by, to convert it to radians. */
public static final float DEG_TO_RAD = PI / 180.0f;
/** A value to multiply a radian value by, to convert it to degrees. */
public static final float RAD_TO_DEG = 180.0f / PI;
/** A precreated random object for random numbers. */
public static final Random rand = new Random(System.currentTimeMillis());
/** If true, fast trig approximations are used for values such as sin/cos/tan. */
public static boolean USE_FAST_TRIG = false;
// A good implementation found on the Java boards.
// note: a number is a power of two if and only if it is the smallest number
// with that number of significant bits. Therefore, if you subtract 1,
// you know that the new number will have fewer bits, so ANDing the original
// number
// with anything less than it will give 0.
/**
* Returns true if the number is a power of 2 (2,4,8,16...)
* @param number The number to test.
* @return True if it is a power of two.
*/
public static boolean isPowerOfTwo(int number) {
return (number > 0) && (number & (number - 1)) == 0;
}
/**
* Linear interpolation from v0 to v1 by f percent. IE (1-f) * V0 + f * V1
* @param f Percent value to use.
* @param v0 Begining value. 0% of f
* @param v1 ending value. 100% of f
* @return An interpolation between v0 and v1.
*/
public static float LERP(float f, float v0, float v1) {
return ( (1 - (f)) * (v0) + (f) * (v1));
}
/**
* Returns the arc cosine of an angle given in radians.<br>
* Special cases:
* <ul><li>If fValue is smaller than -1, then the result is PI.
* <li>If the argument is greater than 1, then the result is 0.</ul>
* @param fValue The angle, in radians.
* @return fValue's acos
* @see java.lang.Math#acos(double)
*/
public static float acos(float fValue) {
if (-1.0f < fValue) {
if (fValue < 1.0f)
return (float) Math.acos((double) fValue);
else
return 0.0f;
} else {
return PI;
}
}
/**
* Returns the arc sine of an angle given in radians.<br>
* Special cases:
* <ul><li>If fValue is smaller than -1, then the result is -HALF_PI.
* <li>If the argument is greater than 1, then the result is HALF_PI.</ul>
* @param fValue The angle, in radians.
* @return fValue's asin
* @see java.lang.Math#asin(double)
*/
public static float asin(float fValue) {
if (-1.0f < fValue) {
if (fValue < 1.0f)
return (float) Math.asin((double) fValue);
else
return HALF_PI;
} else {
return -HALF_PI;
}
}
/**
* Returns the arc tangent of an angle given in radians.<br>
* @param fValue The angle, in radians.
* @return fValue's asin
* @see java.lang.Math#atan(double)
*/
public static float atan(float fValue) {
return (float) Math.atan((double) fValue);
}
/**
* A direct call to Math.atan2.
* @param fY
* @param fX
* @return Math.atan2(fY,fX)
* @see java.lang.Math#atan2(double, double)
*/
public static float atan2(float fY, float fX) {
return (float) Math.atan2((double) fY, (double) fX);
}
/**
* Rounds a fValue up. A call to Math.ceil
* @param fValue The value.
* @return The fValue rounded up
* @see java.lang.Math#ceil(double)
*/
public static float ceil(float fValue) {
return (float) Math.ceil((double) fValue);
}
/**
* Returns cos of a value. If USE_FAST_TRIG is enabled, an approximate value is returned.
* Otherwise, a direct value is used.
* @param fValue The value to cosine, in raidans.
* @return The cosine of fValue.
* @see java.lang.Math#cos(double)
*/
public static float cos(float fValue) {
if (USE_FAST_TRIG)
return FastTrig.cos(fValue);
else
return (float) Math.cos((double) fValue);
}
/**
* Returns E^fValue
* @param fValue Value to raise to a power.
* @return The value E^fValue
* @see java.lang.Math#exp(double)
*/
public static float exp(float fValue) {
return (float) Math.exp((double) fValue);
}
/**
* Returns Absolute value of a float.
* @param fValue The value to abs.
* @return The abs of the value.
* @see java.lang.Math#abs(float)
*/
public static float abs(float fValue) {
if (fValue < 0) return -fValue;
else return fValue;
}
/**
* Returns a number rounded down.
* @param fValue The value to round
* @return The given number rounded down
* @see java.lang.Math#floor(double)
*/
public static float floor(float fValue) {
return (float) Math.floor((double) fValue);
}
/**
* Returns 1/sqrt(fValue)
* @param fValue The value to process.
* @return 1/sqrt(fValue)
* @see java.lang.Math#sqrt(double)
*/
public static float invSqrt(float fValue) {
return (float) (1.0 / Math.sqrt((double) fValue));
}
/**
* Returns the log base E of a value.
* @param fValue The value to log.
* @return The log of fValue base E
* @see java.lang.Math#log(double)
*/
public static float log(float fValue) {
return (float) Math.log((double) fValue);
}
/**
* Returns a number raised to an exponent power. fBase^fExponent
* @param fBase The base value (IE 2)
* @param fExponent The exponent value (IE 3)
* @return base raised to exponent (IE 8)
* @see java.lang.Math#pow(double, double)
*/
public static float pow(float fBase, float fExponent) {
return (float) Math.pow((double) fBase, (double) fExponent);
}
/**
* Returns sine of a value. If USE_FAST_TRIG is enabled, an approximate value is returned.
* Otherwise, a direct value is used.
* @param fValue The value to sine, in raidans.
* @return The sine of fValue.
* @see java.lang.Math#sin(double)
*/
public static float sin(float fValue) {
if (USE_FAST_TRIG)
return FastTrig.sin(fValue);
else
return (float) Math.sin((double) fValue);
}
/**
* Returns the value squared. fValue ^ 2
* @param fValue The vaule to square.
* @return The square of the given value.
*/
public static float sqr(float fValue) {
return fValue * fValue;
}
/**
* Returns the square root of a given value.
* @param fValue The value to sqrt.
* @return The square root of the given value.
* @see java.lang.Math#sqrt(double)
*/
public static float sqrt(float fValue) {
return (float) Math.sqrt((double) fValue);
}
/**
* Returns the tangent of a value. If USE_FAST_TRIG is enabled, an approximate value
* is returned. Otherwise, a direct value is used.
* @param fValue The value to tangent, in raidans.
* @return The tangent of fValue.
* @see java.lang.Math#tan(double)
*/
public static float tan(float fValue) {
if (USE_FAST_TRIG)
return FastTrig.tan(fValue);
else
return (float) Math.tan((double) fValue);
}
/**
* Returns 1 if the number is positive, -1 if the number is negative, and 0 otherwise
* @param iValue The integer to examine.
* @return The integer's sign.
*/
public static int sign(int iValue) {
if (iValue > 0) return 1;
if (iValue < 0) return -1;
return 0;
}
/**
* Returns 1 if the number is positive, -1 if the number is negative, and 0 otherwise
* @param fValue The float to examine.
* @return The float's sign.
*/
public static float sign(float fValue) {
if (fValue > 0.0f) return 1.0f;
if (fValue < 0.0f) return -1.0f;
return 0.0f;
}
/** */
public static float logGamma(float fX) {
float afCoeff[] = { +76.18009173f, -86.50532033f, +24.01409822f,
-1.231739516f, +(float) 0.120858003e-02,
-(float) 0.536382000e-05};
fX -= 1.0f;
float fTmp = fX + 5.5f;
fTmp -= (fX + 0.5f) * log(fTmp);
float fSeries = 1.0f;
for (int j = 0; j <= 5; j++) {
fX += 1.0f;
fSeries += afCoeff[j] / fX;
}
return -fTmp + log((2.50662827465f) * fSeries);
}
/** */
public static float gamma(float fX) {
return exp(logGamma(fX));
}
/** */
public static float incompleteGammaS(float fA, float fX) {
int iMaxIterations = 100;
float fTolerance = (float) 3e-07;
if (fX > 0.0f) {
float fAp = fA;
float fSum = (1.0f) / fA, fDel = fSum;
for (int i = 1; i <= iMaxIterations; i++) {
fAp += 1.0f;
fDel *= fX / fAp;
fSum += fDel;
if (abs(fDel) < abs(fSum) * fTolerance) {
float fArg = -fX + fA * log(fX) - logGamma(fA);
return fSum * exp(fArg);
}
}
}
if (fX == 0.0f) return 0.0f;
return Float.MAX_VALUE; // LogGamma not defined for x < 0
}
/** */
public static float incompleteGammaCF(float fA, float fX) {
int iMaxIterations = 100;
float fTolerance = (float) 3e-07;
float fA0 = 1.0f, fA1 = fX;
float fB0 = 0, fB1 = 1.0f;
float fGold = 0.0f, fFac = 1.0f;
for (int i = 1; i <= iMaxIterations; i++) {
float fI = (float) i;
float fImA = fI - fA;
fA0 = (fA1 + fA0 * fImA) * fFac;
fB0 = (fB1 + fB0 * fImA) * fFac;
float fItF = fI * fFac;
fA1 = fX * fA0 + fItF * fA1;
fB1 = fX * fB0 + fItF * fB1;
if (fA1 != 0.0f) {
fFac = (1.0f) / fA1;
float fG = fB1 * fFac;
if (abs((fG - fGold) / fG) < fTolerance) {
float fArg = -fX + fA * log(fX) - logGamma(fA);
return fG * exp(fArg);
}
fGold = fG;
}
}
return Float.MAX_VALUE; // numerical error if you get here
}
/** */
public static float incompleteGamma(float fA, float fX) {
if (fX < 1.0f + fA)
return incompleteGammaS(fA, fX);
else
return 1.0f - incompleteGammaCF(fA, fX);
}
/** */
public static float erf(float fX) {
return 1.0f - erfc(fX);
}
/** */
public static float erfc(float fX) {
float afCoeff[] = { -1.26551223f, +1.00002368f, +0.37409196f,
+0.09678418f, -0.18628806f, +0.27886807f, -1.13520398f,
+1.48851587f, -0.82215223f, +0.17087277f};
float fZ = abs(fX);
float fT = (1.0f) / (1.0f + (0.5f) * fZ);
float fSum = afCoeff[9];
for (int i = 9; i >= 0; i--)
fSum = fT * fSum + afCoeff[i];
float fResult = fT * exp(-fZ * fZ + fSum);
return (fX >= 0.0f ? fResult : 2.0f - fResult);
}
/** */
public static float modBessel0(float fX) {
if (fX < 0.0f) // function is even
fX = -fX;
float fT, fResult;
int i;
if (fX <= 3.75f) {
float afCoeff[] = { 1.0000000f, 3.5156229f, 3.0899424f, 1.2067492f,
0.2659732f, 0.0360768f, 0.0045813f};
fT = fX / 3.75f;
float fT2 = fT * fT;
fResult = afCoeff[6];
for (i = 5; i >= 0; i--) {
fResult *= fT2;
fResult += afCoeff[i];
}
// |error| < 1.6e-07
} else {
float afCoeff[] = { +0.39894228f, +0.01328592f, +0.00225319f,
-0.00157565f, +0.00916281f, -0.02057706f, +0.02635537f,
-0.01647633f, +0.00392377f};
fT = fX / 3.75f;
float fInvT = (1.0f) / fT;
fResult = afCoeff[8];
for (i = 7; i >= 0; i--) {
fResult *= fInvT;
fResult += afCoeff[i];
}
fResult *= exp(fX);
fResult /= sqrt(fX);
// |error| < 1.9e-07
}
return fResult;
}
/** */
public static float modBessel1(float fX) {
int iSign;
if (fX > 0.0f) {
iSign = 1;
} else if (fX < 0.0f) {
fX = -fX;
iSign = -1;
} else {
return 0.0f;
}
float fT, fResult;
int i;
if (fX <= 3.75f) {
float afCoeff[] = { 0.50000000f, 0.87890549f, 0.51498869f,
0.15084934f, 0.02658733f, 0.00301532f, 0.00032411f};
fT = fX / 3.75f;
float fT2 = fT * fT;
fResult = afCoeff[6];
for (i = 5; i >= 0; i--) {
fResult *= fT2;
fResult += afCoeff[i];
}
fResult *= fX;
// |error| < 8e-09
} else {
float afCoeff[] = { +0.39894228f, -0.03988024f, -0.00362018f,
+0.00163801f, -0.01031555f, +0.02282967f, -0.02895312f,
+0.01787654f, -0.00420059f};
fT = fX / 3.75f;
float fInvT = (1.0f) / fT;
fResult = afCoeff[8];
for (i = 7; i >= 0; i--) {
fResult *= fInvT;
fResult += afCoeff[i];
}
fResult *= exp(fX);
fResult /= sqrt(fX);
// |error| < 2.2e-07
}
fResult *= iSign;
return fResult;
}
/**
* Given 3 points in a 2d plane, this function computes if the points going from A-B-C
* are moving counter clock wise.
* @param p0 Point 0.
* @param p1 Point 1.
* @param p2 Point 2.
* @return 1 If they are CCW, -1 if they are not CCW, 0 if p2 is between p0 and p1.
*/
public static int ccw(Vector2f p0,Vector2f p1,Vector2f p2){
float dx1,dx2,dy1,dy2;
dx1=p1.x-p0.x;
dy1=p1.y-p0.y;
dx2=p2.x-p0.x;
dy2=p2.y-p0.y;
if (dx1*dy2>dy1*dx2) return 1;
if (dx1*dy2<dy1*dx2) return -1;
if ((dx1*dx2 < 0) || (dy1*dy2 <0)) return -1;
if ((dx1*dx1+dy1*dy1) < (dx2*dx2+dy2*dy2)) return 1;
return 0;
}
/**
* Test if a point is inside a triangle. 1 if the point is on the ccw side,
* -1 if the point is on the cw side, and 0 if it is on neither.
* @param t0 First point of the triangle.
* @param t1 Second point of the triangle.
* @param t2 Third point of the triangle.
* @param p The point to test.
* @return Value 1 or -1 if inside triangle, 0 otherwise.
*/
public static int pointInsideTriangle(Vector2f t0,Vector2f t1,Vector2f t2,Vector2f p){
int val1=ccw(t0,t1,p);
if (val1==0) return 1;
int val2=ccw(t1,t2,p);
if (val2==0) return 1;
if (val2!=val1) return 0;
int val3=ccw(t2,t0,p);
if (val3==0) return 1;
if (val3!=val1) return 0;
return val3;
}
/**
* Returns the determinant of a 4x4 matrix.
*/
public static float determinant(double m00,double m01,double m02,double m03, double m10,double m11,double m12,double m13,
double m20,double m21,double m22,double m23,double m30,double m31,double m32,double m33) {
double value;
value =
m03 * m12 * m21 * m30-m02 * m13 * m21 * m30-m03 * m11 * m22 * m30+m01 * m13 * m22 * m30+
m02 * m11 * m23 * m30-m01 * m12 * m23 * m30-m03 * m12 * m20 * m31+m02 * m13 * m20 * m31+
m03 * m10 * m22 * m31-m00 * m13 * m22 * m31-m02 * m10 * m23 * m31+m00 * m12 * m23 * m31+
m03 * m11 * m20 * m32-m01 * m13 * m20 * m32-m03 * m10 * m21 * m32+m00 * m13 * m21 * m32+
m01 * m10 * m23 * m32-m00 * m11 * m23 * m32-m02 * m11 * m20 * m33+m01 * m12 * m20 * m33+
m02 * m10 * m21 * m33-m00 * m12 * m21 * m33-m01 * m10 * m22 * m33+m00 * m11 * m22 * m33;
return (float) value;
}
/**
* Returns a random float between 0 and 1.
* @return A random float between 0 and 1.
*/
public static float nextRandomFloat() {
return rand.nextFloat();
}
/**
* FastTrig is used to calculate quick trig functions using a lookup table.
*
* @author Erikd
* @author Jack Lindamood (javadoc only)
*/
static public class FastTrig {
/** The size of the lookup table. The bigger, the more accurate. */
public static int PRECISION = 0x100000;
private static float RAD_SLICE = TWO_PI / PRECISION, sinTable[] = null,
tanTable[] = null;
static {
RAD_SLICE = TWO_PI / PRECISION;
sinTable = new float[PRECISION];
tanTable = new float[PRECISION];
float rad = 0;
for (int i = 0; i < PRECISION; i++) {
rad = (float) i * RAD_SLICE;
sinTable[i] = (float) java.lang.Math.sin(rad);
tanTable[i] = (float) java.lang.Math.tan(rad);
}
}
private static final int radToIndex(float radians) {
return (int) ((radians / TWO_PI) * (float) PRECISION)
& (PRECISION - 1);
}
/**
* Returns the sine of a given value, by looking up it's approximation in a
* precomputed table.
* @param radians The value to sine.
* @return The approximation of the value's sine.
*/
public static float sin(float radians) {
return sinTable[radToIndex(radians)];
}
/**
* Returns the cosine of a given value, by looking up it's approximation in a
* precomputed table.
* @param radians The value to cosine.
* @return The approximation of the value's cosine.
*/
public static float cos(float radians) {
return sinTable[radToIndex(HALF_PI-radians)];
}
/**
* Returns the tangent of a given value, by looking up it's approximation in a
* precomputed table.
* @param radians The value to tan.
* @return The approximation of the value's tangent.
*/
public static float tan(float radians) {
return tanTable[radToIndex(radians)];
}
}
}
| new spherical methods
git-svn-id: 5afc437a751a4ff2ced778146f5faadda0b504ab@2400 75d07b2b-3a1a-0410-a2c5-0572b91ccdca
| src/com/jme/math/FastMath.java | new spherical methods | <ide><path>rc/com/jme/math/FastMath.java
<ide> * functions. These are all used as static values and functions.
<ide> *
<ide> * @author Various
<del> * @version $Id: FastMath.java,v 1.18 2005-09-20 19:33:37 renanse Exp $
<add> * @version $Id: FastMath.java,v 1.19 2005-10-03 05:37:04 renanse Exp $
<ide> */
<ide>
<ide> final public class FastMath {
<ide> }
<ide>
<ide> /**
<add> * Converts a point from spherical coordinates to cartesian and stores the
<add> * results in the store var.
<add> */
<add> public static Vector3f sphericalToCartesian(Vector3f sphereCoords,
<add> Vector3f store) {
<add> store.y = sphereCoords.x * FastMath.sin(sphereCoords.z);
<add> float a = sphereCoords.x * FastMath.cos(sphereCoords.z);
<add> store.x = a * FastMath.cos(sphereCoords.y);
<add> store.z = a * FastMath.sin(sphereCoords.y);
<add>
<add> return store;
<add> }
<add>
<add> /**
<add> * Converts a point from cartesian coordinates to spherical and stores the
<add> * results in the store var.
<add> */
<add> public static Vector3f cartesianToSpherical(Vector3f cartCoords,
<add> Vector3f store) {
<add> if (cartCoords.x == 0)
<add> cartCoords.x = FastMath.FLT_EPSILON;
<add> store.x = FastMath
<add> .sqrt((cartCoords.x * cartCoords.x)
<add> + (cartCoords.y * cartCoords.y)
<add> + (cartCoords.z * cartCoords.z));
<add> store.y = FastMath.atan(cartCoords.z / cartCoords.x);
<add> if (cartCoords.x < 0)
<add> store.y += FastMath.PI;
<add> store.z = FastMath.asin(cartCoords.y / store.x);
<add> return store;
<add> }
<add>
<add> /**
<add> * Takes an angle (in radians) and expresses it in terms of -2pi to 2pi.
<add> *
<add> * @param r -
<add> * the angle to normalize (in radians)
<add> * @return the normalized angle (also in radians)
<add> */
<add> public static float normalizeAngle(float r) {
<add> if (Float.isInfinite(r) || Float.isNaN(r))
<add> return 0f;
<add> while (r > FastMath.TWO_PI)
<add> r -= FastMath.TWO_PI;
<add> while (r < -FastMath.TWO_PI)
<add> r += FastMath.TWO_PI;
<add> return r;
<add> }
<add>
<add>
<add> /**
<add> * Takes an angle (in radians) and expresses it in terms of -pi to pi.
<add> *
<add> * @param r -
<add> * the angle to normalize (in radians)
<add> * @return the normalized angle (also in radians)
<add> */
<add> public static float normalizeHalfAngle(float r) {
<add> if (Float.isInfinite(r) || Float.isNaN(r))
<add> return 0f;
<add> while (r > FastMath.PI)
<add> r -= FastMath.PI;
<add> while (r < -FastMath.PI)
<add> r += FastMath.PI;
<add> return r;
<add> }
<add>
<add>
<add>
<add> /**
<ide> * FastTrig is used to calculate quick trig functions using a lookup table.
<ide> *
<ide> * @author Erikd |
|
Java | apache-2.0 | dac27e8e26ad63db579515c2687c46a12cbaa7a3 | 0 | caot/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,fitermay/intellij-community,dslomov/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,ibinti/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,blademainer/intellij-community,retomerz/intellij-community,fitermay/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,samthor/intellij-community,caot/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,dslomov/intellij-community,xfournet/intellij-community,kool79/intellij-community,retomerz/intellij-community,amith01994/intellij-community,signed/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,xfournet/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,lucafavatella/intellij-community,izonder/intellij-commu
nity,ThiagoGarciaAlves/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,supersven/intellij-community,vladmm/intellij-community,samthor/intellij-community,samthor/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,amith01994/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,ibinti/intellij-community,FHannes/intellij-community,jagguli/intellij-community,holmes/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,holmes/intellij-community,asedunov/intellij-community,jagguli/intellij-community,vladmm/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,vladmm/intellij-community,allotria/intellij-community,wreckJ/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,dslomov/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,petteyg/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,consulo/consulo,ivan-fedorov/intellij-community,blademainer/intellij-community,amith01994/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,er
nestp/consulo,tmpgit/intellij-community,apixandru/intellij-community,retomerz/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,FHannes/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,jagguli/intellij-community,diorcety/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,kool79/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,signed/intellij-community,caot/intellij-community,ahb0327/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,fnouama/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,hurricup/intellij-community,signed/intellij-community,diorcety/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,apixandru/intellij-community,xfournet/intellij-community,diorcety/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,ftomassetti/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,xfournet/intellij-community,semonte/intellij-community,hurricup/intellij-community,vladmm/intellij-community,caot/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,amith01994/intellij-community,
kdwink/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,consulo/consulo,ivan-fedorov/intellij-community,clumsy/intellij-community,blademainer/intellij-community,asedunov/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,signed/intellij-community,kdwink/intellij-community,ibinti/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,izonder/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,ryano144/intellij-community,asedunov/intellij-community,caot/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,jagguli/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,signed/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,slisson/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,ryano144/intellij-community,allotria/intellij-community,diorcety/intellij-community,fitermay/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,da1z/intellij-community,
lucafavatella/intellij-community,xfournet/intellij-community,fitermay/intellij-community,holmes/intellij-community,hurricup/intellij-community,petteyg/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,semonte/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,petteyg/intellij-community,semonte/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,wreckJ/intellij-community,michaelgallacher/intellij-community,kool79/intellij-community,robovm/robovm-studio,apixandru/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,diorcety/intellij-community,vladmm/intellij-community,adedayo/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,orekyuu/intellij-community,fnouama/intellij-community,gnuhub/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,slisson/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,Distrotech/intellij-community,holmes/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,fitermay/intellij-community,robovm/robovm-studio,izonder/intellij-community,fnouama/intellij-community,kool79/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,slisson/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,holmes/int
ellij-community,adedayo/intellij-community,nicolargo/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,hurricup/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,xfournet/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,hurricup/intellij-community,vladmm/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,robovm/robovm-studio,supersven/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,hurricup/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,FHannes/intellij-community,retomerz/intellij-community,blademainer/intellij-community,allotria/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,lucafavatella/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,signed/intellij-community,lucafavatella/intellij-community,fitermay/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,diorcety/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,wreckJ/
intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,slisson/intellij-community,kool79/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,caot/intellij-community,slisson/intellij-community,ol-loginov/intellij-community,slisson/intellij-community,holmes/intellij-community,Distrotech/intellij-community,ernestp/consulo,FHannes/intellij-community,da1z/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,consulo/consulo,Distrotech/intellij-community,ryano144/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,Lekanich/intellij-community,semonte/intellij-community,retomerz/intellij-community,xfournet/intellij-community,caot/intellij-community,hurricup/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,izonder/intellij-community,vladmm/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,ernestp/consulo,ibinti/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,fengbaicanhe/intellij-community,da1z/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,supersven/intellij-community,supersven/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,robovm/robovm-studio,fnouama/intellij-community,asedunov/intellij-community,semonte/intellij-community,amith01994/intellij-community
,ryano144/intellij-community,da1z/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,wreckJ/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,holmes/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,jagguli/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,izonder/intellij-community,consulo/consulo,mglukhikh/intellij-community,nicolargo/intellij-community,slisson/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,kdwink/intellij-community,ahb0327/intellij-community,robovm/robovm-studio,tmpgit/intellij-community,blademainer/intellij-community,adedayo/intellij-community,holmes/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,consulo/consulo,ahb0327/intellij-community,ahb0327/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,izonder/intellij-community,signed/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,FHannes/intellij-community,ibinti/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,allotria/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,retomerz/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,ol-loginov/intellij-community,signed/intellij-community,ryano144/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,supersven/intellij-community,ernestp/consulo,clumsy/i
ntellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,da1z/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,holmes/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,ibinti/intellij-community,signed/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,robovm/robovm-studio,ryano144/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,wreckJ/intellij-community,kool79/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,fitermay/intellij-community,samthor/intellij-community,kool79/intellij-community,semonte/intellij-community,caot/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,akosyakov/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,gnuhub/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,fnouama/intellij-com
munity,diorcety/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,ernestp/consulo,allotria/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,supersven/intellij-community,dslomov/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,allotria/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,da1z/intellij-community,allotria/intellij-community,caot/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,izonder/intellij-community,ibinti/intellij-community,consulo/consulo,nicolargo/intellij-community,akosyakov/intellij-community,kool79/intellij-community,da1z/intellij-community,clumsy/intellij-community,signed/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,holmes/intellij-community,Lekanich/intellij-community,dslomov/intellij-community,izonder/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,slisson/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,slisson/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,signed/intellij-community,ahb0327/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,ni
colargo/intellij-community,apixandru/intellij-community,allotria/intellij-community,blademainer/intellij-community,adedayo/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,blademainer/intellij-community,da1z/intellij-community,xfournet/intellij-community,diorcety/intellij-community,FHannes/intellij-community,jagguli/intellij-community,kool79/intellij-community,vvv1559/intellij-community,retomerz/intellij-community,apixandru/intellij-community,amith01994/intellij-community,samthor/intellij-community,ibinti/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,ernestp/consulo,samthor/intellij-community,apixandru/intellij-community,asedunov/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,semonte/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.android.designer.propertyTable.editors;
import com.android.resources.ResourceType;
import com.intellij.android.designer.model.RadViewComponent;
import com.intellij.android.designer.propertyTable.renderers.ResourceRenderer;
import com.intellij.designer.componentTree.TreeNodeDescriptor;
import com.intellij.designer.utils.SizedIcon;
import com.intellij.icons.AllIcons;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.ide.util.treeView.AbstractTreeBuilder;
import com.intellij.ide.util.treeView.AbstractTreeStructure;
import com.intellij.ide.util.treeView.NodeDescriptor;
import com.intellij.ide.util.treeView.NodeRenderer;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDirectory;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.ui.*;
import com.intellij.ui.components.JBTabbedPane;
import com.intellij.ui.speedSearch.SpeedSearchUtil;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ui.tree.TreeUtil;
import org.intellij.images.fileTypes.ImageFileTypeManager;
import org.jetbrains.android.actions.CreateResourceFileAction;
import org.jetbrains.android.actions.CreateXmlResourceDialog;
import org.jetbrains.android.dom.resources.ResourceElement;
import org.jetbrains.android.facet.AndroidFacet;
import org.jetbrains.android.refactoring.AndroidBaseLayoutRefactoringAction;
import org.jetbrains.android.refactoring.AndroidExtractStyleAction;
import org.jetbrains.android.resourceManagers.FileResourceProcessor;
import org.jetbrains.android.resourceManagers.ResourceManager;
import org.jetbrains.android.util.AndroidResourceUtil;
import org.jetbrains.android.util.AndroidUtils;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeSelectionModel;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.MouseEvent;
import java.io.IOException;
import java.util.*;
import java.util.List;
/**
* @author Alexander Lobas
*/
public class ResourceDialog extends DialogWrapper implements TreeSelectionListener {
  // Prefix of references to framework resources, e.g. "@android:color/white".
  private static final String ANDROID = "@android:";
  // Presentation client-property key under which the target ResourceType is stashed
  // for the "new resource" actions (see createNewResourcePopupMenu).
  private static final String TYPE_KEY = "ResourceType";
  // Card names for the preview area's CardLayout.
  private static final String TEXT = "Text";
  private static final String TABS = "Tabs";
  private static final String IMAGE = "Image";
  private static final String NONE = "None";
  private static final Icon RESOURCE_ITEM_ICON = AllIcons.Css.Property;
  private final Module myModule;
  // Component whose attribute is being edited; used for "Extract Style..." (may be null).
  private final RadViewComponent myComponent;
  private final JBTabbedPane myContentPanel;
  private final ResourcePanel myProjectPanel;
  private final ResourcePanel mySystemPanel;
  // Created only when the dialog is opened for color types (see constructor).
  private ColorPicker myColorPicker;
  // Left-side button that shows a popup of the applicable "new resource" actions.
  private final Action myNewResourceAction = new AbstractAction("New Resource", AllIcons.General.ComboArrowDown) {
    @Override
    public void actionPerformed(ActionEvent e) {
      JComponent component = (JComponent)e.getSource();
      ActionPopupMenu popupMenu = createNewResourcePopupMenu();
      // Anchor the popup directly below the invoking button.
      popupMenu.getComponent().show(component, 0, component.getHeight());
    }
  };
  private final AnAction myNewResourceValueAction = new AnAction() {
    @Override
    public void actionPerformed(AnActionEvent e) {
      // The target type was put on the template presentation when the popup was built.
      ResourceType type = (ResourceType)getTemplatePresentation().getClientProperty(TYPE_KEY);
      createNewResourceValue(type);
    }
  };
  private final AnAction myNewResourceFileAction = new AnAction() {
    @Override
    public void actionPerformed(AnActionEvent e) {
      ResourceType type = (ResourceType)getTemplatePresentation().getClientProperty(TYPE_KEY);
      createNewResourceFile(type);
    }
  };
  private final AnAction myExtractStyleAction = new AnAction("Extract Style...") {
    @Override
    public void actionPerformed(AnActionEvent e) {
      extractStyle();
    }
  };
  // Chosen reference ("@type/name", "@android:type/name" or "#rrggbb"); null when nothing chosen.
  private String myResultResourceName;
  /**
   * Builds the resource chooser for the given attribute.
   *
   * @param module    module whose resources (and the framework's) are offered
   * @param types     resource types to show; when this is ResourceEditor.COLOR_TYPES a
   *                  "Color" picker tab is added as well
   * @param value     current attribute value used to pre-select a tree node or color; may be null
   * @param component component being edited, enables "Extract Style..."; may be null
   */
  public ResourceDialog(Module module, ResourceType[] types, String value, RadViewComponent component) {
    super(module.getProject());
    myModule = module;
    myComponent = component;
    setTitle("Resources");
    AndroidFacet facet = AndroidFacet.getInstance(module);
    myProjectPanel = new ResourcePanel(facet, types, false);
    mySystemPanel = new ResourcePanel(facet, types, true);
    myContentPanel = new JBTabbedPane();
    myContentPanel.setPreferredSize(new Dimension(600, 500));
    myContentPanel.addTab("Project", myProjectPanel.myComponent);
    myContentPanel.addTab("System", mySystemPanel.myComponent);
    myProjectPanel.myTreeBuilder.expandAll(null);
    mySystemPanel.myTreeBuilder.expandAll(null);
    // Try to pre-select the current value in a tree unless the color tab takes over below.
    boolean doSelection = value != null;
    // NOTE(review): reference (==) comparison — relies on callers passing the shared
    // ResourceEditor.COLOR_TYPES array instance, not an equal copy; confirm.
    if (types == ResourceEditor.COLOR_TYPES) {
      Color color = ResourceRenderer.parseColor(value);
      // myDisposable comes from DialogWrapper, so the picker is disposed with the dialog.
      myColorPicker = new ColorPicker(myDisposable, color, true);
      myContentPanel.addTab("Color", myColorPicker);
      if (color != null) {
        // Value is a literal color: open on the picker tab (index 2) instead of a tree.
        myContentPanel.setSelectedIndex(2);
        doSelection = false;
      }
    }
    if (doSelection && value.startsWith("@")) {
      // Normalize "@+id/..." style references to "@id/...".
      value = StringUtil.replace(value, "+", "");
      int index = value.indexOf('/');
      if (index != -1) {
        ResourcePanel panel;
        String type;
        String name = value.substring(index + 1);
        if (value.startsWith(ANDROID)) {
          panel = mySystemPanel;
          type = value.substring(ANDROID.length(), index);
        }
        else {
          panel = myProjectPanel;
          type = value.substring(1, index);
        }
        myContentPanel.setSelectedComponent(panel.myComponent);
        panel.select(type, name);
      }
    }
    // Keep the OK state / result name in sync with whichever tab is active.
    myContentPanel.addChangeListener(new ChangeListener() {
      @Override
      public void stateChanged(ChangeEvent e) {
        valueChanged(null);
      }
    });
    valueChanged(null);
    init();
  }
  /**
   * Builds the popup for the "New Resource" button. Which entries appear depends on the
   * resource group currently selected in the project tree (value resource, resource file,
   * and/or "Extract Style..." for style groups).
   */
  private ActionPopupMenu createNewResourcePopupMenu() {
    ActionManager actionManager = ActionManager.getInstance();
    DefaultActionGroup actionGroup = new DefaultActionGroup();
    ResourceGroup resourceGroup = getSelectedElement(myProjectPanel.myTreeBuilder, ResourceGroup.class);
    if (resourceGroup == null) {
      // The selection is an item rather than a group: use the item's owning group.
      // NOTE(review): this NPEs if nothing is selected at all — presumably guarded by
      // myNewResourceAction being disabled for empty selections (see valueChanged); confirm.
      resourceGroup = getSelectedElement(myProjectPanel.myTreeBuilder, ResourceItem.class).getGroup();
    }
    if (AndroidResourceUtil.VALUE_RESOURCE_TYPES.contains(resourceGroup.getType())) {
      myNewResourceValueAction.getTemplatePresentation().setText("New " + resourceGroup + " Value...");
      // Stash the type on the presentation; the action reads it back in actionPerformed.
      myNewResourceValueAction.getTemplatePresentation().putClientProperty(TYPE_KEY, resourceGroup.getType());
      actionGroup.add(myNewResourceValueAction);
    }
    if (AndroidResourceUtil.XML_FILE_RESOURCE_TYPES.contains(resourceGroup.getType())) {
      myNewResourceFileAction.getTemplatePresentation().setText("New " + resourceGroup + " File...");
      myNewResourceFileAction.getTemplatePresentation().putClientProperty(TYPE_KEY, resourceGroup.getType());
      actionGroup.add(myNewResourceFileAction);
    }
    if (myComponent != null && ResourceType.STYLE.equals(resourceGroup.getType())) {
      final XmlTag componentTag = myComponent.getTag();
      // Extraction only makes sense for layout view elements the refactoring supports.
      final boolean enabled = AndroidBaseLayoutRefactoringAction.getLayoutViewElement(componentTag) != null &&
                              AndroidExtractStyleAction.doIsEnabled(componentTag);
      myExtractStyleAction.getTemplatePresentation().setEnabled(enabled);
      actionGroup.add(myExtractStyleAction);
    }
    return actionManager.createActionPopupMenu(ActionPlaces.UNKNOWN, actionGroup);
  }
private void createNewResourceValue(ResourceType resourceType) {
CreateXmlResourceDialog dialog = new CreateXmlResourceDialog(myModule, resourceType, null, null, true);
dialog.setTitle("New " + StringUtil.capitalize(resourceType.getDisplayName()) + " Value Resource");
dialog.show();
if (!dialog.isOK()) {
return;
}
Module moduleToPlaceResource = dialog.getModule();
if (moduleToPlaceResource == null) {
return;
}
String fileName = dialog.getFileName();
List<String> dirNames = dialog.getDirNames();
String resValue = dialog.getValue();
String resName = dialog.getResourceName();
if (!AndroidResourceUtil.createValueResource(moduleToPlaceResource, resName, resourceType, fileName, dirNames, resValue)) {
return;
}
PsiDocumentManager.getInstance(myModule.getProject()).commitAllDocuments();
myResultResourceName = "@" + resourceType.getName() + "/" + resName;
close(OK_EXIT_CODE);
}
private void createNewResourceFile(ResourceType resourceType) {
AndroidFacet facet = AndroidFacet.getInstance(myModule);
XmlFile newFile = CreateResourceFileAction.createFileResource(facet, resourceType, null, null, null, true, null);
if (newFile != null) {
String name = newFile.getName();
int index = name.lastIndexOf('.');
if (index != -1) {
name = name.substring(0, index);
}
myResultResourceName = "@" + resourceType.getName() + "/" + name;
close(OK_EXIT_CODE);
}
}
private void extractStyle() {
final String resName = AndroidExtractStyleAction.doExtractStyle(myModule, myComponent.getTag(), false, null);
if (resName == null) {
return;
}
myResultResourceName = "@style/" + resName;
close(OK_EXIT_CODE);
}
  /** Focus the project resource tree initially so keyboard navigation works right away. */
  @Override
  public JComponent getPreferredFocusedComponent() {
    return myProjectPanel.myTree;
  }
  /** The tabbed pane (Project / System / optional Color) is the dialog's entire content. */
  @Override
  protected JComponent createCenterPanel() {
    return myContentPanel;
  }
@Override
protected Action[] createLeftSideActions() {
return new Action[]{myNewResourceAction};
}
  @Override
  protected void dispose() {
    super.dispose();
    // Release both tree builders explicitly — presumably they are not registered as
    // children of the dialog's Disposable, so Disposer would not reach them; confirm.
    Disposer.dispose(myProjectPanel.myTreeBuilder);
    Disposer.dispose(mySystemPanel.myTreeBuilder);
  }
  /**
   * Returns the chosen resource reference — "@type/name" for project resources,
   * "@android:type/name" for framework resources, or "#rrggbb" from the color picker —
   * or {@code null} when nothing was selected. Meaningful after the dialog closed with OK.
   */
  public String getResourceName() {
    return myResultResourceName;
  }
  @Override
  protected void doOKAction() {
    // Recompute myResultResourceName from the current selection so the caller sees
    // the value matching the UI state at the moment OK is pressed.
    valueChanged(null);
    super.doOKAction();
  }
@Nullable
private static <T> T getSelectedElement(AbstractTreeBuilder treeBuilder, Class<T> elementClass) {
Set<T> elements = treeBuilder.getSelectedElements(elementClass);
return elements.isEmpty() ? null : elements.iterator().next();
}
  /**
   * Syncs the OK button, the "New Resource" button and {@link #myResultResourceName}
   * with the current tab and tree/color selection. Also called with a null event from
   * the constructor, the tab change listener and doOKAction.
   */
  @Override
  public void valueChanged(@Nullable TreeSelectionEvent e) {
    Component selectedComponent = myContentPanel.getSelectedComponent();
    if (selectedComponent == myColorPicker) {
      Color color = myColorPicker.getColor();
      setOKActionEnabled(color != null);
      // Creating resources makes no sense on the color tab.
      myNewResourceAction.setEnabled(false);
      // NOTE(review): only red/green/blue are encoded — any alpha is dropped; confirm intended.
      myResultResourceName = color == null ? null : "#" + toHex(color.getRed()) + toHex(color.getGreen()) + toHex(color.getBlue());
    }
    else {
      boolean isProjectPanel = selectedComponent == myProjectPanel.myComponent;
      ResourcePanel panel = isProjectPanel ? myProjectPanel : mySystemPanel;
      ResourceItem element = getSelectedElement(panel.myTreeBuilder, ResourceItem.class);
      // OK requires a concrete resource item, not just a group node.
      setOKActionEnabled(element != null);
      // "New Resource" only applies to the project tab and needs any selection (item or group).
      myNewResourceAction.setEnabled(isProjectPanel && !panel.myTreeBuilder.getSelectedElements().isEmpty());
      if (element == null) {
        myResultResourceName = null;
      }
      else {
        // Project resources get "@", framework resources "@android:".
        String prefix = panel == myProjectPanel ? "@" : ANDROID;
        myResultResourceName = prefix + element.getName();
      }
      panel.showPreview(element);
    }
  }
private static String toHex(int value) {
String hex = Integer.toString(value, 16);
return hex.length() == 1 ? "0" + hex : hex;
}
/**
 * One tab of the resource chooser ("Project" or "System"): a tree of resource
 * groups/items on top and a card-layout preview area (text / tabs / image / none)
 * below. Selection changes are routed to {@link ResourceDialog#valueChanged}.
 */
private class ResourcePanel {
  public final Tree myTree;
  public final AbstractTreeBuilder myTreeBuilder;
  public final JBSplitter myComponent;      // root component of this tab: tree above, preview below
  private final JPanel myPreviewPanel;      // CardLayout container; cards keyed by TEXT/TABS/IMAGE/NONE
  private final JTextArea myTextArea;       // card shown for single-value text previews
  private final JBTabbedPane myTabbedPane;  // card shown when a value resource has per-qualifier variants
  private final JLabel myImageComponent;    // card shown for drawable/image previews
  private final JLabel myNoPreviewComponent;
  private final ResourceGroup[] myGroups;   // one group per requested ResourceType
  private final ResourceManager myManager;  // project or system resource manager, depending on "system"

  public ResourcePanel(AndroidFacet facet, ResourceType[] types, boolean system) {
    myTree = new Tree();
    myTree.setModel(new DefaultTreeModel(new DefaultMutableTreeNode()));
    myTree.setScrollsOnExpand(true);
    myTree.setRootVisible(false);
    myTree.setShowsRootHandles(true);
    // Double-clicking a leaf item accepts the dialog immediately.
    new DoubleClickListener() {
      @Override
      protected boolean onDoubleClick(MouseEvent e) {
        if (!myTreeBuilder.getSelectedElements(ResourceItem.class).isEmpty()) {
          close(OK_EXIT_CODE);
          return true;
        }
        return false;
      }
    }.installOn(myTree);
    ToolTipManager.sharedInstance().registerComponent(myTree);
    TreeUtil.installActions(myTree);
    // null package -> project resources; SYSTEM_RESOURCE_PACKAGE -> android framework resources.
    myManager = facet.getResourceManager(system ? AndroidUtils.SYSTEM_RESOURCE_PACKAGE : null);
    myGroups = new ResourceGroup[types.length];
    for (int i = 0; i < types.length; i++) {
      myGroups[i] = new ResourceGroup(types[i], myManager);
    }
    myTreeBuilder =
      new AbstractTreeBuilder(myTree, (DefaultTreeModel)myTree.getModel(), new TreeContentProvider(myGroups), null);
    myTreeBuilder.initRootNode();
    TreeSelectionModel selectionModel = myTree.getSelectionModel();
    selectionModel.setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
    selectionModel.addTreeSelectionListener(ResourceDialog.this);
    // Renderer that highlights speed-search matches in node text.
    myTree.setCellRenderer(new NodeRenderer() {
      @Override
      protected void doAppend(@NotNull @Nls String fragment,
                              @NotNull SimpleTextAttributes attributes,
                              boolean isMainText,
                              boolean selected) {
        SpeedSearchUtil.appendFragmentsForSpeedSearch(myTree, fragment, attributes, selected, this);
      }

      @Override
      public void doAppend(@NotNull String fragment, @NotNull SimpleTextAttributes attributes, boolean selected) {
        SpeedSearchUtil.appendFragmentsForSpeedSearch(myTree, fragment, attributes, selected, this);
      }

      @Override
      public void doAppend(String fragment, boolean selected) {
        SpeedSearchUtil.appendFragmentsForSpeedSearch(myTree, fragment, SimpleTextAttributes.REGULAR_ATTRIBUTES, selected, this);
      }
    });
    new TreeSpeedSearch(myTree, TreeSpeedSearch.NODE_DESCRIPTOR_TOSTRING, true);

    // Layout: tree gets ~80% of the vertical space, preview the rest.
    myComponent = new JBSplitter(true, 0.8f);
    myComponent.setSplitterProportionKey("android.resource_dialog_splitter");
    myComponent.setFirstComponent(ScrollPaneFactory.createScrollPane(myTree));
    myPreviewPanel = new JPanel(new CardLayout());
    myComponent.setSecondComponent(myPreviewPanel);
    myTextArea = new JTextArea(5, 20);
    myPreviewPanel.add(ScrollPaneFactory.createScrollPane(myTextArea), TEXT);
    myPreviewPanel.add(myTabbedPane = new JBTabbedPane(JTabbedPane.BOTTOM, JTabbedPane.SCROLL_TAB_LAYOUT), TABS);
    myImageComponent = new JLabel();
    myImageComponent.setHorizontalAlignment(SwingConstants.CENTER);
    myImageComponent.setVerticalAlignment(SwingConstants.CENTER);
    myPreviewPanel.add(myImageComponent, IMAGE);
    myNoPreviewComponent = new JLabel("No Preview");
    myNoPreviewComponent.setHorizontalAlignment(SwingConstants.CENTER);
    myNoPreviewComponent.setVerticalAlignment(SwingConstants.CENTER);
    myPreviewPanel.add(myNoPreviewComponent, NONE);
  }

  /**
   * Switches the preview card to match the given item: text for single value
   * resources, one tab per resource qualifier for multi-valued ones, an icon for
   * images, raw file text for XML files, and "No Preview" otherwise (including
   * {@code null} selection and ID resources, which have nothing to show).
   * Computed previews are cached on the {@link ResourceItem}.
   */
  public void showPreview(@Nullable ResourceItem element) {
    CardLayout layout = (CardLayout)myPreviewPanel.getLayout();

    if (element == null || element.getGroup().getType() == ResourceType.ID) {
      layout.show(myPreviewPanel, NONE);
      return;
    }

    try {
      VirtualFile file = element.getFile();
      if (file == null) {
        // Value resource (defined inside res/values*/...): look it up by type and name.
        String value = element.getPreviewString();
        if (value == null) {
          String[] values = element.getPreviewStrings();
          if (values == null) {
            long time = System.currentTimeMillis();
            List<ResourceElement> resources = myManager.findValueResources(element.getGroup().getType().getName(), element.toString());
            if (ApplicationManagerEx.getApplicationEx().isInternal()) {
              System.out.println("Time: " + (System.currentTimeMillis() - time)); // XXX
            }
            int size = resources.size();
            if (size == 1) {
              // Single definition: cache it as a plain text preview.
              value = getResourceElementValue(resources.get(0));
              element.setPreviewString(value);
            }
            else if (size > 1) {
              // Multiple definitions (one per configuration qualifier): one tab each,
              // named by the res folder suffix (e.g. "values-en" -> "en").
              values = new String[size];
              String[] tabNames = new String[size];
              for (int i = 0; i < size; i++) {
                ResourceElement resource = resources.get(i);
                values[i] = getResourceElementValue(resource);

                PsiDirectory directory = resource.getXmlTag().getContainingFile().getParent();
                String tabName = directory == null ? "unknown-" + i : directory.getName();
                tabNames[i] = tabName.substring(tabName.indexOf('-') + 1);
              }
              element.setPreviewStrings(values, tabNames);
            }
            else {
              layout.show(myPreviewPanel, NONE);
              return;
            }
          }
          if (values != null) {
            int selectedIndex = myTabbedPane.getSelectedIndex();
            myTabbedPane.removeAll();
            String[] tabNames = element.getTabNames();
            if (selectedIndex == -1) {
              // Default to the first English qualifier tab when nothing was selected yet.
              for (int i = 0; i < tabNames.length; i++) {
                if (tabNames[i].startsWith("en")) {
                  selectedIndex = i;
                  break;
                }
              }
            }
            for (int i = 0; i < tabNames.length; i++) {
              JTextArea textArea = new JTextArea(5, 20);
              textArea.setText(values[i]);
              textArea.setEditable(false);
              myTabbedPane.addTab(tabNames[i], ScrollPaneFactory.createScrollPane(textArea));
            }
            if (selectedIndex >= 0 && selectedIndex < tabNames.length) {
              myTabbedPane.setSelectedIndex(selectedIndex);
            }
            layout.show(myPreviewPanel, TABS);
            return;
          }
        }
        if (value == null) {
          layout.show(myPreviewPanel, NONE);
          return;
        }
        myTextArea.setText(value);
        myTextArea.setEditable(false);
        layout.show(myPreviewPanel, TEXT);
      }
      else if (ImageFileTypeManager.getInstance().isImage(file)) {
        // File-based image resource: show (and cache) a 100x100 scaled icon.
        Icon icon = element.getPreviewIcon();
        if (icon == null) {
          icon = new SizedIcon(100, 100, new ImageIcon(file.getPath()));
          element.setPreviewIcon(icon);
        }
        myImageComponent.setIcon(icon);
        layout.show(myPreviewPanel, IMAGE);
      }
      else if (file.getFileType() == XmlFileType.INSTANCE) {
        String value = element.getPreviewString();
        if (value == null) {
          // NOTE(review): decodes with the platform default charset; presumably the
          // file's own charset (file.getCharset()) would be more correct — confirm.
          value = new String(file.contentsToByteArray());
          element.setPreviewString(value);
        }
        myTextArea.setText(value);
        myTextArea.setEditable(false);
        layout.show(myPreviewPanel, TEXT);
      }
      else {
        layout.show(myPreviewPanel, NONE);
      }
    }
    catch (IOException e) {
      // Reading the resource file failed; degrade to "No Preview".
      layout.show(myPreviewPanel, NONE);
    }
  }

  /**
   * Selects the item named {@code name} inside the group whose type name matches
   * {@code type} (case-insensitive). Silently does nothing when not found.
   */
  private void select(String type, String name) {
    for (ResourceGroup group : myGroups) {
      if (type.equalsIgnoreCase(group.getName())) {
        for (ResourceItem item : group.getItems()) {
          if (name.equals(item.toString())) {
            myTreeBuilder.select(item);
            return;
          }
        }
        return; // group found but item missing: stop searching other groups
      }
    }
  }
}
/**
 * Returns a printable value for a value resource: its raw text when present,
 * otherwise the full text of its XML tag.
 */
private static String getResourceElementValue(ResourceElement element) {
  String rawText = element.getRawText();
  return StringUtil.isEmpty(rawText) ? element.getXmlTag().getText() : rawText;
}
/**
 * One top-level tree node: all resources of a single {@link ResourceType},
 * collected from value resources, file resources, and (for the ID type) declared
 * ids, then sorted by name.
 */
private static class ResourceGroup {
  private List<ResourceItem> myItems = new ArrayList<ResourceItem>();
  private final ResourceType myType;

  public ResourceGroup(ResourceType type, ResourceManager manager) {
    myType = type;
    final String resourceType = type.getName();

    // Value resources (defined in res/values*/ XML files) — no backing file per item.
    Collection<String> resourceNames = manager.getValueResourceNames(resourceType);
    for (String resourceName : resourceNames) {
      myItems.add(new ResourceItem(this, resourceName, null, RESOURCE_ITEM_ICON));
    }

    // File-based resources; fileNames deduplicates the same resource name coming
    // from multiple qualifier folders (only the first file is kept).
    final Set<String> fileNames = new HashSet<String>();
    manager.processFileResources(resourceType, new FileResourceProcessor() {
      @Override
      public boolean process(@NotNull VirtualFile resFile, @NotNull String resName, @NotNull String resFolderType) {
        if (fileNames.add(resName)) {
          myItems.add(new ResourceItem(ResourceGroup.this, resName, resFile, resFile.getFileType().getIcon()));
        }
        return true;
      }
    });

    // IDs declared inline (e.g. "@+id/foo" in layouts) that have no value resource entry.
    if (type == ResourceType.ID) {
      for (String id : manager.getIds()) {
        if (!resourceNames.contains(id)) {
          myItems.add(new ResourceItem(this, id, null, RESOURCE_ITEM_ICON));
        }
      }
    }

    Collections.sort(myItems, new Comparator<ResourceItem>() {
      @Override
      public int compare(ResourceItem resource1, ResourceItem resource2) {
        return resource1.toString().compareTo(resource2.toString());
      }
    });
  }

  public ResourceType getType() {
    return myType;
  }

  // Resource type name, e.g. "string" or "drawable" (used when building "@type/name").
  public String getName() {
    return myType.getName();
  }

  public List<ResourceItem> getItems() {
    return myItems;
  }

  // Display name shown as the tree node label.
  @Override
  public String toString() {
    return myType.getDisplayName();
  }
}
/**
 * A leaf tree node: one concrete resource inside a {@link ResourceGroup}.
 * Also acts as a cache for the lazily computed preview (string, per-qualifier
 * strings + tab names, or icon) filled in by {@code ResourcePanel.showPreview}.
 */
private static class ResourceItem {
  private final ResourceGroup myGroup;
  private final String myName;
  private final VirtualFile myFile;       // backing file for file-based resources; null for value resources
  private final Icon myIcon;
  private String myPreviewString;         // cached single-value preview text
  private String[] myPreviewStrings;      // cached per-qualifier preview texts
  private String[] myNames;               // tab names matching myPreviewStrings
  private Icon myPreviewIcon;             // cached scaled image preview

  public ResourceItem(@NotNull ResourceGroup group, @NotNull String name, @Nullable VirtualFile file, Icon icon) {
    myGroup = group;
    myName = name;
    myFile = file;
    myIcon = icon;
  }

  public ResourceGroup getGroup() {
    return myGroup;
  }

  // Qualified name used in the resulting reference, e.g. "string/app_name".
  public String getName() {
    return myGroup.getName() + "/" + myName;
  }

  public VirtualFile getFile() {
    return myFile;
  }

  public Icon getIcon() {
    return myIcon;
  }

  public String getPreviewString() {
    return myPreviewString;
  }

  public void setPreviewString(String previewString) {
    myPreviewString = previewString;
  }

  public String[] getPreviewStrings() {
    return myPreviewStrings;
  }

  public String[] getTabNames() {
    return myNames;
  }

  public void setPreviewStrings(String[] previewStrings, String[] names) {
    myPreviewStrings = previewStrings;
    myNames = names;
  }

  public Icon getPreviewIcon() {
    return myPreviewIcon;
  }

  public void setPreviewIcon(Icon previewIcon) {
    myPreviewIcon = previewIcon;
  }

  // Bare resource name; also what the tree's speed search and sorting operate on.
  @Override
  public String toString() {
    return myName;
  }
}
/**
 * Tree structure adapter: an opaque root object whose children are the
 * {@link ResourceGroup}s, each of which in turn owns its {@link ResourceItem}s.
 */
private static class TreeContentProvider extends AbstractTreeStructure {
  private final Object myTreeRoot = new Object();
  private final ResourceGroup[] myGroups;

  public TreeContentProvider(ResourceGroup[] groups) {
    myGroups = groups;
  }

  @Override
  public Object getRootElement() {
    return myTreeRoot;
  }

  @Override
  public Object[] getChildElements(Object element) {
    if (element instanceof ResourceGroup) {
      return ((ResourceGroup)element).getItems().toArray();
    }
    if (element == myTreeRoot) {
      return myGroups;
    }
    // Items are leaves; anything else has no children.
    return ArrayUtil.EMPTY_OBJECT_ARRAY;
  }

  @Override
  public Object getParentElement(Object element) {
    // Only items know their parent; groups hang directly off the synthetic root.
    return element instanceof ResourceItem ? ((ResourceItem)element).getGroup() : null;
  }

  @NotNull
  @Override
  public NodeDescriptor createDescriptor(Object element, NodeDescriptor parentDescriptor) {
    String label = element == null ? null : element.toString();
    TreeNodeDescriptor descriptor = new TreeNodeDescriptor(parentDescriptor, element, label);
    if (element instanceof ResourceItem) {
      descriptor.setIcon(((ResourceItem)element).getIcon());
    }
    else if (element instanceof ResourceGroup) {
      descriptor.setIcon(AllIcons.Nodes.TreeClosed);
    }
    return descriptor;
  }

  @Override
  public boolean hasSomethingToCommit() {
    // The structure is static once built; there is never anything to commit.
    return false;
  }

  @Override
  public void commit() {
  }
}
}
| plugins/android-designer/src/com/intellij/android/designer/propertyTable/editors/ResourceDialog.java | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.android.designer.propertyTable.editors;
import com.android.resources.ResourceType;
import com.intellij.android.designer.model.RadViewComponent;
import com.intellij.android.designer.propertyTable.renderers.ResourceRenderer;
import com.intellij.designer.componentTree.TreeNodeDescriptor;
import com.intellij.designer.utils.SizedIcon;
import com.intellij.icons.AllIcons;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.ide.util.treeView.AbstractTreeBuilder;
import com.intellij.ide.util.treeView.AbstractTreeStructure;
import com.intellij.ide.util.treeView.NodeDescriptor;
import com.intellij.ide.util.treeView.NodeRenderer;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.ui.*;
import com.intellij.ui.components.JBTabbedPane;
import com.intellij.ui.speedSearch.SpeedSearchUtil;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ui.tree.TreeUtil;
import org.intellij.images.fileTypes.ImageFileTypeManager;
import org.jetbrains.android.actions.CreateResourceFileAction;
import org.jetbrains.android.actions.CreateXmlResourceDialog;
import org.jetbrains.android.dom.resources.ResourceElement;
import org.jetbrains.android.facet.AndroidFacet;
import org.jetbrains.android.refactoring.AndroidBaseLayoutRefactoringAction;
import org.jetbrains.android.refactoring.AndroidExtractStyleAction;
import org.jetbrains.android.resourceManagers.FileResourceProcessor;
import org.jetbrains.android.resourceManagers.ResourceManager;
import org.jetbrains.android.util.AndroidResourceUtil;
import org.jetbrains.android.util.AndroidUtils;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeSelectionModel;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.MouseEvent;
import java.io.IOException;
import java.util.*;
import java.util.List;
/**
* @author Alexander Lobas
*/
public class ResourceDialog extends DialogWrapper implements TreeSelectionListener {
private static final String ANDROID = "@android:";
private static final String TYPE_KEY = "ResourceType";
private static final String TEXT = "Text";
private static final String TABS = "Tabs";
private static final String IMAGE = "Image";
private static final String NONE = "None";
private static final Icon RESOURCE_ITEM_ICON = AllIcons.Css.Property;
private final Module myModule;
private final RadViewComponent myComponent;
private final JBTabbedPane myContentPanel;
private final ResourcePanel myProjectPanel;
private final ResourcePanel mySystemPanel;
private ColorPicker myColorPicker;
private final Action myNewResourceAction = new AbstractAction("New Resource", AllIcons.General.ComboArrowDown) {
@Override
public void actionPerformed(ActionEvent e) {
JComponent component = (JComponent)e.getSource();
ActionPopupMenu popupMenu = createNewResourcePopupMenu();
popupMenu.getComponent().show(component, 0, component.getHeight());
}
};
private final AnAction myNewResourceValueAction = new AnAction() {
@Override
public void actionPerformed(AnActionEvent e) {
ResourceType type = (ResourceType)getTemplatePresentation().getClientProperty(TYPE_KEY);
createNewResourceValue(type);
}
};
private final AnAction myNewResourceFileAction = new AnAction() {
@Override
public void actionPerformed(AnActionEvent e) {
ResourceType type = (ResourceType)getTemplatePresentation().getClientProperty(TYPE_KEY);
createNewResourceFile(type);
}
};
private final AnAction myExtractStyleAction = new AnAction("Extract Style...") {
@Override
public void actionPerformed(AnActionEvent e) {
extractStyle();
}
};
private String myResultResourceName;
public ResourceDialog(Module module, ResourceType[] types, String value, RadViewComponent component) {
super(module.getProject());
myModule = module;
myComponent = component;
setTitle("Resources");
AndroidFacet facet = AndroidFacet.getInstance(module);
myProjectPanel = new ResourcePanel(facet, types, false);
mySystemPanel = new ResourcePanel(facet, types, true);
myContentPanel = new JBTabbedPane();
myContentPanel.setPreferredSize(new Dimension(600, 500));
myContentPanel.addTab("Project", myProjectPanel.myComponent);
myContentPanel.addTab("System", mySystemPanel.myComponent);
myProjectPanel.myTreeBuilder.expandAll(null);
mySystemPanel.myTreeBuilder.expandAll(null);
boolean doSelection = value != null;
if (types == ResourceEditor.COLOR_TYPES) {
Color color = ResourceRenderer.parseColor(value);
myColorPicker = new ColorPicker(myDisposable, color, true);
myContentPanel.addTab("Color", myColorPicker);
if (color != null) {
myContentPanel.setSelectedIndex(2);
doSelection = false;
}
}
if (doSelection && value.startsWith("@")) {
int index = value.indexOf('/');
if (index != -1) {
ResourcePanel panel;
String type;
String name = value.substring(index + 1);
if (value.startsWith(ANDROID)) {
panel = mySystemPanel;
type = value.substring(ANDROID.length(), index);
}
else {
panel = myProjectPanel;
type = value.substring(1, index);
}
myContentPanel.setSelectedComponent(panel.myComponent);
panel.select(type, name);
}
}
myContentPanel.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
valueChanged(null);
}
});
valueChanged(null);
init();
}
private ActionPopupMenu createNewResourcePopupMenu() {
ActionManager actionManager = ActionManager.getInstance();
DefaultActionGroup actionGroup = new DefaultActionGroup();
ResourceGroup resourceGroup = getSelectedElement(myProjectPanel.myTreeBuilder, ResourceGroup.class);
if (resourceGroup == null) {
resourceGroup = getSelectedElement(myProjectPanel.myTreeBuilder, ResourceItem.class).getGroup();
}
if (AndroidResourceUtil.VALUE_RESOURCE_TYPES.contains(resourceGroup.getType())) {
myNewResourceValueAction.getTemplatePresentation().setText("New " + resourceGroup + " Value...");
myNewResourceValueAction.getTemplatePresentation().putClientProperty(TYPE_KEY, resourceGroup.getType());
actionGroup.add(myNewResourceValueAction);
}
if (AndroidResourceUtil.XML_FILE_RESOURCE_TYPES.contains(resourceGroup.getType())) {
myNewResourceFileAction.getTemplatePresentation().setText("New " + resourceGroup + " File...");
myNewResourceFileAction.getTemplatePresentation().putClientProperty(TYPE_KEY, resourceGroup.getType());
actionGroup.add(myNewResourceFileAction);
}
if (myComponent != null && ResourceType.STYLE.equals(resourceGroup.getType())) {
final XmlTag componentTag = myComponent.getTag();
final boolean enabled = AndroidBaseLayoutRefactoringAction.getLayoutViewElement(componentTag) != null &&
AndroidExtractStyleAction.doIsEnabled(componentTag);
myExtractStyleAction.getTemplatePresentation().setEnabled(enabled);
actionGroup.add(myExtractStyleAction);
}
return actionManager.createActionPopupMenu(ActionPlaces.UNKNOWN, actionGroup);
}
private void createNewResourceValue(ResourceType resourceType) {
CreateXmlResourceDialog dialog = new CreateXmlResourceDialog(myModule, resourceType, null, null, true);
dialog.setTitle("New " + StringUtil.capitalize(resourceType.getDisplayName()) + " Value Resource");
dialog.show();
if (!dialog.isOK()) {
return;
}
Module moduleToPlaceResource = dialog.getModule();
if (moduleToPlaceResource == null) {
return;
}
String fileName = dialog.getFileName();
List<String> dirNames = dialog.getDirNames();
String resValue = dialog.getValue();
String resName = dialog.getResourceName();
if (!AndroidResourceUtil.createValueResource(moduleToPlaceResource, resName, resourceType, fileName, dirNames, resValue)) {
return;
}
PsiDocumentManager.getInstance(myModule.getProject()).commitAllDocuments();
myResultResourceName = "@" + resourceType.getName() + "/" + resName;
close(OK_EXIT_CODE);
}
private void createNewResourceFile(ResourceType resourceType) {
AndroidFacet facet = AndroidFacet.getInstance(myModule);
XmlFile newFile = CreateResourceFileAction.createFileResource(facet, resourceType, null, null, null, true, null);
if (newFile != null) {
String name = newFile.getName();
int index = name.lastIndexOf('.');
if (index != -1) {
name = name.substring(0, index);
}
myResultResourceName = "@" + resourceType.getName() + "/" + name;
close(OK_EXIT_CODE);
}
}
private void extractStyle() {
final String resName = AndroidExtractStyleAction.doExtractStyle(myModule, myComponent.getTag(), false, null);
if (resName == null) {
return;
}
myResultResourceName = "@style/" + resName;
close(OK_EXIT_CODE);
}
@Override
public JComponent getPreferredFocusedComponent() {
return myProjectPanel.myTree;
}
@Override
protected JComponent createCenterPanel() {
return myContentPanel;
}
@Override
protected Action[] createLeftSideActions() {
return new Action[]{myNewResourceAction};
}
@Override
protected void dispose() {
super.dispose();
Disposer.dispose(myProjectPanel.myTreeBuilder);
Disposer.dispose(mySystemPanel.myTreeBuilder);
}
public String getResourceName() {
return myResultResourceName;
}
@Override
protected void doOKAction() {
valueChanged(null);
super.doOKAction();
}
@Nullable
private static <T> T getSelectedElement(AbstractTreeBuilder treeBuilder, Class<T> elementClass) {
Set<T> elements = treeBuilder.getSelectedElements(elementClass);
return elements.isEmpty() ? null : elements.iterator().next();
}
@Override
public void valueChanged(@Nullable TreeSelectionEvent e) {
Component selectedComponent = myContentPanel.getSelectedComponent();
if (selectedComponent == myColorPicker) {
Color color = myColorPicker.getColor();
setOKActionEnabled(color != null);
myNewResourceAction.setEnabled(false);
myResultResourceName = color == null ? null : "#" + toHex(color.getRed()) + toHex(color.getGreen()) + toHex(color.getBlue());
}
else {
boolean isProjectPanel = selectedComponent == myProjectPanel.myComponent;
ResourcePanel panel = isProjectPanel ? myProjectPanel : mySystemPanel;
ResourceItem element = getSelectedElement(panel.myTreeBuilder, ResourceItem.class);
setOKActionEnabled(element != null);
myNewResourceAction.setEnabled(isProjectPanel && !panel.myTreeBuilder.getSelectedElements().isEmpty());
if (element == null) {
myResultResourceName = null;
}
else {
String prefix = panel == myProjectPanel ? "@" : ANDROID;
myResultResourceName = prefix + element.getName();
}
panel.showPreview(element);
}
}
private static String toHex(int value) {
String hex = Integer.toString(value, 16);
return hex.length() == 1 ? "0" + hex : hex;
}
private class ResourcePanel {
public final Tree myTree;
public final AbstractTreeBuilder myTreeBuilder;
public final JBSplitter myComponent;
private final JPanel myPreviewPanel;
private final JTextArea myTextArea;
private final JBTabbedPane myTabbedPane;
private final JLabel myImageComponent;
private final JLabel myNoPreviewComponent;
private final ResourceGroup[] myGroups;
private final ResourceManager myManager;
public ResourcePanel(AndroidFacet facet, ResourceType[] types, boolean system) {
myTree = new Tree();
myTree.setModel(new DefaultTreeModel(new DefaultMutableTreeNode()));
myTree.setScrollsOnExpand(true);
myTree.setRootVisible(false);
myTree.setShowsRootHandles(true);
new DoubleClickListener() {
@Override
protected boolean onDoubleClick(MouseEvent e) {
if (!myTreeBuilder.getSelectedElements(ResourceItem.class).isEmpty()) {
close(OK_EXIT_CODE);
return true;
}
return false;
}
}.installOn(myTree);
ToolTipManager.sharedInstance().registerComponent(myTree);
TreeUtil.installActions(myTree);
myManager = facet.getResourceManager(system ? AndroidUtils.SYSTEM_RESOURCE_PACKAGE : null);
myGroups = new ResourceGroup[types.length];
for (int i = 0; i < types.length; i++) {
myGroups[i] = new ResourceGroup(types[i], myManager);
}
myTreeBuilder =
new AbstractTreeBuilder(myTree, (DefaultTreeModel)myTree.getModel(), new TreeContentProvider(myGroups), null);
myTreeBuilder.initRootNode();
TreeSelectionModel selectionModel = myTree.getSelectionModel();
selectionModel.setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
selectionModel.addTreeSelectionListener(ResourceDialog.this);
myTree.setCellRenderer(new NodeRenderer() {
@Override
protected void doAppend(@NotNull @Nls String fragment,
@NotNull SimpleTextAttributes attributes,
boolean isMainText,
boolean selected) {
SpeedSearchUtil.appendFragmentsForSpeedSearch(myTree, fragment, attributes, selected, this);
}
@Override
public void doAppend(@NotNull String fragment, @NotNull SimpleTextAttributes attributes, boolean selected) {
SpeedSearchUtil.appendFragmentsForSpeedSearch(myTree, fragment, attributes, selected, this);
}
@Override
public void doAppend(String fragment, boolean selected) {
SpeedSearchUtil.appendFragmentsForSpeedSearch(myTree, fragment, SimpleTextAttributes.REGULAR_ATTRIBUTES, selected, this);
}
});
new TreeSpeedSearch(myTree, TreeSpeedSearch.NODE_DESCRIPTOR_TOSTRING, true);
myComponent = new JBSplitter(true, 0.8f);
myComponent.setSplitterProportionKey("android.resource_dialog_splitter");
myComponent.setFirstComponent(ScrollPaneFactory.createScrollPane(myTree));
myPreviewPanel = new JPanel(new CardLayout());
myComponent.setSecondComponent(myPreviewPanel);
myTextArea = new JTextArea(5, 20);
myPreviewPanel.add(ScrollPaneFactory.createScrollPane(myTextArea), TEXT);
myPreviewPanel.add(myTabbedPane = new JBTabbedPane(JTabbedPane.BOTTOM, JTabbedPane.SCROLL_TAB_LAYOUT), TABS);
myImageComponent = new JLabel();
myImageComponent.setHorizontalAlignment(SwingConstants.CENTER);
myImageComponent.setVerticalAlignment(SwingConstants.CENTER);
myPreviewPanel.add(myImageComponent, IMAGE);
myNoPreviewComponent = new JLabel("No Preview");
myNoPreviewComponent.setHorizontalAlignment(SwingConstants.CENTER);
myNoPreviewComponent.setVerticalAlignment(SwingConstants.CENTER);
myPreviewPanel.add(myNoPreviewComponent, NONE);
}
public void showPreview(@Nullable ResourceItem element) {
CardLayout layout = (CardLayout)myPreviewPanel.getLayout();
if (element == null) {
layout.show(myPreviewPanel, NONE);
return;
}
try {
VirtualFile file = element.getFile();
if (file == null) {
String value = element.getPreviewString();
if (value == null) {
String[] values = element.getPreviewStrings();
if (values == null) {
long time = System.currentTimeMillis();
List<ResourceElement> resources = myManager.findValueResources(element.getGroup().getType().getName(), element.toString());
if (ApplicationManagerEx.getApplicationEx().isInternal()) {
System.out.println("Time: " + (System.currentTimeMillis() - time)); // XXX
}
int size = resources.size();
if (size == 1) {
value = resources.get(0).getRawText();
element.setPreviewString(value);
}
else if (size > 1) {
values = new String[size];
String[] tabNames = new String[size];
for (int i = 0; i < size; i++) {
ResourceElement resource = resources.get(i);
values[i] = resource.getRawText();
String tabName = resource.getXmlTag().getContainingFile().getParent().getName();
tabNames[i] = tabName.substring(tabName.indexOf('-') + 1);
}
element.setPreviewStrings(values, tabNames);
}
else {
layout.show(myPreviewPanel, NONE);
return;
}
}
if (values != null) {
int selectedIndex = myTabbedPane.getSelectedIndex();
myTabbedPane.removeAll();
String[] tabNames = element.getTabNames();
if (selectedIndex == -1) {
for (int i = 0; i < tabNames.length; i++) {
if (tabNames[i].startsWith("en")) {
selectedIndex = i;
break;
}
}
}
for (int i = 0; i < tabNames.length; i++) {
JTextArea textArea = new JTextArea(5, 20);
textArea.setText(values[i]);
textArea.setEditable(false);
myTabbedPane.addTab(tabNames[i], ScrollPaneFactory.createScrollPane(textArea));
}
if (selectedIndex >= 0 && selectedIndex < tabNames.length) {
myTabbedPane.setSelectedIndex(selectedIndex);
}
layout.show(myPreviewPanel, TABS);
return;
}
}
if (value == null) {
layout.show(myPreviewPanel, NONE);
return;
}
myTextArea.setText(value);
myTextArea.setEditable(false);
layout.show(myPreviewPanel, TEXT);
}
else if (ImageFileTypeManager.getInstance().isImage(file)) {
Icon icon = element.getPreviewIcon();
if (icon == null) {
icon = new SizedIcon(100, 100, new ImageIcon(file.getPath()));
element.setPreviewIcon(icon);
}
myImageComponent.setIcon(icon);
layout.show(myPreviewPanel, IMAGE);
}
else if (file.getFileType() == XmlFileType.INSTANCE) {
String value = element.getPreviewString();
if (value == null) {
value = new String(file.contentsToByteArray());
element.setPreviewString(value);
}
myTextArea.setText(value);
myTextArea.setEditable(false);
layout.show(myPreviewPanel, TEXT);
}
else {
layout.show(myPreviewPanel, NONE);
}
}
catch (IOException e) {
layout.show(myPreviewPanel, NONE);
}
}
private void select(String type, String name) {
for (ResourceGroup group : myGroups) {
if (type.equalsIgnoreCase(group.getName())) {
for (ResourceItem item : group.getItems()) {
if (name.equals(item.toString())) {
myTreeBuilder.select(item);
return;
}
}
return;
}
}
}
}
private static class ResourceGroup {
private List<ResourceItem> myItems = new ArrayList<ResourceItem>();
private final ResourceType myType;
public ResourceGroup(ResourceType type, ResourceManager manager) {
myType = type;
final String resourceType = type.getName();
Collection<String> resourceNames = manager.getValueResourceNames(resourceType);
for (String resourceName : resourceNames) {
myItems.add(new ResourceItem(this, resourceName, null, RESOURCE_ITEM_ICON));
}
final Set<String> fileNames = new HashSet<String>();
manager.processFileResources(resourceType, new FileResourceProcessor() {
@Override
public boolean process(@NotNull VirtualFile resFile, @NotNull String resName, @NotNull String resFolderType) {
if (fileNames.add(resName)) {
myItems.add(new ResourceItem(ResourceGroup.this, resName, resFile, resFile.getFileType().getIcon()));
}
return true;
}
});
if (type == ResourceType.ID) {
for (String id : manager.getIds()) {
if (!resourceNames.contains(id)) {
myItems.add(new ResourceItem(this, id, null, RESOURCE_ITEM_ICON));
}
}
}
Collections.sort(myItems, new Comparator<ResourceItem>() {
@Override
public int compare(ResourceItem resource1, ResourceItem resource2) {
return resource1.toString().compareTo(resource2.toString());
}
});
}
public ResourceType getType() {
return myType;
}
public String getName() {
return myType.getName();
}
public List<ResourceItem> getItems() {
return myItems;
}
@Override
public String toString() {
return myType.getDisplayName();
}
}
private static class ResourceItem {
private final ResourceGroup myGroup;
private final String myName;
private final VirtualFile myFile;
private final Icon myIcon;
private String myPreviewString;
private String[] myPreviewStrings;
private String[] myNames;
private Icon myPreviewIcon;
public ResourceItem(@NotNull ResourceGroup group, @NotNull String name, @Nullable VirtualFile file, Icon icon) {
myGroup = group;
myName = name;
myFile = file;
myIcon = icon;
}
public ResourceGroup getGroup() {
return myGroup;
}
public String getName() {
return myGroup.getName() + "/" + myName;
}
public VirtualFile getFile() {
return myFile;
}
public Icon getIcon() {
return myIcon;
}
public String getPreviewString() {
return myPreviewString;
}
public void setPreviewString(String previewString) {
myPreviewString = previewString;
}
public String[] getPreviewStrings() {
return myPreviewStrings;
}
public String[] getTabNames() {
return myNames;
}
public void setPreviewStrings(String[] previewStrings, String[] names) {
myPreviewStrings = previewStrings;
myNames = names;
}
public Icon getPreviewIcon() {
return myPreviewIcon;
}
public void setPreviewIcon(Icon previewIcon) {
myPreviewIcon = previewIcon;
}
@Override
public String toString() {
return myName;
}
}
private static class TreeContentProvider extends AbstractTreeStructure {
private final Object myTreeRoot = new Object();
private final ResourceGroup[] myGroups;
public TreeContentProvider(ResourceGroup[] groups) {
myGroups = groups;
}
@Override
public Object getRootElement() {
return myTreeRoot;
}
@Override
public Object[] getChildElements(Object element) {
if (element == myTreeRoot) {
return myGroups;
}
if (element instanceof ResourceGroup) {
ResourceGroup group = (ResourceGroup)element;
return group.getItems().toArray();
}
return ArrayUtil.EMPTY_OBJECT_ARRAY;
}
@Override
public Object getParentElement(Object element) {
if (element instanceof ResourceItem) {
ResourceItem resource = (ResourceItem)element;
return resource.getGroup();
}
return null;
}
@NotNull
@Override
public NodeDescriptor createDescriptor(Object element, NodeDescriptor parentDescriptor) {
TreeNodeDescriptor descriptor = new TreeNodeDescriptor(parentDescriptor, element, element == null ? null : element.toString());
if (element instanceof ResourceGroup) {
descriptor.setIcon(AllIcons.Nodes.TreeClosed);
}
else if (element instanceof ResourceItem) {
descriptor.setIcon(((ResourceItem)element).getIcon());
}
return descriptor;
}
@Override
public boolean hasSomethingToCommit() {
return false;
}
@Override
public void commit() {
}
}
}
| IDEA-93044
| plugins/android-designer/src/com/intellij/android/designer/propertyTable/editors/ResourceDialog.java | IDEA-93044 | <ide><path>lugins/android-designer/src/com/intellij/android/designer/propertyTable/editors/ResourceDialog.java
<ide> import com.intellij.openapi.util.Disposer;
<ide> import com.intellij.openapi.util.text.StringUtil;
<ide> import com.intellij.openapi.vfs.VirtualFile;
<add>import com.intellij.psi.PsiDirectory;
<ide> import com.intellij.psi.PsiDocumentManager;
<ide> import com.intellij.psi.xml.XmlFile;
<ide> import com.intellij.psi.xml.XmlTag;
<ide> }
<ide> }
<ide> if (doSelection && value.startsWith("@")) {
<add> value = StringUtil.replace(value, "+", "");
<ide> int index = value.indexOf('/');
<ide> if (index != -1) {
<ide> ResourcePanel panel;
<ide> public void showPreview(@Nullable ResourceItem element) {
<ide> CardLayout layout = (CardLayout)myPreviewPanel.getLayout();
<ide>
<del> if (element == null) {
<add> if (element == null || element.getGroup().getType() == ResourceType.ID) {
<ide> layout.show(myPreviewPanel, NONE);
<ide> return;
<ide> }
<ide>
<ide> int size = resources.size();
<ide> if (size == 1) {
<del> value = resources.get(0).getRawText();
<add> value = getResourceElementValue(resources.get(0));
<ide> element.setPreviewString(value);
<ide> }
<ide> else if (size > 1) {
<ide> String[] tabNames = new String[size];
<ide> for (int i = 0; i < size; i++) {
<ide> ResourceElement resource = resources.get(i);
<del> values[i] = resource.getRawText();
<del>
<del> String tabName = resource.getXmlTag().getContainingFile().getParent().getName();
<add> values[i] = getResourceElementValue(resource);
<add>
<add> PsiDirectory directory = resource.getXmlTag().getContainingFile().getParent();
<add> String tabName = directory == null ? "unknown-" + i : directory.getName();
<ide> tabNames[i] = tabName.substring(tabName.indexOf('-') + 1);
<ide> }
<ide> element.setPreviewStrings(values, tabNames);
<ide> }
<ide> }
<ide>
<add> private static String getResourceElementValue(ResourceElement element) {
<add> String text = element.getRawText();
<add> if (StringUtil.isEmpty(text)) {
<add> return element.getXmlTag().getText();
<add> }
<add> return text;
<add> }
<add>
<ide> private static class ResourceGroup {
<ide> private List<ResourceItem> myItems = new ArrayList<ResourceItem>();
<ide> private final ResourceType myType; |
|
Java | apache-2.0 | 341a0859c4abf46199d5207c3d4f482fb705829a | 0 | adjohnson916/incubator-groovy,yukangguo/incubator-groovy,antoaravinth/incubator-groovy,guangying945/incubator-groovy,groovy/groovy-core,jwagenleitner/incubator-groovy,paulk-asert/groovy,rlovtangen/groovy-core,rlovtangen/groovy-core,mariogarcia/groovy-core,taoguan/incubator-groovy,EPadronU/incubator-groovy,christoph-frick/groovy-core,russel/groovy,sagarsane/incubator-groovy,ebourg/groovy-core,aim-for-better/incubator-groovy,paulk-asert/groovy,ebourg/groovy-core,i55ac/incubator-groovy,ebourg/incubator-groovy,apache/groovy,PascalSchumacher/incubator-groovy,paplorinc/incubator-groovy,jwagenleitner/groovy,armsargis/groovy,upadhyayap/incubator-groovy,alien11689/groovy-core,russel/incubator-groovy,dpolivaev/groovy,upadhyayap/incubator-groovy,ChanJLee/incubator-groovy,bsideup/incubator-groovy,sagarsane/incubator-groovy,i55ac/incubator-groovy,genqiang/incubator-groovy,ebourg/incubator-groovy,pickypg/incubator-groovy,kidaa/incubator-groovy,avafanasiev/groovy,bsideup/incubator-groovy,rabbitcount/incubator-groovy,russel/groovy,russel/incubator-groovy,EPadronU/incubator-groovy,paplorinc/incubator-groovy,i55ac/incubator-groovy,nobeans/incubator-groovy,ChanJLee/incubator-groovy,alien11689/groovy-core,taoguan/incubator-groovy,PascalSchumacher/incubator-groovy,pledbrook/incubator-groovy,genqiang/incubator-groovy,adjohnson916/groovy-core,dpolivaev/groovy,shils/incubator-groovy,eginez/incubator-groovy,paulk-asert/incubator-groovy,alien11689/incubator-groovy,kenzanmedia/incubator-groovy,aaronzirbes/incubator-groovy,jwagenleitner/groovy,avafanasiev/groovy,graemerocher/incubator-groovy,apache/groovy,eginez/incubator-groovy,kidaa/incubator-groovy,paulk-asert/incubator-groovy,bsideup/groovy-core,alien11689/groovy-core,shils/incubator-groovy,armsargis/groovy,bsideup/incubator-groovy,fpavageau/groovy,bsideup/groovy-core,groovy/groovy-core,tkruse/incubator-groovy,kenzanmedia/incubator-groovy,ebourg/incubator-groovy,samanalysis
/incubator-groovy,i55ac/incubator-groovy,upadhyayap/incubator-groovy,adjohnson916/incubator-groovy,adjohnson916/incubator-groovy,aaronzirbes/incubator-groovy,adjohnson916/groovy-core,samanalysis/incubator-groovy,samanalysis/incubator-groovy,bsideup/groovy-core,pickypg/incubator-groovy,shils/groovy,groovy/groovy-core,antoaravinth/incubator-groovy,pickypg/incubator-groovy,bsideup/incubator-groovy,pickypg/incubator-groovy,traneHead/groovy-core,aaronzirbes/incubator-groovy,russel/groovy,gillius/incubator-groovy,EPadronU/incubator-groovy,nobeans/incubator-groovy,russel/incubator-groovy,jwagenleitner/incubator-groovy,shils/incubator-groovy,nkhuyu/incubator-groovy,PascalSchumacher/incubator-groovy,jwagenleitner/groovy,apache/groovy,armsargis/groovy,adjohnson916/incubator-groovy,aim-for-better/incubator-groovy,rlovtangen/groovy-core,dpolivaev/groovy,sagarsane/groovy-core,groovy/groovy-core,traneHead/groovy-core,nkhuyu/incubator-groovy,ChanJLee/incubator-groovy,graemerocher/incubator-groovy,mariogarcia/groovy-core,kidaa/incubator-groovy,yukangguo/incubator-groovy,apache/groovy,avafanasiev/groovy,pledbrook/incubator-groovy,mariogarcia/groovy-core,aim-for-better/incubator-groovy,bsideup/groovy-core,mariogarcia/groovy-core,ebourg/groovy-core,adjohnson916/groovy-core,tkruse/incubator-groovy,adjohnson916/groovy-core,pledbrook/incubator-groovy,alien11689/groovy-core,gillius/incubator-groovy,antoaravinth/incubator-groovy,ebourg/incubator-groovy,armsargis/groovy,christoph-frick/groovy-core,guangying945/incubator-groovy,dpolivaev/groovy,sagarsane/groovy-core,sagarsane/incubator-groovy,gillius/incubator-groovy,ebourg/groovy-core,fpavageau/groovy,yukangguo/incubator-groovy,rlovtangen/groovy-core,tkruse/incubator-groovy,paplorinc/incubator-groovy,traneHead/groovy-core,paulk-asert/groovy,nobeans/incubator-groovy,christoph-frick/groovy-core,christoph-frick/groovy-core,kenzanmedia/incubator-groovy,eginez/incubator-groovy,tkruse/incubator-groovy,graemerocher/incubator-groovy,alien11689/groo
vy-core,graemerocher/incubator-groovy,genqiang/incubator-groovy,paulk-asert/incubator-groovy,alien11689/incubator-groovy,paulk-asert/incubator-groovy,taoguan/incubator-groovy,shils/groovy,paplorinc/incubator-groovy,traneHead/groovy-core,apache/incubator-groovy,upadhyayap/incubator-groovy,shils/groovy,jwagenleitner/incubator-groovy,aim-for-better/incubator-groovy,adjohnson916/groovy-core,avafanasiev/groovy,rabbitcount/incubator-groovy,groovy/groovy-core,apache/incubator-groovy,rabbitcount/incubator-groovy,aaronzirbes/incubator-groovy,mariogarcia/groovy-core,apache/incubator-groovy,christoph-frick/groovy-core,apache/incubator-groovy,ChanJLee/incubator-groovy,gillius/incubator-groovy,guangying945/incubator-groovy,pledbrook/incubator-groovy,kenzanmedia/incubator-groovy,EPadronU/incubator-groovy,paulk-asert/groovy,kidaa/incubator-groovy,sagarsane/incubator-groovy,eginez/incubator-groovy,rabbitcount/incubator-groovy,sagarsane/groovy-core,ebourg/groovy-core,antoaravinth/incubator-groovy,jwagenleitner/incubator-groovy,shils/groovy,alien11689/incubator-groovy,taoguan/incubator-groovy,genqiang/incubator-groovy,yukangguo/incubator-groovy,russel/incubator-groovy,nobeans/incubator-groovy,PascalSchumacher/incubator-groovy,russel/groovy,jwagenleitner/groovy,paulk-asert/incubator-groovy,PascalSchumacher/incubator-groovy,rlovtangen/groovy-core,samanalysis/incubator-groovy,sagarsane/groovy-core,sagarsane/groovy-core,fpavageau/groovy,nkhuyu/incubator-groovy,guangying945/incubator-groovy,fpavageau/groovy,shils/incubator-groovy,alien11689/incubator-groovy,nkhuyu/incubator-groovy | /*
* Copyright 2003-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.ant;
import groovy.lang.GroovyClassLoader;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.tools.ant.AntClassLoader;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.RuntimeConfigurable;
import org.apache.tools.ant.taskdefs.Execute;
import org.apache.tools.ant.taskdefs.Javac;
import org.apache.tools.ant.taskdefs.MatchingTask;
import org.apache.tools.ant.types.Path;
import org.apache.tools.ant.types.Reference;
import org.apache.tools.ant.util.GlobPatternMapper;
import org.apache.tools.ant.util.SourceFileScanner;
import org.codehaus.groovy.control.CompilationUnit;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.codehaus.groovy.tools.ErrorReporter;
import org.codehaus.groovy.tools.FileSystemCompiler;
import org.codehaus.groovy.tools.RootLoader;
import org.codehaus.groovy.tools.javac.JavaAwareCompilationUnit;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
/**
* Compiles Groovy source files. This task can take the following arguments:
* <ul>
* <li>srcdir</li>
* <li>destdir</li>
* <li>classpath</li>
* <li>encoding</li>
* <li>verbose</li>
* <li>failonerror</li>
* <li>includeantruntime</li>
* <li>includejavaruntime</li>
* <li>memoryInitialSize</li>
* <li>memoryMaximumSize</li>
* <li>fork</li>
* <li>stacktrace</li>
* <li>stubdir</li>
* </ul>
* Of these arguments, the <b>srcdir</b> and <b>destdir</b> are required.
* <p/>
* <p>When this task executes, it will recursively scan srcdir and destdir looking for Groovy source files
* to compile. This task makes its compile decision based on timestamp.</p>
* <p/>
* <p>Based heavily on the Javac implementation in Ant.</p>
*
* @author <a href="mailto:[email protected]">James Strachan</a>
* @author Hein Meling
* @author <a href="mailto:[email protected]">Russel Winder</a>
* @author Danno Ferrin
* @version $Revision$
*/
public class Groovyc extends MatchingTask {
private final LoggingHelper log = new LoggingHelper(this);
private Path src;
private File destDir;
private Path compileClasspath;
private Path compileSourcepath;
private String encoding;
private boolean stacktrace = false;
private boolean verbose = false;
private boolean includeAntRuntime = true;
private boolean includeJavaRuntime = false;
private boolean fork = false;
private File forkJDK;
private String memoryInitialSize;
private String memoryMaximumSize;
protected boolean failOnError = true;
protected boolean listFiles = false;
protected File[] compileList = new File[0];
private String updatedProperty;
private String errorProperty;
private boolean taskSuccess = true; // assume the best
private boolean includeDestClasses = true;
protected CompilerConfiguration configuration;
private Javac javac;
private boolean jointCompilation;
private List<File> temporaryFiles = new ArrayList(2);
/**
* Adds a path for source compilation.
*
* @return a nested src element.
*/
public Path createSrc() {
if (src == null) {
src = new Path(getProject());
}
return src.createPath();
}
/**
* Recreate src.
*
* @return a nested src element.
*/
protected Path recreateSrc() {
src = null;
return createSrc();
}
/**
* Set the source directories to find the source Java files.
*
* @param srcDir the source directories as a path
*/
public void setSrcdir(Path srcDir) {
if (src == null) {
src = srcDir;
} else {
src.append(srcDir);
}
}
/**
* Gets the source dirs to find the source java files.
*
* @return the source directories as a path
*/
public Path getSrcdir() {
return src;
}
/**
* Set the destination directory into which the Java source
* files should be compiled.
*
* @param destDir the destination director
*/
public void setDestdir(File destDir) {
this.destDir = destDir;
}
/**
* Gets the destination directory into which the java source files
* should be compiled.
*
* @return the destination directory
*/
public File getDestdir() {
return destDir;
}
/**
* Set the sourcepath to be used for this compilation.
*
* @param sourcepath the source path
*/
public void setSourcepath(Path sourcepath) {
if (compileSourcepath == null) {
compileSourcepath = sourcepath;
} else {
compileSourcepath.append(sourcepath);
}
}
/**
* Gets the sourcepath to be used for this compilation.
*
* @return the source path
*/
public Path getSourcepath() {
return compileSourcepath;
}
/**
* Adds a path to sourcepath.
*
* @return a sourcepath to be configured
*/
public Path createSourcepath() {
if (compileSourcepath == null) {
compileSourcepath = new Path(getProject());
}
return compileSourcepath.createPath();
}
/**
* Adds a reference to a source path defined elsewhere.
*
* @param r a reference to a source path
*/
public void setSourcepathRef(Reference r) {
createSourcepath().setRefid(r);
}
/**
* Set the classpath to be used for this compilation.
*
* @param classpath an Ant Path object containing the compilation classpath.
*/
public void setClasspath(Path classpath) {
if (compileClasspath == null) {
compileClasspath = classpath;
} else {
compileClasspath.append(classpath);
}
}
/**
* Gets the classpath to be used for this compilation.
*
* @return the class path
*/
public Path getClasspath() {
return compileClasspath;
}
/**
* Adds a path to the classpath.
*
* @return a class path to be configured
*/
public Path createClasspath() {
if (compileClasspath == null) {
compileClasspath = new Path(getProject());
}
return compileClasspath.createPath();
}
/**
* Adds a reference to a classpath defined elsewhere.
*
* @param r a reference to a classpath
*/
public void setClasspathRef(Reference r) {
createClasspath().setRefid(r);
}
/**
* If true, list the source files being handed off to the compiler.
* Default is false.
*
* @param list if true list the source files
*/
public void setListfiles(boolean list) {
listFiles = list;
}
/**
* Get the listfiles flag.
*
* @return the listfiles flag
*/
public boolean getListfiles() {
return listFiles;
}
/**
* Indicates whether the build will continue
* even if there are compilation errors; defaults to true.
*
* @param fail if true halt the build on failure
*/
public void setFailonerror(boolean fail) {
failOnError = fail;
}
/**
* @param proceed inverse of failonerror
*/
public void setProceed(boolean proceed) {
failOnError = !proceed;
}
/**
* Gets the failonerror flag.
*
* @return the failonerror flag
*/
public boolean getFailonerror() {
return failOnError;
}
/**
* The initial size of the memory for the underlying VM
* if javac is run externally; ignored otherwise.
* Defaults to the standard VM memory setting.
* (Examples: 83886080, 81920k, or 80m)
*
* @param memoryInitialSize string to pass to VM
*/
public void setMemoryInitialSize(String memoryInitialSize) {
this.memoryInitialSize = memoryInitialSize;
}
/**
* Gets the memoryInitialSize flag.
*
* @return the memoryInitialSize flag
*/
public String getMemoryInitialSize() {
return memoryInitialSize;
}
/**
* The maximum size of the memory for the underlying VM
* if javac is run externally; ignored otherwise.
* Defaults to the standard VM memory setting.
* (Examples: 83886080, 81920k, or 80m)
*
* @param memoryMaximumSize string to pass to VM
*/
public void setMemoryMaximumSize(String memoryMaximumSize) {
this.memoryMaximumSize = memoryMaximumSize;
}
/**
* Gets the memoryMaximumSize flag.
*
* @return the memoryMaximumSize flag
*/
public String getMemoryMaximumSize() {
return memoryMaximumSize;
}
/**
* Sets the file encoding for generated files.
*
* @param encoding the file encoding to be used
*/
public void setEncoding(String encoding) {
this.encoding = encoding;
}
/**
* Returns the encoding to be used when creating files.
*
* @return the file encoding to use
*/
public String getEncoding() {
return encoding;
}
/**
* Enable verbose compiling which will display which files
* are being compiled. Default is false.
*/
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
/**
* Gets the verbose flag.
*
* @return the verbose flag
*/
public boolean getVerbose() {
return verbose;
}
/**
* If true, includes Ant's own classpath in the classpath. Default is true.
* If setting to false and using groovyc in conjunction with AntBuilder
* you might need to explicitly add the Groovy jar(s) to the groovyc
* classpath using a nested classpath task.
*
* @param include if true, includes Ant's own classpath in the classpath
*/
public void setIncludeantruntime(boolean include) {
includeAntRuntime = include;
}
/**
* Gets whether or not the ant classpath is to be included in the classpath.
*
* @return whether or not the ant classpath is to be included in the classpath
*/
public boolean getIncludeantruntime() {
return includeAntRuntime;
}
/**
* If true, includes the Java runtime libraries in the classpath. Default is false.
*
* @param include if true, includes the Java runtime libraries in the classpath
*/
public void setIncludejavaruntime(boolean include) {
includeJavaRuntime = include;
}
/**
* Gets whether or not the java runtime should be included in this
* task's classpath.
*
* @return the includejavaruntime attribute
*/
public boolean getIncludejavaruntime() {
return includeJavaRuntime;
}
/**
* If true forks the Groovy compiler. Default is false.
*
* @param f "true|false|on|off|yes|no"
*/
public void setFork(boolean f) {
fork = f;
}
/**
* The JDK Home to use when forked.
*
* @param home the java.home value to use, default is the current JDK's home
*/
public void setJavaHome(File home) {
forkJDK = home;
}
/**
* The property to set on compilation success.
* This property will not be set if the compilation
* fails, or if there are no files to compile.
*
* @param updatedProperty the property name to use.
*/
public void setUpdatedProperty(String updatedProperty) {
this.updatedProperty = updatedProperty;
}
/**
* The property to set on compilation failure.
* This property will be set if the compilation
* fails.
*
* @param errorProperty the property name to use.
*/
public void setErrorProperty(String errorProperty) {
this.errorProperty = errorProperty;
}
/**
* This property controls whether to include the
* destination classes directory in the classpath
* given to the compiler.
* The default value is "true".
*
* @param includeDestClasses the value to use.
*/
public void setIncludeDestClasses(boolean includeDestClasses) {
this.includeDestClasses = includeDestClasses;
}
/**
* Get the value of the includeDestClasses property.
*
* @return the value.
*/
public boolean isIncludeDestClasses() {
return includeDestClasses;
}
/**
* Get the result of the groovyc task (success or failure).
*
* @return true if compilation succeeded, or
* was not necessary, false if the compilation failed.
*/
public boolean getTaskSuccess() {
return taskSuccess;
}
/*
public void setJointCompilationOptions(String options) {
String[] args = StringHelper.tokenizeUnquoted(options);
evalCompilerFlags(args);
}
*/
/**
* Add the configured nested javac task if present to initiate joint compilation.
*/
public void addConfiguredJavac(final Javac javac) {
this.javac = javac;
jointCompilation = true;
}
/**
* Enable compiler to report stack trace information if a problem occurs
* during compilation. Default is false.
*/
public void setStacktrace(boolean stacktrace) {
this.stacktrace = stacktrace;
}
/**
* Executes the task.
*
* @throws BuildException if an error occurs
*/
public void execute() throws BuildException {
checkParameters();
resetFileLists();
if (javac != null) jointCompilation = true;
// scan source directories and dest directory to build up
// compile lists
String[] list = src.list();
for (String filename : list) {
File file = getProject().resolveFile(filename);
if (!file.exists()) {
throw new BuildException("srcdir \"" + file.getPath() + "\" does not exist!", getLocation());
}
DirectoryScanner ds = this.getDirectoryScanner(file);
String[] files = ds.getIncludedFiles();
scanDir(file, destDir != null ? destDir : file, files);
}
compile();
if (updatedProperty != null
&& taskSuccess
&& compileList.length != 0) {
getProject().setNewProperty(updatedProperty, "true");
}
}
/**
* Clear the list of files to be compiled and copied..
*/
protected void resetFileLists() {
compileList = new File[0];
}
/**
* Scans the directory looking for source files to be compiled.
* The results are returned in the class variable compileList
*
* @param srcDir The source directory
* @param destDir The destination directory
* @param files An array of filenames
*/
protected void scanDir(File srcDir, File destDir, String[] files) {
GlobPatternMapper m = new GlobPatternMapper();
m.setFrom("*.groovy");
m.setTo("*.class");
SourceFileScanner sfs = new SourceFileScanner(this);
File[] newFiles = sfs.restrictAsFiles(files, srcDir, destDir, m);
addToCompileList(newFiles);
if (jointCompilation) {
m.setFrom("*.java");
m.setTo("*.class");
newFiles = sfs.restrictAsFiles(files, srcDir, destDir, m);
addToCompileList(newFiles);
}
}
protected void addToCompileList(File[] newFiles) {
if (newFiles.length > 0) {
File[] newCompileList = new File[compileList.length + newFiles.length];
System.arraycopy(compileList, 0, newCompileList, 0, compileList.length);
System.arraycopy(newFiles, 0, newCompileList, compileList.length, newFiles.length);
compileList = newCompileList;
}
}
/**
* Gets the list of files to be compiled.
*
* @return the list of files as an array
*/
public File[] getFileList() {
return compileList;
}
protected void checkParameters() throws BuildException {
if (src == null) {
throw new BuildException("srcdir attribute must be set!", getLocation());
}
if (src.size() == 0) {
throw new BuildException("srcdir attribute must be set!", getLocation());
}
if (destDir != null && !destDir.isDirectory()) {
throw new BuildException("destination directory \""
+ destDir
+ "\" does not exist or is not a directory",
getLocation());
}
if (encoding != null && !Charset.isSupported(encoding)) {
throw new BuildException("encoding \"" + encoding + "\" not supported.");
}
}
protected void compile() {
try {
if (compileList.length > 0) {
log("Compiling " + compileList.length + " source file"
+ (compileList.length == 1 ? "" : "s")
+ (destDir != null ? " to " + destDir : ""));
if (listFiles) {
for (File srcFile : compileList) {
log(srcFile.getAbsolutePath());
}
}
Path classpath = getClasspath() != null ? getClasspath() : new Path(getProject());
// extract joint options, some get pushed up...
List<String> jointOptions = new ArrayList<String>();
if (jointCompilation) {
RuntimeConfigurable rc = javac.getRuntimeConfigurableWrapper();
for (Iterator i = rc.getAttributeMap().entrySet().iterator(); i.hasNext();) {
final Map.Entry e = (Map.Entry) i.next();
final String key = e.getKey().toString();
final String value = getProject().replaceProperties(e.getValue().toString());
if (key.contains("debug")) {
String level = "";
if (javac.getDebugLevel() != null) {
level = ":" + javac.getDebugLevel();
}
jointOptions.add("-Fg" + level);
} else if (key.contains("debugLevel")) {
// ignore, taken care of in debug
} else if ((key.contains("nowarn"))
|| (key.contains("verbose"))
|| (key.contains("deprecation"))) {
// false is default, so something to do only in true case
if ("on".equalsIgnoreCase(value) || "true".equalsIgnoreCase(value) || "yes".equalsIgnoreCase("value"))
jointOptions.add("-F" + key);
} else if (key.contains("classpath")) {
classpath.add(javac.getClasspath());
} else if ((key.contains("depend"))
|| (key.contains("extdirs"))
|| (key.contains("encoding"))
|| (key.contains("source"))
|| (key.contains("target"))
|| (key.contains("verbose"))) {
jointOptions.add("-J" + key + "=" + value);
} else {
log("The option " + key + " cannot be set on the contained <javac> element. The option will be ignored", Project.MSG_WARN);
}
// includes? excludes?
}
// ant's <javac> supports nested <compilerarg value=""> elements (there can be multiple of them)
// for additional options to be passed to javac.
Enumeration children = rc.getChildren();
while (children.hasMoreElements()) {
RuntimeConfigurable childrc = (RuntimeConfigurable) children.nextElement();
if(childrc.getElementTag().equals("compilerarg")) {
for (Iterator i = childrc.getAttributeMap().entrySet().iterator(); i.hasNext();) {
final Map.Entry e = (Map.Entry) i.next();
final String key = e.getKey().toString();
if(key.equals("value")) {
final String value = getProject().replaceProperties(e.getValue().toString());
StringTokenizer st = new StringTokenizer(value, " ");
while(st.hasMoreTokens()) {
String optionStr = st.nextToken();
jointOptions.add(optionStr.replace("-X", "-FX"));
}
}
}
}
}
}
String separator = System.getProperty("file.separator");
List<String> commandLineList = new ArrayList<String>();
if (fork) {
String javaHome;
if (forkJDK != null) {
javaHome = forkJDK.getPath();
} else {
javaHome = System.getProperty("java.home");
}
if (includeAntRuntime) {
classpath.addExisting((new Path(getProject())).concatSystemClasspath("last"));
}
if (includeJavaRuntime) {
classpath.addJavaRuntime();
}
commandLineList.add(javaHome + separator + "bin" + separator + "java");
commandLineList.add("-classpath");
commandLineList.add(classpath.toString());
if ((memoryInitialSize != null) && !memoryInitialSize.equals("")) {
commandLineList.add("-Xms" + memoryInitialSize);
}
if ((memoryMaximumSize != null) && !memoryMaximumSize.equals("")) {
commandLineList.add("-Xmx" + memoryMaximumSize);
}
commandLineList.add("org.codehaus.groovy.tools.FileSystemCompiler");
}
commandLineList.add("--classpath");
commandLineList.add(classpath.toString());
if (jointCompilation) {
commandLineList.add("-j");
commandLineList.addAll(jointOptions);
}
commandLineList.add("-d");
commandLineList.add(destDir.getPath());
if (encoding != null) {
commandLineList.add("--encoding");
commandLineList.add(encoding);
}
if (stacktrace) {
commandLineList.add("-e");
}
// check to see if an external file is needed
int count = 0;
if (fork) {
for (File srcFile : compileList) {
count += srcFile.getPath().length();
}
for (Object commandLineArg : commandLineList) {
count += commandLineArg.toString().length();
}
count += compileList.length;
count += commandLineList.size();
}
// 32767 is the command line length limit on Windows
if (fork && (count > 32767)) {
try {
File tempFile = File.createTempFile("groovyc-files-", ".txt");
temporaryFiles.add(tempFile);
PrintWriter pw = new PrintWriter(new FileWriter(tempFile));
for (File srcFile : compileList) {
pw.println(srcFile.getPath());
}
pw.close();
commandLineList.add("@" + tempFile.getPath());
} catch (IOException e) {
log("Error creating file list", e, Project.MSG_ERR);
}
} else {
for (File srcFile : compileList) {
commandLineList.add(srcFile.getPath());
}
}
final String[] commandLine = new String[commandLineList.size()];
for (int i = 0; i < commandLine.length; ++i) {
commandLine[i] = commandLineList.get(i);
}
log("Compilation arguments:", Project.MSG_VERBOSE);
log(DefaultGroovyMethods.join(commandLine, "\n"), Project.MSG_VERBOSE);
if (fork) {
// use the main method in FileSystemCompiler
final Execute executor = new Execute(); // new LogStreamHandler ( attributes , Project.MSG_INFO , Project.MSG_WARN ) ) ;
executor.setAntRun(getProject());
executor.setWorkingDirectory(getProject().getBaseDir());
executor.setCommandline(commandLine);
try {
executor.execute();
}
catch (final IOException ioe) {
throw new BuildException("Error running forked groovyc.", ioe);
}
final int returnCode = executor.getExitValue();
if (returnCode != 0) {
if (failOnError) {
throw new BuildException("Forked groovyc returned error code: " + returnCode);
} else {
log("Forked groovyc returned error code: " + returnCode, Project.MSG_ERR);
}
}
} else {
// hand crank it so we can add our own compiler configuration
try {
Options options = FileSystemCompiler.createCompilationOptions();
PosixParser cliParser = new PosixParser();
CommandLine cli;
cli = cliParser.parse(options, commandLine);
configuration = FileSystemCompiler.generateCompilerConfigurationFromOptions(cli);
// Load the file name list
String[] filenames = FileSystemCompiler.generateFileNamesFromOptions(cli);
boolean fileNameErrors = filenames == null;
fileNameErrors = fileNameErrors && !FileSystemCompiler.validateFiles(filenames);
if (!fileNameErrors) {
FileSystemCompiler.doCompilation(configuration, makeCompileUnit(), filenames);
}
} catch (Exception re) {
Throwable t = re;
if ((re.getClass() == RuntimeException.class) && (re.getCause() != null)) {
// unwrap to the real exception
t = re.getCause();
}
StringWriter writer = new StringWriter();
new ErrorReporter(t, false).write(new PrintWriter(writer));
String message = writer.toString();
if (failOnError) {
log(message, Project.MSG_INFO);
throw new BuildException("Compilation Failed", t, getLocation());
} else {
log(message, Project.MSG_ERR);
}
}
}
}
} finally {
for (File temporaryFile : temporaryFiles) {
try {
FileSystemCompiler.deleteRecursive(temporaryFile);
} catch (Throwable t) {
System.err.println("error: could not delete temp files - " + temporaryFile.getPath());
}
}
}
}
protected CompilationUnit makeCompileUnit() {
if (configuration.getJointCompilationOptions() != null) {
if (!configuration.getJointCompilationOptions().containsKey("stubDir")) {
try {
File tempStubDir = FileSystemCompiler.createTempDir();
temporaryFiles.add(tempStubDir);
configuration.getJointCompilationOptions().put("stubDir", tempStubDir);
} catch (IOException ioe) {
throw new BuildException(ioe);
}
}
return new JavaAwareCompilationUnit(configuration, buildClassLoaderFor());
} else {
return new CompilationUnit(configuration, null, buildClassLoaderFor());
}
}
protected GroovyClassLoader buildClassLoaderFor() {
ClassLoader parent = getIncludeantruntime()
? getClass().getClassLoader()
: new AntClassLoader(new RootLoader(new URL[0], null), getProject(), getClasspath());
if (parent instanceof AntClassLoader) {
AntClassLoader antLoader = (AntClassLoader) parent;
String[] pathElm = antLoader.getClasspath().split(File.pathSeparator);
List<String> classpath = configuration.getClasspath();
/*
* Iterate over the classpath provided to groovyc, and add any missing path
* entries to the AntClassLoader. This is a workaround, since for some reason
* 'directory' classpath entries were not added to the AntClassLoader' classpath.
*/
for (String cpEntry : classpath) {
boolean found = false;
for (String path : pathElm) {
if (cpEntry.equals(path)) {
found = true;
break;
}
}
/*
* fix for GROOVY-2284
* seems like AntClassLoader doesn't check if the file
* may not exist in the classpath yet
*/
if (!found && new File(cpEntry).exists())
antLoader.addPathElement(cpEntry);
}
}
return new GroovyClassLoader(parent, configuration);
}
/**
* Set the stub directory into which the Java source stub
* files should be generated. The directory should exist
* will not be deleted automatically.
*
* @param stubDir the stub directory
*/
public void setStubdir(File stubDir) {
jointCompilation = true;
configuration.getJointCompilationOptions().put("stubDir", stubDir);
}
/**
* Gets the stub directory into which the Java source stub
* files should be generated
*
* @return the stub directory
*/
public File getStubdir() {
return (File) configuration.getJointCompilationOptions().get("stubDir");
}
}
| src/main/org/codehaus/groovy/ant/Groovyc.java | /*
* Copyright 2003-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.ant;
import groovy.lang.GroovyClassLoader;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.tools.ant.AntClassLoader;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.RuntimeConfigurable;
import org.apache.tools.ant.taskdefs.Execute;
import org.apache.tools.ant.taskdefs.Javac;
import org.apache.tools.ant.taskdefs.MatchingTask;
import org.apache.tools.ant.types.Path;
import org.apache.tools.ant.types.Reference;
import org.apache.tools.ant.util.GlobPatternMapper;
import org.apache.tools.ant.util.SourceFileScanner;
import org.codehaus.groovy.control.CompilationUnit;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.codehaus.groovy.tools.ErrorReporter;
import org.codehaus.groovy.tools.FileSystemCompiler;
import org.codehaus.groovy.tools.javac.JavaAwareCompilationUnit;
import org.codehaus.groovy.runtime.DefaultGroovyMethods;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
/**
* Compiles Groovy source files. This task can take the following arguments:
* <ul>
* <li>srcdir</li>
* <li>destdir</li>
* <li>classpath</li>
* <li>encoding</li>
* <li>verbose</li>
* <li>failonerror</li>
* <li>includeantruntime</li>
* <li>includejavaruntime</li>
* <li>memoryInitialSize</li>
* <li>memoryMaximumSize</li>
* <li>fork</li>
* <li>stacktrace</li>
* <li>stubdir</li>
* </ul>
* Of these arguments, the <b>srcdir</b> and <b>destdir</b> are required.
* <p/>
* <p>When this task executes, it will recursively scan srcdir and destdir looking for Groovy source files
* to compile. This task makes its compile decision based on timestamp.</p>
* <p/>
* <p>Based heavily on the Javac implementation in Ant.</p>
*
* @author <a href="mailto:[email protected]">James Strachan</a>
* @author Hein Meling
* @author <a href="mailto:[email protected]">Russel Winder</a>
* @author Danno Ferrin
* @version $Revision$
*/
public class Groovyc extends MatchingTask {
private final LoggingHelper log = new LoggingHelper(this);
private Path src;
private File destDir;
private Path compileClasspath;
private Path compileSourcepath;
private String encoding;
private boolean stacktrace = false;
private boolean verbose = false;
private boolean includeAntRuntime = true;
private boolean includeJavaRuntime = false;
private boolean fork = false;
private File forkJDK;
private String memoryInitialSize;
private String memoryMaximumSize;
protected boolean failOnError = true;
protected boolean listFiles = false;
protected File[] compileList = new File[0];
private String updatedProperty;
private String errorProperty;
private boolean taskSuccess = true; // assume the best
private boolean includeDestClasses = true;
protected CompilerConfiguration configuration;
private Javac javac;
private boolean jointCompilation;
private List<File> temporaryFiles = new ArrayList(2);
/**
* Adds a path for source compilation.
*
* @return a nested src element.
*/
public Path createSrc() {
if (src == null) {
src = new Path(getProject());
}
return src.createPath();
}
/**
* Recreate src.
*
* @return a nested src element.
*/
protected Path recreateSrc() {
src = null;
return createSrc();
}
/**
* Set the source directories to find the source Java files.
*
* @param srcDir the source directories as a path
*/
public void setSrcdir(Path srcDir) {
if (src == null) {
src = srcDir;
} else {
src.append(srcDir);
}
}
/**
* Gets the source dirs to find the source java files.
*
* @return the source directorys as a path
*/
public Path getSrcdir() {
return src;
}
/**
* Set the destination directory into which the Java source
* files should be compiled.
*
* @param destDir the destination director
*/
public void setDestdir(File destDir) {
this.destDir = destDir;
}
/**
* Gets the destination directory into which the java source files
* should be compiled.
*
* @return the destination directory
*/
public File getDestdir() {
return destDir;
}
/**
* Set the sourcepath to be used for this compilation.
*
* @param sourcepath the source path
*/
public void setSourcepath(Path sourcepath) {
if (compileSourcepath == null) {
compileSourcepath = sourcepath;
} else {
compileSourcepath.append(sourcepath);
}
}
/**
* Gets the sourcepath to be used for this compilation.
*
* @return the source path
*/
public Path getSourcepath() {
return compileSourcepath;
}
/**
* Adds a path to sourcepath.
*
* @return a sourcepath to be configured
*/
public Path createSourcepath() {
if (compileSourcepath == null) {
compileSourcepath = new Path(getProject());
}
return compileSourcepath.createPath();
}
/**
* Adds a reference to a source path defined elsewhere.
*
* @param r a reference to a source path
*/
public void setSourcepathRef(Reference r) {
createSourcepath().setRefid(r);
}
/**
* Set the classpath to be used for this compilation.
*
* @param classpath an Ant Path object containing the compilation classpath.
*/
public void setClasspath(Path classpath) {
if (compileClasspath == null) {
compileClasspath = classpath;
} else {
compileClasspath.append(classpath);
}
}
/**
* Gets the classpath to be used for this compilation.
*
* @return the class path
*/
public Path getClasspath() {
return compileClasspath;
}
/**
* Adds a path to the classpath.
*
* @return a class path to be configured
*/
public Path createClasspath() {
if (compileClasspath == null) {
compileClasspath = new Path(getProject());
}
return compileClasspath.createPath();
}
/**
* Adds a reference to a classpath defined elsewhere.
*
* @param r a reference to a classpath
*/
public void setClasspathRef(Reference r) {
createClasspath().setRefid(r);
}
/**
* If true, list the source files being handed off to the compiler.
*
* @param list if true list the source files
*/
public void setListfiles(boolean list) {
listFiles = list;
}
/**
* Get the listfiles flag.
*
* @return the listfiles flag
*/
public boolean getListfiles() {
return listFiles;
}
/**
* Indicates whether the build will continue
* even if there are compilation errors; defaults to true.
*
* @param fail if true halt the build on failure
*/
public void setFailonerror(boolean fail) {
failOnError = fail;
}
/**
* @param proceed inverse of failonerror
*/
public void setProceed(boolean proceed) {
failOnError = !proceed;
}
/**
* Gets the failonerror flag.
*
* @return the failonerror flag
*/
public boolean getFailonerror() {
return failOnError;
}
/**
* The initial size of the memory for the underlying VM
* if javac is run externally; ignored otherwise.
* Defaults to the standard VM memory setting.
* (Examples: 83886080, 81920k, or 80m)
*
* @param memoryInitialSize string to pass to VM
*/
public void setMemoryInitialSize(String memoryInitialSize) {
this.memoryInitialSize = memoryInitialSize;
}
/**
* Gets the memoryInitialSize flag.
*
* @return the memoryInitialSize flag
*/
public String getMemoryInitialSize() {
return memoryInitialSize;
}
/**
* The maximum size of the memory for the underlying VM
* if javac is run externally; ignored otherwise.
* Defaults to the standard VM memory setting.
* (Examples: 83886080, 81920k, or 80m)
*
* @param memoryMaximumSize string to pass to VM
*/
public void setMemoryMaximumSize(String memoryMaximumSize) {
this.memoryMaximumSize = memoryMaximumSize;
}
/**
* Gets the memoryMaximumSize flag.
*
* @return the memoryMaximumSize flag
*/
public String getMemoryMaximumSize() {
return memoryMaximumSize;
}
/**
* Sets the file encoding for generated files.
*
* @param encoding the file encoding to be used
*/
public void setEncoding(String encoding) {
this.encoding = encoding;
}
/**
* Returns the encoding to be used when creating files.
*
* @return the file encoding to use
*/
public String getEncoding() {
return encoding;
}
/**
* Enable verbose compiling which will display which files
* are being compiled
*/
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
/**
* Gets the verbose flag.
*
* @return the verbose flag
*/
public boolean getVerbose() {
return verbose;
}
/**
* If true, includes Ant's own classpath in the classpath.
*
* @param include if true, includes Ant's own classpath in the classpath
*/
public void setIncludeantruntime(boolean include) {
includeAntRuntime = include;
}
/**
* Gets whether or not the ant classpath is to be included in the classpath.
*
* @return whether or not the ant classpath is to be included in the classpath
*/
public boolean getIncludeantruntime() {
return includeAntRuntime;
}
/**
* If true, includes the Java runtime libraries in the classpath.
*
* @param include if true, includes the Java runtime libraries in the classpath
*/
public void setIncludejavaruntime(boolean include) {
includeJavaRuntime = include;
}
/**
* Gets whether or not the java runtime should be included in this
* task's classpath.
*
* @return the includejavaruntime attribute
*/
public boolean getIncludejavaruntime() {
return includeJavaRuntime;
}
/**
* If true forks the Groovy compiler.
*
* @param f "true|false|on|off|yes|no"
*/
public void setFork(boolean f) {
fork = f;
}
/**
* The JDK Home to use when forked.
*
* @param home the java.home value to use, default is the current JDK's home
*/
public void setJavaHome(File home) {
forkJDK = home;
}
/**
* The property to set on compilation success.
* This property will not be set if the compilation
* fails, or if there are no files to compile.
*
* @param updatedProperty the property name to use.
*/
public void setUpdatedProperty(String updatedProperty) {
this.updatedProperty = updatedProperty;
}
/**
* The property to set on compilation failure.
* This property will be set if the compilation
* fails.
*
* @param errorProperty the property name to use.
*/
public void setErrorProperty(String errorProperty) {
this.errorProperty = errorProperty;
}
/**
* This property controls whether to include the
* destination classes directory in the classpath
* given to the compiler.
* The default value is "true".
*
* @param includeDestClasses the value to use.
*/
public void setIncludeDestClasses(boolean includeDestClasses) {
this.includeDestClasses = includeDestClasses;
}
/**
* Get the value of the includeDestClasses property.
*
* @return the value.
*/
public boolean isIncludeDestClasses() {
return includeDestClasses;
}
/**
* Get the result of the groovyc task (success or failure).
*
* @return true if compilation succeeded, or
* was not necessary, false if the compilation failed.
*/
public boolean getTaskSuccess() {
return taskSuccess;
}
/*
public void setJointCompilationOptions(String options) {
String[] args = StringHelper.tokenizeUnquoted(options);
evalCompilerFlags(args);
}
*/
/**
* Add the configured nested javac task if present to initiate joint compilation.
*/
public void addConfiguredJavac(final Javac javac) {
this.javac = javac;
jointCompilation = true;
}
/**
* Enable compiler to report stack trace information if a problem occurs
* during compilation.
*/
public void setStacktrace(boolean stacktrace) {
this.stacktrace = stacktrace;
}
/**
* Executes the task.
*
* @throws BuildException if an error occurs
*/
public void execute() throws BuildException {
checkParameters();
resetFileLists();
if (javac != null) jointCompilation = true;
// scan source directories and dest directory to build up
// compile lists
String[] list = src.list();
for (String filename : list) {
File file = getProject().resolveFile(filename);
if (!file.exists()) {
throw new BuildException("srcdir \"" + file.getPath() + "\" does not exist!", getLocation());
}
DirectoryScanner ds = this.getDirectoryScanner(file);
String[] files = ds.getIncludedFiles();
scanDir(file, destDir != null ? destDir : file, files);
}
compile();
if (updatedProperty != null
&& taskSuccess
&& compileList.length != 0) {
getProject().setNewProperty(updatedProperty, "true");
}
}
/**
* Clear the list of files to be compiled and copied..
*/
protected void resetFileLists() {
compileList = new File[0];
}
/**
* Scans the directory looking for source files to be compiled.
* The results are returned in the class variable compileList
*
* @param srcDir The source directory
* @param destDir The destination directory
* @param files An array of filenames
*/
protected void scanDir(File srcDir, File destDir, String[] files) {
GlobPatternMapper m = new GlobPatternMapper();
m.setFrom("*.groovy");
m.setTo("*.class");
SourceFileScanner sfs = new SourceFileScanner(this);
File[] newFiles = sfs.restrictAsFiles(files, srcDir, destDir, m);
addToCompileList(newFiles);
if (jointCompilation) {
m.setFrom("*.java");
m.setTo("*.class");
newFiles = sfs.restrictAsFiles(files, srcDir, destDir, m);
addToCompileList(newFiles);
}
}
protected void addToCompileList(File[] newFiles) {
if (newFiles.length > 0) {
File[] newCompileList = new File[compileList.length + newFiles.length];
System.arraycopy(compileList, 0, newCompileList, 0, compileList.length);
System.arraycopy(newFiles, 0, newCompileList, compileList.length, newFiles.length);
compileList = newCompileList;
}
}
/**
* Gets the list of files to be compiled.
*
* @return the list of files as an array
*/
public File[] getFileList() {
return compileList;
}
protected void checkParameters() throws BuildException {
if (src == null) {
throw new BuildException("srcdir attribute must be set!", getLocation());
}
if (src.size() == 0) {
throw new BuildException("srcdir attribute must be set!", getLocation());
}
if (destDir != null && !destDir.isDirectory()) {
throw new BuildException("destination directory \""
+ destDir
+ "\" does not exist or is not a directory",
getLocation());
}
if (encoding != null && !Charset.isSupported(encoding)) {
throw new BuildException("encoding \"" + encoding + "\" not supported.");
}
}
protected void compile() {
try {
if (compileList.length > 0) {
log("Compiling " + compileList.length + " source file"
+ (compileList.length == 1 ? "" : "s")
+ (destDir != null ? " to " + destDir : ""));
if (listFiles) {
for (File srcFile : compileList) {
log(srcFile.getAbsolutePath());
}
}
Path classpath = getClasspath() != null ? getClasspath() : new Path(getProject());
// extract joint options, some get pushed up...
List<String> jointOptions = new ArrayList<String>();
if (jointCompilation) {
RuntimeConfigurable rc = javac.getRuntimeConfigurableWrapper();
for (Iterator i = rc.getAttributeMap().entrySet().iterator(); i.hasNext();) {
final Map.Entry e = (Map.Entry) i.next();
final String key = e.getKey().toString();
final String value = getProject().replaceProperties(e.getValue().toString());
if (key.contains("debug")) {
String level = "";
if (javac.getDebugLevel() != null) {
level = ":" + javac.getDebugLevel();
}
jointOptions.add("-Fg" + level);
} else if (key.contains("debugLevel")) {
// ignore, taken care of in debug
} else if ((key.contains("nowarn"))
|| (key.contains("verbose"))
|| (key.contains("deprecation"))) {
// false is default, so something to do only in true case
if ("on".equalsIgnoreCase(value) || "true".equalsIgnoreCase(value) || "yes".equalsIgnoreCase("value"))
jointOptions.add("-F" + key);
} else if (key.contains("classpath")) {
classpath.add(javac.getClasspath());
} else if ((key.contains("depend"))
|| (key.contains("extdirs"))
|| (key.contains("encoding"))
|| (key.contains("source"))
|| (key.contains("target"))
|| (key.contains("verbose"))) {
jointOptions.add("-J" + key + "=" + value);
} else {
log("The option " + key + " cannot be set on the contained <javac> element. The option will be ignored", Project.MSG_WARN);
}
// includes? excludes?
}
// ant's <javac> supports nested <compilerarg value=""> elements (there can be multiple of them)
// for additional options to be passed to javac.
Enumeration children = rc.getChildren();
while (children.hasMoreElements()) {
RuntimeConfigurable childrc = (RuntimeConfigurable) children.nextElement();
if(childrc.getElementTag().equals("compilerarg")) {
for (Iterator i = childrc.getAttributeMap().entrySet().iterator(); i.hasNext();) {
final Map.Entry e = (Map.Entry) i.next();
final String key = e.getKey().toString();
if(key.equals("value")) {
final String value = getProject().replaceProperties(e.getValue().toString());
StringTokenizer st = new StringTokenizer(value, " ");
while(st.hasMoreTokens()) {
String optionStr = st.nextToken();
jointOptions.add(optionStr.replace("-X", "-FX"));
}
}
}
}
}
}
String separator = System.getProperty("file.separator");
List<String> commandLineList = new ArrayList<String>();
if (fork) {
String javaHome;
if (forkJDK != null) {
javaHome = forkJDK.getPath();
} else {
javaHome = System.getProperty("java.home");
}
if (includeAntRuntime) {
classpath.addExisting((new Path(getProject())).concatSystemClasspath("last"));
}
if (includeJavaRuntime) {
classpath.addJavaRuntime();
}
commandLineList.add(javaHome + separator + "bin" + separator + "java");
commandLineList.add("-classpath");
commandLineList.add(classpath.toString());
if ((memoryInitialSize != null) && !memoryInitialSize.equals("")) {
commandLineList.add("-Xms" + memoryInitialSize);
}
if ((memoryMaximumSize != null) && !memoryMaximumSize.equals("")) {
commandLineList.add("-Xmx" + memoryMaximumSize);
}
commandLineList.add("org.codehaus.groovy.tools.FileSystemCompiler");
}
commandLineList.add("--classpath");
commandLineList.add(classpath.toString());
if (jointCompilation) {
commandLineList.add("-j");
commandLineList.addAll(jointOptions);
}
commandLineList.add("-d");
commandLineList.add(destDir.getPath());
if (encoding != null) {
commandLineList.add("--encoding");
commandLineList.add(encoding);
}
if (stacktrace) {
commandLineList.add("-e");
}
// check to see if an external file is needed
int count = 0;
if (fork) {
for (File srcFile : compileList) {
count += srcFile.getPath().length();
}
for (Object commandLineArg : commandLineList) {
count += commandLineArg.toString().length();
}
count += compileList.length;
count += commandLineList.size();
}
// 32767 is the command line length limit on Windows
if (fork && (count > 32767)) {
try {
File tempFile = File.createTempFile("groovyc-files-", ".txt");
temporaryFiles.add(tempFile);
PrintWriter pw = new PrintWriter(new FileWriter(tempFile));
for (File srcFile : compileList) {
pw.println(srcFile.getPath());
}
pw.close();
commandLineList.add("@" + tempFile.getPath());
} catch (IOException e) {
log("Error creating file list", e, Project.MSG_ERR);
}
} else {
for (File srcFile : compileList) {
commandLineList.add(srcFile.getPath());
}
}
final String[] commandLine = new String[commandLineList.size()];
for (int i = 0; i < commandLine.length; ++i) {
commandLine[i] = commandLineList.get(i);
}
log("Compilation arguments:", Project.MSG_VERBOSE);
log(DefaultGroovyMethods.join(commandLine, "\n"), Project.MSG_VERBOSE);
if (fork) {
// use the main method in FileSystemCompiler
final Execute executor = new Execute(); // new LogStreamHandler ( attributes , Project.MSG_INFO , Project.MSG_WARN ) ) ;
executor.setAntRun(getProject());
executor.setWorkingDirectory(getProject().getBaseDir());
executor.setCommandline(commandLine);
try {
executor.execute();
}
catch (final IOException ioe) {
throw new BuildException("Error running forked groovyc.", ioe);
}
final int returnCode = executor.getExitValue();
if (returnCode != 0) {
if (failOnError) {
throw new BuildException("Forked groovyc returned error code: " + returnCode);
} else {
log("Forked groovyc returned error code: " + returnCode, Project.MSG_ERR);
}
}
} else {
// hand crank it so we can add our own compiler configuration
try {
Options options = FileSystemCompiler.createCompilationOptions();
PosixParser cliParser = new PosixParser();
CommandLine cli;
cli = cliParser.parse(options, commandLine);
configuration = FileSystemCompiler.generateCompilerConfigurationFromOptions(cli);
// Load the file name list
String[] filenames = FileSystemCompiler.generateFileNamesFromOptions(cli);
boolean fileNameErrors = filenames == null;
fileNameErrors = fileNameErrors && !FileSystemCompiler.validateFiles(filenames);
if (!fileNameErrors) {
FileSystemCompiler.doCompilation(configuration, makeCompileUnit(), filenames);
}
} catch (Exception re) {
Throwable t = re;
if ((re.getClass() == RuntimeException.class) && (re.getCause() != null)) {
// unwrap to the real exception
t = re.getCause();
}
StringWriter writer = new StringWriter();
new ErrorReporter(t, false).write(new PrintWriter(writer));
String message = writer.toString();
if (failOnError) {
log(message, Project.MSG_INFO);
throw new BuildException("Compilation Failed", t, getLocation());
} else {
log(message, Project.MSG_ERR);
}
}
}
}
} finally {
for (File temporaryFile : temporaryFiles) {
try {
FileSystemCompiler.deleteRecursive(temporaryFile);
} catch (Throwable t) {
System.err.println("error: could not delete temp files - " + temporaryFile.getPath());
}
}
}
}
protected CompilationUnit makeCompileUnit() {
if (configuration.getJointCompilationOptions() != null) {
if (!configuration.getJointCompilationOptions().containsKey("stubDir")) {
try {
File tempStubDir = FileSystemCompiler.createTempDir();
temporaryFiles.add(tempStubDir);
configuration.getJointCompilationOptions().put("stubDir", tempStubDir);
} catch (IOException ioe) {
throw new BuildException(ioe);
}
}
return new JavaAwareCompilationUnit(configuration, buildClassLoaderFor());
} else {
return new CompilationUnit(configuration, null, buildClassLoaderFor());
}
}
protected GroovyClassLoader buildClassLoaderFor() {
ClassLoader parent = this.getClass().getClassLoader();
if (parent instanceof AntClassLoader) {
AntClassLoader antLoader = (AntClassLoader) parent;
String[] pathElm = antLoader.getClasspath().split(File.pathSeparator);
List<String> classpath = configuration.getClasspath();
/*
* Iterate over the classpath provided to groovyc, and add any missing path
* entries to the AntClassLoader. This is a workaround, since for some reason
* 'directory' classpath entries were not added to the AntClassLoader' classpath.
*/
for (String cpEntry : classpath) {
boolean found = false;
for (String path : pathElm) {
if (cpEntry.equals(path)) {
found = true;
break;
}
}
/*
* fix for GROOVY-2284
* seems like AntClassLoader doesn't check if the file
* may not exist in the classpath yet
*/
if (!found && new File(cpEntry).exists())
antLoader.addPathElement(cpEntry);
}
}
return new GroovyClassLoader(parent, configuration);
}
/**
* Set the stub directory into which the Java source stub
* files should be generated. The directory should exist
* will not be deleted automatically.
*
* @param stubDir the stub directory
*/
public void setStubdir(File stubDir) {
jointCompilation = true;
configuration.getJointCompilationOptions().put("stubDir", stubDir);
}
/**
* Gets the stub directory into which the Java source stub
* files should be generated
*
* @return the stub directory
*/
public File getStubdir() {
return (File) configuration.getJointCompilationOptions().get("stubDir");
}
}
| GROOVY-2717: Groovyc ignores includeAntRuntime when not forked.
git-svn-id: aa43ce4553b005588bb3cc6c16966320b011facb@19649 a5544e8c-8a19-0410-ba12-f9af4593a198
| src/main/org/codehaus/groovy/ant/Groovyc.java | GROOVY-2717: Groovyc ignores includeAntRuntime when not forked. | <ide><path>rc/main/org/codehaus/groovy/ant/Groovyc.java
<ide> /*
<del> * Copyright 2003-2009 the original author or authors.
<add> * Copyright 2003-2010 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import org.codehaus.groovy.control.CompilerConfiguration;
<ide> import org.codehaus.groovy.tools.ErrorReporter;
<ide> import org.codehaus.groovy.tools.FileSystemCompiler;
<add>import org.codehaus.groovy.tools.RootLoader;
<ide> import org.codehaus.groovy.tools.javac.JavaAwareCompilationUnit;
<ide> import org.codehaus.groovy.runtime.DefaultGroovyMethods;
<ide>
<ide> import java.io.IOException;
<ide> import java.io.PrintWriter;
<ide> import java.io.StringWriter;
<add>import java.net.URL;
<ide> import java.nio.charset.Charset;
<ide> import java.util.ArrayList;
<ide> import java.util.Enumeration;
<ide> /**
<ide> * Gets the source dirs to find the source java files.
<ide> *
<del> * @return the source directorys as a path
<add> * @return the source directories as a path
<ide> */
<ide> public Path getSrcdir() {
<ide> return src;
<ide>
<ide> /**
<ide> * If true, list the source files being handed off to the compiler.
<add> * Default is false.
<ide> *
<ide> * @param list if true list the source files
<ide> */
<ide>
<ide> /**
<ide> * Enable verbose compiling which will display which files
<del> * are being compiled
<add> * are being compiled. Default is false.
<ide> */
<ide> public void setVerbose(boolean verbose) {
<ide> this.verbose = verbose;
<ide> }
<ide>
<ide> /**
<del> * If true, includes Ant's own classpath in the classpath.
<add> * If true, includes Ant's own classpath in the classpath. Default is true.
<add> * If setting to false and using groovyc in conjunction with AntBuilder
<add> * you might need to explicitly add the Groovy jar(s) to the groovyc
<add> * classpath using a nested classpath task.
<ide> *
<ide> * @param include if true, includes Ant's own classpath in the classpath
<ide> */
<ide> }
<ide>
<ide> /**
<del> * If true, includes the Java runtime libraries in the classpath.
<add> * If true, includes the Java runtime libraries in the classpath. Default is false.
<ide> *
<ide> * @param include if true, includes the Java runtime libraries in the classpath
<ide> */
<ide> }
<ide>
<ide> /**
<del> * If true forks the Groovy compiler.
<add> * If true forks the Groovy compiler. Default is false.
<ide> *
<ide> * @param f "true|false|on|off|yes|no"
<ide> */
<ide>
<ide> /**
<ide> * Enable compiler to report stack trace information if a problem occurs
<del> * during compilation.
<add> * during compilation. Default is false.
<ide> */
<ide> public void setStacktrace(boolean stacktrace) {
<ide> this.stacktrace = stacktrace;
<ide>
<ide>
<ide> protected GroovyClassLoader buildClassLoaderFor() {
<del> ClassLoader parent = this.getClass().getClassLoader();
<add> ClassLoader parent = getIncludeantruntime()
<add> ? getClass().getClassLoader()
<add> : new AntClassLoader(new RootLoader(new URL[0], null), getProject(), getClasspath());
<ide> if (parent instanceof AntClassLoader) {
<ide> AntClassLoader antLoader = (AntClassLoader) parent;
<ide> String[] pathElm = antLoader.getClasspath().split(File.pathSeparator); |
|
Java | epl-1.0 | 876c5996173de83753a6a4759cb20397180203ed | 0 | stormc/hawkbit,stormc/hawkbit,eclipse/hawkbit,StBurcher/hawkbit,StBurcher/hawkbit,StBurcher/hawkbit,bsinno/hawkbit,bsinno/hawkbit,eclipse/hawkbit,bsinno/hawkbit,eclipse/hawkbit,stormc/hawkbit,eclipse/hawkbit | /**
* Copyright (c) 2015 Bosch Software Innovations GmbH and others.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.hawkbit.ui.filtermanagement;
import static org.apache.commons.lang3.ArrayUtils.isEmpty;
import static org.eclipse.hawkbit.ui.utils.HawkbitCommonUtil.isNotNullOrEmpty;
import static org.eclipse.hawkbit.ui.utils.SPUIDefinitions.FILTER_BY_QUERY;
import static org.springframework.data.domain.Sort.Direction.ASC;
import static org.springframework.data.domain.Sort.Direction.DESC;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.eclipse.hawkbit.repository.TargetManagement;
import org.eclipse.hawkbit.repository.model.Target;
import org.eclipse.hawkbit.ui.common.UserDetailsFormatter;
import org.eclipse.hawkbit.ui.components.ProxyTarget;
import org.eclipse.hawkbit.ui.filtermanagement.state.FilterManagementUIState;
import org.eclipse.hawkbit.ui.utils.HawkbitCommonUtil;
import org.eclipse.hawkbit.ui.utils.I18N;
import org.eclipse.hawkbit.ui.utils.SPDateTimeUtil;
import org.eclipse.hawkbit.ui.utils.SPUIDefinitions;
import org.eclipse.hawkbit.ui.utils.SpringContextHelper;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.vaadin.addons.lazyquerycontainer.AbstractBeanQuery;
import org.vaadin.addons.lazyquerycontainer.QueryDefinition;
import com.google.common.base.Strings;
/**
* Simple implementation of generics bean query which dynamically loads
* {@link ProxyTarget} batch of beans.
*
*/
public class CustomTargetBeanQuery extends AbstractBeanQuery<ProxyTarget> {
private static final long serialVersionUID = 6490445732785388071L;
private Sort sort = new Sort(Direction.DESC, "createdAt");
private transient TargetManagement targetManagement;
private FilterManagementUIState filterManagementUIState;
private transient I18N i18N;
private String filterQuery;
/**
* Parametric Constructor.
*
* @param definition
* as Def
* @param queryConfig
* as Config
* @param sortIds
* as sort
* @param sortStates
* as Sort status
*/
public CustomTargetBeanQuery(final QueryDefinition definition, final Map<String, Object> queryConfig,
final Object[] sortIds, final boolean[] sortStates) {
super(definition, queryConfig, sortIds, sortStates);
if (isNotNullOrEmpty(queryConfig)) {
filterQuery = (String) queryConfig.get(FILTER_BY_QUERY);
}
if (!isEmpty(sortStates)) {
sort = new Sort(sortStates[0] ? ASC : DESC, (String) sortIds[0]);
for (int targetId = 1; targetId < sortIds.length; targetId++) {
sort.and(new Sort(sortStates[targetId] ? ASC : DESC, (String) sortIds[targetId]));
}
}
}
@Override
protected ProxyTarget constructBean() {
return new ProxyTarget();
}
@Override
protected List<ProxyTarget> loadBeans(final int startIndex, final int count) {
Slice<Target> targetBeans;
final List<ProxyTarget> proxyTargetBeans = new ArrayList<>();
if (!Strings.isNullOrEmpty(filterQuery)) {
targetBeans = targetManagement.findTargetsAll(filterQuery,
new PageRequest(startIndex / SPUIDefinitions.PAGE_SIZE, SPUIDefinitions.PAGE_SIZE, sort));
} else {
targetBeans = targetManagement.findTargetsAll(
new PageRequest(startIndex / SPUIDefinitions.PAGE_SIZE, SPUIDefinitions.PAGE_SIZE, sort));
}
for (final Target targ : targetBeans) {
final ProxyTarget prxyTarget = new ProxyTarget();
prxyTarget.setTargetIdName(targ.getTargetIdName());
prxyTarget.setName(targ.getName());
prxyTarget.setDescription(targ.getDescription());
prxyTarget.setControllerId(targ.getControllerId());
prxyTarget.setInstallationDate(targ.getTargetInfo().getInstallationDate());
prxyTarget.setAddress(targ.getTargetInfo().getAddress());
prxyTarget.setLastTargetQuery(targ.getTargetInfo().getLastTargetQuery());
prxyTarget.setUpdateStatus(targ.getTargetInfo().getUpdateStatus());
prxyTarget.setLastModifiedDate(SPDateTimeUtil.getFormattedDate(targ.getLastModifiedAt()));
prxyTarget.setCreatedDate(SPDateTimeUtil.getFormattedDate(targ.getCreatedAt()));
prxyTarget.setCreatedAt(targ.getCreatedAt());
prxyTarget.setCreatedByUser(UserDetailsFormatter.loadAndFormatCreatedBy(targ));
prxyTarget.setModifiedByUser(UserDetailsFormatter.loadAndFormatLastModifiedBy(targ));
prxyTarget.setUpdateStatus(targ.getTargetInfo().getUpdateStatus());
prxyTarget.setLastTargetQuery(targ.getTargetInfo().getLastTargetQuery());
prxyTarget.setTargetInfo(targ.getTargetInfo());
prxyTarget.setPollStatusToolTip(
HawkbitCommonUtil.getPollStatusToolTip(prxyTarget.getTargetInfo().getPollStatus(), getI18N()));
proxyTargetBeans.add(prxyTarget);
}
return proxyTargetBeans;
}
@Override
protected void saveBeans(final List<ProxyTarget> arg0, final List<ProxyTarget> arg1, final List<ProxyTarget> arg2) {
// CRUD operations on Target will be done through repository methods
}
@Override
public int size() {
long size = 0;
if (!Strings.isNullOrEmpty(filterQuery)) {
size = getTargetManagement().countTargetByTargetFilterQuery(filterQuery);
}
getFilterManagementUIState().setTargetsCountAll(size);
if (size > SPUIDefinitions.MAX_TABLE_ENTRIES) {
getFilterManagementUIState().setTargetsTruncated(size - SPUIDefinitions.MAX_TABLE_ENTRIES);
size = SPUIDefinitions.MAX_TABLE_ENTRIES;
} else {
getFilterManagementUIState().setTargetsTruncated(null);
}
return (int) size;
}
private TargetManagement getTargetManagement() {
if (targetManagement == null) {
targetManagement = SpringContextHelper.getBean(TargetManagement.class);
}
return targetManagement;
}
private FilterManagementUIState getFilterManagementUIState() {
if (filterManagementUIState == null) {
filterManagementUIState = SpringContextHelper.getBean(FilterManagementUIState.class);
}
return filterManagementUIState;
}
private I18N getI18N() {
if (i18N == null) {
i18N = SpringContextHelper.getBean(I18N.class);
}
return i18N;
}
}
| hawkbit-ui/src/main/java/org/eclipse/hawkbit/ui/filtermanagement/CustomTargetBeanQuery.java | /**
* Copyright (c) 2015 Bosch Software Innovations GmbH and others.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.hawkbit.ui.filtermanagement;
import static org.apache.commons.lang3.ArrayUtils.isEmpty;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.eclipse.hawkbit.repository.TargetManagement;
import org.eclipse.hawkbit.repository.model.Target;
import org.eclipse.hawkbit.ui.common.UserDetailsFormatter;
import org.eclipse.hawkbit.ui.components.ProxyTarget;
import org.eclipse.hawkbit.ui.filtermanagement.state.FilterManagementUIState;
import org.eclipse.hawkbit.ui.utils.HawkbitCommonUtil;
import org.eclipse.hawkbit.ui.utils.I18N;
import org.eclipse.hawkbit.ui.utils.SPDateTimeUtil;
import org.eclipse.hawkbit.ui.utils.SPUIDefinitions;
import org.eclipse.hawkbit.ui.utils.SpringContextHelper;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.data.domain.Sort;
import org.springframework.data.domain.Sort.Direction;
import org.vaadin.addons.lazyquerycontainer.AbstractBeanQuery;
import org.vaadin.addons.lazyquerycontainer.QueryDefinition;
import com.google.common.base.Strings;
/**
* Simple implementation of generics bean query which dynamically loads
* {@link ProxyTarget} batch of beans.
*
*/
public class CustomTargetBeanQuery extends AbstractBeanQuery<ProxyTarget> {
private static final long serialVersionUID = 6490445732785388071L;
private Sort sort = new Sort(Direction.DESC, "createdAt");
private transient TargetManagement targetManagement;
private FilterManagementUIState filterManagementUIState;
private transient I18N i18N;
private String filterQuery;
/**
* Parametric Constructor.
*
* @param definition
* as Def
* @param queryConfig
* as Config
* @param sortIds
* as sort
* @param sortStates
* as Sort status
*/
public CustomTargetBeanQuery(final QueryDefinition definition, final Map<String, Object> queryConfig,
final Object[] sortIds, final boolean[] sortStates) {
super(definition, queryConfig, sortIds, sortStates);
if (HawkbitCommonUtil.isNotNullOrEmpty(queryConfig)) {
filterQuery = (String) queryConfig.get(SPUIDefinitions.FILTER_BY_QUERY);
}
if (isEmpty(sortStates)) {
// Initalize Sor
sort = new Sort(sortStates[0] ? Direction.ASC : Direction.DESC, (String) sortIds[0]);
// Add sort.
for (int targetId = 1; targetId < sortIds.length; targetId++) {
sort.and(new Sort(sortStates[targetId] ? Direction.ASC : Direction.DESC, (String) sortIds[targetId]));
}
}
}
@Override
protected ProxyTarget constructBean() {
return new ProxyTarget();
}
@Override
protected List<ProxyTarget> loadBeans(final int startIndex, final int count) {
Slice<Target> targetBeans;
final List<ProxyTarget> proxyTargetBeans = new ArrayList<>();
if (!Strings.isNullOrEmpty(filterQuery)) {
targetBeans = targetManagement.findTargetsAll(filterQuery,
new PageRequest(startIndex / SPUIDefinitions.PAGE_SIZE, SPUIDefinitions.PAGE_SIZE, sort));
} else {
targetBeans = targetManagement.findTargetsAll(
new PageRequest(startIndex / SPUIDefinitions.PAGE_SIZE, SPUIDefinitions.PAGE_SIZE, sort));
}
for (final Target targ : targetBeans) {
final ProxyTarget prxyTarget = new ProxyTarget();
prxyTarget.setTargetIdName(targ.getTargetIdName());
prxyTarget.setName(targ.getName());
prxyTarget.setDescription(targ.getDescription());
prxyTarget.setControllerId(targ.getControllerId());
prxyTarget.setInstallationDate(targ.getTargetInfo().getInstallationDate());
prxyTarget.setAddress(targ.getTargetInfo().getAddress());
prxyTarget.setLastTargetQuery(targ.getTargetInfo().getLastTargetQuery());
prxyTarget.setUpdateStatus(targ.getTargetInfo().getUpdateStatus());
prxyTarget.setLastModifiedDate(SPDateTimeUtil.getFormattedDate(targ.getLastModifiedAt()));
prxyTarget.setCreatedDate(SPDateTimeUtil.getFormattedDate(targ.getCreatedAt()));
prxyTarget.setCreatedAt(targ.getCreatedAt());
prxyTarget.setCreatedByUser(UserDetailsFormatter.loadAndFormatCreatedBy(targ));
prxyTarget.setModifiedByUser(UserDetailsFormatter.loadAndFormatLastModifiedBy(targ));
prxyTarget.setUpdateStatus(targ.getTargetInfo().getUpdateStatus());
prxyTarget.setLastTargetQuery(targ.getTargetInfo().getLastTargetQuery());
prxyTarget.setTargetInfo(targ.getTargetInfo());
prxyTarget.setPollStatusToolTip(
HawkbitCommonUtil.getPollStatusToolTip(prxyTarget.getTargetInfo().getPollStatus(), getI18N()));
proxyTargetBeans.add(prxyTarget);
}
return proxyTargetBeans;
}
@Override
protected void saveBeans(final List<ProxyTarget> arg0, final List<ProxyTarget> arg1, final List<ProxyTarget> arg2) {
// CRUD operations on Target will be done through repository methods
}
@Override
public int size() {
long size = 0;
if (!Strings.isNullOrEmpty(filterQuery)) {
size = getTargetManagement().countTargetByTargetFilterQuery(filterQuery);
}
getFilterManagementUIState().setTargetsCountAll(size);
if (size > SPUIDefinitions.MAX_TABLE_ENTRIES) {
getFilterManagementUIState().setTargetsTruncated(size - SPUIDefinitions.MAX_TABLE_ENTRIES);
size = SPUIDefinitions.MAX_TABLE_ENTRIES;
} else {
getFilterManagementUIState().setTargetsTruncated(null);
}
return (int) size;
}
private TargetManagement getTargetManagement() {
if (targetManagement == null) {
targetManagement = SpringContextHelper.getBean(TargetManagement.class);
}
return targetManagement;
}
private FilterManagementUIState getFilterManagementUIState() {
if (filterManagementUIState == null) {
filterManagementUIState = SpringContextHelper.getBean(FilterManagementUIState.class);
}
return filterManagementUIState;
}
private I18N getI18N() {
if (i18N == null) {
i18N = SpringContextHelper.getBean(I18N.class);
}
return i18N;
}
}
| Found another place where a NOT was missing
Signed-off-by: Dominic Schabel <[email protected]>
| hawkbit-ui/src/main/java/org/eclipse/hawkbit/ui/filtermanagement/CustomTargetBeanQuery.java | Found another place where a NOT was missing | <ide><path>awkbit-ui/src/main/java/org/eclipse/hawkbit/ui/filtermanagement/CustomTargetBeanQuery.java
<ide> package org.eclipse.hawkbit.ui.filtermanagement;
<ide>
<ide> import static org.apache.commons.lang3.ArrayUtils.isEmpty;
<add>import static org.eclipse.hawkbit.ui.utils.HawkbitCommonUtil.isNotNullOrEmpty;
<add>import static org.eclipse.hawkbit.ui.utils.SPUIDefinitions.FILTER_BY_QUERY;
<add>import static org.springframework.data.domain.Sort.Direction.ASC;
<add>import static org.springframework.data.domain.Sort.Direction.DESC;
<ide>
<ide> import java.util.ArrayList;
<ide> import java.util.List;
<ide> final Object[] sortIds, final boolean[] sortStates) {
<ide> super(definition, queryConfig, sortIds, sortStates);
<ide>
<del> if (HawkbitCommonUtil.isNotNullOrEmpty(queryConfig)) {
<del> filterQuery = (String) queryConfig.get(SPUIDefinitions.FILTER_BY_QUERY);
<add> if (isNotNullOrEmpty(queryConfig)) {
<add> filterQuery = (String) queryConfig.get(FILTER_BY_QUERY);
<ide> }
<del> if (isEmpty(sortStates)) {
<del> // Initalize Sor
<del> sort = new Sort(sortStates[0] ? Direction.ASC : Direction.DESC, (String) sortIds[0]);
<del> // Add sort.
<add>
<add> if (!isEmpty(sortStates)) {
<add>
<add> sort = new Sort(sortStates[0] ? ASC : DESC, (String) sortIds[0]);
<add>
<ide> for (int targetId = 1; targetId < sortIds.length; targetId++) {
<del> sort.and(new Sort(sortStates[targetId] ? Direction.ASC : Direction.DESC, (String) sortIds[targetId]));
<add> sort.and(new Sort(sortStates[targetId] ? ASC : DESC, (String) sortIds[targetId]));
<ide> }
<ide> }
<ide> }
<ide>
<ide> @Override
<ide> protected ProxyTarget constructBean() {
<del>
<ide> return new ProxyTarget();
<ide> }
<ide> |
|
Java | apache-2.0 | af660cdf2fc1a365b279eb6047fc5956e585cfb9 | 0 | graphhopper/graphhopper,graphhopper/graphhopper,graphhopper/graphhopper,graphhopper/graphhopper | /*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.routing;
import com.graphhopper.routing.ev.*;
import com.graphhopper.routing.util.EncodingManager;
import com.graphhopper.routing.util.TraversalMode;
import com.graphhopper.routing.weighting.FastestWeighting;
import com.graphhopper.routing.weighting.ShortestWeighting;
import com.graphhopper.routing.weighting.Weighting;
import com.graphhopper.storage.BaseGraph;
import com.graphhopper.storage.Graph;
import com.graphhopper.storage.NodeAccess;
import com.graphhopper.util.*;
import com.graphhopper.util.details.PathDetail;
import com.graphhopper.util.details.PathDetailsBuilderFactory;
import com.graphhopper.util.details.PathDetailsFromEdges;
import org.junit.jupiter.api.Test;
import java.util.*;
import static com.graphhopper.search.EdgeKVStorage.KeyValue.createKV;
import static com.graphhopper.storage.AbstractGraphStorageTester.assertPList;
import static com.graphhopper.util.Parameters.Details.*;
import com.graphhopper.search.EdgeKVStorage.KeyValue;
import static org.junit.jupiter.api.Assertions.*;
/**
* @author Peter Karich
*/
public class PathTest {
private final BooleanEncodedValue carAccessEnc = new SimpleBooleanEncodedValue("access", true);
private final DecimalEncodedValue carAvSpeedEnc = new DecimalEncodedValueImpl("speed", 5, 5, false);
private final EncodingManager carManager = EncodingManager.start().add(carAccessEnc).add(carAvSpeedEnc).build();
private final BooleanEncodedValue mixedCarAccessEnc = new SimpleBooleanEncodedValue("mixed_car_access", true);
private final DecimalEncodedValue mixedCarSpeedEnc = new DecimalEncodedValueImpl("mixed_car_speed", 5, 5, false);
private final BooleanEncodedValue mixedFootAccessEnc = new SimpleBooleanEncodedValue("mixed_foot_access", true);
private final DecimalEncodedValue mixedFootSpeedEnc = new DecimalEncodedValueImpl("mixed_foot_speed", 4, 1, false);
private final EncodingManager mixedEncodingManager = EncodingManager.start().add(mixedCarAccessEnc).add(mixedCarSpeedEnc).add(mixedFootAccessEnc).add(mixedFootSpeedEnc).build();
private final TranslationMap trMap = TranslationMapTest.SINGLETON;
private final Translation tr = trMap.getWithFallBack(Locale.US);
private final RoundaboutGraph roundaboutGraph = new RoundaboutGraph();
private final Graph pathDetailGraph = generatePathDetailsGraph();
@Test
public void testFound() {
BaseGraph g = new BaseGraph.Builder(carManager).create();
Path p = new Path(g);
assertFalse(p.isFound());
assertEquals(0, p.getDistance(), 1e-7);
assertEquals(0, p.calcNodes().size());
}
@Test
public void testWayList() {
BaseGraph g = new BaseGraph.Builder(carManager).create();
NodeAccess na = g.getNodeAccess();
na.setNode(0, 0.0, 0.1);
na.setNode(1, 1.0, 0.1);
na.setNode(2, 2.0, 0.1);
EdgeIteratorState edge1 = g.edge(0, 1).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 10.0);
edge1.setWayGeometry(Helper.createPointList(8, 1, 9, 1));
EdgeIteratorState edge2 = g.edge(2, 1).setDistance(2000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge2.setWayGeometry(Helper.createPointList(11, 1, 10, 1));
SPTEntry e1 = new SPTEntry(edge2.getEdge(), 2, 1, new SPTEntry(edge1.getEdge(), 1, 1, new SPTEntry(0, 1)));
FastestWeighting weighting = new FastestWeighting(carAccessEnc, carAvSpeedEnc);
Path path = extractPath(g, weighting, e1);
// 0-1-2
assertPList(Helper.createPointList(0, 0.1, 8, 1, 9, 1, 1, 0.1, 10, 1, 11, 1, 2, 0.1), path.calcPoints());
InstructionList instr = InstructionsFromEdges.calcInstructions(path, path.graph, weighting, carManager, tr);
Instruction tmp = instr.get(0);
assertEquals(3000.0, tmp.getDistance(), 0.0);
assertEquals(504000L, tmp.getTime());
assertEquals("continue", tmp.getTurnDescription(tr));
assertEquals(6, tmp.getLength());
tmp = instr.get(1);
assertEquals(0.0, tmp.getDistance(), 0.0);
assertEquals(0L, tmp.getTime());
assertEquals("arrive at destination", tmp.getTurnDescription(tr));
assertEquals(0, tmp.getLength());
int acc = 0;
for (Instruction instruction : instr) {
acc += instruction.getLength();
}
assertEquals(path.calcPoints().size() - 1, acc);
// force minor change for instructions
edge2.setKeyValues(createKV("name", "2"));
na.setNode(3, 1.0, 1.0);
g.edge(1, 3).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 10.0);
e1 = new SPTEntry(edge2.getEdge(), 2, 1,
new SPTEntry(edge1.getEdge(), 1, 1,
new SPTEntry(0, 1)
)
);
path = extractPath(g, weighting, e1);
instr = InstructionsFromEdges.calcInstructions(path, path.graph, weighting, carManager, tr);
tmp = instr.get(0);
assertEquals(1000.0, tmp.getDistance(), 0);
assertEquals(360000L, tmp.getTime());
assertEquals("continue", tmp.getTurnDescription(tr));
assertEquals(3, tmp.getLength());
tmp = instr.get(1);
assertEquals(2000.0, tmp.getDistance(), 0);
assertEquals(144000L, tmp.getTime());
assertEquals("turn sharp right onto 2", tmp.getTurnDescription(tr));
assertEquals(3, tmp.getLength());
acc = 0;
for (Instruction instruction : instr) {
acc += instruction.getLength();
}
assertEquals(path.calcPoints().size() - 1, acc);
// now reverse order
e1 = new SPTEntry(edge1.getEdge(), 0, 1, new SPTEntry(edge2.getEdge(), 1, 1, new SPTEntry(2, 1)));
path = extractPath(g, weighting, e1);
// 2-1-0
assertPList(Helper.createPointList(2, 0.1, 11, 1, 10, 1, 1, 0.1, 9, 1, 8, 1, 0, 0.1), path.calcPoints());
instr = InstructionsFromEdges.calcInstructions(path, path.graph, weighting, carManager, tr);
tmp = instr.get(0);
assertEquals(2000.0, tmp.getDistance(), 0);
assertEquals(144000L, tmp.getTime());
assertEquals("continue onto 2", tmp.getTurnDescription(tr));
assertEquals(3, tmp.getLength());
tmp = instr.get(1);
assertEquals(1000.0, tmp.getDistance(), 0);
assertEquals(360000L, tmp.getTime());
assertEquals("turn sharp left", tmp.getTurnDescription(tr));
assertEquals(3, tmp.getLength());
acc = 0;
for (Instruction instruction : instr) {
acc += instruction.getLength();
}
assertEquals(path.calcPoints().size() - 1, acc);
}
@Test
public void testFindInstruction() {
BaseGraph g = new BaseGraph.Builder(carManager).create();
NodeAccess na = g.getNodeAccess();
na.setNode(0, 0.0, 0.0);
na.setNode(1, 5.0, 0.0);
na.setNode(2, 5.0, 0.5);
na.setNode(3, 10.0, 0.5);
na.setNode(4, 7.5, 0.25);
na.setNode(5, 5.0, 1.0);
EdgeIteratorState edge1 = g.edge(0, 1).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge1.setWayGeometry(Helper.createPointList());
edge1.setKeyValues(createKV("name", "Street 1"));
EdgeIteratorState edge2 = g.edge(1, 2).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge2.setWayGeometry(Helper.createPointList());
edge2.setKeyValues(createKV("name", "Street 2"));
EdgeIteratorState edge3 = g.edge(2, 3).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge3.setWayGeometry(Helper.createPointList());
edge3.setKeyValues(createKV("name", "Street 3"));
EdgeIteratorState edge4 = g.edge(3, 4).setDistance(500).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge4.setWayGeometry(Helper.createPointList());
edge4.setKeyValues(createKV("name", "Street 4"));
g.edge(1, 5).setDistance(10000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
g.edge(2, 5).setDistance(10000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
g.edge(3, 5).setDistance(100000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
SPTEntry e1 =
new SPTEntry(edge4.getEdge(), 4, 1,
new SPTEntry(edge3.getEdge(), 3, 1,
new SPTEntry(edge2.getEdge(), 2, 1,
new SPTEntry(edge1.getEdge(), 1, 1,
new SPTEntry(0, 1)
))));
FastestWeighting weighting = new FastestWeighting(carAccessEnc, carAvSpeedEnc);
Path path = extractPath(g, weighting, e1);
InstructionList il = InstructionsFromEdges.calcInstructions(path, path.graph, weighting, carManager, tr);
assertEquals(5, il.size());
assertEquals(Instruction.CONTINUE_ON_STREET, il.get(0).getSign());
assertEquals(Instruction.TURN_RIGHT, il.get(1).getSign());
assertEquals(Instruction.TURN_LEFT, il.get(2).getSign());
assertEquals(Instruction.TURN_SHARP_LEFT, il.get(3).getSign());
assertEquals(Instruction.FINISH, il.get(4).getSign());
}
/**
* Test roundabout instructions for different profiles
*/
@Test
void testCalcInstructionsRoundabout() {
calcInstructionsRoundabout(mixedCarAccessEnc, mixedCarSpeedEnc);
calcInstructionsRoundabout(mixedFootAccessEnc, mixedFootSpeedEnc);
}
public void calcInstructionsRoundabout(BooleanEncodedValue accessEnc, DecimalEncodedValue speedEnc) {
ShortestWeighting weighting = new ShortestWeighting(accessEnc, speedEnc);
Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 8);
assertTrue(p.isFound());
assertEquals("[1, 2, 3, 4, 5, 8]", p.calcNodes().toString());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
// Test instructions
List<String> tmpList = getTurnDescriptions(wayList);
assertEquals(Arrays.asList("continue onto MainStreet 1 2",
"At roundabout, take exit 3 onto 5-8",
"arrive at destination"),
tmpList);
// Test Radian
double delta = roundaboutGraph.getAngle(1, 2, 5, 8);
RoundaboutInstruction instr = (RoundaboutInstruction) wayList.get(1);
assertEquals(delta, instr.getTurnAngle(), 0.01);
// case of continuing a street through a roundabout
p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED).
calcPath(1, 7);
wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
tmpList = getTurnDescriptions(wayList);
assertEquals(Arrays.asList("continue onto MainStreet 1 2",
"At roundabout, take exit 2 onto MainStreet 4 7",
"arrive at destination"),
tmpList);
// Test Radian
delta = roundaboutGraph.getAngle(1, 2, 4, 7);
instr = (RoundaboutInstruction) wayList.get(1);
assertEquals(delta, instr.getTurnAngle(), 0.01);
}
@Test
public void testCalcInstructionsRoundaboutBegin() {
ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
.calcPath(2, 8);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
List<String> tmpList = getTurnDescriptions(wayList);
assertEquals(Arrays.asList("At roundabout, take exit 3 onto 5-8",
"arrive at destination"),
tmpList);
}
@Test
public void testCalcInstructionsRoundaboutDirectExit() {
roundaboutGraph.inverse3to9();
ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
.calcPath(6, 8);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
List<String> tmpList = getTurnDescriptions(wayList);
assertEquals(Arrays.asList("continue onto 3-6",
"At roundabout, take exit 3 onto 5-8",
"arrive at destination"),
tmpList);
roundaboutGraph.inverse3to9();
}
@Test
public void testCalcAverageSpeedDetails() {
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
assertTrue(p.isFound());
Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(AVERAGE_SPEED), new PathDetailsBuilderFactory(), 0);
assertTrue(details.size() == 1);
List<PathDetail> averageSpeedDetails = details.get(AVERAGE_SPEED);
assertEquals(4, averageSpeedDetails.size());
assertEquals(45.0, averageSpeedDetails.get(0).getValue());
assertEquals(90.0, averageSpeedDetails.get(1).getValue());
assertEquals(10.0, averageSpeedDetails.get(2).getValue());
assertEquals(45.0, averageSpeedDetails.get(3).getValue());
assertEquals(0, averageSpeedDetails.get(0).getFirst());
assertEquals(1, averageSpeedDetails.get(1).getFirst());
assertEquals(2, averageSpeedDetails.get(2).getFirst());
assertEquals(3, averageSpeedDetails.get(3).getFirst());
assertEquals(4, averageSpeedDetails.get(3).getLast());
}
@Test
public void testCalcAverageSpeedDetailsWithShortDistances_issue1848() {
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 6);
assertTrue(p.isFound());
Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(AVERAGE_SPEED), new PathDetailsBuilderFactory(), 0);
assertTrue(details.size() == 1);
List<PathDetail> averageSpeedDetails = details.get(AVERAGE_SPEED);
assertEquals(4, averageSpeedDetails.size());
// reverse path includes 'null' value as first
p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(6, 1);
assertTrue(p.isFound());
details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(AVERAGE_SPEED), new PathDetailsBuilderFactory(), 0);
assertTrue(details.size() == 1);
averageSpeedDetails = details.get(AVERAGE_SPEED);
assertEquals(5, averageSpeedDetails.size());
assertNull(averageSpeedDetails.get(0).getValue());
}
@Test
public void testCalcStreetNameDetails() {
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
assertTrue(p.isFound());
Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(STREET_NAME), new PathDetailsBuilderFactory(), 0);
assertTrue(details.size() == 1);
List<PathDetail> streetNameDetails = details.get(STREET_NAME);
assertTrue(details.size() == 1);
assertEquals(4, streetNameDetails.size());
assertEquals("1-2", streetNameDetails.get(0).getValue());
assertEquals("2-3", streetNameDetails.get(1).getValue());
assertEquals("3-4", streetNameDetails.get(2).getValue());
assertEquals("4-5", streetNameDetails.get(3).getValue());
assertEquals(0, streetNameDetails.get(0).getFirst());
assertEquals(1, streetNameDetails.get(1).getFirst());
assertEquals(2, streetNameDetails.get(2).getFirst());
assertEquals(3, streetNameDetails.get(3).getFirst());
assertEquals(4, streetNameDetails.get(3).getLast());
}
@Test
public void testCalcEdgeIdDetails() {
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
assertTrue(p.isFound());
Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(EDGE_ID), new PathDetailsBuilderFactory(), 0);
assertTrue(details.size() == 1);
List<PathDetail> edgeIdDetails = details.get(EDGE_ID);
assertEquals(4, edgeIdDetails.size());
assertEquals(0, edgeIdDetails.get(0).getValue());
// This is out of order because we don't create the edges in order
assertEquals(2, edgeIdDetails.get(1).getValue());
assertEquals(3, edgeIdDetails.get(2).getValue());
assertEquals(1, edgeIdDetails.get(3).getValue());
assertEquals(0, edgeIdDetails.get(0).getFirst());
assertEquals(1, edgeIdDetails.get(1).getFirst());
assertEquals(2, edgeIdDetails.get(2).getFirst());
assertEquals(3, edgeIdDetails.get(3).getFirst());
assertEquals(4, edgeIdDetails.get(3).getLast());
}
@Test
public void testCalcEdgeKeyDetailsForward() {
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
assertTrue(p.isFound());
Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(EDGE_KEY), new PathDetailsBuilderFactory(), 0);
List<PathDetail> edgeKeyDetails = details.get(EDGE_KEY);
assertEquals(4, edgeKeyDetails.size());
assertEquals(0, edgeKeyDetails.get(0).getValue());
assertEquals(4, edgeKeyDetails.get(1).getValue());
assertEquals(6, edgeKeyDetails.get(2).getValue());
assertEquals(2, edgeKeyDetails.get(3).getValue());
}
@Test
public void testCalcEdgeKeyDetailsBackward() {
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(5, 1);
assertTrue(p.isFound());
Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(EDGE_KEY), new PathDetailsBuilderFactory(), 0);
List<PathDetail> edgeKeyDetails = details.get(EDGE_KEY);
assertEquals(4, edgeKeyDetails.size());
assertEquals(3, edgeKeyDetails.get(0).getValue());
assertEquals(7, edgeKeyDetails.get(1).getValue());
assertEquals(5, edgeKeyDetails.get(2).getValue());
assertEquals(1, edgeKeyDetails.get(3).getValue());
}
@Test
public void testCalcTimeDetails() {
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
assertTrue(p.isFound());
Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(TIME), new PathDetailsBuilderFactory(), 0);
assertTrue(details.size() == 1);
List<PathDetail> timeDetails = details.get(TIME);
assertEquals(4, timeDetails.size());
assertEquals(400L, timeDetails.get(0).getValue());
assertEquals(200L, timeDetails.get(1).getValue());
assertEquals(3600L, timeDetails.get(2).getValue());
assertEquals(400L, timeDetails.get(3).getValue());
assertEquals(0, timeDetails.get(0).getFirst());
assertEquals(1, timeDetails.get(1).getFirst());
assertEquals(2, timeDetails.get(2).getFirst());
assertEquals(3, timeDetails.get(3).getFirst());
assertEquals(4, timeDetails.get(3).getLast());
}
@Test
public void testCalcDistanceDetails() {
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
assertTrue(p.isFound());
Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
Arrays.asList(DISTANCE), new PathDetailsBuilderFactory(), 0);
assertTrue(details.size() == 1);
List<PathDetail> distanceDetails = details.get(DISTANCE);
assertEquals(5D, distanceDetails.get(0).getValue());
assertEquals(5D, distanceDetails.get(1).getValue());
assertEquals(10D, distanceDetails.get(2).getValue());
assertEquals(5D, distanceDetails.get(3).getValue());
}
/**
* case with one edge being not an exit
*/
@Test
public void testCalcInstructionsRoundabout2() {
roundaboutGraph.inverse3to6();
ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 8);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
List<String> tmpList = getTurnDescriptions(wayList);
assertEquals(Arrays.asList("continue onto MainStreet 1 2",
"At roundabout, take exit 2 onto 5-8",
"arrive at destination"),
tmpList);
// Test Radian
double delta = roundaboutGraph.getAngle(1, 2, 5, 8);
RoundaboutInstruction instr = (RoundaboutInstruction) wayList.get(1);
assertEquals(delta, instr.getTurnAngle(), 0.01);
roundaboutGraph.inverse3to6();
}
@Test
public void testCalcInstructionsRoundaboutIssue353() {
final BaseGraph graph = new BaseGraph.Builder(carManager).create();
final NodeAccess na = graph.getNodeAccess();
//
// 8
// \
// 5
// / \
// 11- 1 - 2 4 - 7
// | \ /
// 10 -9 -3
// \ |
// --- 6
na.setNode(1, 52.514, 13.348);
na.setNode(2, 52.514, 13.349);
na.setNode(3, 52.5135, 13.35);
na.setNode(4, 52.514, 13.351);
na.setNode(5, 52.5145, 13.351);
na.setNode(6, 52.513, 13.35);
na.setNode(7, 52.514, 13.352);
na.setNode(8, 52.515, 13.351);
na.setNode(9, 52.5135, 13.349);
na.setNode(10, 52.5135, 13.348);
na.setNode(11, 52.514, 13.347);
GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(2, 1).setDistance(5)).setKeyValues(createKV("name", "MainStreet 2 1"));
GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(1, 11).setDistance(5)).setKeyValues(createKV("name", "MainStreet 1 11"));
// roundabout
EdgeIteratorState tmpEdge;
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(3, 9).setDistance(2)).setKeyValues(createKV("name", "3-9"));
BooleanEncodedValue carManagerRoundabout = carManager.getBooleanEncodedValue(Roundabout.KEY);
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(9, 10).setDistance(2)).setKeyValues(createKV("name", "9-10"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(6, 10).setDistance(2)).setKeyValues(createKV("name", "6-10"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(10, 1).setDistance(2)).setKeyValues(createKV("name", "10-1"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(3, 2).setDistance(5)).setKeyValues(createKV("name", "2-3"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(4, 3).setDistance(5)).setKeyValues(createKV("name", "3-4"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(5, 4).setDistance(5)).setKeyValues(createKV("name", "4-5"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(2, 5).setDistance(5)).setKeyValues(createKV("name", "5-2"));
tmpEdge.set(carManagerRoundabout, true);
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(4, 7).setDistance(5)).setKeyValues(createKV("name", "MainStreet 4 7"));
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(5, 8).setDistance(5)).setKeyValues(createKV("name", "5-8"));
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(3, 6).setDistance(5)).setKeyValues(createKV("name", "3-6"));
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED)
.calcPath(6, 11);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
List<String> tmpList = getTurnDescriptions(wayList);
assertEquals(Arrays.asList("At roundabout, take exit 1 onto MainStreet 1 11",
"arrive at destination"),
tmpList);
}
// Drives the shared roundabout fixture in clockwise mode and checks that the
// instruction list reports exit 1 onto "5-8"; also verifies that the reported
// turn angle matches the angle computed directly from the node coordinates.
@Test
public void testCalcInstructionsRoundaboutClockwise() {
    roundaboutGraph.setRoundabout(true); // true = clockwise orientation of the roundabout edges
    ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 8);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
    List<String> tmpList = getTurnDescriptions(wayList);
    assertEquals(Arrays.asList("continue onto MainStreet 1 2",
            "At roundabout, take exit 1 onto 5-8",
            "arrive at destination"),
            tmpList);
    // Test Radian: entry heading is 1->2, exit heading is 5->8; the roundabout
    // instruction must report the same relative angle within 0.01 rad.
    double delta = roundaboutGraph.getAngle(1, 2, 5, 8);
    RoundaboutInstruction instr = (RoundaboutInstruction) wayList.get(1);
    assertEquals(delta, instr.getTurnAngle(), 0.01);
}
// A sequence of straight edges (even with a street-name change) must not
// produce intermediate CONTINUE instructions.
@Test
public void testCalcInstructionsIgnoreContinue() {
    // Follow a couple of straight edges, including a name change
    ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
            .calcPath(4, 11);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
    // Contain only start and finish instruction, no CONTINUE
    assertEquals(2, wayList.size());
}
// When the street bends but there is no alternative way to go, no turn
// instruction should be generated.
@Test
public void testCalcInstructionsIgnoreTurnIfNoAlternative() {
    // The street turns left, but there is no turn alternative at that node
    ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
            .calcPath(10, 12);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
    // Contain only start and finish instruction
    assertEquals(2, wayList.size());
}
// At a fork where both branches carry the same street name, a turn
// instruction must be enforced — otherwise the fork would be ambiguous.
@Test
public void testCalcInstructionForForkWithSameName() {
    final BaseGraph graph = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = graph.getNodeAccess();
    // Actual example: point=48.982618%2C13.122021&point=48.982336%2C13.121002
    // 1-2 & 2-4 have the same Street name, but other from that, it would be hard to see the difference
    // We have to enforce a turn instruction here
    //      3
    //       \
    //  2 -- 1
    //       /
    //      4
    na.setNode(1, 48.982618, 13.122021);
    na.setNode(2, 48.982565, 13.121597);
    na.setNode(3, 48.982611, 13.121012);
    na.setNode(4, 48.982336, 13.121002);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "Regener Weg"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "Regener Weg"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 3).setDistance(5));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 4);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // named constant instead of the magic sign value -7
    assertEquals(Instruction.KEEP_LEFT, wayList.get(1).getSign());
}
// Leaving a motorway fork where the through-way keeps the same road class and
// name (2-3 is only a motorway_link) must not create a turn instruction.
@Test
public void testCalcInstructionForMotorwayFork() {
    final BaseGraph graph = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = graph.getNodeAccess();
    // Actual example: point=48.909071%2C8.647136&point=48.908789%2C8.649244
    // 1-2 & 2-4 is a motorway, 2-3 is a motorway_link
    // We should skip the instruction here
    //      1 ---- 2 ---- 4
    //              \
    //               3
    na.setNode(1, 48.909071, 8.647136);
    na.setNode(2, 48.908962, 8.647978);
    na.setNode(3, 48.908867, 8.648155);
    na.setNode(4, 48.908789, 8.649244);
    EnumEncodedValue<RoadClass> roadClassEnc = carManager.getEnumEncodedValue(RoadClass.KEY, RoadClass.class);
    BooleanEncodedValue roadClassLinkEnc = carManager.getBooleanEncodedValue(RoadClassLink.KEY);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "A 8")).set(roadClassEnc, RoadClass.MOTORWAY).set(roadClassLinkEnc, false);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "A 8")).set(roadClassEnc, RoadClass.MOTORWAY).set(roadClassLinkEnc, false);
    // the off-ramp: unnamed, marked as a link
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 3).setDistance(5)).set(roadClassEnc, RoadClass.MOTORWAY).set(roadClassLinkEnc, true);
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 4);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    // only start + finish: staying on the motorway needs no instruction
    assertEquals(2, wayList.size());
}
// Merging from a ramp onto a oneway motorway (4 -> 2 -> 3) must not generate
// a turn instruction, since there is no alternative at node 2.
@Test
public void testCalcInstructionsEnterMotorway() {
    final BaseGraph graph = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = graph.getNodeAccess();
    // Actual example: point=48.630533%2C9.459416&point=48.630544%2C9.459829
    // 1 -2 -3 is a motorway and tagged as oneway
    //   1 ->- 2 ->- 3
    //        /
    //       4
    na.setNode(1, 48.630647, 9.459041);
    na.setNode(2, 48.630586, 9.459604);
    na.setNode(3, 48.630558, 9.459851);
    na.setNode(4, 48.63054, 9.459406);
    // forward-only access models the oneway tagging
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(4, 2).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED)
            .calcPath(4, 3);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    // no turn instruction for entering the highway
    assertEquals(2, wayList.size());
}
// A typical motorway junction: when following 1-2-3 there should ideally be a
// "keep right" at node 2; the current implementation emits CONTINUE instead
// (see the TODO below), which this test pins down.
@Test
public void testCalcInstructionsMotorwayJunction() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Actual example: point=48.70672%2C9.164266&point=48.706805%2C9.162995
    // A typical motorway junction, when following 1-2-3, there should be a keep right at 2
    //         -- 4
    //        /
    // 1 -- 2 -- 3
    na.setNode(1, 48.70672, 9.164266);
    na.setNode(2, 48.706741, 9.163719);
    na.setNode(3, 48.706805, 9.162995);
    na.setNode(4, 48.706705, 9.16329);
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 3);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // TODO this should be a keep_right
    // named constant instead of the magic sign value 0
    assertEquals(Instruction.CONTINUE_ON_STREET, wayList.get(1).getSign());
}
// Turning from a side street onto a oneway: driving 4 -> 2 -> 3 must produce
// a right-turn instruction at node 2.
@Test
public void testCalcInstructionsOntoOneway() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Actual example: point=-33.824566%2C151.187834&point=-33.82441%2C151.188231
    // 1 -2 -3 is a oneway
    //   1 ->- 2 ->- 3
    //         |
    //         4
    na.setNode(1, -33.824245, 151.187866);
    na.setNode(2, -33.824335, 151.188017);
    na.setNode(3, -33.824415, 151.188177);
    na.setNode(4, -33.824437, 151.187925);
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "Pacific Highway"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "Pacific Highway"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(4, 2).setDistance(5)).setKeyValues(createKV("name", "Greenwich Road"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(4, 3);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // named constant instead of the magic sign value 2 (consistent with testFindInstruction)
    assertEquals(Instruction.TURN_RIGHT, wayList.get(1).getSign());
}
// Regression test for issue #1047: at node 2 the primary road B 156 bends
// right while S 108 branches off straight ahead. Both route choices (1-2-4
// and 1-2-3) must yield an instruction at node 2.
@Test
public void testCalcInstructionIssue1047() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Actual example: point=51.367105%2C14.491246&point=51.369048%2C14.483092
    // 1-2 & 2-3 is a road that is turning right, 2-4 is a that is branching off.
    // When driving 1-2-4, we should create an instruction notifying the user to continue straight instead of turning and following the road
    // When driving 1-2-3, we should create an instruction as well
    //
    //      1 ---- 2 ---- 4
    //             |
    //             3
    na.setNode(1, 51.367544, 14.488209);
    na.setNode(2, 51.368046, 14.486525);
    na.setNode(3, 51.36875, 14.487019);
    na.setNode(4, 51.368428, 14.485173);
    EnumEncodedValue<RoadClass> roadClassEnc = carManager.getEnumEncodedValue(RoadClass.KEY, RoadClass.class);
    BooleanEncodedValue roadClassLinkEnc = carManager.getBooleanEncodedValue(RoadClassLink.KEY);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "B 156")).set(roadClassEnc, RoadClass.PRIMARY).set(roadClassLinkEnc, false);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "S 108")).set(roadClassEnc, RoadClass.SECONDARY).set(roadClassLinkEnc, false);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "B 156")).set(roadClassEnc, RoadClass.PRIMARY).set(roadClassLinkEnc, false);
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    // route 1 -> 4 (leaving the primary road straight ahead)
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 4);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // route 1 -> 3 (following the primary road around the bend)
    p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 3);
    assertTrue(p.isFound());
    wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
}
// Leaving the named street via a straight continuation (1 -> 2 -> 4, while
// "Regener Weg" bends off to node 3) must produce an instruction.
@Test
public void testCalcInstructionContinueLeavingStreet() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // When leaving the current street via a Continue, we should show it
    //       3
    //        \
    //     4 - 2   --   1
    na.setNode(1, 48.982618, 13.122021);
    na.setNode(2, 48.982565, 13.121597);
    na.setNode(3, 48.982611, 13.121012);
    na.setNode(4, 48.982565, 13.121002);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "Regener Weg"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 4).setDistance(5));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "Regener Weg"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 4);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // named constant instead of the magic sign value -7
    assertEquals(Instruction.KEEP_LEFT, wayList.get(1).getSign());
}
// At this crossing the correct side is ambiguous without an instruction
// (Google Maps and Bing show a turn, OSRM does not); driving 4 -> 2 -> 1 must
// produce a slight-left instruction.
@Test
public void testCalcInstructionSlightTurn() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Real Situation: point=48.411927%2C15.599197&point=48.412094%2C15.598816
    // When reaching this Crossing, you cannot know if you should turn left or right
    // Google Maps and Bing show a turn, OSRM does not
    //  1 ---2--- 3
    //       \
    //        4
    na.setNode(1, 48.412094, 15.598816);
    na.setNode(2, 48.412055, 15.599068);
    na.setNode(3, 48.412034, 15.599411);
    na.setNode(4, 48.411927, 15.599197);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "Stöhrgasse"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "Stöhrgasse"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(4, 1);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // named constant instead of the magic sign value -1
    assertEquals(Instruction.TURN_SLIGHT_LEFT, wayList.get(1).getSign());
}
// Two parallel oneways connected by a two-way link: routing 1 -> 4 forces a
// U-turn over the link 2-5, which must be reported as U_TURN_LEFT.
@Test
public void testUTurnLeft() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Real Situation: point=48.402116%2C9.994367&point=48.402198%2C9.99507
    //       7
    //       |
    //  4----5----6
    //       |
    //  1----2----3
    na.setNode(1, 48.402116, 9.994367);
    na.setNode(2, 48.402198, 9.99507);
    na.setNode(3, 48.402344, 9.996266);
    na.setNode(4, 48.402191, 9.994351);
    na.setNode(5, 48.402298, 9.995053);
    na.setNode(6, 48.402422, 9.996067);
    na.setNode(7, 48.402604, 9.994962);
    // (fwd=60, back=0): Olgastraße edges are oneway in the listed direction
    GHUtility.setSpeed(60, 0, carAccessEnc, carAvSpeedEnc,
            g.edge(1, 2).setDistance(5).setKeyValues(createKV("name", "Olgastraße")),
            g.edge(2, 3).setDistance(5).setKeyValues(createKV("name", "Olgastraße")),
            g.edge(6, 5).setDistance(5).setKeyValues(createKV("name", "Olgastraße")),
            g.edge(5, 4).setDistance(5).setKeyValues(createKV("name", "Olgastraße")));
    // (fwd=60, back=60): the connecting Neithardtstraße is drivable both ways
    GHUtility.setSpeed(60, 60, carAccessEnc, carAvSpeedEnc,
            g.edge(2, 5).setDistance(5).setKeyValues(createKV("name", "Neithardtstraße")),
            g.edge(5, 7).setDistance(5).setKeyValues(createKV("name", "Neithardtstraße")));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 4);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    assertEquals(Instruction.U_TURN_LEFT, wayList.get(1).getSign());
}
// Mirror of testUTurnLeft in the southern hemisphere: routing 1 -> 6 forces a
// U-turn over the link 2-5, which must be reported as U_TURN_RIGHT.
@Test
public void testUTurnRight() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Real Situation: point=-33.885758,151.181472&point=-33.885692,151.181445
    //       7
    //       |
    //  4----5----6
    //       |
    //  3----2----1
    na.setNode(1, -33.885758, 151.181472);
    na.setNode(2, -33.885852, 151.180968);
    na.setNode(3, -33.885968, 151.180501);
    na.setNode(4, -33.885883, 151.180442);
    na.setNode(5, -33.885772, 151.180941);
    na.setNode(6, -33.885692, 151.181445);
    // NOTE(review): node 7 reuses node 6's coordinates — per the sketch it should
    // lie north of 5; the path 1->6 does not traverse it, so the test still passes.
    na.setNode(7, -33.885692, 151.181445);
    // (fwd=60, back=0): Parramatta Road edges are oneway in the listed direction
    GHUtility.setSpeed(60, 0, carAccessEnc, carAvSpeedEnc,
            g.edge(1, 2).setDistance(5).setKeyValues(createKV("name", "Parramatta Road")),
            g.edge(2, 3).setDistance(5).setKeyValues(createKV("name", "Parramatta Road")),
            g.edge(4, 5).setDistance(5).setKeyValues(createKV("name", "Parramatta Road")),
            g.edge(5, 6).setDistance(5).setKeyValues(createKV("name", "Parramatta Road")));
    // (fwd=60, back=60): the connecting Larkin Street is drivable both ways
    GHUtility.setSpeed(60, 60, carAccessEnc, carAvSpeedEnc,
            g.edge(2, 5).setDistance(5).setKeyValues(createKV("name", "Larkin Street")),
            g.edge(5, 7).setDistance(5).setKeyValues(createKV("name", "Larkin Street")));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 6);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    assertEquals(Instruction.U_TURN_RIGHT, wayList.get(1).getSign());
}
// A genuine turn in the shared fixture (11 -> 13) must produce exactly one
// right-turn instruction between start and finish.
@Test
public void testCalcInstructionsForTurn() {
    ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
            .calcPath(11, 13);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
    // Contain start, turn, and finish instruction
    assertEquals(3, wayList.size());
    // Assert turn right — named constant instead of the magic sign value 2
    assertEquals(Instruction.TURN_RIGHT, wayList.get(1).getSign());
}
// A fork with two slight turns (12 -> 16): since both branches are slight,
// the instruction must be shown so the fork is not ambiguous.
@Test
public void testCalcInstructionsForSlightTurnWithOtherSlightTurn() {
    // Test for a fork with two slight turns. Since there are two slight turns, show the turn instruction
    ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
            .calcPath(12, 16);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
    // Contain start, turn, and finish instruction
    assertEquals(3, wayList.size());
    // Assert keep right — named constant instead of the magic sign value 7
    // (the old comment said "turn right", which did not match sign 7)
    assertEquals(Instruction.KEEP_RIGHT, wayList.get(1).getSign());
}
// A slight turn onto a differently-named street (1 -> 3 -> 2) must still be
// reported as TURN_SLIGHT_RIGHT.
@Test
public void testCalcInstructionsForSlightTurnOntoDifferentStreet() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Actual example: point=48.76445%2C8.679054&point=48.764152%2C8.678722
    //      1
    //     /
    // 2 - 3 - 4
    //
    na.setNode(1, 48.76423, 8.679103);
    na.setNode(2, 48.76417, 8.678647);
    na.setNode(3, 48.764149, 8.678926);
    na.setNode(4, 48.764085, 8.679183);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(1, 3).setDistance(5)).setKeyValues(createKV("name", "Talstraße, K 4313"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "Calmbacher Straße, K 4312"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(3, 4).setDistance(5)).setKeyValues(createKV("name", "Calmbacher Straße, K 4312"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 2);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    assertEquals(Instruction.TURN_SLIGHT_RIGHT, wayList.get(1).getSign());
}
// At a fork of one slight turn and one sharp turn, following the slight turn
// (16 -> 19) needs no instruction — the slight branch is the natural choice.
@Test
public void testIgnoreInstructionsForSlightTurnWithOtherTurn() {
    // Test for a fork with one slight turn and one actual turn. We are going along the slight turn. No turn instruction needed in this case
    ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
            .calcPath(16, 19);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
    // Contain start, and finish instruction
    assertEquals(2, wayList.size());
}
/**
 * Maps every instruction in the given list to its localized turn description,
 * using the test's fixed {@code tr} translation, preserving order.
 */
List<String> getTurnDescriptions(InstructionList instructionJson) {
    List<String> descriptions = new ArrayList<>();
    for (Instruction current : instructionJson) {
        String text = current.getTurnDescription(tr);
        descriptions.add(text);
    }
    return descriptions;
}
// Builds the small fixed graph used by the path-details tests: a chain of
// edges with varying speeds (45/90/9 km/h) and distances so that speed and
// street-name details change along the path.
private Graph generatePathDetailsGraph() {
    final BaseGraph graph = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = graph.getNodeAccess();
    na.setNode(1, 52.514, 13.348);
    na.setNode(2, 52.514, 13.349);
    na.setNode(3, 52.514, 13.350);
    na.setNode(4, 52.515, 13.349);
    na.setNode(5, 52.516, 13.3452);
    na.setNode(6, 52.516, 13.344);
    GHUtility.setSpeed(45, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "1-2"));
    GHUtility.setSpeed(45, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(4, 5).setDistance(5)).setKeyValues(createKV("name", "4-5"));
    GHUtility.setSpeed(90, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "2-3"));
    GHUtility.setSpeed(9, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(3, 4).setDistance(10)).setKeyValues(createKV("name", "3-4"));
    // edge 5-6 deliberately(?) repeats the name "3-4" and has a near-zero
    // distance — presumably to exercise detail merging; TODO confirm intent
    GHUtility.setSpeed(9, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(5, 6).setDistance(0.001)).setKeyValues(createKV("name", "3-4"));
    return graph;
}
/**
 * Shared test fixture: a small network containing a four-node roundabout
 * (nodes 2-3-4-5) plus surrounding streets, built on the mixed car+foot
 * encoding. The roundabout direction can be flipped via setRoundabout(),
 * and the edges 3-6 / 3-9 can be inverted to vary the exit layout.
 */
private class RoundaboutGraph {
    final BaseGraph g;
    final NodeAccess na;
    // kept as fields so tests can invert their direction individually
    final EdgeIteratorState edge3to6, edge3to9;
    boolean clockwise = false;
    List<EdgeIteratorState> roundaboutEdges = new LinkedList<>();
    private RoundaboutGraph() {
        g = new BaseGraph.Builder(mixedEncodingManager).create();
        na = g.getNodeAccess();
        //                                       18
        //      8                 14      |
        //       \                 |      / 16 - 17
        //        5               12 - 13 \-- 19
        //       /  \            /         \
        //  1 - 2    4 - 7 - 10 - 11
        //       \  /
        //        3
        //        | \
        //        6 [ 9 ] edge 9 is turned off in default mode
        na.setNode(1, 52.514, 13.348);
        na.setNode(2, 52.514, 13.349);
        na.setNode(3, 52.5135, 13.35);
        na.setNode(4, 52.514, 13.351);
        na.setNode(5, 52.5145, 13.351);
        na.setNode(6, 52.513, 13.35);
        na.setNode(7, 52.514, 13.352);
        na.setNode(8, 52.515, 13.351);
        na.setNode(9, 52.513, 13.351);
        na.setNode(10, 52.514, 13.353);
        na.setNode(11, 52.514, 13.354);
        na.setNode(12, 52.515, 13.354);
        na.setNode(13, 52.515, 13.355);
        na.setNode(14, 52.516, 13.354);
        na.setNode(15, 52.516, 13.360);
        na.setNode(16, 52.514, 13.360);
        na.setNode(17, 52.514, 13.361);
        na.setNode(18, 52.513, 13.361);
        na.setNode(19, 52.515, 13.368);
        // roundabout: the four edges forming the circle 2-3-4-5
        roundaboutEdges.add(g.edge(3, 2).setDistance(5).setKeyValues(createKV("name", "2-3")));
        roundaboutEdges.add(g.edge(4, 3).setDistance(5).setKeyValues(createKV("name", "3-4")));
        roundaboutEdges.add(g.edge(5, 4).setDistance(5).setKeyValues(createKV("name", "4-5")));
        roundaboutEdges.add(g.edge(2, 5).setDistance(5).setKeyValues(createKV("name", "5-2")));
        // oneDir starts as a copy of the roundabout edges; further oneway edges are appended below
        List<EdgeIteratorState> bothDir = new ArrayList<>();
        List<EdgeIteratorState> oneDir = new ArrayList<>(roundaboutEdges);
        bothDir.add(g.edge(1, 2).setDistance(5).setKeyValues(createKV("name", "MainStreet 1 2")));
        bothDir.add(g.edge(4, 7).setDistance(5).setKeyValues(createKV("name", "MainStreet 4 7")));
        bothDir.add(g.edge(5, 8).setDistance(5).setKeyValues(createKV("name", "5-8")));
        bothDir.add(edge3to6 = g.edge(3, 6).setDistance(5).setKeyValues(createKV("name", "3-6")));
        oneDir.add(edge3to9 = g.edge(3, 9).setDistance(5).setKeyValues(createKV("name", "3-9")));
        bothDir.add(g.edge(7, 10).setDistance(5));
        bothDir.add(g.edge(10, 11).setDistance(5));
        bothDir.add(g.edge(11, 12).setDistance(5));
        bothDir.add(g.edge(12, 13).setDistance(5));
        bothDir.add(g.edge(12, 14).setDistance(5));
        bothDir.add(g.edge(13, 15).setDistance(5));
        bothDir.add(g.edge(13, 16).setDistance(5));
        bothDir.add(g.edge(16, 17).setDistance(5));
        bothDir.add(g.edge(17, 18).setDistance(5));
        bothDir.add(g.edge(17, 19).setDistance(5));
        // car 70 km/h, foot 7 km/h; both directions
        for (EdgeIteratorState edge : bothDir) {
            GHUtility.setSpeed(70, 70, mixedCarAccessEnc, mixedCarSpeedEnc, edge);
            GHUtility.setSpeed(7, 7, mixedFootAccessEnc, mixedFootSpeedEnc, edge);
        }
        // backward speed 0 = oneway in the listed direction
        for (EdgeIteratorState edge : oneDir) {
            GHUtility.setSpeed(70, 0, mixedCarAccessEnc, mixedCarSpeedEnc, edge);
            GHUtility.setSpeed(7, 0, mixedFootAccessEnc, mixedFootSpeedEnc, edge);
        }
        setRoundabout(clockwise);
        inverse3to9(); // default mode: edge 3-9 is turned off
    }
    // Flags the circle edges as roundabout and orients their access:
    // clockwise = drive them as listed, counter-clockwise = reversed.
    public void setRoundabout(boolean clockwise) {
        BooleanEncodedValue mixedRoundabout = mixedEncodingManager.getBooleanEncodedValue(Roundabout.KEY);
        for (EdgeIteratorState edge : roundaboutEdges) {
            edge.set(mixedCarAccessEnc, clockwise).setReverse(mixedCarAccessEnc, !clockwise);
            edge.set(mixedFootAccessEnc, clockwise).setReverse(mixedFootAccessEnc, !clockwise);
            edge.set(mixedRoundabout, true);
        }
        this.clockwise = clockwise;
    }
    // Toggles forward access on 3-9 (reverse stays off): used to switch the
    // extra exit on and off between tests.
    public void inverse3to9() {
        edge3to9.set(mixedCarAccessEnc, !edge3to9.get(mixedCarAccessEnc)).setReverse(mixedCarAccessEnc, false);
        edge3to9.set(mixedFootAccessEnc, !edge3to9.get(mixedFootAccessEnc)).setReverse(mixedFootAccessEnc, false);
    }
    // Toggles forward access on 3-6 (reverse stays on).
    public void inverse3to6() {
        edge3to6.set(mixedCarAccessEnc, !edge3to6.get(mixedCarAccessEnc)).setReverse(mixedCarAccessEnc, true);
        edge3to6.set(mixedFootAccessEnc, !edge3to6.get(mixedFootAccessEnc)).setReverse(mixedFootAccessEnc, true);
    }
    // Relative angle between the headings n1->n2 (entry) and n3->n4 (exit),
    // signed according to the current roundabout direction.
    private double getAngle(int n1, int n2, int n3, int n4) {
        double inOrientation = AngleCalc.ANGLE_CALC.calcOrientation(na.getLat(n1), na.getLon(n1), na.getLat(n2), na.getLon(n2));
        double outOrientation = AngleCalc.ANGLE_CALC.calcOrientation(na.getLat(n3), na.getLon(n3), na.getLat(n4), na.getLon(n4));
        outOrientation = AngleCalc.ANGLE_CALC.alignOrientation(inOrientation, outOrientation);
        double delta = (inOrientation - outOrientation);
        delta = clockwise ? (Math.PI + delta) : -1 * (Math.PI - delta);
        return delta;
    }
}
// Convenience wrapper: reconstructs a Path from a shortest-path-tree entry
// via PathExtractor, keeping the test bodies short.
private static Path extractPath(Graph graph, Weighting weighting, SPTEntry sptEntry) {
    return PathExtractor.extractPath(graph, weighting, sptEntry);
}
} | core/src/test/java/com/graphhopper/routing/PathTest.java | /*
* Licensed to GraphHopper GmbH under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* GraphHopper GmbH licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphhopper.routing;
import com.graphhopper.routing.ev.*;
import com.graphhopper.routing.util.EncodingManager;
import com.graphhopper.routing.util.TraversalMode;
import com.graphhopper.routing.weighting.FastestWeighting;
import com.graphhopper.routing.weighting.ShortestWeighting;
import com.graphhopper.routing.weighting.Weighting;
import com.graphhopper.storage.BaseGraph;
import com.graphhopper.storage.Graph;
import com.graphhopper.storage.NodeAccess;
import com.graphhopper.util.*;
import com.graphhopper.util.details.PathDetail;
import com.graphhopper.util.details.PathDetailsBuilderFactory;
import com.graphhopper.util.details.PathDetailsFromEdges;
import org.junit.jupiter.api.Test;
import java.util.*;
import static com.graphhopper.search.EdgeKVStorage.KeyValue.createKV;
import static com.graphhopper.storage.AbstractGraphStorageTester.assertPList;
import static com.graphhopper.util.Parameters.Details.*;
import com.graphhopper.search.EdgeKVStorage.KeyValue;
import static org.junit.jupiter.api.Assertions.*;
/**
* @author Peter Karich
*/
public class PathTest {
// car-only encoding: access flag (stored in both directions) + speed value
private final BooleanEncodedValue carAccessEnc = new SimpleBooleanEncodedValue("access", true);
private final DecimalEncodedValue carAvSpeedEnc = new DecimalEncodedValueImpl("speed", 5, 5, false);
private final EncodingManager carManager = EncodingManager.start().add(carAccessEnc).add(carAvSpeedEnc).build();
// mixed encoding: independent car and foot access/speed in one manager,
// used by the roundabout fixture
private final BooleanEncodedValue mixedCarAccessEnc = new SimpleBooleanEncodedValue("mixed_car_access", true);
private final DecimalEncodedValue mixedCarSpeedEnc = new DecimalEncodedValueImpl("mixed_car_speed", 5, 5, false);
private final BooleanEncodedValue mixedFootAccessEnc = new SimpleBooleanEncodedValue("mixed_foot_access", true);
private final DecimalEncodedValue mixedFootSpeedEnc = new DecimalEncodedValueImpl("mixed_foot_speed", 4, 1, false);
private final EncodingManager mixedEncodingManager = EncodingManager.start().add(mixedCarAccessEnc).add(mixedCarSpeedEnc).add(mixedFootAccessEnc).add(mixedFootSpeedEnc).build();
// US-English translations so the expected instruction strings are stable
private final TranslationMap trMap = TranslationMapTest.SINGLETON;
private final Translation tr = trMap.getWithFallBack(Locale.US);
// shared fixtures, built once per test instance
private final RoundaboutGraph roundaboutGraph = new RoundaboutGraph();
private final Graph pathDetailGraph = generatePathDetailsGraph();
// A freshly created Path reports "not found" with zero distance and no nodes.
@Test
public void testFound() {
    BaseGraph g = new BaseGraph.Builder(carManager).create();
    Path p = new Path(g);
    assertFalse(p.isFound());
    assertEquals(0, p.getDistance(), 1e-7);
    assertEquals(0, p.calcNodes().size());
}
// End-to-end check of point/instruction extraction on a tiny 3-node chain:
// verifies point lists, per-instruction distance/time/length, that the
// instruction lengths sum to points-1, and the same again after adding a
// junction (forcing a turn) and when traversing the path in reverse.
@Test
public void testWayList() {
    BaseGraph g = new BaseGraph.Builder(carManager).create();
    NodeAccess na = g.getNodeAccess();
    na.setNode(0, 0.0, 0.1);
    na.setNode(1, 1.0, 0.1);
    na.setNode(2, 2.0, 0.1);
    // edge 0-1: 1000 m at 10 km/h, with two intermediate geometry points
    EdgeIteratorState edge1 = g.edge(0, 1).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 10.0);
    edge1.setWayGeometry(Helper.createPointList(8, 1, 9, 1));
    // edge 2-1: 2000 m at 50 km/h
    EdgeIteratorState edge2 = g.edge(2, 1).setDistance(2000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
    edge2.setWayGeometry(Helper.createPointList(11, 1, 10, 1));
    // hand-built SPT entry chain representing the route 0 -> 1 -> 2
    SPTEntry e1 = new SPTEntry(edge2.getEdge(), 2, 1, new SPTEntry(edge1.getEdge(), 1, 1, new SPTEntry(0, 1)));
    FastestWeighting weighting = new FastestWeighting(carAccessEnc, carAvSpeedEnc);
    Path path = extractPath(g, weighting, e1);
    // 0-1-2
    assertPList(Helper.createPointList(0, 0.1, 8, 1, 9, 1, 1, 0.1, 10, 1, 11, 1, 2, 0.1), path.calcPoints());
    InstructionList instr = InstructionsFromEdges.calcInstructions(path, path.graph, weighting, carManager, tr);
    Instruction tmp = instr.get(0);
    assertEquals(3000.0, tmp.getDistance(), 0.0);
    assertEquals(504000L, tmp.getTime()); // 1000m@10km/h + 2000m@50km/h in ms
    assertEquals("continue", tmp.getTurnDescription(tr));
    assertEquals(6, tmp.getLength());
    tmp = instr.get(1);
    assertEquals(0.0, tmp.getDistance(), 0.0);
    assertEquals(0L, tmp.getTime());
    assertEquals("arrive at destination", tmp.getTurnDescription(tr));
    assertEquals(0, tmp.getLength());
    // instruction lengths must cover every point interval exactly once
    int acc = 0;
    for (Instruction instruction : instr) {
        acc += instruction.getLength();
    }
    assertEquals(path.calcPoints().size() - 1, acc);
    // force minor change for instructions: name edge2 and add a junction at
    // node 1 so a turn instruction appears
    edge2.setKeyValues(createKV("name", "2"));
    na.setNode(3, 1.0, 1.0);
    g.edge(1, 3).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 10.0);
    e1 = new SPTEntry(edge2.getEdge(), 2, 1,
            new SPTEntry(edge1.getEdge(), 1, 1,
                    new SPTEntry(0, 1)
            )
    );
    path = extractPath(g, weighting, e1);
    instr = InstructionsFromEdges.calcInstructions(path, path.graph, weighting, carManager, tr);
    tmp = instr.get(0);
    assertEquals(1000.0, tmp.getDistance(), 0);
    assertEquals(360000L, tmp.getTime());
    assertEquals("continue", tmp.getTurnDescription(tr));
    assertEquals(3, tmp.getLength());
    tmp = instr.get(1);
    assertEquals(2000.0, tmp.getDistance(), 0);
    assertEquals(144000L, tmp.getTime());
    assertEquals("turn sharp right onto 2", tmp.getTurnDescription(tr));
    assertEquals(3, tmp.getLength());
    acc = 0;
    for (Instruction instruction : instr) {
        acc += instruction.getLength();
    }
    assertEquals(path.calcPoints().size() - 1, acc);
    // now reverse order
    e1 = new SPTEntry(edge1.getEdge(), 0, 1, new SPTEntry(edge2.getEdge(), 1, 1, new SPTEntry(2, 1)));
    path = extractPath(g, weighting, e1);
    // 2-1-0
    assertPList(Helper.createPointList(2, 0.1, 11, 1, 10, 1, 1, 0.1, 9, 1, 8, 1, 0, 0.1), path.calcPoints());
    instr = InstructionsFromEdges.calcInstructions(path, path.graph, weighting, carManager, tr);
    tmp = instr.get(0);
    assertEquals(2000.0, tmp.getDistance(), 0);
    assertEquals(144000L, tmp.getTime());
    assertEquals("continue onto 2", tmp.getTurnDescription(tr));
    assertEquals(3, tmp.getLength());
    tmp = instr.get(1);
    assertEquals(1000.0, tmp.getDistance(), 0);
    assertEquals(360000L, tmp.getTime());
    assertEquals("turn sharp left", tmp.getTurnDescription(tr));
    assertEquals(3, tmp.getLength());
    acc = 0;
    for (Instruction instruction : instr) {
        acc += instruction.getLength();
    }
    assertEquals(path.calcPoints().size() - 1, acc);
}
// Builds a small graph where the route 0-1-2-3-4 passes three junctions, each of
// which offers an alternative edge towards node 5 so that a real turn decision
// exists, then verifies the generated turn signs along the route.
@Test
public void testFindInstruction() {
BaseGraph g = new BaseGraph.Builder(carManager).create();
NodeAccess na = g.getNodeAccess();
// route nodes 0..4; node 5 is the common endpoint of the alternative edges
na.setNode(0, 0.0, 0.0);
na.setNode(1, 5.0, 0.0);
na.setNode(2, 5.0, 0.5);
na.setNode(3, 10.0, 0.5);
na.setNode(4, 7.5, 0.25);
na.setNode(5, 5.0, 1.0);
// the four named edges form the actual route
EdgeIteratorState edge1 = g.edge(0, 1).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge1.setWayGeometry(Helper.createPointList());
edge1.setKeyValues(createKV("name", "Street 1"));
EdgeIteratorState edge2 = g.edge(1, 2).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge2.setWayGeometry(Helper.createPointList());
edge2.setKeyValues(createKV("name", "Street 2"));
EdgeIteratorState edge3 = g.edge(2, 3).setDistance(1000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge3.setWayGeometry(Helper.createPointList());
edge3.setKeyValues(createKV("name", "Street 3"));
EdgeIteratorState edge4 = g.edge(3, 4).setDistance(500).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
edge4.setWayGeometry(Helper.createPointList());
edge4.setKeyValues(createKV("name", "Street 4"));
// alternative edges branching towards node 5 force a turn instruction at nodes 1, 2 and 3
g.edge(1, 5).setDistance(10000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
g.edge(2, 5).setDistance(10000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
g.edge(3, 5).setDistance(100000).set(carAccessEnc, true, true).set(carAvSpeedEnc, 50.0);
// build the shortest-path tree entry chain 0 -> 1 -> 2 -> 3 -> 4 by hand
SPTEntry e1 =
new SPTEntry(edge4.getEdge(), 4, 1,
new SPTEntry(edge3.getEdge(), 3, 1,
new SPTEntry(edge2.getEdge(), 2, 1,
new SPTEntry(edge1.getEdge(), 1, 1,
new SPTEntry(0, 1)
))));
FastestWeighting weighting = new FastestWeighting(carAccessEnc, carAvSpeedEnc);
Path path = extractPath(g, weighting, e1);
InstructionList il = InstructionsFromEdges.calcInstructions(path, path.graph, weighting, carManager, tr);
// expect: start, three turns, arrival
assertEquals(5, il.size());
assertEquals(Instruction.CONTINUE_ON_STREET, il.get(0).getSign());
assertEquals(Instruction.TURN_RIGHT, il.get(1).getSign());
assertEquals(Instruction.TURN_LEFT, il.get(2).getSign());
assertEquals(Instruction.TURN_SHARP_LEFT, il.get(3).getSign());
assertEquals(Instruction.FINISH, il.get(4).getSign());
}
/**
 * Test roundabout instructions for different profiles: the same scenario must
 * produce identical instructions for the car and the foot encoders.
 */
@Test
void testCalcInstructionsRoundabout() {
// run the shared scenario once per profile
calcInstructionsRoundabout(mixedCarAccessEnc, mixedCarSpeedEnc);
calcInstructionsRoundabout(mixedFootAccessEnc, mixedFootSpeedEnc);
}
/**
 * Shared roundabout scenario: checks exit numbering and turn angle both when
 * leaving towards a new street (1 -> 8) and when continuing on the same street
 * through the roundabout (1 -> 7).
 */
public void calcInstructionsRoundabout(BooleanEncodedValue accessEnc, DecimalEncodedValue speedEnc) {
    ShortestWeighting shortest = new ShortestWeighting(accessEnc, speedEnc);
    // 1 -> 8 leaves the roundabout at its third exit
    Path path = new Dijkstra(roundaboutGraph.g, shortest, TraversalMode.NODE_BASED).calcPath(1, 8);
    assertTrue(path.isFound());
    assertEquals("[1, 2, 3, 4, 5, 8]", path.calcNodes().toString());
    InstructionList instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, mixedEncodingManager, tr);
    List<String> descriptions = getTurnDescriptions(instructions);
    assertEquals(Arrays.asList("continue onto MainStreet 1 2",
            "At roundabout, take exit 3 onto 5-8",
            "arrive at destination"), descriptions);
    // the reported turn angle must match the geometric angle between entry and exit
    RoundaboutInstruction roundabout = (RoundaboutInstruction) instructions.get(1);
    assertEquals(roundaboutGraph.getAngle(1, 2, 5, 8), roundabout.getTurnAngle(), 0.01);

    // continuing the same street through the roundabout (1 -> 7)
    path = new Dijkstra(roundaboutGraph.g, shortest, TraversalMode.NODE_BASED).calcPath(1, 7);
    instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, mixedEncodingManager, tr);
    descriptions = getTurnDescriptions(instructions);
    assertEquals(Arrays.asList("continue onto MainStreet 1 2",
            "At roundabout, take exit 2 onto MainStreet 4 7",
            "arrive at destination"), descriptions);
    // angle check for the second exit as well
    roundabout = (RoundaboutInstruction) instructions.get(1);
    assertEquals(roundaboutGraph.getAngle(1, 2, 4, 7), roundabout.getTurnAngle(), 0.01);
}
/**
 * Route starting right in front of the roundabout: the first emitted
 * instruction is the roundabout instruction itself.
 */
@Test
public void testCalcInstructionsRoundaboutBegin() {
    ShortestWeighting shortest = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path path = new Dijkstra(roundaboutGraph.g, shortest, TraversalMode.NODE_BASED).calcPath(2, 8);
    assertTrue(path.isFound());
    InstructionList instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, mixedEncodingManager, tr);
    List<String> descriptions = getTurnDescriptions(instructions);
    assertEquals(Arrays.asList("At roundabout, take exit 3 onto 5-8", "arrive at destination"), descriptions);
}
/**
 * Entering the roundabout and leaving it again directly at the next exit.
 */
@Test
public void testCalcInstructionsRoundaboutDirectExit() {
    roundaboutGraph.inverse3to9();
    ShortestWeighting shortest = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path path = new Dijkstra(roundaboutGraph.g, shortest, TraversalMode.NODE_BASED).calcPath(6, 8);
    assertTrue(path.isFound());
    InstructionList instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, mixedEncodingManager, tr);
    List<String> descriptions = getTurnDescriptions(instructions);
    assertEquals(Arrays.asList("continue onto 3-6",
            "At roundabout, take exit 3 onto 5-8",
            "arrive at destination"), descriptions);
    // restore the shared graph so later tests see the original edge directions
    roundaboutGraph.inverse3to9();
}
/**
 * Average-speed path details along 1-2-3-4-5: one detail entry per edge with
 * the configured speeds and contiguous first/last point indices.
 */
@Test
public void testCalcAverageSpeedDetails() {
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
    assertTrue(p.isFound());
    Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
            Arrays.asList(AVERAGE_SPEED), new PathDetailsBuilderFactory(), 0);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == 1)
    assertEquals(1, details.size());
    List<PathDetail> averageSpeedDetails = details.get(AVERAGE_SPEED);
    assertEquals(4, averageSpeedDetails.size());
    assertEquals(45.0, averageSpeedDetails.get(0).getValue());
    assertEquals(90.0, averageSpeedDetails.get(1).getValue());
    assertEquals(10.0, averageSpeedDetails.get(2).getValue());
    assertEquals(45.0, averageSpeedDetails.get(3).getValue());
    // the detail intervals must cover the path points without gaps
    assertEquals(0, averageSpeedDetails.get(0).getFirst());
    assertEquals(1, averageSpeedDetails.get(1).getFirst());
    assertEquals(2, averageSpeedDetails.get(2).getFirst());
    assertEquals(3, averageSpeedDetails.get(3).getFirst());
    assertEquals(4, averageSpeedDetails.get(3).getLast());
}
/**
 * Regression test for issue 1848: average-speed details on paths with very
 * short edges; the reversed path starts with a 'null'-valued detail.
 */
@Test
public void testCalcAverageSpeedDetailsWithShortDistances_issue1848() {
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 6);
    assertTrue(p.isFound());
    Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
            Arrays.asList(AVERAGE_SPEED), new PathDetailsBuilderFactory(), 0);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == 1)
    assertEquals(1, details.size());
    List<PathDetail> averageSpeedDetails = details.get(AVERAGE_SPEED);
    assertEquals(4, averageSpeedDetails.size());
    // reverse path includes 'null' value as first
    p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(6, 1);
    assertTrue(p.isFound());
    details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
            Arrays.asList(AVERAGE_SPEED), new PathDetailsBuilderFactory(), 0);
    assertEquals(1, details.size());
    averageSpeedDetails = details.get(AVERAGE_SPEED);
    assertEquals(5, averageSpeedDetails.size());
    assertNull(averageSpeedDetails.get(0).getValue());
}
/**
 * Street-name path details along 1-2-3-4-5: one entry per edge carrying the
 * edge's name, with contiguous first/last point indices.
 */
@Test
public void testCalcStreetNameDetails() {
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
    assertTrue(p.isFound());
    Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
            Arrays.asList(STREET_NAME), new PathDetailsBuilderFactory(), 0);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == 1);
    // the original also repeated this size assertion twice — once is enough
    assertEquals(1, details.size());
    List<PathDetail> streetNameDetails = details.get(STREET_NAME);
    assertEquals(4, streetNameDetails.size());
    assertEquals("1-2", streetNameDetails.get(0).getValue());
    assertEquals("2-3", streetNameDetails.get(1).getValue());
    assertEquals("3-4", streetNameDetails.get(2).getValue());
    assertEquals("4-5", streetNameDetails.get(3).getValue());
    assertEquals(0, streetNameDetails.get(0).getFirst());
    assertEquals(1, streetNameDetails.get(1).getFirst());
    assertEquals(2, streetNameDetails.get(2).getFirst());
    assertEquals(3, streetNameDetails.get(3).getFirst());
    assertEquals(4, streetNameDetails.get(3).getLast());
}
/**
 * Edge-id path details along 1-2-3-4-5: one entry per traversed edge carrying
 * its edge id.
 */
@Test
public void testCalcEdgeIdDetails() {
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
    assertTrue(p.isFound());
    Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
            Arrays.asList(EDGE_ID), new PathDetailsBuilderFactory(), 0);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == 1)
    assertEquals(1, details.size());
    List<PathDetail> edgeIdDetails = details.get(EDGE_ID);
    assertEquals(4, edgeIdDetails.size());
    assertEquals(0, edgeIdDetails.get(0).getValue());
    // This is out of order because we don't create the edges in order
    assertEquals(2, edgeIdDetails.get(1).getValue());
    assertEquals(3, edgeIdDetails.get(2).getValue());
    assertEquals(1, edgeIdDetails.get(3).getValue());
    assertEquals(0, edgeIdDetails.get(0).getFirst());
    assertEquals(1, edgeIdDetails.get(1).getFirst());
    assertEquals(2, edgeIdDetails.get(2).getFirst());
    assertEquals(3, edgeIdDetails.get(3).getFirst());
    assertEquals(4, edgeIdDetails.get(3).getLast());
}
/**
 * Edge-key path details when traversing 1 -> 5; compare with
 * {@link #testCalcEdgeKeyDetailsBackward()} which yields the counterpart keys.
 */
@Test
public void testCalcEdgeKeyDetailsForward() {
    ShortestWeighting shortest = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path path = new Dijkstra(pathDetailGraph, shortest, TraversalMode.NODE_BASED).calcPath(1, 5);
    assertTrue(path.isFound());
    Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(path, carManager, shortest,
            Arrays.asList(EDGE_KEY), new PathDetailsBuilderFactory(), 0);
    List<PathDetail> edgeKeys = details.get(EDGE_KEY);
    assertEquals(4, edgeKeys.size());
    assertEquals(0, edgeKeys.get(0).getValue());
    assertEquals(4, edgeKeys.get(1).getValue());
    assertEquals(6, edgeKeys.get(2).getValue());
    assertEquals(2, edgeKeys.get(3).getValue());
}
/**
 * Edge-key path details when traversing the same route in reverse (5 -> 1);
 * the keys differ from the forward run in {@link #testCalcEdgeKeyDetailsForward()}.
 */
@Test
public void testCalcEdgeKeyDetailsBackward() {
    ShortestWeighting shortest = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path path = new Dijkstra(pathDetailGraph, shortest, TraversalMode.NODE_BASED).calcPath(5, 1);
    assertTrue(path.isFound());
    Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(path, carManager, shortest,
            Arrays.asList(EDGE_KEY), new PathDetailsBuilderFactory(), 0);
    List<PathDetail> edgeKeys = details.get(EDGE_KEY);
    assertEquals(4, edgeKeys.size());
    assertEquals(3, edgeKeys.get(0).getValue());
    assertEquals(7, edgeKeys.get(1).getValue());
    assertEquals(5, edgeKeys.get(2).getValue());
    assertEquals(1, edgeKeys.get(3).getValue());
}
/**
 * Time path details along 1-2-3-4-5: per-edge travel time in milliseconds with
 * contiguous first/last point indices.
 */
@Test
public void testCalcTimeDetails() {
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
    assertTrue(p.isFound());
    Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
            Arrays.asList(TIME), new PathDetailsBuilderFactory(), 0);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == 1)
    assertEquals(1, details.size());
    List<PathDetail> timeDetails = details.get(TIME);
    assertEquals(4, timeDetails.size());
    assertEquals(400L, timeDetails.get(0).getValue());
    assertEquals(200L, timeDetails.get(1).getValue());
    assertEquals(3600L, timeDetails.get(2).getValue());
    assertEquals(400L, timeDetails.get(3).getValue());
    assertEquals(0, timeDetails.get(0).getFirst());
    assertEquals(1, timeDetails.get(1).getFirst());
    assertEquals(2, timeDetails.get(2).getFirst());
    assertEquals(3, timeDetails.get(3).getFirst());
    assertEquals(4, timeDetails.get(3).getLast());
}
/**
 * Distance path details along 1-2-3-4-5: per-edge distance in meters.
 */
@Test
public void testCalcDistanceDetails() {
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(pathDetailGraph, weighting, TraversalMode.NODE_BASED).calcPath(1, 5);
    assertTrue(p.isFound());
    Map<String, List<PathDetail>> details = PathDetailsFromEdges.calcDetails(p, carManager, weighting,
            Arrays.asList(DISTANCE), new PathDetailsBuilderFactory(), 0);
    // assertEquals reports expected vs. actual on failure, unlike assertTrue(x == 1)
    assertEquals(1, details.size());
    List<PathDetail> distanceDetails = details.get(DISTANCE);
    // guard the size before indexing, so a short list fails with a clear message
    assertEquals(4, distanceDetails.size());
    assertEquals(5D, distanceDetails.get(0).getValue());
    assertEquals(5D, distanceDetails.get(1).getValue());
    assertEquals(10D, distanceDetails.get(2).getValue());
    assertEquals(5D, distanceDetails.get(3).getValue());
}
/**
 * Roundabout where one of the connected edges is not an exit: the remaining
 * exits are renumbered accordingly.
 */
@Test
public void testCalcInstructionsRoundabout2() {
    roundaboutGraph.inverse3to6();
    ShortestWeighting shortest = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path path = new Dijkstra(roundaboutGraph.g, shortest, TraversalMode.NODE_BASED).calcPath(1, 8);
    assertTrue(path.isFound());
    InstructionList instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, mixedEncodingManager, tr);
    List<String> descriptions = getTurnDescriptions(instructions);
    // with edge 3-6 inverted only two exits remain, so 5-8 becomes exit 2
    assertEquals(Arrays.asList("continue onto MainStreet 1 2",
            "At roundabout, take exit 2 onto 5-8",
            "arrive at destination"), descriptions);
    // the reported turn angle must still match the geometry
    RoundaboutInstruction roundabout = (RoundaboutInstruction) instructions.get(1);
    assertEquals(roundaboutGraph.getAngle(1, 2, 5, 8), roundabout.getTurnAngle(), 0.01);
    // restore the shared graph for subsequent tests
    roundaboutGraph.inverse3to6();
}
// Regression test for issue 353: a roundabout entered from a side street (node 6)
// must still produce the correct exit number. The roundabout edges are partly
// created in reversed node order (e.g. 3-2, 4-3) — the one-way direction around
// the circle matters for exit counting.
@Test
public void testCalcInstructionsRoundaboutIssue353() {
final BaseGraph graph = new BaseGraph.Builder(carManager).create();
final NodeAccess na = graph.getNodeAccess();
//
// 8
// \
// 5
// / \
// 11- 1 - 2 4 - 7
// | \ /
// 10 -9 -3
// \ |
// --- 6
na.setNode(1, 52.514, 13.348);
na.setNode(2, 52.514, 13.349);
na.setNode(3, 52.5135, 13.35);
na.setNode(4, 52.514, 13.351);
na.setNode(5, 52.5145, 13.351);
na.setNode(6, 52.513, 13.35);
na.setNode(7, 52.514, 13.352);
na.setNode(8, 52.515, 13.351);
na.setNode(9, 52.5135, 13.349);
na.setNode(10, 52.5135, 13.348);
na.setNode(11, 52.514, 13.347);
GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(2, 1).setDistance(5)).setKeyValues(createKV("name", "MainStreet 2 1"));
GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(1, 11).setDistance(5)).setKeyValues(createKV("name", "MainStreet 1 11"));
// roundabout
// every circle edge is one-way and flagged with the Roundabout encoded value
EdgeIteratorState tmpEdge;
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(3, 9).setDistance(2)).setKeyValues(createKV("name", "3-9"));
BooleanEncodedValue carManagerRoundabout = carManager.getBooleanEncodedValue(Roundabout.KEY);
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(9, 10).setDistance(2)).setKeyValues(createKV("name", "9-10"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(6, 10).setDistance(2)).setKeyValues(createKV("name", "6-10"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(10, 1).setDistance(2)).setKeyValues(createKV("name", "10-1"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(3, 2).setDistance(5)).setKeyValues(createKV("name", "2-3"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(4, 3).setDistance(5)).setKeyValues(createKV("name", "3-4"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(5, 4).setDistance(5)).setKeyValues(createKV("name", "4-5"));
tmpEdge.set(carManagerRoundabout, true);
tmpEdge = GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, graph.edge(2, 5).setDistance(5)).setKeyValues(createKV("name", "5-2"));
tmpEdge.set(carManagerRoundabout, true);
// bidirectional approach/exit streets outside the circle
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(4, 7).setDistance(5)).setKeyValues(createKV("name", "MainStreet 4 7"));
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(5, 8).setDistance(5)).setKeyValues(createKV("name", "5-8"));
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(3, 6).setDistance(5)).setKeyValues(createKV("name", "3-6"));
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED)
.calcPath(6, 11);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
List<String> tmpList = getTurnDescriptions(wayList);
// entering at 6, the first (and only counted) exit leads onto MainStreet 1 11
assertEquals(Arrays.asList("At roundabout, take exit 1 onto MainStreet 1 11",
"arrive at destination"),
tmpList);
}
/**
 * Clockwise roundabout (e.g. left-hand traffic): the same physical exit is
 * reached as the first exit instead of the third.
 */
@Test
public void testCalcInstructionsRoundaboutClockwise() {
    roundaboutGraph.setRoundabout(true);
    ShortestWeighting shortest = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path path = new Dijkstra(roundaboutGraph.g, shortest, TraversalMode.NODE_BASED).calcPath(1, 8);
    assertTrue(path.isFound());
    InstructionList instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, mixedEncodingManager, tr);
    List<String> descriptions = getTurnDescriptions(instructions);
    assertEquals(Arrays.asList("continue onto MainStreet 1 2",
            "At roundabout, take exit 1 onto 5-8",
            "arrive at destination"), descriptions);
    // the reported turn angle must match the geometry
    RoundaboutInstruction roundabout = (RoundaboutInstruction) instructions.get(1);
    assertEquals(roundaboutGraph.getAngle(1, 2, 5, 8), roundabout.getTurnAngle(), 0.01);
}
/**
 * A straight route over several edges (including a street-name change) must not
 * produce intermediate CONTINUE instructions.
 */
@Test
public void testCalcInstructionsIgnoreContinue() {
    ShortestWeighting shortest = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path path = new Dijkstra(roundaboutGraph.g, shortest, TraversalMode.NODE_BASED).calcPath(4, 11);
    assertTrue(path.isFound());
    InstructionList instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, mixedEncodingManager, tr);
    // only the start and the arrival instruction remain
    assertEquals(2, instructions.size());
}
/**
 * The street bends, but there is no alternative at the junction, so no turn
 * instruction should be generated.
 */
@Test
public void testCalcInstructionsIgnoreTurnIfNoAlternative() {
    ShortestWeighting shortest = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
    Path path = new Dijkstra(roundaboutGraph.g, shortest, TraversalMode.NODE_BASED).calcPath(10, 12);
    assertTrue(path.isFound());
    InstructionList instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, mixedEncodingManager, tr);
    // only the start and the arrival instruction remain
    assertEquals(2, instructions.size());
}
/**
 * Fork where both branches carry the same street name: a turn instruction must
 * still be enforced, because the name alone cannot disambiguate the fork.
 */
@Test
public void testCalcInstructionForForkWithSameName() {
    final BaseGraph graph = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = graph.getNodeAccess();
    // Actual example: point=48.982618%2C13.122021&point=48.982336%2C13.121002
    // 1-2 & 2-4 have the same Street name, but other from that, it would be hard to see the difference
    // We have to enforce a turn instruction here
    //       3
    //        \
    //  2 -- 1
    //       /
    //      4
    na.setNode(1, 48.982618, 13.122021);
    na.setNode(2, 48.982565, 13.121597);
    na.setNode(3, 48.982611, 13.121012);
    na.setNode(4, 48.982336, 13.121002);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "Regener Weg"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "Regener Weg"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 3).setDistance(5));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 4);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // use the named sign constant instead of the magic number -7
    assertEquals(Instruction.KEEP_LEFT, wayList.get(1).getSign());
}
// A motorway fork where the continuing branch is also a motorway and the other
// branch is only a motorway_link: no instruction should be generated, since
// "stay on the motorway" is the obvious default.
@Test
public void testCalcInstructionForMotorwayFork() {
final BaseGraph graph = new BaseGraph.Builder(carManager).create();
final NodeAccess na = graph.getNodeAccess();
// Actual example: point=48.909071%2C8.647136&point=48.908789%2C8.649244
// 1-2 & 2-4 is a motorway, 2-3 is a motorway_link
// We should skip the instruction here
// 1 ---- 2 ---- 4
// \
// 3
na.setNode(1, 48.909071, 8.647136);
na.setNode(2, 48.908962, 8.647978);
na.setNode(3, 48.908867, 8.648155);
na.setNode(4, 48.908789, 8.649244);
// road class + link flag distinguish the motorway from the off-ramp
EnumEncodedValue<RoadClass> roadClassEnc = carManager.getEnumEncodedValue(RoadClass.KEY, RoadClass.class);
BooleanEncodedValue roadClassLinkEnc = carManager.getBooleanEncodedValue(RoadClassLink.KEY);
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "A 8")).set(roadClassEnc, RoadClass.MOTORWAY).set(roadClassLinkEnc, false);
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "A 8")).set(roadClassEnc, RoadClass.MOTORWAY).set(roadClassLinkEnc, false);
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 3).setDistance(5)).set(roadClassEnc, RoadClass.MOTORWAY).set(roadClassLinkEnc, true);
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 4);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
// only start and arrival — the fork produces no extra instruction
assertEquals(2, wayList.size());
}
/**
 * Entering a one-way motorway from a ramp: merging onto the highway must not
 * produce a turn instruction.
 */
@Test
public void testCalcInstructionsEnterMotorway() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess nodes = g.getNodeAccess();
    // Actual example: point=48.630533%2C9.459416&point=48.630544%2C9.459829
    // 1-2-3 is a one-way motorway, 4 is the on-ramp:
    //  1 ->- 2 ->- 3
    //       /
    //      4
    nodes.setNode(1, 48.630647, 9.459041);
    nodes.setNode(2, 48.630586, 9.459604);
    nodes.setNode(3, 48.630558, 9.459851);
    nodes.setNode(4, 48.63054, 9.459406);
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(4, 2).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    ShortestWeighting shortest = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path path = new Dijkstra(g, shortest, TraversalMode.NODE_BASED).calcPath(4, 3);
    assertTrue(path.isFound());
    InstructionList instructions = InstructionsFromEdges.calcInstructions(path, path.graph, shortest, carManager, tr);
    // no turn instruction for entering the highway — just start and arrival
    assertEquals(2, instructions.size());
}
/**
 * Motorway junction: when following 1-2-3 past the branch towards 4, a
 * keep-right would be the ideal instruction at node 2.
 */
@Test
public void testCalcInstructionsMotorwayJunction() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Actual example: point=48.70672%2C9.164266&point=48.706805%2C9.162995
    // A typical motorway junction, when following 1-2-3, there should be a keep right at 2
    //             -- 4
    //            /
    // 1 -- 2 -- 3
    na.setNode(1, 48.70672, 9.164266);
    na.setNode(2, 48.706741, 9.163719);
    na.setNode(3, 48.706805, 9.162995);
    na.setNode(4, 48.706705, 9.16329);
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "A 8"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 3);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // TODO this should be a keep_right
    // named constant instead of the magic number 0
    assertEquals(Instruction.CONTINUE_ON_STREET, wayList.get(1).getSign());
}
/**
 * Turning from a side street onto a one-way road: a right-turn instruction is
 * expected at the junction.
 */
@Test
public void testCalcInstructionsOntoOneway() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Actual example: point=-33.824566%2C151.187834&point=-33.82441%2C151.188231
    // 1-2-3 is a oneway
    // 1 ->- 2 ->- 3
    //       |
    //       4
    na.setNode(1, -33.824245, 151.187866);
    na.setNode(2, -33.824335, 151.188017);
    na.setNode(3, -33.824415, 151.188177);
    na.setNode(4, -33.824437, 151.187925);
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "Pacific Highway"));
    GHUtility.setSpeed(60, true, false, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "Pacific Highway"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(4, 2).setDistance(5)).setKeyValues(createKV("name", "Greenwich Road"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(4, 3);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // named constant instead of the magic number 2
    assertEquals(Instruction.TURN_RIGHT, wayList.get(1).getSign());
}
// Regression test for issue 1047: at a junction where the named road bends away
// (1-2-3 is "B 156") and a differently-classed road branches off straight ahead
// (2-4 is "S 108"), both 1-2-4 and 1-2-3 must produce an instruction.
@Test
public void testCalcInstructionIssue1047() {
final BaseGraph g = new BaseGraph.Builder(carManager).create();
final NodeAccess na = g.getNodeAccess();
// Actual example: point=51.367105%2C14.491246&point=51.369048%2C14.483092
// 1-2 & 2-3 is a road that is turning right, 2-4 is a that is branching off.
// When driving 1-2-4, we should create an instruction notifying the user to continue straight instead of turning and following the road
// When driving 1-2-3, we should create an instruction as well
//
// 1 ---- 2 ---- 4
// |
// 3
na.setNode(1, 51.367544, 14.488209);
na.setNode(2, 51.368046, 14.486525);
na.setNode(3, 51.36875, 14.487019);
na.setNode(4, 51.368428, 14.485173);
// road class distinguishes the primary road from the secondary branch
EnumEncodedValue<RoadClass> roadClassEnc = carManager.getEnumEncodedValue(RoadClass.KEY, RoadClass.class);
BooleanEncodedValue roadClassLinkEnc = carManager.getBooleanEncodedValue(RoadClassLink.KEY);
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "B 156")).set(roadClassEnc, RoadClass.PRIMARY).set(roadClassLinkEnc, false);
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "S 108")).set(roadClassEnc, RoadClass.SECONDARY).set(roadClassLinkEnc, false);
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "B 156")).set(roadClassEnc, RoadClass.PRIMARY).set(roadClassLinkEnc, false);
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 4);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
// leaving the B 156 onto the S 108 must yield an instruction
assertEquals(3, wayList.size());
p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 3);
assertTrue(p.isFound());
wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
// following the bending B 156 must yield an instruction too
assertEquals(3, wayList.size());
}
/**
 * When continuing straight means leaving the current street (the named street
 * bends towards node 3), an instruction must be shown.
 */
@Test
public void testCalcInstructionContinueLeavingStreet() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // When leaving the current street via a Continue, we should show it
    //         3
    //          \
    //  4 - 2 -- 1
    na.setNode(1, 48.982618, 13.122021);
    na.setNode(2, 48.982565, 13.121597);
    na.setNode(3, 48.982611, 13.121012);
    na.setNode(4, 48.982565, 13.121002);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "Regener Weg"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 4).setDistance(5));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "Regener Weg"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(1, 4);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // use the named sign constant instead of the magic number -7
    assertEquals(Instruction.KEEP_LEFT, wayList.get(1).getSign());
}
/**
 * A junction where the direction is ambiguous without an instruction: expect a
 * slight-left turn (Google Maps and Bing show a turn here, OSRM does not).
 */
@Test
public void testCalcInstructionSlightTurn() {
    final BaseGraph g = new BaseGraph.Builder(carManager).create();
    final NodeAccess na = g.getNodeAccess();
    // Real Situation: point=48.411927%2C15.599197&point=48.412094%2C15.598816
    // When reaching this Crossing, you cannot know if you should turn left or right
    // 1 ---2--- 3
    //       \
    //        4
    na.setNode(1, 48.412094, 15.598816);
    na.setNode(2, 48.412055, 15.599068);
    na.setNode(3, 48.412034, 15.599411);
    na.setNode(4, 48.411927, 15.599197);
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "Stöhrgasse"));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5));
    GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 4).setDistance(5)).setKeyValues(createKV("name", "Stöhrgasse"));
    ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
    Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
            .calcPath(4, 1);
    assertTrue(p.isFound());
    InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
    assertEquals(3, wayList.size());
    // use the named sign constant instead of the magic number -1
    assertEquals(Instruction.TURN_SLIGHT_LEFT, wayList.get(1).getSign());
}
// Dual-carriageway U-turn: 1-2-3 and 6-5-4 are the two one-way carriageways of
// "Olgastraße", linked by the bidirectional "Neithardtstraße" at 2/5. Routing
// 1 -> 4 requires a U-turn to the left at node 2.
@Test
public void testUTurnLeft() {
final BaseGraph g = new BaseGraph.Builder(carManager).create();
final NodeAccess na = g.getNodeAccess();
// Real Situation: point=48.402116%2C9.994367&point=48.402198%2C9.99507
// 7
// |
// 4----5----6
// |
// 1----2----3
na.setNode(1, 48.402116, 9.994367);
na.setNode(2, 48.402198, 9.99507);
na.setNode(3, 48.402344, 9.996266);
na.setNode(4, 48.402191, 9.994351);
na.setNode(5, 48.402298, 9.995053);
na.setNode(6, 48.402422, 9.996067);
na.setNode(7, 48.402604, 9.994962);
// one-way carriageways (reverse speed 0 = not accessible backwards)
GHUtility.setSpeed(60, 0, carAccessEnc, carAvSpeedEnc,
g.edge(1, 2).setDistance(5).setKeyValues(createKV("name", "Olgastraße")),
g.edge(2, 3).setDistance(5).setKeyValues(createKV("name", "Olgastraße")),
g.edge(6, 5).setDistance(5).setKeyValues(createKV("name", "Olgastraße")),
g.edge(5, 4).setDistance(5).setKeyValues(createKV("name", "Olgastraße")));
// bidirectional connecting street
GHUtility.setSpeed(60, 60, carAccessEnc, carAvSpeedEnc,
g.edge(2, 5).setDistance(5).setKeyValues(createKV("name", "Neithardtstraße")),
g.edge(5, 7).setDistance(5).setKeyValues(createKV("name", "Neithardtstraße")));
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 4);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
assertEquals(3, wayList.size());
assertEquals(Instruction.U_TURN_LEFT, wayList.get(1).getSign());
}
// Mirror of testUTurnLeft for left-hand traffic (Sydney): the two one-way
// carriageways of "Parramatta Road" are linked by "Larkin Street"; routing
// 1 -> 6 requires a U-turn to the right at node 2.
@Test
public void testUTurnRight() {
final BaseGraph g = new BaseGraph.Builder(carManager).create();
final NodeAccess na = g.getNodeAccess();
// Real Situation: point=-33.885758,151.181472&point=-33.885692,151.181445
// 7
// |
// 4----5----6
// |
// 3----2----1
na.setNode(1, -33.885758, 151.181472);
na.setNode(2, -33.885852, 151.180968);
na.setNode(3, -33.885968, 151.180501);
na.setNode(4, -33.885883, 151.180442);
na.setNode(5, -33.885772, 151.180941);
// NOTE(review): node 7 has exactly the same coordinates as node 6 — looks
// copy-pasted; confirm whether node 7 was meant to lie north of node 5.
na.setNode(6, -33.885692, 151.181445);
na.setNode(7, -33.885692, 151.181445);
// one-way carriageways (reverse speed 0 = not accessible backwards)
GHUtility.setSpeed(60, 0, carAccessEnc, carAvSpeedEnc,
g.edge(1, 2).setDistance(5).setKeyValues(createKV("name", "Parramatta Road")),
g.edge(2, 3).setDistance(5).setKeyValues(createKV("name", "Parramatta Road")),
g.edge(4, 5).setDistance(5).setKeyValues(createKV("name", "Parramatta Road")),
g.edge(5, 6).setDistance(5).setKeyValues(createKV("name", "Parramatta Road")));
// bidirectional connecting street
GHUtility.setSpeed(60, 60, carAccessEnc, carAvSpeedEnc,
g.edge(2, 5).setDistance(5).setKeyValues(createKV("name", "Larkin Street")),
g.edge(5, 7).setDistance(5).setKeyValues(createKV("name", "Larkin Street")));
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 6);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
assertEquals(3, wayList.size());
assertEquals(Instruction.U_TURN_RIGHT, wayList.get(1).getSign());
}
@Test
public void testCalcInstructionsForTurn() {
// The street turns left, but there is not turn
ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
.calcPath(11, 13);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
// Contain start, turn, and finish instruction
assertEquals(3, wayList.size());
// Assert turn right
assertEquals(2, wayList.get(1).getSign());
}
@Test
public void testCalcInstructionsForSlightTurnWithOtherSlightTurn() {
// Test for a fork with two slight turns. Since there are two slight turns, show the turn instruction
ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
.calcPath(12, 16);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
// Contain start, turn, and finish instruction
assertEquals(3, wayList.size());
// Assert turn right
assertEquals(7, wayList.get(1).getSign());
}
@Test
public void testCalcInstructionsForSlightTurnOntoDifferentStreet() {
final BaseGraph g = new BaseGraph.Builder(carManager).create();
final NodeAccess na = g.getNodeAccess();
// Actual example: point=48.76445%2C8.679054&point=48.764152%2C8.678722
// 1
// /
// 2 - 3 - 4
//
na.setNode(1, 48.76423, 8.679103);
na.setNode(2, 48.76417, 8.678647);
na.setNode(3, 48.764149, 8.678926);
na.setNode(4, 48.764085, 8.679183);
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(1, 3).setDistance(5)).setKeyValues(createKV("name", "Talstraße, K 4313"));
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "Calmbacher Straße, K 4312"));
GHUtility.setSpeed(60, true, true, carAccessEnc, carAvSpeedEnc, g.edge(3, 4).setDistance(5)).setKeyValues(createKV("name", "Calmbacher Straße, K 4312"));
ShortestWeighting weighting = new ShortestWeighting(carAccessEnc, carAvSpeedEnc);
Path p = new Dijkstra(g, weighting, TraversalMode.NODE_BASED)
.calcPath(1, 2);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, carManager, tr);
assertEquals(3, wayList.size());
assertEquals(Instruction.TURN_SLIGHT_RIGHT, wayList.get(1).getSign());
}
@Test
public void testIgnoreInstructionsForSlightTurnWithOtherTurn() {
// Test for a fork with one sligh turn and one actual turn. We are going along the slight turn. No turn instruction needed in this case
ShortestWeighting weighting = new ShortestWeighting(mixedCarAccessEnc, mixedCarSpeedEnc);
Path p = new Dijkstra(roundaboutGraph.g, weighting, TraversalMode.NODE_BASED)
.calcPath(16, 19);
assertTrue(p.isFound());
InstructionList wayList = InstructionsFromEdges.calcInstructions(p, p.graph, weighting, mixedEncodingManager, tr);
// Contain start, and finish instruction
assertEquals(2, wayList.size());
}
List<String> getTurnDescriptions(InstructionList instructionJson) {
List<String> list = new ArrayList<>();
for (Instruction instruction : instructionJson) {
list.add(instruction.getTurnDescription(tr));
}
return list;
}
private Graph generatePathDetailsGraph() {
final BaseGraph graph = new BaseGraph.Builder(carManager).create();
final NodeAccess na = graph.getNodeAccess();
na.setNode(1, 52.514, 13.348);
na.setNode(2, 52.514, 13.349);
na.setNode(3, 52.514, 13.350);
na.setNode(4, 52.515, 13.349);
na.setNode(5, 52.516, 13.3452);
na.setNode(6, 52.516, 13.344);
GHUtility.setSpeed(45, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(1, 2).setDistance(5)).setKeyValues(createKV("name", "1-2"));
GHUtility.setSpeed(45, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(4, 5).setDistance(5)).setKeyValues(createKV("name", "4-5"));
GHUtility.setSpeed(90, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "2-3"));
GHUtility.setSpeed(9, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(3, 4).setDistance(10)).setKeyValues(createKV("name", "3-4"));
GHUtility.setSpeed(9, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(5, 6).setDistance(0.01)).setKeyValues(createKV("name", "3-4"));
return graph;
}
private class RoundaboutGraph {
final BaseGraph g;
final NodeAccess na;
final EdgeIteratorState edge3to6, edge3to9;
boolean clockwise = false;
List<EdgeIteratorState> roundaboutEdges = new LinkedList<>();
private RoundaboutGraph() {
g = new BaseGraph.Builder(mixedEncodingManager).create();
na = g.getNodeAccess();
// 18
// 8 14 |
// \ | / 16 - 17
// 5 12 - 13 \-- 19
// / \ | \ 15
// 1 - 2 4 - 7 - 10 - 11
// \ /
// 3
// | \
// 6 [ 9 ] edge 9 is turned off in default mode
na.setNode(1, 52.514, 13.348);
na.setNode(2, 52.514, 13.349);
na.setNode(3, 52.5135, 13.35);
na.setNode(4, 52.514, 13.351);
na.setNode(5, 52.5145, 13.351);
na.setNode(6, 52.513, 13.35);
na.setNode(7, 52.514, 13.352);
na.setNode(8, 52.515, 13.351);
na.setNode(9, 52.513, 13.351);
na.setNode(10, 52.514, 13.353);
na.setNode(11, 52.514, 13.354);
na.setNode(12, 52.515, 13.354);
na.setNode(13, 52.515, 13.355);
na.setNode(14, 52.516, 13.354);
na.setNode(15, 52.516, 13.360);
na.setNode(16, 52.514, 13.360);
na.setNode(17, 52.514, 13.361);
na.setNode(18, 52.513, 13.361);
na.setNode(19, 52.515, 13.368);
// roundabout
roundaboutEdges.add(g.edge(3, 2).setDistance(5).setKeyValues(createKV("name", "2-3")));
roundaboutEdges.add(g.edge(4, 3).setDistance(5).setKeyValues(createKV("name", "3-4")));
roundaboutEdges.add(g.edge(5, 4).setDistance(5).setKeyValues(createKV("name", "4-5")));
roundaboutEdges.add(g.edge(2, 5).setDistance(5).setKeyValues(createKV("name", "5-2")));
List<EdgeIteratorState> bothDir = new ArrayList<>();
List<EdgeIteratorState> oneDir = new ArrayList<>(roundaboutEdges);
bothDir.add(g.edge(1, 2).setDistance(5).setKeyValues(createKV("name", "MainStreet 1 2")));
bothDir.add(g.edge(4, 7).setDistance(5).setKeyValues(createKV("name", "MainStreet 4 7")));
bothDir.add(g.edge(5, 8).setDistance(5).setKeyValues(createKV("name", "5-8")));
bothDir.add(edge3to6 = g.edge(3, 6).setDistance(5).setKeyValues(createKV("name", "3-6")));
oneDir.add(edge3to9 = g.edge(3, 9).setDistance(5).setKeyValues(createKV("name", "3-9")));
bothDir.add(g.edge(7, 10).setDistance(5));
bothDir.add(g.edge(10, 11).setDistance(5));
bothDir.add(g.edge(11, 12).setDistance(5));
bothDir.add(g.edge(12, 13).setDistance(5));
bothDir.add(g.edge(12, 14).setDistance(5));
bothDir.add(g.edge(13, 15).setDistance(5));
bothDir.add(g.edge(13, 16).setDistance(5));
bothDir.add(g.edge(16, 17).setDistance(5));
bothDir.add(g.edge(17, 18).setDistance(5));
bothDir.add(g.edge(17, 19).setDistance(5));
for (EdgeIteratorState edge : bothDir) {
GHUtility.setSpeed(70, 70, mixedCarAccessEnc, mixedCarSpeedEnc, edge);
GHUtility.setSpeed(7, 7, mixedFootAccessEnc, mixedFootSpeedEnc, edge);
}
for (EdgeIteratorState edge : oneDir) {
GHUtility.setSpeed(70, 0, mixedCarAccessEnc, mixedCarSpeedEnc, edge);
GHUtility.setSpeed(7, 0, mixedFootAccessEnc, mixedFootSpeedEnc, edge);
}
setRoundabout(clockwise);
inverse3to9();
}
public void setRoundabout(boolean clockwise) {
BooleanEncodedValue mixedRoundabout = mixedEncodingManager.getBooleanEncodedValue(Roundabout.KEY);
for (EdgeIteratorState edge : roundaboutEdges) {
edge.set(mixedCarAccessEnc, clockwise).setReverse(mixedCarAccessEnc, !clockwise);
edge.set(mixedFootAccessEnc, clockwise).setReverse(mixedFootAccessEnc, !clockwise);
edge.set(mixedRoundabout, true);
}
this.clockwise = clockwise;
}
public void inverse3to9() {
edge3to9.set(mixedCarAccessEnc, !edge3to9.get(mixedCarAccessEnc)).setReverse(mixedCarAccessEnc, false);
edge3to9.set(mixedFootAccessEnc, !edge3to9.get(mixedFootAccessEnc)).setReverse(mixedFootAccessEnc, false);
}
public void inverse3to6() {
edge3to6.set(mixedCarAccessEnc, !edge3to6.get(mixedCarAccessEnc)).setReverse(mixedCarAccessEnc, true);
edge3to6.set(mixedFootAccessEnc, !edge3to6.get(mixedFootAccessEnc)).setReverse(mixedFootAccessEnc, true);
}
private double getAngle(int n1, int n2, int n3, int n4) {
double inOrientation = AngleCalc.ANGLE_CALC.calcOrientation(na.getLat(n1), na.getLon(n1), na.getLat(n2), na.getLon(n2));
double outOrientation = AngleCalc.ANGLE_CALC.calcOrientation(na.getLat(n3), na.getLon(n3), na.getLat(n4), na.getLon(n4));
outOrientation = AngleCalc.ANGLE_CALC.alignOrientation(inOrientation, outOrientation);
double delta = (inOrientation - outOrientation);
delta = clockwise ? (Math.PI + delta) : -1 * (Math.PI - delta);
return delta;
}
}
private static Path extractPath(Graph graph, Weighting weighting, SPTEntry sptEntry) {
return PathExtractor.extractPath(graph, weighting, sptEntry);
}
} | fix test
| core/src/test/java/com/graphhopper/routing/PathTest.java | fix test | <ide><path>ore/src/test/java/com/graphhopper/routing/PathTest.java
<ide> GHUtility.setSpeed(45, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(4, 5).setDistance(5)).setKeyValues(createKV("name", "4-5"));
<ide> GHUtility.setSpeed(90, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(2, 3).setDistance(5)).setKeyValues(createKV("name", "2-3"));
<ide> GHUtility.setSpeed(9, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(3, 4).setDistance(10)).setKeyValues(createKV("name", "3-4"));
<del> GHUtility.setSpeed(9, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(5, 6).setDistance(0.01)).setKeyValues(createKV("name", "3-4"));
<add> GHUtility.setSpeed(9, true, true, carAccessEnc, carAvSpeedEnc, graph.edge(5, 6).setDistance(0.001)).setKeyValues(createKV("name", "3-4"));
<ide> return graph;
<ide> }
<ide> |
|
Java | mit | 07ec1068746c2b9fc156309961eb973ecf91d427 | 0 | ictrobot/Cubes,RedTroop/Cubes_2,ictrobot/Cubes,RedTroop/Cubes_2 | package ethanjones.cubes.side.client;
import ethanjones.cubes.core.platform.Compatibility;
import ethanjones.cubes.core.system.Branding;
import ethanjones.cubes.entity.living.player.Player;
import ethanjones.cubes.side.common.Cubes;
import ethanjones.cubes.world.CoordinateConverter;
import ethanjones.cubes.world.storage.Area;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.profiling.GLProfiler;
import com.badlogic.gdx.math.Vector3;
public class ClientDebug {
public static class Average {
public int current;
public long total;
public int num;
public int average;
public void add(int i) {
current = i;
total += i;
num++;
average = (int) (total / num);
}
}
private static final String lineSeparator = System.getProperty("line.separator");
static Average fps = new Average();
static Average loop = new Average();
static long lastTime = System.currentTimeMillis();
private static String debugString = "";
public static void update() {
Vector3 p = Cubes.getClient().player.position;
loop.add((int) (System.currentTimeMillis() - lastTime));
lastTime = System.currentTimeMillis();
String str = Branding.VERSION_HASH;
if (!str.isEmpty()) {
str = "HASH: " + str + lineSeparator;
}
String performance = "FPS:" + fps.current + " AVG:" + fps.average + " MS:" + String.format("%01d", loop.current) + " AVG:" + String.format("%01d", loop.average) + " MEM:" + Compatibility.get().getFreeMemory() + "MB";
String position = "POS X:" + String.format("%.2f", p.x) + "(" + CoordinateConverter.area(p.x) + ")" + " Y:" + String.format("%.2f", p.y) + " Z:" + String.format("%.2f", p.z) + "(" + CoordinateConverter.area(p.z) + ")";
String direction = "DIR X:" + String.format("%.2f", Cubes.getClient().player.angle.x) + " Y:" + String.format("%.2f", Cubes.getClient().player.angle.y) + " Z:" + String.format("%.2f", Cubes.getClient().player.angle.z);
String light = "L:" + getLight();
debugString = Branding.DEBUG + lineSeparator + performance + lineSeparator + position + lineSeparator + direction + lineSeparator + light;
GLProfiler.calls = 0;
GLProfiler.drawCalls = 0;
GLProfiler.shaderSwitches = 0;
GLProfiler.textureBindings = 0;
}
private static int getLight() {
Player player = Cubes.getClient().player;
Area area = Cubes.getClient().world.getArea(CoordinateConverter.area(player.position.x), CoordinateConverter.area(player.position.z));
if (area != null) {
int x = CoordinateConverter.block(player.position.x);
int y = CoordinateConverter.block(player.position.y - player.height - 0.01f);
int z = CoordinateConverter.block(player.position.z);
return area.getLight(x - area.minBlockX, y, z - area.minBlockZ);
}
return 0;
}
public static String getDebugString() {
return debugString;
}
public static void tick() {
fps.add(Gdx.graphics.getFramesPerSecond());
}
}
| core/src/ethanjones/cubes/side/client/ClientDebug.java | package ethanjones.cubes.side.client;
import ethanjones.cubes.core.platform.Compatibility;
import ethanjones.cubes.core.system.Branding;
import ethanjones.cubes.side.common.Cubes;
import ethanjones.cubes.world.CoordinateConverter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.profiling.GLProfiler;
import com.badlogic.gdx.math.Vector3;
public class ClientDebug {
public static class Average {
public int current;
public long total;
public int num;
public int average;
public void add(int i) {
current = i;
total += i;
num++;
average = (int) (total / num);
}
}
private static final String lineSeparator = System.getProperty("line.separator");
static Average fps = new Average();
static Average loop = new Average();
static long lastTime = System.currentTimeMillis();
private static String debugString = "";
public static void update() {
Vector3 p = Cubes.getClient().player.position;
loop.add((int) (System.currentTimeMillis() - lastTime));
lastTime = System.currentTimeMillis();
String str = Branding.VERSION_HASH;
if (!str.isEmpty()) {
str = "HASH: " + str + lineSeparator;
}
String performance = "FPS:" + fps.current + " AVG:" + fps.average + " MS:" + String.format("%01d", loop.current) + " AVG:" + String.format("%01d", loop.average) + " MEM:" + Compatibility.get().getFreeMemory() + "MB";
String position = "POS X:" + String.format("%.2f", p.x) + "(" + CoordinateConverter.area(p.x) + ")" + " Y:" + String.format("%.2f", p.y) + " Z:" + String.format("%.2f", p.z) + "(" + CoordinateConverter.area(p.z) + ")";
String direction = "DIR X:" + String.format("%.2f", Cubes.getClient().player.angle.x) + " Y:" + String.format("%.2f", Cubes.getClient().player.angle.y) + " Z:" + String.format("%.2f", Cubes.getClient().player.angle.z);
debugString = Branding.DEBUG + lineSeparator + performance + lineSeparator + position + lineSeparator + direction;
GLProfiler.calls = 0;
GLProfiler.drawCalls = 0;
GLProfiler.shaderSwitches = 0;
GLProfiler.textureBindings = 0;
}
public static String getDebugString() {
return debugString;
}
public static void tick() {
fps.add(Gdx.graphics.getFramesPerSecond());
}
}
| ClientDebug Light
| core/src/ethanjones/cubes/side/client/ClientDebug.java | ClientDebug Light | <ide><path>ore/src/ethanjones/cubes/side/client/ClientDebug.java
<ide>
<ide> import ethanjones.cubes.core.platform.Compatibility;
<ide> import ethanjones.cubes.core.system.Branding;
<add>import ethanjones.cubes.entity.living.player.Player;
<ide> import ethanjones.cubes.side.common.Cubes;
<ide> import ethanjones.cubes.world.CoordinateConverter;
<add>import ethanjones.cubes.world.storage.Area;
<ide>
<ide> import com.badlogic.gdx.Gdx;
<ide> import com.badlogic.gdx.graphics.profiling.GLProfiler;
<ide> String performance = "FPS:" + fps.current + " AVG:" + fps.average + " MS:" + String.format("%01d", loop.current) + " AVG:" + String.format("%01d", loop.average) + " MEM:" + Compatibility.get().getFreeMemory() + "MB";
<ide> String position = "POS X:" + String.format("%.2f", p.x) + "(" + CoordinateConverter.area(p.x) + ")" + " Y:" + String.format("%.2f", p.y) + " Z:" + String.format("%.2f", p.z) + "(" + CoordinateConverter.area(p.z) + ")";
<ide> String direction = "DIR X:" + String.format("%.2f", Cubes.getClient().player.angle.x) + " Y:" + String.format("%.2f", Cubes.getClient().player.angle.y) + " Z:" + String.format("%.2f", Cubes.getClient().player.angle.z);
<del>
<del> debugString = Branding.DEBUG + lineSeparator + performance + lineSeparator + position + lineSeparator + direction;
<add> String light = "L:" + getLight();
<add> debugString = Branding.DEBUG + lineSeparator + performance + lineSeparator + position + lineSeparator + direction + lineSeparator + light;
<ide>
<ide> GLProfiler.calls = 0;
<ide> GLProfiler.drawCalls = 0;
<ide> GLProfiler.shaderSwitches = 0;
<ide> GLProfiler.textureBindings = 0;
<add> }
<add>
<add> private static int getLight() {
<add> Player player = Cubes.getClient().player;
<add> Area area = Cubes.getClient().world.getArea(CoordinateConverter.area(player.position.x), CoordinateConverter.area(player.position.z));
<add> if (area != null) {
<add> int x = CoordinateConverter.block(player.position.x);
<add> int y = CoordinateConverter.block(player.position.y - player.height - 0.01f);
<add> int z = CoordinateConverter.block(player.position.z);
<add> return area.getLight(x - area.minBlockX, y, z - area.minBlockZ);
<add> }
<add> return 0;
<ide> }
<ide>
<ide> public static String getDebugString() { |
|
Java | mit | 9970969c75630b5fda7f924afc910511ffd36532 | 0 | InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service | package org.innovateuk.ifs.assessment.feedback.controller;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.innovateuk.ifs.application.populator.OrganisationDetailsModelPopulator;
import org.innovateuk.ifs.application.service.QuestionRestService;
import org.innovateuk.ifs.application.service.QuestionService;
import org.innovateuk.ifs.assessment.feedback.populator.AssessmentFeedbackApplicationDetailsModelPopulator;
import org.innovateuk.ifs.assessment.feedback.populator.AssessmentFeedbackModelPopulator;
import org.innovateuk.ifs.assessment.feedback.populator.AssessmentFeedbackNavigationModelPopulator;
import org.innovateuk.ifs.assessment.feedback.viewmodel.AssessmentFeedbackApplicationDetailsViewModel;
import org.innovateuk.ifs.assessment.feedback.viewmodel.AssessmentFeedbackNavigationViewModel;
import org.innovateuk.ifs.assessment.feedback.viewmodel.AssessmentFeedbackViewModel;
import org.innovateuk.ifs.assessment.resource.AssessorFormInputResponseResource;
import org.innovateuk.ifs.assessment.resource.AssessorFormInputResponsesResource;
import org.innovateuk.ifs.assessment.service.AssessorFormInputResponseRestService;
import org.innovateuk.ifs.commons.rest.RestResult;
import org.innovateuk.ifs.commons.security.SecuredBySpring;
import org.innovateuk.ifs.controller.ValidationHandler;
import org.innovateuk.ifs.form.Form;
import org.innovateuk.ifs.form.resource.FormInputResource;
import org.innovateuk.ifs.form.resource.QuestionResource;
import org.innovateuk.ifs.form.service.FormInputRestService;
import org.innovateuk.ifs.question.resource.QuestionSetupType;
import org.innovateuk.ifs.user.resource.ProcessRoleResource;
import org.innovateuk.ifs.user.service.UserRestService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.google.common.collect.Sets.newHashSet;
import static java.lang.String.format;
import static org.innovateuk.ifs.controller.ErrorToObjectErrorConverterFactory.asGlobalErrors;
import static org.innovateuk.ifs.controller.ErrorToObjectErrorConverterFactory.mappingFieldErrorToField;
import static org.innovateuk.ifs.form.resource.FormInputScope.APPLICATION;
import static org.innovateuk.ifs.form.resource.FormInputScope.ASSESSMENT;
import static org.innovateuk.ifs.form.resource.FormInputType.ASSESSOR_APPLICATION_IN_SCOPE;
import static org.innovateuk.ifs.util.CollectionFunctions.*;
@Controller
@RequestMapping("/{assessmentId}")
@SecuredBySpring(value = "Controller", description = "TODO", securedType = AssessmentFeedbackController.class)
@PreAuthorize("hasAuthority('assessor')")
public class AssessmentFeedbackController {
private static final Log LOG = LogFactory.getLog(AssessmentFeedbackController.class);
private static final String FORM_ATTR_NAME = "form";
@Autowired
private FormInputRestService formInputRestService;
@Autowired
private QuestionService questionService;
@Autowired
private QuestionRestService questionRestService;
@Autowired
private AssessorFormInputResponseRestService assessorFormInputResponseRestService;
@Autowired
private AssessmentFeedbackModelPopulator assessmentFeedbackModelPopulator;
@Autowired
private AssessmentFeedbackApplicationDetailsModelPopulator assessmentFeedbackApplicationDetailsModelPopulator;
@Autowired
private AssessmentFeedbackNavigationModelPopulator assessmentFeedbackNavigationModelPopulator;
@Autowired
private OrganisationDetailsModelPopulator organisationDetailsModelPopulator;
@Autowired
private UserRestService userRestService;
@GetMapping("/question/{questionId}")
public String getQuestion(Model model,
@ModelAttribute(name = FORM_ATTR_NAME, binding = false) Form form,
@PathVariable("assessmentId") long assessmentId,
@PathVariable("questionId") long questionId) {
QuestionResource question = getQuestionForAssessment(questionId, assessmentId);
if (isApplicationDetailsQuestion(questionId)) {
return getApplicationDetails(model, assessmentId, question);
}
populateQuestionForm(form, assessmentId, questionId);
return doViewQuestion(model, assessmentId, question);
}
@PostMapping("/formInput/{formInputId}")
public
@ResponseBody
JsonNode updateFormInputResponse(
@PathVariable("assessmentId") long assessmentId,
@PathVariable("formInputId") long formInputId,
@RequestParam("value") String value) {
try {
assessorFormInputResponseRestService.updateFormInputResponse(assessmentId, formInputId, value)
.getSuccess();
return createJsonObjectNode(true);
} catch (Exception e) {
LOG.error("exception thrown updating input form response", e);
return createJsonObjectNode(false);
}
}
@PostMapping("/question/{questionId}")
public String save(
Model model,
@ModelAttribute(FORM_ATTR_NAME) Form form,
@SuppressWarnings("UnusedParameters") BindingResult bindingResult,
ValidationHandler validationHandler,
@PathVariable("assessmentId") long assessmentId,
@PathVariable("questionId") long questionId) {
Supplier<String> failureView = () -> doViewQuestion(model, assessmentId, getQuestionForAssessment(questionId, assessmentId));
return validationHandler.failNowOrSucceedWith(failureView, () -> {
List<FormInputResource> formInputs = getAssessmentFormInputsForQuestion(questionId);
AssessorFormInputResponsesResource responses = getFormInputResponses(form, formInputs, assessmentId);
RestResult<Void> updateResult = assessorFormInputResponseRestService.updateFormInputResponses(responses);
return validationHandler.addAnyErrors(updateResult, mappingFieldErrorToField(e -> {
Matcher matcher = Pattern.compile("responses\\[(\\d)\\]\\.value").matcher(e.getFieldName());
if (matcher.find()) {
int errorIndex = Integer.parseInt(matcher.group(1));
Long formInputResponseWithError = responses.getResponses().get(errorIndex).getFormInput();
return format("formInput[%s]", formInputResponseWithError);
} else if (e.isFieldError() && !e.getArguments().isEmpty()) {
return format("formInput[%s]", e.getArguments().get(0));
}
return e.getFieldName();
}), asGlobalErrors()).failNowOrSucceedWith(failureView, () -> redirectToAssessmentOverview(assessmentId));
});
}
private QuestionResource getQuestionForAssessment(long questionId, long assessmentId) {
return questionService.getByIdAndAssessmentId(questionId, assessmentId);
}
private List<AssessorFormInputResponseResource> getAssessorResponses(long assessmentId, long questionId) {
return assessorFormInputResponseRestService.getAllAssessorFormInputResponsesByAssessmentAndQuestion(
assessmentId, questionId).getSuccess();
}
private Form populateQuestionForm(Form form, long assessmentId, long questionId) {
List<AssessorFormInputResponseResource> assessorResponses = getAssessorResponses(assessmentId, questionId);
Map<Long, AssessorFormInputResponseResource> mappedResponses = simpleToMap(assessorResponses, AssessorFormInputResponseResource::getFormInput);
mappedResponses.forEach((k, v) -> form.addFormInput(k.toString(), v.getValue()));
processScopeInput(form, mappedResponses, questionId);
return form;
}
private void processScopeInput(Form form, Map<Long, AssessorFormInputResponseResource> mappedResponses, long questionId) {
Optional<FormInputResource> scopeInput = getScopeFormInput(getAssessmentFormInputsForQuestion(questionId));
scopeInput.ifPresent(scope -> {
if (!mappedResponses.containsKey(scope.getId())) {
form.addFormInput(scope.getId().toString(), "none");
}
});
}
private List<FormInputResource> getAssessmentFormInputsForQuestion(long questionId) {
return formInputRestService.getByQuestionIdAndScope(questionId, ASSESSMENT)
.getSuccess();
}
private String doViewQuestion(Model model, long assessmentId, QuestionResource question) {
AssessmentFeedbackViewModel viewModel = assessmentFeedbackModelPopulator.populateModel(assessmentId, question);
model.addAttribute("model", viewModel);
model.addAttribute("navigation", assessmentFeedbackNavigationModelPopulator.populateModel(assessmentId, question));
return "assessment/application-question";
}
private String redirectToAssessmentOverview(long assessmentId) {
return "redirect:/" + assessmentId;
}
private boolean isApplicationDetailsQuestion(long questionId) {
return questionRestService.findById(questionId).getSuccess().getQuestionSetupType().equals(QuestionSetupType.APPLICATION_DETAILS);
}
private String getApplicationDetails(Model model, long assessmentId, QuestionResource question) {
AssessmentFeedbackApplicationDetailsViewModel viewModel = assessmentFeedbackApplicationDetailsModelPopulator.populateModel(assessmentId, question);
AssessmentFeedbackNavigationViewModel navigationViewModel = assessmentFeedbackNavigationModelPopulator.populateModel(assessmentId, question);
model.addAttribute("model", viewModel);
model.addAttribute("navigation", navigationViewModel);
List<ProcessRoleResource> userApplicationRoles = userRestService.findProcessRole(viewModel.getApplicationId()).getSuccess();
organisationDetailsModelPopulator.populateModel(model, viewModel.getApplicationId(), userApplicationRoles);
return "assessment/application-details";
}
private List<FormInputResource> getApplicationFormInputs(long questionId) {
return formInputRestService.getByQuestionIdAndScope(questionId, APPLICATION).getSuccess();
}
private Optional<FormInputResource> getScopeFormInput(List<FormInputResource> formInputs) {
return formInputs.stream()
.filter(input -> input.getType().equals(ASSESSOR_APPLICATION_IN_SCOPE))
.findAny();
}
private AssessorFormInputResponsesResource getFormInputResponses(Form form, List<FormInputResource> formInputs, long assessmentId) {
Set<Long> formInputResourceIds = newHashSet(simpleMap(formInputs, FormInputResource::getId));
Map<Long, String> responseStrings = simpleMapEntry(form.getFormInput(), formInput -> Long.valueOf(formInput.getKey()), Map.Entry::getValue);
// Filter the responses to include only those for which a form input exist
Map<Long, String> filtered = simpleFilter(responseStrings, (id, value) -> formInputResourceIds.contains(id));
List<AssessorFormInputResponseResource> assessorFormInputResponses = simpleMap(
filtered,
(id, value) -> new AssessorFormInputResponseResource(assessmentId, id, value)
);
return new AssessorFormInputResponsesResource(assessorFormInputResponses);
}
private ObjectNode createJsonObjectNode(boolean success) {
ObjectMapper mapper = new ObjectMapper();
ObjectNode node = mapper.createObjectNode();
node.put("success", success ? "true" : "false");
return node;
}
}
| ifs-web-service/ifs-assessment-service/src/main/java/org/innovateuk/ifs/assessment/feedback/controller/AssessmentFeedbackController.java | package org.innovateuk.ifs.assessment.feedback.controller;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.innovateuk.ifs.form.Form;
import org.innovateuk.ifs.application.service.QuestionService;
import org.innovateuk.ifs.assessment.feedback.populator.AssessmentFeedbackApplicationDetailsModelPopulator;
import org.innovateuk.ifs.assessment.feedback.populator.AssessmentFeedbackModelPopulator;
import org.innovateuk.ifs.assessment.feedback.populator.AssessmentFeedbackNavigationModelPopulator;
import org.innovateuk.ifs.assessment.feedback.viewmodel.AssessmentFeedbackApplicationDetailsViewModel;
import org.innovateuk.ifs.assessment.feedback.viewmodel.AssessmentFeedbackNavigationViewModel;
import org.innovateuk.ifs.assessment.feedback.viewmodel.AssessmentFeedbackViewModel;
import org.innovateuk.ifs.assessment.resource.AssessorFormInputResponseResource;
import org.innovateuk.ifs.assessment.resource.AssessorFormInputResponsesResource;
import org.innovateuk.ifs.assessment.service.AssessorFormInputResponseRestService;
import org.innovateuk.ifs.commons.rest.RestResult;
import org.innovateuk.ifs.commons.security.SecuredBySpring;
import org.innovateuk.ifs.controller.ValidationHandler;
import org.innovateuk.ifs.form.resource.FormInputResource;
import org.innovateuk.ifs.form.resource.FormInputType;
import org.innovateuk.ifs.form.resource.QuestionResource;
import org.innovateuk.ifs.form.service.FormInputRestService;
import org.innovateuk.ifs.application.populator.OrganisationDetailsModelPopulator;
import org.innovateuk.ifs.user.resource.ProcessRoleResource;
import org.innovateuk.ifs.user.service.ProcessRoleService;
import org.innovateuk.ifs.user.service.UserRestService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.google.common.collect.Sets.newHashSet;
import static java.lang.String.format;
import static org.innovateuk.ifs.controller.ErrorToObjectErrorConverterFactory.asGlobalErrors;
import static org.innovateuk.ifs.controller.ErrorToObjectErrorConverterFactory.mappingFieldErrorToField;
import static org.innovateuk.ifs.form.resource.FormInputScope.APPLICATION;
import static org.innovateuk.ifs.form.resource.FormInputScope.ASSESSMENT;
import static org.innovateuk.ifs.form.resource.FormInputType.ASSESSOR_APPLICATION_IN_SCOPE;
import static org.innovateuk.ifs.util.CollectionFunctions.*;
@Controller
@RequestMapping("/{assessmentId}")
@SecuredBySpring(value = "Controller", description = "TODO", securedType = AssessmentFeedbackController.class)
@PreAuthorize("hasAuthority('assessor')")
public class AssessmentFeedbackController {
private static final Log LOG = LogFactory.getLog(AssessmentFeedbackController.class);
private static final String FORM_ATTR_NAME = "form";
@Autowired
private FormInputRestService formInputRestService;
@Autowired
private QuestionService questionService;
@Autowired
private AssessorFormInputResponseRestService assessorFormInputResponseRestService;
@Autowired
private AssessmentFeedbackModelPopulator assessmentFeedbackModelPopulator;
@Autowired
private AssessmentFeedbackApplicationDetailsModelPopulator assessmentFeedbackApplicationDetailsModelPopulator;
@Autowired
private AssessmentFeedbackNavigationModelPopulator assessmentFeedbackNavigationModelPopulator;
@Autowired
private OrganisationDetailsModelPopulator organisationDetailsModelPopulator;
@Autowired
private UserRestService userRestService;
@GetMapping("/question/{questionId}")
public String getQuestion(Model model,
@ModelAttribute(name = FORM_ATTR_NAME, binding = false) Form form,
@PathVariable("assessmentId") long assessmentId,
@PathVariable("questionId") long questionId) {
QuestionResource question = getQuestionForAssessment(questionId, assessmentId);
if (isApplicationDetailsQuestion(questionId)) {
return getApplicationDetails(model, assessmentId, question);
}
populateQuestionForm(form, assessmentId, questionId);
return doViewQuestion(model, assessmentId, question);
}
@PostMapping("/formInput/{formInputId}")
public
@ResponseBody
JsonNode updateFormInputResponse(
@PathVariable("assessmentId") long assessmentId,
@PathVariable("formInputId") long formInputId,
@RequestParam("value") String value) {
try {
assessorFormInputResponseRestService.updateFormInputResponse(assessmentId, formInputId, value)
.getSuccess();
return createJsonObjectNode(true);
} catch (Exception e) {
LOG.error("exception thrown updating input form response", e);
return createJsonObjectNode(false);
}
}
@PostMapping("/question/{questionId}")
public String save(
Model model,
@ModelAttribute(FORM_ATTR_NAME) Form form,
@SuppressWarnings("UnusedParameters") BindingResult bindingResult,
ValidationHandler validationHandler,
@PathVariable("assessmentId") long assessmentId,
@PathVariable("questionId") long questionId) {
Supplier<String> failureView = () -> doViewQuestion(model, assessmentId, getQuestionForAssessment(questionId, assessmentId));
return validationHandler.failNowOrSucceedWith(failureView, () -> {
List<FormInputResource> formInputs = getAssessmentFormInputsForQuestion(questionId);
AssessorFormInputResponsesResource responses = getFormInputResponses(form, formInputs, assessmentId);
RestResult<Void> updateResult = assessorFormInputResponseRestService.updateFormInputResponses(responses);
return validationHandler.addAnyErrors(updateResult, mappingFieldErrorToField(e -> {
Matcher matcher = Pattern.compile("responses\\[(\\d)\\]\\.value").matcher(e.getFieldName());
if (matcher.find()) {
int errorIndex = Integer.parseInt(matcher.group(1));
Long formInputResponseWithError = responses.getResponses().get(errorIndex).getFormInput();
return format("formInput[%s]", formInputResponseWithError);
} else if (e.isFieldError() && !e.getArguments().isEmpty()) {
return format("formInput[%s]", e.getArguments().get(0));
}
return e.getFieldName();
}), asGlobalErrors()).failNowOrSucceedWith(failureView, () -> redirectToAssessmentOverview(assessmentId));
});
}
private QuestionResource getQuestionForAssessment(long questionId, long assessmentId) {
return questionService.getByIdAndAssessmentId(questionId, assessmentId);
}
private List<AssessorFormInputResponseResource> getAssessorResponses(long assessmentId, long questionId) {
return assessorFormInputResponseRestService.getAllAssessorFormInputResponsesByAssessmentAndQuestion(
assessmentId, questionId).getSuccess();
}
private Form populateQuestionForm(Form form, long assessmentId, long questionId) {
List<AssessorFormInputResponseResource> assessorResponses = getAssessorResponses(assessmentId, questionId);
Map<Long, AssessorFormInputResponseResource> mappedResponses = simpleToMap(assessorResponses, AssessorFormInputResponseResource::getFormInput);
mappedResponses.forEach((k, v) -> form.addFormInput(k.toString(), v.getValue()));
processScopeInput(form, mappedResponses, questionId);
return form;
}
private void processScopeInput(Form form, Map<Long, AssessorFormInputResponseResource> mappedResponses, long questionId) {
Optional<FormInputResource> scopeInput = getScopeFormInput(getAssessmentFormInputsForQuestion(questionId));
scopeInput.ifPresent(scope -> {
if (!mappedResponses.containsKey(scope.getId())) {
form.addFormInput(scope.getId().toString(), "none");
}
});
}
private List<FormInputResource> getAssessmentFormInputsForQuestion(long questionId) {
return formInputRestService.getByQuestionIdAndScope(questionId, ASSESSMENT)
.getSuccess();
}
private String doViewQuestion(Model model, long assessmentId, QuestionResource question) {
AssessmentFeedbackViewModel viewModel = assessmentFeedbackModelPopulator.populateModel(assessmentId, question);
model.addAttribute("model", viewModel);
model.addAttribute("navigation", assessmentFeedbackNavigationModelPopulator.populateModel(assessmentId, question));
return "assessment/application-question";
}
private String redirectToAssessmentOverview(long assessmentId) {
return "redirect:/" + assessmentId;
}
private boolean isApplicationDetailsQuestion(long questionId) {
List<FormInputResource> applicationFormInputs = getApplicationFormInputs(questionId);
return applicationFormInputs.stream().anyMatch(formInputResource -> FormInputType.APPLICATION_DETAILS == formInputResource.getType());
}
private String getApplicationDetails(Model model, long assessmentId, QuestionResource question) {
AssessmentFeedbackApplicationDetailsViewModel viewModel = assessmentFeedbackApplicationDetailsModelPopulator.populateModel(assessmentId, question);
AssessmentFeedbackNavigationViewModel navigationViewModel = assessmentFeedbackNavigationModelPopulator.populateModel(assessmentId, question);
model.addAttribute("model", viewModel);
model.addAttribute("navigation", navigationViewModel);
List<ProcessRoleResource> userApplicationRoles = userRestService.findProcessRole(viewModel.getApplicationId()).getSuccess();
organisationDetailsModelPopulator.populateModel(model, viewModel.getApplicationId(), userApplicationRoles);
return "assessment/application-details";
}
private List<FormInputResource> getApplicationFormInputs(long questionId) {
return formInputRestService.getByQuestionIdAndScope(questionId, APPLICATION).getSuccess();
}
private Optional<FormInputResource> getScopeFormInput(List<FormInputResource> formInputs) {
return formInputs.stream()
.filter(input -> input.getType().equals(ASSESSOR_APPLICATION_IN_SCOPE))
.findAny();
}
private AssessorFormInputResponsesResource getFormInputResponses(Form form, List<FormInputResource> formInputs, long assessmentId) {
Set<Long> formInputResourceIds = newHashSet(simpleMap(formInputs, FormInputResource::getId));
Map<Long, String> responseStrings = simpleMapEntry(form.getFormInput(), formInput -> Long.valueOf(formInput.getKey()), Map.Entry::getValue);
// Filter the responses to include only those for which a form input exist
Map<Long, String> filtered = simpleFilter(responseStrings, (id, value) -> formInputResourceIds.contains(id));
List<AssessorFormInputResponseResource> assessorFormInputResponses = simpleMap(
filtered,
(id, value) -> new AssessorFormInputResponseResource(assessmentId, id, value)
);
return new AssessorFormInputResponsesResource(assessorFormInputResponses);
}
private ObjectNode createJsonObjectNode(boolean success) {
ObjectMapper mapper = new ObjectMapper();
ObjectNode node = mapper.createObjectNode();
node.put("success", success ? "true" : "false");
return node;
}
}
| IFS-6216 fixing error.
| ifs-web-service/ifs-assessment-service/src/main/java/org/innovateuk/ifs/assessment/feedback/controller/AssessmentFeedbackController.java | IFS-6216 fixing error. | <ide><path>fs-web-service/ifs-assessment-service/src/main/java/org/innovateuk/ifs/assessment/feedback/controller/AssessmentFeedbackController.java
<ide> import com.fasterxml.jackson.databind.node.ObjectNode;
<ide> import org.apache.commons.logging.Log;
<ide> import org.apache.commons.logging.LogFactory;
<del>import org.innovateuk.ifs.form.Form;
<add>import org.innovateuk.ifs.application.populator.OrganisationDetailsModelPopulator;
<add>import org.innovateuk.ifs.application.service.QuestionRestService;
<ide> import org.innovateuk.ifs.application.service.QuestionService;
<ide> import org.innovateuk.ifs.assessment.feedback.populator.AssessmentFeedbackApplicationDetailsModelPopulator;
<ide> import org.innovateuk.ifs.assessment.feedback.populator.AssessmentFeedbackModelPopulator;
<ide> import org.innovateuk.ifs.commons.rest.RestResult;
<ide> import org.innovateuk.ifs.commons.security.SecuredBySpring;
<ide> import org.innovateuk.ifs.controller.ValidationHandler;
<add>import org.innovateuk.ifs.form.Form;
<ide> import org.innovateuk.ifs.form.resource.FormInputResource;
<del>import org.innovateuk.ifs.form.resource.FormInputType;
<ide> import org.innovateuk.ifs.form.resource.QuestionResource;
<ide> import org.innovateuk.ifs.form.service.FormInputRestService;
<del>import org.innovateuk.ifs.application.populator.OrganisationDetailsModelPopulator;
<add>import org.innovateuk.ifs.question.resource.QuestionSetupType;
<ide> import org.innovateuk.ifs.user.resource.ProcessRoleResource;
<del>import org.innovateuk.ifs.user.service.ProcessRoleService;
<ide> import org.innovateuk.ifs.user.service.UserRestService;
<ide> import org.springframework.beans.factory.annotation.Autowired;
<ide> import org.springframework.security.access.prepost.PreAuthorize;
<ide>
<ide> @Autowired
<ide> private QuestionService questionService;
<add>
<add> @Autowired
<add> private QuestionRestService questionRestService;
<ide>
<ide> @Autowired
<ide> private AssessorFormInputResponseRestService assessorFormInputResponseRestService;
<ide> }
<ide>
<ide> private boolean isApplicationDetailsQuestion(long questionId) {
<del> List<FormInputResource> applicationFormInputs = getApplicationFormInputs(questionId);
<del> return applicationFormInputs.stream().anyMatch(formInputResource -> FormInputType.APPLICATION_DETAILS == formInputResource.getType());
<add> return questionRestService.findById(questionId).getSuccess().getQuestionSetupType().equals(QuestionSetupType.APPLICATION_DETAILS);
<ide> }
<ide>
<ide> private String getApplicationDetails(Model model, long assessmentId, QuestionResource question) { |
|
Java | mit | error: pathspec 'AdventOfCode/src/BinaryString.java' did not match any file(s) known to git
| 68fd093d616ba04a8d24ed16751bdcb5ddd4c8e7 | 1 | melonhead901/programming-competition,melonhead901/programming-competition | import java.util.Scanner;
/**
* Created by kdonohue on 12/15/16.
*/
public class BinaryString {
private String string;
private final int requiredLength;
private BinaryString(String string, int requiredLength) {
this.string = string;
this.requiredLength = requiredLength;
}
public static void main(String[] args) {
Scanner in = new Scanner(System.in);
BinaryString binaryString = new BinaryString(in.nextLine().trim(), 272);
binaryString.randomize();
binaryString.printChecksumLength();
}
private void randomize() {
while (!longEnough()) {
String copy = string;
copy = new StringBuilder(copy).reverse().toString();
copy = flipBits(copy);
string = string + "0" + copy;
}
}
private String flipBits(String string) {
StringBuilder builder = new StringBuilder();
for (char c : string.toCharArray()) {
switch (c) {
case '0':
builder.append(1);
break;
case '1':
builder.append(0);
break;
default:
throw new IllegalArgumentException("Unexpected char: " + c);
}
}
return builder.toString();
}
private void printChecksumLength() {
if (!longEnough()) {
throw new IllegalStateException();
}
String checkSum = string.substring(0, requiredLength);
while ((checkSum.length() % 2) == 0) {
StringBuilder newCheckSum = new StringBuilder();
for (int i = 0; i < checkSum.length(); i += 2) {
char c1 = checkSum.charAt(i);
char c2 = checkSum.charAt(i + 1);
if (c1 == c2) {
newCheckSum.append("1");
} else {
newCheckSum.append("0");
}
}
checkSum = newCheckSum.toString();
}
System.out.println(checkSum);
}
private boolean longEnough() {
return string.length() >= requiredLength;
}
}
| AdventOfCode/src/BinaryString.java | Day 15 part 1, for real
| AdventOfCode/src/BinaryString.java | Day 15 part 1, for real | <ide><path>dventOfCode/src/BinaryString.java
<add>import java.util.Scanner;
<add>
<add>/**
<add> * Created by kdonohue on 12/15/16.
<add> */
<add>public class BinaryString {
<add>
<add> private String string;
<add> private final int requiredLength;
<add>
<add> private BinaryString(String string, int requiredLength) {
<add> this.string = string;
<add> this.requiredLength = requiredLength;
<add> }
<add>
<add> public static void main(String[] args) {
<add> Scanner in = new Scanner(System.in);
<add> BinaryString binaryString = new BinaryString(in.nextLine().trim(), 272);
<add> binaryString.randomize();
<add> binaryString.printChecksumLength();
<add> }
<add>
<add> private void randomize() {
<add> while (!longEnough()) {
<add> String copy = string;
<add> copy = new StringBuilder(copy).reverse().toString();
<add> copy = flipBits(copy);
<add> string = string + "0" + copy;
<add> }
<add> }
<add>
<add> private String flipBits(String string) {
<add> StringBuilder builder = new StringBuilder();
<add> for (char c : string.toCharArray()) {
<add> switch (c) {
<add> case '0':
<add> builder.append(1);
<add> break;
<add> case '1':
<add> builder.append(0);
<add> break;
<add> default:
<add> throw new IllegalArgumentException("Unexpected char: " + c);
<add> }
<add> }
<add> return builder.toString();
<add> }
<add>
<add> private void printChecksumLength() {
<add> if (!longEnough()) {
<add> throw new IllegalStateException();
<add> }
<add> String checkSum = string.substring(0, requiredLength);
<add> while ((checkSum.length() % 2) == 0) {
<add> StringBuilder newCheckSum = new StringBuilder();
<add> for (int i = 0; i < checkSum.length(); i += 2) {
<add> char c1 = checkSum.charAt(i);
<add> char c2 = checkSum.charAt(i + 1);
<add> if (c1 == c2) {
<add> newCheckSum.append("1");
<add> } else {
<add> newCheckSum.append("0");
<add> }
<add> }
<add> checkSum = newCheckSum.toString();
<add> }
<add> System.out.println(checkSum);
<add> }
<add>
<add> private boolean longEnough() {
<add> return string.length() >= requiredLength;
<add> }
<add>
<add>} |
|
Java | apache-2.0 | fdcdf25ff5f5f6330ce1db4bcf05f2d3abeb86ba | 0 | pivotalsoftware/pivotal-cla,pivotalsoftware/pivotal-cla,pivotalsoftware/pivotal-cla,pivotalsoftware/pivotal-cla | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pivotal.cla.config;
import org.springframework.boot.autoconfigure.session.SessionAutoConfiguration;
import org.springframework.cloud.Cloud;
import org.springframework.cloud.CloudFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Profile;
import org.springframework.data.redis.connection.RedisConnectionFactory;
/**
* @author Mark Paluch
*/
@Configuration
@Import(SessionAutoConfiguration.class)
public class SessionConfig {
@Profile(GithubClaProfiles.CLOUDFOUNDRY)
@Bean
public RedisConnectionFactory redisConnectionFactory() {
CloudFactory cloudFactory = new CloudFactory();
Cloud cloud = cloudFactory.getCloud();
return cloud.getSingletonServiceConnector(RedisConnectionFactory.class, null);
}
}
| src/main/java/io/pivotal/cla/config/SessionConfig.java | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pivotal.cla.config;
import org.springframework.boot.autoconfigure.session.SessionAutoConfiguration;
import org.springframework.cloud.Cloud;
import org.springframework.cloud.CloudFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Profile;
import org.springframework.data.redis.connection.RedisConnectionFactory;
/**
* @author Mark Paluch
*/
@Profile(GithubClaProfiles.CLOUDFOUNDRY)
@Configuration
@Import(SessionAutoConfiguration.class)
public class SessionConfig {
@Bean
public RedisConnectionFactory redisConnectionFactory() {
CloudFactory cloudFactory = new CloudFactory();
Cloud cloud = cloudFactory.getCloud();
return cloud.getSingletonServiceConnector(RedisConnectionFactory.class, null);
}
}
| Require Redis Sessions
| src/main/java/io/pivotal/cla/config/SessionConfig.java | Require Redis Sessions | <ide><path>rc/main/java/io/pivotal/cla/config/SessionConfig.java
<ide> /**
<ide> * @author Mark Paluch
<ide> */
<del>@Profile(GithubClaProfiles.CLOUDFOUNDRY)
<ide> @Configuration
<ide> @Import(SessionAutoConfiguration.class)
<ide> public class SessionConfig {
<ide>
<add> @Profile(GithubClaProfiles.CLOUDFOUNDRY)
<ide> @Bean
<ide> public RedisConnectionFactory redisConnectionFactory() {
<ide> CloudFactory cloudFactory = new CloudFactory(); |
|
Java | apache-2.0 | 60e4071a604aace79db7a172ff615c1275994f65 | 0 | cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x,cbeams-archive/spring-framework-2.5.x | package org.springframework.web.servlet.handler;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.springframework.util.PathMatcher;
import org.springframework.web.util.WebUtils;
/**
* Abstract base class for URL-mapped HandlerMapping implementations.
* Provides infrastructure for mapping handlers to URLs and configurable
* URL lookup. For information on the latter, see alwaysUseFullPath property.
*
* <p>Supports direct matches, e.g. a registered "/test" matches "/test",
* and a various Ant-style pattern matches, e.g. a registered "/t*" matches
* both "/test" and "/team". For details, see the PathMatcher class.
*
* @author Juergen Hoeller
* @since 16.04.2003
* @see #setAlwaysUseFullPath
* @see org.springframework.util.PathMatcher
*/
public abstract class AbstractUrlHandlerMapping extends AbstractHandlerMapping {
private boolean alwaysUseFullPath = false;
private Map handlerMap = new HashMap();
/**
* Set if URL lookup should always use full path within current servlet
* context. Else, the path within the current servlet mapping is used
* if applicable (i.e. in the case of a ".../*" servlet mapping in web.xml).
* Default is false.
*/
public final void setAlwaysUseFullPath(boolean alwaysUseFullPath) {
this.alwaysUseFullPath = alwaysUseFullPath;
}
/**
* Look up a handler for the URL path of the given request.
* @param request current HTTP request
* @return the looked up handler instance, or null
*/
protected Object getHandlerInternal(HttpServletRequest request) {
String lookupPath = WebUtils.getLookupPathForRequest(request, this.alwaysUseFullPath);
logger.debug("Looking up handler for [" + lookupPath + "]");
return lookupHandler(lookupPath);
}
/**
* Look up a handler instance for the given URL path.
* <p>Supports direct matches, e.g. a registered "/test" matches "/test",
* and a various Ant-style pattern matches, e.g. a registered "/t*" matches
* both "/test" and "/team". For details, see the PathMatcher class.
* @param urlPath URL the bean is mapped to
* @return the associated handler instance, or null if not found
* @see org.springframework.util.PathMatcher
*/
protected final Object lookupHandler(String urlPath) {
// direct match?
Object handler = this.handlerMap.get(urlPath);
if (handler != null) {
return handler;
}
// pattern match?
for (Iterator it = this.handlerMap.keySet().iterator(); it.hasNext();) {
String registeredPath = (String) it.next();
if (PathMatcher.match(registeredPath, urlPath)) {
return this.handlerMap.get(registeredPath);
}
}
// no match found
return null;
}
/**
* Register the given handler instance for the given URL path.
* @param urlPath URL the bean is mapped to
* @param handler the handler instance
*/
protected final void registerHandler(String urlPath, Object handler) {
this.handlerMap.put(urlPath, handler);
logger.info("Mapped URL path [" + urlPath + "] onto handler [" + handler + "]");
}
/**
* Initialize the given handler object for the given URL.
* Sets the mapped URL if the handler is UrlHandlerAware.
* @param handler name of the bean in the application context
* @param urlPath URL the bean is mapped to
* @return the initialized handler instance
* @see UrlAwareHandler
*/
protected final Object initHandler(Object handler, String urlPath) {
logger.debug("Initializing handler [" + handler + "] for URL path [" + urlPath + "]");
if (handler instanceof UrlAwareHandler) {
((UrlAwareHandler) handler).setUrlMapping(urlPath);
}
return handler;
}
}
| src/org/springframework/web/servlet/handler/AbstractUrlHandlerMapping.java | package org.springframework.web.servlet.handler;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.springframework.util.PathMatcher;
import org.springframework.web.util.WebUtils;
/**
* Abstract base class for URL-mapped HandlerMapping implementations.
* Provides infrastructure for mapping handlers to URLs and configurable
* URL lookup. For information on the latter, see alwaysUseFullPath property.
*
* <p>Supports direct matches, e.g. a registered "/test" matches "/test",
* and a various Ant-style pattern matches, e.g. a registered "/t*" matches
* both "/test" and "/team". For details, see the PathMatcher class.
*
* @author Juergen Hoeller
* @since 16.04.2003
* @see #setAlwaysUseFullPath
* @see org.springframework.util.PathMatcher
*/
public abstract class AbstractUrlHandlerMapping extends AbstractHandlerMapping {
private boolean alwaysUseFullPath = false;
private Map handlerMap = new HashMap();
/**
* Set if URL lookup should always use full path within current servlet
* context. Else, the path within the current servlet mapping is used
* if applicable (i.e. in the case of a ".../*" servlet mapping in web.xml).
* Default is false.
*/
public final void setAlwaysUseFullPath(boolean alwaysUseFullPath) {
this.alwaysUseFullPath = alwaysUseFullPath;
}
/**
* Look up a handler for the URL path of the given request.
* @param request current HTTP request
* @return the looked up handler instance, or null
*/
protected Object getHandlerInternal(HttpServletRequest request) {
String lookupPath = WebUtils.getLookupPathForRequest(request, this.alwaysUseFullPath);
logger.debug("Looking up handler for: " + lookupPath);
return lookupHandler(lookupPath);
}
/**
* Look up a handler instance for the given URL path.
* <p>Supports direct matches, e.g. a registered "/test" matches "/test",
* and a various Ant-style pattern matches, e.g. a registered "/t*" matches
* both "/test" and "/team". For details, see the PathMatcher class.
* @param urlPath URL the bean is mapped to
* @return the associated handler instance, or null if not found
* @see org.springframework.util.PathMatcher
*/
protected final Object lookupHandler(String urlPath) {
// direct match?
Object handler = this.handlerMap.get(urlPath);
if (handler != null) {
return handler;
}
// pattern match?
for (Iterator it = this.handlerMap.keySet().iterator(); it.hasNext();) {
String registeredPath = (String) it.next();
if (PathMatcher.match(registeredPath, urlPath)) {
return this.handlerMap.get(registeredPath);
}
}
// no match found
return null;
}
/**
* Register the given handler instance for the given URL path.
* @param urlPath URL the bean is mapped to
* @param handler the handler instance
*/
protected final void registerHandler(String urlPath, Object handler) {
this.handlerMap.put(urlPath, handler);
logger.info("Mapped URL path [" + urlPath + "] onto handler [" + handler + "]");
}
/**
* Initialize the given handler object for the given URL.
* Sets the mapped URL if the handler is UrlHandlerAware.
* @param handler name of the bean in the application context
* @param urlPath URL the bean is mapped to
* @return the initialized handler instance
* @see UrlAwareHandler
*/
protected final Object initHandler(Object handler, String urlPath) {
logger.debug("Initializing handler [" + handler + "] for URL path [" + urlPath + "]");
if (handler instanceof UrlAwareHandler) {
((UrlAwareHandler) handler).setUrlMapping(urlPath);
}
return handler;
}
}
| polished log output
git-svn-id: b619a0c99665f88f1afe72824344cefe9a1c8c90@228 fd5a2b45-1f63-4059-99e9-3c7cb7fd75c8
| src/org/springframework/web/servlet/handler/AbstractUrlHandlerMapping.java | polished log output | <ide><path>rc/org/springframework/web/servlet/handler/AbstractUrlHandlerMapping.java
<ide> */
<ide> protected Object getHandlerInternal(HttpServletRequest request) {
<ide> String lookupPath = WebUtils.getLookupPathForRequest(request, this.alwaysUseFullPath);
<del> logger.debug("Looking up handler for: " + lookupPath);
<add> logger.debug("Looking up handler for [" + lookupPath + "]");
<ide> return lookupHandler(lookupPath);
<ide> }
<ide> |
|
Java | apache-2.0 | 285902493fdea62190aacd43dd79d67f7d376a96 | 0 | argv-minus-one/fop,spepping/fop-cs,StrategyObject/fop,StrategyObject/fop,Distrotech/fop,argv-minus-one/fop,StrategyObject/fop,StrategyObject/fop,argv-minus-one/fop,spepping/fop-cs,spepping/fop-cs,spepping/fop-cs,Distrotech/fop,argv-minus-one/fop,Distrotech/fop,argv-minus-one/fop,Distrotech/fop,StrategyObject/fop | /*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: LineLayoutManager.java,v 1.17 2004/04/02 10:38:29 cbowditch Exp $ */
package org.apache.fop.layoutmgr;
import org.apache.fop.datatypes.Length;
import org.apache.fop.fo.Constants;
import org.apache.fop.fo.flow.Block;
import org.apache.fop.fo.properties.CommonHyphenation;
import org.apache.fop.hyphenation.Hyphenation;
import org.apache.fop.hyphenation.Hyphenator;
import org.apache.fop.area.LineArea;
import java.util.ListIterator;
import java.util.List;
import java.util.ArrayList;
import java.util.LinkedList;
import org.apache.fop.traits.MinOptMax;
/**
* LayoutManager for lines. It builds one or more lines containing
* inline areas generated by its sub layout managers.
* A break is found for each line which may contain one of more
* breaks from the child layout managers.
* Once a break is found then it is return for the parent layout
* manager to handle.
* When the areas are being added to the page this manager
* creates a line area to contain the inline areas added by the
* child layout managers.
*/
public class LineLayoutManager extends InlineStackingLayoutManager
implements BlockLevelLayoutManager {
private Block fobj;
private void initialize() {
bTextAlignment = fobj.getTextAlign();
bTextAlignmentLast = fobj.getTextAlignLast();
textIndent = fobj.getTextIndent();
hyphProps = fobj.getCommonHyphenation();
//
if (bTextAlignment != EN_JUSTIFY && bTextAlignmentLast == EN_JUSTIFY) {
effectiveAlignment = 0;
} else {
effectiveAlignment = bTextAlignment;
}
}
/**
* Private class to store information about inline breaks.
* Each value holds the start and end indexes into a List of
* inline break positions.
*/
private static class LineBreakPosition extends LeafPosition {
// int iPos;
int iParIndex; // index of the Paragraph this Position refers to
int availableShrink;
int availableStretch;
int difference;
double dAdjust; // Percentage to adjust (stretch or shrink)
double ipdAdjust; // Percentage to adjust (stretch or shrink)
int startIndent;
int lineHeight;
int lineWidth;
int baseline;
int topShift;
int bottomShift;
LineBreakPosition(LayoutManager lm, int index, int iBreakIndex,
int shrink, int stretch, int diff,
double ipdA, double adjust, int ind,
int lh, int lw, int bl, int ts, int bs) {
super(lm, iBreakIndex);
availableShrink = shrink;
availableStretch = stretch;
difference = diff;
iParIndex = index;
ipdAdjust = ipdA;
dAdjust = adjust;
startIndent = ind;
lineHeight = lh;
lineWidth = lw;
baseline = bl;
topShift = ts;
bottomShift = bs;
}
}
/** Break positions returned by inline content. */
private List vecInlineBreaks = new java.util.ArrayList();
private int bTextAlignment = EN_JUSTIFY;
private int bTextAlignmentLast;
private int effectiveAlignment;
private Length textIndent;
private int iIndents = 0;
private CommonHyphenation hyphProps;
//private LayoutProps layoutProps;
private int lineHeight;
private int lead;
private int follow;
// offset of the middle baseline with respect to the main baseline
private int middleShift;
private List knuthParagraphs = null;
private int iReturnedLBP = 0;
private int iStartElement = 0;
private int iEndElement = 0;
// parameters of Knuth's algorithm:
// penalty value for flagged penalties
private int flaggedPenalty = 50;
private LineLayoutPossibilities lineLayouts;
private List lineLayoutsList;
private int iLineWidth = 0;
// this constant is used to create elements when text-align is center:
// every TextLM descendant of LineLM must use the same value,
// otherwise the line breaking algorithm does not find the right
// break point
public static final int DEFAULT_SPACE_WIDTH = 3336;
// this class is used to remember
// which was the first element in the paragraph
// returned by each LM
private class Update {
private InlineLevelLayoutManager inlineLM;
private int iFirstIndex;
public Update(InlineLevelLayoutManager lm, int index) {
inlineLM = lm;
iFirstIndex = index;
}
}
// this class represents a paragraph: the Knuth element sequence for the
// inline content between two forced line breaks, plus the auxiliary
// elements (indent box, line-filler glue, terminating penalty) that the
// line-breaking algorithm needs at the sequence boundaries
private class Paragraph extends KnuthSequence {
    // space at the end of the last line (in millipoints)
    private MinOptMax lineFiller;
    // alignment of the lines (EN_* constant)
    private int textAlignment;
    // alignment of the last line
    private int textAlignmentLast;
    // text-indent of the first line (millipoints)
    private int textIndent;
    // available ipd for the lines of this paragraph
    private int lineWidth;
    // the LM which created the paragraph
    private LineLayoutManager layoutManager;

    public Paragraph(LineLayoutManager llm, int alignment, int alignmentLast,
                     int indent) {
        super();
        layoutManager = llm;
        textAlignment = alignment;
        textAlignmentLast = alignmentLast;
        textIndent = indent;
    }

    /** Starts a new paragraph with the given available line width. */
    public void startParagraph(int lw) {
        lineWidth = lw;
        startSequence();
    }

    /**
     * Adds the auxiliary elements needed at the beginning of the sequence.
     * NOTE(review): reads the outer bTextAlignment/bTextAlignmentLast rather
     * than this.textAlignment/textAlignmentLast; callers currently construct
     * Paragraph with those same outer values, so the result is the same —
     * confirm before relying on the instance fields.
     */
    public void startSequence() {
        // set the minimum amount of empty space at the end of the
        // last line
        if (bTextAlignment == EN_CENTER) {
            lineFiller = new MinOptMax(0);
        } else {
            // opt is 1/12 of the line width, max is the whole line width
            lineFiller = new MinOptMax(0, (int)(lineWidth / 12), lineWidth);
        }

        // add auxiliary elements at the beginning of the paragraph
        if (bTextAlignment == EN_CENTER && bTextAlignmentLast != EN_JUSTIFY) {
            this.add(new KnuthGlue(0, 3 * DEFAULT_SPACE_WIDTH, 0,
                                   null, false));
            // ignoreAtStart is inherited from KnuthSequence: number of
            // leading elements that are not real content
            ignoreAtStart ++;
        }

        // add the element representing text indentation
        // at the beginning of the first paragraph
        if (knuthParagraphs.size() == 0
                && fobj.getTextIndent().getValue() != 0) {
            this.add(new KnuthInlineBox(fobj.getTextIndent().getValue(), 0, 0, 0,
                                        null, false));
            ignoreAtStart ++;
        }
    }

    /** Closes the sequence and registers it in knuthParagraphs (if non-empty). */
    public void endParagraph() {
        KnuthSequence finishedPar = this.endSequence();
        if (finishedPar != null) {
            knuthParagraphs.add(finishedPar);
        }
    }

    /**
     * Trims trailing non-box elements and appends the elements representing
     * the end-of-paragraph space and the forced break.
     *
     * @return this sequence, or null if it contained no real content
     */
    public KnuthSequence endSequence() {
        // remove glue and penalty item at the end of the paragraph
        while (this.size() > ignoreAtStart
               && !((KnuthElement)this.get(this.size() - 1)).isBox()) {
            this.remove(this.size() - 1);
        }
        if (this.size() > ignoreAtStart) {
            if (bTextAlignment == EN_CENTER
                    && bTextAlignmentLast != EN_JUSTIFY) {
                this.add(new KnuthGlue(0, 3 * DEFAULT_SPACE_WIDTH, 0,
                                       null, false));
                this.add(new KnuthPenalty(0, -KnuthElement.INFINITE,
                                          false, null, false));
                ignoreAtEnd = 2;
            } else if (bTextAlignmentLast != EN_JUSTIFY) {
                // add the elements representing the space
                // at the end of the last line
                // and the forced break
                this.add(new KnuthPenalty(0, KnuthElement.INFINITE,
                                          false, null, false));
                this.add(new KnuthGlue(lineFiller.opt,
                                       lineFiller.max - lineFiller.opt,
                                       lineFiller.opt - lineFiller.min, null, false));
                this.add(new KnuthPenalty(0, -KnuthElement.INFINITE,
                                          false, null, false));
                ignoreAtEnd = 3;
            } else {
                // add only the element representing the forced break
                this.add(new KnuthPenalty(0, -KnuthElement.INFINITE,
                                          false, null, false));
                ignoreAtEnd = 1;
            }
            return this;
        } else {
            // nothing but auxiliary elements: discard the sequence
            this.clear();
            return null;
        }
    }
}
/**
 * Knuth-style breaking algorithm specialised for line breaking: it turns
 * the chosen break nodes into LineBreakPosition objects (via updateData2)
 * and records them in the outer lineLayouts.
 */
private class LineBreakingAlgorithm extends BreakingAlgorithm {
    private LineLayoutManager thisLLM;
    // alignment of the containing page/flow (EN_* constant)
    private int pageAlignment;
    // index of the layout possibility currently being filled; -1 until
    // updateData2 is first called after (re)initialisation
    private int activePossibility;
    // positions added so far to the active possibility
    private int addedPositions;
    private int textIndent;
    // minimum width of the end-of-last-line filler glue
    private int fillerMinWidth;
    private int lineHeight;
    private int lead;
    private int follow;
    // NOTE(review): assigned in the constructor but never read in this class;
    // makeLineBreakPosition uses the outer middleShift instead — confirm intent
    private int middleshift;
    private int maxDiff;
    // demerits ceiling used when pruning layouts with a different line count
    private static final double MAX_DEMERITS = 10e6;

    /**
     * @param pageAlign     alignment of the page (EN_JUSTIFY enables multi-layout mode)
     * @param textAlign     text-align for all lines but the last
     * @param textAlignLast text-align for the last line
     * @param indent        text-indent of the first line
     * @param fillerWidth   minimum width of the last-line filler
     * @param lh            default line height
     * @param ld            default lead
     * @param fl            default follow
     * @param ms            middle-baseline shift (currently unused, see field note)
     * @param first         true if this is the first paragraph of the block
     * @param llm           the owning LineLayoutManager
     */
    public LineBreakingAlgorithm (int pageAlign,
                                  int textAlign, int textAlignLast,
                                  int indent, int fillerWidth,
                                  int lh, int ld, int fl, int ms, boolean first,
                                  LineLayoutManager llm) {
        super(textAlign, textAlignLast, first, false);
        pageAlignment = pageAlign;
        textIndent = indent;
        fillerMinWidth = fillerWidth;
        lineHeight = lh;
        lead = ld;
        follow = fl;
        middleshift = ms;
        thisLLM = llm;
        activePossibility = -1;
        // the larger of widows/orphans (computed but only the field is kept)
        maxDiff = fobj.getWidows() >= fobj.getOrphans()
                ? fobj.getWidows()
                : fobj.getOrphans();
    }

    /** Registers a new layout possibility with the given line count and demerits. */
    public void updateData1(int lineCount, double demerits) {
        lineLayouts.addPossibility(lineCount, demerits);
        log.trace("Layout possibility in " + lineCount + " lines; break at position:");
    }

    /**
     * Called by the breaking algorithm for each chosen node, from the last
     * line back to the first; creates a LineBreakPosition and stores it in
     * the active layout possibility.
     */
    public void updateData2(KnuthNode bestActiveNode,
                            KnuthSequence par,
                            int total) {
        // compute indent and adjustment ratio, according to
        // the value of text-align and text-align-last
        int indent = 0;
        // for the last line, the filler's minimum width is part of the difference
        int difference = (bestActiveNode.line < total) ? bestActiveNode.difference : bestActiveNode.difference + fillerMinWidth;
        int textAlign = (bestActiveNode.line < total) ? alignment : alignmentLast;
        indent += (textAlign == Constants.EN_CENTER) ?
                  difference / 2 :
                  (textAlign == Constants.EN_END) ? difference : 0;
        indent += (bestActiveNode.line == 1 && bFirst) ?
                  textIndent : 0;
        double ratio = (textAlign == Constants.EN_JUSTIFY
                        || bestActiveNode.adjustRatio < 0) ? bestActiveNode.adjustRatio : 0;

        // add nodes at the beginning of the list, as they are found
        // backwards, from the last one to the first one

        // the first time this method is called, initialize activePossibility
        if (activePossibility == -1) {
            activePossibility = 0;
            addedPositions = 0;
        }

        if (addedPositions == lineLayouts.getLineCount(activePossibility)) {
            // current possibility is full: move on to the next one
            activePossibility ++;
            addedPositions = 0;
            //System.out.println(" ");
        }

        //System.out.println("LLM> (" + (lineLayouts.getLineNumber(activePossibility) - addedPositions) + ") difference = " + difference + " ratio = " + ratio);
        // for the last line of a possibility (addedPositions == 0) the
        // filler glue's shrinkability is removed from the available shrink
        lineLayouts.addBreakPosition(makeLineBreakPosition(par,
                (bestActiveNode.line > 1 ? bestActiveNode.previous.position + 1: 0),
                bestActiveNode.position,
                bestActiveNode.availableShrink - (addedPositions > 0 ? 0 : ((Paragraph)par).lineFiller.opt - ((Paragraph)par).lineFiller.min), bestActiveNode.availableStretch, difference, ratio, indent),
                activePossibility);
        addedPositions ++;
    }

    /* reset activePossibility, as if breakpoints have not yet been computed
     */
    public void resetAlgorithm() {
        activePossibility = -1;
    }

    /**
     * Builds a LineBreakPosition for the elements between the given indices,
     * computing the line's vertical extent from its content (unless
     * line-stacking-strategy is font-height). Also updates the outer
     * constantLineHeight as a side effect.
     */
    private LineBreakPosition makeLineBreakPosition(KnuthSequence par,
                                                    int firstElementIndex,
                                                    int lastElementIndex,
                                                    int availableShrink, int availableStretch, int difference,
                                                    double ratio,
                                                    int indent) {
        // line height calculation
        int halfLeading = (lineHeight - lead - follow) / 2;
        // height before the main baseline
        int lineLead = lead;
        // maximum size of top and bottom alignment
        int maxtb = follow;
        // max size of middle alignment before and after the middle baseline
        int middlefollow = maxtb;

        // true if this line contains only zero-height, auxiliary boxes
        // and the actual line width is 0; in this case, the line "collapses"
        // i.e. the line area will have bpd = 0
        boolean bZeroHeightLine = (difference == iLineWidth);

        // if line-stacking-strategy is "font-height", the line height
        // is not affected by its content
        if (fobj.getLineStackingStrategy() != EN_FONT_HEIGHT) {
            ListIterator inlineIterator
                = par.listIterator(firstElementIndex);
            for (int j = firstElementIndex;
                 j <= lastElementIndex;
                 j++) {
                KnuthElement element = (KnuthElement) inlineIterator.next();
                if (element.isBox()) {
                    // grow the line's extents to fit this box
                    if (((KnuthInlineBox) element).getLead() > lineLead) {
                        lineLead = ((KnuthInlineBox) element).getLead();
                    }
                    if (((KnuthInlineBox) element).getTotal() > maxtb) {
                        maxtb = ((KnuthInlineBox) element).getTotal();
                    }
                    if (((KnuthInlineBox) element).getMiddle() > lineLead + middleShift) {
                        lineLead += ((KnuthInlineBox) element).getMiddle()
                                    - lineLead - middleShift;
                    }
                    if (((KnuthInlineBox) element).getMiddle() > middlefollow - middleShift) {
                        middlefollow += ((KnuthInlineBox) element).getMiddle()
                                        - middlefollow + middleShift;
                    }
                    // any non-auxiliary or non-zero-height box stops the collapse
                    if (bZeroHeightLine
                        && (!element.isAuxiliary()
                            || ((KnuthInlineBox) element).getTotal() > 0
                            || ((KnuthInlineBox) element).getLead() > 0
                            || ((KnuthInlineBox) element).getMiddle() > 0)) {
                        bZeroHeightLine = false;
                    }
                }
            }

            if (maxtb - lineLead > middlefollow) {
                middlefollow = maxtb - lineLead;
            }
        }

        // remember this line's bpd for the element-creation phase
        constantLineHeight = lineLead + middlefollow + (lineHeight - lead - follow);

        if (bZeroHeightLine) {
            // collapsed line: bpd = 0
            return new LineBreakPosition(thisLLM,
                                         knuthParagraphs.indexOf(par),
                                         lastElementIndex,
                                         availableShrink, availableStretch, difference, ratio, 0, indent,
                                         0, iLineWidth,
                                         0, 0, 0);
        } else {
            return new LineBreakPosition(thisLLM,
                                         knuthParagraphs.indexOf(par),
                                         lastElementIndex,
                                         availableShrink, availableStretch, difference, ratio, 0, indent,
                                         lineLead + middlefollow + (lineHeight - lead - follow), iLineWidth,
                                         lineLead + halfLeading,
                                         - lineLead, middlefollow);
        }
    }

    /** Delegates to the superclass; kept for access from the outer class. */
    public int findBreakingPoints(Paragraph par, /*int lineWidth,*/
                                  double threshold, boolean force,
                                  boolean hyphenationAllowed) {
        return super.findBreakingPoints(par, /*lineWidth,*/
                threshold, force, hyphenationAllowed);
    }

    /**
     * Prunes the active-node set: with justified page alignment, keeps all
     * nodes whose demerits are acceptable (multiple layouts); otherwise
     * keeps only the single best node.
     *
     * @return the line count of the best active node
     */
    protected int filterActiveNodes() {
        KnuthNode bestActiveNode = null;

        if (pageAlignment == EN_JUSTIFY) {
            // leave all active nodes and find the optimum line number
            //System.out.println("LBA.filterActiveNodes> " + activeNodeCount + " layouts");
            for (int i = startLine; i < endLine; i++) {
                for (KnuthNode node = getNode(i); node != null; node = node.next) {
                    //System.out.println(" + lines = " + node.line + " demerits = " + node.totalDemerits);
                    bestActiveNode = compareNodes(bestActiveNode, node);
                }
            }

            // scan the node set once again and remove some nodes
            //System.out.println("LBA.filterActiveList> layout selection");
            for (int i = startLine; i < endLine; i++) {
                for (KnuthNode node = getNode(i); node != null; node = node.next) {
                    //if (Math.abs(node.line - bestActiveNode.line) > maxDiff) {
                    //if (false) {
                    if (node.line != bestActiveNode.line
                        && node.totalDemerits > MAX_DEMERITS) {
                        //System.out.println(" XXX lines = " + node.line + " demerits = " + node.totalDemerits);
                        removeNode(i, node);
                    } else {
                        //System.out.println(" ok lines = " + node.line + " demerits = " + node.totalDemerits);
                    }
                }
            }
        } else {
            // leave only the active node with fewest total demerits
            for (int i = startLine; i < endLine; i++) {
                for (KnuthNode node = getNode(i); node != null; node = node.next) {
                    bestActiveNode = compareNodes(bestActiveNode, node);
                    if (node != bestActiveNode) {
                        removeNode(i, node);
                    }
                }
            }
        }
        return bestActiveNode.line;
    }
}
// bpd of a single line (millipoints); updated by makeLineBreakPosition()
// and used to size the Knuth elements representing lines
private int constantLineHeight = 12000;
/**
 * Create a new Line Layout Manager.
 * This is used by the block layout manager to create
 * line managers for handling inline areas flowing into line areas.
 *
 * @param block the block formatting object owning this LM
 * @param lh the default line height
 * @param l the default lead, from top to baseline
 * @param f the default follow, from baseline to bottom
 * @param ms the offset of the middle baseline with respect to the main baseline
 */
public LineLayoutManager(Block block, int lh, int l, int f, int ms) {
    super(block);
    fobj = block;
    // the child FObj are owned by the parent BlockLM
    // this LM has all its childLMs preloaded
    fobjIter = null;
    lineHeight = lh;
    lead = l;
    follow = f;
    middleShift = ms;
    initialize(); // Normally done when started by parent!
}
/**
 * Returns the Knuth elements representing the lines of this block.
 * Phase 1 (first call only) collects the inline Knuth elements from the
 * child LMs; phase 2 finds the optimal line breaks and converts them
 * into block-level elements.
 *
 * @param context   the layout context (its stack limit gives the line width)
 * @param alignment vertical alignment of the containing page/flow
 * @return the block-level element list, or null when there is no content
 */
public LinkedList getNextKnuthElements(LayoutContext context, int alignment) {
    // Get a break from currently active child LM
    // Set up constraints for inline level managers

    InlineLevelLayoutManager curLM ; // currently active LM

    // IPD remaining in line
    MinOptMax availIPD = context.getStackLimit();

    clearPrevIPD();
    int iPrevLineEnd = vecInlineBreaks.size();

    if (iPrevLineEnd == 0 && bTextAlignment == EN_START) {
        // reserve room for the text-indent on the first line
        // NOTE(review): this mutates the context's stack limit in place
        availIPD.subtract(new MinOptMax(textIndent.getValue()));
    }

    //PHASE 1: Create Knuth elements
    if (knuthParagraphs == null) {
        // it's the first time this method is called
        knuthParagraphs = new ArrayList();

        // here starts Knuth's algorithm
        //TODO availIPD should not really be used here, so we can later support custom line
        //widths for each line (side-floats, differing available IPD after page break)
        collectInlineKnuthElements(context, availIPD);
    } else {
        // this method has been called before
        // all line breaks are already calculated
    }

    // return finished when there's no content
    if (knuthParagraphs.size() == 0) {
        setFinished(true);
        return null;
    }

    //PHASE 2: Create line breaks
    return findOptimalLineBreakingPoints(alignment);
    /*
    LineBreakPosition lbp = null;
    if (breakpoints == null) {
        // find the optimal line breaking points for each paragraph
        breakpoints = new ArrayList();
        ListIterator paragraphsIterator
            = knuthParagraphs.listIterator(knuthParagraphs.size());
        Paragraph currPar = null;
        while (paragraphsIterator.hasPrevious()) {
            currPar = (Paragraph) paragraphsIterator.previous();
            findBreakingPoints(currPar, context.getStackLimit().opt);
        }
    }*/

    //PHASE 3: Return lines

    /*
    // get a break point from the list
    lbp = (LineBreakPosition) breakpoints.get(iReturnedLBP ++);
    if (iReturnedLBP == breakpoints.size()) {
        setFinished(true);
    }

    BreakPoss curLineBP = new BreakPoss(lbp);
    curLineBP.setFlag(BreakPoss.ISLAST, isFinished());
    curLineBP.setStackingSize(new MinOptMax(lbp.lineHeight));
    return curLineBP;
    */
}
/**
 * Phase 1 of Knuth algorithm: Collect all inline Knuth elements before determining line breaks.
 * Consecutive non-auxiliary boxes from different LMs are merged by asking the
 * previous LM to add a letter space; preserved linefeeds (penalty of -INFINITE)
 * close the current Paragraph and start a new one.
 *
 * @param context the LayoutContext
 * @param availIPD available IPD for line (should be removed!)
 */
private void collectInlineKnuthElements(LayoutContext context, MinOptMax availIPD) {
    LayoutContext inlineLC = new LayoutContext(context);

    InlineLevelLayoutManager curLM;
    KnuthElement thisElement = null;
    LinkedList returnedList = null;
    iLineWidth = context.getStackLimit().opt;

    // convert all the text in a sequence of paragraphs made
    // of KnuthBox, KnuthGlue and KnuthPenalty objects
    boolean bPrevWasKnuthBox = false;
    KnuthBox prevBox = null;

    Paragraph knuthPar = new Paragraph(this,
                                       bTextAlignment, bTextAlignmentLast,
                                       textIndent.getValue());
    knuthPar.startParagraph(availIPD.opt);
    while ((curLM = (InlineLevelLayoutManager) getChildLM()) != null) {
        if ((returnedList
             = curLM.getNextKnuthElements(inlineLC,
                                          effectiveAlignment))
            != null) {
            if (returnedList.size() == 0) {
                continue;
            }
            // look at the first element
            thisElement = (KnuthElement) returnedList.getFirst();
            if (thisElement.isBox() && !thisElement.isAuxiliary()
                && bPrevWasKnuthBox) {
                prevBox = (KnuthBox) knuthPar.removeLast();
                LinkedList oldList = new LinkedList();
                // if there are two consecutive KnuthBoxes the
                // first one does not represent a whole word,
                // so it must be given one more letter space
                if (!prevBox.isAuxiliary()) {
                    // if letter spacing is constant,
                    // only prevBox needs to be replaced;
                    oldList.add(prevBox);
                } else {
                    // prevBox is the last element
                    // in the sub-sequence
                    //   <box> <aux penalty> <aux glue> <aux box>
                    // the letter space is added to <aux glue>,
                    // while the other elements are not changed
                    // (addFirst keeps the original order: penalty, glue, box)
                    oldList.add(prevBox);
                    oldList.addFirst((KnuthGlue) knuthPar.removeLast());
                    oldList.addFirst((KnuthPenalty) knuthPar.removeLast());
                }

                // adding a letter space could involve, according to the text
                // represented by oldList, replacing a glue element or adding
                // new elements
                knuthPar.addAll(((InlineLevelLayoutManager)
                                 prevBox.getLayoutManager())
                                .addALetterSpaceTo(oldList));

                if (((KnuthInlineBox) prevBox).isAnchor()) {
                    // prevBox represents a footnote citation: copy footnote info
                    // from prevBox to the new box
                    KnuthInlineBox newBox = (KnuthInlineBox) knuthPar.getLast();
                    newBox.setFootnoteBodyLM(((KnuthInlineBox) prevBox).getFootnoteBodyLM());
                }
            }

            // look at the last element
            KnuthElement lastElement = (KnuthElement) returnedList.getLast();
            boolean bForceLinefeed = false;
            if (lastElement.isBox()) {
                bPrevWasKnuthBox = true;
            } else {
                bPrevWasKnuthBox = false;
                if (lastElement.isPenalty()
                    && ((KnuthPenalty) lastElement).getP()
                       == -KnuthPenalty.INFINITE) {
                    // a penalty item whose value is -inf
                    // represents a preserved linefeed,
                    // which forces a line break
                    bForceLinefeed = true;
                    returnedList.removeLast();
                }
            }

            // add the new elements to the paragraph
            knuthPar.addAll(returnedList);
            if (bForceLinefeed) {
                if (knuthPar.size() == 0) {
                    //only a forced linefeed on this line
                    //-> compensate with a zero width box
                    knuthPar.add(new KnuthInlineBox(0, 0, 0, 0,
                                                    null, false));
                }
                knuthPar.endParagraph();
                knuthPar = new Paragraph(this,
                                         bTextAlignment, bTextAlignmentLast,
                                         textIndent.getValue());
                knuthPar.startParagraph(availIPD.opt);
                bPrevWasKnuthBox = false;
            }
        } else {
            // curLM returned null; this can happen
            // if it has nothing more to layout,
            // so just iterate once more to see
            // if there are other children
        }
    }
    knuthPar.endParagraph();
    ElementListObserver.observe(knuthPar, "line", null);
}
/**
* Find a set of breaking points.
* This method is called only once by getNextBreakPoss, and it
* subsequently calls the other findBreakingPoints() method with
* different parameters, until a set of breaking points is found.
*
* @param par the list of elements that must be parted
* into lines
* @param lineWidth the desired length of the lines
*/
/*
private void findBreakingPoints(Paragraph par, int lineWidth) {
// maximum adjustment ratio permitted
float maxAdjustment = 1;
// first try
if (!findBreakingPoints(par, lineWidth, maxAdjustment, false)) {
// the first try failed, now try something different
log.debug("No set of breaking points found with maxAdjustment = " + maxAdjustment);
if (hyphProps.hyphenate == Constants.EN_TRUE) {
// consider every hyphenation point as a legal break
findHyphenationPoints(par);
} else {
// try with a higher threshold
maxAdjustment = 5;
}
if (!findBreakingPoints(par, lineWidth, maxAdjustment, false)) {
// the second try failed too, try with a huge threshold;
// if this fails too, use a different algorithm
log.debug("No set of breaking points found with maxAdjustment = " + maxAdjustment
+ (hyphProps.hyphenate == Constants.EN_TRUE ? " and hyphenation" : ""));
maxAdjustment = 20;
if (!findBreakingPoints(par, lineWidth, maxAdjustment, true)) {
log.debug("No set of breaking points found, using first-fit algorithm");
}
}
}
}
private boolean findBreakingPoints(Paragraph par, int lineWidth,
double threshold, boolean force) {
KnuthParagraph knuthPara = new KnuthParagraph(par);
int lines = knuthPara.findBreakPoints(lineWidth, threshold, force);
if (lines == 0) {
return false;
}
for (int i = lines-1; i >= 0; i--) {
int line = i+1;
if (log.isTraceEnabled()) {
log.trace("Making line from " + knuthPara.getStart(i) + " to " +
knuthPara.getEnd(i));
}
// compute indent and adjustment ratio, according to
// the value of text-align and text-align-last
int difference = knuthPara.getDifference(i);
if (line == lines) {
difference += par.lineFillerWidth;
}
int textAlign = (line < lines)
? bTextAlignment : bTextAlignmentLast;
int indent = (textAlign == EN_CENTER)
? difference / 2
: (textAlign == EN_END) ? difference : 0;
indent += (line == 1 && knuthParagraphs.indexOf(par) == 0)
? textIndent.getValue() : 0;
double ratio = (textAlign == EN_JUSTIFY)
? knuthPara.getAdjustRatio(i) : 0;
int start = knuthPara.getStart(i);
int end = knuthPara.getEnd(i);
makeLineBreakPosition(par, start, end, 0, ratio, indent);
}
return true;
}
private void makeLineBreakPosition(Paragraph par,
int firstElementIndex, int lastElementIndex,
int insertIndex, double ratio, int indent) {
// line height calculation
int halfLeading = (lineHeight - lead - follow) / 2;
// height above the main baseline
int lineLead = lead + halfLeading;
// maximum size of top and bottom alignment
int maxtb = follow + halfLeading;
// max size of middle alignment above and below the middle baseline
int middlefollow = maxtb;
ListIterator inlineIterator
= par.listIterator(firstElementIndex);
for (int j = firstElementIndex;
j <= lastElementIndex;
j++) {
KnuthElement element = (KnuthElement) inlineIterator.next();
if (element.isBox()) {
KnuthInlineBox box = (KnuthInlineBox)element;
if (box.getLead() > lineLead) {
lineLead = box.getLead();
}
if (box.getTotal() > maxtb) {
maxtb = box.getTotal();
}
if (box.getMiddle() > lineLead + middleShift) {
lineLead += box.getMiddle()
- lineLead - middleShift;
}
if (box.getMiddle() > middlefollow - middleShift) {
middlefollow += box.getMiddle()
- middlefollow + middleShift;
}
}
}
if (maxtb - lineLead > middlefollow) {
middlefollow = maxtb - lineLead;
}
breakpoints.add(insertIndex,
new LineBreakPosition(this,
knuthParagraphs.indexOf(par),
lastElementIndex ,
ratio, 0, indent,
lineLead + middlefollow,
lineLead));
}*/
/**
 * Phase 2 of Knuth algorithm: find optimal break points.
 * For each paragraph, tries increasingly permissive passes: first with a
 * tight adjustment threshold, then allowing hyphenation (or a larger
 * threshold), and finally forcing a solution with a huge threshold.
 *
 * @param alignment alignment of the paragraph
 * @return a list of Knuth elements representing broken lines
 */
private LinkedList findOptimalLineBreakingPoints(int alignment) {

    // find the optimal line breaking points for each paragraph
    // (iterated backwards; results are inserted at index 0 so
    // lineLayoutsList ends up in paragraph order)
    ListIterator paragraphsIterator
        = knuthParagraphs.listIterator(knuthParagraphs.size());
    Paragraph currPar = null;
    LineBreakingAlgorithm alg;
    lineLayoutsList = new ArrayList(knuthParagraphs.size());
    while (paragraphsIterator.hasPrevious()) {
        lineLayouts = new LineLayoutPossibilities();
        currPar = (Paragraph) paragraphsIterator.previous();
        double maxAdjustment = 1;
        int iBPcount = 0;
        alg = new LineBreakingAlgorithm(alignment,
                                        bTextAlignment, bTextAlignmentLast,
                                        textIndent.getValue(), currPar.lineFiller.opt,
                                        lineHeight, lead, follow, middleShift,
                                        (knuthParagraphs.indexOf(currPar) == 0),
                                        this);

        if (hyphProps.hyphenate == EN_TRUE) {
            findHyphenationPoints(currPar);
        }

        // first try
        boolean bHyphenationAllowed = false;
        alg.setConstantLineWidth(iLineWidth);
        iBPcount = alg.findBreakingPoints(currPar,
                                          maxAdjustment, false, bHyphenationAllowed);
        if (iBPcount == 0 || alignment == EN_JUSTIFY) {
            // if the first try found a set of breaking points, save them
            if (iBPcount > 0) {
                alg.resetAlgorithm();
                lineLayouts.savePossibilities(false);
            } else {
                // the first try failed
                log.debug("No set of breaking points found with maxAdjustment = " + maxAdjustment);
            }

            // now try something different
            log.debug("Hyphenation possible? " + (hyphProps.hyphenate == EN_TRUE));
            if (hyphProps.hyphenate == EN_TRUE) {
                // consider every hyphenation point as a legal break
                bHyphenationAllowed = true;
            } else {
                // try with a higher threshold
                maxAdjustment = 5;
            }

            if ((iBPcount
                 = alg.findBreakingPoints(currPar,
                                          maxAdjustment, false, bHyphenationAllowed)) == 0) {
                // the second try failed too, try with a huge threshold
                // and force the algorithm to find
                // a set of breaking points
                log.debug("No set of breaking points found with maxAdjustment = " + maxAdjustment
                          + (hyphProps.hyphenate == EN_TRUE ? " and hyphenation" : ""));
                maxAdjustment = 20;
                iBPcount
                    = alg.findBreakingPoints(currPar,
                                             maxAdjustment, true, bHyphenationAllowed);
            }

            // use non-hyphenated breaks, when possible
            lineLayouts.restorePossibilities();

            /* extension (not in the XSL FO recommendation): if vertical alignment
               is justify and the paragraph has only one layout, try using
               shorter or longer lines */
            //TODO This code snippet is disabled. Reenable?
            // (note: intentionally dead — guarded by "false &&")
            if (false && alignment == EN_JUSTIFY && bTextAlignment == EN_JUSTIFY) {
                //System.out.println("LLM.getNextKnuthElements> layouts with more lines? " + lineLayouts.canUseMoreLines());
                //System.out.println("                         layouts with fewer lines? " + lineLayouts.canUseLessLines());
                if (!lineLayouts.canUseMoreLines()) {
                    alg.resetAlgorithm();
                    lineLayouts.savePossibilities(true);
                    // try with shorter lines
                    int savedLineWidth = iLineWidth;
                    iLineWidth = (int) (iLineWidth * 0.95);
                    iBPcount = alg.findBreakingPoints(currPar,
                                                      maxAdjustment, true, bHyphenationAllowed);
                    // use normal lines, when possible
                    lineLayouts.restorePossibilities();
                    iLineWidth = savedLineWidth;
                }
                if (!lineLayouts.canUseLessLines()) {
                    alg.resetAlgorithm();
                    lineLayouts.savePossibilities(true);
                    // try with longer lines
                    int savedLineWidth = iLineWidth;
                    iLineWidth = (int) (iLineWidth * 1.05);
                    alg.setConstantLineWidth(iLineWidth);
                    iBPcount = alg.findBreakingPoints(currPar,
                                                      maxAdjustment, true, bHyphenationAllowed);
                    // use normal lines, when possible
                    lineLayouts.restorePossibilities();
                    iLineWidth = savedLineWidth;
                }
                //System.out.println("LLM.getNextKnuthElements> now, layouts with more lines? " + lineLayouts.canUseMoreLines());
                //System.out.println("                          now, layouts with fewer lines? " + lineLayouts.canUseLessLines());
            }
        }
        lineLayoutsList.add(0, lineLayouts);
    }

    setFinished(true);

    //Post-process the line breaks found
    return postProcessLineBreaks(alignment);
}
/**
 * Converts the per-paragraph line layouts into a single block-level
 * element list. With justified page alignment a multi-layout sequence is
 * created (see createElements); otherwise each chosen line becomes one
 * KnuthBlockBox carrying its LineBreakPosition and the footnotes cited on
 * that line. Widows/orphans constrain where break penalties are inserted.
 *
 * @param alignment vertical alignment of the containing page/flow
 * @return the block-level element list
 */
private LinkedList postProcessLineBreaks(int alignment) {

    LinkedList returnList = new LinkedList();

    for (int p = 0; p < knuthParagraphs.size(); p ++) {
        // null penalty between paragraphs
        if (p > 0
            && !((BlockLevelLayoutManager) parentLM).mustKeepTogether()) {
            returnList.add(new KnuthPenalty(0, 0, false, new Position(this), false));
        }

        lineLayouts = (LineLayoutPossibilities)lineLayoutsList.get(p);
        if (alignment == EN_JUSTIFY) {
            /* justified vertical alignment (not in the XSL FO recommendation):
               create a multi-layout sequence whose elements will contain
               a conventional Position */
            Position returnPosition = new LeafPosition(this, p);
            createElements(returnList, lineLayouts, returnPosition);
        } else {
            /* "normal" vertical alignment: create a sequence whose boxes
               represent effective lines, and contain LineBreakPositions */
            Position returnPosition = new LeafPosition(this, p);
            int startIndex = 0;
            for (int i = 0;
                 i < lineLayouts.getChosenLineCount();
                 i++) {
                if (!((BlockLevelLayoutManager) parentLM).mustKeepTogether()
                    && i >= fobj.getOrphans()
                    && i <= lineLayouts.getChosenLineCount() - fobj.getWidows()
                    && returnList.size() > 0) {
                    // null penalty allowing a page break between lines
                    returnList.add(new KnuthPenalty(0, 0, false, returnPosition, false));
                }
                int endIndex = ((LineBreakPosition) lineLayouts.getChosenPosition(i)).getLeafPos();
                // create a list of the FootnoteBodyLM handling footnotes
                // whose citations are in this line
                LinkedList footnoteList = new LinkedList();
                ListIterator elementIterator = ((Paragraph) knuthParagraphs.get(p)).listIterator(startIndex);
                while (elementIterator.nextIndex() <= endIndex) {
                    KnuthElement element = (KnuthElement) elementIterator.next();
                    if (element instanceof KnuthInlineBox
                        && ((KnuthInlineBox) element).isAnchor()) {
                        footnoteList.add(((KnuthInlineBox) element).getFootnoteBodyLM());
                    }
                }
                startIndex = endIndex + 1;
                returnList.add(new KnuthBlockBox(((LineBreakPosition) lineLayouts.getChosenPosition(i)).lineHeight,
                                                 footnoteList, lineLayouts.getChosenPosition(i), false));
            }
        }
    }

    return returnList;
}
/**
 * Creates the element sequence for a paragraph under justified vertical
 * alignment: the lines are partitioned into a first unbreakable group,
 * inner lines, optional lines (used to fill more space), eliminable lines
 * (omitted to fill less space) and a last unbreakable group, with glue and
 * penalties (LINE_NUMBER_ADJUSTMENT) letting the page breaker vary the
 * effective line count. Conditional variants apply only if the paragraph
 * is not parted across pages.
 *
 * @param list            the list the elements are appended to
 * @param lineLayouts     the line-count possibilities of the paragraph
 * @param elementPosition the Position stored in each created element
 */
private void createElements(List list, LineLayoutPossibilities lineLayouts,
                            Position elementPosition) {
    /* number of normal, inner lines */
    int nInnerLines = 0;
    /* number of lines that can be used in order to fill more space */
    int nOptionalLines = 0;
    /* number of lines that can be used in order to fill more space
       only if the paragraph is not parted */
    int nConditionalOptionalLines = 0;
    /* number of lines that can be omitted in order to fill less space */
    int nEliminableLines = 0;
    /* number of lines that can be omitted in order to fill less space
       only if the paragraph is not parted */
    int nConditionalEliminableLines = 0;
    /* number of the first unbreakable lines */
    int nFirstLines = fobj.getOrphans();
    /* number of the last unbreakable lines */
    int nLastLines = fobj.getWidows();
    /* sub-sequence used to separate the elements representing different lines */
    List breaker = new LinkedList();

    /* comment out the next lines in order to test particular situations */
    // classify the line counts according to where orphans+widows falls
    // relative to the min/opt/max line counts
    if (fobj.getOrphans() + fobj.getWidows() <= lineLayouts.getMinLineCount()) {
        nInnerLines = lineLayouts.getMinLineCount() - (fobj.getOrphans() + fobj.getWidows());
        nOptionalLines = lineLayouts.getMaxLineCount() - lineLayouts.getOptLineCount();
        nEliminableLines = lineLayouts.getOptLineCount() - lineLayouts.getMinLineCount();
    } else if (fobj.getOrphans() + fobj.getWidows() <= lineLayouts.getOptLineCount()) {
        nOptionalLines = lineLayouts.getMaxLineCount() - lineLayouts.getOptLineCount();
        nEliminableLines = lineLayouts.getOptLineCount() - (fobj.getOrphans() + fobj.getWidows());
        nConditionalEliminableLines = (fobj.getOrphans() + fobj.getWidows()) - lineLayouts.getMinLineCount();
    } else if (fobj.getOrphans() + fobj.getWidows() <= lineLayouts.getMaxLineCount()) {
        nOptionalLines = lineLayouts.getMaxLineCount() - (fobj.getOrphans() + fobj.getWidows());
        nConditionalOptionalLines = (fobj.getOrphans() + fobj.getWidows()) - lineLayouts.getOptLineCount();
        nConditionalEliminableLines = lineLayouts.getOptLineCount() - lineLayouts.getMinLineCount();
        nFirstLines -= nConditionalOptionalLines;
    } else {
        nConditionalOptionalLines = lineLayouts.getMaxLineCount() - lineLayouts.getOptLineCount();
        nConditionalEliminableLines = lineLayouts.getOptLineCount() - lineLayouts.getMinLineCount();
        nFirstLines = lineLayouts.getOptLineCount();
        nLastLines = 0;
    }
    /* comment out the previous lines in order to test particular situations */

    /* use these lines to test particular situations
    nInnerLines = 0;
    nOptionalLines = 1;
    nConditionalOptionalLines = 2;
    nEliminableLines = 0;
    nConditionalEliminableLines = 0;
    nFirstLines = 1;
    nLastLines = 3;
    */

    // build the separator sub-sequence: a legal break point, preceded
    // (when conditional lines exist) by glue that retracts them
    if (nLastLines != 0
        && (nConditionalOptionalLines > 0 || nConditionalEliminableLines > 0)) {
        breaker.add(new KnuthPenalty(0, KnuthElement.INFINITE, false, elementPosition, false));
        breaker.add(new KnuthGlue(0, -nConditionalOptionalLines * constantLineHeight,
                                  -nConditionalEliminableLines * constantLineHeight,
                                  LINE_NUMBER_ADJUSTMENT, elementPosition, false));
        breaker.add(new KnuthPenalty(nConditionalOptionalLines * constantLineHeight,
                                     0, false, elementPosition, false));
        breaker.add(new KnuthGlue(0, nConditionalOptionalLines * constantLineHeight,
                                  nConditionalEliminableLines * constantLineHeight,
                                  LINE_NUMBER_ADJUSTMENT, elementPosition, false));
    } else if (nLastLines != 0) {
        breaker.add(new KnuthPenalty(0, 0, false, elementPosition, false));
    }

    //System.out.println("first=" + nFirstLines + " inner=" + nInnerLines
    //                   + " optional=" + nOptionalLines + " eliminable=" + nEliminableLines
    //                   + " last=" + nLastLines
    //                   + " (condOpt=" + nConditionalOptionalLines + " condEl=" + nConditionalEliminableLines + ")");

    // creation of the elements:
    // first group of lines
    list.add(new KnuthBox(nFirstLines * constantLineHeight, elementPosition,
                          (nLastLines == 0
                           && nConditionalOptionalLines == 0
                           && nConditionalEliminableLines == 0 ? true : false)));
    if (nConditionalOptionalLines > 0
        || nConditionalEliminableLines > 0) {
        list.add(new KnuthPenalty(0, KnuthElement.INFINITE, false, elementPosition, false));
        list.add(new KnuthGlue(0, nConditionalOptionalLines * constantLineHeight,
                               nConditionalEliminableLines * constantLineHeight,
                               LINE_NUMBER_ADJUSTMENT, elementPosition, false));
        list.add(new KnuthBox(0, elementPosition,
                              (nLastLines == 0 ? true : false)));
    }

    // optional lines (zero-width box plus stretchable glue)
    for (int i = 0; i < nOptionalLines; i++) {
        list.addAll(breaker);
        list.add(new KnuthBox(0, elementPosition, false));
        list.add(new KnuthPenalty(0, KnuthElement.INFINITE, false, elementPosition, false));
        list.add(new KnuthGlue(0, 1 * constantLineHeight, 0,
                               LINE_NUMBER_ADJUSTMENT, elementPosition, false));
        list.add(new KnuthBox(0, elementPosition, false));
    }

    // eliminable lines (full-height box plus shrinkable glue)
    for (int i = 0; i < nEliminableLines; i++) {
        list.addAll(breaker);
        list.add(new KnuthBox(1 * constantLineHeight, elementPosition, false));
        list.add(new KnuthPenalty(0, KnuthElement.INFINITE, false, elementPosition, false));
        list.add(new KnuthGlue(0, 0, 1 * constantLineHeight,
                               LINE_NUMBER_ADJUSTMENT, elementPosition, false));
        list.add(new KnuthBox(0, elementPosition, false));
    }

    // inner lines
    for (int i = 0; i < nInnerLines; i++) {
        list.addAll(breaker);
        list.add(new KnuthBox(1 * constantLineHeight, elementPosition, false));
    }

    // last group of lines
    if (nLastLines > 0) {
        list.addAll(breaker);
        list.add(new KnuthBox(nLastLines * constantLineHeight,
                              elementPosition, true));
    }
}
/** @return always false: this LM imposes no keep-together constraint of its own */
public boolean mustKeepTogether() {
    return false;
}
/** @return always false: this LM imposes no keep-with-previous constraint */
public boolean mustKeepWithPrevious() {
    return false;
}
/** @return always false: this LM imposes no keep-with-next constraint */
public boolean mustKeepWithNext() {
    return false;
}
/**
 * Negotiates a block-progression-dimension adjustment by converting the
 * requested amount into a whole number of lines to add or remove, clamped
 * to what the paragraph's layout possibilities allow.
 *
 * @param adj         requested bpd adjustment (millipoints)
 * @param lastElement the element whose LeafPosition identifies the paragraph
 * @return the adjustment actually granted, as lines * constantLineHeight
 */
public int negotiateBPDAdjustment(int adj, KnuthElement lastElement) {
    LeafPosition pos = (LeafPosition)lastElement.getPosition();
    int totalAdj = adj;
    //if (lastElement.isPenalty()) {
    //    totalAdj += lastElement.getW();
    //}
    //int lineNumberDifference = (int)((double) totalAdj / constantLineHeight);
    // biased rounding: about 0.9 of a line height is needed before an
    // extra line is counted in either direction
    int lineNumberDifference = (int) Math.round((double) totalAdj / constantLineHeight + (adj > 0 ? - 0.4 : 0.4));
    //System.out.println(" LLM> variazione calcolata = " + ((double) totalAdj / constantLineHeight) + " variazione applicata = " + lineNumberDifference);
    lineLayouts = (LineLayoutPossibilities)lineLayoutsList.get(pos.getLeafPos());
    lineNumberDifference = lineLayouts.applyLineCountAdjustment(lineNumberDifference);
    return lineNumberDifference * constantLineHeight;
}
/**
 * Intentionally a no-op: this LM has no discardable space to remove.
 *
 * @param spaceGlue the glue that would be discarded (ignored)
 */
public void discardSpace(KnuthGlue spaceGlue) {
}
/**
 * Rebuilds the block-level element list after a line-count adjustment:
 * each chosen line becomes a KnuthBlockBox carrying its content ipd
 * (computed according to the horizontal alignment), with break penalties
 * inserted where widows/orphans permit.
 *
 * @param oldList   the previous element list (not read here)
 * @param alignment horizontal text alignment used to compute content ipd
 * @return the new element list
 */
public LinkedList getChangedKnuthElements(List oldList, int alignment) {
    LinkedList returnList = new LinkedList();
    for (int p = 0;
         p < knuthParagraphs.size();
         p ++) {
        lineLayouts = (LineLayoutPossibilities)lineLayoutsList.get(p);
        //System.out.println("demerits of the chosen layout: " + lineLayouts.getChosenDemerits());
        for (int i = 0;
             i < lineLayouts.getChosenLineCount();
             i ++) {
            if (!((BlockLevelLayoutManager) parentLM).mustKeepTogether()
                && i >= fobj.getOrphans()
                && i <= lineLayouts.getChosenLineCount() - fobj.getWidows()) {
                // null penalty allowing a page break between lines
                returnList.add(new KnuthPenalty(0, 0, false, new Position(this), false));
            }
            LineBreakPosition lbp = (LineBreakPosition) lineLayouts.getChosenPosition(i);
            //System.out.println("LLM.getChangedKnuthElements> lineWidth= " + lbp.lineWidth + " difference= " + lbp.difference);
            //System.out.println("                             shrink= " + lbp.availableShrink + " stretch= " + lbp.availableStretch);

            //System.out.println("linewidth= " + lbp.lineWidth + " difference= " + lbp.difference + " indent= " + lbp.startIndent);
            MinOptMax contentIPD;
            if (alignment == EN_JUSTIFY) {
                // justified: the line can shrink or stretch around its natural width
                contentIPD = new MinOptMax(
                    lbp.lineWidth - lbp.difference - lbp.availableShrink,
                    lbp.lineWidth - lbp.difference,
                    lbp.lineWidth - lbp.difference + lbp.availableStretch);
            } else if (alignment == EN_CENTER) {
                contentIPD = new MinOptMax(lbp.lineWidth - 2 * lbp.startIndent);
            } else if (alignment == EN_END) {
                contentIPD = new MinOptMax(lbp.lineWidth - lbp.startIndent);
            } else {
                contentIPD = new MinOptMax(lbp.lineWidth - lbp.difference + lbp.startIndent);
            }
            returnList.add(new KnuthBlockBox(lbp.lineHeight,
                                             contentIPD,
                                             (lbp.ipdAdjust != 0 ? lbp.lineWidth - lbp.difference : 0),
                                             lbp, false));
        }
    }
    return returnList;
}
/**
 * Find hyphenation points for every word in the current paragraph.
 * Words may be split across several KnuthBoxes created by different
 * TextLayoutManagers; this method reassembles them, hyphenates the whole
 * word, then asks each involved LM to apply the changes to its fragment.
 * @param currPar the paragraph whose words will be hyphenated
 */
private void findHyphenationPoints(Paragraph currPar){
// hyphenate every word
ListIterator currParIterator
= currPar.listIterator(currPar.ignoreAtStart);
// list of TLM involved in hyphenation
LinkedList updateList = new LinkedList();
KnuthElement firstElement = null;
KnuthElement nextElement = null;
// current InlineLevelLayoutManager
InlineLevelLayoutManager currLM = null;
// number of KnuthBox elements containing word fragments
int boxCount;
// number of auxiliary KnuthElements between KnuthBoxes
int auxCount;
StringBuffer sbChars = null;
// find all hyphenation points
while (currParIterator.hasNext()) {
firstElement = (KnuthElement) currParIterator.next();
// element created by a different LM: record where its run starts
if (firstElement.getLayoutManager() != currLM) {
currLM = (InlineLevelLayoutManager) firstElement.getLayoutManager();
if (currLM != null) {
updateList.add(new Update(currLM, currParIterator.previousIndex()));
} else {
break;
}
}
// collect word fragments, ignoring auxiliary elements;
// each word fragment was created by a different TextLM
if (firstElement.isBox() && !firstElement.isAuxiliary()) {
boxCount = 1;
auxCount = 0;
sbChars = new StringBuffer();
currLM.getWordChars(sbChars, firstElement.getPosition());
// look if next elements are boxes too
while (currParIterator.hasNext()) {
nextElement = (KnuthElement) currParIterator.next();
if (nextElement.isBox() && !nextElement.isAuxiliary()) {
// a non-auxiliary KnuthBox: append word chars
if (currLM != nextElement.getLayoutManager()) {
currLM = (InlineLevelLayoutManager) nextElement.getLayoutManager();
updateList.add(new Update(currLM, currParIterator.previousIndex()));
}
// append text to recreate the whole word
boxCount ++;
currLM.getWordChars(sbChars, nextElement.getPosition());
} else if (!nextElement.isAuxiliary()) {
// a non-auxiliary non-box KnuthElement: stop
// go back to the last box or auxiliary element
currParIterator.previous();
break;
} else {
// an auxiliary KnuthElement: simply ignore it
auxCount ++;
}
}
log.trace(" Word to hyphenate: " + sbChars.toString());
// find hyphenation points
HyphContext hc = getHyphenContext(sbChars);
// ask each LM to hyphenate its word fragment
if (hc != null) {
KnuthElement element = null;
// rewind over the boxes and auxiliary elements just visited
for (int i = 0; i < (boxCount + auxCount); i++) {
currParIterator.previous();
}
for (int i = 0; i < (boxCount + auxCount); i++) {
element = (KnuthElement) currParIterator.next();
if (element.isBox() && !element.isAuxiliary()) {
((InlineLevelLayoutManager)
element.getLayoutManager()).hyphenate(element.getPosition(), hc);
} else {
// nothing to do, element is an auxiliary KnuthElement
}
}
}
}
}
// create iterator for the updateList
ListIterator updateListIterator = updateList.listIterator();
Update currUpdate = null;
//int iPreservedElements = 0;
int iAddedElements = 0;
//int iRemovedElements = 0;
while (updateListIterator.hasNext()) {
// ask the LMs to apply the changes and return
// the new KnuthElements to replace the old ones
currUpdate = (Update) updateListIterator.next();
int fromIndex = currUpdate.iFirstIndex;
int toIndex;
if (updateListIterator.hasNext()) {
// peek at the next update to find where this LM's run ends
Update nextUpdate = (Update) updateListIterator.next();
toIndex = nextUpdate.iFirstIndex;
updateListIterator.previous();
} else {
// maybe this is not always correct!
toIndex = currPar.size() - currPar.ignoreAtEnd
- iAddedElements;
}
// applyChanges() returns true if the LM modifies its data,
// so it must return new KnuthElements to replace the old ones
if (((InlineLevelLayoutManager) currUpdate.inlineLM)
.applyChanges(currPar.subList(fromIndex + iAddedElements,
toIndex + iAddedElements))) {
// insert the new KnuthElements
LinkedList newElements = null;
newElements
= currUpdate.inlineLM.getChangedKnuthElements
(currPar.subList(fromIndex + iAddedElements,
toIndex + iAddedElements),
/*flaggedPenalty,*/ effectiveAlignment);
// remove the old elements
currPar.subList(fromIndex + iAddedElements,
toIndex + iAddedElements).clear();
// insert the new elements
currPar.addAll(fromIndex + iAddedElements, newElements);
iAddedElements += newElements.size() - (toIndex - fromIndex);
}
}
updateListIterator = null;
updateList.clear();
}
/**
 * Line area is always considered to act as a fence.
 * @param bNotFirst ignored by this implementation
 * @return always true
 */
protected boolean hasLeadingFence(boolean bNotFirst) {
return true;
}
/**
 * Line area is always considered to act as a fence.
 * @param bNotLast ignored by this implementation
 * @return always true
 */
protected boolean hasTrailingFence(boolean bNotLast) {
return true;
}
/**
 * Hyphenate the given word and wrap the resulting hyphenation offsets in a
 * HyphContext.
 *
 * The hyphenation properties come from the block level. Note that according
 * to the spec they also "apply to" fo:character; if the language changed in
 * the middle of a "word" the effect would be strange, so in that case we
 * would really be dealing with several words. Ideally the hyphenation
 * properties should be taken from the TextLM that generated the buffer,
 * since they inherit and could be specified on an inline or wrapper below
 * the block level.
 *
 * @param sbChars the characters of the word to hyphenate
 * @return a HyphContext over the hyphenation points, or null if the word
 * could not be hyphenated
 */
private HyphContext getHyphenContext(StringBuffer sbChars) {
    Hyphenation hyphenation = Hyphenator.hyphenate(hyphProps.language,
            hyphProps.country, sbChars.toString(),
            hyphProps.hyphenationRemainCharacterCount,
            hyphProps.hyphenationPushCharacterCount);
    // no hyphenation possible for this word
    if (hyphenation == null) {
        return null;
    }
    // the hyph structure contains the offsets the caller will iterate over
    return new HyphContext(hyphenation.getHyphenationPoints());
}
/**
 * Reset the returned-line bookkeeping to the given position.
 * A null position rewinds to the very beginning.
 *
 * @param resetPos the position to reset to, or null to restart
 */
public void resetPosition(Position resetPos) {
    if (resetPos == null) {
        // full rewind: start returning lines from the first one again
        setFinished(false);
        iReturnedLBP = 0;
        return;
    }
    if (isFinished()) {
        // when finished, iReturnedLBP equals the number of breakpoints,
        // so indexing the chosen position there would be out of bounds
        setFinished(false);
        iReturnedLBP--;
    }
    // step backwards until the chosen position matches the reset target
    while ((LineBreakPosition) lineLayouts.getChosenPosition(iReturnedLBP)
            != (LineBreakPosition) resetPos) {
        iReturnedLBP--;
    }
    // resume returning lines just after the reset position
    iReturnedLBP++;
}
/**
 * Add the areas with the break points.
 * For each LineBreakPosition, builds a LineArea, configures a child
 * LayoutContext with the line's metrics and adjustment ratios, and lets the
 * child LMs add their inline areas into it.
 *
 * @param parentIter the iterator of break positions
 * @param context the context for adding areas
 */
public void addAreas(PositionIterator parentIter,
LayoutContext context) {
LayoutManager childLM;
LayoutContext lc = new LayoutContext(0);
int iCurrParIndex;
while (parentIter.hasNext()) {
Position pos = (Position) parentIter.next();
if (pos instanceof LineBreakPosition) {
ListIterator paragraphIterator = null;
KnuthElement tempElement = null;
// the TLM which created the last KnuthElement in this line
LayoutManager lastLM = null;
LineBreakPosition lbp = (LineBreakPosition) pos;
LineArea lineArea = new LineArea();
lineArea.setStartIndent(lbp.startIndent);
lineArea.setBPD(lbp.lineHeight);
// propagate the line metrics computed at breaking time
lc.setBaseline(lbp.baseline);
lc.setLineHeight(lbp.lineHeight);
lc.setMiddleShift(middleShift);
lc.setTopShift(lbp.topShift);
lc.setBottomShift(lbp.bottomShift);
iCurrParIndex = lbp.iParIndex;
Paragraph currPar = (Paragraph) knuthParagraphs.get(iCurrParIndex);
iEndElement = lbp.getLeafPos();
// ignore the first elements added by the LineLayoutManager
iStartElement += (iStartElement == 0) ? currPar.ignoreAtStart : 0;
// ignore the last elements added by the LineLayoutManager
iEndElement -= (iEndElement == (currPar.size() - 1))
? currPar.ignoreAtEnd : 0;
// ignore the last element in the line if it is a KnuthGlue object
paragraphIterator = currPar.listIterator(iEndElement);
tempElement = (KnuthElement) paragraphIterator.next();
if (tempElement.isGlue()) {
iEndElement --;
// this returns the same KnuthElement
paragraphIterator.previous();
tempElement = (KnuthElement) paragraphIterator.previous();
}
lastLM = tempElement.getLayoutManager();
// ignore KnuthGlue and KnuthPenalty objects
// at the beginning of the line
paragraphIterator = currPar.listIterator(iStartElement);
tempElement = (KnuthElement) paragraphIterator.next();
while (!tempElement.isBox() && paragraphIterator.hasNext()) {
tempElement = (KnuthElement) paragraphIterator.next();
iStartElement ++;
}
// Add the inline areas to lineArea
PositionIterator inlinePosIter
= new KnuthPossPosIter(currPar, iStartElement,
iEndElement + 1);
iStartElement = lbp.getLeafPos() + 1;
if (iStartElement == currPar.size()) {
// advance to next paragraph
iStartElement = 0;
}
lc.setSpaceAdjust(lbp.dAdjust);
lc.setIPDAdjust(lbp.ipdAdjust);
lc.setLeadingSpace(new SpaceSpecifier(true));
lc.setTrailingSpace(new SpaceSpecifier(false));
lc.setFlags(LayoutContext.RESOLVE_LEADING_SPACE, true);
/* extension (not in the XSL FO recommendation): if the left and right margins
have been optimized, recompute indents and / or adjust ratio, according
to the paragraph horizontal alignment */
// NOTE(review): the three branches below are disabled ("false &&");
// they are kept as a sketch of the margin-optimization extension
if (false && bTextAlignment == EN_JUSTIFY) {
// re-compute space adjust ratio
int updatedDifference = context.getStackLimit().opt - lbp.lineWidth + lbp.difference;
double updatedRatio = 0.0;
if (updatedDifference > 0) {
updatedRatio = (float) updatedDifference / lbp.availableStretch;
} else if (updatedDifference < 0) {
updatedRatio = (float) updatedDifference / lbp.availableShrink;
}
lc.setIPDAdjust(updatedRatio);
//System.out.println("LLM.addAreas> old difference = " + lbp.difference + " new difference = " + updatedDifference);
//System.out.println(" old ratio = " + lbp.ipdAdjust + " new ratio = " + updatedRatio);
} else if (false && bTextAlignment == EN_CENTER) {
// re-compute indent
int updatedIndent = lbp.startIndent + (context.getStackLimit().opt - lbp.lineWidth) / 2;
lineArea.setStartIndent(updatedIndent);
} else if (false && bTextAlignment == EN_END) {
// re-compute indent
int updatedIndent = lbp.startIndent + (context.getStackLimit().opt - lbp.lineWidth);
lineArea.setStartIndent(updatedIndent);
}
setCurrentArea(lineArea);
setChildContext(lc);
while ((childLM = inlinePosIter.getNextChildLM()) != null) {
lc.setFlags(LayoutContext.LAST_AREA, (childLM == lastLM));
childLM.addAreas(inlinePosIter, lc);
lc.setLeadingSpace(lc.getTrailingSpace());
lc.setTrailingSpace(new SpaceSpecifier(false));
}
// when can this be null?
// if display-align is distribute, add space after
if (context.getSpaceAfter() > 0
&& (!context.isLastArea() || parentIter.hasNext())) {
lineArea.setBPD(lineArea.getBPD() + context.getSpaceAfter());
}
parentLM.addChildArea(lineArea);
} else {
// pos was the Position inside a penalty item, nothing to do
}
}
setCurrentArea(null); // ?? necessary
}
}
| src/java/org/apache/fop/layoutmgr/LineLayoutManager.java | /*
* Copyright 1999-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id: LineLayoutManager.java,v 1.17 2004/04/02 10:38:29 cbowditch Exp $ */
package org.apache.fop.layoutmgr;
import org.apache.fop.datatypes.Length;
import org.apache.fop.fo.Constants;
import org.apache.fop.fo.flow.Block;
import org.apache.fop.fo.properties.CommonHyphenation;
import org.apache.fop.hyphenation.Hyphenation;
import org.apache.fop.hyphenation.Hyphenator;
import org.apache.fop.area.LineArea;
import java.util.ListIterator;
import java.util.List;
import java.util.ArrayList;
import java.util.LinkedList;
import org.apache.fop.traits.MinOptMax;
/**
 * LayoutManager for lines. It builds one or more lines containing
 * inline areas generated by its sub layout managers.
 * A break is found for each line, which may contain one or more
 * breaks from the child layout managers.
 * Once a break is found, it is returned to the parent layout
 * manager to handle.
 * When the areas are being added to the page this manager
 * creates a line area to contain the inline areas added by the
 * child layout managers.
 */
public class LineLayoutManager extends InlineStackingLayoutManager
implements BlockLevelLayoutManager {
private Block fobj;
/**
 * Cache alignment, indent and hyphenation properties from the block FO,
 * and derive the effective alignment used while building Knuth elements.
 */
private void initialize() {
    bTextAlignment = fobj.getTextAlign();
    bTextAlignmentLast = fobj.getTextAlignLast();
    textIndent = fobj.getTextIndent();
    hyphProps = fobj.getCommonHyphenation();
    // special case: body not justified but last line justified —
    // use 0 as the effective alignment, otherwise the body alignment
    effectiveAlignment
        = (bTextAlignment != EN_JUSTIFY && bTextAlignmentLast == EN_JUSTIFY)
            ? 0 : bTextAlignment;
}
/**
* Private class to store information about inline breaks.
* Each value holds the start and end indexes into a List of
* inline break positions.
*/
private static class LineBreakPosition extends LeafPosition {
// int iPos;
int iParIndex; // index of the Paragraph this Position refers to
int availableShrink;   // total shrink available on the line
int availableStretch;  // total stretch available on the line
int difference;        // difference between line width and natural content width
double dAdjust; // Percentage to adjust (stretch or shrink)
double ipdAdjust; // Percentage to adjust (stretch or shrink)
int startIndent;  // indent at the start of the line
int lineHeight;   // bpd of the line
int lineWidth;    // ipd of the line
int baseline;     // position of the main baseline within the line
int topShift;     // shift for top-aligned content
int bottomShift;  // shift for bottom-aligned content
/**
 * @param lm the LayoutManager that created this position
 * @param index index of the Paragraph this position refers to
 * @param iBreakIndex index of the break element inside the paragraph
 */
LineBreakPosition(LayoutManager lm, int index, int iBreakIndex,
int shrink, int stretch, int diff,
double ipdA, double adjust, int ind,
int lh, int lw, int bl, int ts, int bs) {
super(lm, iBreakIndex);
availableShrink = shrink;
availableStretch = stretch;
difference = diff;
iParIndex = index;
ipdAdjust = ipdA;
dAdjust = adjust;
startIndent = ind;
lineHeight = lh;
lineWidth = lw;
baseline = bl;
topShift = ts;
bottomShift = bs;
}
}
/** Break positions returned by inline content. */
private List vecInlineBreaks = new java.util.ArrayList();
// text-align / text-align-last values, cached from the block FO
private int bTextAlignment = EN_JUSTIFY;
private int bTextAlignmentLast;
// alignment actually used while building Knuth elements (see initialize())
private int effectiveAlignment;
private Length textIndent;
private int iIndents = 0;
private CommonHyphenation hyphProps;
//private LayoutProps layoutProps;
// default line metrics passed in by the parent block LM
private int lineHeight;
private int lead;
private int follow;
// offset of the middle baseline with respect to the main baseline
private int middleShift;
// the paragraphs (KnuthSequences) collected from the inline content
private List knuthParagraphs = null;
// index of the next LineBreakPosition to return
private int iReturnedLBP = 0;
// element-range bookkeeping used while adding areas
private int iStartElement = 0;
private int iEndElement = 0;
// parameters of Knuth's algorithm:
// penalty value for flagged penalties
private int flaggedPenalty = 50;
// layout possibilities for the paragraph currently being processed
private LineLayoutPossibilities lineLayouts;
// one LineLayoutPossibilities per paragraph
private List lineLayoutsList;
private int iLineWidth = 0;
// this constant is used to create elements when text-align is center:
// every TextLM descendant of LineLM must use the same value,
// otherwise the line breaking algorithm does not find the right
// break point
public static final int DEFAULT_SPACE_WIDTH = 3336;
/**
 * Record of the first paragraph element produced by a given inline LM;
 * used by findHyphenationPoints() to know which element range each LM
 * must re-generate after hyphenation.
 */
private class Update {
private InlineLevelLayoutManager inlineLM;
private int iFirstIndex;
/**
 * @param lm the inline LM this update refers to
 * @param index index of the first element produced by that LM
 */
public Update(InlineLevelLayoutManager lm, int index) {
inlineLM = lm;
iFirstIndex = index;
}
}
/**
 * A paragraph: a KnuthSequence of inline elements delimited by forced
 * line breaks, plus the auxiliary elements (indent box, line filler,
 * forced-break penalty) that the line-breaking algorithm needs.
 */
private class Paragraph extends KnuthSequence {
// space at the end of the last line (in millipoints)
private MinOptMax lineFiller;
private int textAlignment;
private int textAlignmentLast;
private int textIndent;
private int lineWidth;
// the LM which created the paragraph
private LineLayoutManager layoutManager;
public Paragraph(LineLayoutManager llm, int alignment, int alignmentLast,
int indent) {
super();
layoutManager = llm;
textAlignment = alignment;
textAlignmentLast = alignmentLast;
textIndent = indent;
}
// begin a paragraph for a line of the given width
public void startParagraph(int lw) {
lineWidth = lw;
startSequence();
}
public void startSequence() {
// set the minimum amount of empty space at the end of the
// last line
if (bTextAlignment == EN_CENTER) {
lineFiller = new MinOptMax(0);
} else {
lineFiller = new MinOptMax(0, (int)(lineWidth / 12), lineWidth);
}
// add auxiliary elements at the beginning of the paragraph
if (bTextAlignment == EN_CENTER && bTextAlignmentLast != EN_JUSTIFY) {
this.add(new KnuthGlue(0, 3 * DEFAULT_SPACE_WIDTH, 0,
null, false));
ignoreAtStart ++;
}
// add the element representing text indentation
// at the beginning of the first paragraph
if (knuthParagraphs.size() == 0
&& fobj.getTextIndent().getValue() != 0) {
this.add(new KnuthInlineBox(fobj.getTextIndent().getValue(), 0, 0, 0,
null, false));
ignoreAtStart ++;
}
}
// close the paragraph and, if it has content, store it
public void endParagraph() {
KnuthSequence finishedPar = this.endSequence();
if (finishedPar != null) {
knuthParagraphs.add(finishedPar);
}
}
public KnuthSequence endSequence() {
// remove glue and penalty item at the end of the paragraph
while (this.size() > ignoreAtStart
&& !((KnuthElement)this.get(this.size() - 1)).isBox()) {
this.remove(this.size() - 1);
}
if (this.size() > ignoreAtStart) {
if (bTextAlignment == EN_CENTER
&& bTextAlignmentLast != EN_JUSTIFY) {
this.add(new KnuthGlue(0, 3 * DEFAULT_SPACE_WIDTH, 0,
null, false));
this.add(new KnuthPenalty(0, -KnuthElement.INFINITE,
false, null, false));
ignoreAtEnd = 2;
} else if (bTextAlignmentLast != EN_JUSTIFY) {
// add the elements representing the space
// at the end of the last line
// and the forced break
this.add(new KnuthPenalty(0, KnuthElement.INFINITE,
false, null, false));
this.add(new KnuthGlue(lineFiller.opt,
lineFiller.max - lineFiller.opt,
lineFiller.opt - lineFiller.min, null, false));
this.add(new KnuthPenalty(0, -KnuthElement.INFINITE,
false, null, false));
ignoreAtEnd = 3;
} else {
// add only the element representing the forced break
this.add(new KnuthPenalty(0, -KnuthElement.INFINITE,
false, null, false));
ignoreAtEnd = 1;
}
return this;
} else {
// paragraph contained no real content: discard it
this.clear();
return null;
}
}
}
/**
 * Specialization of the Knuth BreakingAlgorithm for line breaking:
 * records each layout possibility (line count + demerits) in lineLayouts,
 * turns the chosen active nodes into LineBreakPosition objects, and
 * filters the active node set according to the page alignment.
 */
private class LineBreakingAlgorithm extends BreakingAlgorithm {
private LineLayoutManager thisLLM;
private int pageAlignment;
// index of the layout possibility currently receiving break positions
private int activePossibility;
private int addedPositions;
private int textIndent;
private int fillerMinWidth;
private int lineHeight;
private int lead;
private int follow;
private int middleshift;
private int maxDiff;
private static final double MAX_DEMERITS = 10e6;
public LineBreakingAlgorithm (int pageAlign,
int textAlign, int textAlignLast,
int indent, int fillerWidth,
int lh, int ld, int fl, int ms, boolean first,
LineLayoutManager llm) {
super(textAlign, textAlignLast, first, false);
pageAlignment = pageAlign;
textIndent = indent;
fillerMinWidth = fillerWidth;
lineHeight = lh;
lead = ld;
follow = fl;
middleshift = ms;
thisLLM = llm;
activePossibility = -1;
// widest allowed difference in line count between possibilities
maxDiff = fobj.getWidows() >= fobj.getOrphans()
? fobj.getWidows()
: fobj.getOrphans();
}
// called once per layout possibility found by the algorithm
public void updateData1(int lineCount, double demerits) {
lineLayouts.addPossibility(lineCount, demerits);
log.trace("Layout possibility in " + lineCount + " lines; break at position:");
}
// called once per chosen break, walking the nodes backwards
public void updateData2(KnuthNode bestActiveNode,
KnuthSequence par,
int total) {
// compute indent and adjustment ratio, according to
// the value of text-align and text-align-last
int indent = 0;
int difference = (bestActiveNode.line < total) ? bestActiveNode.difference : bestActiveNode.difference + fillerMinWidth;
int textAlign = (bestActiveNode.line < total) ? alignment : alignmentLast;
indent += (textAlign == Constants.EN_CENTER) ?
difference / 2 :
(textAlign == Constants.EN_END) ? difference : 0;
indent += (bestActiveNode.line == 1 && bFirst) ?
textIndent : 0;
double ratio = (textAlign == Constants.EN_JUSTIFY
|| bestActiveNode.adjustRatio < 0) ? bestActiveNode.adjustRatio : 0;
// add nodes at the beginning of the list, as they are found
// backwards, from the last one to the first one
// the first time this method is called, initialize activePossibility
if (activePossibility == -1) {
activePossibility = 0;
addedPositions = 0;
}
if (addedPositions == lineLayouts.getLineCount(activePossibility)) {
activePossibility ++;
addedPositions = 0;
//System.out.println(" ");
}
//System.out.println("LLM> (" + (lineLayouts.getLineNumber(activePossibility) - addedPositions) + ") difference = " + difference + " ratio = " + ratio);
lineLayouts.addBreakPosition(makeLineBreakPosition(par,
(bestActiveNode.line > 1 ? bestActiveNode.previous.position + 1: 0),
bestActiveNode.position,
bestActiveNode.availableShrink - (addedPositions > 0 ? 0 : ((Paragraph)par).lineFiller.opt - ((Paragraph)par).lineFiller.min), bestActiveNode.availableStretch, difference, ratio, indent),
activePossibility);
addedPositions ++;
}
/* reset activePossibility, as if breakpoints have not yet been computed
*/
public void resetAlgorithm() {
activePossibility = -1;
}
/**
 * Build a LineBreakPosition for the line spanning the given element
 * range, computing its height from the boxes it contains (unless
 * line-stacking-strategy is font-height).
 */
private LineBreakPosition makeLineBreakPosition(KnuthSequence par,
int firstElementIndex,
int lastElementIndex,
int availableShrink, int availableStretch, int difference,
double ratio,
int indent) {
// line height calculation
int halfLeading = (lineHeight - lead - follow) / 2;
// height before the main baseline
int lineLead = lead;
// maximum size of top and bottom alignment
int maxtb = follow;
// max size of middle alignment before and after the middle baseline
int middlefollow = maxtb;
// true if this line contains only zero-height, auxiliary boxes
// and the actual line width is 0; in this case, the line "collapses"
// i.e. the line area will have bpd = 0
boolean bZeroHeightLine = (difference == iLineWidth);
// if line-stacking-strategy is "font-height", the line height
// is not affected by its content
if (fobj.getLineStackingStrategy() != EN_FONT_HEIGHT) {
ListIterator inlineIterator
= par.listIterator(firstElementIndex);
for (int j = firstElementIndex;
j <= lastElementIndex;
j++) {
KnuthElement element = (KnuthElement) inlineIterator.next();
if (element.isBox()) {
if (((KnuthInlineBox) element).getLead() > lineLead) {
lineLead = ((KnuthInlineBox) element).getLead();
}
if (((KnuthInlineBox) element).getTotal() > maxtb) {
maxtb = ((KnuthInlineBox) element).getTotal();
}
if (((KnuthInlineBox) element).getMiddle() > lineLead + middleShift) {
lineLead += ((KnuthInlineBox) element).getMiddle()
- lineLead - middleShift;
}
if (((KnuthInlineBox) element).getMiddle() > middlefollow - middleShift) {
middlefollow += ((KnuthInlineBox) element).getMiddle()
- middlefollow + middleShift;
}
if (bZeroHeightLine
&& (!element.isAuxiliary()
|| ((KnuthInlineBox) element).getTotal() > 0
|| ((KnuthInlineBox) element).getLead() > 0
|| ((KnuthInlineBox) element).getMiddle() > 0)) {
bZeroHeightLine = false;
}
}
}
if (maxtb - lineLead > middlefollow) {
middlefollow = maxtb - lineLead;
}
}
constantLineHeight = lineLead + middlefollow + (lineHeight - lead - follow);
if (bZeroHeightLine) {
return new LineBreakPosition(thisLLM,
knuthParagraphs.indexOf(par),
lastElementIndex,
availableShrink, availableStretch, difference, ratio, 0, indent,
0, iLineWidth,
0, 0, 0);
} else {
return new LineBreakPosition(thisLLM,
knuthParagraphs.indexOf(par),
lastElementIndex,
availableShrink, availableStretch, difference, ratio, 0, indent,
lineLead + middlefollow + (lineHeight - lead - follow), iLineWidth,
lineLead + halfLeading,
- lineLead, middlefollow);
}
}
public int findBreakingPoints(Paragraph par, /*int lineWidth,*/
double threshold, boolean force,
boolean hyphenationAllowed) {
return super.findBreakingPoints(par, /*lineWidth,*/
threshold, force, hyphenationAllowed);
}
/**
 * Keep (or discard) active nodes according to the page alignment:
 * for justified pages keep all reasonable line counts, otherwise
 * keep only the single best node.
 * @return the line count of the best active node
 */
protected int filterActiveNodes() {
KnuthNode bestActiveNode = null;
if (pageAlignment == EN_JUSTIFY) {
// leave all active nodes and find the optimum line number
//System.out.println("LBA.filterActiveNodes> " + activeNodeCount + " layouts");
for (int i = startLine; i < endLine; i++) {
for (KnuthNode node = getNode(i); node != null; node = node.next) {
//System.out.println(" + lines = " + node.line + " demerits = " + node.totalDemerits);
bestActiveNode = compareNodes(bestActiveNode, node);
}
}
// scan the node set once again and remove some nodes
//System.out.println("LBA.filterActiveList> layout selection");
for (int i = startLine; i < endLine; i++) {
for (KnuthNode node = getNode(i); node != null; node = node.next) {
//if (Math.abs(node.line - bestActiveNode.line) > maxDiff) {
//if (false) {
if (node.line != bestActiveNode.line
&& node.totalDemerits > MAX_DEMERITS) {
//System.out.println(" XXX lines = " + node.line + " demerits = " + node.totalDemerits);
removeNode(i, node);
} else {
//System.out.println(" ok lines = " + node.line + " demerits = " + node.totalDemerits);
}
}
}
} else {
// leave only the active node with fewest total demerits
for (int i = startLine; i < endLine; i++) {
for (KnuthNode node = getNode(i); node != null; node = node.next) {
bestActiveNode = compareNodes(bestActiveNode, node);
if (node != bestActiveNode) {
removeNode(i, node);
}
}
}
}
return bestActiveNode.line;
}
}
private int constantLineHeight = 12000;
/**
 * Create a new Line Layout Manager.
 * This is used by the block layout manager to create
 * line managers for handling inline areas flowing into line areas.
 *
 * @param block the block formatting object owning the inline content
 * @param lh the default line height
 * @param l the default lead, from top to baseline
 * @param f the default follow, from baseline to bottom
 * @param ms the offset of the middle baseline with respect to the main baseline
 */
public LineLayoutManager(Block block, int lh, int l, int f, int ms) {
super(block);
fobj = block;
// the child FObj are owned by the parent BlockLM
// this LM has all its childLMs preloaded
fobjIter = null;
lineHeight = lh;
lead = l;
follow = f;
middleShift = ms;
initialize(); // Normally done when started by parent!
}
/**
 * Produce the block-level Knuth elements for this line manager:
 * on the first call, collect the inline elements into paragraphs
 * (phase 1), then compute the optimal line breaks (phase 2).
 *
 * @param context the layout context, supplying the available IPD
 * @param alignment the vertical alignment of the enclosing page/region
 * @return the list of block-level elements, or null when there is no content
 */
public LinkedList getNextKnuthElements(LayoutContext context, int alignment) {
// Get a break from currently active child LM
// Set up constraints for inline level managers
InlineLevelLayoutManager curLM ; // currently active LM
// IPD remaining in line
MinOptMax availIPD = context.getStackLimit();
clearPrevIPD();
int iPrevLineEnd = vecInlineBreaks.size();
if (iPrevLineEnd == 0 && bTextAlignment == EN_START) {
// the first line of a start-aligned block is shortened by the indent
availIPD.subtract(new MinOptMax(textIndent.getValue()));
}
//PHASE 1: Create Knuth elements
if (knuthParagraphs == null) {
// it's the first time this method is called
knuthParagraphs = new ArrayList();
// here starts Knuth's algorithm
//TODO availIPD should not really be used here, so we can later support custom line
//widths for for each line (side-floats, differing available IPD after page break)
collectInlineKnuthElements(context, availIPD);
} else {
// this method has been called before
// all line breaks are already calculated
}
// return finished when there's no content
if (knuthParagraphs.size() == 0) {
setFinished(true);
return null;
}
//PHASE 2: Create line breaks
return findOptimalLineBreakingPoints(alignment);
/*
LineBreakPosition lbp = null;
if (breakpoints == null) {
// find the optimal line breaking points for each paragraph
breakpoints = new ArrayList();
ListIterator paragraphsIterator
= knuthParagraphs.listIterator(knuthParagraphs.size());
Paragraph currPar = null;
while (paragraphsIterator.hasPrevious()) {
currPar = (Paragraph) paragraphsIterator.previous();
findBreakingPoints(currPar, context.getStackLimit().opt);
}
}*/
//PHASE 3: Return lines
/*
// get a break point from the list
lbp = (LineBreakPosition) breakpoints.get(iReturnedLBP ++);
if (iReturnedLBP == breakpoints.size()) {
setFinished(true);
}
BreakPoss curLineBP = new BreakPoss(lbp);
curLineBP.setFlag(BreakPoss.ISLAST, isFinished());
curLineBP.setStackingSize(new MinOptMax(lbp.lineHeight));
return curLineBP;
*/
}
/**
 * Phase 1 of Knuth algorithm: Collect all inline Knuth elements before determining line breaks.
 * Consecutive word-fragment boxes from different LMs get a letter space
 * inserted between them, and preserved linefeeds (penalty of -infinity)
 * close the current paragraph and start a new one.
 * @param context the LayoutContext
 * @param availIPD available IPD for line (should be removed!)
 */
private void collectInlineKnuthElements(LayoutContext context, MinOptMax availIPD) {
LayoutContext inlineLC = new LayoutContext(context);
InlineLevelLayoutManager curLM;
KnuthElement thisElement = null;
LinkedList returnedList = null;
iLineWidth = context.getStackLimit().opt;
// convert all the text in a sequence of paragraphs made
// of KnuthBox, KnuthGlue and KnuthPenalty objects
boolean bPrevWasKnuthBox = false;
KnuthBox prevBox = null;
Paragraph knuthPar = new Paragraph(this,
bTextAlignment, bTextAlignmentLast,
textIndent.getValue());
knuthPar.startParagraph(availIPD.opt);
while ((curLM = (InlineLevelLayoutManager) getChildLM()) != null) {
if ((returnedList
= curLM.getNextKnuthElements(inlineLC,
effectiveAlignment))
!= null) {
if (returnedList.size() == 0) {
continue;
}
// look at the first element
thisElement = (KnuthElement) returnedList.getFirst();
if (thisElement.isBox() && !thisElement.isAuxiliary()
&& bPrevWasKnuthBox) {
prevBox = (KnuthBox) knuthPar.removeLast();
LinkedList oldList = new LinkedList();
// if there are two consecutive KnuthBoxes the
// first one does not represent a whole word,
// so it must be given one more letter space
if (!prevBox.isAuxiliary()) {
// if letter spacing is constant,
// only prevBox needs to be replaced;
oldList.add(prevBox);
} else {
// prevBox is the last element
// in the sub-sequence
//   <box> <aux penalty> <aux glue> <aux box>
// the letter space is added to <aux glue>,
// while the other elements are not changed
oldList.add(prevBox);
oldList.addFirst((KnuthGlue) knuthPar.removeLast());
oldList.addFirst((KnuthPenalty) knuthPar.removeLast());
}
// adding a letter space could involve, according to the text
// represented by oldList, replacing a glue element or adding
// new elements
knuthPar.addAll(((InlineLevelLayoutManager)
prevBox.getLayoutManager())
.addALetterSpaceTo(oldList));
if (((KnuthInlineBox) prevBox).isAnchor()) {
// prevBox represents a footnote citation: copy footnote info
// from prevBox to the new box
KnuthInlineBox newBox = (KnuthInlineBox) knuthPar.getLast();
newBox.setFootnoteBodyLM(((KnuthInlineBox) prevBox).getFootnoteBodyLM());
}
}
// look at the last element
KnuthElement lastElement = (KnuthElement) returnedList.getLast();
boolean bForceLinefeed = false;
if (lastElement.isBox()) {
bPrevWasKnuthBox = true;
} else {
bPrevWasKnuthBox = false;
if (lastElement.isPenalty()
&& ((KnuthPenalty) lastElement).getP()
== -KnuthPenalty.INFINITE) {
// a penalty item whose value is -inf
// represents a preserved linefeed,
// which forces a line break
bForceLinefeed = true;
returnedList.removeLast();
}
}
// add the new elements to the paragraph
knuthPar.addAll(returnedList);
if (bForceLinefeed) {
if (knuthPar.size() == 0) {
//only a forced linefeed on this line
//-> compensate with a zero width box
knuthPar.add(new KnuthInlineBox(0, 0, 0, 0,
null, false));
}
knuthPar.endParagraph();
knuthPar = new Paragraph(this,
bTextAlignment, bTextAlignmentLast,
textIndent.getValue());
knuthPar.startParagraph(availIPD.opt);
bPrevWasKnuthBox = false;
}
} else {
// curLM returned null; this can happen
// if it has nothing more to layout,
// so just iterate once more to see
// if there are other children
}
}
knuthPar.endParagraph();
}
/**
* Find a set of breaking points.
* This method is called only once by getNextBreakPoss, and it
* subsequently calls the other findBreakingPoints() method with
* different parameters, until a set of breaking points is found.
*
* @param par the list of elements that must be parted
* into lines
     * @param lineWidth the desired length of the lines
*/
/*
private void findBreakingPoints(Paragraph par, int lineWidth) {
// maximum adjustment ratio permitted
float maxAdjustment = 1;
// first try
if (!findBreakingPoints(par, lineWidth, maxAdjustment, false)) {
// the first try failed, now try something different
log.debug("No set of breaking points found with maxAdjustment = " + maxAdjustment);
if (hyphProps.hyphenate == Constants.EN_TRUE) {
// consider every hyphenation point as a legal break
findHyphenationPoints(par);
} else {
// try with a higher threshold
maxAdjustment = 5;
}
if (!findBreakingPoints(par, lineWidth, maxAdjustment, false)) {
// the second try failed too, try with a huge threshold;
// if this fails too, use a different algorithm
log.debug("No set of breaking points found with maxAdjustment = " + maxAdjustment
+ (hyphProps.hyphenate == Constants.EN_TRUE ? " and hyphenation" : ""));
maxAdjustment = 20;
if (!findBreakingPoints(par, lineWidth, maxAdjustment, true)) {
log.debug("No set of breaking points found, using first-fit algorithm");
}
}
}
}
private boolean findBreakingPoints(Paragraph par, int lineWidth,
double threshold, boolean force) {
KnuthParagraph knuthPara = new KnuthParagraph(par);
int lines = knuthPara.findBreakPoints(lineWidth, threshold, force);
if (lines == 0) {
return false;
}
for (int i = lines-1; i >= 0; i--) {
int line = i+1;
if (log.isTraceEnabled()) {
log.trace("Making line from " + knuthPara.getStart(i) + " to " +
knuthPara.getEnd(i));
}
// compute indent and adjustment ratio, according to
// the value of text-align and text-align-last
int difference = knuthPara.getDifference(i);
if (line == lines) {
difference += par.lineFillerWidth;
}
int textAlign = (line < lines)
? bTextAlignment : bTextAlignmentLast;
int indent = (textAlign == EN_CENTER)
? difference / 2
: (textAlign == EN_END) ? difference : 0;
indent += (line == 1 && knuthParagraphs.indexOf(par) == 0)
? textIndent.getValue() : 0;
double ratio = (textAlign == EN_JUSTIFY)
? knuthPara.getAdjustRatio(i) : 0;
int start = knuthPara.getStart(i);
int end = knuthPara.getEnd(i);
makeLineBreakPosition(par, start, end, 0, ratio, indent);
}
return true;
}
private void makeLineBreakPosition(Paragraph par,
int firstElementIndex, int lastElementIndex,
int insertIndex, double ratio, int indent) {
// line height calculation
int halfLeading = (lineHeight - lead - follow) / 2;
// height above the main baseline
int lineLead = lead + halfLeading;
// maximum size of top and bottom alignment
int maxtb = follow + halfLeading;
// max size of middle alignment above and below the middle baseline
int middlefollow = maxtb;
ListIterator inlineIterator
= par.listIterator(firstElementIndex);
for (int j = firstElementIndex;
j <= lastElementIndex;
j++) {
KnuthElement element = (KnuthElement) inlineIterator.next();
if (element.isBox()) {
KnuthInlineBox box = (KnuthInlineBox)element;
if (box.getLead() > lineLead) {
lineLead = box.getLead();
}
if (box.getTotal() > maxtb) {
maxtb = box.getTotal();
}
if (box.getMiddle() > lineLead + middleShift) {
lineLead += box.getMiddle()
- lineLead - middleShift;
}
if (box.getMiddle() > middlefollow - middleShift) {
middlefollow += box.getMiddle()
- middlefollow + middleShift;
}
}
}
if (maxtb - lineLead > middlefollow) {
middlefollow = maxtb - lineLead;
}
breakpoints.add(insertIndex,
new LineBreakPosition(this,
knuthParagraphs.indexOf(par),
lastElementIndex ,
ratio, 0, indent,
lineLead + middlefollow,
lineLead));
}*/
    /**
     * Phase 2 of Knuth algorithm: find optimal break points.
     * Walks the paragraphs in reverse order and, for each one, runs the
     * line-breaking algorithm in up to three passes of increasing tolerance
     * (maxAdjustment 1, then 5 or hyphenation, then a forced pass at 20),
     * storing the resulting layout possibilities in lineLayoutsList.
     * @param alignment alignment of the paragraph
     * @return a list of Knuth elements representing broken lines
     */
    private LinkedList findOptimalLineBreakingPoints(int alignment) {
        // find the optimal line breaking points for each paragraph
        // (iterating backwards; results are prepended so lineLayoutsList
        // ends up in paragraph order)
        ListIterator paragraphsIterator
            = knuthParagraphs.listIterator(knuthParagraphs.size());
        Paragraph currPar = null;
        LineBreakingAlgorithm alg;
        lineLayoutsList = new ArrayList(knuthParagraphs.size());
        while (paragraphsIterator.hasPrevious()) {
            lineLayouts = new LineLayoutPossibilities();
            currPar = (Paragraph) paragraphsIterator.previous();
            double maxAdjustment = 1;
            int iBPcount = 0;
            alg = new LineBreakingAlgorithm(alignment,
                    bTextAlignment, bTextAlignmentLast,
                    textIndent.getValue(), currPar.lineFiller.opt,
                    lineHeight, lead, follow, middleShift,
                    (knuthParagraphs.indexOf(currPar) == 0),
                    this);
            if (hyphProps.hyphenate == EN_TRUE) {
                findHyphenationPoints(currPar);
            }
            // first try: strict threshold, no hyphenation allowed
            boolean bHyphenationAllowed = false;
            alg.setConstantLineWidth(iLineWidth);
            iBPcount = alg.findBreakingPoints(currPar,
                    maxAdjustment, false, bHyphenationAllowed);
            if (iBPcount == 0 || alignment == EN_JUSTIFY) {
                // if the first try found a set of breaking points, save them
                if (iBPcount > 0) {
                    alg.resetAlgorithm();
                    lineLayouts.savePossibilities(false);
                } else {
                    // the first try failed
                    log.debug("No set of breaking points found with maxAdjustment = " + maxAdjustment);
                }
                // now try something different
                log.debug("Hyphenation possible? " + (hyphProps.hyphenate == EN_TRUE));
                if (hyphProps.hyphenate == EN_TRUE) {
                    // consider every hyphenation point as a legal break
                    bHyphenationAllowed = true;
                } else {
                    // try with a higher threshold
                    maxAdjustment = 5;
                }
                if ((iBPcount
                        = alg.findBreakingPoints(currPar,
                                maxAdjustment, false, bHyphenationAllowed)) == 0) {
                    // the second try failed too, try with a huge threshold
                    // and force the algorithm to find
                    // a set of breaking points
                    log.debug("No set of breaking points found with maxAdjustment = " + maxAdjustment
                            + (hyphProps.hyphenate == EN_TRUE ? " and hyphenation" : ""));
                    maxAdjustment = 20;
                    iBPcount
                        = alg.findBreakingPoints(currPar,
                                maxAdjustment, true, bHyphenationAllowed);
                }
                // use non-hyphenated breaks, when possible
                lineLayouts.restorePossibilities();

                /* extension (not in the XSL FO recommendation): if vertical alignment
                   is justify and the paragraph has only one layout, try using
                   shorter or longer lines */
                //TODO This code snippet is disabled. Reenable?
                // NOTE(review): the "false &&" guard below makes this whole branch
                // dead code; kept as-is pending the TODO decision above.
                if (false && alignment == EN_JUSTIFY && bTextAlignment == EN_JUSTIFY) {
                    //System.out.println("LLM.getNextKnuthElements> layouts with more lines? " + lineLayouts.canUseMoreLines());
                    //System.out.println("                          layouts with fewer lines? " + lineLayouts.canUseLessLines());
                    if (!lineLayouts.canUseMoreLines()) {
                        alg.resetAlgorithm();
                        lineLayouts.savePossibilities(true);
                        // try with shorter lines
                        int savedLineWidth = iLineWidth;
                        iLineWidth = (int) (iLineWidth * 0.95);
                        iBPcount = alg.findBreakingPoints(currPar,
                                maxAdjustment, true, bHyphenationAllowed);
                        // use normal lines, when possible
                        lineLayouts.restorePossibilities();
                        iLineWidth = savedLineWidth;
                    }
                    if (!lineLayouts.canUseLessLines()) {
                        alg.resetAlgorithm();
                        lineLayouts.savePossibilities(true);
                        // try with longer lines
                        int savedLineWidth = iLineWidth;
                        iLineWidth = (int) (iLineWidth * 1.05);
                        alg.setConstantLineWidth(iLineWidth);
                        iBPcount = alg.findBreakingPoints(currPar,
                                maxAdjustment, true, bHyphenationAllowed);
                        // use normal lines, when possible
                        lineLayouts.restorePossibilities();
                        iLineWidth = savedLineWidth;
                    }
                    //System.out.println("LLM.getNextKnuthElements> now, layouts with more lines? " + lineLayouts.canUseMoreLines());
                    //System.out.println("                          now, layouts with fewer lines? " + lineLayouts.canUseLessLines());
                }
            }
            // prepend, restoring document order of the paragraphs
            lineLayoutsList.add(0, lineLayouts);
        }

        setFinished(true);

        //Post-process the line breaks found
        return postProcessLineBreaks(alignment);
    }
    /**
     * Turns the chosen line layouts into the Knuth element sequence returned
     * to the parent layout manager: one KnuthBlockBox per line, with zero
     * penalties inserted where a page break between lines is legal
     * (respecting orphans/widows and keep-together).
     * @param alignment block-progression alignment of the containing area
     * @return the list of block-level Knuth elements for all paragraphs
     */
    private LinkedList postProcessLineBreaks(int alignment) {
        LinkedList returnList = new LinkedList();
        for (int p = 0; p < knuthParagraphs.size(); p ++) {
            // null penalty between paragraphs
            if (p > 0
                && !((BlockLevelLayoutManager) parentLM).mustKeepTogether()) {
                returnList.add(new KnuthPenalty(0, 0, false, new Position(this), false));
            }
            lineLayouts = (LineLayoutPossibilities)lineLayoutsList.get(p);
            if (alignment == EN_JUSTIFY) {
                /* justified vertical alignment (not in the XSL FO recommendation):
                   create a multi-layout sequence whose elements will contain
                   a conventional Position */
                Position returnPosition = new LeafPosition(this, p);
                createElements(returnList, lineLayouts, returnPosition);
            } else {
                /* "normal" vertical alignment: create a sequence whose boxes
                   represent effective lines, and contain LineBreakPositions */
                Position returnPosition = new LeafPosition(this, p);
                int startIndex = 0;
                for (int i = 0;
                        i < lineLayouts.getChosenLineCount();
                        i++) {
                    if (!((BlockLevelLayoutManager) parentLM).mustKeepTogether()
                        && i >= fobj.getOrphans()
                        && i <= lineLayouts.getChosenLineCount() - fobj.getWidows()
                        && returnList.size() > 0) {
                        // null penalty allowing a page break between lines
                        returnList.add(new KnuthPenalty(0, 0, false, returnPosition, false));
                    }
                    int endIndex = ((LineBreakPosition) lineLayouts.getChosenPosition(i)).getLeafPos();
                    // create a list of the FootnoteBodyLM handling footnotes
                    // whose citations are in this line
                    LinkedList footnoteList = new LinkedList();
                    ListIterator elementIterator = ((Paragraph) knuthParagraphs.get(p)).listIterator(startIndex);
                    while (elementIterator.nextIndex() <= endIndex) {
                        KnuthElement element = (KnuthElement) elementIterator.next();
                        if (element instanceof KnuthInlineBox
                            && ((KnuthInlineBox) element).isAnchor()) {
                            footnoteList.add(((KnuthInlineBox) element).getFootnoteBodyLM());
                        }
                    }
                    startIndex = endIndex + 1;
                    returnList.add(new KnuthBlockBox(((LineBreakPosition) lineLayouts.getChosenPosition(i)).lineHeight,
                                                    footnoteList, lineLayouts.getChosenPosition(i), false));
                }
            }
        }
        return returnList;
    }
    /**
     * Builds the Knuth element sequence for one paragraph when the vertical
     * alignment is "justify": lines are classified as mandatory (first/last
     * groups, inner), optional (may be added to fill space) or eliminable
     * (may be dropped to save space), and encoded as box/glue/penalty runs
     * whose widths are multiples of constantLineHeight.
     * @param list the output list the new elements are appended to
     * @param lineLayouts the min/opt/max line-count possibilities for the paragraph
     * @param elementPosition the Position stored in every generated element
     */
    private void createElements(List list, LineLayoutPossibilities lineLayouts,
                                Position elementPosition) {
        /* number of normal, inner lines */
        int nInnerLines = 0;
        /* number of lines that can be used in order to fill more space */
        int nOptionalLines = 0;
        /* number of lines that can be used in order to fill more space
           only if the paragraph is not parted */
        int nConditionalOptionalLines = 0;
        /* number of lines that can be omitted in order to fill less space */
        int nEliminableLines = 0;
        /* number of lines that can be omitted in order to fill less space
           only if the paragraph is not parted */
        int nConditionalEliminableLines = 0;
        /* number of the first unbreakable lines */
        int nFirstLines = fobj.getOrphans();
        /* number of the last unbreakable lines */
        int nLastLines = fobj.getWidows();
        /* sub-sequence used to separate the elements representing different lines */
        List breaker = new LinkedList();

        /* comment out the next lines in order to test particular situations */
        // classify the line counts against the orphans+widows budget;
        // the four branches cover min/opt/max thresholds in increasing order
        if (fobj.getOrphans() + fobj.getWidows() <= lineLayouts.getMinLineCount()) {
            nInnerLines = lineLayouts.getMinLineCount() - (fobj.getOrphans() + fobj.getWidows());
            nOptionalLines = lineLayouts.getMaxLineCount() - lineLayouts.getOptLineCount();
            nEliminableLines = lineLayouts.getOptLineCount() - lineLayouts.getMinLineCount();
        } else if (fobj.getOrphans() + fobj.getWidows() <= lineLayouts.getOptLineCount()) {
            nOptionalLines = lineLayouts.getMaxLineCount() - lineLayouts.getOptLineCount();
            nEliminableLines = lineLayouts.getOptLineCount() - (fobj.getOrphans() + fobj.getWidows());
            nConditionalEliminableLines = (fobj.getOrphans() + fobj.getWidows()) - lineLayouts.getMinLineCount();
        } else if (fobj.getOrphans() + fobj.getWidows() <= lineLayouts.getMaxLineCount()) {
            nOptionalLines = lineLayouts.getMaxLineCount() - (fobj.getOrphans() + fobj.getWidows());
            nConditionalOptionalLines = (fobj.getOrphans() + fobj.getWidows()) - lineLayouts.getOptLineCount();
            nConditionalEliminableLines = lineLayouts.getOptLineCount() - lineLayouts.getMinLineCount();
            nFirstLines -= nConditionalOptionalLines;
        } else {
            nConditionalOptionalLines = lineLayouts.getMaxLineCount() - lineLayouts.getOptLineCount();
            nConditionalEliminableLines = lineLayouts.getOptLineCount() - lineLayouts.getMinLineCount();
            nFirstLines = lineLayouts.getOptLineCount();
            nLastLines = 0;
        }
        /* comment out the previous lines in order to test particular situations */
        /* use these lines to test particular situations
         nInnerLines = 0;
         nOptionalLines = 1;
         nConditionalOptionalLines = 2;
         nEliminableLines = 0;
         nConditionalEliminableLines = 0;
         nFirstLines = 1;
         nLastLines = 3;
        */

        // the "breaker" sub-sequence separates consecutive lines; when the
        // paragraph has conditional lines it also compensates their heights
        // around a legal break point
        if (nLastLines != 0
            && (nConditionalOptionalLines > 0 || nConditionalEliminableLines > 0)) {
            breaker.add(new KnuthPenalty(0, KnuthElement.INFINITE, false, elementPosition, false));
            breaker.add(new KnuthGlue(0, -nConditionalOptionalLines * constantLineHeight,
                                      -nConditionalEliminableLines * constantLineHeight,
                                      LINE_NUMBER_ADJUSTMENT, elementPosition, false));
            breaker.add(new KnuthPenalty(nConditionalOptionalLines * constantLineHeight,
                                         0, false, elementPosition, false));
            breaker.add(new KnuthGlue(0, nConditionalOptionalLines * constantLineHeight,
                                      nConditionalEliminableLines * constantLineHeight,
                                      LINE_NUMBER_ADJUSTMENT, elementPosition, false));
        } else if (nLastLines != 0) {
            breaker.add(new KnuthPenalty(0, 0, false, elementPosition, false));
        }

        //System.out.println("first=" + nFirstLines + " inner=" + nInnerLines
        //                   + " optional=" + nOptionalLines + " eliminable=" + nEliminableLines
        //                   + " last=" + nLastLines
        //                   + " (condOpt=" + nConditionalOptionalLines + " condEl=" + nConditionalEliminableLines + ")");

        // creation of the elements:
        // first group of lines
        list.add(new KnuthBox(nFirstLines * constantLineHeight, elementPosition,
                              (nLastLines == 0
                               && nConditionalOptionalLines == 0
                               && nConditionalEliminableLines == 0 ? true : false)));
        if (nConditionalOptionalLines > 0
            || nConditionalEliminableLines > 0) {
            list.add(new KnuthPenalty(0, KnuthElement.INFINITE, false, elementPosition, false));
            list.add(new KnuthGlue(0, nConditionalOptionalLines * constantLineHeight,
                                   nConditionalEliminableLines * constantLineHeight,
                                   LINE_NUMBER_ADJUSTMENT, elementPosition, false));
            list.add(new KnuthBox(0, elementPosition,
                                  (nLastLines == 0 ? true : false)));
        }

        // optional lines
        for (int i = 0; i < nOptionalLines; i++) {
            list.addAll(breaker);
            list.add(new KnuthBox(0, elementPosition, false));
            list.add(new KnuthPenalty(0, KnuthElement.INFINITE, false, elementPosition, false));
            list.add(new KnuthGlue(0, 1 * constantLineHeight, 0,
                                   LINE_NUMBER_ADJUSTMENT, elementPosition, false));
            list.add(new KnuthBox(0, elementPosition, false));
        }

        // eliminable lines
        for (int i = 0; i < nEliminableLines; i++) {
            list.addAll(breaker);
            list.add(new KnuthBox(1 * constantLineHeight, elementPosition, false));
            list.add(new KnuthPenalty(0, KnuthElement.INFINITE, false, elementPosition, false));
            list.add(new KnuthGlue(0, 0, 1 * constantLineHeight,
                                   LINE_NUMBER_ADJUSTMENT, elementPosition, false));
            list.add(new KnuthBox(0, elementPosition, false));
        }

        // inner lines
        for (int i = 0; i < nInnerLines; i++) {
            list.addAll(breaker);
            list.add(new KnuthBox(1 * constantLineHeight, elementPosition, false));
        }

        // last group of lines
        if (nLastLines > 0) {
            list.addAll(breaker);
            list.add(new KnuthBox(nLastLines * constantLineHeight,
                                  elementPosition, true));
        }
    }
    /** @return false: this layout manager never forces keep-together */
    public boolean mustKeepTogether() {
        return false;
    }
    /** @return false: this layout manager never forces keep-with-previous */
    public boolean mustKeepWithPrevious() {
        return false;
    }
    /** @return false: this layout manager never forces keep-with-next */
    public boolean mustKeepWithNext() {
        return false;
    }
    /**
     * Negotiates a block-progression-dimension adjustment by converting the
     * requested amount into a whole number of lines added or removed.
     * @param adj the requested adjustment, in millipoints
     * @param lastElement the element whose Position identifies the paragraph
     * @return the adjustment actually applied (a multiple of constantLineHeight)
     */
    public int negotiateBPDAdjustment(int adj, KnuthElement lastElement) {
        LeafPosition pos = (LeafPosition)lastElement.getPosition();
        int totalAdj = adj;
        //if (lastElement.isPenalty()) {
        //    totalAdj += lastElement.getW();
        //}
        //int lineNumberDifference = (int)((double) totalAdj / constantLineHeight);
        // round toward zero with a 0.4 bias so partial lines are only
        // counted when at least ~60% of a line height is requested
        int lineNumberDifference = (int) Math.round((double) totalAdj / constantLineHeight + (adj > 0 ? - 0.4 : 0.4));
        //System.out.println(" LLM> variazione calcolata = " + ((double) totalAdj / constantLineHeight) + " variazione applicata = " + lineNumberDifference);
        lineLayouts = (LineLayoutPossibilities)lineLayoutsList.get(pos.getLeafPos());
        // the layout possibilities may clamp the requested line-count change
        lineNumberDifference = lineLayouts.applyLineCountAdjustment(lineNumberDifference);
        return lineNumberDifference * constantLineHeight;
    }
    /** Intentionally a no-op: this layout manager has no space glue to discard. */
    public void discardSpace(KnuthGlue spaceGlue) {
    }
    /**
     * Rebuilds the block-level Knuth element sequence after the chosen line
     * layouts changed: one KnuthBlockBox per chosen line, with zero penalties
     * inserted where a page break between lines is legal. The content IPD of
     * each box depends on the horizontal alignment.
     * @param oldList the previous element list (not read here; the chosen
     *                layouts in lineLayoutsList are used instead)
     * @param alignment horizontal alignment of the paragraph content
     * @return the rebuilt list of Knuth elements
     */
    public LinkedList getChangedKnuthElements(List oldList, int alignment) {
        LinkedList returnList = new LinkedList();
        for (int p = 0;
                p < knuthParagraphs.size();
                p ++) {
            lineLayouts = (LineLayoutPossibilities)lineLayoutsList.get(p);
            //System.out.println("demerits of the chosen layout: " + lineLayouts.getChosenDemerits());
            for (int i = 0;
                    i < lineLayouts.getChosenLineCount();
                    i ++) {
                if (!((BlockLevelLayoutManager) parentLM).mustKeepTogether()
                    && i >= fobj.getOrphans()
                    && i <= lineLayouts.getChosenLineCount() - fobj.getWidows()) {
                    // null penalty allowing a page break between lines
                    returnList.add(new KnuthPenalty(0, 0, false, new Position(this), false));
                }
                LineBreakPosition lbp = (LineBreakPosition) lineLayouts.getChosenPosition(i);
                //System.out.println("LLM.getChangedKnuthElements> lineWidth= " + lbp.lineWidth + " difference= " + lbp.difference);
                //System.out.println("                             shrink= " + lbp.availableShrink + " stretch= " + lbp.availableStretch);
                //System.out.println("linewidth= " + lbp.lineWidth + " difference= " + lbp.difference + " indent= " + lbp.startIndent);
                MinOptMax contentIPD;
                if (alignment == EN_JUSTIFY) {
                    contentIPD = new MinOptMax(
                        lbp.lineWidth - lbp.difference - lbp.availableShrink,
                        lbp.lineWidth - lbp.difference,
                        lbp.lineWidth - lbp.difference + lbp.availableStretch);
                } else if (alignment == EN_CENTER) {
                    contentIPD = new MinOptMax(lbp.lineWidth - 2 * lbp.startIndent);
                } else if (alignment == EN_END) {
                    contentIPD = new MinOptMax(lbp.lineWidth - lbp.startIndent);
                } else {
                    contentIPD = new MinOptMax(lbp.lineWidth - lbp.difference + lbp.startIndent);
                }
                returnList.add(new KnuthBlockBox(lbp.lineHeight,
                                                 contentIPD,
                                                 (lbp.ipdAdjust != 0 ? lbp.lineWidth - lbp.difference : 0),
                                                 lbp, false));
            }
        }
        return returnList;
    }
    /**
     * Find hyphenation points for every word in the current paragraph.
     * First pass: walk the paragraph, reassemble each word from the
     * non-auxiliary boxes produced by possibly-different inline LMs,
     * hyphenate it and let each LM mark its fragment. Second pass:
     * ask each involved LM to apply the changes and splice the new
     * elements back into the paragraph.
     * @param currPar the paragraph whose words will be hyphenated
     */
    private void findHyphenationPoints(Paragraph currPar){
        // hyphenate every word
        ListIterator currParIterator
            = currPar.listIterator(currPar.ignoreAtStart);
        // list of TLM involved in hyphenation
        LinkedList updateList = new LinkedList();
        KnuthElement firstElement = null;
        KnuthElement nextElement = null;
        // current InlineLevelLayoutManager
        InlineLevelLayoutManager currLM = null;
        // number of KnuthBox elements containing word fragments
        int boxCount;
        // number of auxiliary KnuthElements between KnuthBoxes
        int auxCount;
        StringBuffer sbChars = null;

        // find all hyphenation points
        while (currParIterator.hasNext()) {
            firstElement = (KnuthElement) currParIterator.next();
            // record every change of inline LM so the update pass knows
            // which index range belongs to which LM
            if (firstElement.getLayoutManager() != currLM) {
                currLM = (InlineLevelLayoutManager) firstElement.getLayoutManager();
                if (currLM != null) {
                    updateList.add(new Update(currLM, currParIterator.previousIndex()));
                } else {
                    break;
                }
            }
            // collect word fragments, ignoring auxiliary elements;
            // each word fragment was created by a different TextLM
            if (firstElement.isBox() && !firstElement.isAuxiliary()) {
                boxCount = 1;
                auxCount = 0;
                sbChars = new StringBuffer();
                currLM.getWordChars(sbChars, firstElement.getPosition());
                // look if next elements are boxes too
                while (currParIterator.hasNext()) {
                    nextElement = (KnuthElement) currParIterator.next();
                    if (nextElement.isBox() && !nextElement.isAuxiliary()) {
                        // a non-auxiliary KnuthBox: append word chars
                        if (currLM != nextElement.getLayoutManager()) {
                            currLM = (InlineLevelLayoutManager) nextElement.getLayoutManager();
                            updateList.add(new Update(currLM, currParIterator.previousIndex()));
                        }
                        // append text to recreate the whole word
                        boxCount ++;
                        currLM.getWordChars(sbChars, nextElement.getPosition());
                    } else if (!nextElement.isAuxiliary()) {
                        // a non-auxiliary non-box KnuthElement: stop
                        // go back to the last box or auxiliary element
                        currParIterator.previous();
                        break;
                    } else {
                        // an auxiliary KnuthElement: simply ignore it
                        auxCount ++;
                    }
                }
                log.trace(" Word to hyphenate: " + sbChars.toString());
                // find hyphenation points
                HyphContext hc = getHyphenContext(sbChars);
                // ask each LM to hyphenate its word fragment
                if (hc != null) {
                    KnuthElement element = null;
                    // rewind over the whole word, then walk it forward again
                    for (int i = 0; i < (boxCount + auxCount); i++) {
                        currParIterator.previous();
                    }
                    for (int i = 0; i < (boxCount + auxCount); i++) {
                        element = (KnuthElement) currParIterator.next();
                        if (element.isBox() && !element.isAuxiliary()) {
                            ((InlineLevelLayoutManager)
                             element.getLayoutManager()).hyphenate(element.getPosition(), hc);
                        } else {
                            // nothing to do, element is an auxiliary KnuthElement
                        }
                    }
                }
            }
        }

        // create iterator for the updateList
        ListIterator updateListIterator = updateList.listIterator();
        Update currUpdate = null;
        //int iPreservedElements = 0;
        int iAddedElements = 0;
        //int iRemovedElements = 0;
        while (updateListIterator.hasNext()) {
            // ask the LMs to apply the changes and return
            // the new KnuthElements to replace the old ones
            currUpdate = (Update) updateListIterator.next();
            int fromIndex = currUpdate.iFirstIndex;
            int toIndex;
            if (updateListIterator.hasNext()) {
                Update nextUpdate = (Update) updateListIterator.next();
                toIndex = nextUpdate.iFirstIndex;
                updateListIterator.previous();
            } else {
                // maybe this is not always correct!
                toIndex = currPar.size() - currPar.ignoreAtEnd
                    - iAddedElements;
            }

            // applyChanges() returns true if the LM modifies its data,
            // so it must return new KnuthElements to replace the old ones
            // (iAddedElements shifts the indexes recorded before earlier
            // splices changed the list length)
            if (((InlineLevelLayoutManager) currUpdate.inlineLM)
                .applyChanges(currPar.subList(fromIndex + iAddedElements,
                                              toIndex + iAddedElements))) {
                // insert the new KnuthElements
                LinkedList newElements = null;
                newElements
                    = currUpdate.inlineLM.getChangedKnuthElements
                    (currPar.subList(fromIndex + iAddedElements,
                                     toIndex + iAddedElements),
                     /*flaggedPenalty,*/ effectiveAlignment);
                // remove the old elements
                currPar.subList(fromIndex + iAddedElements,
                                toIndex + iAddedElements).clear();
                // insert the new elements
                currPar.addAll(fromIndex + iAddedElements, newElements);
                iAddedElements += newElements.size() - (toIndex - fromIndex);
            }
        }
        updateListIterator = null;
        updateList.clear();
    }
    /**
     * Line area is always considered to act as a fence.
     * @param bNotFirst ignored
     * @return always true
     */
    protected boolean hasLeadingFence(boolean bNotFirst) {
        return true;
    }
    /**
     * Line area is always considered to act as a fence.
     * @param bNotLast ignored
     * @return always true
     */
    protected boolean hasTrailingFence(boolean bNotLast) {
        return true;
    }
    /**
     * Hyphenates the given word and wraps the resulting offsets in a
     * HyphContext for the inline layout managers to consume.
     * @param sbChars the characters of the reassembled word
     * @return the hyphenation context, or null if no hyphenation was found
     */
    private HyphContext getHyphenContext(StringBuffer sbChars) {
        // Find all hyphenation points in this word
        // (get in an array of offsets)

        // hyphProps are from the block level?.
        // Note that according to the spec,
        // they also "apply to" fo:character.
        // I don't know what that means, since
        // if we change language in the middle of a "word",
        // the effect would seem quite strange!
        // Or perhaps in that case, we say that it's several words.
        // We probably should bring the hyphenation props up from the actual
        // TextLM which generate the hyphenation buffer,
        // since these properties inherit and could be specified
        // on an inline or wrapper below the block level.
        Hyphenation hyph
            = Hyphenator.hyphenate(hyphProps.language,
                                   hyphProps.country, sbChars.toString(),
                                   hyphProps.hyphenationRemainCharacterCount,
                                   hyphProps.hyphenationPushCharacterCount);
        // The hyph structure contains the information we need
        // Now start from prev: reset to that position, ask that LM to get
        // a Position for the first hyphenation offset. If the offset isn't in
        // its characters, it returns null,
        // but must tell how many chars it had.
        // Keep looking at currentBP using next hyphenation point until the
        // returned size is greater than the available size
        // or no more hyphenation points remain. Choose the best break.
        if (hyph != null) {
            return new HyphContext(hyph.getHyphenationPoints());
        } else {
            return null;
        }
    }
    /**
     * Reset the positions to the given position.
     * A null argument rewinds to the beginning; otherwise iReturnedLBP is
     * walked back until it points at the line after the given break position.
     * @param resetPos the position to reset to
     */
    public void resetPosition(Position resetPos) {
        if (resetPos == null) {
            setFinished(false);
            iReturnedLBP = 0;
        } else {
            if (isFinished()) {
                // if isFinished is true, iReturned LBP == breakpoints.size()
                // and breakpoints.get(iReturnedLBP) would generate
                // an IndexOutOfBoundException
                setFinished(false);
                iReturnedLBP--;
            }
            // scan backwards for the chosen position matching resetPos
            // (identity comparison is intentional: the same object is stored)
            while ((LineBreakPosition) lineLayouts.getChosenPosition(iReturnedLBP)
                   != (LineBreakPosition) resetPos) {
                iReturnedLBP--;
            }
            iReturnedLBP++;
        }
    }
    /**
     * Add the areas with the break points.
     * For each LineBreakPosition a LineArea is created; the inline children
     * between the line's first and last meaningful elements are asked to add
     * their areas into it, and the finished line is handed to the parent LM.
     * @param parentIter the iterator of break positions
     * @param context the context for adding areas
     */
    public void addAreas(PositionIterator parentIter,
                         LayoutContext context) {
        LayoutManager childLM;
        LayoutContext lc = new LayoutContext(0);
        int iCurrParIndex;
        while (parentIter.hasNext()) {
            Position pos = (Position) parentIter.next();
            if (pos instanceof LineBreakPosition) {
                ListIterator paragraphIterator = null;
                KnuthElement tempElement = null;
                // the TLM which created the last KnuthElement in this line
                LayoutManager lastLM = null;

                LineBreakPosition lbp = (LineBreakPosition) pos;
                LineArea lineArea = new LineArea();
                lineArea.setStartIndent(lbp.startIndent);
                lineArea.setBPD(lbp.lineHeight);
                lc.setBaseline(lbp.baseline);
                lc.setLineHeight(lbp.lineHeight);
                lc.setMiddleShift(middleShift);
                lc.setTopShift(lbp.topShift);
                lc.setBottomShift(lbp.bottomShift);

                iCurrParIndex = lbp.iParIndex;
                Paragraph currPar = (Paragraph) knuthParagraphs.get(iCurrParIndex);
                iEndElement = lbp.getLeafPos();

                // ignore the first elements added by the LineLayoutManager
                iStartElement += (iStartElement == 0) ? currPar.ignoreAtStart : 0;

                // ignore the last elements added by the LineLayoutManager
                iEndElement -= (iEndElement == (currPar.size() - 1))
                    ? currPar.ignoreAtEnd : 0;

                // ignore the last element in the line if it is a KnuthGlue object
                paragraphIterator = currPar.listIterator(iEndElement);
                tempElement = (KnuthElement) paragraphIterator.next();
                if (tempElement.isGlue()) {
                    iEndElement --;
                    // this returns the same KnuthElement
                    paragraphIterator.previous();
                    tempElement = (KnuthElement) paragraphIterator.previous();
                }
                lastLM = tempElement.getLayoutManager();

                // ignore KnuthGlue and KnuthPenalty objects
                // at the beginning of the line
                paragraphIterator = currPar.listIterator(iStartElement);
                tempElement = (KnuthElement) paragraphIterator.next();
                while (!tempElement.isBox() && paragraphIterator.hasNext()) {
                    tempElement = (KnuthElement) paragraphIterator.next();
                    iStartElement ++;
                }

                // Add the inline areas to lineArea
                PositionIterator inlinePosIter
                    = new KnuthPossPosIter(currPar, iStartElement,
                                           iEndElement + 1);

                // advance iStartElement for the next line (or next paragraph)
                iStartElement = lbp.getLeafPos() + 1;
                if (iStartElement == currPar.size()) {
                    // advance to next paragraph
                    iStartElement = 0;
                }

                lc.setSpaceAdjust(lbp.dAdjust);
                lc.setIPDAdjust(lbp.ipdAdjust);
                lc.setLeadingSpace(new SpaceSpecifier(true));
                lc.setTrailingSpace(new SpaceSpecifier(false));
                lc.setFlags(LayoutContext.RESOLVE_LEADING_SPACE, true);

                /* extension (not in the XSL FO recommendation): if the left and right margins
                   have been optimized, recompute indents and / or adjust ratio, according
                   to the paragraph horizontal alignment */
                // NOTE(review): the "false &&" guards below disable all three
                // margin-optimization branches; kept as-is intentionally.
                if (false && bTextAlignment == EN_JUSTIFY) {
                    // re-compute space adjust ratio
                    int updatedDifference = context.getStackLimit().opt - lbp.lineWidth + lbp.difference;
                    double updatedRatio = 0.0;
                    if (updatedDifference > 0) {
                        updatedRatio = (float) updatedDifference / lbp.availableStretch;
                    } else if (updatedDifference < 0) {
                        updatedRatio = (float) updatedDifference / lbp.availableShrink;
                    }
                    lc.setIPDAdjust(updatedRatio);
                    //System.out.println("LLM.addAreas> old difference = " + lbp.difference + " new difference = " + updatedDifference);
                    //System.out.println("              old ratio = " + lbp.ipdAdjust + " new ratio = " + updatedRatio);
                } else if (false && bTextAlignment == EN_CENTER) {
                    // re-compute indent
                    int updatedIndent = lbp.startIndent + (context.getStackLimit().opt - lbp.lineWidth) / 2;
                    lineArea.setStartIndent(updatedIndent);
                } else if (false && bTextAlignment == EN_END) {
                    // re-compute indent
                    int updatedIndent = lbp.startIndent + (context.getStackLimit().opt - lbp.lineWidth);
                    lineArea.setStartIndent(updatedIndent);
                }

                setCurrentArea(lineArea);
                setChildContext(lc);
                while ((childLM = inlinePosIter.getNextChildLM()) != null) {
                    lc.setFlags(LayoutContext.LAST_AREA, (childLM == lastLM));
                    childLM.addAreas(inlinePosIter, lc);
                    lc.setLeadingSpace(lc.getTrailingSpace());
                    lc.setTrailingSpace(new SpaceSpecifier(false));
                }

                // when can this be null?
                // if display-align is distribute, add space after
                if (context.getSpaceAfter() > 0
                    && (!context.isLastArea() || parentIter.hasNext())) {
                    lineArea.setBPD(lineArea.getBPD() + context.getSpaceAfter());
                }
                parentLM.addChildArea(lineArea);
            } else {
                // pos was the Position inside a penalty item, nothing to do
            }
        }
        setCurrentArea(null); // ?? necessary
    }
}
| Observe line layout element lists, too.
git-svn-id: 102839466c3b40dd9c7e25c0a1a6d26afc40150a@202426 13f79535-47bb-0310-9956-ffa450edef68
| src/java/org/apache/fop/layoutmgr/LineLayoutManager.java | Observe line layout element lists, too. | <ide><path>rc/java/org/apache/fop/layoutmgr/LineLayoutManager.java
<ide> }
<ide> }
<ide> knuthPar.endParagraph();
<add> ElementListObserver.observe(knuthPar, "line", null);
<ide> }
<ide>
<ide> /** |
|
JavaScript | apache-2.0 | 47fecf8342fe566858ef075776e5a0572c878f79 | 0 | jkingdon/ghilbert,raphlinus/ghilbert,raphlinus/ghilbert,kryptine/ghilbert,kryptine/ghilbert,raphlinus/ghilbert,raphlinus/ghilbert,jkingdon/ghilbert | // <license>
// Return the smaller of two comparable values; ties go to the second argument.
GH.min = function(x, y) {
    if (x < y) {
        return x;
    }
    return y;
};
// Return the larger of two comparable values; ties go to the second argument.
GH.max = function(x, y) {
    if (x > y) {
        return x;
    }
    return y;
};
// Return the absolute value of a number.
GH.abs = function(x) {
    if (x >= 0) {
        return x;
    }
    return -x;
};
// Return whichever cursor ([line, offset] pair) comes first in document
// order: compare lines, then offsets within the same line. Ties favor c2.
GH.cursormin = function(c1, c2) {
    var sameLine = c1[0] === c2[0];
    var firstWins = sameLine ? c1[1] < c2[1] : c1[0] < c2[0];
    return firstWins ? c1 : c2;
};
// Return whichever cursor ([line, offset] pair) comes last in document
// order: compare lines, then offsets within the same line. Ties favor c2.
GH.cursormax = function(c1, c2) {
    var sameLine = c1[0] === c2[0];
    var lastWins = sameLine ? c1[1] > c2[1] : c1[0] > c2[0];
    return lastWins ? c1 : c2;
};
// As it's written now, this class combines both model and view of the text.
// It's probably a good idea to separate these out a bit.
//
// Constructor for a canvas-based text editor. `canvas` is the drawing
// surface; `inputlayer` (optional) delivers keyboard/clipboard events and
// gets this editor's handler() installed on it.
GH.CanvasEdit = function(canvas, inputlayer) {
    var self = this;
    this.canvas = canvas;
    this.inputlayer = inputlayer;
    if (inputlayer) {
        // route all input events through this editor instance
        inputlayer.set_handler(function(evt, data) {
            return self.handler(evt, data);
        });
    }
    this.text = [''];  // buffer contents, one string per line
    this.fontsize = 16;
    this.font = this.fontsize + "px Times";
    this.setcursor([0, 0]); // line, offset
    this.linespace = this.fontsize + 2;
    this.cursorvisible = true;
    this.undostack = [];
    // todo: use slightly different logic for identifier->symbols, these
    // fire too easily as substrings
    // input-method table: ASCII digraphs/trigraphs to math symbols
    this.imtrans = {
        'et': '\u03b7',
        'th': '\u03b8',
        'ta': '\u03c4',
        'ph': '\u03c6',
        'ch': '\u03c7',
        'ps': '\u03c8',
        '-.': '\u00ac',
        '->': '\u2192',
        '<->': '\u2194',
        'A.': '\u2200',
        'E.': '\u2203',
        '{/}': '\u2205',
        'e.': '\u2208',
        'x.': '\u2219',
        '/\\': '\u2227',
        '\\/': '\u2228',
        'i^i': '\u2229',
        'u.': '\u222a',
        'C.': '\u2282',
        'C_': '\u2286'
    };
    this.imtranslongest = 3;  // length of the longest key in imtrans
    this.imbuf = null;        // pending input-method characters, or null
    // change listeners; the default one redraws the canvas
    this.listeners = [function() { self.draw(); }];
};
// Notify every registered change listener (the default listener redraws).
GH.CanvasEdit.prototype.dirty = function() {
    var all = this.listeners;
    for (var ix = 0; ix < all.length; ix++) {
        all[ix]();
    }
};
// Snapshot the current text and cursor onto the undo stack.
// (The title argument is currently unused; kept for interface stability.)
GH.CanvasEdit.prototype.addundo = function(title) {
    // A full line-array copy is O(n), which is fine for small buffers.
    var snapshot = this.text.slice();
    this.undostack.push([snapshot, this.cursor]);
};
// Restore the most recent snapshot from the undo stack, if any.
GH.CanvasEdit.prototype.undo = function() {
    if (this.undostack.length === 0) {
        return;
    }
    var saved = this.undostack.pop();
    this.text = saved[0];
    this.cursor = saved[1];
    this.dirty();
};
// Fetch the canvas 2D drawing context with the editor font pre-applied.
GH.CanvasEdit.prototype.canvasctx = function() {
    var context = this.canvas.getContext("2d");
    context.font = this.font;
    return context;
};
// Repaint the whole editor: background, per-line text, selection highlight,
// and (when visible and no selection) the caret.
GH.CanvasEdit.prototype.draw = function() {
    var ctx = this.canvasctx();
    var x = 4;                   // left margin for text
    var y = this.linespace;      // baseline of the current line
    var x0, x1;                  // selection highlight extent on one line
    // So this is a funny story: on FF, subpixel text rendering happens
    // if you fillRect white, but not if you clearRect. Bizarre.
    ctx.fillStyle = "white";
    ctx.fillRect(0, 0, this.canvas.width, this.canvas.height);
    ctx.fillStyle = "black";
    for (var i = 0; i < this.text.length; i++) {
        var line = this.text[i];
        var cursor = this.cursor;
        if (this.selectionpt !== null) {
            // normalize selection endpoints into document order
            var cmin = GH.cursormin(this.selectionpt, cursor);
            var cmax = GH.cursormax(this.selectionpt, cursor);
            if (i >= cmin[0] && i <= cmax[0]) {
                // highlight starts mid-line only on the first selected line
                if (i === cmin[0]) {
                    x0 = x + ctx.measureText(line.substr(0, cmin[1])).width;
                } else {
                    x0 = x;
                }
                // highlight ends mid-line only on the last selected line
                if (i === cmax[0]) {
                    x1 = x + ctx.measureText(line.substr(0, cmax[1])).width;
                } else {
                    x1 = this.canvas.width;
                }
                ctx.fillStyle = '#b4d5fe';
                ctx.fillRect(x0, y - this.linespace + 3, x1 - x0, this.linespace);
                ctx.fillStyle = 'black';
            }
        }
        ctx.fillText(line, x, y);
        // caret: drawn only on the cursor line when no selection is active
        if (this.cursorvisible && i === cursor[0] && this.selectionempty()) {
            var string_width = ctx.measureText(line.substr(0, cursor[1])).width;
            ctx.strokeStyle = "black";
            ctx.beginPath();
            // +0.5 aligns the 1px stroke to the pixel grid
            ctx.moveTo(x + string_width + 0.5, y - this.fontsize + 3);
            ctx.lineTo(x + string_width + 0.5, y + 3);
            ctx.stroke();
        }
        y = y + this.linespace;
    }
};
GH.CanvasEdit.prototype.handler = function(evt, data) {
if (evt === 'textinput') {
return this.handle_textinput(data);
} else if (evt === 'keydown') {
return this.handle_keydown(data);
} else if (evt === 'cut') {
this.imbuf = null; // should be for copy too?
this.addundo('Cut');
this.deleteselection();
this.dirty();
return true;
} else if (evt === 'paste') {
this.imbuf = null;
this.addundo('Paste');
this.inserttext(data);
return true;
} else if (evt === 'undo') {
this.imbuf = null;
this.undo();
return true;
} else if (evt === 'focus') {
this.cursorvisible = data;
this.dirty();
return true;
}
};
GH.CanvasEdit.prototype.selectionempty = function() {
return (this.selectionpt === null ||
(this.selectionpt[0] === this.cursor[0] &&
this.selectionpt[1] === this.cursor[1]));
};
GH.CanvasEdit.prototype.selectiontext = function() {
if (this.selectionpt === null) {
return null;
}
var cmin = GH.cursormin(this.selectionpt, this.cursor);
var cmax = GH.cursormax(this.selectionpt, this.cursor);
if (cmin[0] === cmax[0]) {
return this.text[cmin[0]].substring(cmin[1], cmax[1]);
}
var result = [this.text[cmin[0]].substring(cmin[1])];
for (var i = cmin[0] + 1; i < cmax[0]; i++) {
result.push(this.text[i]);
}
result.push(this.text[cmax[0]].substring(0, cmax[1]));
return result.join('\n');
};
GH.CanvasEdit.prototype.setcursor = function(cursor) {
this.selectionpt = null;
this.cursor = cursor;
var ctx = this.canvasctx();
var line = this.text[cursor[0]];
this.cursorx = ctx.measureText(line.substr(0, cursor[1])).width;
// todo: dirty?
};
GH.CanvasEdit.prototype.xtopos = function(x, lineno) {
var text = this.text[lineno];
var ctx = this.canvasctx();
var r = text.length + 1;
var l = 0;
while (r > l + 1) {
var m = (r + l) >> 1;
if (ctx.measureText(text.substr(0, m)).width > x) {
r = m;
} else {
l = m;
}
}
// todo: maybe pos to the right is closer
return (r + l) >> 1;
};
GH.CanvasEdit.prototype.deleteselection = function() {
if (this.selectionpt === null) {
return;
}
var cmin = GH.cursormin(this.selectionpt, this.cursor);
var cmax = GH.cursormax(this.selectionpt, this.cursor);
this.text.splice(cmin[0], cmax[0] - cmin[0] + 1,
this.text[cmin[0]].substr(0, cmin[1]) +
this.text[cmax[0]].substr(cmax[1]));
this.setcursor(cmin);
};
GH.CanvasEdit.prototype.save = function() {
var req = new XMLHttpRequest();
var text = ('name=' + encodeURIComponent(name) +
'&content=' + encodeURIComponent(this.text.join('\n')));
req.open('POST', '/save', false);
req.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
req.send(text);
};
GH.CanvasEdit.prototype.handle_keydown = function(evt) {
var lineno = this.cursor[0];
var pos = this.cursor[1];
var text = this.text[lineno];
var updown = false;
var newcursor = null;
if (evt.keyCode === 8) {
this.addundo('Backspace');
this.imbuf = null;
if (this.selectionpt === null) {
if (pos) {
this.selectionpt = [lineno, pos - 1];
} else if (lineno) {
this.selectionpt = [lineno - 1, this.text[lineno - 1].length];
}
}
this.deleteselection();
} else if (evt.keyCode === 13) {
this.deleteselection();
this.imbuf = null;
this.inserttext('\n');
} else if (evt.keyCode === 37) {
if (pos) {
newcursor = [lineno, pos - 1];
} else if (lineno) {
newcursor = [lineno - 1, this.text[lineno - 1].length];
}
} else if (evt.keyCode === 39) {
if (pos < text.length) {
newcursor = [lineno, pos + 1];
} else if (lineno < this.text.length - 1) {
newcursor = [lineno + 1, 0];
}
} else if (evt.keyCode === 38 || evt.keyCode === 40) {
updown = true;
if (evt.keyCode === 38 && lineno === 0) {
newcursor = [0, 0];
} else if (evt.keyCode === 40 && lineno === this.text.length - 1) {
newcursor = [lineno, text.length];
} else {
lineno += evt.keyCode === 38 ? -1 : 1;
newcursor = [lineno, this.xtopos(this.cursorx, lineno)];
}
} else if (evt.keyCode === 65 && evt.ctrlKey) {
newcursor = [lineno, 0];
} else if (evt.keyCode === 69 && evt.ctrlKey) {
newcursor = [lineno, text.length];
} else if (evt.keyCode === 83 && evt.ctrlKey) {
this.save();
return true;
} else {
return false;
}
if (newcursor !== null) {
this.imbuf = null;
if (evt.shiftKey) {
if (this.selectionpt === null) {
this.selectionpt = this.cursor;
}
this.cursor = newcursor;
this.inputlayer.set_selection(this.selectiontext());
} else {
if (updown) {
this.selectionpt = null;
this.cursor = newcursor;
} else {
this.setcursor(newcursor);
}
}
}
this.dirty();
return true;
};
GH.CanvasEdit.prototype.inserttext = function(data) {
this.deleteselection();
var lineno = this.cursor[0];
var pos = this.cursor[1];
var text = this.text[lineno];
var spl = data.split('\n');
if (spl.length === 1) {
this.text[lineno] = text.substr(0, pos) + data + text.substr(pos);
this.setcursor([lineno, pos + data.length]);
} else {
this.text = this.text.slice(0, lineno).concat(
text.substr(0, pos) + spl[0],
spl.slice(1, spl.length - 1),
spl[spl.length - 1] + text.substr(pos),
this.text.slice(lineno + 1));
this.setcursor([lineno + spl.length - 1, spl[spl.length - 1].length]);
}
this.dirty();
};
GH.CanvasEdit.prototype.handle_textinput = function(data) {
this.addundo('Insert text');
if (this.imbuf === null) {
this.imbuf = '';
}
this.imbuf += data;
// Note, this functionality doesn't work at all at present since direct.js
// sets this.imtrans to {}.
for (var i = GH.min(this.imbuf.length, this.imtranslongest); i >= 1; i--) {
var seq = this.imbuf.substr(-i);
if (this.imtrans.hasOwnProperty(seq)) {
var lineno = this.cursor[0];
var pos = this.cursor[1];
var line = this.text[lineno];
// Note: there are some logic errors if one substitution
// is a prefix of another (eg <-, <->).
var newpos = pos - i + 1;
this.text[lineno] = line.substr(0, newpos) + line.substr(pos);
this.cursor = [lineno, newpos];
this.inserttext(this.imtrans[seq]);
return true;
}
}
this.inserttext(data);
return true;
};
// This gets an editor instantiated quickly, for testing.
GH.CanvasEdit.init = function() {
var canvas = document.getElementById('canvas');
var inputlayer = new GH.InputLayer();
inputlayer.attach(canvas);
var text = new GH.CanvasEdit(canvas, inputlayer);
canvas.focus();
text.dirty();
return text;
};
function myalert(s) {
document.getElementById('status').firstChild.nodeValue = s;
}
| js/edit.js | // <license>
GH.min = function(x, y) {
return x < y ? x : y;
};
GH.max = function(x, y) {
return x > y ? x : y;
};
GH.abs = function(x) {
return x >= 0 ? x : -x;
};
GH.cursormin = function(c1, c2) {
if (c1[0] == c2[0]) return c1[1] < c2[1] ? c1 : c2;
return c1[0] < c2[0] ? c1 : c2;
};
GH.cursormax = function(c1, c2) {
if (c1[0] == c2[0]) return c1[1] > c2[1] ? c1 : c2;
return c1[0] > c2[0] ? c1 : c2;
};
// As it's written now, this class combines both model and view of the text.
// It's probably a good idea to separate these out a bit.
GH.CanvasEdit = function(canvas, inputlayer) {
var self = this;
this.canvas = canvas;
this.inputlayer = inputlayer;
if (inputlayer) {
inputlayer.set_handler(function(evt, data) {
return self.handler(evt, data);
});
}
this.text = [''];
this.fontsize = 16;
this.font = this.fontsize + "px Times";
this.setcursor([0, 0]); // line, offset
this.linespace = this.fontsize + 2;
this.cursorvisible = true;
this.undostack = [];
this.imtrans = {};
// todo: use slightly different logic for identifier->symbols, these
// fire too easily as substrings
this.imtrans['et'] = '\u03b7';
this.imtrans['th'] = '\u03b8';
this.imtrans['ta'] = '\u03c4';
this.imtrans['ph'] = '\u03c6';
this.imtrans['ch'] = '\u03c7';
this.imtrans['ps'] = '\u03c8';
this.imtrans['-.'] = '\u00ac';
this.imtrans['->'] = '\u2192';
this.imtrans['<->'] = '\u2194';
this.imtrans['A.'] = '\u2200';
this.imtrans['E.'] = '\u2203';
this.imtrans['{/}'] = '\u2205';
this.imtrans['e.'] = '\u2208';
this.imtrans['x.'] = '\u2219';
this.imtrans['/\\'] = '\u2227';
this.imtrans['\\/'] = '\u2228';
this.imtrans['i^i'] = '\u2229';
this.imtrans['u.'] = '\u222a';
this.imtrans['C.'] = '\u2282';
this.imtrans['C_'] = '\u2286';
this.imtranslongest = 3;
this.imbuf = null;
this.listeners = [function() { self.draw(); }];
};
GH.CanvasEdit.prototype.dirty = function() {
for (var i = 0; i < this.listeners.length; i++) {
this.listeners[i]();
}
};
GH.CanvasEdit.prototype.addundo = function(title) {
// Full copy is expensive asymptotically, but should be okay for small
// buffers.
this.undostack.push([this.text.slice(), this.cursor]);
};
GH.CanvasEdit.prototype.undo = function() {
if (this.undostack.length) {
var newstate = this.undostack.pop();
this.text = newstate[0];
this.cursor = newstate[1];
this.dirty();
}
};
GH.CanvasEdit.prototype.canvasctx = function() {
var ctx = this.canvas.getContext("2d");
ctx.font = this.font;
return ctx;
};
GH.CanvasEdit.prototype.draw = function() {
var ctx = this.canvasctx();
var x = 4;
var y = this.linespace;
// So this is a funny story: on FF, subpixel text rendering happens
// if you fillRect white, but not if you clearRect. Bizarre.
ctx.fillStyle = "white";
ctx.fillRect(0, 0, this.canvas.width, this.canvas.height);
ctx.fillStyle = "black";
for (var i = 0; i < this.text.length; i++) {
var line = this.text[i];
var cursor = this.cursor;
if (this.selectionpt != null) {
var cmin = GH.cursormin(this.selectionpt, cursor);
var cmax = GH.cursormax(this.selectionpt, cursor);
if (i >= cmin[0] && i <= cmax[0]) {
if (i == cmin[0]) {
var x0 = x + ctx.measureText(line.substr(0, cmin[1])).width;
} else {
var x0 = x;
}
if (i == cmax[0]) {
var x1 = x + ctx.measureText(line.substr(0, cmax[1])).width;
} else {
var x1 = this.canvas.width;
}
ctx.fillStyle = '#b4d5fe';
ctx.fillRect(x0, y - this.linespace + 3, x1 - x0, this.linespace);
ctx.fillStyle = 'black';
}
}
ctx.fillText(line, x, y);
if (this.cursorvisible && i == cursor[0] && this.selectionempty()) {
var string_width = ctx.measureText(line.substr(0, cursor[1])).width;
ctx.strokeStyle = "black";
ctx.beginPath();
ctx.moveTo(x + string_width + 0.5, y - this.fontsize + 3);
ctx.lineTo(x + string_width + 0.5, y + 3);
ctx.stroke();
}
y = y + this.linespace;
}
};
GH.CanvasEdit.prototype.handler = function(evt, data) {
if (evt == 'textinput') {
return this.handle_textinput(data);
} else if (evt == 'keydown') {
return this.handle_keydown(data);
} else if (evt == 'cut') {
this.imbuf = null; // should be for copy too?
this.addundo('Cut');
this.deleteselection();
this.dirty();
return true;
} else if (evt == 'paste') {
this.imbuf = null;
this.addundo('Paste');
this.inserttext(data);
return true;
} else if (evt == 'undo') {
this.imbuf = null;
this.undo();
return true;
} else if (evt == 'focus') {
this.cursorvisible = data;
this.dirty();
return true;
}
};
GH.CanvasEdit.prototype.selectionempty = function() {
return this.selectionpt == null || (this.selectionpt[0] == this.cursor[0] &&
this.selectionpt[1] == this.cursor[1]);
}
GH.CanvasEdit.prototype.selectiontext = function() {
if (this.selectionpt == null)
return null;
var cmin = GH.cursormin(this.selectionpt, this.cursor);
var cmax = GH.cursormax(this.selectionpt, this.cursor);
if (cmin[0] == cmax[0]) {
return this.text[cmin[0]].substring(cmin[1], cmax[1]);
}
var result = [this.text[cmin[0]].substring(cmin[1])];
for (var i = cmin[0] + 1; i < cmax[0]; i++) {
result.push(this.text[i]);
}
result.push(this.text[cmax[0]].substring(0, cmax[1]));
return result.join('\n');
};
GH.CanvasEdit.prototype.setcursor = function(cursor) {
this.selectionpt = null;
this.cursor = cursor;
var ctx = this.canvasctx();
var line = this.text[cursor[0]];
this.cursorx = ctx.measureText(line.substr(0, cursor[1])).width;
// todo: dirty?
};
GH.CanvasEdit.prototype.xtopos = function(x, lineno) {
var text = this.text[lineno];
var ctx = this.canvasctx();
var r = text.length + 1;
var l = 0;
while (r > l + 1) {
var m = (r + l) >> 1;
if (ctx.measureText(text.substr(0, m)).width > x) {
r = m;
} else {
l = m;
}
}
// todo: maybe pos to the right is closer
return (r + l) >> 1;
};
GH.CanvasEdit.prototype.deleteselection = function() {
if (this.selectionpt == null) {
return;
}
var cmin = GH.cursormin(this.selectionpt, this.cursor);
var cmax = GH.cursormax(this.selectionpt, this.cursor);
this.text.splice(cmin[0], cmax[0] - cmin[0] + 1,
this.text[cmin[0]].substr(0, cmin[1]) +
this.text[cmax[0]].substr(cmax[1]));
this.setcursor(cmin);
};
GH.CanvasEdit.prototype.save = function() {
var req = new XMLHttpRequest();
var text = ('name=' + encodeURIComponent(name) +
'&content=' + encodeURIComponent(this.text.join('\n')));
req.open('POST', '/save', false);
req.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
req.send(text);
};
GH.CanvasEdit.prototype.handle_keydown = function(evt) {
var lineno = this.cursor[0];
var pos = this.cursor[1];
var text = this.text[lineno];
var updown = false;
var newcursor = null;
if (evt.keyCode == 8) {
this.addundo('Backspace');
this.imbuf = null;
if (this.selectionpt == null) {
if (pos) {
this.selectionpt = [lineno, pos - 1];
} else if (lineno) {
this.selectionpt = [lineno - 1, this.text[lineno - 1].length];
}
}
this.deleteselection();
} else if (evt.keyCode == 13) {
this.deleteselection();
this.imbuf = null;
this.inserttext('\n');
} else if (evt.keyCode == 37) {
if (pos) {
newcursor = [lineno, pos - 1];
} else if (lineno) {
newcursor = [lineno - 1, this.text[lineno - 1].length];
}
} else if (evt.keyCode == 39) {
if (pos < text.length) {
newcursor = [lineno, pos + 1];
} else if (lineno < this.text.length - 1) {
newcursor = [lineno + 1, 0];
}
} else if (evt.keyCode == 38 || evt.keyCode == 40) {
updown = true;
if (evt.keyCode == 38 && lineno == 0) {
newcursor = [0, 0];
} else if (evt.keyCode == 40 && lineno == this.text.length - 1) {
newcursor = [lineno, text.length];
} else {
lineno += evt.keyCode == 38 ? -1 : 1;
newcursor = [lineno, this.xtopos(this.cursorx, lineno)];
}
} else if (evt.keyCode == 65 && evt.ctrlKey) {
newcursor = [lineno, 0];
} else if (evt.keyCode == 69 && evt.ctrlKey) {
newcursor = [lineno, text.length];
} else if (evt.keyCode == 83 && evt.ctrlKey) {
this.save();
return true;
} else {
return false;
}
if (newcursor != null) {
this.imbuf = null;
if (evt.shiftKey) {
if (this.selectionpt == null) {
this.selectionpt = this.cursor;
}
this.cursor = newcursor;
this.inputlayer.set_selection(this.selectiontext());
} else {
if (updown) {
this.selectionpt = null;
this.cursor = newcursor;
this.dirty();
} else {
this.setcursor(newcursor);
}
}
}
this.dirty();
return true;
};
GH.CanvasEdit.prototype.inserttext = function(data) {
this.deleteselection();
var lineno = this.cursor[0];
var pos = this.cursor[1];
var text = this.text[lineno];
var spl = data.split('\n');
if (spl.length == 1) {
this.text[lineno] = text.substr(0, pos) + data + text.substr(pos);
this.setcursor([lineno, pos + data.length]);
} else {
this.text = this.text.slice(0, lineno).concat(
text.substr(0, pos) + spl[0],
spl.slice(1, spl.length - 1),
spl[spl.length - 1] + text.substr(pos),
this.text.slice(lineno + 1));
this.setcursor([lineno + spl.length - 1, spl[spl.length - 1].length]);
}
this.dirty();
};
GH.CanvasEdit.prototype.handle_textinput = function(data) {
this.addundo('Insert text');
if (this.imbuf == null) {
this.imbuf = '';
}
this.imbuf += data;
for (var i = GH.min(this.imbuf.length, this.imtranslongest); i >= 1; i--) {
var seq = this.imbuf.substr(-i);
if (seq in this.imtrans) {
var lineno = this.cursor[0];
var pos = this.cursor[1];
var line = this.text[lineno];
// Note: there are some logic errors if one substitution
// is a prefix of another (eg <-, <->).
var newpos = pos - i + 1;
this.text[lineno] = line.substr(0, newpos) + line.substr(pos);
this.cursor = [lineno, newpos];
this.inserttext(this.imtrans[seq]);
return true;
}
}
this.inserttext(data);
return true;
};
// This gets an editor instantiated quickly, for testing.
GH.CanvasEdit.init = function() {
var canvas = document.getElementById('canvas');
var inputlayer = new GH.InputLayer();
inputlayer.attach(canvas);
var text = new GH.CanvasEdit(canvas, inputlayer);
canvas.focus();
text.dirty();
return text;
}
function myalert(s) {
document.getElementById('status').firstChild.nodeValue = s;
}
| JSLintify edit.js
git-svn-id: 090da6488605dfa80d897e2ab24bbff42289b741@19 c014309e-2312-11df-8901-e16673284e38
| js/edit.js | JSLintify edit.js | <ide><path>s/edit.js
<ide> };
<ide>
<ide> GH.cursormin = function(c1, c2) {
<del> if (c1[0] == c2[0]) return c1[1] < c2[1] ? c1 : c2;
<add> if (c1[0] === c2[0]) {
<add> return c1[1] < c2[1] ? c1 : c2;
<add> }
<ide> return c1[0] < c2[0] ? c1 : c2;
<ide> };
<ide>
<ide> GH.cursormax = function(c1, c2) {
<del> if (c1[0] == c2[0]) return c1[1] > c2[1] ? c1 : c2;
<add> if (c1[0] === c2[0]) {
<add> return c1[1] > c2[1] ? c1 : c2;
<add> }
<ide> return c1[0] > c2[0] ? c1 : c2;
<ide> };
<ide>
<ide>
<ide> this.undostack = [];
<ide>
<del> this.imtrans = {};
<ide> // todo: use slightly different logic for identifier->symbols, these
<ide> // fire too easily as substrings
<del> this.imtrans['et'] = '\u03b7';
<del> this.imtrans['th'] = '\u03b8';
<del> this.imtrans['ta'] = '\u03c4';
<del> this.imtrans['ph'] = '\u03c6';
<del> this.imtrans['ch'] = '\u03c7';
<del> this.imtrans['ps'] = '\u03c8';
<del>
<del> this.imtrans['-.'] = '\u00ac';
<del> this.imtrans['->'] = '\u2192';
<del> this.imtrans['<->'] = '\u2194';
<del> this.imtrans['A.'] = '\u2200';
<del> this.imtrans['E.'] = '\u2203';
<del> this.imtrans['{/}'] = '\u2205';
<del> this.imtrans['e.'] = '\u2208';
<del> this.imtrans['x.'] = '\u2219';
<del> this.imtrans['/\\'] = '\u2227';
<del> this.imtrans['\\/'] = '\u2228';
<del> this.imtrans['i^i'] = '\u2229';
<del> this.imtrans['u.'] = '\u222a';
<del> this.imtrans['C.'] = '\u2282';
<del> this.imtrans['C_'] = '\u2286';
<add> this.imtrans = {
<add> 'et': '\u03b7',
<add> 'th': '\u03b8',
<add> 'ta': '\u03c4',
<add> 'ph': '\u03c6',
<add> 'ch': '\u03c7',
<add> 'ps': '\u03c8',
<add>
<add> '-.': '\u00ac',
<add> '->': '\u2192',
<add> '<->': '\u2194',
<add> 'A.': '\u2200',
<add> 'E.': '\u2203',
<add> '{/}': '\u2205',
<add> 'e.': '\u2208',
<add> 'x.': '\u2219',
<add> '/\\': '\u2227',
<add> '\\/': '\u2228',
<add> 'i^i': '\u2229',
<add> 'u.': '\u222a',
<add> 'C.': '\u2282',
<add> 'C_': '\u2286'
<add> };
<add>
<ide> this.imtranslongest = 3;
<ide>
<ide> this.imbuf = null;
<ide> var ctx = this.canvasctx();
<ide> var x = 4;
<ide> var y = this.linespace;
<add> var x0, x1;
<ide> // So this is a funny story: on FF, subpixel text rendering happens
<ide> // if you fillRect white, but not if you clearRect. Bizarre.
<ide> ctx.fillStyle = "white";
<ide> for (var i = 0; i < this.text.length; i++) {
<ide> var line = this.text[i];
<ide> var cursor = this.cursor;
<del> if (this.selectionpt != null) {
<add> if (this.selectionpt !== null) {
<ide> var cmin = GH.cursormin(this.selectionpt, cursor);
<ide> var cmax = GH.cursormax(this.selectionpt, cursor);
<ide> if (i >= cmin[0] && i <= cmax[0]) {
<del> if (i == cmin[0]) {
<del> var x0 = x + ctx.measureText(line.substr(0, cmin[1])).width;
<add> if (i === cmin[0]) {
<add> x0 = x + ctx.measureText(line.substr(0, cmin[1])).width;
<ide> } else {
<del> var x0 = x;
<add> x0 = x;
<ide> }
<del> if (i == cmax[0]) {
<del> var x1 = x + ctx.measureText(line.substr(0, cmax[1])).width;
<add> if (i === cmax[0]) {
<add> x1 = x + ctx.measureText(line.substr(0, cmax[1])).width;
<ide> } else {
<del> var x1 = this.canvas.width;
<add> x1 = this.canvas.width;
<ide> }
<ide> ctx.fillStyle = '#b4d5fe';
<ide> ctx.fillRect(x0, y - this.linespace + 3, x1 - x0, this.linespace);
<ide> }
<ide> }
<ide> ctx.fillText(line, x, y);
<del> if (this.cursorvisible && i == cursor[0] && this.selectionempty()) {
<add> if (this.cursorvisible && i === cursor[0] && this.selectionempty()) {
<ide> var string_width = ctx.measureText(line.substr(0, cursor[1])).width;
<ide> ctx.strokeStyle = "black";
<ide> ctx.beginPath();
<ide> };
<ide>
<ide> GH.CanvasEdit.prototype.handler = function(evt, data) {
<del> if (evt == 'textinput') {
<add> if (evt === 'textinput') {
<ide> return this.handle_textinput(data);
<del> } else if (evt == 'keydown') {
<add> } else if (evt === 'keydown') {
<ide> return this.handle_keydown(data);
<del> } else if (evt == 'cut') {
<add> } else if (evt === 'cut') {
<ide> this.imbuf = null; // should be for copy too?
<ide> this.addundo('Cut');
<ide> this.deleteselection();
<ide> this.dirty();
<ide> return true;
<del> } else if (evt == 'paste') {
<add> } else if (evt === 'paste') {
<ide> this.imbuf = null;
<ide> this.addundo('Paste');
<ide> this.inserttext(data);
<ide> return true;
<del> } else if (evt == 'undo') {
<add> } else if (evt === 'undo') {
<ide> this.imbuf = null;
<ide> this.undo();
<ide> return true;
<del> } else if (evt == 'focus') {
<add> } else if (evt === 'focus') {
<ide> this.cursorvisible = data;
<ide> this.dirty();
<ide> return true;
<ide> };
<ide>
<ide> GH.CanvasEdit.prototype.selectionempty = function() {
<del> return this.selectionpt == null || (this.selectionpt[0] == this.cursor[0] &&
<del> this.selectionpt[1] == this.cursor[1]);
<del>}
<add> return (this.selectionpt === null ||
<add> (this.selectionpt[0] === this.cursor[0] &&
<add> this.selectionpt[1] === this.cursor[1]));
<add>};
<ide>
<ide> GH.CanvasEdit.prototype.selectiontext = function() {
<del> if (this.selectionpt == null)
<add> if (this.selectionpt === null) {
<ide> return null;
<add> }
<ide> var cmin = GH.cursormin(this.selectionpt, this.cursor);
<ide> var cmax = GH.cursormax(this.selectionpt, this.cursor);
<del> if (cmin[0] == cmax[0]) {
<add> if (cmin[0] === cmax[0]) {
<ide> return this.text[cmin[0]].substring(cmin[1], cmax[1]);
<ide> }
<ide> var result = [this.text[cmin[0]].substring(cmin[1])];
<ide> };
<ide>
<ide> GH.CanvasEdit.prototype.deleteselection = function() {
<del> if (this.selectionpt == null) {
<add> if (this.selectionpt === null) {
<ide> return;
<ide> }
<ide> var cmin = GH.cursormin(this.selectionpt, this.cursor);
<ide> var text = this.text[lineno];
<ide> var updown = false;
<ide> var newcursor = null;
<del> if (evt.keyCode == 8) {
<add> if (evt.keyCode === 8) {
<ide> this.addundo('Backspace');
<ide> this.imbuf = null;
<del> if (this.selectionpt == null) {
<add> if (this.selectionpt === null) {
<ide> if (pos) {
<ide> this.selectionpt = [lineno, pos - 1];
<ide> } else if (lineno) {
<ide> }
<ide> }
<ide> this.deleteselection();
<del> } else if (evt.keyCode == 13) {
<add> } else if (evt.keyCode === 13) {
<ide> this.deleteselection();
<ide> this.imbuf = null;
<ide> this.inserttext('\n');
<del> } else if (evt.keyCode == 37) {
<add> } else if (evt.keyCode === 37) {
<ide> if (pos) {
<ide> newcursor = [lineno, pos - 1];
<ide> } else if (lineno) {
<ide> newcursor = [lineno - 1, this.text[lineno - 1].length];
<ide> }
<del> } else if (evt.keyCode == 39) {
<add> } else if (evt.keyCode === 39) {
<ide> if (pos < text.length) {
<ide> newcursor = [lineno, pos + 1];
<ide> } else if (lineno < this.text.length - 1) {
<ide> newcursor = [lineno + 1, 0];
<ide> }
<del> } else if (evt.keyCode == 38 || evt.keyCode == 40) {
<add> } else if (evt.keyCode === 38 || evt.keyCode === 40) {
<ide> updown = true;
<del> if (evt.keyCode == 38 && lineno == 0) {
<add> if (evt.keyCode === 38 && lineno === 0) {
<ide> newcursor = [0, 0];
<del> } else if (evt.keyCode == 40 && lineno == this.text.length - 1) {
<add> } else if (evt.keyCode === 40 && lineno === this.text.length - 1) {
<ide> newcursor = [lineno, text.length];
<ide> } else {
<del> lineno += evt.keyCode == 38 ? -1 : 1;
<add> lineno += evt.keyCode === 38 ? -1 : 1;
<ide> newcursor = [lineno, this.xtopos(this.cursorx, lineno)];
<ide> }
<del> } else if (evt.keyCode == 65 && evt.ctrlKey) {
<add> } else if (evt.keyCode === 65 && evt.ctrlKey) {
<ide> newcursor = [lineno, 0];
<del> } else if (evt.keyCode == 69 && evt.ctrlKey) {
<add> } else if (evt.keyCode === 69 && evt.ctrlKey) {
<ide> newcursor = [lineno, text.length];
<del> } else if (evt.keyCode == 83 && evt.ctrlKey) {
<add> } else if (evt.keyCode === 83 && evt.ctrlKey) {
<ide> this.save();
<ide> return true;
<ide> } else {
<ide> return false;
<ide> }
<del> if (newcursor != null) {
<add> if (newcursor !== null) {
<ide> this.imbuf = null;
<ide> if (evt.shiftKey) {
<del> if (this.selectionpt == null) {
<add> if (this.selectionpt === null) {
<ide> this.selectionpt = this.cursor;
<ide> }
<ide> this.cursor = newcursor;
<ide> if (updown) {
<ide> this.selectionpt = null;
<ide> this.cursor = newcursor;
<del> this.dirty();
<ide> } else {
<ide> this.setcursor(newcursor);
<ide> }
<ide> var pos = this.cursor[1];
<ide> var text = this.text[lineno];
<ide> var spl = data.split('\n');
<del> if (spl.length == 1) {
<add> if (spl.length === 1) {
<ide> this.text[lineno] = text.substr(0, pos) + data + text.substr(pos);
<ide> this.setcursor([lineno, pos + data.length]);
<ide> } else {
<ide>
<ide> GH.CanvasEdit.prototype.handle_textinput = function(data) {
<ide> this.addundo('Insert text');
<del> if (this.imbuf == null) {
<add> if (this.imbuf === null) {
<ide> this.imbuf = '';
<ide> }
<ide> this.imbuf += data;
<add> // Note, this functionality doesn't work at all at present since direct.js
<add> // sets this.imtrans to {}.
<ide> for (var i = GH.min(this.imbuf.length, this.imtranslongest); i >= 1; i--) {
<ide> var seq = this.imbuf.substr(-i);
<del> if (seq in this.imtrans) {
<add> if (this.imtrans.hasOwnProperty(seq)) {
<ide> var lineno = this.cursor[0];
<ide> var pos = this.cursor[1];
<ide> var line = this.text[lineno];
<ide> canvas.focus();
<ide> text.dirty();
<ide> return text;
<del>}
<add>};
<ide>
<ide> function myalert(s) {
<ide> document.getElementById('status').firstChild.nodeValue = s; |
|
JavaScript | isc | a308d86a439041afcc6dac39fb72e91fee4bf2a2 | 0 | pevers/images-scraper | 'use strict'
var request = require('request')
, cheerio = require('cheerio')
, Promise = require('bluebird')
, Nightmare = require('nightmare')
, RateLimiter = require('limiter').RateLimiter
, EventEmitter = require('events')
, util = require('util');
function Scraper () {
EventEmitter.call(this);
}
util.inherits(Scraper, EventEmitter);
/**
* Get the image src for images, options specify the details.
*/
Scraper.prototype.list = function (options) {
var self = this;
if (!options || !options.keyword) return Promise.reject(new Error('no keyword provided'));
this.keyword = options.keyword;
this.rlimit = new RateLimiter(options.rlimit || 0, 'second');
this.userAgent = options.userAgent || 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36';
this.noptions = options.nightmare || {};
this.timeout = options.timeout || 10000;
this.advanced = options.advanced;
return self._links().then(function (res) {
if (options.num) {
res = res.slice(0, options.num);
}
res.filter(function (r) {
return r !== null;
});
self.emit('end', res);
return res;
});
}
/**
* Returns a complete list of all the image details.
*/
Scraper.prototype._links = function () {
var self = this;
var search_base = 'https://www.google.com/search?q=%&source=lnms&tbm=isch&sa=X';
if (this.advanced) {
var base = '&tbs=';
var build = [];
if (this.advanced.resolution) {
build.push('isz:'+this.advanced.resolution);
}
if (this.advanced.imgType) {
build.push('itp:'+this.advanced.imgType);
}
build = build.length > 1 ? build.join(',') : build[0];
search_base += '&tbs='+build;
}
return new Promise.resolve(
new Nightmare(self.noptions)
.useragent(self.userAgent)
.goto(search_base.replace('%', encodeURIComponent(self.keyword)))
.wait()
.inject('js', __dirname + '/jquery-2.1.4.min.js')
.evaluate(function (timeout) {
$.data(document, 'timeout', false);
setTimeout(function () {
$.data(document, 'timeout', true);
}, timeout);
setInterval(function() {
$('html, body').animate({ scrollTop: $(document).height() }, 1000);
var button = $('.ksb._kvc'); // try to load more
if (button) {
$.data(document, 'finished', false);
button.click();
}
}, 1000);
// catch all AJAX events such that we can determine when we are finished
var oldSend = XMLHttpRequest.prototype.send;
XMLHttpRequest.prototype.send = function () {
var oldOnReady = this.onreadystatechange;
this.onreadystatechange = function () {
oldOnReady.call(this);
if (this.readyState === XMLHttpRequest.DONE)
$.data(document, 'finished', true);
}
oldSend.apply(this, arguments);
}
}, self.timeout)
.wait(function () {
return (($(window).scrollTop() + $(window).height() == $(document).height()) &&
!$('.ksb._kvc').is(':visible') &&
$.data(document, 'finished')) || $.data(document, 'timeout');
})
.evaluate(function () {
// get all the src's
var results = [];
$('.rg_l').each(function () {
var meta = JSON.parse($(this).parent().find('.rg_meta').text());
var item = {
type: 'image/' + meta.ity,
width: meta.ow,
height: meta.oh,
// size: meta.os.match(/[-+]?(\d*[.])?\d+/)[0], // fails query as property no longer exists
url: meta.ou,
thumb_url: meta.tu,
thumb_width: meta.tw,
thumb_height: meta.th
// unit: meta.os.match(/\D+/).slice(-1)[0] // fails query as property no longer exists
};
results.push(item);
});
return results;
}).end()
);
}
module.exports = Scraper;
| lib/google-images-scraper.js | 'use strict'
var request = require('request')
, cheerio = require('cheerio')
, Promise = require('bluebird')
, Nightmare = require('nightmare')
, RateLimiter = require('limiter').RateLimiter
, EventEmitter = require('events')
, util = require('util');
function Scraper () {
EventEmitter.call(this);
}
util.inherits(Scraper, EventEmitter);
/**
* Get the image src for images, options specify the details.
*/
Scraper.prototype.list = function (options) {
var self = this;
if (!options || !options.keyword) return Promise.reject(new Error('no keyword provided'));
this.keyword = options.keyword;
this.rlimit = new RateLimiter(options.rlimit || 0, 'second');
this.userAgent = options.userAgent || 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36';
this.noptions = options.nightmare || {};
this.timeout = options.timeout || 10000;
this.advanced = options.advanced;
return self._links().then(function (res) {
if (options.num) {
res = res.slice(0, options.num);
}
res.filter(function (r) {
return r !== null;
});
self.emit('end', res);
return res;
});
}
/**
* Returns a complete list of all the image details.
*/
Scraper.prototype._links = function () {
var self = this;
var search_base = 'https://www.google.com/search?q=%&source=lnms&tbm=isch&sa=X';
if (this.advanced) {
var base = '&tbs=';
var build = [];
if (this.advanced.resolution) {
build.push('isz:'+this.advanced.resolution);
}
if (this.advanced.imgType) {
build.push('itp:'+this.advanced.imgType);
}
build = build.length > 1 ? build.join(',') : build[0];
search_base += '&tbs='+build;
}
return new Promise.resolve(
new Nightmare(self.noptions)
.useragent(self.userAgent)
.goto(search_base.replace('%', encodeURIComponent(self.keyword)))
.wait()
.inject('js', __dirname + '/jquery-2.1.4.min.js')
.evaluate(function (timeout) {
$.data(document, 'timeout', false);
setTimeout(function () {
$.data(document, 'timeout', true);
}, timeout);
setInterval(function() {
$('html, body').animate({ scrollTop: $(document).height() }, 1000);
var button = $('.ksb._kvc'); // try to load more
if (button) {
$.data(document, 'finished', false);
button.click();
}
}, 1000);
// catch all AJAX events such that we can determine when we are finished
var oldSend = XMLHttpRequest.prototype.send;
XMLHttpRequest.prototype.send = function () {
var oldOnReady = this.onreadystatechange;
this.onreadystatechange = function () {
oldOnReady.call(this);
if (this.readyState === XMLHttpRequest.DONE)
$.data(document, 'finished', true);
}
oldSend.apply(this, arguments);
}
}, self.timeout)
.wait(function () {
return (($(window).scrollTop() + $(window).height() == $(document).height()) &&
!$('.ksb._kvc').is(':visible') &&
$.data(document, 'finished')) || $.data(document, 'timeout');
})
.on('console', function(type, msg) {
console.log('msg ' + msg);
})
.evaluate(function () {
// get all the src's
var results = [];
$('.rg_l').each(function () {
var meta = JSON.parse($(this).parent().find('.rg_meta').text());
console.log(JSON.stringify(meta));
var item = {
type: 'image/' + meta.ity,
width: meta.ow,
height: meta.oh,
// size: meta.os.match(/[-+]?(\d*[.])?\d+/)[0], // fails query as property no longer exists
url: meta.ou,
thumb_url: meta.tu,
thumb_width: meta.tw,
thumb_height: meta.th
// unit: meta.os.match(/\D+/).slice(-1)[0] // fails query as property no longer exists
};
results.push(item);
});
return results;
}).end()
);
}
module.exports = Scraper; | removed console log statements | lib/google-images-scraper.js | removed console log statements | <ide><path>ib/google-images-scraper.js
<ide> $.data(document, 'finished')) || $.data(document, 'timeout');
<ide> })
<ide>
<del> .on('console', function(type, msg) {
<del> console.log('msg ' + msg);
<del> })
<del>
<ide> .evaluate(function () {
<ide> // get all the src's
<ide> var results = [];
<ide> $('.rg_l').each(function () {
<ide> var meta = JSON.parse($(this).parent().find('.rg_meta').text());
<del>
<del> console.log(JSON.stringify(meta));
<ide>
<ide> var item = {
<ide> type: 'image/' + meta.ity, |
|
JavaScript | apache-2.0 | 69cd5df2c913ceac331dae16abcd21f0e2d44e73 | 0 | fsteff/FlowMusic,fsteff/FlowMusic,fsteff/FlowMusic,fsteff/FlowMusic | /**
* @author Fixl Stefan
* Copyright 2017 Fixl Stefan
*/
//----------------------------------------------- CLASS PlayQueue ---------------------------------------
/**
* PlayQueue stores and manages the list of songs in the current playlistId/queue.
* The PlayQueue instance is stored by Central - use Central.getPlayQueue() to retrieve it
* @constructor
*/
function PlayQueue() {
this.songs = [];
this.listeners = [];
this.currentPos = 0;
this.history = [];
}
/**
* Play next song in the list (called by MusicPlayer - not for other use!)
* @param random
* @returns the new current song
*/
PlayQueue.prototype.next = function (random) {
if(this.songs.length > this.currentPos && this.songs[this.currentPos] != null) {
this.history.push(this.songs[this.currentPos]);
while (this.history.length > 10) {
this.history.shift();
}
}
if (random) {
this.currentPos = Math.round(Math.random() * this.songs.length) % this.songs.length;
} else {
if (this.songs.length == 0) {
return null;
} else {
this.currentPos = ((this.currentPos+1) % this.songs.length);
}
}
this.notifyListeners(this.songs[this.currentPos]);
return this.songs[this.currentPos];
}
/**
* Play the last song (called by MusicPlayer - not for other use!)
* @returns {*}
*/
PlayQueue.prototype.prev = function(){
if(this.history.length > 0) {
var prev = this.history[this.history.length-1];
this.currentPos = this.songs.indexOf(prev);
if(this.currentPos < 0){
this.songs.splice(0, 0, prev);
this.currentPos = 0;
}
this.history.pop();
}
this.notifyListeners(this.songs[this.currentPos]);
return this.songs[this.currentPos];
}
/**
* Add a new song to the list
* @param song in the format {artist:"...",title:"...",plugin:"...",source:"..."}
* or array of such objects
*/
PlayQueue.prototype.add = function (song) {
var actual = song;
if(Object.prototype.toString.call( song ) === '[object Array]' ){
actual = song[0];
for(var i = 0; i < song.length; i++){
this.songs.push(song[i]);
}
}else{
this.songs.push(song);
}
this.notifyListeners(this.current());
}
/**
* Add a new song to the list
* @param artist
* @param title
* @param plugin
* @param source
*/
PlayQueue.prototype.addNew = function(artist, title, plugin, source){
var song = {
artist: artist,
title: title,
plugin: plugin,
source: source
};
this.add(song);
}
/**
* @returns the current song
*/
PlayQueue.prototype.current = function () {
if(this.songs.length == 0 || this.songs.length <= this.currentPos){
return null;
}else {
return this.songs[this.currentPos];
}
}
/**
* Get a list of all songs
* @returns {Array}
*/
PlayQueue.prototype.getSongs = function(){
return this.songs;
}
/**
* Add a change listener - everytime the queue or the actually played song is changed,
* all all listeners are called (parameter is the current song)
* @param listener
*/
PlayQueue.prototype.addListener = function(listener){
this.listeners.push(listener);
if(this.current() != null){
listener(this.current());
}
}
/**
* Removes a listener
* @param the listener function to be removed
*/
PlayQueue.prototype.removeListener = function(object){
var i = this.listeners.indexOf(object);
if(i >= 0){
this.listeners.splice(i, 1);
}else{
log("cannnot remove listener - not presend", "error");
}
}
/**
* Calls all listeners
* @param parameter of these calls
*/
PlayQueue.prototype.notifyListeners = function(song){
for(var i = 0; i < this.listeners.length; i++){
if(typeof this.listeners[i] === "function"){
this.listeners[i](song);
}
}
}
/**
* Get the internal array index of a song with given artist and title
* @param artist
* @param title
* @returns {Integer} index number
*/
PlayQueue.prototype.getSongNr = function(artist, title){
var nr = 0;
var found = false;
for(; nr < this.songs.length && !found; nr++){
if(this.songs[nr].artist == artist && this.songs[nr].title == title){
return nr;
}
}
return -1;
}
/**
* Get the song of a given index number
* @param nr
* @returns song object {artist, title, plugin, source}
*/
PlayQueue.prototype.getSongByNr = function (nr) {
if(nr < this.songs.length){
return this.songs[nr];
}
}
/**
* Play the given song
* @param song (has to have at least following structure: {artist:"...",title:"..."})
* @returns the (new) current song (if not found the previous one)
*/
PlayQueue.prototype.playSong = function(song){
if(this.songs.length > this.currentPos && this.songs[this.currentPos] != null) {
this.history.push(this.songs[this.currentPos]);
while (this.history.length > 10) {
this.history.shift();
}
}
const nr = this.getSongNr(song.artist, song.title);
if(nr < this.songs.length){
this.currentPos = nr;
this.notifyListeners(this.songs[this.currentPos]);
}
return this.current();
}
/**
* Removes the song with the given internal index
* (To get this index call getSongNr(...))
* @param nr
*/
PlayQueue.prototype.removeSongNr = function(nr){
if(nr == this.currentPos && this.songs.length > 1){
Central.getPlayer().nextSong();
}
this.songs.splice(nr, 1);
this.notifyListeners(this.current());
}
PlayQueue.prototype.removeAll = function(){
this.songs = [];
this.currentPos = 0;
this.history = [];
// TODO: what do we tell the listeners?
}
//----------------------------------------------- CLASS PlayerSettings ---------------------------------
/**
* Player settings all plugins share
* @constructor
*/
function PlayerSettings(){
this.volume = 100;
this.playing = false;
}
//----------------------------------------------- CLASS MusicPlayer -------------------------------------
/**
* Music Player that controls the playlistId and all plugged-in music players
* @constructor
*/
function MusicPlayer() {
this.currentPlayer = null;
this.playQueue = null;
this.currentSong = null;
this.settings = new PlayerSettings();
this.players = [];
this.playQueue = new PlayQueue();
this.currentSong = this.playQueue.current();
this.currentPlayer = null;
}
/**
* @returns the currently playing plugin
*/
MusicPlayer.prototype.getCurrentPlayer = function () {
return this.currentPlayer;
}
/**
* @returns the current song
*/
MusicPlayer.prototype.getCurrentSong = function () {
return this.currentSong;
}
/**
* Adds a plugin (has to have alt least all functions of the class BaseMusicPlayer
* (use the extend() function for easy extension)
* @param player
*/
MusicPlayer.prototype.addPlugin = function (player) {
player.settings = this.settings;
this.players.push(player);
Log.info("MusicPlayer: registered plugin '"+player.getName()+"'");
this.currentSong = this.playQueue.current();
if(this.currentSong != null) {
for (var i = 0; i < this.players.length; i++) {
if (this.currentSong.plugin == this.players[i].getName()) {
this.currentPlayer = this.players[i];
}
}
if(this.currentPlayer != null) {
this.currentPlayer.load(this.currentSong.source);
}
}
}
/**
* Play the given song
* If it does not have the source and plugin information, these are provided by the playQueue
* @param song (at least {artist, title})
*/
MusicPlayer.prototype.playSong = function (song) {
song = this.playQueue.playSong(song);
this.currentSong = song;
if(this.currentPlayer != null){
this.currentPlayer.stop();
}
var foundPlayer = false;
if(this.currentSong != null) {
for (var i = 0; i < this.players.length; i++) {
if (this.currentSong.plugin === this.players[i].getName()) {
this.currentPlayer = this.players[i];
foundPlayer = true;
}
}
}
if(foundPlayer) {
this.currentPlayer.load(this.currentSong.source);
this.currentPlayer.play();
this.playQueue.notifyListeners(song);
Log.info("MusicPlayer: playing song "+ JSON.stringify(song));
}else{
Log.warning("Cannot find a plugin for "+JSON.stringify(song));
}
}
/**
* Play the next song
* @param {boolean} if true, play a random song from the queue (optional, default is true)
*/
MusicPlayer.prototype.nextSong = function (random) {
if(random !== true){
random = false;
}
this.currentSong = this.playQueue.next(random);
this.playSong(this.currentSong);
}
/**
* Play the previous song
*/
MusicPlayer.prototype.lastSong = function () {
this.currentSong = this.playQueue.prev();
this.playSong(this.currentSong);
}
/**
* @returns {PlayQueue} the playQueue
*/
MusicPlayer.prototype.getPlayQueue = function(){
return this.playQueue;
}
/**
* Tries to load a source and calls the callback afterwards with true or false
* (the callback has to have exactly one parameter, which accepts boolean)
* @param plugin {string} name of the plugin
* @param source {string} source, id or whatever the plugin understands
* @param callback {function(boolean)}
*/
MusicPlayer.prototype.tryLoadSource = function(plugin, source, callback){
var found = false;
for(var i = 0; i < this.players.length && !found; i++){
if(this.players[i].getName() === plugin){
this.players[i].tryLoadSource(source, callback);
found = true;
}
}
if(! found) {
callback(false);
}
}
/**
*
* @param song {Song}
* @param play {Boolean}
*/
MusicPlayer.prototype.addToQueue = function (song, play) {
const playable = [];
const state = {
countDown: 0,
finished: false
}
function choose() {
let chosen = -1;
for (let i2 = 0; i2 < song.sources.length && chosen < 0; i2++) {
if (playable[i2] === true) {
chosen = i2;
}
}
if (chosen >= 0) {
const s = {
artist: song.artist,
title: song.title,
plugin: song.sources.get(chosen).type,
source: song.sources.get(chosen).value
};
Central.getPlayer().getPlayQueue().add(s);
if (play) {
Central.getPlayer().playSong(s);
}
} else {
Log.warning("Cannnot get a valid source for " + JSON.stringify(elem));
}
}
for (let i = 0; i < song.sources.length; i++) {
const src = song.sources[i];
state.countDown++;
Central.getPlayer().tryLoadSource(src.type, src.value, function (valid) {
playable[i] = valid;
state.countDown--;
if (state.countDown === 0 && state.finished) {
choose();
}
});
}
;
state.finished = true;
// if all callbacks returned immediately
if (state.countDown == 0) {
choose();
}
}
| FlowMusicUI/js/models/Player.js | /**
* @author Fixl Stefan
* Copyright 2017 Fixl Stefan
*/
//----------------------------------------------- CLASS PlayQueue ---------------------------------------
/**
* PlayQueue stores and manages the list of songs in the current playlistId/queue.
* The PlayQueue instance is stored by Central - use Central.getPlayQueue() to retrieve it
* @constructor
*/
function PlayQueue() {
this.songs = [];
this.listeners = [];
this.currentPos = 0;
this.history = [];
}
/**
* Play next song in the list (called by MusicPlayer - not for other use!)
* @param random
* @returns the new current song
*/
PlayQueue.prototype.next = function (random) {
if(this.songs.length > this.currentPos && this.songs[this.currentPos] != null) {
this.history.push(this.songs[this.currentPos]);
while (this.history.length > 10) {
this.history.shift();
}
}
if (random) {
this.currentPos = Math.round(Math.random() * this.songs.length) % this.songs.length;
} else {
if (this.songs.length == 0) {
return null;
} else {
this.currentPos = ((this.currentPos+1) % this.songs.length);
}
}
this.notifyListeners(this.songs[this.currentPos]);
return this.songs[this.currentPos];
}
/**
* Play the last song (called by MusicPlayer - not for other use!)
* @returns {*}
*/
PlayQueue.prototype.prev = function(){
if(this.history.length > 0) {
var prev = this.history[this.history.length-1];
this.currentPos = this.songs.indexOf(prev);
if(this.currentPos < 0){
this.songs.splice(0, 0, prev);
this.currentPos = 0;
}
this.history.pop();
}
this.notifyListeners(this.songs[this.currentPos]);
return this.songs[this.currentPos];
}
/**
* Add a new song to the list
* @param song in the format {artist:"...",title:"...",plugin:"...",source:"..."}
* or array of such objects
*/
PlayQueue.prototype.add = function (song) {
var actual = song;
if(Object.prototype.toString.call( song ) === '[object Array]' ){
actual = song[0];
for(var i = 0; i < song.length; i++){
this.songs.push(song[i]);
}
}else{
this.songs.push(song);
}
this.notifyListeners(this.current());
}
/**
* Add a new song to the list
* @param artist
* @param title
* @param plugin
* @param source
*/
PlayQueue.prototype.addNew = function(artist, title, plugin, source){
var song = {
artist: artist,
title: title,
plugin: plugin,
source: source
};
this.add(song);
}
/**
* @returns the current song
*/
PlayQueue.prototype.current = function () {
if(this.songs.length == 0 || this.songs.length <= this.currentPos){
return null;
}else {
return this.songs[this.currentPos];
}
}
/**
* Get a list of all songs
* @returns {Array}
*/
PlayQueue.prototype.getSongs = function(){
return this.songs;
}
/**
* Add a change listener - everytime the queue or the actually played song is changed,
* all all listeners are called (parameter is the current song)
* @param listener
*/
PlayQueue.prototype.addListener = function(listener){
this.listeners.push(listener);
if(this.current() != null){
listener(this.current());
}
}
/**
* Removes a listener
* @param the listener function to be removed
*/
PlayQueue.prototype.removeListener = function(object){
var i = this.listeners.indexOf(object);
if(i >= 0){
this.listeners.splice(i, 1);
}else{
log("cannnot remove listener - not presend", "error");
}
}
/**
* Calls all listeners
* @param parameter of these calls
*/
PlayQueue.prototype.notifyListeners = function(song){
for(var i = 0; i < this.listeners.length; i++){
if(typeof this.listeners[i] === "function"){
this.listeners[i](song);
}
}
}
/**
* Get the internal array index of a song with given artist and title
* @param artist
* @param title
* @returns {Integer} index number
*/
PlayQueue.prototype.getSongNr = function(artist, title){
var nr = 0;
var found = false;
for(; nr < this.songs.length && !found; nr++){
if(this.songs[nr].artist == artist && this.songs[nr].title == title){
return nr;
}
}
return -1;
}
/**
* Get the song of a given index number
* @param nr
* @returns song object {artist, title, plugin, source}
*/
PlayQueue.prototype.getSongByNr = function (nr) {
if(nr < this.songs.length){
return this.songs[nr];
}
}
/**
* Play the given song
* @param song (has to have at least following structure: {artist:"...",title:"..."})
* @returns the (new) current song (if not found the previous one)
*/
PlayQueue.prototype.playSong = function(song){
const nr = this.getSongNr(song.artist, song.title);
if(nr < this.songs.length){
this.currentPos = nr;
this.notifyListeners(this.songs[this.currentPos]);
}
return this.current();
}
/**
* Removes the song with the given internal index
* (To get this index call getSongNr(...))
* @param nr
*/
PlayQueue.prototype.removeSongNr = function(nr){
if(nr == this.currentPos && this.songs.length > 1){
Central.getPlayer().nextSong();
}
this.songs.splice(nr, 1);
this.notifyListeners(this.current());
}
PlayQueue.prototype.removeAll = function(){
this.songs = [];
this.currentPos = 0;
this.history = [];
// TODO: what do we tell the listeners?
}
//----------------------------------------------- CLASS PlayerSettings ---------------------------------
/**
* Player settings all plugins share
* @constructor
*/
function PlayerSettings(){
this.volume = 100;
this.playing = false;
}
//----------------------------------------------- CLASS MusicPlayer -------------------------------------
/**
* Music Player that controls the playlistId and all plugged-in music players
* @constructor
*/
function MusicPlayer() {
this.currentPlayer = null;
this.playQueue = null;
this.currentSong = null;
this.settings = new PlayerSettings();
this.players = [];
this.playQueue = new PlayQueue();
this.currentSong = this.playQueue.current();
this.currentPlayer = null;
}
/**
* @returns the currently playing plugin
*/
MusicPlayer.prototype.getCurrentPlayer = function () {
return this.currentPlayer;
}
/**
* @returns the current song
*/
MusicPlayer.prototype.getCurrentSong = function () {
return this.currentSong;
}
/**
* Adds a plugin (has to have alt least all functions of the class BaseMusicPlayer
* (use the extend() function for easy extension)
* @param player
*/
MusicPlayer.prototype.addPlugin = function (player) {
player.settings = this.settings;
this.players.push(player);
Log.info("MusicPlayer: registered plugin '"+player.getName()+"'");
this.currentSong = this.playQueue.current();
if(this.currentSong != null) {
for (var i = 0; i < this.players.length; i++) {
if (this.currentSong.plugin == this.players[i].getName()) {
this.currentPlayer = this.players[i];
}
}
if(this.currentPlayer != null) {
this.currentPlayer.load(this.currentSong.source);
}
}
}
/**
* Play the given song
* If it does not have the source and plugin information, these are provided by the playQueue
* @param song (at least {artist, title})
*/
MusicPlayer.prototype.playSong = function (song) {
song = this.playQueue.playSong(song);
this.currentSong = song;
if(this.currentPlayer != null){
this.currentPlayer.stop();
}
var foundPlayer = false;
if(this.currentSong != null) {
for (var i = 0; i < this.players.length; i++) {
if (this.currentSong.plugin === this.players[i].getName()) {
this.currentPlayer = this.players[i];
foundPlayer = true;
}
}
}
if(foundPlayer) {
this.currentPlayer.load(this.currentSong.source);
this.currentPlayer.play();
this.playQueue.notifyListeners(song);
Log.info("MusicPlayer: playing song "+ JSON.stringify(song));
}else{
Log.warning("Cannot find a plugin for "+JSON.stringify(song));
}
}
/**
* Play the next song
* @param {boolean} if true, play a random song from the queue (optional, default is true)
*/
MusicPlayer.prototype.nextSong = function (random) {
if(random !== true){
random = false;
}
this.currentSong = this.playQueue.next(random);
this.playSong(this.currentSong);
}
/**
* Play the previous song
*/
MusicPlayer.prototype.lastSong = function () {
this.currentSong = this.playQueue.prev();
this.playSong(this.currentSong);
}
/**
* @returns {PlayQueue} the playQueue
*/
MusicPlayer.prototype.getPlayQueue = function(){
return this.playQueue;
}
/**
* Tries to load a source and calls the callback afterwards with true or false
* (the callback has to have exactly one parameter, which accepts boolean)
* @param plugin {string} name of the plugin
* @param source {string} source, id or whatever the plugin understands
* @param callback {function(boolean)}
*/
MusicPlayer.prototype.tryLoadSource = function(plugin, source, callback){
var found = false;
for(var i = 0; i < this.players.length && !found; i++){
if(this.players[i].getName() === plugin){
this.players[i].tryLoadSource(source, callback);
found = true;
}
}
if(! found) {
callback(false);
}
}
/**
*
* @param song {Song}
* @param play {Boolean}
*/
MusicPlayer.prototype.addToQueue = function (song, play) {
const playable = [];
const state = {
countDown: 0,
finished: false
}
function choose() {
let chosen = -1;
for (let i2 = 0; i2 < song.sources.length && chosen < 0; i2++) {
if (playable[i2] === true) {
chosen = i2;
}
}
if (chosen >= 0) {
const s = {
artist: song.artist,
title: song.title,
plugin: song.sources.get(chosen).type,
source: song.sources.get(chosen).value
};
Central.getPlayer().getPlayQueue().add(s);
if (play) {
Central.getPlayer().playSong(s);
}
} else {
Log.warning("Cannnot get a valid source for " + JSON.stringify(elem));
}
}
for (let i = 0; i < song.sources.length; i++) {
const src = song.sources[i];
state.countDown++;
Central.getPlayer().tryLoadSource(src.type, src.value, function (valid) {
playable[i] = valid;
state.countDown--;
if (state.countDown === 0 && state.finished) {
choose();
}
});
}
;
state.finished = true;
// if all callbacks returned immediately
if (state.countDown == 0) {
choose();
}
}
| little bugfix
| FlowMusicUI/js/models/Player.js | little bugfix | <ide><path>lowMusicUI/js/models/Player.js
<ide> * @returns the (new) current song (if not found the previous one)
<ide> */
<ide> PlayQueue.prototype.playSong = function(song){
<add> if(this.songs.length > this.currentPos && this.songs[this.currentPos] != null) {
<add> this.history.push(this.songs[this.currentPos]);
<add> while (this.history.length > 10) {
<add> this.history.shift();
<add> }
<add> }
<add>
<ide> const nr = this.getSongNr(song.artist, song.title);
<ide> if(nr < this.songs.length){
<ide> this.currentPos = nr; |
|
Java | lgpl-2.1 | 21c5c1c0ad447cb5814c0204d9d6087620cc7e86 | 0 | zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform,zanata/zanata-platform | package org.fedorahosted.flies.rest.service;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import java.net.URI;
import javax.ws.rs.core.Response;
import org.dbunit.operation.DatabaseOperation;
import org.fedorahosted.flies.ContentType;
import org.fedorahosted.flies.LocaleId;
import org.fedorahosted.flies.rest.ApiKeyHeaderDecorator;
import org.fedorahosted.flies.rest.client.IDocumentsResource;
import org.fedorahosted.flies.rest.dto.Document;
import org.fedorahosted.flies.rest.dto.Documents;
import org.jboss.resteasy.client.ClientRequestFactory;
import org.jboss.resteasy.client.ClientResponse;
import org.jboss.resteasy.plugins.providers.RegisterBuiltin;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.jboss.seam.mock.DBUnitSeamTest;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Test(groups = { "seam-tests" })
public class DocumentsServiceSeamTest extends DBUnitSeamTest {
ClientRequestFactory clientRequestFactory;
IDocumentsResource docsService;
@BeforeClass
public void prepareRestEasyClientFramework() throws Exception {
ResteasyProviderFactory instance = ResteasyProviderFactory
.getInstance();
RegisterBuiltin.register(instance);
clientRequestFactory = new ClientRequestFactory(
new SeamMockClientExecutor(this), (URI)null);
clientRequestFactory.getPrefixInterceptors().registerInterceptor(
new ApiKeyHeaderDecorator("admin",
"12345678901234567890123456789012"));
docsService = clientRequestFactory
.createProxy(IDocumentsResource.class,
"/restv1/projects/p/sample-project/iterations/i/1.1/documents");
}
@Override
protected void prepareDBUnitOperations() {
beforeTestOperations.add(new DataSetOperation(
"org/fedorahosted/flies/test/model/DocumentsData.dbunit.xml",
DatabaseOperation.CLEAN_INSERT));
afterTestOperations.add(new DataSetOperation(
"org/fedorahosted/flies/test/model/DocumentsData.dbunit.xml",
DatabaseOperation.DELETE_ALL));
}
public void getZero() throws Exception {
expectDocs(0);
}
private void expectDocs(int expectDocs) {
ClientResponse<Documents> response = docsService.getDocuments();
assertThat(response.getStatus(), is(200));
assertThat(response.getEntity(), notNullValue());
assertThat(response.getEntity().getDocuments().size(), is(expectDocs));
}
private void putDoc1() {
Documents docs = new Documents();
ContentType contentType = ContentType.TextPlain;
Integer version = 1;
LocaleId lang = LocaleId.fromJavaName("es_ES");
docs.getDocuments().add(new Document("doc1", "doc1name", "path", contentType, version, lang));
Response response = docsService.put(docs);
assertThat(response.getStatus(), is(200));
}
private void postDoc2() {
Documents docs = new Documents();
ContentType contentType = ContentType.TextPlain;
Integer version = 1;
LocaleId lang = LocaleId.fromJavaName("es_ES");
docs.getDocuments().add(new Document("doc2", "doc2name", "path", contentType, version, lang));
Response response = docsService.post(docs);
assertThat(response.getStatus(), is(200));
}
public void putGet() throws Exception {
getZero();
putDoc1();
expectDocs(1);
}
public void putPostGet() throws Exception {
getZero();
putDoc1();
expectDocs(1);
postDoc2();
expectDocs(2);
}
}
| flies-war/src/test/java/org/fedorahosted/flies/rest/service/DocumentsServiceSeamTest.java | package org.fedorahosted.flies.rest.service;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import java.net.URI;
import javax.ws.rs.core.Response;
import org.dbunit.operation.DatabaseOperation;
import org.fedorahosted.flies.ContentType;
import org.fedorahosted.flies.LocaleId;
import org.fedorahosted.flies.rest.ApiKeyHeaderDecorator;
import org.fedorahosted.flies.rest.client.IDocumentsResource;
import org.fedorahosted.flies.rest.dto.Document;
import org.fedorahosted.flies.rest.dto.Documents;
import org.jboss.resteasy.client.ClientRequestFactory;
import org.jboss.resteasy.client.ClientResponse;
import org.jboss.resteasy.plugins.providers.RegisterBuiltin;
import org.jboss.resteasy.spi.ResteasyProviderFactory;
import org.jboss.seam.mock.DBUnitSeamTest;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Test(groups = { "seam-tests" })
public class DocumentsServiceSeamTest extends DBUnitSeamTest {
ClientRequestFactory clientRequestFactory;
IDocumentsResource docsService;
@BeforeClass
public void prepareRestEasyClientFramework() throws Exception {
ResteasyProviderFactory instance = ResteasyProviderFactory
.getInstance();
RegisterBuiltin.register(instance);
clientRequestFactory = new ClientRequestFactory(
new SeamMockClientExecutor(this), (URI)null);
clientRequestFactory.getPrefixInterceptors().registerInterceptor(
new ApiKeyHeaderDecorator("admin",
"12345678901234567890123456789012"));
docsService = clientRequestFactory
.createProxy(IDocumentsResource.class,
"/restv1/projects/p/sample-project/iterations/i/1.1/documents");
}
@Override
protected void prepareDBUnitOperations() {
beforeTestOperations.add(new DataSetOperation(
"org/fedorahosted/flies/test/model/DocumentsData.dbunit.xml",
DatabaseOperation.CLEAN_INSERT));
}
public void getZero() throws Exception {
expectDocs(0);
}
private void expectDocs(int expectDocs) {
ClientResponse<Documents> response = docsService.getDocuments();
assertThat(response.getStatus(), is(200));
assertThat(response.getEntity(), notNullValue());
assertThat(response.getEntity().getDocuments().size(), is(expectDocs));
}
private void putDoc1() {
Documents docs = new Documents();
ContentType contentType = ContentType.TextPlain;
Integer version = 1;
LocaleId lang = LocaleId.fromJavaName("es_ES");
docs.getDocuments().add(new Document("doc1", "doc1name", "path", contentType, version, lang));
Response response = docsService.put(docs);
assertThat(response.getStatus(), is(200));
}
private void postDoc2() {
Documents docs = new Documents();
ContentType contentType = ContentType.TextPlain;
Integer version = 1;
LocaleId lang = LocaleId.fromJavaName("es_ES");
docs.getDocuments().add(new Document("doc2", "doc2name", "path", contentType, version, lang));
Response response = docsService.post(docs);
assertThat(response.getStatus(), is(200));
}
public void putGet() throws Exception {
getZero();
putDoc1();
expectDocs(1);
}
public void putPostGet() throws Exception {
getZero();
putDoc1();
expectDocs(1);
postDoc2();
expectDocs(2);
}
}
| test cleanup
| flies-war/src/test/java/org/fedorahosted/flies/rest/service/DocumentsServiceSeamTest.java | test cleanup | <ide><path>lies-war/src/test/java/org/fedorahosted/flies/rest/service/DocumentsServiceSeamTest.java
<ide> beforeTestOperations.add(new DataSetOperation(
<ide> "org/fedorahosted/flies/test/model/DocumentsData.dbunit.xml",
<ide> DatabaseOperation.CLEAN_INSERT));
<add> afterTestOperations.add(new DataSetOperation(
<add> "org/fedorahosted/flies/test/model/DocumentsData.dbunit.xml",
<add> DatabaseOperation.DELETE_ALL));
<ide> }
<ide>
<ide> public void getZero() throws Exception { |
|
Java | epl-1.0 | f6212ff2274171e646318a4f65c776e25360b91c | 0 | floralvikings/jenjin | package com.jenjinstudios.world;
import com.jenjinstudios.io.Message;
import com.jenjinstudios.net.AuthClient;
import com.jenjinstudios.world.io.WorldFileReader;
import com.jenjinstudios.world.state.MoveState;
import com.jenjinstudios.world.util.WorldClientMessageGenerator;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* The WorldClient class is used to connect to a WorldServer and stores information about the environment immediately
* surrounding the player.
* @author Caleb Brinkman
*/
public class WorldClient extends AuthClient
{
/** The logger associated with this class. */
private static final Logger LOGGER = Logger.getLogger(WorldClient.class.getName());
/** The number of milliseconds before a blocking method should time out. */
public static long TIMEOUT_MILLIS = 30000;
/** The password used to login to the world. */
private final String password;
/** The world. */
private World world;
/** The actor representing the player controlled by this client. */
private ClientPlayer player;
/** Whether this client has received a world file checksum from the server. */
private boolean hasReceivedWorldFileChecksum;
/** The world file checksum received from the server. */
private byte[] serverWorldFileChecksum;
/** The world file. */
private File worldFile;
/** The world file reader for this client. */
private WorldFileReader worldFileReader;
/** Whether this client has received the world file. */
private boolean hasReceivedWorldFile;
/** The bytes in the world server file. */
private byte[] serverWorldFileBytes;
/**
* Construct a client connecting to the given address over the given port. This client <i>must</i> have a username and
* password.
* @param worldFile The file containing the world information.
* @param address The address to which this client will attempt to connect.
* @param port The port over which this client will attempt to connect.
* @param username The username that will be used by this client.
* @param password The password that will be used by this client.
* @throws java.security.NoSuchAlgorithmException If there is an error generating encryption keys.
* @throws java.io.IOException If there's an error reading the world file.
* @throws javax.xml.parsers.ParserConfigurationException If there's an error configuring the xml parser.
* @throws javax.xml.transform.TransformerException If there's an error transforming the xml file.
* @throws org.xml.sax.SAXException If there's an error in the XML syntax.
*/
public WorldClient(File worldFile, String address, int port, String username, String password)
throws NoSuchAlgorithmException, SAXException, TransformerException, ParserConfigurationException, IOException
{
super(address, port, username, password);
this.password = password;
this.worldFile = worldFile;
if (worldFile.exists())
{
this.worldFileReader = new WorldFileReader(worldFile);
this.world = worldFileReader.read();
}
}
@Override
public boolean sendBlockingLoginRequest() {
sendLoginRequest();
long startTime = System.currentTimeMillis();
long timePast = System.currentTimeMillis() - startTime;
while (!isLoggedIn() && isWaitingForLoginResponse() && (timePast < TIMEOUT_MILLIS))
{
try
{
sleep(1);
} catch (InterruptedException e)
{
LOGGER.log(Level.WARNING, "Interrupted while waiting for login response.", e);
}
timePast = System.currentTimeMillis() - startTime;
}
return isLoggedIn();
}
/**
* Get the player associated with this client.
* @return The player (ClientActor) associated with this client.
*/
public ClientPlayer getPlayer() { return player; }
/**
* Set the player being controlled by this client.
* @param player The player to be controlled by this client.
*/
public void setPlayer(ClientPlayer player) {
this.player = player;
}
/**
* Get the world for this client.
* @return The world being managed by this client.
*/
public World getWorld() { return world; }
/**
* Set the world managed by this client.
* @param world The world managed by this client.
*/
public void setWorld(World world) {
this.world = world;
}
/**
* Set whether the world file checksum has been received.
* @param hasReceivedWorldFileChecksum Whether the checksum has been received.
*/
public void setHasReceivedWorldFileChecksum(boolean hasReceivedWorldFileChecksum) {
this.hasReceivedWorldFileChecksum = hasReceivedWorldFileChecksum;
}
/**
* Set the checksum received from the server.
* @param serverWorldFileChecksum The checksum received from the server.
*/
public void setServerWorldFileChecksum(byte[] serverWorldFileChecksum) {
this.serverWorldFileChecksum = serverWorldFileChecksum;
}
/**
* Set whether the client has received the world file.
* @param hasReceivedWorldFile Whether the client has received the world file.
*/
public void setHasReceivedWorldFile(boolean hasReceivedWorldFile) {
this.hasReceivedWorldFile = hasReceivedWorldFile;
}
/**
* Set the bytes of the world file stored on the server.
* @param serverWorldFileBytes The bytes.
*/
public void setServerWorldFileBytes(byte[] serverWorldFileBytes) {
this.serverWorldFileBytes = serverWorldFileBytes;
}
/**
* Send a request for the world file, and wait for the response to return.
* @throws InterruptedException If the thread is interrupted while waiting for responses.
* @throws java.io.IOException If there's an error writing the world file.
* @throws java.security.NoSuchAlgorithmException If the MD5 algorithm can't be found.
* @throws javax.xml.parsers.ParserConfigurationException If there's an error configuring the XML parser.
* @throws javax.xml.transform.TransformerException If there's an error with the XML transformer.
* @throws org.xml.sax.SAXException If there's an error with the XML.
*/
public void sendBlockingWorldFileRequest() throws InterruptedException, NoSuchAlgorithmException, SAXException, TransformerException, ParserConfigurationException, IOException {
Message worldFileChecksumRequest = new Message("WorldChecksumRequest");
queueMessage(worldFileChecksumRequest);
while (!hasReceivedWorldFileChecksum)
{
Thread.sleep(10);
}
if (worldFileReader == null || !Arrays.equals(serverWorldFileChecksum, worldFileReader.getWorldFileChecksum()))
{
queueMessage(new Message("WorldFileRequest"));
while (!hasReceivedWorldFile)
{
Thread.sleep(10);
}
if ((!worldFile.getParentFile().exists() && !worldFile.getParentFile().mkdirs()) || (!worldFile.exists() && !worldFile.createNewFile()))
{
throw new IOException("Unable to create new world file!");
}
FileOutputStream worldOut = new FileOutputStream(worldFile);
worldOut.write(serverWorldFileBytes);
worldOut.close();
worldFileReader = new WorldFileReader(new ByteArrayInputStream(serverWorldFileBytes));
world = worldFileReader.read();
}
}
/** Send a LoginRequest to the server. */
private void sendLoginRequest() {
Message loginRequest = WorldClientMessageGenerator.generateLoginRequest(getUsername(), password);
setWaitingForLoginResponse(true);
queueMessage(loginRequest);
}
/**
* Send a state change request to the server.
* @param moveState The move state used to generate the request.
*/
protected void sendStateChangeRequest(MoveState moveState) {
Message stateChangeRequest = WorldClientMessageGenerator.generateStateChangeRequest(moveState);
queueMessage(stateChangeRequest);
}
@Override
protected void sendLogoutRequest() {
Message logoutRequest = new Message("WorldLogoutRequest");
// Send the request, continue when response is received.
setWaitingForLogoutResponse(true);
queueMessage(logoutRequest);
}
}
| jenjin-world-client/src/main/java/com/jenjinstudios/world/WorldClient.java | package com.jenjinstudios.world;
import com.jenjinstudios.io.Message;
import com.jenjinstudios.net.AuthClient;
import com.jenjinstudios.world.io.WorldFileReader;
import com.jenjinstudios.world.state.MoveState;
import com.jenjinstudios.world.util.WorldClientMessageGenerator;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* The WorldClient class is used to connect to a WorldServer and stores information about the environment immediately
* surrounding the player.
* @author Caleb Brinkman
*/
public class WorldClient extends AuthClient
{
/** The logger associated with this class. */
private static final Logger LOGGER = Logger.getLogger(WorldClient.class.getName());
/** The number of milliseconds before a blocking method should time out. */
public static long TIMEOUT_MILLIS = 30000;
/** The password used to login to the world. */
private final String password;
/** The world. */
private World world;
/** The actor representing the player controlled by this client. */
private ClientPlayer player;
/** Whether this client has received a world file checksum from the server. */
private boolean hasReceivedWorldFileChecksum;
/** The world file checksum received from the server. */
private byte[] serverWorldFileChecksum;
/** The world file. */
private File worldFile;
/** The world file reader for this client. */
private WorldFileReader worldFileReader;
/** Whether this client has received the world file. */
private boolean hasReceivedWorldFile;
/** The bytes in the world server file. */
private byte[] serverWorldFileBytes;
/**
* Construct a client connecting to the given address over the given port. This client <i>must</i> have a username and
* password.
* @param worldFile The file containing the world information.
* @param address The address to which this client will attempt to connect.
* @param port The port over which this client will attempt to connect.
* @param username The username that will be used by this client.
* @param password The password that will be used by this client.
* @throws java.security.NoSuchAlgorithmException If there is an error generating encryption keys.
* @throws java.io.IOException If there's an error reading the world file.
* @throws javax.xml.parsers.ParserConfigurationException If there's an error configuring the xml parser.
* @throws javax.xml.transform.TransformerException If there's an error transforming the xml file.
* @throws org.xml.sax.SAXException If there's an error in the XML syntax.
*/
public WorldClient(File worldFile, String address, int port, String username, String password)
throws NoSuchAlgorithmException, SAXException, TransformerException, ParserConfigurationException, IOException
{
super(address, port, username, password);
this.password = password;
this.worldFile = worldFile;
if (worldFile.exists())
{
this.worldFileReader = new WorldFileReader(worldFile);
this.world = worldFileReader.read();
}
}
@Override
public boolean sendBlockingLoginRequest() {
sendLoginRequest();
long startTime = System.currentTimeMillis();
long timePast = System.currentTimeMillis() - startTime;
while (!isLoggedIn() && isWaitingForLoginResponse() && (timePast < TIMEOUT_MILLIS))
{
try
{
sleep(1);
} catch (InterruptedException e)
{
LOGGER.log(Level.WARNING, "Interrupted while waiting for login response.", e);
}
timePast = System.currentTimeMillis() - startTime;
}
return isLoggedIn();
}
/**
* Get the player associated with this client.
* @return The player (ClientActor) associated with this client.
*/
public ClientPlayer getPlayer() { return player; }
/**
* Set the player being controlled by this client.
* @param player The player to be controlled by this client.
*/
public void setPlayer(ClientPlayer player) {
this.player = player;
}
/**
* Get the world for this client.
* @return The world being managed by this client.
*/
public World getWorld() { return world; }
/**
* Set the world managed by this client.
* @param world The world managed by this client.
*/
public void setWorld(World world) {
this.world = world;
}
/**
* Set whether the world file checksum has been received.
* @param hasReceivedWorldFileChecksum Whether the checksum has been received.
*/
public void setHasReceivedWorldFileChecksum(boolean hasReceivedWorldFileChecksum) {
this.hasReceivedWorldFileChecksum = hasReceivedWorldFileChecksum;
}
/**
* Set the checksum received from the server.
* @param serverWorldFileChecksum The checksum received from the server.
*/
public void setServerWorldFileChecksum(byte[] serverWorldFileChecksum) {
this.serverWorldFileChecksum = serverWorldFileChecksum;
}
/**
* Set whether the client has received the world file.
* @param hasReceivedWorldFile Whether the client has received the world file.
*/
public void setHasReceivedWorldFile(boolean hasReceivedWorldFile) {
this.hasReceivedWorldFile = hasReceivedWorldFile;
}
/**
* Set the bytes of the world file stored on the server.
* @param serverWorldFileBytes The bytes.
*/
public void setServerWorldFileBytes(byte[] serverWorldFileBytes) {
this.serverWorldFileBytes = serverWorldFileBytes;
}
/**
* Send a request for the world file, and wait for the response to return.
* @throws InterruptedException If the thread is interrupted while waiting for responses.
* @throws java.io.IOException If there's an error writing the world file.
* @throws java.security.NoSuchAlgorithmException If the MD5 algorithm can't be found.
* @throws javax.xml.parsers.ParserConfigurationException If there's an error configuring the XML parser.
* @throws javax.xml.transform.TransformerException If there's an error with the XML transformer.
* @throws org.xml.sax.SAXException If there's an error with the XML.
*/
public void sendBlockingWorldFileRequest() throws InterruptedException, NoSuchAlgorithmException, SAXException, TransformerException, ParserConfigurationException, IOException {
Message worldFileChecksumRequest = new Message("WorldChecksumRequest");
queueMessage(worldFileChecksumRequest);
while (!hasReceivedWorldFileChecksum)
{
Thread.sleep(10);
}
if (worldFileReader == null || !Arrays.equals(serverWorldFileChecksum, worldFileReader.getWorldFileChecksum()))
{
queueMessage(new Message("WorldFileRequest"));
while (!hasReceivedWorldFile)
{
Thread.sleep(10);
}
if ((!worldFile.getParentFile().exists() && !worldFile.getParentFile().mkdirs()) || (!worldFile.exists() && !worldFile.createNewFile()))
{
throw new IOException("Unable to create new world file!");
}
FileOutputStream worldOut = new FileOutputStream(worldFile);
worldOut.write(serverWorldFileBytes);
worldOut.close();
worldFileReader = new WorldFileReader(new ByteArrayInputStream(serverWorldFileBytes));
world = worldFileReader.read();
}
}
/** Send a LoginRequest to the server. */
private void sendLoginRequest() {
Message loginRequest = WorldClientMessageGenerator.generateLoginRequest(getUsername(), password);
setWaitingForLoginResponse(true);
queueMessage(loginRequest);
}
/**
* Send a state change request to the server.
* @param moveState The move state used to generate the request.
*/
protected void sendStateChangeRequest(MoveState moveState) {
LOGGER.log(Level.FINEST, "Sending state change request: {0} from {1}",
new Object[] {moveState, Thread.currentThread().getName()});
Message stateChangeRequest = WorldClientMessageGenerator.generateStateChangeRequest(moveState);
queueMessage(stateChangeRequest);
}
@Override
protected void sendLogoutRequest() {
Message logoutRequest = new Message("WorldLogoutRequest");
// Send the request, continue when response is received.
setWaitingForLogoutResponse(true);
queueMessage(logoutRequest);
}
}
| Removed frivolous log statement.
| jenjin-world-client/src/main/java/com/jenjinstudios/world/WorldClient.java | Removed frivolous log statement. | <ide><path>enjin-world-client/src/main/java/com/jenjinstudios/world/WorldClient.java
<ide> * @param moveState The move state used to generate the request.
<ide> */
<ide> protected void sendStateChangeRequest(MoveState moveState) {
<del> LOGGER.log(Level.FINEST, "Sending state change request: {0} from {1}",
<del> new Object[] {moveState, Thread.currentThread().getName()});
<ide> Message stateChangeRequest = WorldClientMessageGenerator.generateStateChangeRequest(moveState);
<ide> queueMessage(stateChangeRequest);
<ide> } |
|
Java | apache-2.0 | 7e096e07ff52c96e276b2fe19db87c88efe6cc99 | 0 | HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase,HubSpot/hbase | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam3;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Objects;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.Type;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.MultithreadedTestUtil;
import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.StartMiniClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.filter.BigDecimalComparator;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.NullComparator;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.regionserver.HRegion.MutationBatchOperation;
import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
import org.apache.hadoop.hbase.regionserver.Region.RowLock;
import org.apache.hadoop.hbase.regionserver.TestHStore.FaultyFileSystem;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSource;
import org.apache.hadoop.hbase.regionserver.wal.WALUtil;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationObserver;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.FaultyFSLog;
import org.apache.hadoop.hbase.wal.NettyAsyncFSWALConfigHelper;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.hadoop.hbase.wal.WALProvider;
import org.apache.hadoop.hbase.wal.WALProvider.Writer;
import org.apache.hadoop.hbase.wal.WALSplitUtil;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
/**
* Basic stand-alone testing of HRegion. No clusters!
*
* A lot of the meta information for an HRegion now lives inside other HRegions
* or in the HBaseMaster, so only basic testing is possible.
*/
@Category({VerySlowRegionServerTests.class, LargeTests.class})
@SuppressWarnings("deprecation")
public class TestHRegion {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHRegion.class);
// Do not spin up clusters in here. If you need to spin up a cluster, do it
// over in TestHRegionOnCluster.
private static final Logger LOG = LoggerFactory.getLogger(TestHRegion.class);
@Rule
public TestName name = new TestName();
@Rule public final ExpectedException thrown = ExpectedException.none();
private static final String COLUMN_FAMILY = "MyCF";
private static final byte [] COLUMN_FAMILY_BYTES = Bytes.toBytes(COLUMN_FAMILY);
private static final EventLoopGroup GROUP = new NioEventLoopGroup();
HRegion region = null;
// Do not run unit tests in parallel (? Why not? It don't work? Why not? St.Ack)
protected static HBaseTestingUtility TEST_UTIL;
public static Configuration CONF ;
private String dir;
private static FileSystem FILESYSTEM;
private final int MAX_VERSIONS = 2;
// Test names
protected TableName tableName;
protected String method;
protected final byte[] qual = Bytes.toBytes("qual");
protected final byte[] qual1 = Bytes.toBytes("qual1");
protected final byte[] qual2 = Bytes.toBytes("qual2");
protected final byte[] qual3 = Bytes.toBytes("qual3");
protected final byte[] value = Bytes.toBytes("value");
protected final byte[] value1 = Bytes.toBytes("value1");
protected final byte[] value2 = Bytes.toBytes("value2");
protected final byte[] row = Bytes.toBytes("rowA");
protected final byte[] row2 = Bytes.toBytes("rowB");
protected final MetricsAssertHelper metricsAssertHelper = CompatibilitySingletonFactory
.getInstance(MetricsAssertHelper.class);
@Before
public void setup() throws IOException {
TEST_UTIL = HBaseTestingUtility.createLocalHTU();
FILESYSTEM = TEST_UTIL.getTestFileSystem();
CONF = TEST_UTIL.getConfiguration();
NettyAsyncFSWALConfigHelper.setEventLoopConfig(CONF, GROUP, NioSocketChannel.class);
dir = TEST_UTIL.getDataTestDir("TestHRegion").toString();
method = name.getMethodName();
tableName = TableName.valueOf(method);
CONF.set(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, String.valueOf(0.09));
}
@After
public void tearDown() throws IOException {
// Region may have been closed, but it is still no harm if we close it again here using HTU.
HBaseTestingUtility.closeRegionAndWAL(region);
EnvironmentEdgeManagerTestHelper.reset();
LOG.info("Cleaning test directory: " + TEST_UTIL.getDataTestDir());
TEST_UTIL.cleanupTestDir();
}
/**
* Test that I can use the max flushed sequence id after the close.
* @throws IOException
*/
@Test
public void testSequenceId() throws IOException {
region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
assertEquals(HConstants.NO_SEQNUM, region.getMaxFlushedSeqId());
// Weird. This returns 0 if no store files or no edits. Afraid to change it.
assertEquals(0, (long)region.getMaxStoreSeqId().get(COLUMN_FAMILY_BYTES));
HBaseTestingUtility.closeRegionAndWAL(this.region);
assertEquals(HConstants.NO_SEQNUM, region.getMaxFlushedSeqId());
assertEquals(0, (long)region.getMaxStoreSeqId().get(COLUMN_FAMILY_BYTES));
// Open region again.
region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
byte [] value = Bytes.toBytes(method);
// Make a random put against our cf.
Put put = new Put(value);
put.addColumn(COLUMN_FAMILY_BYTES, null, value);
region.put(put);
// No flush yet so init numbers should still be in place.
assertEquals(HConstants.NO_SEQNUM, region.getMaxFlushedSeqId());
assertEquals(0, (long)region.getMaxStoreSeqId().get(COLUMN_FAMILY_BYTES));
region.flush(true);
long max = region.getMaxFlushedSeqId();
HBaseTestingUtility.closeRegionAndWAL(this.region);
assertEquals(max, region.getMaxFlushedSeqId());
this.region = null;
}
/**
* Test for Bug 2 of HBASE-10466.
* "Bug 2: Conditions for the first flush of region close (so-called pre-flush) If memstoreSize
* is smaller than a certain value, or when region close starts a flush is ongoing, the first
* flush is skipped and only the second flush takes place. However, two flushes are required in
* case previous flush fails and leaves some data in snapshot. The bug could cause loss of data
* in current memstore. The fix is removing all conditions except abort check so we ensure 2
* flushes for region close."
* @throws IOException
*/
@Test
public void testCloseCarryingSnapshot() throws IOException {
region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
HStore store = region.getStore(COLUMN_FAMILY_BYTES);
// Get some random bytes.
byte [] value = Bytes.toBytes(method);
// Make a random put against our cf.
Put put = new Put(value);
put.addColumn(COLUMN_FAMILY_BYTES, null, value);
// First put something in current memstore, which will be in snapshot after flusher.prepare()
region.put(put);
StoreFlushContext storeFlushCtx = store.createFlushContext(12345, FlushLifeCycleTracker.DUMMY);
storeFlushCtx.prepare();
// Second put something in current memstore
put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
region.put(put);
// Close with something in memstore and something in the snapshot. Make sure all is cleared.
HBaseTestingUtility.closeRegionAndWAL(region);
assertEquals(0, region.getMemStoreDataSize());
region = null;
}
/*
* This test is for verifying memstore snapshot size is correctly updated in case of rollback
* See HBASE-10845
*/
@Test
public void testMemstoreSnapshotSize() throws IOException {
class MyFaultyFSLog extends FaultyFSLog {
StoreFlushContext storeFlushCtx;
public MyFaultyFSLog(FileSystem fs, Path rootDir, String logName, Configuration conf)
throws IOException {
super(fs, rootDir, logName, conf);
}
void setStoreFlushCtx(StoreFlushContext storeFlushCtx) {
this.storeFlushCtx = storeFlushCtx;
}
@Override
public void sync(long txid) throws IOException {
storeFlushCtx.prepare();
super.sync(txid);
}
}
FileSystem fs = FileSystem.get(CONF);
Path rootDir = new Path(dir + "testMemstoreSnapshotSize");
MyFaultyFSLog faultyLog = new MyFaultyFSLog(fs, rootDir, "testMemstoreSnapshotSize", CONF);
region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, faultyLog,
COLUMN_FAMILY_BYTES);
HStore store = region.getStore(COLUMN_FAMILY_BYTES);
// Get some random bytes.
byte [] value = Bytes.toBytes(method);
faultyLog.setStoreFlushCtx(store.createFlushContext(12345, FlushLifeCycleTracker.DUMMY));
Put put = new Put(value);
put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
faultyLog.setFailureType(FaultyFSLog.FailureType.SYNC);
boolean threwIOE = false;
try {
region.put(put);
} catch (IOException ioe) {
threwIOE = true;
} finally {
assertTrue("The regionserver should have thrown an exception", threwIOE);
}
MemStoreSize mss = store.getFlushableSize();
assertTrue("flushable size should be zero, but it is " + mss,
mss.getDataSize() == 0);
}
/**
* Create a WAL outside of the usual helper in
* {@link HBaseTestingUtility#createWal(Configuration, Path, RegionInfo)} because that method
* doesn't play nicely with FaultyFileSystem. Call this method before overriding
* {@code fs.file.impl}.
* @param callingMethod a unique component for the path, probably the name of the test method.
*/
private static WAL createWALCompatibleWithFaultyFileSystem(String callingMethod,
Configuration conf, TableName tableName) throws IOException {
final Path logDir = TEST_UTIL.getDataTestDirOnTestFS(callingMethod + ".log");
final Configuration walConf = new Configuration(conf);
FSUtils.setRootDir(walConf, logDir);
return new WALFactory(walConf, callingMethod)
.getWAL(RegionInfoBuilder.newBuilder(tableName).build());
}
/**
 * Verifies that when the postBatchMutate coprocessor hook throws an IOException, the
 * memstore and flushable-size accounting still reflect the mutation that was applied.
 * Fix: the region and its FSHLog used to leak because this test never closed them
 * (unlike sibling tests); they are now released in a finally block.
 */
@Test
public void testMemstoreSizeAccountingWithFailedPostBatchMutate() throws IOException {
  String testName = "testMemstoreSizeAccountingWithFailedPostBatchMutate";
  FileSystem fs = FileSystem.get(CONF);
  Path rootDir = new Path(dir + testName);
  FSHLog hLog = new FSHLog(fs, rootDir, testName, CONF);
  hLog.init();
  HRegion region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, hLog,
      COLUMN_FAMILY_BYTES);
  try {
    HStore store = region.getStore(COLUMN_FAMILY_BYTES);
    assertEquals(0, region.getMemStoreDataSize());
    // Put one value
    byte [] value = Bytes.toBytes(method);
    Put put = new Put(value);
    put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
    region.put(put);
    long onePutSize = region.getMemStoreDataSize();
    assertTrue(onePutSize > 0);
    // Make the coprocessor host fail every post-batch-mutate hook.
    RegionCoprocessorHost mockedCPHost = Mockito.mock(RegionCoprocessorHost.class);
    doThrow(new IOException())
        .when(mockedCPHost).postBatchMutate(Mockito.<MiniBatchOperationInProgress<Mutation>>any());
    region.setCoprocessorHost(mockedCPHost);
    put = new Put(value);
    put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("dfg"), value);
    try {
      region.put(put);
      fail("Should have failed with IOException");
    } catch (IOException expected) {
    }
    long expectedSize = onePutSize * 2;
    // Even though the hook failed, the mutation itself was applied, so both counters grow.
    assertEquals("memstoreSize should be incremented",
        expectedSize, region.getMemStoreDataSize());
    assertEquals("flushable size should be incremented",
        expectedSize, store.getFlushableSize().getDataSize());
    region.setCoprocessorHost(null);
  } finally {
    // Fix for a resource leak: close the region together with its WAL, as other tests do.
    HBaseTestingUtility.closeRegionAndWAL(region);
  }
}
/**
 * A test case of HBASE-21041: after a flush, the memstore data size must return to zero,
 * the heap size must fall back to the overhead of a fresh mutable segment, and the
 * off-heap size must also be zero.
 * @throws Exception Exception
 */
@Test
public void testFlushAndMemstoreSizeCounting() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  final WALFactory wals = new WALFactory(CONF, method);
  try {
    // Fill the memstore with one cell per predefined test row.
    for (byte[] row : HBaseTestingUtility.ROWS) {
      Put put = new Put(row);
      put.addColumn(family, family, row);
      region.put(put);
    }
    region.flush(true);
    // After flush, data size should be zero
    assertEquals(0, region.getMemStoreDataSize());
    // After flush, a new active mutable segment is created, so the heap size
    // should equal to MutableSegment.DEEP_OVERHEAD
    assertEquals(MutableSegment.DEEP_OVERHEAD, region.getMemStoreHeapSize());
    // After flush, offheap should be zero
    assertEquals(0, region.getMemStoreOffHeapSize());
  } finally {
    // Always release the region and its WAL resources, even on assertion failure.
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
  }
}
/**
 * Test we do not lose data if we fail a flush and then close.
 * Part of HBase-10466. Tests the following from the issue description:
 * "Bug 1: Wrong calculation of HRegion.memstoreSize: When a flush fails, data to be flushed is
 * kept in each MemStore's snapshot and wait for next flush attempt to continue on it. But when
 * the next flush succeeds, the counter of total memstore size in HRegion is always deduced by
 * the sum of current memstore sizes instead of snapshots left from previous failed flush. This
 * calculation is problematic that almost every time there is failed flush, HRegion.memstoreSize
 * gets reduced by a wrong value. If region flush could not proceed for a couple cycles, the size
 * in current memstore could be much larger than the snapshot. It's likely to drift memstoreSize
 * much smaller than expected. In extreme case, if the error accumulates to even bigger than
 * HRegion's memstore size limit, any further flush is skipped because flush does not do anything
 * if memstoreSize is not larger than 0."
 * @throws Exception if region setup, the faulty-filesystem injection, or the flush fails
 */
@Test
public void testFlushSizeAccounting() throws Exception {
  final Configuration conf = HBaseConfiguration.create(CONF);
  // The WAL must be created before fs.file.impl is overridden with the faulty filesystem.
  final WAL wal = createWALCompatibleWithFaultyFileSystem(method, conf, tableName);
  // Only retry once.
  conf.setInt("hbase.hstore.flush.retries.number", 1);
  final User user =
      User.createUserForTesting(conf, method, new String[]{"foo"});
  // Inject our faulty LocalFileSystem
  conf.setClass("fs.file.impl", FaultyFileSystem.class, FileSystem.class);
  user.runAs(new PrivilegedExceptionAction<Object>() {
    @Override
    public Object run() throws Exception {
      // Make sure it worked (above is sensitive to caching details in hadoop core)
      FileSystem fs = FileSystem.get(conf);
      Assert.assertEquals(FaultyFileSystem.class, fs.getClass());
      FaultyFileSystem ffs = (FaultyFileSystem)fs;
      HRegion region = null;
      try {
        // Initialize region
        region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, wal,
            COLUMN_FAMILY_BYTES);
        long size = region.getMemStoreDataSize();
        Assert.assertEquals(0, size);
        // Put one item into memstore. Measure the size of one item in memstore.
        Put p1 = new Put(row);
        p1.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual1, 1, (byte[]) null));
        region.put(p1);
        final long sizeOfOnePut = region.getMemStoreDataSize();
        // Fail a flush which means the current memstore will hang out as memstore 'snapshot'.
        try {
          LOG.info("Flushing");
          region.flush(true);
          Assert.fail("Didn't bubble up IOE!");
        } catch (DroppedSnapshotException dse) {
          // What we are expecting
          region.closing.set(false); // this is needed for the rest of the test to work
        }
        // Make it so all writes succeed from here on out
        ffs.fault.set(false);
        // Check sizes. Should still be the one entry.
        Assert.assertEquals(sizeOfOnePut, region.getMemStoreDataSize());
        // Now add two entries so that on this next flush that fails, we can see if we
        // subtract the right amount, the snapshot size only.
        Put p2 = new Put(row);
        p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual2, 2, (byte[])null));
        p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual3, 3, (byte[])null));
        region.put(p2);
        long expectedSize = sizeOfOnePut * 3;
        Assert.assertEquals(expectedSize, region.getMemStoreDataSize());
        // Do a successful flush. It will clear the snapshot only. Thats how flushes work.
        // If already a snapshot, we clear it else we move the memstore to be snapshot and flush
        // it
        region.flush(true);
        // Make sure our memory accounting is right.
        Assert.assertEquals(sizeOfOnePut * 2, region.getMemStoreDataSize());
      } finally {
        HBaseTestingUtility.closeRegionAndWAL(region);
      }
      return null;
    }
  });
  FileSystem.closeAllForUGI(user.getUGI());
}
/**
 * Closes a region while its flush is failing (via FaultyFileSystem) and verifies the close
 * surfaces a DroppedSnapshotException rather than silently dropping the snapshot data.
 */
@Test
public void testCloseWithFailingFlush() throws Exception {
  final Configuration conf = HBaseConfiguration.create(CONF);
  // Create the WAL before the faulty filesystem is injected below.
  final WAL wal = createWALCompatibleWithFaultyFileSystem(method, conf, tableName);
  // Only retry once.
  conf.setInt("hbase.hstore.flush.retries.number", 1);
  final User user =
      User.createUserForTesting(conf, this.method, new String[]{"foo"});
  // Inject our faulty LocalFileSystem
  conf.setClass("fs.file.impl", FaultyFileSystem.class, FileSystem.class);
  user.runAs(new PrivilegedExceptionAction<Object>() {
    @Override
    public Object run() throws Exception {
      // Make sure it worked (above is sensitive to caching details in hadoop core)
      FileSystem fs = FileSystem.get(conf);
      Assert.assertEquals(FaultyFileSystem.class, fs.getClass());
      FaultyFileSystem ffs = (FaultyFileSystem)fs;
      HRegion region = null;
      try {
        // Initialize region
        region = initHRegion(tableName, null, null, false,
            Durability.SYNC_WAL, wal, COLUMN_FAMILY_BYTES);
        long size = region.getMemStoreDataSize();
        Assert.assertEquals(0, size);
        // Put one item into memstore. Measure the size of one item in memstore.
        Put p1 = new Put(row);
        p1.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual1, 1, (byte[])null));
        region.put(p1);
        // Manufacture an outstanding snapshot -- fake a failed flush by doing prepare step only.
        HStore store = region.getStore(COLUMN_FAMILY_BYTES);
        StoreFlushContext storeFlushCtx =
            store.createFlushContext(12345, FlushLifeCycleTracker.DUMMY);
        storeFlushCtx.prepare();
        // Now add two entries to the foreground memstore.
        Put p2 = new Put(row);
        p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual2, 2, (byte[])null));
        p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual3, 3, (byte[])null));
        region.put(p2);
        // Now try close on top of a failing flush.
        HBaseTestingUtility.closeRegionAndWAL(region);
        region = null;
        fail();
      } catch (DroppedSnapshotException dse) {
        // Expected
        LOG.info("Expected DroppedSnapshotException");
      } finally {
        // Make it so all writes succeed from here on out so can close clean
        ffs.fault.set(false);
        HBaseTestingUtility.closeRegionAndWAL(region);
      }
      return null;
    }
  });
  FileSystem.closeAllForUGI(user.getUGI());
}
/**
 * Verifies that a major compaction respects the read points of already-open scanners:
 * a scanner opened before the delete still sees the row after compaction, while scanners
 * opened after the delete (and after the compaction) see nothing.
 * Fix: the three RegionScanners used to leak; they are now closed at the end.
 */
@Test
public void testCompactionAffectedByScanners() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  Put put = new Put(Bytes.toBytes("r1"));
  put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
  region.put(put);
  region.flush(true);
  Scan scan = new Scan();
  scan.setMaxVersions(3);
  // open the first scanner (before the delete, so it retains a read point that sees r1)
  RegionScanner scanner1 = region.getScanner(scan);
  Delete delete = new Delete(Bytes.toBytes("r1"));
  region.delete(delete);
  region.flush(true);
  // open the second scanner (after the delete)
  RegionScanner scanner2 = region.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  System.out.println("Smallest read point:" + region.getSmallestReadPoint());
  // make a major compaction
  region.compact(true);
  // open the third scanner (after the compaction)
  RegionScanner scanner3 = region.getScanner(scan);
  // get data from scanner 1, 2, 3 after major compaction
  scanner1.next(results);
  System.out.println(results);
  assertEquals(1, results.size());
  results.clear();
  scanner2.next(results);
  System.out.println(results);
  assertEquals(0, results.size());
  results.clear();
  scanner3.next(results);
  System.out.println(results);
  assertEquals(0, results.size());
  // Fix for a resource leak: release all scanners (previously never closed).
  scanner1.close();
  scanner2.close();
  scanner3.close();
}
/**
 * Regression check: reseeking an open RegionScanner across a major compaction must not
 * throw an NPE, and the reseek must land on the requested row.
 */
@Test
public void testToShowNPEOnRegionScannerReseek() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  // Load two rows into the same family/qualifier, then flush them to disk.
  for (String rowKey : new String[] { "r1", "r2" }) {
    Put p = new Put(Bytes.toBytes(rowKey));
    p.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
    region.put(p);
  }
  region.flush(true);
  Scan scan = new Scan();
  scan.setMaxVersions(3);
  // Open a scanner before the compaction so its read point predates it.
  RegionScanner scanner1 = region.getScanner(scan);
  System.out.println("Smallest read point:" + region.getSmallestReadPoint());
  region.compact(true);
  // Reseek over the compacted store; previously this could NPE.
  scanner1.reseek(Bytes.toBytes("r2"));
  List<Cell> results = new ArrayList<>();
  scanner1.next(results);
  Cell keyValue = results.get(0);
  Assert.assertTrue(Bytes.compareTo(CellUtil.cloneRow(keyValue), Bytes.toBytes("r2")) == 0);
  scanner1.close();
}
/**
 * Writes a series of recovered-edits files and verifies that replay applies every edit whose
 * sequence id is above the stores' max flushed sequence id, and that all edits become visible.
 * Fix: the bare {@code fs.create(recoveredEdits)} used to leak an open output stream; the
 * stream is now closed immediately (the recovered-edits writer re-creates the file anyway).
 */
@Test
public void testSkipRecoveredEditsReplay() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  final WALFactory wals = new WALFactory(CONF, method);
  try {
    Path regiondir = region.getRegionFileSystem().getRegionDir();
    FileSystem fs = region.getRegionFileSystem().getFileSystem();
    byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
    Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
    long maxSeqId = 1050;
    long minSeqId = 1000;
    // One recovered-edits file per sequence id, each holding a single Put edit.
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
      fs.create(recoveredEdits).close(); // close right away to avoid leaking the stream
      WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
      long time = System.nanoTime();
      WALEdit edit = new WALEdit();
      edit.add(new KeyValue(row, family, Bytes.toBytes(i), time, KeyValue.Type.Put, Bytes
          .toBytes(i)));
      writer.append(new WAL.Entry(new WALKeyImpl(regionName, tableName, i, time,
          HConstants.DEFAULT_CLUSTER_ID), edit));
      writer.close();
    }
    MonitoredTask status = TaskMonitor.get().createStatus(method);
    Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (HStore store : region.getStores()) {
      // Pretend nothing was flushed yet so that every edit must be replayed.
      maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), minSeqId - 1);
    }
    long seqId = region.replayRecoveredEditsIfAny(maxSeqIdInStores, null, status);
    assertEquals(maxSeqId, seqId);
    region.getMVCC().advanceTo(seqId);
    Get get = new Get(row);
    Result result = region.get(get);
    // Every replayed edit must be visible, one cell per qualifier.
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      List<Cell> kvs = result.getColumnCells(family, Bytes.toBytes(i));
      assertEquals(1, kvs.size());
      assertArrayEquals(Bytes.toBytes(i), CellUtil.cloneValue(kvs.get(0)));
    }
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
  }
}
/**
 * Like {@code testSkipRecoveredEditsReplay}, but sets the stores' max flushed sequence id
 * partway through the range so that edits below {@code recoverSeqId} are skipped during
 * replay while the rest are applied and become visible.
 * Fix: the bare {@code fs.create(recoveredEdits)} used to leak an open output stream; the
 * stream is now closed immediately (the recovered-edits writer re-creates the file anyway).
 */
@Test
public void testSkipRecoveredEditsReplaySomeIgnored() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  final WALFactory wals = new WALFactory(CONF, method);
  try {
    Path regiondir = region.getRegionFileSystem().getRegionDir();
    FileSystem fs = region.getRegionFileSystem().getFileSystem();
    byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
    Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
    long maxSeqId = 1050;
    long minSeqId = 1000;
    // One recovered-edits file per sequence id, each holding a single Put edit.
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
      fs.create(recoveredEdits).close(); // close right away to avoid leaking the stream
      WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
      long time = System.nanoTime();
      WALEdit edit = new WALEdit();
      edit.add(new KeyValue(row, family, Bytes.toBytes(i), time, KeyValue.Type.Put, Bytes
          .toBytes(i)));
      writer.append(new WAL.Entry(new WALKeyImpl(regionName, tableName, i, time,
          HConstants.DEFAULT_CLUSTER_ID), edit));
      writer.close();
    }
    long recoverSeqId = 1030;
    MonitoredTask status = TaskMonitor.get().createStatus(method);
    Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (HStore store : region.getStores()) {
      // Edits with seqId < recoverSeqId are considered already flushed and must be skipped.
      maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), recoverSeqId - 1);
    }
    long seqId = region.replayRecoveredEditsIfAny(maxSeqIdInStores, null, status);
    assertEquals(maxSeqId, seqId);
    region.getMVCC().advanceTo(seqId);
    Get get = new Get(row);
    Result result = region.get(get);
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      List<Cell> kvs = result.getColumnCells(family, Bytes.toBytes(i));
      if (i < recoverSeqId) {
        // Skipped edits must not be visible.
        assertEquals(0, kvs.size());
      } else {
        assertEquals(1, kvs.size());
        assertArrayEquals(Bytes.toBytes(i), CellUtil.cloneValue(kvs.get(0)));
      }
    }
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
  }
}
/**
 * Writes only garbage / empty recovered-edits files whose names are all at or below the
 * stores' max flushed sequence id and verifies replay ignores every one of them, returning
 * the stores' minimum sequence id unchanged.
 */
@Test
public void testSkipRecoveredEditsReplayAllIgnored() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  Path regiondir = region.getRegionFileSystem().getRegionDir();
  FileSystem fs = region.getRegionFileSystem().getFileSystem();
  Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
  // Files containing a bare int are not valid WAL edits and must be skipped.
  for (int i = 1000; i < 1050; i += 10) {
    Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
    FSDataOutputStream dos = fs.create(recoveredEdits);
    dos.writeInt(i);
    dos.close();
  }
  long minSeqId = 2000;
  // An empty file named below minSeqId must also be ignored.
  Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", minSeqId - 1));
  FSDataOutputStream dos = fs.create(recoveredEdits);
  dos.close();
  Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  for (HStore store : region.getStores()) {
    maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), minSeqId);
  }
  long seqId = region.replayRecoveredEditsIfAny(maxSeqIdInStores, null, null);
  assertEquals(minSeqId, seqId);
}
/**
 * Writes recovered-edits files where the last one carries a compaction marker instead of a
 * Put. Replay must apply the Put edits, ignore the marker as data, and flush the replayed
 * edits into exactly one store file.
 * Fix: the bare {@code fs.create(recoveredEdits)} used to leak an open output stream; the
 * stream is now closed immediately (the recovered-edits writer re-creates the file anyway).
 */
@Test
public void testSkipRecoveredEditsReplayTheLastFileIgnored() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  final WALFactory wals = new WALFactory(CONF, method);
  try {
    Path regiondir = region.getRegionFileSystem().getRegionDir();
    FileSystem fs = region.getRegionFileSystem().getFileSystem();
    byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
    byte[][] columns = region.getTableDescriptor().getColumnFamilyNames().toArray(new byte[0][]);
    assertEquals(0, region.getStoreFileList(columns).size());
    Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
    long maxSeqId = 1050;
    long minSeqId = 1000;
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
      fs.create(recoveredEdits).close(); // close right away to avoid leaking the stream
      WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
      long time = System.nanoTime();
      WALEdit edit = null;
      if (i == maxSeqId) {
        // The final file holds a compaction marker rather than a data edit.
        edit = WALEdit.createCompaction(region.getRegionInfo(),
            CompactionDescriptor.newBuilder()
                .setTableName(ByteString.copyFrom(tableName.getName()))
                .setFamilyName(ByteString.copyFrom(regionName))
                .setEncodedRegionName(ByteString.copyFrom(regionName))
                .setStoreHomeDirBytes(ByteString.copyFrom(Bytes.toBytes(regiondir.toString())))
                .setRegionName(ByteString.copyFrom(region.getRegionInfo().getRegionName()))
                .build());
      } else {
        edit = new WALEdit();
        edit.add(new KeyValue(row, family, Bytes.toBytes(i), time, KeyValue.Type.Put, Bytes
            .toBytes(i)));
      }
      writer.append(new WAL.Entry(new WALKeyImpl(regionName, tableName, i, time,
          HConstants.DEFAULT_CLUSTER_ID), edit));
      writer.close();
    }
    long recoverSeqId = 1030;
    Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    MonitoredTask status = TaskMonitor.get().createStatus(method);
    for (HStore store : region.getStores()) {
      maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), recoverSeqId - 1);
    }
    long seqId = region.replayRecoveredEditsIfAny(maxSeqIdInStores, null, status);
    assertEquals(maxSeqId, seqId);
    // assert that the files are flushed
    assertEquals(1, region.getStoreFileList(columns).size());
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
  }
}
/**
 * Runs the compaction-marker replay scenario with both a matching and a mismatched
 * encoded region name.
 */
@Test
public void testRecoveredEditsReplayCompaction() throws Exception {
  for (boolean mismatchedRegionName : new boolean[] { false, true }) {
    testRecoveredEditsReplayCompaction(mismatchedRegionName);
  }
}
/**
 * Core of the recovered-edits compaction-replay scenario. Creates three store files, runs a
 * compaction whose completion is suppressed, commits the compacted file manually, writes the
 * compaction marker both to the WAL and to a recovered-edits file, and re-opens the region to
 * verify the marker is replayed without data loss. When {@code mismatchedRegionName} is true
 * the marker carries a fake encoded region name, and the single-store-file assertion is
 * skipped since the marker does not match the region.
 * Fix: the bare {@code fs.create(recoveredEdits)} used to leak an open output stream; the
 * stream is now closed immediately (the recovered-edits writer re-creates the file anyway).
 * @param mismatchedRegionName whether the marker should carry a non-matching encoded name
 */
public void testRecoveredEditsReplayCompaction(boolean mismatchedRegionName) throws Exception {
  CONF.setClass(HConstants.REGION_IMPL, HRegionForTesting.class, Region.class);
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  final WALFactory wals = new WALFactory(CONF, method);
  try {
    Path regiondir = region.getRegionFileSystem().getRegionDir();
    FileSystem fs = region.getRegionFileSystem().getFileSystem();
    byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
    long maxSeqId = 3;
    long minSeqId = 0;
    for (long i = minSeqId; i < maxSeqId; i++) {
      Put put = new Put(Bytes.toBytes(i));
      put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
      region.put(put);
      region.flush(true);
    }
    // this will create a region with 3 files
    assertEquals(3, region.getStore(family).getStorefilesCount());
    List<Path> storeFiles = new ArrayList<>(3);
    for (HStoreFile sf : region.getStore(family).getStorefiles()) {
      storeFiles.add(sf.getPath());
    }
    // disable compaction completion
    CONF.setBoolean("hbase.hstore.compaction.complete", false);
    region.compactStores();
    // ensure that nothing changed
    assertEquals(3, region.getStore(family).getStorefilesCount());
    // now find the compacted file, and manually add it to the recovered edits
    Path tmpDir = new Path(region.getRegionFileSystem().getTempDir(), Bytes.toString(family));
    FileStatus[] files = FSUtils.listStatus(fs, tmpDir);
    String errorMsg = "Expected to find 1 file in the region temp directory "
        + "from the compaction, could not find any";
    assertNotNull(errorMsg, files);
    assertEquals(errorMsg, 1, files.length);
    // move the file inside region dir
    Path newFile = region.getRegionFileSystem().commitStoreFile(Bytes.toString(family),
        files[0].getPath());
    byte[] encodedNameAsBytes = this.region.getRegionInfo().getEncodedNameAsBytes();
    byte[] fakeEncodedNameAsBytes = new byte [encodedNameAsBytes.length];
    for (int i=0; i < encodedNameAsBytes.length; i++) {
      // Mix the byte array to have a new encodedName
      fakeEncodedNameAsBytes[i] = (byte) (encodedNameAsBytes[i] + 1);
    }
    CompactionDescriptor compactionDescriptor = ProtobufUtil.toCompactionDescriptor(this.region
        .getRegionInfo(), mismatchedRegionName ? fakeEncodedNameAsBytes : null, family,
        storeFiles, Lists.newArrayList(newFile),
        region.getRegionFileSystem().getStoreDir(Bytes.toString(family)));
    WALUtil.writeCompactionMarker(region.getWAL(), this.region.getReplicationScope(),
        this.region.getRegionInfo(), compactionDescriptor, region.getMVCC());
    Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
    Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", 1000));
    fs.create(recoveredEdits).close(); // close right away to avoid leaking the stream
    WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
    long time = System.nanoTime();
    writer.append(new WAL.Entry(new WALKeyImpl(regionName, tableName, 10, time,
        HConstants.DEFAULT_CLUSTER_ID), WALEdit.createCompaction(region.getRegionInfo(),
        compactionDescriptor)));
    writer.close();
    // close the region now, and reopen again
    region.getTableDescriptor();
    region.getRegionInfo();
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    try {
      region = HRegion.openHRegion(region, null);
    } catch (WrongRegionException wre) {
      fail("Matching encoded region name should not have produced WrongRegionException");
    }
    // now check whether we have only one store file, the compacted one
    Collection<HStoreFile> sfs = region.getStore(family).getStorefiles();
    for (HStoreFile sf : sfs) {
      LOG.info(Objects.toString(sf.getPath()));
    }
    if (!mismatchedRegionName) {
      assertEquals(1, region.getStore(family).getStorefilesCount());
    }
    files = FSUtils.listStatus(fs, tmpDir);
    assertTrue("Expected to find 0 files inside " + tmpDir, files == null || files.length == 0);
    // Regardless of replay outcome, no data may be lost.
    for (long i = minSeqId; i < maxSeqId; i++) {
      Get get = new Get(Bytes.toBytes(i));
      Result result = region.get(get);
      byte[] value = result.getValue(family, Bytes.toBytes(i));
      assertArrayEquals(Bytes.toBytes(i), value);
    }
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
    // Restore the default region implementation for subsequent tests.
    CONF.setClass(HConstants.REGION_IMPL, HRegion.class, Region.class);
  }
}
/**
 * Verifies that flush markers (START_FLUSH / COMMIT_FLUSH) are written to the WAL with the
 * expected contents, and that replaying those markers from recovered edits leaves the data
 * readable after the region is reopened.
 * Fix: the bare {@code fs.create(recoveredEdits)} used to leak an open output stream; the
 * stream is now closed immediately (the recovered-edits writer re-creates the file anyway).
 */
@Test
public void testFlushMarkers() throws Exception {
  // tests that flush markers are written to WAL and handled at recovered edits
  byte[] family = Bytes.toBytes("family");
  Path logDir = TEST_UTIL.getDataTestDirOnTestFS(method + ".log");
  final Configuration walConf = new Configuration(TEST_UTIL.getConfiguration());
  FSUtils.setRootDir(walConf, logDir);
  final WALFactory wals = new WALFactory(walConf, method);
  final WAL wal = wals.getWAL(RegionInfoBuilder.newBuilder(tableName).build());
  this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
      HConstants.EMPTY_END_ROW, false, Durability.USE_DEFAULT, wal, family);
  try {
    Path regiondir = region.getRegionFileSystem().getRegionDir();
    FileSystem fs = region.getRegionFileSystem().getFileSystem();
    byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
    long maxSeqId = 3;
    long minSeqId = 0;
    for (long i = minSeqId; i < maxSeqId; i++) {
      Put put = new Put(Bytes.toBytes(i));
      put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
      region.put(put);
      region.flush(true);
    }
    // this will create a region with 3 files from flush
    assertEquals(3, region.getStore(family).getStorefilesCount());
    List<String> storeFiles = new ArrayList<>(3);
    for (HStoreFile sf : region.getStore(family).getStorefiles()) {
      storeFiles.add(sf.getPath().getName());
    }
    // now verify that the flush markers are written
    wal.shutdown();
    WAL.Reader reader = WALFactory.createReader(fs, AbstractFSWALProvider.getCurrentFileName(wal),
        TEST_UTIL.getConfiguration());
    try {
      List<WAL.Entry> flushDescriptors = new ArrayList<>();
      long lastFlushSeqId = -1;
      while (true) {
        WAL.Entry entry = reader.next();
        if (entry == null) {
          break;
        }
        Cell cell = entry.getEdit().getCells().get(0);
        if (WALEdit.isMetaEditFamily(cell)) {
          FlushDescriptor flushDesc = WALEdit.getFlushDescriptor(cell);
          assertNotNull(flushDesc);
          assertArrayEquals(tableName.getName(), flushDesc.getTableName().toByteArray());
          // START_FLUSH advances the flush sequence id; COMMIT_FLUSH repeats the same id.
          if (flushDesc.getAction() == FlushAction.START_FLUSH) {
            assertTrue(flushDesc.getFlushSequenceNumber() > lastFlushSeqId);
          } else if (flushDesc.getAction() == FlushAction.COMMIT_FLUSH) {
            assertTrue(flushDesc.getFlushSequenceNumber() == lastFlushSeqId);
          }
          lastFlushSeqId = flushDesc.getFlushSequenceNumber();
          assertArrayEquals(regionName, flushDesc.getEncodedRegionName().toByteArray());
          assertEquals(1, flushDesc.getStoreFlushesCount()); //only one store
          StoreFlushDescriptor storeFlushDesc = flushDesc.getStoreFlushes(0);
          assertArrayEquals(family, storeFlushDesc.getFamilyName().toByteArray());
          assertEquals("family", storeFlushDesc.getStoreHomeDir());
          if (flushDesc.getAction() == FlushAction.START_FLUSH) {
            assertEquals(0, storeFlushDesc.getFlushOutputCount());
          } else {
            assertEquals(1, storeFlushDesc.getFlushOutputCount()); //only one file from flush
            assertTrue(storeFiles.contains(storeFlushDesc.getFlushOutput(0)));
          }
          flushDescriptors.add(entry);
        }
      }
      assertEquals(3 * 2, flushDescriptors.size()); // START_FLUSH and COMMIT_FLUSH per flush
      // now write those markers to the recovered edits again.
      Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
      Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", 1000));
      fs.create(recoveredEdits).close(); // close right away to avoid leaking the stream
      WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
      for (WAL.Entry entry : flushDescriptors) {
        writer.append(entry);
      }
      writer.close();
    } finally {
      if (null != reader) {
        try {
          reader.close();
        } catch (IOException exception) {
          LOG.warn("Problem closing wal: " + exception.getMessage());
          LOG.debug("exception details", exception);
        }
      }
    }
    // close the region now, and reopen again
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    region = HRegion.openHRegion(region, null);
    // now check whether we have can read back the data from region
    for (long i = minSeqId; i < maxSeqId; i++) {
      Get get = new Get(Bytes.toBytes(i));
      Result result = region.get(get);
      byte[] value = result.getValue(family, Bytes.toBytes(i));
      assertArrayEquals(Bytes.toBytes(i), value);
    }
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
  }
}
/**
 * Mockito argument matcher that fires when a WALEdit is a flush meta edit whose
 * FlushDescriptor action is one of the configured {@link FlushAction}s.
 */
static class IsFlushWALMarker implements ArgumentMatcher<WALEdit> {
  // Volatile so the matched action set can be swapped while the mock is in use.
  volatile FlushAction[] actions;

  public IsFlushWALMarker(FlushAction... actions) {
    this.actions = actions;
  }

  @Override
  public boolean matches(WALEdit edit) {
    List<Cell> cells = edit.getCells();
    if (cells.isEmpty()) {
      return false;
    }
    Cell first = cells.get(0);
    if (!WALEdit.isMetaEditFamily(first)) {
      return false;
    }
    FlushDescriptor desc;
    try {
      desc = WALEdit.getFlushDescriptor(first);
    } catch (IOException e) {
      LOG.warn(e.toString(), e);
      return false;
    }
    if (desc == null) {
      return false;
    }
    for (FlushAction candidate : actions) {
      if (desc.getAction() == candidate) {
        return true;
      }
    }
    return false;
  }

  /** Replaces the matched actions and returns {@code this} for chaining. */
  public IsFlushWALMarker set(FlushAction... actions) {
    this.actions = actions;
    return this;
  }
}
/**
 * Exercises WAL append failures for flush markers: a failed START_FLUSH append must surface
 * as a plain IOException (no region abort), while a failed COMMIT_FLUSH append (with
 * ABORT_FLUSH also failing) must surface as a DroppedSnapshotException.
 */
@Test
public void testFlushMarkersWALFail() throws Exception {
  // test the cases where the WAL append for flush markers fail.
  byte[] family = Bytes.toBytes("family");
  // spy an actual WAL implementation to throw exception (was not able to mock)
  Path logDir = TEST_UTIL.getDataTestDirOnTestFS(method + "log");
  final Configuration walConf = new Configuration(TEST_UTIL.getConfiguration());
  FSUtils.setRootDir(walConf, logDir);
  // Make up a WAL that we can manipulate at append time.
  class FailAppendFlushMarkerWAL extends FSHLog {
    // Actions whose marker append should throw; swapped between test phases.
    volatile FlushAction [] flushActions = null;
    public FailAppendFlushMarkerWAL(FileSystem fs, Path root, String logDir, Configuration conf)
        throws IOException {
      super(fs, root, logDir, conf);
    }
    @Override
    protected Writer createWriterInstance(Path path) throws IOException {
      final Writer w = super.createWriterInstance(path);
      // Delegate everything, but fail append() for configured flush-marker actions.
      return new Writer() {
        @Override
        public void close() throws IOException {
          w.close();
        }
        @Override
        public void sync(boolean forceSync) throws IOException {
          w.sync(forceSync);
        }
        @Override
        public void append(Entry entry) throws IOException {
          List<Cell> cells = entry.getEdit().getCells();
          if (WALEdit.isMetaEditFamily(cells.get(0))) {
            FlushDescriptor desc = WALEdit.getFlushDescriptor(cells.get(0));
            if (desc != null) {
              for (FlushAction flushAction: flushActions) {
                if (desc.getAction().equals(flushAction)) {
                  throw new IOException("Failed to append flush marker! " + flushAction);
                }
              }
            }
          }
          w.append(entry);
        }
        @Override
        public long getLength() {
          return w.getLength();
        }
      };
    }
  }
  FailAppendFlushMarkerWAL wal =
      new FailAppendFlushMarkerWAL(FileSystem.get(walConf), FSUtils.getRootDir(walConf),
          method, walConf);
  wal.init();
  this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
      HConstants.EMPTY_END_ROW, false, Durability.USE_DEFAULT, wal, family);
  int i = 0;
  Put put = new Put(Bytes.toBytes(i));
  put.setDurability(Durability.SKIP_WAL); // have to skip mocked wal
  put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
  region.put(put);
  // 1. Test case where START_FLUSH throws exception
  wal.flushActions = new FlushAction [] {FlushAction.START_FLUSH};
  // start cache flush will throw exception
  try {
    region.flush(true);
    fail("This should have thrown exception");
  } catch (DroppedSnapshotException unexpected) {
    // this should not be a dropped snapshot exception. Meaning that RS will not abort
    throw unexpected;
  } catch (IOException expected) {
    // expected
  }
  // The WAL is hosed now. It has two edits appended. We cannot roll the log without it
  // throwing a DroppedSnapshotException to force an abort. Just clean up the mess.
  region.close(true);
  wal.close();
  // 2. Test case where START_FLUSH succeeds but COMMIT_FLUSH will throw exception
  wal.flushActions = new FlushAction [] {FlushAction.COMMIT_FLUSH};
  wal = new FailAppendFlushMarkerWAL(FileSystem.get(walConf), FSUtils.getRootDir(walConf),
      method, walConf);
  wal.init();
  this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
      HConstants.EMPTY_END_ROW, false, Durability.USE_DEFAULT, wal, family);
  region.put(put);
  // 3. Test case where ABORT_FLUSH will throw exception.
  // Even if ABORT_FLUSH throws exception, we should not fail with IOE, but continue with
  // DroppedSnapshotException. Below COMMIT_FLUSH will cause flush to abort
  wal.flushActions = new FlushAction [] {FlushAction.COMMIT_FLUSH, FlushAction.ABORT_FLUSH};
  try {
    region.flush(true);
    fail("This should have thrown exception");
  } catch (DroppedSnapshotException expected) {
    // we expect this exception, since we were able to write the snapshot, but failed to
    // write the flush marker to WAL
  } catch (IOException unexpected) {
    throw unexpected;
  }
}
/**
 * Runs concurrent getter threads while the region's closing flag is set and verifies that
 * none of them dies with a NullPointerException (they may fail with other exceptions, e.g.
 * NotServingRegionException, which is acceptable).
 */
@Test
public void testGetWhileRegionClose() throws IOException {
  Configuration hc = initSplit();
  int numRows = 100;
  byte[][] families = { fam1, fam2, fam3 };
  // Setting up region
  this.region = initHRegion(tableName, method, hc, families);
  // Put data in region
  final int startRow = 100;
  putData(startRow, numRows, qual1, families);
  putData(startRow, numRows, qual2, families);
  putData(startRow, numRows, qual3, families);
  final AtomicBoolean done = new AtomicBoolean(false);
  final AtomicInteger gets = new AtomicInteger(0);
  GetTillDoneOrException[] threads = new GetTillDoneOrException[10];
  try {
    // Set ten threads running concurrently getting from the region.
    for (int i = 0; i < threads.length / 2; i++) {
      threads[i] = new GetTillDoneOrException(i, Bytes.toBytes("" + startRow), done, gets);
      threads[i].setDaemon(true);
      threads[i].start();
    }
    // Artificially make the condition by setting closing flag explicitly.
    // I can't make the issue happen with a call to region.close().
    this.region.closing.set(true);
    // Start the second half of the getters after the closing flag is up.
    for (int i = threads.length / 2; i < threads.length; i++) {
      threads[i] = new GetTillDoneOrException(i, Bytes.toBytes("" + startRow), done, gets);
      threads[i].setDaemon(true);
      threads[i].start();
    }
  } finally {
    if (this.region != null) {
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      this.region = null;
    }
  }
  done.set(true);
  for (GetTillDoneOrException t : threads) {
    try {
      t.join();
    } catch (InterruptedException e) {
      e.printStackTrace();
    }
    // Other exceptions are tolerated; only an NPE constitutes a failure here.
    if (t.e != null) {
      LOG.info("Exception=" + t.e);
      assertFalse("Found a NPE in " + t.getName(), t.e instanceof NullPointerException);
    }
  }
}
/**
 * Worker thread that repeatedly issues a Get for a single row until the shared 'done'
 * flag flips. The first exception that aborts the loop is captured in the {@code e} field
 * for inspection by the driving test.
 */
class GetTillDoneOrException extends Thread {
  private final Get g;
  private final AtomicBoolean done;
  private final AtomicInteger count;
  private Exception e;

  GetTillDoneOrException(final int index, final byte[] rowKey, final AtomicBoolean doneFlag,
      final AtomicInteger successCounter) {
    super("getter." + index);
    this.g = new Get(rowKey);
    this.done = doneFlag;
    this.count = successCounter;
  }

  @Override
  public void run() {
    for (;;) {
      if (this.done.get()) {
        return;
      }
      try {
        assertTrue(region.get(g).size() > 0);
        this.count.incrementAndGet();
      } catch (Exception ex) {
        // Record the failure and stop; the test thread inspects this afterwards.
        this.e = ex;
        return;
      }
    }
  }
}
/**
 * An involved filter test. Has multiple column families and deletes in mix: puts two values
 * per row across three key prefixes, verifies scans find the newer value, then deletes the
 * newer value's columns and verifies the scans come back empty.
 */
@Test
public void testWeirdCacheBehaviour() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"), Bytes.toBytes("trans-type"),
      Bytes.toBytes("trans-date"), Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") };
  this.region = initHRegion(tableName, method, CONF, FAMILIES);
  String value = "this is the value";
  String value2 = "this is some other value";
  String keyPrefix1 = "prefix1";
  String keyPrefix2 = "prefix2";
  String keyPrefix3 = "prefix3";
  // First round writes value, second round overwrites with value2.
  putRows(this.region, 3, value, keyPrefix1);
  putRows(this.region, 3, value, keyPrefix2);
  putRows(this.region, 3, value, keyPrefix3);
  putRows(this.region, 3, value2, keyPrefix1);
  putRows(this.region, 3, value2, keyPrefix2);
  putRows(this.region, 3, value2, keyPrefix3);
  System.out.println("Checking values for key: " + keyPrefix1);
  assertEquals("Got back incorrect number of rows from scan", 3,
      getNumberOfRows(keyPrefix1, value2, this.region));
  System.out.println("Checking values for key: " + keyPrefix2);
  assertEquals("Got back incorrect number of rows from scan", 3,
      getNumberOfRows(keyPrefix2, value2, this.region));
  System.out.println("Checking values for key: " + keyPrefix3);
  assertEquals("Got back incorrect number of rows from scan", 3,
      getNumberOfRows(keyPrefix3, value2, this.region));
  // Delete the value2 columns; subsequent scans must find no matching rows.
  deleteColumns(this.region, value2, keyPrefix1);
  deleteColumns(this.region, value2, keyPrefix2);
  deleteColumns(this.region, value2, keyPrefix3);
  System.out.println("Starting important checks.....");
  assertEquals("Got back incorrect number of rows from scan: " + keyPrefix1, 0,
      getNumberOfRows(keyPrefix1, value2, this.region));
  assertEquals("Got back incorrect number of rows from scan: " + keyPrefix2, 0,
      getNumberOfRows(keyPrefix2, value2, this.region));
  assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3, 0,
      getNumberOfRows(keyPrefix3, value2, this.region));
}
@Test
public void testAppendWithReadOnlyTable() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
this.region = initHRegion(tableName, method, CONF, true, Bytes.toBytes("somefamily"));
boolean exceptionCaught = false;
Append append = new Append(Bytes.toBytes("somerow"));
append.setDurability(Durability.SKIP_WAL);
append.addColumn(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
Bytes.toBytes("somevalue"));
try {
region.append(append);
} catch (IOException e) {
exceptionCaught = true;
}
assertTrue(exceptionCaught == true);
}
@Test
public void testIncrWithReadOnlyTable() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
this.region = initHRegion(tableName, method, CONF, true, Bytes.toBytes("somefamily"));
boolean exceptionCaught = false;
Increment inc = new Increment(Bytes.toBytes("somerow"));
inc.setDurability(Durability.SKIP_WAL);
inc.addColumn(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"), 1L);
try {
region.increment(inc);
} catch (IOException e) {
exceptionCaught = true;
}
assertTrue(exceptionCaught == true);
}
private void deleteColumns(HRegion r, String value, String keyPrefix) throws IOException {
InternalScanner scanner = buildScanner(keyPrefix, value, r);
int count = 0;
boolean more = false;
List<Cell> results = new ArrayList<>();
do {
more = scanner.next(results);
if (results != null && !results.isEmpty())
count++;
else
break;
Delete delete = new Delete(CellUtil.cloneRow(results.get(0)));
delete.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
r.delete(delete);
results.clear();
} while (more);
assertEquals("Did not perform correct number of deletes", 3, count);
}
private int getNumberOfRows(String keyPrefix, String value, HRegion r) throws Exception {
InternalScanner resultScanner = buildScanner(keyPrefix, value, r);
int numberOfResults = 0;
List<Cell> results = new ArrayList<>();
boolean more = false;
do {
more = resultScanner.next(results);
if (results != null && !results.isEmpty())
numberOfResults++;
else
break;
for (Cell kv : results) {
System.out.println("kv=" + kv.toString() + ", " + Bytes.toString(CellUtil.cloneValue(kv)));
}
results.clear();
} while (more);
return numberOfResults;
}
private InternalScanner buildScanner(String keyPrefix, String value, HRegion r)
throws IOException {
// Defaults FilterList.Operator.MUST_PASS_ALL.
FilterList allFilters = new FilterList();
allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
// Only return rows where this column value exists in the row.
SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
filter.setFilterIfMissing(true);
allFilters.addFilter(filter);
Scan scan = new Scan();
scan.addFamily(Bytes.toBytes("trans-blob"));
scan.addFamily(Bytes.toBytes("trans-type"));
scan.addFamily(Bytes.toBytes("trans-date"));
scan.addFamily(Bytes.toBytes("trans-tags"));
scan.addFamily(Bytes.toBytes("trans-group"));
scan.setFilter(allFilters);
return r.getScanner(scan);
}
private void putRows(HRegion r, int numRows, String value, String key) throws IOException {
for (int i = 0; i < numRows; i++) {
String row = key + "_" + i/* UUID.randomUUID().toString() */;
System.out.println(String.format("Saving row: %s, with value %s", row, value));
Put put = new Put(Bytes.toBytes(row));
put.setDurability(Durability.SKIP_WAL);
put.addColumn(Bytes.toBytes("trans-blob"), null, Bytes.toBytes("value for blob"));
put.addColumn(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
put.addColumn(Bytes.toBytes("trans-date"), null, Bytes.toBytes("20090921010101999"));
put.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes.toBytes(value));
put.addColumn(Bytes.toBytes("trans-group"), null, Bytes.toBytes("adhocTransactionGroupId"));
r.put(put);
}
}
@Test
public void testFamilyWithAndWithoutColon() throws Exception {
byte[] cf = Bytes.toBytes(COLUMN_FAMILY);
this.region = initHRegion(tableName, method, CONF, cf);
Put p = new Put(tableName.toBytes());
byte[] cfwithcolon = Bytes.toBytes(COLUMN_FAMILY + ":");
p.addColumn(cfwithcolon, cfwithcolon, cfwithcolon);
boolean exception = false;
try {
this.region.put(p);
} catch (NoSuchColumnFamilyException e) {
exception = true;
}
assertTrue(exception);
}
  /**
   * A batch of ten valid puts must all succeed with a single WAL sync; a
   * second batch containing one put against an unknown family must fail only
   * that entry and cost exactly one more sync.
   */
  @Test
  public void testBatchPut_whileNoRowLocksHeld() throws IOException {
    final Put[] puts = new Put[10];
    MetricsWALSource source = CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
    long syncs = prepareRegionForBachPut(puts, source, false);
    OperationStatus[] codes = this.region.batchMutate(puts);
    assertEquals(10, codes.length);
    for (int i = 0; i < 10; i++) {
      assertEquals(OperationStatusCode.SUCCESS, codes[i].getOperationStatusCode());
    }
    // The whole batch went through exactly one WAL sync.
    metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 1, source);
    LOG.info("Next a batch put with one invalid family");
    puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, value);
    codes = this.region.batchMutate(puts);
    assertEquals(10, codes.length);
    for (int i = 0; i < 10; i++) {
      // Only the put against the unknown family reports BAD_FAMILY.
      assertEquals((i == 5) ? OperationStatusCode.BAD_FAMILY : OperationStatusCode.SUCCESS,
        codes[i].getOperationStatusCode());
    }
    metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 2, source);
  }
  /**
   * Exercises batchMutate while row locks on three of the rows are held by
   * this thread, forcing the batch to commit in several sub-batches, and
   * concurrently closes the region. Verifies (HBASE-12565) that the close
   * blocks until the in-flight batch finishes and the caller still receives
   * a full per-operation status array.
   */
  @Test
  public void testBatchPut_whileMultipleRowLocksHeld() throws Exception {
    final Put[] puts = new Put[10];
    MetricsWALSource source = CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
    long syncs = prepareRegionForBachPut(puts, source, false);
    puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, value);
    LOG.info("batchPut will have to break into four batches to avoid row locks");
    RowLock rowLock1 = region.getRowLock(Bytes.toBytes("row_2"));
    RowLock rowLock2 = region.getRowLock(Bytes.toBytes("row_1"));
    RowLock rowLock3 = region.getRowLock(Bytes.toBytes("row_3"));
    // Second lock on row_3; the 'true' flag presumably requests a read lock —
    // NOTE(review): confirm against HRegion#getRowLock(byte[], boolean).
    RowLock rowLock4 = region.getRowLock(Bytes.toBytes("row_3"), true);
    MultithreadedTestUtil.TestContext ctx = new MultithreadedTestUtil.TestContext(CONF);
    final AtomicReference<OperationStatus[]> retFromThread = new AtomicReference<>();
    final CountDownLatch startingPuts = new CountDownLatch(1);
    final CountDownLatch startingClose = new CountDownLatch(1);
    TestThread putter = new TestThread(ctx) {
      @Override
      public void doWork() throws IOException {
        startingPuts.countDown();
        retFromThread.set(region.batchMutate(puts));
      }
    };
    LOG.info("...starting put thread while holding locks");
    ctx.addThread(putter);
    ctx.startThreads();
    // Now attempt to close the region from another thread. Prior to HBASE-12565
    // this would cause the in-progress batchMutate operation to to fail with
    // exception because it use to release and re-acquire the close-guard lock
    // between batches. Caller then didn't get status indicating which writes succeeded.
    // We now expect this thread to block until the batchMutate call finishes.
    Thread regionCloseThread = new TestThread(ctx) {
      @Override
      public void doWork() {
        try {
          startingPuts.await();
          // Give some time for the batch mutate to get in.
          // We don't want to race with the mutate
          Thread.sleep(10);
          startingClose.countDown();
          HBaseTestingUtility.closeRegionAndWAL(region);
          region = null;
        } catch (IOException e) {
          throw new RuntimeException(e);
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      }
    };
    regionCloseThread.start();
    startingClose.await();
    startingPuts.await();
    Thread.sleep(100);
    LOG.info("...releasing row lock 1, which should let put thread continue");
    rowLock1.release();
    rowLock2.release();
    rowLock3.release();
    // One sync means the batch committed as a unit despite the lock contention.
    waitForCounter(source, "syncTimeNumOps", syncs + 1);
    LOG.info("...joining on put thread");
    ctx.stop();
    regionCloseThread.join();
    OperationStatus[] codes = retFromThread.get();
    for (int i = 0; i < codes.length; i++) {
      assertEquals((i == 5) ? OperationStatusCode.BAD_FAMILY : OperationStatusCode.SUCCESS,
        codes[i].getOperationStatusCode());
    }
    rowLock4.release();
  }
private void waitForCounter(MetricsWALSource source, String metricName, long expectedCount)
throws InterruptedException {
long startWait = System.currentTimeMillis();
long currentCount;
while ((currentCount = metricsAssertHelper.getCounter(metricName, source)) < expectedCount) {
Thread.sleep(100);
if (System.currentTimeMillis() - startWait > 10000) {
fail(String.format("Timed out waiting for '%s' >= '%s', currentCount=%s", metricName,
expectedCount, currentCount));
}
}
}
  /**
   * Atomic batchMutate: (1) all-valid batch succeeds in one sync, (2) a batch
   * blocked by a row lock held in this thread fails as a whole (run in a
   * second thread because getRowLock is reentrant), (3) a batch with one bad
   * family throws NoSuchColumnFamilyException instead of partially applying.
   */
  @Test
  public void testAtomicBatchPut() throws IOException {
    final Put[] puts = new Put[10];
    MetricsWALSource source = CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
    long syncs = prepareRegionForBachPut(puts, source, false);
    // 1. Straight forward case, should succeed
    MutationBatchOperation batchOp = new MutationBatchOperation(region, puts, true,
        HConstants.NO_NONCE, HConstants.NO_NONCE);
    OperationStatus[] codes = this.region.batchMutate(batchOp);
    assertEquals(10, codes.length);
    for (int i = 0; i < 10; i++) {
      assertEquals(OperationStatusCode.SUCCESS, codes[i].getOperationStatusCode());
    }
    metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 1, source);
    // 2. Failed to get lock
    RowLock lock = region.getRowLock(Bytes.toBytes("row_" + 3));
    // Method {@link HRegion#getRowLock(byte[])} is reentrant. As 'row_3' is locked in this
    // thread, need to run {@link HRegion#batchMutate(HRegion.BatchOperation)} in different thread
    MultithreadedTestUtil.TestContext ctx = new MultithreadedTestUtil.TestContext(CONF);
    final AtomicReference<IOException> retFromThread = new AtomicReference<>();
    final CountDownLatch finishedPuts = new CountDownLatch(1);
    final MutationBatchOperation finalBatchOp = new MutationBatchOperation(region, puts, true,
        HConstants
        .NO_NONCE,
        HConstants.NO_NONCE);
    TestThread putter = new TestThread(ctx) {
      @Override
      public void doWork() throws IOException {
        try {
          region.batchMutate(finalBatchOp);
        } catch (IOException ioe) {
          LOG.error("test failed!", ioe);
          retFromThread.set(ioe);
        }
        finishedPuts.countDown();
      }
    };
    LOG.info("...starting put thread while holding locks");
    ctx.addThread(putter);
    ctx.startThreads();
    LOG.info("...waiting for batch puts while holding locks");
    try {
      finishedPuts.await();
    } catch (InterruptedException e) {
      LOG.error("Interrupted!", e);
    } finally {
      if (lock != null) {
        lock.release();
      }
    }
    // The atomic batch must have surfaced an IOException rather than applying.
    assertNotNull(retFromThread.get());
    metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 1, source);
    // 3. Exception thrown in validation
    LOG.info("Next a batch put with one invalid family");
    puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, value);
    batchOp = new MutationBatchOperation(region, puts, true, HConstants.NO_NONCE,
        HConstants.NO_NONCE);
    thrown.expect(NoSuchColumnFamilyException.class);
    this.region.batchMutate(batchOp);
  }
@Test
public void testBatchPutWithTsSlop() throws Exception {
// add data with a timestamp that is too recent for range. Ensure assert
CONF.setInt("hbase.hregion.keyvalue.timestamp.slop.millisecs", 1000);
final Put[] puts = new Put[10];
MetricsWALSource source = CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
long syncs = prepareRegionForBachPut(puts, source, true);
OperationStatus[] codes = this.region.batchMutate(puts);
assertEquals(10, codes.length);
for (int i = 0; i < 10; i++) {
assertEquals(OperationStatusCode.SANITY_CHECK_FAILURE, codes[i].getOperationStatusCode());
}
metricsAssertHelper.assertCounter("syncTimeNumOps", syncs, source);
}
/**
* @return syncs initial syncTimeNumOps
*/
private long prepareRegionForBachPut(final Put[] puts, final MetricsWALSource source,
boolean slop) throws IOException {
this.region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
LOG.info("First a batch put with all valid puts");
for (int i = 0; i < puts.length; i++) {
puts[i] = slop ? new Put(Bytes.toBytes("row_" + i), Long.MAX_VALUE - 100) :
new Put(Bytes.toBytes("row_" + i));
puts[i].addColumn(COLUMN_FAMILY_BYTES, qual, value);
}
long syncs = metricsAssertHelper.getCounter("syncTimeNumOps", source);
metricsAssertHelper.assertCounter("syncTimeNumOps", syncs, source);
return syncs;
}
// ////////////////////////////////////////////////////////////////////////////
// checkAndMutate tests
// ////////////////////////////////////////////////////////////////////////////
  /**
   * checkAndMutate against empty and missing values: an empty-value comparison
   * matches both a missing cell and an explicitly-empty cell, and
   * NullComparator matches a deleted (absent) cell.
   */
  @Test
  public void testCheckAndMutate_WithEmptyRowValue() throws IOException {
    byte[] row1 = Bytes.toBytes("row1");
    byte[] fam1 = Bytes.toBytes("fam1");
    byte[] qf1 = Bytes.toBytes("qualifier");
    byte[] emptyVal = new byte[] {};
    byte[] val1 = Bytes.toBytes("value1");
    byte[] val2 = Bytes.toBytes("value2");
    // Setting up region
    this.region = initHRegion(tableName, method, CONF, fam1);
    // Putting empty data in key
    Put put = new Put(row1);
    put.addColumn(fam1, qf1, emptyVal);
    // checkAndPut with empty value: the cell does not exist yet, so the
    // empty-value comparison succeeds and the (empty) put is applied.
    boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
        new BinaryComparator(emptyVal), put);
    assertTrue(res);
    // Putting data in key
    put = new Put(row1);
    put.addColumn(fam1, qf1, val1);
    // checkAndPut with correct value: stored value is still empty, so this
    // succeeds and writes val1.
    res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
        new BinaryComparator(emptyVal), put);
    assertTrue(res);
    // not empty anymore
    res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
        new BinaryComparator(emptyVal), put);
    assertFalse(res);
    Delete delete = new Delete(row1);
    delete.addColumn(fam1, qf1);
    // checkAndDelete with empty value against a non-empty cell must fail.
    res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
        new BinaryComparator(emptyVal), delete);
    assertFalse(res);
    put = new Put(row1);
    put.addColumn(fam1, qf1, val2);
    // checkAndPut with correct value
    res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
        new BinaryComparator(val1), put);
    assertTrue(res);
    // checkAndDelete with correct value
    delete = new Delete(row1);
    delete.addColumn(fam1, qf1);
    delete.addColumn(fam1, qf1);
    res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
        new BinaryComparator(val2), delete);
    assertTrue(res);
    delete = new Delete(row1);
    // After the delete above the cell is gone, so the empty-value comparison
    // succeeds again and the whole-row delete is applied.
    res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
        new BinaryComparator(emptyVal), delete);
    assertTrue(res);
    // checkAndPut looking for a null value
    put = new Put(row1);
    put.addColumn(fam1, qf1, val1);
    res = region
        .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new NullComparator(), put);
    assertTrue(res);
  }
@Test
public void testCheckAndMutate_WithWrongValue() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] qf1 = Bytes.toBytes("qualifier");
byte[] val1 = Bytes.toBytes("value1");
byte[] val2 = Bytes.toBytes("value2");
BigDecimal bd1 = new BigDecimal(Double.MAX_VALUE);
BigDecimal bd2 = new BigDecimal(Double.MIN_VALUE);
// Setting up region
this.region = initHRegion(tableName, method, CONF, fam1);
// Putting data in key
Put put = new Put(row1);
put.addColumn(fam1, qf1, val1);
region.put(put);
// checkAndPut with wrong value
boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
new BinaryComparator(val2), put);
assertEquals(false, res);
// checkAndDelete with wrong value
Delete delete = new Delete(row1);
delete.addFamily(fam1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
new BinaryComparator(val2), put);
assertEquals(false, res);
// Putting data in key
put = new Put(row1);
put.addColumn(fam1, qf1, Bytes.toBytes(bd1));
region.put(put);
// checkAndPut with wrong value
res =
region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
new BigDecimalComparator(bd2), put);
assertEquals(false, res);
// checkAndDelete with wrong value
delete = new Delete(row1);
delete.addFamily(fam1);
res =
region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
new BigDecimalComparator(bd2), put);
assertEquals(false, res);
}
@Test
public void testCheckAndMutate_WithCorrectValue() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] qf1 = Bytes.toBytes("qualifier");
byte[] val1 = Bytes.toBytes("value1");
BigDecimal bd1 = new BigDecimal(Double.MIN_VALUE);
// Setting up region
this.region = initHRegion(tableName, method, CONF, fam1);
// Putting data in key
Put put = new Put(row1);
put.addColumn(fam1, qf1, val1);
region.put(put);
// checkAndPut with correct value
boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
new BinaryComparator(val1), put);
assertEquals(true, res);
// checkAndDelete with correct value
Delete delete = new Delete(row1);
delete.addColumn(fam1, qf1);
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1),
delete);
assertEquals(true, res);
// Putting data in key
put = new Put(row1);
put.addColumn(fam1, qf1, Bytes.toBytes(bd1));
region.put(put);
// checkAndPut with correct value
res =
region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BigDecimalComparator(
bd1), put);
assertEquals(true, res);
// checkAndDelete with correct value
delete = new Delete(row1);
delete.addColumn(fam1, qf1);
res =
region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BigDecimalComparator(
bd1), delete);
assertEquals(true, res);
}
@Test
public void testCheckAndMutate_WithNonEqualCompareOp() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] qf1 = Bytes.toBytes("qualifier");
byte[] val1 = Bytes.toBytes("value1");
byte[] val2 = Bytes.toBytes("value2");
byte[] val3 = Bytes.toBytes("value3");
byte[] val4 = Bytes.toBytes("value4");
// Setting up region
this.region = initHRegion(tableName, method, CONF, fam1);
// Putting val3 in key
Put put = new Put(row1);
put.addColumn(fam1, qf1, val3);
region.put(put);
// Test CompareOp.LESS: original = val3, compare with val3, fail
boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
new BinaryComparator(val3), put);
assertEquals(false, res);
// Test CompareOp.LESS: original = val3, compare with val4, fail
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
new BinaryComparator(val4), put);
assertEquals(false, res);
// Test CompareOp.LESS: original = val3, compare with val2,
// succeed (now value = val2)
put = new Put(row1);
put.addColumn(fam1, qf1, val2);
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
new BinaryComparator(val2), put);
assertEquals(true, res);
// Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val3, fail
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
new BinaryComparator(val3), put);
assertEquals(false, res);
// Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val2,
// succeed (value still = val2)
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
new BinaryComparator(val2), put);
assertEquals(true, res);
// Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val1,
// succeed (now value = val3)
put = new Put(row1);
put.addColumn(fam1, qf1, val3);
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
new BinaryComparator(val1), put);
assertEquals(true, res);
// Test CompareOp.GREATER: original = val3, compare with val3, fail
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
new BinaryComparator(val3), put);
assertEquals(false, res);
// Test CompareOp.GREATER: original = val3, compare with val2, fail
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
new BinaryComparator(val2), put);
assertEquals(false, res);
// Test CompareOp.GREATER: original = val3, compare with val4,
// succeed (now value = val2)
put = new Put(row1);
put.addColumn(fam1, qf1, val2);
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
new BinaryComparator(val4), put);
assertEquals(true, res);
// Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val1, fail
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
new BinaryComparator(val1), put);
assertEquals(false, res);
// Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val2,
// succeed (value still = val2)
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
new BinaryComparator(val2), put);
assertEquals(true, res);
// Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val3, succeed
res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
new BinaryComparator(val3), put);
assertEquals(true, res);
}
@Test
public void testCheckAndPut_ThatPutWasWritten() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] fam2 = Bytes.toBytes("fam2");
byte[] qf1 = Bytes.toBytes("qualifier");
byte[] val1 = Bytes.toBytes("value1");
byte[] val2 = Bytes.toBytes("value2");
byte[][] families = { fam1, fam2 };
// Setting up region
this.region = initHRegion(tableName, method, CONF, families);
// Putting data in the key to check
Put put = new Put(row1);
put.addColumn(fam1, qf1, val1);
region.put(put);
// Creating put to add
long ts = System.currentTimeMillis();
KeyValue kv = new KeyValue(row1, fam2, qf1, ts, KeyValue.Type.Put, val2);
put = new Put(row1);
put.add(kv);
// checkAndPut with wrong value
boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
new BinaryComparator(val1), put);
assertEquals(true, res);
Get get = new Get(row1);
get.addColumn(fam2, qf1);
Cell[] actual = region.get(get).rawCells();
Cell[] expected = { kv };
assertEquals(expected.length, actual.length);
for (int i = 0; i < actual.length; i++) {
assertEquals(expected[i], actual[i]);
}
}
@Test
public void testCheckAndPut_wrongRowInPut() throws IOException {
this.region = initHRegion(tableName, method, CONF, COLUMNS);
Put put = new Put(row2);
put.addColumn(fam1, qual1, value1);
try {
region.checkAndMutate(row, fam1, qual1, CompareOperator.EQUAL,
new BinaryComparator(value2), put);
fail();
} catch (org.apache.hadoop.hbase.DoNotRetryIOException expected) {
// expected exception.
}
}
  /**
   * Verifies that successful checkAndDeletes are actually applied, covering
   * multi-column, whole-family and whole-row deletes in sequence.
   */
  @Test
  public void testCheckAndDelete_ThatDeleteWasWritten() throws IOException {
    byte[] row1 = Bytes.toBytes("row1");
    byte[] fam1 = Bytes.toBytes("fam1");
    byte[] fam2 = Bytes.toBytes("fam2");
    byte[] qf1 = Bytes.toBytes("qualifier1");
    byte[] qf2 = Bytes.toBytes("qualifier2");
    byte[] qf3 = Bytes.toBytes("qualifier3");
    byte[] val1 = Bytes.toBytes("value1");
    byte[] val2 = Bytes.toBytes("value2");
    byte[] val3 = Bytes.toBytes("value3");
    byte[] emptyVal = new byte[] {};
    byte[][] families = { fam1, fam2 };
    // Setting up region
    this.region = initHRegion(tableName, method, CONF, families);
    // Put content: two versions of fam1:qf1 (val1 then val2) so a later
    // column-delete can expose the older version again.
    Put put = new Put(row1);
    put.addColumn(fam1, qf1, val1);
    region.put(put);
    Threads.sleep(2);
    put = new Put(row1);
    put.addColumn(fam1, qf1, val2);
    put.addColumn(fam2, qf1, val3);
    put.addColumn(fam2, qf2, val2);
    put.addColumn(fam2, qf3, val1);
    put.addColumn(fam1, qf3, val1);
    region.put(put);
    // Multi-column delete: removes the latest versions, uncovering val1 in fam1:qf1.
    Delete delete = new Delete(row1);
    delete.addColumn(fam1, qf1);
    delete.addColumn(fam2, qf1);
    delete.addColumn(fam1, qf3);
    boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
        new BinaryComparator(val2), delete);
    assertEquals(true, res);
    Get get = new Get(row1);
    get.addColumn(fam1, qf1);
    get.addColumn(fam1, qf3);
    get.addColumn(fam2, qf2);
    Result r = region.get(get);
    assertEquals(2, r.size());
    assertArrayEquals(val1, r.getValue(fam1, qf1));
    assertArrayEquals(val2, r.getValue(fam2, qf2));
    // Family delete: fam2:qf1 was removed above, so the empty-value check passes.
    delete = new Delete(row1);
    delete.addFamily(fam2);
    res = region.checkAndMutate(row1, fam2, qf1, CompareOperator.EQUAL,
        new BinaryComparator(emptyVal), delete);
    assertEquals(true, res);
    get = new Get(row1);
    r = region.get(get);
    assertEquals(1, r.size());
    assertArrayEquals(val1, r.getValue(fam1, qf1));
    // Row delete: only fam1:qf1=val1 remains; removing it empties the row.
    delete = new Delete(row1);
    res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1),
        delete);
    assertEquals(true, res);
    get = new Get(row1);
    r = region.get(get);
    assertEquals(0, r.size());
  }
// ////////////////////////////////////////////////////////////////////////////
// Delete tests
// ////////////////////////////////////////////////////////////////////////////
@Test
public void testDelete_multiDeleteColumn() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] qual = Bytes.toBytes("qualifier");
byte[] value = Bytes.toBytes("value");
Put put = new Put(row1);
put.addColumn(fam1, qual, 1, value);
put.addColumn(fam1, qual, 2, value);
this.region = initHRegion(tableName, method, CONF, fam1);
region.put(put);
// We do support deleting more than 1 'latest' version
Delete delete = new Delete(row1);
delete.addColumn(fam1, qual);
delete.addColumn(fam1, qual);
region.delete(delete);
Get get = new Get(row1);
get.addFamily(fam1);
Result r = region.get(get);
assertEquals(0, r.size());
}
@Test
public void testDelete_CheckFamily() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] fam2 = Bytes.toBytes("fam2");
byte[] fam3 = Bytes.toBytes("fam3");
byte[] fam4 = Bytes.toBytes("fam4");
// Setting up region
this.region = initHRegion(tableName, method, CONF, fam1, fam2, fam3);
List<Cell> kvs = new ArrayList<>();
kvs.add(new KeyValue(row1, fam4, null, null));
// testing existing family
byte[] family = fam2;
NavigableMap<byte[], List<Cell>> deleteMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
deleteMap.put(family, kvs);
region.delete(deleteMap, Durability.SYNC_WAL);
// testing non existing family
boolean ok = false;
family = fam4;
try {
deleteMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
deleteMap.put(family, kvs);
region.delete(deleteMap, Durability.SYNC_WAL);
} catch (Exception e) {
ok = true;
}
assertTrue("Family " + new String(family, StandardCharsets.UTF_8) + " does exist", ok);
}
  /**
   * Mixes column deletes with puts on the same row (using an incrementing
   * clock so each mutation gets a distinct timestamp) and checks that deletes
   * only hide what they should and that re-puts after deletes are visible.
   */
  @Test
  public void testDelete_mixed() throws IOException, InterruptedException {
    byte[] fam = Bytes.toBytes("info");
    byte[][] families = { fam };
    this.region = initHRegion(tableName, method, CONF, families);
    // Incrementing edge so successive mutations never share a timestamp.
    EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
    byte[] row = Bytes.toBytes("table_name");
    // column names
    byte[] serverinfo = Bytes.toBytes("serverinfo");
    byte[] splitA = Bytes.toBytes("splitA");
    byte[] splitB = Bytes.toBytes("splitB");
    // add some data:
    Put put = new Put(row);
    put.addColumn(fam, splitA, Bytes.toBytes("reference_A"));
    region.put(put);
    put = new Put(row);
    put.addColumn(fam, splitB, Bytes.toBytes("reference_B"));
    region.put(put);
    put = new Put(row);
    put.addColumn(fam, serverinfo, Bytes.toBytes("ip_address"));
    region.put(put);
    // ok now delete a split:
    Delete delete = new Delete(row);
    delete.addColumns(fam, splitA);
    region.delete(delete);
    // assert some things: splitA is gone, the other two columns remain.
    Get get = new Get(row).addColumn(fam, serverinfo);
    Result result = region.get(get);
    assertEquals(1, result.size());
    get = new Get(row).addColumn(fam, splitA);
    result = region.get(get);
    assertEquals(0, result.size());
    get = new Get(row).addColumn(fam, splitB);
    result = region.get(get);
    assertEquals(1, result.size());
    // Assert that after a delete, I can put.
    put = new Put(row);
    put.addColumn(fam, splitA, Bytes.toBytes("reference_A"));
    region.put(put);
    get = new Get(row);
    result = region.get(get);
    assertEquals(3, result.size());
    // Now delete all... then test I can add stuff back
    delete = new Delete(row);
    region.delete(delete);
    assertEquals(0, region.get(get).size());
    region.put(new Put(row).addColumn(fam, splitA, Bytes.toBytes("reference_A")));
    result = region.get(get);
    assertEquals(1, result.size());
  }
@Test
public void testDeleteRowWithFutureTs() throws IOException {
byte[] fam = Bytes.toBytes("info");
byte[][] families = { fam };
this.region = initHRegion(tableName, method, CONF, families);
byte[] row = Bytes.toBytes("table_name");
// column names
byte[] serverinfo = Bytes.toBytes("serverinfo");
// add data in the far future
Put put = new Put(row);
put.addColumn(fam, serverinfo, HConstants.LATEST_TIMESTAMP - 5, Bytes.toBytes("value"));
region.put(put);
// now delete something in the present
Delete delete = new Delete(row);
region.delete(delete);
// make sure we still see our data
Get get = new Get(row).addColumn(fam, serverinfo);
Result result = region.get(get);
assertEquals(1, result.size());
// delete the future row
delete = new Delete(row, HConstants.LATEST_TIMESTAMP - 3);
region.delete(delete);
// make sure it is gone
get = new Get(row).addColumn(fam, serverinfo);
result = region.get(get);
assertEquals(0, result.size());
}
  /**
   * Tests that the special LATEST_TIMESTAMP option for puts gets replaced by
   * the actual timestamp
   */
  @Test
  public void testPutWithLatestTS() throws IOException {
    byte[] fam = Bytes.toBytes("info");
    byte[][] families = { fam };
    this.region = initHRegion(tableName, method, CONF, families);
    byte[] row = Bytes.toBytes("row1");
    // column names
    byte[] qual = Bytes.toBytes("qual");
    // add data with LATEST_TIMESTAMP. NOTE(review): the original comment said
    // "put without WAL", but no durability override is set on this put.
    Put put = new Put(row);
    put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
    region.put(put);
    // Make sure it shows up with an actual timestamp
    Get get = new Get(row).addColumn(fam, qual);
    Result result = region.get(get);
    assertEquals(1, result.size());
    Cell kv = result.rawCells()[0];
    LOG.info("Got: " + kv);
    assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp",
        kv.getTimestamp() != HConstants.LATEST_TIMESTAMP);
    // Check same with WAL enabled (historically these took different
    // code paths, so check both)
    row = Bytes.toBytes("row2");
    put = new Put(row);
    put.addColumn(fam, qual, HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
    region.put(put);
    // Make sure it shows up with an actual timestamp
    get = new Get(row).addColumn(fam, qual);
    result = region.get(get);
    assertEquals(1, result.size());
    kv = result.rawCells()[0];
    LOG.info("Got: " + kv);
    assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp",
        kv.getTimestamp() != HConstants.LATEST_TIMESTAMP);
  }
/**
 * Tests that there is server-side filtering for invalid timestamp upper
 * bound. Note that the timestamp lower bound is automatically handled for us
 * by the TTL field.
 */
@Test
public void testPutWithTsSlop() throws IOException {
  byte[] fam = Bytes.toBytes("info");
  byte[][] families = { fam };
  // add data with a timestamp that is too recent for range. Ensure assert
  // NOTE(review): this mutates the shared CONF and never restores the prior
  // slop value -- confirm later tests in this class are not affected.
  CONF.setInt("hbase.hregion.keyvalue.timestamp.slop.millisecs", 1000);
  this.region = initHRegion(tableName, method, CONF, families);
  boolean caughtExcep = false;
  try {
    // no TS specified == use latest. should not error
    region.put(new Put(row).addColumn(fam, Bytes.toBytes("qual"), Bytes.toBytes("value")));
    // TS out of range. should error
    region.put(new Put(row).addColumn(fam, Bytes.toBytes("qual"),
        System.currentTimeMillis() + 2000, Bytes.toBytes("value")));
    fail("Expected IOE for TS out of configured timerange");
  } catch (FailedSanityCheckException ioe) {
    LOG.debug("Received expected exception", ioe);
    caughtExcep = true;
  }
  assertTrue("Should catch FailedSanityCheckException", caughtExcep);
}
/**
 * Deleting one family of a row must not hide that row's other family, and
 * must not affect other rows at all.
 */
@Test
public void testScanner_DeleteOneFamilyNotAnother() throws IOException {
  byte[] fam1 = Bytes.toBytes("columnA");
  byte[] fam2 = Bytes.toBytes("columnB");
  this.region = initHRegion(tableName, method, CONF, fam1, fam2);
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] value = Bytes.toBytes("value");
  // Delete all of fam1 for rowA *before* any data is written.
  Delete delete = new Delete(rowA);
  delete.addFamily(fam1);
  region.delete(delete);
  // now create data.
  Put put = new Put(rowA);
  put.addColumn(fam2, null, value);
  region.put(put);
  put = new Put(rowB);
  put.addColumn(fam1, null, value);
  put.addColumn(fam2, null, value);
  region.put(put);
  Scan scan = new Scan();
  scan.addFamily(fam1).addFamily(fam2);
  InternalScanner s = region.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  // Both rows must still be visible to the scanner.
  s.next(results);
  assertTrue(CellUtil.matchingRows(results.get(0), rowA));
  results.clear();
  s.next(results);
  assertTrue(CellUtil.matchingRows(results.get(0), rowB));
}
/**
 * Exercises the dataInMemoryWithoutWAL accounting for the four combinations
 * of original-put / CP-added-put durabilities. The counter should only grow
 * when the *original* mutation skips the WAL (cases 1 and 2 below); the
 * durability of the coprocessor-added put is expected to be superseded by
 * the original put's durability.
 */
@Test
public void testDataInMemoryWithoutWAL() throws IOException {
  FileSystem fs = FileSystem.get(CONF);
  Path rootDir = new Path(dir + "testDataInMemoryWithoutWAL");
  FSHLog hLog = new FSHLog(fs, rootDir, "testDataInMemoryWithoutWAL", CONF);
  hLog.init();
  // This chunk creation is done throughout the code base. Do we want to move it into core?
  // It is missing from this test. W/o it we NPE.
  region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, hLog,
      COLUMN_FAMILY_BYTES);
  Cell originalCell = CellUtil.createCell(row, COLUMN_FAMILY_BYTES, qual1,
      System.currentTimeMillis(), KeyValue.Type.Put.getCode(), value1);
  final long originalSize = originalCell.getSerializedSize();
  Cell addCell = CellUtil.createCell(row, COLUMN_FAMILY_BYTES, qual1,
      System.currentTimeMillis(), KeyValue.Type.Put.getCode(), Bytes.toBytes("xxxxxxxxxx"));
  final long addSize = addCell.getSerializedSize();
  LOG.info("originalSize:" + originalSize
      + ", addSize:" + addSize);
  // start test. We expect that the addPut's durability will be replaced
  // by originalPut's durability.
  // case 1: SKIP_WAL + SKIP_WAL -> both sizes are counted
  testDataInMemoryWithoutWAL(region,
      new Put(row).add(originalCell).setDurability(Durability.SKIP_WAL),
      new Put(row).add(addCell).setDurability(Durability.SKIP_WAL),
      originalSize + addSize);
  // case 2: SKIP_WAL original dominates the SYNC_WAL added put
  testDataInMemoryWithoutWAL(region,
      new Put(row).add(originalCell).setDurability(Durability.SKIP_WAL),
      new Put(row).add(addCell).setDurability(Durability.SYNC_WAL),
      originalSize + addSize);
  // case 3: SYNC_WAL original -> nothing is counted
  testDataInMemoryWithoutWAL(region,
      new Put(row).add(originalCell).setDurability(Durability.SYNC_WAL),
      new Put(row).add(addCell).setDurability(Durability.SKIP_WAL),
      0);
  // case 4: SYNC_WAL + SYNC_WAL -> nothing is counted
  testDataInMemoryWithoutWAL(region,
      new Put(row).add(originalCell).setDurability(Durability.SYNC_WAL),
      new Put(row).add(addCell).setDurability(Durability.SYNC_WAL),
      0);
}
/**
 * Puts {@code originalPut} through the region while a mocked coprocessor host
 * injects {@code addPut} via preBatchMutate, then asserts that the region's
 * dataInMemoryWithoutWAL counter grew by exactly {@code delta}.
 *
 * @param region      region under test; its real coprocessor host is swapped
 *                    out for the mock and restored before returning
 * @param originalPut the mutation submitted by the "client"
 * @param addPut      the mutation the mocked CP host injects into the batch
 * @param delta       expected growth of getDataInMemoryWithoutWAL()
 */
private static void testDataInMemoryWithoutWAL(HRegion region, Put originalPut,
    final Put addPut, long delta) throws IOException {
  final long initSize = region.getDataInMemoryWithoutWAL();
  // save normalCPHost and replaced by mockedCPHost
  RegionCoprocessorHost normalCPHost = region.getCoprocessorHost();
  RegionCoprocessorHost mockedCPHost = Mockito.mock(RegionCoprocessorHost.class);
  // Because the preBatchMutate returns void, we can't do usual Mockito when...then form. Must
  // do below format (from Mockito doc).
  Mockito.doAnswer(new Answer() {
    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      MiniBatchOperationInProgress<Mutation> mb = invocation.getArgument(0);
      mb.addOperationsFromCP(0, new Mutation[]{addPut});
      return null;
    }
  }).when(mockedCPHost).preBatchMutate(Mockito.isA(MiniBatchOperationInProgress.class));
  // The mocked host must still answer flush-scanner hooks sensibly, or the
  // flush path inside put() breaks; pass through real ScanInfo / scanner.
  ColumnFamilyDescriptorBuilder builder = ColumnFamilyDescriptorBuilder.
      newBuilder(COLUMN_FAMILY_BYTES);
  ScanInfo info = new ScanInfo(CONF, builder.build(), Long.MAX_VALUE,
      Long.MAX_VALUE, region.getCellComparator());
  Mockito.when(mockedCPHost.preFlushScannerOpen(Mockito.any(HStore.class),
      Mockito.any())).thenReturn(info);
  Mockito.when(mockedCPHost.preFlush(Mockito.any(), Mockito.any(StoreScanner.class),
      Mockito.any())).thenAnswer(i -> i.getArgument(1));
  region.setCoprocessorHost(mockedCPHost);
  region.put(originalPut);
  region.setCoprocessorHost(normalCPHost);
  final long finalSize = region.getDataInMemoryWithoutWAL();
  assertEquals("finalSize:" + finalSize + ", initSize:"
      + initSize + ", delta:" + delta,finalSize, initSize + delta);
}
/** A put issued after a column-level delete must still be readable. */
@Test
public void testDeleteColumns_PostInsert() throws IOException, InterruptedException {
  final Delete columnDelete = new Delete(row);
  columnDelete.addColumns(fam1, qual1);
  doTestDelete_AndPostInsert(columnDelete);
}
/** A put issued after a family-level delete must still be readable. */
@Test
public void testaddFamily_PostInsert() throws IOException, InterruptedException {
  final Delete familyDelete = new Delete(row);
  familyDelete.addFamily(fam1);
  doTestDelete_AndPostInsert(familyDelete);
}
/**
 * Shared body for the *_PostInsert tests: writes a value, applies the given
 * delete, writes a second value, and verifies that both Get and Scan see the
 * second value (i.e. the delete does not mask the later put).
 *
 * NOTE(review): an IncrementingEnvironmentEdge is injected here and does not
 * appear to be reset within this method -- presumably a teardown elsewhere
 * restores the default edge; confirm.
 */
public void doTestDelete_AndPostInsert(Delete delete) throws IOException, InterruptedException {
  this.region = initHRegion(tableName, method, CONF, fam1);
  // Each call to currentTime() advances the clock, so the delete and the
  // second put are guaranteed distinct, increasing timestamps.
  EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
  Put put = new Put(row);
  put.addColumn(fam1, qual1, value1);
  region.put(put);
  // now delete the value:
  region.delete(delete);
  // ok put data:
  put = new Put(row);
  put.addColumn(fam1, qual1, value2);
  region.put(put);
  // ok get:
  Get get = new Get(row);
  get.addColumn(fam1, qual1);
  Result r = region.get(get);
  assertEquals(1, r.size());
  assertArrayEquals(value2, r.getValue(fam1, qual1));
  // next:
  Scan scan = new Scan(row);
  scan.addColumn(fam1, qual1);
  InternalScanner s = region.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  assertEquals(false, s.next(results));
  assertEquals(1, results.size());
  Cell kv = results.get(0);
  assertArrayEquals(value2, CellUtil.cloneValue(kv));
  assertArrayEquals(fam1, CellUtil.cloneFamily(kv));
  assertArrayEquals(qual1, CellUtil.cloneQualifier(kv));
  assertArrayEquals(row, CellUtil.cloneRow(kv));
}
/**
 * Deletes built with no explicit timestamp should have a real (current)
 * timestamp assigned server-side; verify by inspecting the memstore cells.
 */
@Test
public void testDelete_CheckTimestampUpdated() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] col1 = Bytes.toBytes("col1");
  byte[] col2 = Bytes.toBytes("col2");
  byte[] col3 = Bytes.toBytes("col3");
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);
  // Building checkerList
  List<Cell> kvs = new ArrayList<>();
  kvs.add(new KeyValue(row1, fam1, col1, null));
  kvs.add(new KeyValue(row1, fam1, col2, null));
  kvs.add(new KeyValue(row1, fam1, col3, null));
  NavigableMap<byte[], List<Cell>> deleteMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  deleteMap.put(fam1, kvs);
  region.delete(deleteMap, Durability.SYNC_WAL);
  // extract the key values out the memstore:
  // This is kinda hacky, but better than nothing...
  long now = System.currentTimeMillis();
  AbstractMemStore memstore = (AbstractMemStore)region.getStore(fam1).memstore;
  Cell firstCell = memstore.getActive().first();
  // Every stored cell must carry a timestamp no later than "now", i.e.
  // LATEST_TIMESTAMP has been replaced with a concrete value.
  assertTrue(firstCell.getTimestamp() <= now);
  now = firstCell.getTimestamp();
  for (Cell cell : memstore.getActive().getCellSet()) {
    assertTrue(cell.getTimestamp() <= now);
    now = cell.getTimestamp();
  }
}
// ////////////////////////////////////////////////////////////////////////////
// Get tests
// ////////////////////////////////////////////////////////////////////////////
/**
 * A Get naming a column family the region does not have must be rejected
 * with a DoNotRetryIOException rather than silently returning nothing.
 */
@Test
public void testGet_FamilyChecker() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("False");
  byte[] col1 = Bytes.toBytes("col1");
  // Setting up region -- only fam1 exists; fam2 is deliberately missing.
  this.region = initHRegion(tableName, method, CONF, fam1);
  Get get = new Get(row1);
  get.addColumn(fam2, col1);
  // Test
  try {
    region.get(get);
    fail("Expecting DoNotRetryIOException in get but did not get any");
  } catch (org.apache.hadoop.hbase.DoNotRetryIOException e) {
    LOG.info("Got expected DoNotRetryIOException successfully");
  }
}
/**
 * Basic Get behaviour: a Get restricted to a subset of columns returns
 * exactly those columns, and a ColumnCountGetFilter limits the result size.
 */
@Test
public void testGet_Basic() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] col1 = Bytes.toBytes("col1");
  byte[] col2 = Bytes.toBytes("col2");
  byte[] col3 = Bytes.toBytes("col3");
  byte[] col4 = Bytes.toBytes("col4");
  byte[] col5 = Bytes.toBytes("col5");
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);
  // Add to memstore
  Put put = new Put(row1);
  put.addColumn(fam1, col1, null);
  put.addColumn(fam1, col2, null);
  put.addColumn(fam1, col3, null);
  put.addColumn(fam1, col4, null);
  put.addColumn(fam1, col5, null);
  region.put(put);
  Get get = new Get(row1);
  get.addColumn(fam1, col2);
  get.addColumn(fam1, col4);
  // Expected result
  KeyValue kv1 = new KeyValue(row1, fam1, col2);
  KeyValue kv2 = new KeyValue(row1, fam1, col4);
  KeyValue[] expected = { kv1, kv2 };
  // Test
  Result res = region.get(get);
  assertEquals(expected.length, res.size());
  for (int i = 0; i < res.size(); i++) {
    assertTrue(CellUtil.matchingRows(expected[i], res.rawCells()[i]));
    assertTrue(CellUtil.matchingFamily(expected[i], res.rawCells()[i]));
    assertTrue(CellUtil.matchingQualifier(expected[i], res.rawCells()[i]));
  }
  // Test using a filter on a Get
  Get g = new Get(row1);
  final int count = 2;
  g.setFilter(new ColumnCountGetFilter(count));
  res = region.get(g);
  assertEquals(count, res.size());
}
/** A Get against a row that was never written returns an empty Result. */
@Test
public void testGet_Empty() throws IOException {
  byte[] missingRow = Bytes.toBytes("row");
  byte[] family = Bytes.toBytes("fam");
  this.region = initHRegion(tableName, method, CONF, family);
  Get emptyGet = new Get(missingRow);
  emptyGet.addFamily(family);
  Result result = region.get(emptyGet);
  assertTrue(result.isEmpty());
}
/**
 * Verifies interaction between max-versions enforcement and a value filter:
 * with maxVersions=3 and 4 written versions, only the newest 3 are visible,
 * and a ValueFilter further narrows the result. The same result must hold
 * before and after a flush + compaction.
 */
@Test
public void testGetWithFilter() throws IOException, InterruptedException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] col1 = Bytes.toBytes("col1");
  byte[] value1 = Bytes.toBytes("value1");
  byte[] value2 = Bytes.toBytes("value2");
  final int maxVersions = 3;
  HColumnDescriptor hcd = new HColumnDescriptor(fam1);
  hcd.setMaxVersions(maxVersions);
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testFilterAndColumnTracker"));
  htd.addFamily(hcd);
  ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  Path logDir = TEST_UTIL.getDataTestDirOnTestFS(method + ".log");
  final WAL wal = HBaseTestingUtility.createWal(TEST_UTIL.getConfiguration(), logDir, info);
  this.region = TEST_UTIL.createLocalHRegion(info, htd, wal);
  // Put 4 version to memstore
  long ts = 0;
  Put put = new Put(row1, ts);
  put.addColumn(fam1, col1, value1);
  region.put(put);
  put = new Put(row1, ts + 1);
  put.addColumn(fam1, col1, Bytes.toBytes("filter1"));
  region.put(put);
  put = new Put(row1, ts + 2);
  put.addColumn(fam1, col1, Bytes.toBytes("filter2"));
  region.put(put);
  put = new Put(row1, ts + 3);
  put.addColumn(fam1, col1, value2);
  region.put(put);
  Get get = new Get(row1);
  get.setMaxVersions();
  Result res = region.get(get);
  // Get 3 versions, the oldest version has gone from user view
  assertEquals(maxVersions, res.size());
  get.setFilter(new ValueFilter(CompareOp.EQUAL, new SubstringComparator("value")));
  res = region.get(get);
  // When use value filter, the oldest version should still gone from user view and it
  // should only return one key vaule
  assertEquals(1, res.size());
  assertTrue(CellUtil.matchingValue(new KeyValue(row1, fam1, col1, value2), res.rawCells()[0]));
  assertEquals(ts + 3, res.rawCells()[0].getTimestamp());
  region.flush(true);
  region.compact(true);
  Thread.sleep(1000);
  res = region.get(get);
  // After flush and compact, the result should be consistent with previous result
  assertEquals(1, res.size());
  assertTrue(CellUtil.matchingValue(new KeyValue(row1, fam1, col1, value2), res.rawCells()[0]));
}
// ////////////////////////////////////////////////////////////////////////////
// Scanner tests
// ////////////////////////////////////////////////////////////////////////////
/**
 * Opening a scanner over families that exist in the region must succeed.
 */
@Test
public void testGetScanner_WithOkFamilies() throws IOException {
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[][] families = { fam1, fam2 };
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);
  Scan scan = new Scan();
  scan.addFamily(fam1);
  scan.addFamily(fam2);
  try {
    region.getScanner(scan);
  } catch (Exception e) {
    // fail() instead of assertTrue(msg, false): the latter is an
    // anti-pattern and the original swallowed the exception detail.
    fail("Families could not be found in Region: " + e.getMessage());
  }
}
/**
 * Opening a scanner over a family the region does not have must throw.
 * (The assertion message text mirrors the positive-case test above; it is
 * shown when no exception was raised, i.e. the bogus family was accepted.)
 */
@Test
public void testGetScanner_WithNotOkFamilies() throws IOException {
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[][] families = { fam1 };
  // Setting up region -- fam2 is intentionally not created.
  this.region = initHRegion(tableName, method, CONF, families);
  Scan scan = new Scan();
  scan.addFamily(fam2);
  boolean ok = false;
  try {
    region.getScanner(scan);
  } catch (Exception e) {
    ok = true;
  }
  assertTrue("Families could not be found in Region", ok);
}
/**
 * A Scan with no families selects all families; verify the number of store
 * scanners in the scanner's heap for both an explicit subset and the
 * all-families default.
 */
@Test
public void testGetScanner_WithNoFamilies() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[] fam3 = Bytes.toBytes("fam3");
  byte[] fam4 = Bytes.toBytes("fam4");
  byte[][] families = { fam1, fam2, fam3, fam4 };
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);
  // Putting data in Region
  Put put = new Put(row1);
  put.addColumn(fam1, null, null);
  put.addColumn(fam2, null, null);
  put.addColumn(fam3, null, null);
  put.addColumn(fam4, null, null);
  region.put(put);
  Scan scan = null;
  HRegion.RegionScannerImpl is = null;
  // Testing to see how many scanners that is produced by getScanner,
  // starting
  // with known number, 2 - current = 1
  scan = new Scan();
  scan.addFamily(fam2);
  scan.addFamily(fam4);
  is = region.getScanner(scan);
  assertEquals(1, is.storeHeap.getHeap().size());
  // With no families specified, all four stores participate
  // (heap size = families.length - 1 because one scanner is "current").
  scan = new Scan();
  is = region.getScanner(scan);
  assertEquals(families.length - 1, is.storeHeap.getHeap().size());
}
/**
 * This method tests https://issues.apache.org/jira/browse/HBASE-2516.
 *
 * Opening a scanner on a closed region must fail with
 * NotServingRegionException specifically, not a generic IOException.
 *
 * @throws IOException
 */
@Test
public void testGetScanner_WithRegionClosed() throws IOException {
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[][] families = { fam1, fam2 };
  // Setting up region
  try {
    this.region = initHRegion(tableName, method, CONF, families);
  } catch (IOException e) {
    e.printStackTrace();
    fail("Got IOException during initHRegion, " + e.getMessage());
  }
  // Force the closed flag directly rather than going through close().
  region.closed.set(true);
  try {
    region.getScanner(null);
    fail("Expected to get an exception during getScanner on a region that is closed");
  } catch (NotServingRegionException e) {
    // this is the correct exception that is expected
  } catch (IOException e) {
    fail("Got wrong type of exception - should be a NotServingRegionException, " +
        "but was an IOException: "
        + e.getMessage());
  }
}
/**
 * A region scanner restricted to a subset of families must return, row by
 * row, exactly the cells of the requested families (fam2 and fam4 here).
 */
@Test
public void testRegionScanner_Next() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] row2 = Bytes.toBytes("row2");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[] fam3 = Bytes.toBytes("fam3");
  byte[] fam4 = Bytes.toBytes("fam4");
  byte[][] families = { fam1, fam2, fam3, fam4 };
  long ts = System.currentTimeMillis();
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);
  // Putting data in Region
  Put put = null;
  put = new Put(row1);
  put.addColumn(fam1, (byte[]) null, ts, null);
  put.addColumn(fam2, (byte[]) null, ts, null);
  put.addColumn(fam3, (byte[]) null, ts, null);
  put.addColumn(fam4, (byte[]) null, ts, null);
  region.put(put);
  put = new Put(row2);
  put.addColumn(fam1, (byte[]) null, ts, null);
  put.addColumn(fam2, (byte[]) null, ts, null);
  put.addColumn(fam3, (byte[]) null, ts, null);
  put.addColumn(fam4, (byte[]) null, ts, null);
  region.put(put);
  Scan scan = new Scan();
  scan.addFamily(fam2);
  scan.addFamily(fam4);
  InternalScanner is = region.getScanner(scan);
  List<Cell> res = null;
  // Result 1
  List<Cell> expected1 = new ArrayList<>();
  expected1.add(new KeyValue(row1, fam2, null, ts, KeyValue.Type.Put, null));
  expected1.add(new KeyValue(row1, fam4, null, ts, KeyValue.Type.Put, null));
  res = new ArrayList<>();
  is.next(res);
  // Guard against a vacuous pass: an empty or short result list must fail
  // the test rather than silently skipping the per-cell comparisons below.
  assertEquals(expected1.size(), res.size());
  for (int i = 0; i < res.size(); i++) {
    assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected1.get(i), res.get(i)));
  }
  // Result 2
  List<Cell> expected2 = new ArrayList<>();
  expected2.add(new KeyValue(row2, fam2, null, ts, KeyValue.Type.Put, null));
  expected2.add(new KeyValue(row2, fam4, null, ts, KeyValue.Type.Put, null));
  res = new ArrayList<>();
  is.next(res);
  assertEquals(expected2.size(), res.size());
  for (int i = 0; i < res.size(); i++) {
    assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected2.get(i), res.get(i)));
  }
}
/**
 * With explicit columns and max-versions set, a memstore-only scan must
 * return exactly the newest MAX_VERSIONS versions of the requested column.
 */
@Test
public void testScanner_ExplicitColumns_FromMemStore_EnforceVersions() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] qf1 = Bytes.toBytes("qualifier1");
  byte[] qf2 = Bytes.toBytes("qualifier2");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[][] families = { fam1 };
  long ts1 = System.currentTimeMillis();
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);
  // Putting data in Region: three versions each of qf1 and qf2.
  Put put = null;
  KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
  KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
  KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
  KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
  KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
  put = new Put(row1);
  put.add(kv13);
  put.add(kv12);
  put.add(kv11);
  put.add(kv23);
  put.add(kv22);
  put.add(kv21);
  region.put(put);
  // Expected: only qf1 is requested, newest two versions survive.
  List<Cell> expected = new ArrayList<>();
  expected.add(kv13);
  expected.add(kv12);
  Scan scan = new Scan(row1);
  scan.addColumn(fam1, qf1);
  scan.setMaxVersions(MAX_VERSIONS);
  List<Cell> actual = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(actual);
  assertEquals(false, hasNext);
  // Verify result. Assert the sizes match first so that extra, unexpected
  // cells in 'actual' cannot slip through the index-bounded loop below.
  assertEquals(expected.size(), actual.size());
  for (int i = 0; i < expected.size(); i++) {
    assertEquals(expected.get(i), actual.get(i));
  }
}
/**
 * Same versions-enforcement check as the memstore variant above, but the
 * data is flushed to store files first so the scan reads HFiles only.
 */
@Test
public void testScanner_ExplicitColumns_FromFilesOnly_EnforceVersions() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] qf1 = Bytes.toBytes("qualifier1");
  byte[] qf2 = Bytes.toBytes("qualifier2");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[][] families = { fam1 };
  long ts1 = 1; // System.currentTimeMillis();
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);
  // Putting data in Region
  Put put = null;
  KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
  KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
  KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
  KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
  KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
  put = new Put(row1);
  put.add(kv13);
  put.add(kv12);
  put.add(kv11);
  put.add(kv23);
  put.add(kv22);
  put.add(kv21);
  region.put(put);
  // Flush so the scan is served from files, not the memstore.
  region.flush(true);
  // Expected: newest two versions of each requested qualifier.
  List<Cell> expected = new ArrayList<>();
  expected.add(kv13);
  expected.add(kv12);
  expected.add(kv23);
  expected.add(kv22);
  Scan scan = new Scan(row1);
  scan.addColumn(fam1, qf1);
  scan.addColumn(fam1, qf2);
  scan.setMaxVersions(MAX_VERSIONS);
  List<Cell> actual = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(actual);
  assertEquals(false, hasNext);
  // Verify result
  for (int i = 0; i < expected.size(); i++) {
    assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
  }
}
/**
 * Versions enforcement when the four versions are spread across three store
 * files plus the memstore (flush after each of the first three puts). With
 * maxVersions=3 the oldest version of each qualifier must be dropped.
 */
@Test
public void testScanner_ExplicitColumns_FromMemStoreAndFiles_EnforceVersions() throws
    IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[][] families = { fam1 };
  byte[] qf1 = Bytes.toBytes("qualifier1");
  byte[] qf2 = Bytes.toBytes("qualifier2");
  long ts1 = 1;
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  long ts4 = ts1 + 3;
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);
  // Putting data in Region
  KeyValue kv14 = new KeyValue(row1, fam1, qf1, ts4, KeyValue.Type.Put, null);
  KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
  KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
  KeyValue kv24 = new KeyValue(row1, fam1, qf2, ts4, KeyValue.Type.Put, null);
  KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
  KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
  KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
  Put put = null;
  put = new Put(row1);
  put.add(kv14);
  put.add(kv24);
  region.put(put);
  region.flush(true);
  put = new Put(row1);
  put.add(kv23);
  put.add(kv13);
  region.put(put);
  region.flush(true);
  put = new Put(row1);
  put.add(kv22);
  put.add(kv12);
  region.put(put);
  region.flush(true);
  // Oldest versions stay in the memstore (no flush after this put).
  put = new Put(row1);
  put.add(kv21);
  put.add(kv11);
  region.put(put);
  // Expected: newest three versions of each qualifier.
  List<Cell> expected = new ArrayList<>();
  expected.add(kv14);
  expected.add(kv13);
  expected.add(kv12);
  expected.add(kv24);
  expected.add(kv23);
  expected.add(kv22);
  Scan scan = new Scan(row1);
  scan.addColumn(fam1, qf1);
  scan.addColumn(fam1, qf2);
  int versions = 3;
  scan.setMaxVersions(versions);
  List<Cell> actual = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(actual);
  assertEquals(false, hasNext);
  // Verify result
  for (int i = 0; i < expected.size(); i++) {
    assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
  }
}
/**
 * Wildcard (whole-family) scan over memstore data: with MAX_VERSIONS set,
 * only the newest two versions of each qualifier are returned.
 */
@Test
public void testScanner_Wildcard_FromMemStore_EnforceVersions() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] qf1 = Bytes.toBytes("qualifier1");
  byte[] qf2 = Bytes.toBytes("qualifier2");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[][] families = { fam1 };
  long ts1 = System.currentTimeMillis();
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);
  // Putting data in Region
  Put put = null;
  KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
  KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
  KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
  KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
  KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
  put = new Put(row1);
  put.add(kv13);
  put.add(kv12);
  put.add(kv11);
  put.add(kv23);
  put.add(kv22);
  put.add(kv21);
  region.put(put);
  // Expected
  List<Cell> expected = new ArrayList<>();
  expected.add(kv13);
  expected.add(kv12);
  expected.add(kv23);
  expected.add(kv22);
  Scan scan = new Scan(row1);
  scan.addFamily(fam1);
  scan.setMaxVersions(MAX_VERSIONS);
  List<Cell> actual = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(actual);
  assertEquals(false, hasNext);
  // Verify result
  for (int i = 0; i < expected.size(); i++) {
    assertEquals(expected.get(i), actual.get(i));
  }
}
/**
 * Wildcard scan served from store files only (data is flushed before the
 * scan): with MAX_VERSIONS set, the newest two versions of each qualifier
 * are returned.
 */
@Test
public void testScanner_Wildcard_FromFilesOnly_EnforceVersions() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] qf1 = Bytes.toBytes("qualifier1");
  byte[] qf2 = Bytes.toBytes("qualifier2");
  byte[] fam1 = Bytes.toBytes("fam1");
  long ts1 = 1; // System.currentTimeMillis();
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);
  // Putting data in Region
  Put put = null;
  KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
  KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
  KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
  KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
  KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
  put = new Put(row1);
  put.add(kv13);
  put.add(kv12);
  put.add(kv11);
  put.add(kv23);
  put.add(kv22);
  put.add(kv21);
  region.put(put);
  // Flush so the scan reads from HFiles rather than the memstore.
  region.flush(true);
  // Expected
  List<Cell> expected = new ArrayList<>();
  expected.add(kv13);
  expected.add(kv12);
  expected.add(kv23);
  expected.add(kv22);
  Scan scan = new Scan(row1);
  scan.addFamily(fam1);
  scan.setMaxVersions(MAX_VERSIONS);
  List<Cell> actual = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(actual);
  assertEquals(false, hasNext);
  // Verify result
  for (int i = 0; i < expected.size(); i++) {
    assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
  }
}
/**
 * Regression test for HBASE-1542: a scan of [row3, row4) restricted to col1
 * must return nothing, because row3 only has col2 and row4 is exclusive.
 */
@Test
public void testScanner_StopRow1542() throws IOException {
  byte[] family = Bytes.toBytes("testFamily");
  this.region = initHRegion(tableName, method, CONF, family);
  byte[] row1 = Bytes.toBytes("row111");
  byte[] row2 = Bytes.toBytes("row222");
  byte[] row3 = Bytes.toBytes("row333");
  byte[] row4 = Bytes.toBytes("row444");
  byte[] row5 = Bytes.toBytes("row555");
  byte[] col1 = Bytes.toBytes("Pub111");
  byte[] col2 = Bytes.toBytes("Pub222");
  Put put = new Put(row1);
  put.addColumn(family, col1, Bytes.toBytes(10L));
  region.put(put);
  put = new Put(row2);
  put.addColumn(family, col1, Bytes.toBytes(15L));
  region.put(put);
  put = new Put(row3);
  put.addColumn(family, col2, Bytes.toBytes(20L));
  region.put(put);
  put = new Put(row4);
  put.addColumn(family, col2, Bytes.toBytes(30L));
  region.put(put);
  put = new Put(row5);
  put.addColumn(family, col1, Bytes.toBytes(40L));
  region.put(put);
  // Scan [row3, row4) for col1 only -- no matching cells exist.
  Scan scan = new Scan(row3, row4);
  scan.setMaxVersions();
  scan.addColumn(family, col1);
  InternalScanner s = region.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  assertEquals(false, s.next(results));
  assertEquals(0, results.size());
}
/**
 * Wildcard scan with versions spread across three store files plus the
 * memstore (flush after each of the first three puts); with maxVersions=3
 * the oldest version of each qualifier must be dropped.
 */
@Test
public void testScanner_Wildcard_FromMemStoreAndFiles_EnforceVersions() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] qf1 = Bytes.toBytes("qualifier1");
  byte[] qf2 = Bytes.toBytes("quateslifier2");
  long ts1 = 1;
  long ts2 = ts1 + 1;
  long ts3 = ts1 + 2;
  long ts4 = ts1 + 3;
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);
  // Putting data in Region
  KeyValue kv14 = new KeyValue(row1, fam1, qf1, ts4, KeyValue.Type.Put, null);
  KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
  KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
  KeyValue kv24 = new KeyValue(row1, fam1, qf2, ts4, KeyValue.Type.Put, null);
  KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
  KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
  KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
  Put put = null;
  put = new Put(row1);
  put.add(kv14);
  put.add(kv24);
  region.put(put);
  region.flush(true);
  put = new Put(row1);
  put.add(kv23);
  put.add(kv13);
  region.put(put);
  region.flush(true);
  put = new Put(row1);
  put.add(kv22);
  put.add(kv12);
  region.put(put);
  region.flush(true);
  // Oldest versions stay in the memstore (no flush after this put).
  put = new Put(row1);
  put.add(kv21);
  put.add(kv11);
  region.put(put);
  // Expected: newest three versions of each qualifier.
  List<KeyValue> expected = new ArrayList<>();
  expected.add(kv14);
  expected.add(kv13);
  expected.add(kv12);
  expected.add(kv24);
  expected.add(kv23);
  expected.add(kv22);
  Scan scan = new Scan(row1);
  int versions = 3;
  scan.setMaxVersions(versions);
  List<Cell> actual = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(actual);
  assertEquals(false, hasNext);
  // Verify result
  for (int i = 0; i < expected.size(); i++) {
    assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
  }
}
/**
 * Added for HBASE-5416
 *
 * Here we test scan optimization when only subset of CFs are used in filter
 * conditions. The "essential" family drives filtering; the "joined" family
 * is loaded lazily via setLoadColumnFamiliesOnDemand.
 */
@Test
public void testScanner_JoinedScanners() throws IOException {
  byte[] cf_essential = Bytes.toBytes("essential");
  byte[] cf_joined = Bytes.toBytes("joined");
  byte[] cf_alpha = Bytes.toBytes("alpha");
  this.region = initHRegion(tableName, method, CONF, cf_essential, cf_joined, cf_alpha);
  byte[] row1 = Bytes.toBytes("row1");
  byte[] row2 = Bytes.toBytes("row2");
  byte[] row3 = Bytes.toBytes("row3");
  byte[] col_normal = Bytes.toBytes("d");
  byte[] col_alpha = Bytes.toBytes("a");
  byte[] filtered_val = Bytes.toBytes(3);
  Put put = new Put(row1);
  put.addColumn(cf_essential, col_normal, Bytes.toBytes(1));
  put.addColumn(cf_joined, col_alpha, Bytes.toBytes(1));
  region.put(put);
  put = new Put(row2);
  put.addColumn(cf_essential, col_alpha, Bytes.toBytes(2));
  put.addColumn(cf_joined, col_normal, Bytes.toBytes(2));
  put.addColumn(cf_alpha, col_alpha, Bytes.toBytes(2));
  region.put(put);
  // row3 carries filtered_val and is expected to be excluded by the filter.
  put = new Put(row3);
  put.addColumn(cf_essential, col_normal, filtered_val);
  put.addColumn(cf_joined, col_normal, filtered_val);
  region.put(put);
  // Check two things:
  // 1. result list contains expected values
  // 2. result list is sorted properly
  Scan scan = new Scan();
  Filter filter = new SingleColumnValueExcludeFilter(cf_essential, col_normal,
      CompareOp.NOT_EQUAL, filtered_val);
  scan.setFilter(filter);
  scan.setLoadColumnFamiliesOnDemand(true);
  InternalScanner s = region.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  assertTrue(s.next(results));
  assertEquals(1, results.size());
  results.clear();
  assertTrue(s.next(results));
  assertEquals(3, results.size());
  // Cells of row2 must come back in family order: alpha, essential, joined.
  assertTrue("orderCheck", CellUtil.matchingFamily(results.get(0), cf_alpha));
  assertTrue("orderCheck", CellUtil.matchingFamily(results.get(1), cf_essential));
  assertTrue("orderCheck", CellUtil.matchingFamily(results.get(2), cf_joined));
  results.clear();
  assertFalse(s.next(results));
  assertEquals(0, results.size());
}
/**
 * HBASE-5416
 *
 * Test case when scan limits amount of KVs returned on each next() call.
 * Combines lazy (on-demand) family loading with a per-next() batch limit
 * of 3 and checks the size of every batch the scanner produces.
 */
@Test
public void testScanner_JoinedScannersWithLimits() throws IOException {
  final byte[] cf_first = Bytes.toBytes("first");
  final byte[] cf_second = Bytes.toBytes("second");
  this.region = initHRegion(tableName, method, CONF, cf_first, cf_second);
  final byte[] col_a = Bytes.toBytes("a");
  final byte[] col_b = Bytes.toBytes("b");
  Put put;
  for (int i = 0; i < 10; i++) {
    put = new Put(Bytes.toBytes("r" + Integer.toString(i)));
    put.addColumn(cf_first, col_a, Bytes.toBytes(i));
    if (i < 5) {
      put.addColumn(cf_first, col_b, Bytes.toBytes(i));
      put.addColumn(cf_second, col_a, Bytes.toBytes(i));
      put.addColumn(cf_second, col_b, Bytes.toBytes(i));
    }
    region.put(put);
  }
  Scan scan = new Scan();
  scan.setLoadColumnFamiliesOnDemand(true);
  // Filter that includes everything but marks only cf_first as essential,
  // forcing cf_second onto the lazy (joined) scanner path.
  Filter bogusFilter = new FilterBase() {
    @Override
    public ReturnCode filterCell(final Cell ignored) throws IOException {
      return ReturnCode.INCLUDE;
    }
    @Override
    public boolean isFamilyEssential(byte[] name) {
      return Bytes.equals(name, cf_first);
    }
  };
  scan.setFilter(bogusFilter);
  InternalScanner s = region.getScanner(scan);
  // Our data looks like this:
  // r0: first:a, first:b, second:a, second:b
  // r1: first:a, first:b, second:a, second:b
  // r2: first:a, first:b, second:a, second:b
  // r3: first:a, first:b, second:a, second:b
  // r4: first:a, first:b, second:a, second:b
  // r5: first:a
  // r6: first:a
  // r7: first:a
  // r8: first:a
  // r9: first:a
  // But due to next's limit set to 3, we should get this:
  // r0: first:a, first:b, second:a
  // r0: second:b
  // r1: first:a, first:b, second:a
  // r1: second:b
  // r2: first:a, first:b, second:a
  // r2: second:b
  // r3: first:a, first:b, second:a
  // r3: second:b
  // r4: first:a, first:b, second:a
  // r4: second:b
  // r5: first:a
  // r6: first:a
  // r7: first:a
  // r8: first:a
  // r9: first:a
  List<Cell> results = new ArrayList<>();
  int index = 0;
  ScannerContext scannerContext = ScannerContext.newBuilder().setBatchLimit(3).build();
  while (true) {
    boolean more = s.next(results, scannerContext);
    // index >> 1 maps two batches per 4-cell row; index % 2 picks the
    // full (3-cell) batch vs. the 1-cell remainder batch.
    if ((index >> 1) < 5) {
      if (index % 2 == 0) {
        assertEquals(3, results.size());
      } else {
        assertEquals(1, results.size());
      }
    } else {
      assertEquals(1, results.size());
    }
    results.clear();
    index++;
    if (!more) {
      break;
    }
  }
}
/**
* Write an HFile block full of Cells whose qualifiers are identical over the
* first Short.MAX_VALUE+1 bytes and differ only beyond that length. See HBASE-13329.
* @throws Exception
*/
@Test
public void testLongQualifier() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  // Qualifier longer than Short.MAX_VALUE; only its final byte varies per cell.
  final byte[] qualifier = new byte[Short.MAX_VALUE + 2];
  Arrays.fill(qualifier, 0, qualifier.length - 1, (byte) 42);
  for (int version = 0; version < 10; version++) {
    qualifier[qualifier.length - 1] = (byte) version;
    Put put = new Put(Bytes.toBytes("row"));
    put.addColumn(family, qualifier, qualifier);
    region.put(put);
  }
  // Persisting the block must not choke on the oversized qualifiers.
  region.flush(false);
}
/**
* Flushes the cache in a thread while scanning. The tests verify that the
* scan is coherent - e.g. the returned results are always of the same or
* later update as the previous results.
*
* @throws IOException
* scan / compact
* @throws InterruptedException
* thread join
*/
@Test
public void testFlushCacheWhileScanning() throws IOException, InterruptedException {
byte[] family = Bytes.toBytes("family");
int numRows = 1000;
int flushAndScanInterval = 10;
int compactInterval = 10 * flushAndScanInterval;
this.region = initHRegion(tableName, method, CONF, family);
FlushThread flushThread = new FlushThread();
try {
flushThread.start();
Scan scan = new Scan();
scan.addFamily(family);
// Only rows whose qual1 value equals 5 match the scan; expectedCount tracks
// how many such rows have been written so far.
scan.setFilter(new SingleColumnValueFilter(family, qual1, CompareOp.EQUAL,
new BinaryComparator(Bytes.toBytes(5L))));
int expectedCount = 0;
List<Cell> res = new ArrayList<>();
boolean toggle = true;
for (long i = 0; i < numRows; i++) {
Put put = new Put(Bytes.toBytes(i));
put.setDurability(Durability.SKIP_WAL);
put.addColumn(family, qual1, Bytes.toBytes(i % 10));
region.put(put);
// Major-compact periodically to keep the store realistic.
if (i != 0 && i % compactInterval == 0) {
LOG.debug("iteration = " + i+ " ts="+System.currentTimeMillis());
region.compact(true);
}
if (i % 10 == 5L) {
expectedCount++;
}
if (i != 0 && i % flushAndScanInterval == 0) {
res.clear();
InternalScanner scanner = region.getScanner(scan);
// Alternate flushing before vs. after draining the scanner so both
// interleavings are exercised; the open scanner must see a consistent view.
if (toggle) {
flushThread.flush();
}
while (scanner.next(res))
;
if (!toggle) {
flushThread.flush();
}
assertEquals("toggle="+toggle+"i=" + i + " ts="+System.currentTimeMillis(),
expectedCount, res.size());
toggle = !toggle;
}
}
} finally {
// Always stop the flusher and close the region, even on assertion failure.
try {
flushThread.done();
flushThread.join();
flushThread.checkNoError();
} catch (InterruptedException ie) {
LOG.warn("Caught exception when joining with flushThread", ie);
}
HBaseTestingUtility.closeRegionAndWAL(this.region);
this.region = null;
}
}
// Background thread that flushes the enclosing test's region each time flush()
// is called, and records any flush error for later inspection via checkNoError().
protected class FlushThread extends Thread {
// Set by done(); volatile so the worker loop observes it promptly.
private volatile boolean done;
private Throwable error = null;
FlushThread() {
super("FlushThread");
}
// Signals the worker to exit; the interrupt wakes it out of wait().
public void done() {
done = true;
synchronized (this) {
interrupt();
}
}
// Fails the calling test if a flush raised an exception.
public void checkNoError() {
if (error != null) {
assertNull(error);
}
}
@Override
public void run() {
done = false;
while (!done) {
// Sleep until flush() notifies or done() interrupts.
// NOTE(review): a notify() arriving before this wait() is entered would be
// lost and that flush request skipped — presumably tolerable for these
// tests, but confirm if a test ever depends on every flush() happening.
synchronized (this) {
try {
wait();
} catch (InterruptedException ignored) {
if (done) {
break;
}
}
}
try {
region.flush(true);
} catch (IOException e) {
// Only record the error if we were not already shutting down.
if (!done) {
LOG.error("Error while flushing cache", e);
error = e;
}
break;
} catch (Throwable t) {
LOG.error("Uncaught exception", t);
throw t;
}
}
}
// Requests one flush from the worker thread; returns immediately.
public void flush() {
synchronized (this) {
notify();
}
}
}
/**
* Writes very wide records and scans for the latest every time. Flushes and
* compacts the region every now and then to keep things realistic.
*
* @throws IOException
* by flush / scan / compaction
* @throws InterruptedException
* when joining threads
*/
@Test
public void testWritesWhileScanning() throws IOException, InterruptedException {
int testCount = 100;
int numRows = 1;
int numFamilies = 10;
int numQualifiers = 100;
int flushInterval = 7;
int compactInterval = 5 * flushInterval;
byte[][] families = new byte[numFamilies][];
for (int i = 0; i < numFamilies; i++) {
families[i] = Bytes.toBytes("family" + i);
}
byte[][] qualifiers = new byte[numQualifiers][];
for (int i = 0; i < numQualifiers; i++) {
qualifiers[i] = Bytes.toBytes("qual" + i);
}
this.region = initHRegion(tableName, method, CONF, families);
FlushThread flushThread = new FlushThread();
PutThread putThread = new PutThread(numRows, families, qualifiers);
try {
putThread.start();
// Don't start verifying until at least one full row exists.
putThread.waitForFirstPut();
flushThread.start();
Scan scan = new Scan(Bytes.toBytes("row0"), Bytes.toBytes("row1"));
// Each complete row holds one cell per (family, qualifier) combination.
int expectedCount = numFamilies * numQualifiers;
List<Cell> res = new ArrayList<>();
long prevTimestamp = 0L;
for (int i = 0; i < testCount; i++) {
if (i != 0 && i % compactInterval == 0) {
region.compact(true);
for (HStore store : region.getStores()) {
store.closeAndArchiveCompactedFiles();
}
}
if (i != 0 && i % flushInterval == 0) {
flushThread.flush();
}
boolean previousEmpty = res.isEmpty();
res.clear();
InternalScanner scanner = region.getScanner(scan);
while (scanner.next(res))
;
if (!res.isEmpty() || !previousEmpty || i > compactInterval) {
// Scan must see a complete row, and row timestamps must never go backwards.
assertEquals("i=" + i, expectedCount, res.size());
long timestamp = res.get(0).getTimestamp();
assertTrue("Timestamps were broke: " + timestamp + " prev: " + prevTimestamp,
timestamp >= prevTimestamp);
prevTimestamp = timestamp;
}
}
putThread.done();
region.flush(true);
} finally {
try {
flushThread.done();
flushThread.join();
flushThread.checkNoError();
putThread.join();
putThread.checkNoError();
} catch (InterruptedException ie) {
LOG.warn("Caught exception when joining with flushThread", ie);
}
try {
HBaseTestingUtility.closeRegionAndWAL(this.region);
} catch (DroppedSnapshotException dse) {
// We could get this on way out because we interrupt the background flusher and it could
// fail anywhere causing a DSE over in the background flusher... only it is not properly
// dealt with so could still be memory hanging out when we get to here -- memory we can't
// flush because the accounting is 'off' since original DSE.
}
this.region = null;
}
}
// Background thread that repeatedly writes full rows (one cell per
// family/qualifier pair, all carrying the same value) and occasionally issues
// a versioned delete. Errors are recorded and surfaced via checkNoError().
protected class PutThread extends Thread {
private volatile boolean done;
// Monotonic counter; also used as the cell timestamp and value for each put.
private volatile int numPutsFinished = 0;
private Throwable error = null;
private int numRows;
private byte[][] families;
private byte[][] qualifiers;
private PutThread(int numRows, byte[][] families, byte[][] qualifiers) {
super("PutThread");
this.numRows = numRows;
this.families = families;
this.qualifiers = qualifiers;
}
/**
* Block calling thread until this instance of PutThread has put at least one row.
*/
public void waitForFirstPut() throws InterruptedException {
// wait until put thread actually puts some data
while (isAlive() && numPutsFinished == 0) {
checkNoError();
Thread.sleep(50);
}
}
// Signals the worker to exit; the interrupt breaks a blocked region.put().
public void done() {
done = true;
synchronized (this) {
interrupt();
}
}
// Fails the calling test if a put/delete raised an IOException.
public void checkNoError() {
if (error != null) {
assertNull(error);
}
}
@Override
public void run() {
done = false;
while (!done) {
try {
for (int r = 0; r < numRows; r++) {
byte[] row = Bytes.toBytes("row" + r);
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
// All cells of the row carry the same value so a reader can verify
// that a row is never observed half-written.
byte[] value = Bytes.toBytes(String.valueOf(numPutsFinished));
for (byte[] family : families) {
for (byte[] qualifier : qualifiers) {
put.addColumn(family, qualifier, numPutsFinished, value);
}
}
region.put(put);
numPutsFinished++;
if (numPutsFinished > 0 && numPutsFinished % 47 == 0) {
System.out.println("put iteration = " + numPutsFinished);
Delete delete = new Delete(row, (long) numPutsFinished - 30);
region.delete(delete);
}
// NOTE(review): numPutsFinished is incremented a second time here, so the
// counter advances by two per row written — apparently intentional (keeps
// put timestamps even-numbered), but confirm against reader expectations.
numPutsFinished++;
}
} catch (InterruptedIOException e) {
// This is fine. It means we are done, or didn't get the lock on time
LOG.info("Interrupted", e);
} catch (IOException e) {
LOG.error("Error while putting records", e);
error = e;
break;
}
}
}
}
/**
* Writes very wide records and gets the latest row every time. Flushes and
* compacts the region aggressively to catch issues.
*
* @throws IOException
* by flush / scan / compaction
* @throws InterruptedException
* when joining threads
*/
@Test
public void testWritesWhileGetting() throws Exception {
int testCount = 50;
int numRows = 1;
int numFamilies = 10;
int numQualifiers = 100;
int compactInterval = 100;
byte[][] families = new byte[numFamilies][];
for (int i = 0; i < numFamilies; i++) {
families[i] = Bytes.toBytes("family" + i);
}
byte[][] qualifiers = new byte[numQualifiers][];
for (int i = 0; i < numQualifiers; i++) {
qualifiers[i] = Bytes.toBytes("qual" + i);
}
// This test flushes constantly and can cause many files to be created,
// possibly
// extending over the ulimit. Make sure compactions are aggressive in
// reducing
// the number of HFiles created.
Configuration conf = HBaseConfiguration.create(CONF);
conf.setInt("hbase.hstore.compaction.min", 1);
conf.setInt("hbase.hstore.compaction.max", 1000);
this.region = initHRegion(tableName, method, conf, families);
PutThread putThread = null;
MultithreadedTestUtil.TestContext ctx = new MultithreadedTestUtil.TestContext(conf);
try {
putThread = new PutThread(numRows, families, qualifiers);
putThread.start();
putThread.waitForFirstPut();
// Add a thread that flushes as fast as possible
ctx.addThread(new RepeatingTestThread(ctx) {
@Override
public void doAnAction() throws Exception {
region.flush(true);
// Compact regularly to avoid creating too many files and exceeding
// the ulimit.
region.compact(false);
for (HStore store : region.getStores()) {
store.closeAndArchiveCompactedFiles();
}
}
});
ctx.startThreads();
Get get = new Get(Bytes.toBytes("row0"));
Result result = null;
// A complete row holds one cell per (family, qualifier) combination.
int expectedCount = numFamilies * numQualifiers;
long prevTimestamp = 0L;
for (int i = 0; i < testCount; i++) {
LOG.info("testWritesWhileGetting verify turn " + i);
boolean previousEmpty = result == null || result.isEmpty();
result = region.get(get);
if (!result.isEmpty() || !previousEmpty || i > compactInterval) {
assertEquals("i=" + i, expectedCount, result.size());
// TODO this was removed, now what dangit?!
// search looking for the qualifier in question?
// Row timestamps must never move backwards across gets.
long timestamp = 0;
for (Cell kv : result.rawCells()) {
if (CellUtil.matchingFamily(kv, families[0])
&& CellUtil.matchingQualifier(kv, qualifiers[0])) {
timestamp = kv.getTimestamp();
}
}
assertTrue(timestamp >= prevTimestamp);
prevTimestamp = timestamp;
// PutThread writes the same value into every cell of the row, so any
// mismatch between adjacent cells means a torn (non-atomic) read.
Cell previousKV = null;
for (Cell kv : result.rawCells()) {
byte[] thisValue = CellUtil.cloneValue(kv);
if (previousKV != null) {
if (Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue) != 0) {
LOG.warn("These two KV should have the same value." + " Previous KV:" + previousKV
+ "(memStoreTS:" + previousKV.getSequenceId() + ")" + ", New KV: " + kv
+ "(memStoreTS:" + kv.getSequenceId() + ")");
assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue));
}
}
previousKV = kv;
}
}
}
} finally {
if (putThread != null)
putThread.done();
region.flush(true);
if (putThread != null) {
putThread.join();
putThread.checkNoError();
}
ctx.stop();
HBaseTestingUtility.closeRegionAndWAL(this.region);
this.region = null;
}
}
@Test
public void testHolesInMeta() throws Exception {
  byte[] family = Bytes.toBytes("family");
  // Region only serves the key range ["x", "z").
  this.region = initHRegion(tableName, Bytes.toBytes("x"), Bytes.toBytes("z"), method, CONF,
      false, family);
  // "a" sorts before the region's start key, so the get must be rejected.
  byte[] rowNotServed = Bytes.toBytes("a");
  Get g = new Get(rowNotServed);
  try {
    region.get(g);
    // Previously a bare fail() — a failure here gave no hint of what went wrong.
    fail("Get for row outside the region's key range should throw WrongRegionException");
  } catch (WrongRegionException x) {
    // expected
  }
  // A row inside ["x", "z") must be served without error.
  byte[] row = Bytes.toBytes("y");
  g = new Get(row);
  region.get(g);
}
@Test
public void testIndexesScanWithOneDeletedRow() throws IOException {
  byte[] family = Bytes.toBytes("family");
  // Setting up region: write row 1, flush, delete it, then write row 2 to memstore.
  this.region = initHRegion(tableName, method, CONF, family);
  Put put = new Put(Bytes.toBytes(1L));
  put.addColumn(family, qual1, 1L, Bytes.toBytes(1L));
  region.put(put);
  region.flush(true);
  region.delete(new Delete(Bytes.toBytes(1L), 1L));
  put = new Put(Bytes.toBytes(2L));
  put.addColumn(family, qual1, 2L, Bytes.toBytes(2L));
  region.put(put);
  // Range filter 0 <= qual1 <= 3; only the surviving row 2 should match.
  SingleColumnValueFilter lowerBound = new SingleColumnValueFilter(family, qual1,
      CompareOp.GREATER_OR_EQUAL, new BinaryComparator(Bytes.toBytes(0L)));
  SingleColumnValueFilter upperBound = new SingleColumnValueFilter(family, qual1,
      CompareOp.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes(3L)));
  Scan idxScan = new Scan();
  idxScan.addFamily(family);
  idxScan.setFilter(
      new FilterList(FilterList.Operator.MUST_PASS_ALL, Arrays.<Filter> asList(lowerBound,
          upperBound)));
  InternalScanner scanner = region.getScanner(idxScan);
  List<Cell> res = new ArrayList<>();
  while (scanner.next(res)) {
    // Keep draining; res accumulates cells across calls.
  }
  assertEquals(1L, res.size());
}
// ////////////////////////////////////////////////////////////////////////////
// Bloom filter test
// ////////////////////////////////////////////////////////////////////////////
@Test
public void testBloomFilterSize() throws IOException {
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] qf1 = Bytes.toBytes("col");
  byte[] val1 = Bytes.toBytes("value1");
  // Create a table whose family keeps all versions and uses a ROWCOL bloom filter.
  HColumnDescriptor hcd = new HColumnDescriptor(fam1).setMaxVersions(Integer.MAX_VALUE)
      .setBloomFilterType(BloomType.ROWCOL);
  HTableDescriptor htd = new HTableDescriptor(tableName);
  htd.addFamily(hcd);
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  this.region = TEST_UTIL.createLocalHRegion(info, htd);
  // Locals renamed to camelCase per Java naming convention.
  int numUniqueRows = 10;
  int duplicateMultiplier = 2;
  int numStoreFiles = 4;
  int version = 0;
  // Produce numStoreFiles store files, each containing duplicateMultiplier
  // versions of every unique row.
  for (int f = 0; f < numStoreFiles; f++) {
    for (int i = 0; i < duplicateMultiplier; i++) {
      for (int j = 0; j < numUniqueRows; j++) {
        Put put = new Put(Bytes.toBytes("row" + j));
        put.setDurability(Durability.SKIP_WAL);
        long ts = version++;
        put.addColumn(fam1, qf1, ts, val1);
        region.put(put);
      }
    }
    region.flush(true);
  }
  // Before compaction: each file holds every version it received, but the
  // ROWCOL bloom filter stores each (row, col) key only once.
  HStore store = region.getStore(fam1);
  Collection<HStoreFile> storeFiles = store.getStorefiles();
  for (HStoreFile storefile : storeFiles) {
    StoreFileReader reader = storefile.getReader();
    reader.loadFileInfo();
    reader.loadBloomfilter();
    assertEquals(numUniqueRows * duplicateMultiplier, reader.getEntries());
    assertEquals(numUniqueRows, reader.getFilterEntries());
  }
  region.compact(true);
  // After major compaction: one file with all versions, yet the bloom filter
  // entry count is still just the number of unique (row, col) keys.
  storeFiles = store.getStorefiles();
  for (HStoreFile storefile : storeFiles) {
    StoreFileReader reader = storefile.getReader();
    reader.loadFileInfo();
    reader.loadBloomfilter();
    assertEquals(numUniqueRows * duplicateMultiplier * numStoreFiles, reader.getEntries());
    assertEquals(numUniqueRows, reader.getFilterEntries());
  }
}
@Test
public void testAllColumnsWithBloomFilter() throws IOException {
byte[] TABLE = Bytes.toBytes(name.getMethodName());
byte[] FAMILY = Bytes.toBytes("family");
// Create table with a ROWCOL bloom filter and unlimited versions.
HColumnDescriptor hcd = new HColumnDescriptor(FAMILY).setMaxVersions(Integer.MAX_VALUE)
.setBloomFilterType(BloomType.ROWCOL);
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE));
htd.addFamily(hcd);
HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
this.region = TEST_UTIL.createLocalHRegion(info, htd);
// For row:0, col:0: insert versions 1 through 4 (loop below writes ts 1..4;
// the original comment incorrectly said "1 through 5").
byte[] row = Bytes.toBytes("row:" + 0);
byte[] column = Bytes.toBytes("column:" + 0);
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
for (long idx = 1; idx <= 4; idx++) {
put.addColumn(FAMILY, column, idx, Bytes.toBytes("value-version-" + idx));
}
region.put(put);
// Flush
region.flush(true);
// Get all versions of the row back from the flushed file.
Get get = new Get(row);
get.setMaxVersions();
Cell[] kvs = region.get(get).rawCells();
// All four versions must be returned, newest (ts=4) first.
assertEquals(4, kvs.length);
checkOneCell(kvs[0], FAMILY, 0, 0, 4);
checkOneCell(kvs[1], FAMILY, 0, 0, 3);
checkOneCell(kvs[2], FAMILY, 0, 0, 2);
checkOneCell(kvs[3], FAMILY, 0, 0, 1);
}
/**
* Testcase to cover bug-fix for HBASE-2823 Ensures correct delete when
* issuing delete row on columns with bloom filter set to row+col
* (BloomType.ROWCOL)
*/
@Test
public void testDeleteRowWithBloomFilter() throws IOException {
  byte[] familyName = Bytes.toBytes("familyName");
  // Table whose family uses a row+col bloom filter (HBASE-2823 regression check).
  HColumnDescriptor hcd = new HColumnDescriptor(familyName).setMaxVersions(Integer.MAX_VALUE)
      .setBloomFilterType(BloomType.ROWCOL);
  HTableDescriptor htd = new HTableDescriptor(tableName);
  htd.addFamily(hcd);
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  this.region = TEST_UTIL.createLocalHRegion(info, htd);
  byte[] row = Bytes.toBytes("row1");
  byte[] col = Bytes.toBytes("col1");
  // Write one cell and persist it to a store file.
  Put put = new Put(row);
  put.addColumn(familyName, col, 1, Bytes.toBytes("SomeRandomValue"));
  region.put(put);
  region.flush(true);
  // Delete the whole row and persist the tombstone in a second file.
  region.delete(new Delete(row));
  region.flush(true);
  // The row delete must win despite the ROWCOL bloom filter: nothing comes back.
  Get get = new Get(row);
  get.addColumn(familyName, col);
  Cell[] keyValues = region.get(get).rawCells();
  assertEquals(0, keyValues.length);
}
@Test
public void testgetHDFSBlocksDistribution() throws Exception {
  HBaseTestingUtility htu = new HBaseTestingUtility();
  // Why do we set the block size in this test? If we set it smaller than the kvs, then we'll
  // break up the file in to more pieces that can be distributed across the three nodes and we
  // won't be able to have the condition this test asserts; that at least one node has
  // a copy of all replicas -- if small block size, then blocks are spread evenly across the
  // the three nodes. hfilev3 with tags seems to put us over the block size. St.Ack.
  // final int DEFAULT_BLOCK_SIZE = 1024;
  // htu.getConfiguration().setLong("dfs.blocksize", DEFAULT_BLOCK_SIZE);
  htu.getConfiguration().setInt("dfs.replication", 2);
  // set up a cluster with 3 nodes
  MiniHBaseCluster cluster = null;
  // Java-style array declarations (were C-style: "String dataNodeHosts[]").
  String[] dataNodeHosts = new String[] { "host1", "host2", "host3" };
  int regionServersCount = 3;
  try {
    StartMiniClusterOption option = StartMiniClusterOption.builder()
        .numRegionServers(regionServersCount).dataNodeHosts(dataNodeHosts).build();
    cluster = htu.startMiniCluster(option);
    byte[][] families = { fam1, fam2 };
    Table ht = htu.createTable(tableName, families);
    // Setting up region
    byte[] row = Bytes.toBytes("row1");
    byte[] col = Bytes.toBytes("col1");
    Put put = new Put(row);
    put.addColumn(fam1, col, 1, Bytes.toBytes("test1"));
    put.addColumn(fam2, col, 1, Bytes.toBytes("test2"));
    ht.put(put);
    HRegion firstRegion = htu.getHBaseCluster().getRegions(tableName).get(0);
    firstRegion.flush(true);
    HDFSBlocksDistribution blocksDistribution1 = firstRegion.getHDFSBlocksDistribution();
    // Given the default replication factor is 2 and we have 2 HFiles,
    // we will have total of 4 replica of blocks on 3 datanodes; thus there
    // must be at least one host that have replica for 2 HFiles. That host's
    // weight will be equal to the unique block weight.
    long uniqueBlocksWeight1 = blocksDistribution1.getUniqueBlocksTotalWeight();
    StringBuilder sb = new StringBuilder();
    for (String host: blocksDistribution1.getTopHosts()) {
      if (sb.length() > 0) sb.append(", ");
      sb.append(host);
      sb.append("=");
      sb.append(blocksDistribution1.getWeight(host));
    }
    String topHost = blocksDistribution1.getTopHosts().get(0);
    long topHostWeight = blocksDistribution1.getWeight(topHost);
    String msg = "uniqueBlocksWeight=" + uniqueBlocksWeight1 + ", topHostWeight=" +
        topHostWeight + ", topHost=" + topHost + "; " + sb.toString();
    LOG.info(msg);
    // assertEquals reports both values on failure (was assertTrue(msg, a == b)).
    assertEquals(msg, uniqueBlocksWeight1, topHostWeight);
    // use the static method to compute the value, it should be the same.
    // static method is used by load balancer or other components
    HDFSBlocksDistribution blocksDistribution2 = HRegion.computeHDFSBlocksDistribution(
        htu.getConfiguration(), firstRegion.getTableDescriptor(), firstRegion.getRegionInfo());
    long uniqueBlocksWeight2 = blocksDistribution2.getUniqueBlocksTotalWeight();
    assertEquals(uniqueBlocksWeight1, uniqueBlocksWeight2);
    ht.close();
  } finally {
    if (cluster != null) {
      htu.shutdownMiniCluster();
    }
  }
}
/**
* Testcase to check state of region initialization task set to ABORTED or not
* if any exceptions during initialization
*
* @throws Exception
*/
@Test
public void testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization() throws Exception {
  HRegionInfo info;
  try {
    // File system mock whose exists() always throws, forcing initialize() to fail.
    FileSystem fs = Mockito.mock(FileSystem.class);
    Mockito.when(fs.exists((Path) Mockito.anyObject())).thenThrow(new IOException());
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor("cf"));
    info = new HRegionInfo(htd.getTableName(), HConstants.EMPTY_BYTE_ARRAY,
        HConstants.EMPTY_BYTE_ARRAY, false);
    Path path = new Path(dir + "testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization");
    region = HRegion.newHRegion(path, null, fs, CONF, info, htd, null);
    // region initialization throws IOException and set task state to ABORTED.
    region.initialize();
    fail("Region initialization should fail due to IOException");
  } catch (IOException io) {
    List<MonitoredTask> tasks = TaskMonitor.get().getTasks();
    // Previously the test passed vacuously if no monitored task matched the
    // region; track whether one was actually found and assert it below.
    boolean foundTask = false;
    for (MonitoredTask monitoredTask : tasks) {
      if (!(monitoredTask instanceof MonitoredRPCHandler)
          && monitoredTask.getDescription().contains(region.toString())) {
        foundTask = true;
        assertTrue("Region state should be ABORTED.",
            monitoredTask.getState().equals(MonitoredTask.State.ABORTED));
        break;
      }
    }
    assertTrue("Expected a monitored task for the failed region initialization", foundTask);
  }
}
/**
* Verifies that the .regioninfo file is written on region creation and that
* is recreated if missing during region opening.
*/
@Test
public void testRegionInfoFileCreation() throws IOException {
Path rootDir = new Path(dir + "testRegionInfoFileCreation");
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
htd.addFamily(new HColumnDescriptor("cf"));
HRegionInfo hri = new HRegionInfo(htd.getTableName());
// Create a region and skip the initialization (like CreateTableHandler)
region = HBaseTestingUtility.createRegionAndWAL(hri, rootDir, CONF, htd, false);
Path regionDir = region.getRegionFileSystem().getRegionDir();
FileSystem fs = region.getRegionFileSystem().getFileSystem();
HBaseTestingUtility.closeRegionAndWAL(region);
Path regionInfoFile = new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE);
// Verify that the .regioninfo file is present
assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir",
fs.exists(regionInfoFile));
// Try to open the region; opening must not relocate or recreate the dir.
region = HRegion.openHRegion(rootDir, hri, htd, null, CONF);
assertEquals(regionDir, region.getRegionFileSystem().getRegionDir());
HBaseTestingUtility.closeRegionAndWAL(region);
// Verify that the .regioninfo file is still there
assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir",
fs.exists(regionInfoFile));
// Remove the .regioninfo file and verify is recreated on region open
fs.delete(regionInfoFile, true);
assertFalse(HRegionFileSystem.REGION_INFO_FILE + " should be removed from the region dir",
fs.exists(regionInfoFile));
region = HRegion.openHRegion(rootDir, hri, htd, null, CONF);
// region = TEST_UTIL.openHRegion(hri, htd);
assertEquals(regionDir, region.getRegionFileSystem().getRegionDir());
HBaseTestingUtility.closeRegionAndWAL(region);
// Verify that the .regioninfo file is still there
assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir",
fs.exists(new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE)));
// Null out the field so the class-level teardown does not double-close it.
region = null;
}
/**
* TestCase for increment
*/
/**
 * Worker that applies a fixed number of +1 increments to a single well-known
 * row/column, stopping early if an increment fails with an IOException.
 */
private static class Incrementer implements Runnable {
  private final static byte[] incRow = Bytes.toBytes("incRow");
  private final static byte[] family = Bytes.toBytes("family");
  private final static byte[] qualifier = Bytes.toBytes("qualifier");
  private final static long ONE = 1L;
  private HRegion region;
  private int incCounter;

  public Incrementer(HRegion region, int incCounter) {
    this.region = region;
    this.incCounter = incCounter;
  }

  @Override
  public void run() {
    int performed = 0;
    while (performed < incCounter) {
      Increment inc = new Increment(incRow);
      inc.addColumn(family, qualifier, ONE);
      performed++;
      try {
        region.increment(inc);
      } catch (IOException e) {
        LOG.info("Count=" + performed + ", " + e);
        break;
      }
    }
  }
}
/**
* Test case to check increment function with memstore flushing
* @throws Exception
*/
@Test
public void testParallelIncrementWithMemStoreFlush() throws Exception {
  byte[] family = Incrementer.family;
  this.region = initHRegion(tableName, method, CONF, family);
  final HRegion region = this.region;
  final AtomicBoolean incrementDone = new AtomicBoolean(false);
  // Flush the region continuously until every incrementer has finished.
  Runnable flusher = new Runnable() {
    @Override
    public void run() {
      while (!incrementDone.get()) {
        try {
          region.flush(true);
        } catch (Exception e) {
          e.printStackTrace();
        }
      }
    }
  };
  // after all increment finished, the row will increment to 20*100 = 2000
  final int threadNum = 20;
  final int incCounter = 100;
  final long expected = (long) threadNum * incCounter;
  Thread flushThread = new Thread(flusher);
  Thread[] incrementers = new Thread[threadNum];
  for (int i = 0; i < threadNum; i++) {
    incrementers[i] = new Thread(new Incrementer(this.region, incCounter));
    incrementers[i].start();
  }
  flushThread.start();
  for (Thread incrementer : incrementers) {
    incrementer.join();
  }
  incrementDone.set(true);
  flushThread.join();
  // Only the newest version matters; it must equal the total increment count.
  Get get = new Get(Incrementer.incRow);
  get.addColumn(Incrementer.family, Incrementer.qualifier);
  get.setMaxVersions(1);
  Result res = this.region.get(get);
  List<Cell> kvs = res.getColumnCells(Incrementer.family, Incrementer.qualifier);
  // we just got the latest version
  assertEquals(1, kvs.size());
  Cell kv = kvs.get(0);
  assertEquals(expected, Bytes.toLong(kv.getValueArray(), kv.getValueOffset()));
}
/**
* TestCase for append
*/
/**
 * Worker that appends the single byte "a" to one well-known row/column a fixed
 * number of times, stopping early if an append fails with an IOException.
 */
private static class Appender implements Runnable {
  private final static byte[] appendRow = Bytes.toBytes("appendRow");
  private final static byte[] family = Bytes.toBytes("family");
  private final static byte[] qualifier = Bytes.toBytes("qualifier");
  private final static byte[] CHAR = Bytes.toBytes("a");
  private HRegion region;
  private int appendCounter;

  public Appender(HRegion region, int appendCounter) {
    this.region = region;
    this.appendCounter = appendCounter;
  }

  @Override
  public void run() {
    int performed = 0;
    while (performed < appendCounter) {
      Append app = new Append(appendRow);
      app.addColumn(family, qualifier, CHAR);
      performed++;
      try {
        region.append(app);
      } catch (IOException e) {
        LOG.info("Count=" + performed + ", max=" + appendCounter + ", " + e);
        break;
      }
    }
  }
}
/**
* Test case to check append function with memstore flushing
* @throws Exception
*/
@Test
public void testParallelAppendWithMemStoreFlush() throws Exception {
  byte[] family = Appender.family;
  this.region = initHRegion(tableName, method, CONF, family);
  final HRegion region = this.region;
  final AtomicBoolean appendDone = new AtomicBoolean(false);
  // Flush the region continuously until every appender has finished.
  Runnable flusher = new Runnable() {
    @Override
    public void run() {
      while (!appendDone.get()) {
        try {
          region.flush(true);
        } catch (Exception e) {
          e.printStackTrace();
        }
      }
    }
  };
  // After all append finished, the value will append to threadNum *
  // appendCounter Appender.CHAR
  int threadNum = 20;
  int appendCounter = 100;
  byte[] expected = new byte[threadNum * appendCounter];
  // Appender.CHAR is a single byte ("a"); Arrays.fill replaces the original
  // per-index System.arraycopy loop.
  Arrays.fill(expected, Appender.CHAR[0]);
  Thread[] appenders = new Thread[threadNum];
  Thread flushThread = new Thread(flusher);
  for (int i = 0; i < threadNum; i++) {
    appenders[i] = new Thread(new Appender(this.region, appendCounter));
    appenders[i].start();
  }
  flushThread.start();
  for (int i = 0; i < threadNum; i++) {
    appenders[i].join();
  }
  appendDone.set(true);
  flushThread.join();
  // Only the newest version matters; its value must be the full appended string.
  Get get = new Get(Appender.appendRow);
  get.addColumn(Appender.family, Appender.qualifier);
  get.setMaxVersions(1);
  Result res = this.region.get(get);
  List<Cell> kvs = res.getColumnCells(Appender.family, Appender.qualifier);
  // we just got the latest version
  assertEquals(1, kvs.size());
  Cell kv = kvs.get(0);
  // Arrays.copyOfRange replaces the manual buffer + System.arraycopy.
  byte[] appendResult = Arrays.copyOfRange(kv.getValueArray(), kv.getValueOffset(),
      kv.getValueOffset() + kv.getValueLength());
  assertArrayEquals(expected, appendResult);
}
/**
* Test case to check put function with memstore flushing for same row, same ts
* @throws Exception
*/
@Test
public void testPutWithMemStoreFlush() throws Exception {
  byte[] family = Bytes.toBytes("family");
  byte[] qualifier = Bytes.toBytes("qualifier");
  byte[] row = Bytes.toBytes("putRow");
  this.region = initHRegion(tableName, method, CONF, family);
  // First write at ts=1234567; must be visible both before and after a flush.
  Put put = new Put(row);
  put.addColumn(family, qualifier, 1234567L, Bytes.toBytes("value0"));
  region.put(put);
  assertSingleVersionValue(row, family, qualifier, Bytes.toBytes("value0"));
  region.flush(true);
  assertSingleVersionValue(row, family, qualifier, Bytes.toBytes("value0"));
  // Overwrite the same row at the same timestamp; the new value must replace
  // the old one, whether the old one lives in the memstore or a store file.
  put = new Put(row);
  put.addColumn(family, qualifier, 1234567L, Bytes.toBytes("value1"));
  region.put(put);
  assertSingleVersionValue(row, family, qualifier, Bytes.toBytes("value1"));
  region.flush(true);
  assertSingleVersionValue(row, family, qualifier, Bytes.toBytes("value1"));
}

/**
 * Asserts the row holds exactly one version of family:qualifier whose value
 * equals {@code expected}. Extracted from testPutWithMemStoreFlush, where the
 * same get-and-verify sequence was duplicated four times.
 */
private void assertSingleVersionValue(byte[] row, byte[] family, byte[] qualifier,
    byte[] expected) throws IOException {
  Get get = new Get(row);
  get.addColumn(family, qualifier);
  get.setMaxVersions();
  List<Cell> kvs = this.region.get(get).getColumnCells(family, qualifier);
  assertEquals(1, kvs.size());
  assertArrayEquals(expected, CellUtil.cloneValue(kvs.get(0)));
}
@Test
public void testDurability() throws Exception {
// there are 5 x 5 cases:
// table durability(SYNC,FSYNC,ASYNC,SKIP,USE_DEFAULT) x mutation
// durability(SYNC,FSYNC,ASYNC,SKIP,USE_DEFAULT)
// expected cases for append and sync wal
durabilityTest(method, Durability.SYNC_WAL, Durability.SYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.SYNC_WAL, Durability.FSYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.SYNC_WAL, Durability.USE_DEFAULT, 0, true, true, false);
durabilityTest(method, Durability.FSYNC_WAL, Durability.SYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.FSYNC_WAL, Durability.FSYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.FSYNC_WAL, Durability.USE_DEFAULT, 0, true, true, false);
durabilityTest(method, Durability.ASYNC_WAL, Durability.SYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.ASYNC_WAL, Durability.FSYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.SKIP_WAL, Durability.SYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.SKIP_WAL, Durability.FSYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.USE_DEFAULT, Durability.SYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.USE_DEFAULT, Durability.FSYNC_WAL, 0, true, true, false);
durabilityTest(method, Durability.USE_DEFAULT, Durability.USE_DEFAULT, 0, true, true, false);
// expected cases for async wal
// (first without a timeout: append but no immediate sync; then with a 5s
// timeout: the deferred sync from the log syncer is expected)
durabilityTest(method, Durability.SYNC_WAL, Durability.ASYNC_WAL, 0, true, false, false);
durabilityTest(method, Durability.FSYNC_WAL, Durability.ASYNC_WAL, 0, true, false, false);
durabilityTest(method, Durability.ASYNC_WAL, Durability.ASYNC_WAL, 0, true, false, false);
durabilityTest(method, Durability.SKIP_WAL, Durability.ASYNC_WAL, 0, true, false, false);
durabilityTest(method, Durability.USE_DEFAULT, Durability.ASYNC_WAL, 0, true, false, false);
durabilityTest(method, Durability.ASYNC_WAL, Durability.USE_DEFAULT, 0, true, false, false);
durabilityTest(method, Durability.SYNC_WAL, Durability.ASYNC_WAL, 5000, true, false, true);
durabilityTest(method, Durability.FSYNC_WAL, Durability.ASYNC_WAL, 5000, true, false, true);
durabilityTest(method, Durability.ASYNC_WAL, Durability.ASYNC_WAL, 5000, true, false, true);
durabilityTest(method, Durability.SKIP_WAL, Durability.ASYNC_WAL, 5000, true, false, true);
durabilityTest(method, Durability.USE_DEFAULT, Durability.ASYNC_WAL, 5000, true, false, true);
durabilityTest(method, Durability.ASYNC_WAL, Durability.USE_DEFAULT, 5000, true, false, true);
// expect skip wal cases (no append, no sync)
durabilityTest(method, Durability.SYNC_WAL, Durability.SKIP_WAL, 0, false, false, false);
durabilityTest(method, Durability.FSYNC_WAL, Durability.SKIP_WAL, 0, false, false, false);
durabilityTest(method, Durability.ASYNC_WAL, Durability.SKIP_WAL, 0, false, false, false);
durabilityTest(method, Durability.SKIP_WAL, Durability.SKIP_WAL, 0, false, false, false);
durabilityTest(method, Durability.USE_DEFAULT, Durability.SKIP_WAL, 0, false, false, false);
durabilityTest(method, Durability.SKIP_WAL, Durability.USE_DEFAULT, 0, false, false, false);
}
/**
 * Exercises one (table durability, mutation durability) combination: writes a single Put
 * through a Mockito-spied WAL and verifies whether {@code append} and {@code sync} were
 * invoked as expected.
 *
 * @param method base name used to build a per-combination log directory
 * @param tableDurability durability configured on the region/table
 * @param mutationDurability durability set on the individual Put
 * @param timeout how long to wait for an asynchronous sync (ms); only used when a sync
 *          is expected
 * @param expectAppend whether WAL.append should have been called exactly once
 * @param expectSync whether HRegion itself should call sync(long)
 * @param expectSyncFromLogSyncer whether the background log syncer should call sync()
 */
private void durabilityTest(String method, Durability tableDurability,
Durability mutationDurability, long timeout, boolean expectAppend, final boolean expectSync,
final boolean expectSyncFromLogSyncer) throws Exception {
Configuration conf = HBaseConfiguration.create(CONF);
// Make the log dir unique per durability combination so runs don't collide.
method = method + "_" + tableDurability.name() + "_" + mutationDurability.name();
byte[] family = Bytes.toBytes("family");
Path logDir = new Path(new Path(dir + method), "log");
final Configuration walConf = new Configuration(conf);
FSUtils.setRootDir(walConf, logDir);
// XXX: The spied AsyncFSWAL can not work properly because of a Mockito defect that can not
// deal with classes which have a field of an inner class. See discussions in HBASE-15536.
walConf.set(WALFactory.WAL_PROVIDER, "filesystem");
final WALFactory wals = new WALFactory(walConf, TEST_UTIL.getRandomUUID().toString());
// Spy the WAL so Mockito can count append/sync invocations made by the region.
final WAL wal = spy(wals.getWAL(RegionInfoBuilder.newBuilder(tableName).build()));
this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
HConstants.EMPTY_END_ROW, false, tableDurability, wal,
new byte[][] { family });
Put put = new Put(Bytes.toBytes("r1"));
put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
put.setDurability(mutationDurability);
region.put(put);
//verify append called or not
verify(wal, expectAppend ? times(1) : never())
.append((HRegionInfo)any(), (WALKeyImpl)any(),
(WALEdit)any(), Mockito.anyBoolean());
// verify sync called or not
if (expectSync || expectSyncFromLogSyncer) {
TEST_UTIL.waitFor(timeout, new Waiter.Predicate<Exception>() {
@Override
public boolean evaluate() throws Exception {
try {
if (expectSync) {
verify(wal, times(1)).sync(anyLong()); // Hregion calls this one
} else if (expectSyncFromLogSyncer) {
verify(wal, times(1)).sync(); // wal syncer calls this one
}
// NOTE(review): the catch below swallows any verification failure and the
// predicate still returns true, so waitFor(...) can never fail here — this
// looks like a best-effort wait; confirm it is intentional.
} catch (Throwable ignore) {
}
return true;
}
});
} else {
//verify(wal, never()).sync(anyLong());
verify(wal, never()).sync();
}
// Clean up the region and WAL factory regardless of the combination tested.
HBaseTestingUtility.closeRegionAndWAL(this.region);
wals.close();
this.region = null;
}
/**
 * Writes and flushes data through a primary region, then verifies that a secondary
 * replica opened over the same store files serves the same data.
 */
@Test
public void testRegionReplicaSecondary() throws IOException {
  Path rootDir = new Path(dir + name.getMethodName());
  FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
  byte[] qualifier = Bytes.toBytes("cq");
  byte[][] columnFamilies =
      new byte[][] { Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3") };
  HTableDescriptor descriptor = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  for (byte[] columnFamily : columnFamilies) {
    descriptor.addFamily(new HColumnDescriptor(columnFamily));
  }
  long regionId = System.currentTimeMillis();
  // Replica ids 0 (primary) and 1 (secondary) share the same region id and key range.
  HRegionInfo primaryInfo = new HRegionInfo(descriptor.getTableName(),
      HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false, regionId, 0);
  HRegionInfo secondaryInfo = new HRegionInfo(descriptor.getTableName(),
      HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false, regionId, 1);
  HRegion primary = null;
  HRegion secondary = null;
  try {
    primary = HBaseTestingUtility.createRegionAndWAL(primaryInfo, rootDir,
        TEST_UTIL.getConfiguration(), descriptor);
    putData(primary, 0, 1000, qualifier, columnFamilies);
    // Flush so the data is visible to the secondary via store files.
    primary.flush(true);
    secondary = HRegion.openHRegion(rootDir, secondaryInfo, descriptor, null, CONF);
    // Read back through the replica.
    verifyData(secondary, 0, 1000, qualifier, columnFamilies);
  } finally {
    if (primary != null) {
      HBaseTestingUtility.closeRegionAndWAL(primary);
    }
    if (secondary != null) {
      HBaseTestingUtility.closeRegionAndWAL(secondary);
    }
  }
}
/**
 * Verifies that a secondary replica rejects writes: data is loaded via the primary,
 * then a put against the replica must fail with an IOException.
 */
@Test
public void testRegionReplicaSecondaryIsReadOnly() throws IOException {
  Path rootDir = new Path(dir + name.getMethodName());
  FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
  byte[] qualifier = Bytes.toBytes("cq");
  byte[][] columnFamilies =
      new byte[][] { Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3") };
  HTableDescriptor descriptor = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  for (byte[] columnFamily : columnFamilies) {
    descriptor.addFamily(new HColumnDescriptor(columnFamily));
  }
  long regionId = System.currentTimeMillis();
  HRegionInfo primaryInfo = new HRegionInfo(descriptor.getTableName(),
      HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false, regionId, 0);
  HRegionInfo secondaryInfo = new HRegionInfo(descriptor.getTableName(),
      HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false, regionId, 1);
  HRegion primary = null;
  HRegion secondary = null;
  try {
    primary = HBaseTestingUtility.createRegionAndWAL(primaryInfo, rootDir,
        TEST_UTIL.getConfiguration(), descriptor);
    putData(primary, 0, 1000, qualifier, columnFamilies);
    primary.flush(true);
    secondary = HRegion.openHRegion(rootDir, secondaryInfo, descriptor, null, CONF);
    try {
      // Writing to a read-only replica must be refused.
      putData(secondary, 0, 1000, qualifier, columnFamilies);
      fail("Should have thrown exception");
    } catch (IOException ex) {
      // expected
    }
  } finally {
    if (primary != null) {
      HBaseTestingUtility.closeRegionAndWAL(primary);
    }
    if (secondary != null) {
      HBaseTestingUtility.closeRegionAndWAL(secondary);
    }
  }
}
/**
 * Builds a {@link WALFactory} rooted at {@code rootDir}, using a copy of {@code conf}
 * so the caller's configuration is left untouched. The factory id gets a random
 * numeric suffix to stay unique across tests.
 */
static WALFactory createWALFactory(Configuration conf, Path rootDir) throws IOException {
  Configuration walConf = new Configuration(conf);
  walConf.set(HConstants.HBASE_DIR, rootDir.toString());
  String factoryId = "hregion-" + RandomStringUtils.randomNumeric(8);
  return new WALFactory(walConf, factoryId);
}
/**
 * After the primary's store files are moved to the archive (simulating the file motion a
 * compaction performs), an already-open secondary replica must still be able to read all
 * the data through its existing store file readers.
 */
@Test
public void testCompactionFromPrimary() throws IOException {
Path rootDir = new Path(dir + name.getMethodName());
FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
byte[][] families = new byte[][] {
Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3")
};
byte[] cq = Bytes.toBytes("cq");
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
for (byte[] family : families) {
htd.addFamily(new HColumnDescriptor(family));
}
long time = System.currentTimeMillis();
// Replica 0 is the primary, replica 1 the secondary; both span the whole key range.
HRegionInfo primaryHri = new HRegionInfo(htd.getTableName(),
HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
false, time, 0);
HRegionInfo secondaryHri = new HRegionInfo(htd.getTableName(),
HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
false, time, 1);
HRegion primaryRegion = null, secondaryRegion = null;
try {
primaryRegion = HBaseTestingUtility.createRegionAndWAL(primaryHri,
rootDir, TEST_UTIL.getConfiguration(), htd);
// load some data
putData(primaryRegion, 0, 1000, cq, families);
// flush region
primaryRegion.flush(true);
// open secondary region
secondaryRegion = HRegion.openHRegion(rootDir, secondaryHri, htd, null, CONF);
// move the file of the primary region to the archive, simulating a compaction
Collection<HStoreFile> storeFiles = primaryRegion.getStore(families[0]).getStorefiles();
primaryRegion.getRegionFileSystem().removeStoreFiles(Bytes.toString(families[0]), storeFiles);
Collection<StoreFileInfo> storeFileInfos = primaryRegion.getRegionFileSystem()
.getStoreFiles(families[0]);
// The primary's cf1 store directory must now be empty...
Assert.assertTrue(storeFileInfos == null || storeFileInfos.isEmpty());
// ...yet the secondary still serves every row from its already-open readers.
verifyData(secondaryRegion, 0, 1000, cq, families);
} finally {
if (primaryRegion != null) {
HBaseTestingUtility.closeRegionAndWAL(primaryRegion);
}
if (secondaryRegion != null) {
HBaseTestingUtility.closeRegionAndWAL(secondaryRegion);
}
}
}
/** Convenience overload: loads rows into {@code this.region}. */
private void putData(int startRow, int numRows, byte[] qf, byte[]... families)
    throws IOException {
  putData(this.region, startRow, numRows, qf, families);
}
/** Convenience overload: loads rows with {@link Durability#SKIP_WAL}. */
private void putData(HRegion region, int startRow, int numRows, byte[] qf,
    byte[]... families) throws IOException {
  putData(region, Durability.SKIP_WAL, startRow, numRows, qf, families);
}
/**
 * Writes rows [startRow, startRow + numRows) with the given durability; each row
 * receives one null-valued cell per family under qualifier {@code qf}.
 */
static void putData(HRegion region, Durability durability,
    int startRow, int numRows, byte[] qf, byte[]... families) throws IOException {
  int endRow = startRow + numRows;
  for (int row = startRow; row < endRow; row++) {
    Put put = new Put(Bytes.toBytes("" + row));
    put.setDurability(durability);
    for (byte[] family : families) {
      put.addColumn(family, qf, null);
    }
    region.put(put);
    LOG.info(put.toString());
  }
}
/**
 * Reads rows [startRow, startRow + numRows) back out of {@code newReg} and asserts
 * that each row returns exactly one cell per family, with matching row, family and
 * qualifier.
 */
static void verifyData(HRegion newReg, int startRow, int numRows, byte[] qf, byte[]... families)
    throws IOException {
  for (int rowIdx = startRow; rowIdx < startRow + numRows; rowIdx++) {
    byte[] row = Bytes.toBytes("" + rowIdx);
    Get get = new Get(row);
    for (byte[] family : families) {
      get.addColumn(family, qf);
    }
    Result result = newReg.get(get);
    Cell[] cells = result.rawCells();
    // One cell per requested family, in family order.
    assertEquals(families.length, result.size());
    for (int famIdx = 0; famIdx < families.length; famIdx++) {
      Cell cell = cells[famIdx];
      assertTrue(CellUtil.matchingRows(cell, row));
      assertTrue(CellUtil.matchingFamily(cell, families[famIdx]));
      assertTrue(CellUtil.matchingQualifier(cell, qf));
    }
  }
}
/**
 * Fetches every stored version of row {@code k} in {@code family} and asserts that
 * each version's value equals the row key itself.
 */
static void assertGet(final HRegion r, final byte[] family, final byte[] k) throws IOException {
  Get get = new Get(k).addFamily(family).setMaxVersions();
  Cell[] cells = r.get(get).rawCells();
  for (Cell cell : cells) {
    // Row key doubles as the value in these fixtures.
    byte[] value = CellUtil.cloneValue(cell);
    assertTrue(Bytes.equals(k, value));
  }
}
/**
 * Asserts that the first cell returned by a full scan over family {@code fs} has the
 * value {@code firstValue}; the scan is abandoned as soon as a second cell is seen.
 *
 * <p>Note: {@code curVals} is intentionally never cleared between {@code next()} calls,
 * and the labeled break exits the whole scan after the first extra cell, so only the
 * very first cell's value is actually checked.
 *
 * @param r region to scan
 * @param fs the single column family to scan
 * @param firstValue expected value of the first cell returned
 * @throws IOException if the scan fails
 */
protected void assertScan(final HRegion r, final byte[] fs, final byte[] firstValue)
throws IOException {
byte[][] families = { fs };
Scan scan = new Scan();
for (int i = 0; i < families.length; i++)
scan.addFamily(families[i]);
InternalScanner s = r.getScanner(scan);
try {
List<Cell> curVals = new ArrayList<>();
boolean first = true;
// The label lets the inner cell loop abort the entire scan once the first
// value has been verified.
OUTER_LOOP: while (s.next(curVals)) {
for (Cell kv : curVals) {
byte[] val = CellUtil.cloneValue(kv);
byte[] curval = val;
if (first) {
first = false;
assertTrue(Bytes.compareTo(curval, firstValue) == 0);
} else {
// Not asserting anything. Might as well break.
break OUTER_LOOP;
}
}
}
} finally {
s.close();
}
}
/**
 * Verifies the FlushResult flags: an empty-memstore flush reports no flush, flushes
 * below the compaction threshold report no compaction needed, and further flushes
 * beyond the threshold request a compaction.
 */
@Test
public void testFlushResult() throws IOException {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, family);
  // Nothing in the memstore yet, so the flush is a no-op.
  HRegion.FlushResult result = region.flush(true);
  assertFalse(result.isFlushSucceeded());
  assertFalse(result.isCompactionNeeded());
  // The first two flushes stay under the store-file threshold.
  for (int i = 0; i < 2; i++) {
    region.put(new Put(tableName.toBytes()).addColumn(family, family, tableName.toBytes()));
    result = region.flush(true);
    assertTrue(result.isFlushSucceeded());
    assertFalse(result.isCompactionNeeded());
  }
  // Two more flushes push past the threshold, so compaction is now requested.
  for (int i = 0; i < 2; i++) {
    region.put(new Put(tableName.toBytes()).addColumn(family, family, tableName.toBytes()));
    result = region.flush(true);
    assertTrue(result.isFlushSucceeded());
    assertTrue(result.isCompactionNeeded());
  }
}
/**
 * Tunes the shared test configuration for split tests and returns it.
 * Note this mutates the class-level {@code CONF} rather than a private copy.
 */
protected Configuration initSplit() {
  // Compact whenever more than one store file exists.
  CONF.setInt("hbase.hstore.compactionThreshold", 2);
  CONF.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 10 * 1000);
  // Stretch the pause between client retries.
  CONF.setLong("hbase.client.pause", 15 * 1000);
  // Keep the max file size small (128 KB) so addContent always forces a split;
  // after loading all data the first region is about 1.3M.
  CONF.setLong(HConstants.HREGION_MAX_FILESIZE, 1024 * 128);
  return CONF;
}
/**
 * Opens a read/write region for {@code tableName} spanning the whole key space.
 *
 * @return A region on which you must call
 *         {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done.
 */
protected HRegion initHRegion(TableName tableName, String callingMethod, Configuration conf,
    byte[]... families) throws IOException {
  // Delegate to the variant that takes an explicit read-only flag.
  return initHRegion(tableName, callingMethod, conf, false, families);
}
/**
 * Opens a region for {@code tableName} spanning the whole key space, optionally
 * read-only.
 *
 * @return A region on which you must call
 *         {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done.
 */
protected HRegion initHRegion(TableName tableName, String callingMethod, Configuration conf,
    boolean isReadOnly, byte[]... families) throws IOException {
  // null start/stop keys cover the entire key range.
  return initHRegion(tableName, null, null, callingMethod, conf, isReadOnly, families);
}
/**
 * Opens a region over [startKey, stopKey) with a fresh WAL created under a
 * per-method log directory, using SYNC_WAL durability.
 */
protected HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,
    String callingMethod, Configuration conf, boolean isReadOnly, byte[]... families)
    throws IOException {
  Path logDir = TEST_UTIL.getDataTestDirOnTestFS(callingMethod + ".log");
  HRegionInfo regionInfo = new HRegionInfo(tableName, startKey, stopKey);
  final WAL wal = HBaseTestingUtility.createWal(conf, logDir, regionInfo);
  return initHRegion(tableName, startKey, stopKey, isReadOnly, Durability.SYNC_WAL, wal,
      families);
}
/**
 * Opens a local region over [startKey, stopKey) backed by the given WAL.
 *
 * @return A region on which you must call
 *         {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done.
 */
public HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,
    boolean isReadOnly, Durability durability, WAL wal, byte[]... families) throws IOException {
  // Ensure the MemStoreLAB chunk creator exists before the region comes up.
  ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
  return TEST_UTIL.createLocalHRegion(tableName, startKey, stopKey, isReadOnly, durability,
      wal, families);
}
/**
 * Asserts that {@code kv} carries exactly the expected coordinates and content for the
 * specified row, column and timestamp: row "row:&lt;rowIdx&gt;", the given family,
 * qualifier "column:&lt;colIdx&gt;", timestamp {@code ts}, and value
 * "value-version-&lt;ts&gt;".
 */
private void checkOneCell(Cell kv, byte[] cf, int rowIdx, int colIdx, long ts) {
  String ctx = "rowIdx=" + rowIdx + "; colIdx=" + colIdx + "; ts=" + ts;
  // Fixed message typo ("which checking") to match the wording of the other assertions.
  assertEquals("Row mismatch while checking: " + ctx, "row:" + rowIdx,
      Bytes.toString(CellUtil.cloneRow(kv)));
  assertEquals("ColumnFamily mismatch while checking: " + ctx, Bytes.toString(cf),
      Bytes.toString(CellUtil.cloneFamily(kv)));
  assertEquals("Column qualifier mismatch while checking: " + ctx, "column:" + colIdx,
      Bytes.toString(CellUtil.cloneQualifier(kv)));
  assertEquals("Timestamp mismatch while checking: " + ctx, ts, kv.getTimestamp());
  assertEquals("Value mismatch while checking: " + ctx, "value-version-" + ts,
      Bytes.toString(CellUtil.cloneValue(kv)));
}
/**
 * Reversed scan over memstore-only data: starting at rowC, the scanner must return
 * rowC (2 versions), then rowB, then rowA.
 */
@Test
public void testReverseScanner_FromMemStore_SingleCF_Normal()
    throws IOException {
  byte[] rowC = Bytes.toBytes("rowC");
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] cf = Bytes.toBytes("CF");
  byte[][] families = { cf };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  this.region = initHRegion(tableName, method, families);
  // rowC gets two versions; rowA and rowB one each.
  KeyValue kv1 = new KeyValue(rowC, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(rowC, cf, col, ts + 1, KeyValue.Type.Put, null);
  KeyValue kv2 = new KeyValue(rowA, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(rowB, cf, col, ts, KeyValue.Type.Put, null);
  Put put = new Put(rowC);
  put.add(kv1);
  put.add(kv11);
  region.put(put);
  put = new Put(rowA);
  put.add(kv2);
  region.put(put);
  put = new Put(rowB);
  put.add(kv3);
  region.put(put);
  Scan scan = new Scan(rowC);
  scan.setMaxVersions(5);
  scan.setReversed(true);
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> currRow = new ArrayList<>();
  // Rows expected in reverse order with their cell counts.
  byte[][] expectedRows = { rowC, rowB, rowA };
  int[] expectedSizes = { 2, 1, 1 };
  for (int i = 0; i < expectedRows.length; i++) {
    currRow.clear();
    boolean hasNext = scanner.next(currRow);
    assertEquals(expectedSizes[i], currRow.size());
    Cell first = currRow.get(0);
    assertTrue(Bytes.equals(first.getRowArray(), first.getRowOffset(),
        first.getRowLength(), expectedRows[i], 0, expectedRows[i].length));
    // hasNext is true for every row except the last one.
    assertEquals(i < expectedRows.length - 1, hasNext);
  }
  scanner.close();
}
/**
 * Reversed scan starting at rowD, which is larger than any stored row: the scanner
 * must seek back to rowC (2 versions), then return rowB and rowA.
 */
@Test
public void testReverseScanner_FromMemStore_SingleCF_LargerKey()
    throws IOException {
  byte[] rowC = Bytes.toBytes("rowC");
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] rowD = Bytes.toBytes("rowD");
  byte[] cf = Bytes.toBytes("CF");
  byte[][] families = { cf };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  this.region = initHRegion(tableName, method, families);
  KeyValue kv1 = new KeyValue(rowC, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(rowC, cf, col, ts + 1, KeyValue.Type.Put, null);
  KeyValue kv2 = new KeyValue(rowA, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(rowB, cf, col, ts, KeyValue.Type.Put, null);
  Put put = new Put(rowC);
  put.add(kv1);
  put.add(kv11);
  region.put(put);
  put = new Put(rowA);
  put.add(kv2);
  region.put(put);
  put = new Put(rowB);
  put.add(kv3);
  region.put(put);
  // rowD itself was never written; it only serves as the (exclusive-of-data) start key.
  Scan scan = new Scan(rowD);
  scan.setReversed(true);
  scan.setMaxVersions(5);
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> currRow = new ArrayList<>();
  byte[][] expectedRows = { rowC, rowB, rowA };
  int[] expectedSizes = { 2, 1, 1 };
  for (int i = 0; i < expectedRows.length; i++) {
    currRow.clear();
    boolean hasNext = scanner.next(currRow);
    assertEquals(expectedSizes[i], currRow.size());
    Cell first = currRow.get(0);
    assertTrue(Bytes.equals(first.getRowArray(), first.getRowOffset(),
        first.getRowLength(), expectedRows[i], 0, expectedRows[i].length));
    assertEquals(i < expectedRows.length - 1, hasNext);
  }
  scanner.close();
}
/**
 * Unbounded reversed scan over memstore-only data: without setMaxVersions only the
 * newest version per row is returned, in the order rowC, rowB, rowA.
 */
@Test
public void testReverseScanner_FromMemStore_SingleCF_FullScan()
    throws IOException {
  byte[] rowC = Bytes.toBytes("rowC");
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] cf = Bytes.toBytes("CF");
  byte[][] families = { cf };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  this.region = initHRegion(tableName, method, families);
  KeyValue kv1 = new KeyValue(rowC, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(rowC, cf, col, ts + 1, KeyValue.Type.Put, null);
  KeyValue kv2 = new KeyValue(rowA, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(rowB, cf, col, ts, KeyValue.Type.Put, null);
  Put put = new Put(rowC);
  put.add(kv1);
  put.add(kv11);
  region.put(put);
  put = new Put(rowA);
  put.add(kv2);
  region.put(put);
  put = new Put(rowB);
  put.add(kv3);
  region.put(put);
  Scan scan = new Scan();
  scan.setReversed(true);
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> currRow = new ArrayList<>();
  // Default max versions = 1, so each row yields a single cell.
  byte[][] expectedRows = { rowC, rowB, rowA };
  for (int i = 0; i < expectedRows.length; i++) {
    currRow.clear();
    boolean hasNext = scanner.next(currRow);
    assertEquals(1, currRow.size());
    Cell first = currRow.get(0);
    assertTrue(Bytes.equals(first.getRowArray(), first.getRowOffset(),
        first.getRowLength(), expectedRows[i], 0, expectedRows[i].length));
    assertEquals(i < expectedRows.length - 1, hasNext);
  }
  scanner.close();
}
/**
 * Regression-style test for the "INCLUDE_AND_SEEK_NEXT_ROW & SEEK_NEXT_ROW" endless
 * loop in reversed scans: scans [rowD, rowA) in reverse over a flushed store, once on
 * a column present in every row (col1) and once on a column present only in rowD
 * (col2), asserting the expected rows and the moreRowsMayExistAfter behavior.
 */
@Test
public void testReverseScanner_moreRowsMayExistAfter() throws IOException {
// case for "INCLUDE_AND_SEEK_NEXT_ROW & SEEK_NEXT_ROW" endless loop
byte[] rowA = Bytes.toBytes("rowA");
byte[] rowB = Bytes.toBytes("rowB");
byte[] rowC = Bytes.toBytes("rowC");
byte[] rowD = Bytes.toBytes("rowD");
byte[] rowE = Bytes.toBytes("rowE");
byte[] cf = Bytes.toBytes("CF");
byte[][] families = { cf };
byte[] col1 = Bytes.toBytes("col1");
byte[] col2 = Bytes.toBytes("col2");
long ts = 1;
this.region = initHRegion(tableName, method, families);
// Every row has col1; only rowD additionally has col2.
KeyValue kv1 = new KeyValue(rowA, cf, col1, ts, KeyValue.Type.Put, null);
KeyValue kv2 = new KeyValue(rowB, cf, col1, ts, KeyValue.Type.Put, null);
KeyValue kv3 = new KeyValue(rowC, cf, col1, ts, KeyValue.Type.Put, null);
KeyValue kv4_1 = new KeyValue(rowD, cf, col1, ts, KeyValue.Type.Put, null);
KeyValue kv4_2 = new KeyValue(rowD, cf, col2, ts, KeyValue.Type.Put, null);
KeyValue kv5 = new KeyValue(rowE, cf, col1, ts, KeyValue.Type.Put, null);
Put put = null;
put = new Put(rowA);
put.add(kv1);
region.put(put);
put = new Put(rowB);
put.add(kv2);
region.put(put);
put = new Put(rowC);
put.add(kv3);
region.put(put);
put = new Put(rowD);
put.add(kv4_1);
region.put(put);
put = new Put(rowD);
put.add(kv4_2);
region.put(put);
put = new Put(rowE);
put.add(kv5);
region.put(put);
// Flush so the reversed scan runs against a store file, not the memstore.
region.flush(true);
// Reversed scan over [rowD, rowA): rowD, rowC, rowB expected; rowA excluded.
Scan scan = new Scan(rowD, rowA);
scan.addColumn(families[0], col1);
scan.setReversed(true);
List<Cell> currRow = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowD, 0, rowD.length));
assertTrue(hasNext);
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowC, 0, rowC.length));
assertTrue(hasNext);
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowB, 0, rowB.length));
assertFalse(hasNext);
scanner.close();
// Second pass on col2: only rowD carries it, so only one row comes back.
scan = new Scan(rowD, rowA);
scan.addColumn(families[0], col2);
scan.setReversed(true);
currRow.clear();
scanner = region.getScanner(scan);
hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowD, 0, rowD.length));
scanner.close();
}
/**
 * Same scenario as testReverseScanner_moreRowsMayExistAfter but with a tiny HFile
 * block size ("test.block.size" = 1), ensuring reversed scans don't conflict with the
 * HFile index optimization when every cell lands in its own block.
 */
@Test
public void testReverseScanner_smaller_blocksize() throws IOException {
// case to ensure no conflict with HFile index optimization
byte[] rowA = Bytes.toBytes("rowA");
byte[] rowB = Bytes.toBytes("rowB");
byte[] rowC = Bytes.toBytes("rowC");
byte[] rowD = Bytes.toBytes("rowD");
byte[] rowE = Bytes.toBytes("rowE");
byte[] cf = Bytes.toBytes("CF");
byte[][] families = { cf };
byte[] col1 = Bytes.toBytes("col1");
byte[] col2 = Bytes.toBytes("col2");
long ts = 1;
HBaseConfiguration config = new HBaseConfiguration();
// 1-byte block size: maximizes the number of HFile blocks the scan must cross.
config.setInt("test.block.size", 1);
this.region = initHRegion(tableName, method, config, families);
// Every row has col1; only rowD additionally has col2.
KeyValue kv1 = new KeyValue(rowA, cf, col1, ts, KeyValue.Type.Put, null);
KeyValue kv2 = new KeyValue(rowB, cf, col1, ts, KeyValue.Type.Put, null);
KeyValue kv3 = new KeyValue(rowC, cf, col1, ts, KeyValue.Type.Put, null);
KeyValue kv4_1 = new KeyValue(rowD, cf, col1, ts, KeyValue.Type.Put, null);
KeyValue kv4_2 = new KeyValue(rowD, cf, col2, ts, KeyValue.Type.Put, null);
KeyValue kv5 = new KeyValue(rowE, cf, col1, ts, KeyValue.Type.Put, null);
Put put = null;
put = new Put(rowA);
put.add(kv1);
region.put(put);
put = new Put(rowB);
put.add(kv2);
region.put(put);
put = new Put(rowC);
put.add(kv3);
region.put(put);
put = new Put(rowD);
put.add(kv4_1);
region.put(put);
put = new Put(rowD);
put.add(kv4_2);
region.put(put);
put = new Put(rowE);
put.add(kv5);
region.put(put);
// Flush so the reversed scan reads from a store file with the tiny block size.
region.flush(true);
// Reversed scan over [rowD, rowA): rowD, rowC, rowB expected; rowA excluded.
Scan scan = new Scan(rowD, rowA);
scan.addColumn(families[0], col1);
scan.setReversed(true);
List<Cell> currRow = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowD, 0, rowD.length));
assertTrue(hasNext);
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowC, 0, rowC.length));
assertTrue(hasNext);
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowB, 0, rowB.length));
assertFalse(hasNext);
scanner.close();
// Second pass on col2: only rowD carries it, so only one row comes back.
scan = new Scan(rowD, rowA);
scan.addColumn(families[0], col2);
scan.setReversed(true);
currRow.clear();
scanner = region.getScanner(scan);
hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), rowD, 0, rowD.length));
scanner.close();
}
/**
 * Reversed scan across three column families whose cells are spread over three flushed
 * store files plus the memstore. Rows row0..row5 carry 1/2/4/2/5/2 cells respectively;
 * the scan starts at row4 (so the lexicographically larger row5 is excluded), uses
 * batch=3, and is expected to return rows row4, row3, row2, row1, row0 in that order,
 * splitting row4 (5 cells) and row2 (4 cells) across two next() calls each.
 */
@Test
public void testReverseScanner_FromMemStoreAndHFiles_MultiCFs1()
throws IOException {
byte[] row0 = Bytes.toBytes("row0"); // 1 kv
byte[] row1 = Bytes.toBytes("row1"); // 2 kv
byte[] row2 = Bytes.toBytes("row2"); // 4 kv
byte[] row3 = Bytes.toBytes("row3"); // 2 kv
byte[] row4 = Bytes.toBytes("row4"); // 5 kv
byte[] row5 = Bytes.toBytes("row5"); // 2 kv
byte[] cf1 = Bytes.toBytes("CF1");
byte[] cf2 = Bytes.toBytes("CF2");
byte[] cf3 = Bytes.toBytes("CF3");
byte[][] families = { cf1, cf2, cf3 };
byte[] col = Bytes.toBytes("C");
long ts = 1;
HBaseConfiguration conf = new HBaseConfiguration();
// disable compactions in this test.
conf.setInt("hbase.hstore.compactionThreshold", 10000);
this.region = initHRegion(tableName, method, conf, families);
// kv naming style: kv(row number) totalKvCountInThisRow seq no
KeyValue kv0_1_1 = new KeyValue(row0, cf1, col, ts, KeyValue.Type.Put,
null);
KeyValue kv1_2_1 = new KeyValue(row1, cf2, col, ts, KeyValue.Type.Put,
null);
KeyValue kv1_2_2 = new KeyValue(row1, cf1, col, ts + 1,
KeyValue.Type.Put, null);
KeyValue kv2_4_1 = new KeyValue(row2, cf2, col, ts, KeyValue.Type.Put,
null);
KeyValue kv2_4_2 = new KeyValue(row2, cf1, col, ts, KeyValue.Type.Put,
null);
KeyValue kv2_4_3 = new KeyValue(row2, cf3, col, ts, KeyValue.Type.Put,
null);
KeyValue kv2_4_4 = new KeyValue(row2, cf1, col, ts + 4,
KeyValue.Type.Put, null);
KeyValue kv3_2_1 = new KeyValue(row3, cf2, col, ts, KeyValue.Type.Put,
null);
KeyValue kv3_2_2 = new KeyValue(row3, cf1, col, ts + 4,
KeyValue.Type.Put, null);
KeyValue kv4_5_1 = new KeyValue(row4, cf1, col, ts, KeyValue.Type.Put,
null);
KeyValue kv4_5_2 = new KeyValue(row4, cf3, col, ts, KeyValue.Type.Put,
null);
KeyValue kv4_5_3 = new KeyValue(row4, cf3, col, ts + 5,
KeyValue.Type.Put, null);
KeyValue kv4_5_4 = new KeyValue(row4, cf2, col, ts, KeyValue.Type.Put,
null);
KeyValue kv4_5_5 = new KeyValue(row4, cf1, col, ts + 3,
KeyValue.Type.Put, null);
KeyValue kv5_2_1 = new KeyValue(row5, cf2, col, ts, KeyValue.Type.Put,
null);
KeyValue kv5_2_2 = new KeyValue(row5, cf3, col, ts, KeyValue.Type.Put,
null);
// hfiles(cf1/cf2) :"row1"(1 kv) / "row2"(1 kv) / "row4"(2 kv)
Put put = null;
put = new Put(row1);
put.add(kv1_2_1);
region.put(put);
put = new Put(row2);
put.add(kv2_4_1);
region.put(put);
put = new Put(row4);
put.add(kv4_5_4);
put.add(kv4_5_5);
region.put(put);
region.flush(true);
// hfiles(cf1/cf3) : "row1" (1 kvs) / "row2" (1 kv) / "row4" (2 kv)
put = new Put(row4);
put.add(kv4_5_1);
put.add(kv4_5_3);
region.put(put);
put = new Put(row1);
put.add(kv1_2_2);
region.put(put);
put = new Put(row2);
put.add(kv2_4_4);
region.put(put);
region.flush(true);
// hfiles(cf1/cf3) : "row2"(2 kv) / "row3"(1 kvs) / "row4" (1 kv)
put = new Put(row4);
put.add(kv4_5_2);
region.put(put);
put = new Put(row2);
put.add(kv2_4_2);
put.add(kv2_4_3);
region.put(put);
put = new Put(row3);
put.add(kv3_2_2);
region.put(put);
region.flush(true);
// memstore(cf1/cf2/cf3) : "row0" (1 kvs) / "row3" ( 1 kv) / "row5" (max)
// ( 2 kv)
put = new Put(row0);
put.add(kv0_1_1);
region.put(put);
put = new Put(row3);
put.add(kv3_2_1);
region.put(put);
put = new Put(row5);
put.add(kv5_2_1);
put.add(kv5_2_2);
region.put(put);
// scan range = ["row4", min), skip the max "row5"
Scan scan = new Scan(row4);
scan.setMaxVersions(5);
scan.setBatch(3);
scan.setReversed(true);
InternalScanner scanner = region.getScanner(scan);
List<Cell> currRow = new ArrayList<>();
boolean hasNext = false;
// 1. scan out "row4" (5 kvs), "row5" can't be scanned out since not
// included in scan range
// "row4" takes 2 next() calls since batch=3
hasNext = scanner.next(currRow);
assertEquals(3, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row4, 0, row4.length));
assertTrue(hasNext);
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(2, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
currRow.get(0).getRowLength(), row4, 0,
row4.length));
assertTrue(hasNext);
// 2. scan out "row3" (2 kv)
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(2, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row3, 0, row3.length));
assertTrue(hasNext);
// 3. scan out "row2" (4 kvs)
// "row2" takes 2 next() calls since batch=3
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(3, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row2, 0, row2.length));
assertTrue(hasNext);
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row2, 0, row2.length));
assertTrue(hasNext);
// 4. scan out "row1" (2 kv)
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(2, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row1, 0, row1.length));
assertTrue(hasNext);
// 5. scan out "row0" (1 kv)
currRow.clear();
hasNext = scanner.next(currRow);
assertEquals(1, currRow.size());
assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
.get(0).getRowLength(), row0, 0, row0.length));
assertFalse(hasNext);
scanner.close();
}
/**
 * Reversed scan stitching together three flushed store files (one row each in
 * CF1..CF3) and one memstore-only row (CF4): starting at row4, the rows must come
 * back as row4, row3, row2, row1.
 */
@Test
public void testReverseScanner_FromMemStoreAndHFiles_MultiCFs2()
    throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] row2 = Bytes.toBytes("row2");
  byte[] row3 = Bytes.toBytes("row3");
  byte[] row4 = Bytes.toBytes("row4");
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[] cf2 = Bytes.toBytes("CF2");
  byte[] cf3 = Bytes.toBytes("CF3");
  byte[] cf4 = Bytes.toBytes("CF4");
  byte[][] families = { cf1, cf2, cf3, cf4 };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  HBaseConfiguration conf = new HBaseConfiguration();
  // Raise the threshold so the individual store files are never compacted together.
  conf.setInt("hbase.hstore.compactionThreshold", 10000);
  this.region = initHRegion(tableName, method, conf, families);
  KeyValue kv1 = new KeyValue(row1, cf1, col, ts, KeyValue.Type.Put, null);
  KeyValue kv2 = new KeyValue(row2, cf2, col, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(row3, cf3, col, ts, KeyValue.Type.Put, null);
  KeyValue kv4 = new KeyValue(row4, cf4, col, ts, KeyValue.Type.Put, null);
  // Each of the first three rows lands in its own store file.
  Put put = new Put(row1);
  put.add(kv1);
  region.put(put);
  region.flush(true);
  put = new Put(row2);
  put.add(kv2);
  region.put(put);
  region.flush(true);
  put = new Put(row3);
  put.add(kv3);
  region.put(put);
  region.flush(true);
  // row4 stays in the memstore only.
  put = new Put(row4);
  put.add(kv4);
  region.put(put);
  // Reversed scan over ["row4", min).
  Scan scan = new Scan(row4);
  scan.setReversed(true);
  scan.setBatch(10);
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> currRow = new ArrayList<>();
  byte[][] expectedRows = { row4, row3, row2, row1 };
  for (int i = 0; i < expectedRows.length; i++) {
    currRow.clear();
    boolean hasNext = scanner.next(currRow);
    assertEquals(1, currRow.size());
    Cell first = currRow.get(0);
    assertTrue(Bytes.equals(first.getRowArray(), first.getRowOffset(),
        first.getRowLength(), expectedRows[i], 0, expectedRows[i].length));
    // hasNext is true for every row except the final one.
    assertEquals(i < expectedRows.length - 1, hasNext);
  }
}
/**
 * Test for HBASE-14497: Reverse Scan threw StackOverflow caused by readPt checking.
 * Opens a reverse scanner first (pinning an early read point), then writes 10k rows to a
 * store file and 10k more to the memstore; seeking backwards over all those skippable rows
 * must not recurse deeply enough to blow the stack.
 */
@Test
public void testReverseScanner_StackOverflow() throws IOException {
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[][] families = {cf1};
  byte[] col = Bytes.toBytes("C");
  HBaseConfiguration conf = new HBaseConfiguration();
  this.region = initHRegion(tableName, method, conf, families);
  // setup with one storefile and one memstore, to create scanner and get an earlier readPt
  Put put = new Put(Bytes.toBytes("19998"));
  put.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put);
  region.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
  Put put2 = new Put(Bytes.toBytes("19997"));
  put2.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put2);
  // Scanner is opened BEFORE the bulk writes below, so its readPt excludes them.
  Scan scan = new Scan(Bytes.toBytes("19998"));
  scan.setReversed(true);
  InternalScanner scanner = region.getScanner(scan);
  // create one storefile contains many rows will be skipped
  // to check StoreFileScanner.seekToPreviousRow
  for (int i = 10000; i < 20000; i++) {
    Put p = new Put(Bytes.toBytes(""+i));
    p.addColumn(cf1, col, Bytes.toBytes("" + i));
    region.put(p);
  }
  region.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
  // create one memstore contains many rows will be skipped
  // to check MemStoreScanner.seekToPreviousRow
  for (int i = 10000; i < 20000; i++) {
    Put p = new Put(Bytes.toBytes(""+i));
    p.addColumn(cf1, col, Bytes.toBytes("" + i));
    region.put(p);
  }
  // Drain the scanner; only the two pre-scan rows should be visible.
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext;
  do {
    hasNext = scanner.next(currRow);
  } while (hasNext);
  assertEquals(2, currRow.size());
  assertEquals("19998", Bytes.toString(currRow.get(0).getRowArray(),
      currRow.get(0).getRowOffset(), currRow.get(0).getRowLength()));
  assertEquals("19997", Bytes.toString(currRow.get(1).getRowArray(),
      currRow.get(1).getRowOffset(), currRow.get(1).getRowLength()));
}
/**
 * HBASE-15871: after a flush resets the store scanner, a reverse scan whose readPt predates
 * the new memstore contents must not re-add a memstore scanner — only the store file scanner
 * should remain, and only the two rows written before the scan opened are returned.
 */
@Test
public void testReverseScanShouldNotScanMemstoreIfReadPtLesser() throws Exception {
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[][] families = { cf1 };
  byte[] col = Bytes.toBytes("C");
  HBaseConfiguration conf = new HBaseConfiguration();
  this.region = initHRegion(tableName, method, conf, families);
  // setup with one storefile and one memstore, to create scanner and get an earlier readPt
  Put put = new Put(Bytes.toBytes("19996"));
  put.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put);
  Put put2 = new Put(Bytes.toBytes("19995"));
  put2.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put2);
  // create a reverse scan
  Scan scan = new Scan(Bytes.toBytes("19996"));
  scan.setReversed(true);
  RegionScannerImpl scanner = region.getScanner(scan);
  // flush the cache. This will reset the store scanner
  region.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
  // create one memstore contains many rows will be skipped
  // to check MemStoreScanner.seekToPreviousRow
  for (int i = 10000; i < 20000; i++) {
    Put p = new Put(Bytes.toBytes("" + i));
    p.addColumn(cf1, col, Bytes.toBytes("" + i));
    region.put(p);
  }
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext;
  boolean assertDone = false;
  do {
    hasNext = scanner.next(currRow);
    // With HBASE-15871, after the scanner is reset the memstore scanner should not be
    // added here
    if (!assertDone) {
      // Inspect the scanner stack exactly once, on the first next() after the reset.
      StoreScanner current =
          (StoreScanner) (scanner.storeHeap).getCurrentForTesting();
      List<KeyValueScanner> scanners = current.getAllScannersForTesting();
      assertEquals("There should be only one scanner the store file scanner", 1,
        scanners.size());
      assertDone = true;
    }
  } while (hasNext);
  assertEquals(2, currRow.size());
  assertEquals("19996", Bytes.toString(currRow.get(0).getRowArray(),
      currRow.get(0).getRowOffset(), currRow.get(0).getRowLength()));
  assertEquals("19995", Bytes.toString(currRow.get(1).getRowArray(),
      currRow.get(1).getRowOffset(), currRow.get(1).getRowLength()));
}
/**
 * Cells written after a reverse scanner is opened carry sequence ids greater than the
 * scanner's readPt and must be invisible to it: draining the scanner returns only the two
 * rows that existed when the scan was created.
 */
@Test
public void testReverseScanWhenPutCellsAfterOpenReverseScan() throws Exception {
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[][] families = { cf1 };
  byte[] col = Bytes.toBytes("C");
  HBaseConfiguration conf = new HBaseConfiguration();
  this.region = initHRegion(tableName, method, conf, families);
  Put put = new Put(Bytes.toBytes("199996"));
  put.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put);
  Put put2 = new Put(Bytes.toBytes("199995"));
  put2.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put2);
  // Create a reverse scan
  Scan scan = new Scan(Bytes.toBytes("199996"));
  scan.setReversed(true);
  RegionScannerImpl scanner = region.getScanner(scan);
  // Put a lot of cells that have sequenceIDs grater than the readPt of the reverse scan
  for (int i = 100000; i < 200000; i++) {
    Put p = new Put(Bytes.toBytes("" + i));
    p.addColumn(cf1, col, Bytes.toBytes("" + i));
    region.put(p);
  }
  // Drain the scanner; the post-open rows must be filtered out by readPt.
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext;
  do {
    hasNext = scanner.next(currRow);
  } while (hasNext);
  assertEquals(2, currRow.size());
  assertEquals("199996", Bytes.toString(currRow.get(0).getRowArray(),
      currRow.get(0).getRowOffset(), currRow.get(0).getRowLength()));
  assertEquals("199995", Bytes.toString(currRow.get(1).getRowArray(),
      currRow.get(1).getRowOffset(), currRow.get(1).getRowLength()));
}
/**
 * The region's write-request counter starts at zero, is untouched by merely building a
 * mutation, and goes up by exactly one for each executed put or delete.
 */
@Test
public void testWriteRequestsCounter() throws IOException {
  byte[] family = Bytes.toBytes("info");
  this.region = initHRegion(tableName, method, CONF, new byte[][] { family });
  assertEquals(0L, region.getWriteRequestsCount());
  Put put = new Put(row);
  put.addColumn(family, family, family);
  // Constructing the Put must not bump the counter; only executing it does.
  assertEquals(0L, region.getWriteRequestsCount());
  for (long expected = 1L; expected <= 3L; expected++) {
    region.put(put);
    assertEquals(expected, region.getWriteRequestsCount());
  }
  region.delete(new Delete(row));
  assertEquals(4L, region.getWriteRequestsCount());
}
/**
 * Opening a region must append a REGION_OPEN RegionEventDescriptor to the WAL that names the
 * table, the encoded region, the server, and every store — including the store file flushed
 * before the region was reopened (with a relative, not absolute, path).
 */
@Test
public void testOpenRegionWrittenToWAL() throws Exception {
  final ServerName serverName = ServerName.valueOf(name.getMethodName(), 100, 42);
  final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  htd.addFamily(new HColumnDescriptor(fam1));
  htd.addFamily(new HColumnDescriptor(fam2));
  HRegionInfo hri = new HRegionInfo(htd.getTableName(),
    HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
  // open the region w/o rss and wal and flush some files
  region =
       HBaseTestingUtility.createRegionAndWAL(hri, TEST_UTIL.getDataTestDir(), TEST_UTIL
          .getConfiguration(), htd);
  assertNotNull(region);
  // create a file in fam1 for the region before opening in OpenRegionHandler
  region.put(new Put(Bytes.toBytes("a")).addColumn(fam1, fam1, fam1));
  region.flush(true);
  HBaseTestingUtility.closeRegionAndWAL(region);
  ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
  // capture append() calls
  WAL wal = mockWAL();
  when(rss.getWAL((HRegionInfo) any())).thenReturn(wal);
  // Reopen through the normal path so the open event is written to the mocked WAL.
  region = HRegion.openHRegion(hri, htd, rss.getWAL(hri),
    TEST_UTIL.getConfiguration(), rss, null);
  verify(wal, times(1)).append((HRegionInfo)any(), (WALKeyImpl)any()
    , editCaptor.capture(), anyBoolean());
  WALEdit edit = editCaptor.getValue();
  assertNotNull(edit);
  assertNotNull(edit.getCells());
  assertEquals(1, edit.getCells().size());
  RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
  assertNotNull(desc);
  LOG.info("RegionEventDescriptor from WAL: " + desc);
  assertEquals(RegionEventDescriptor.EventType.REGION_OPEN, desc.getEventType());
  assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getTableName().toBytes()));
  assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(),
    hri.getEncodedNameAsBytes()));
  assertTrue(desc.getLogSequenceNumber() > 0);
  assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
  assertEquals(2, desc.getStoresCount());
  StoreDescriptor store = desc.getStores(0);
  assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
  assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
  assertEquals(1, store.getStoreFileCount()); // 1store file
  assertFalse(store.getStoreFile(0).contains("/")); // ensure path is relative
  store = desc.getStores(1);
  assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
  assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
  assertEquals(0, store.getStoreFileCount()); // no store files
}
// Helper for test testOpenRegionWrittenToWALForLogReplay: an HRegion whose next WAL
// sequence id is pinned to a known constant so the test can assert against it.
static class HRegionWithSeqId extends HRegion {
  public HRegionWithSeqId(final Path tableDir, final WAL wal, final FileSystem fs,
      final Configuration confParam, final RegionInfo regionInfo,
      final TableDescriptor htd, final RegionServerServices rsServices) {
    super(tableDir, wal, fs, confParam, regionInfo, htd, rsServices);
  }
  /** Always report a fixed sequence id instead of consulting the WAL. */
  @Override
  protected long getNextSequenceId(WAL wal) throws IOException {
    return 42;
  }
}
/**
 * A put without any cell tags must produce store files whose HFile context reports that tags
 * are not included.
 */
@Test
public void testFlushedFileWithNoTags() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
  tableDescriptor.addFamily(new HColumnDescriptor(fam1));
  HRegionInfo regionInfo = new HRegionInfo(tableName, null, null, false);
  Path testDir = TEST_UTIL.getDataTestDir(getClass().getSimpleName());
  region = HBaseTestingUtility.createRegionAndWAL(regionInfo, testDir,
      TEST_UTIL.getConfiguration(), tableDescriptor);
  Put put = new Put(Bytes.toBytes("a-b-0-0"));
  put.addColumn(fam1, qual1, Bytes.toBytes("c1-value"));
  region.put(put);
  region.flush(true);
  // Every file the flush produced must claim it carries no tags.
  HStore store = region.getStore(fam1);
  for (HStoreFile storeFile : store.getStorefiles()) {
    assertFalse("Tags should not be present "
        , storeFile.getReader().getHFileReader().getFileContext().isIncludesTags());
  }
}
/**
 * Utility method to setup a WAL mock.
 * Needs to do the bit where we close latch on the WALKeyImpl on append else test hangs:
 * a real WAL begins an MVCC write entry and attaches it to the key; tests that wait on that
 * entry would block forever against a bare mock.
 * @return a mock WAL whose append() mimics the MVCC bookkeeping of a real append
 * @throws IOException declared for caller convenience; the mock itself does no I/O
 */
private WAL mockWAL() throws IOException {
  WAL wal = mock(WAL.class);
  Mockito.when(wal.append((HRegionInfo)Mockito.any(),
      (WALKeyImpl)Mockito.any(), (WALEdit)Mockito.any(), Mockito.anyBoolean())).
    thenAnswer(new Answer<Long>() {
      @Override
      public Long answer(InvocationOnMock invocation) throws Throwable {
        // Begin an MVCC write entry on the key's controller, as the real WAL would.
        WALKeyImpl key = invocation.getArgument(1);
        MultiVersionConcurrencyControl.WriteEntry we = key.getMvcc().begin();
        key.setWriteEntry(we);
        return 1L;
      }
    });
  return wal;
}
/**
 * Closing a region must append a REGION_CLOSE RegionEventDescriptor to the WAL; the mocked
 * WAL sees two appends (open then close) and the second one is inspected here.
 */
@Test
public void testCloseRegionWrittenToWAL() throws Exception {
  Path rootDir = new Path(dir + name.getMethodName());
  FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
  final ServerName serverName = ServerName.valueOf("testCloseRegionWrittenToWAL", 100, 42);
  final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  htd.addFamily(new HColumnDescriptor(fam1));
  htd.addFamily(new HColumnDescriptor(fam2));
  final HRegionInfo hri = new HRegionInfo(htd.getTableName(),
    HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
  ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
  // capture append() calls
  WAL wal = mockWAL();
  when(rss.getWAL((HRegionInfo) any())).thenReturn(wal);
  // create and then open a region first so that it can be closed later
  region = HRegion.createHRegion(hri, rootDir, TEST_UTIL.getConfiguration(), htd, rss.getWAL(hri));
  region = HRegion.openHRegion(hri, htd, rss.getWAL(hri),
    TEST_UTIL.getConfiguration(), rss, null);
  // close the region
  region.close(false);
  // 2 times, one for region open, the other close region
  verify(wal, times(2)).append((HRegionInfo)any(), (WALKeyImpl)any(),
    editCaptor.capture(), anyBoolean());
  // Index 1 is the close event; index 0 was the open event.
  WALEdit edit = editCaptor.getAllValues().get(1);
  assertNotNull(edit);
  assertNotNull(edit.getCells());
  assertEquals(1, edit.getCells().size());
  RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
  assertNotNull(desc);
  LOG.info("RegionEventDescriptor from WAL: " + desc);
  assertEquals(RegionEventDescriptor.EventType.REGION_CLOSE, desc.getEventType());
  assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getTableName().toBytes()));
  assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(),
    hri.getEncodedNameAsBytes()));
  assertTrue(desc.getLogSequenceNumber() > 0);
  assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
  assertEquals(2, desc.getStoresCount());
  StoreDescriptor store = desc.getStores(0);
  assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
  assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
  assertEquals(0, store.getStoreFileCount()); // no store files
  store = desc.getStores(1);
  assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
  assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
  assertEquals(0, store.getStoreFileCount()); // no store files
}
/**
 * Test RegionTooBusyException thrown when region is busy: a background thread grabs the
 * region's write lock and holds it, so a get() with a short busy-wait timeout must fail
 * with RegionTooBusyException rather than blocking indefinitely.
 */
@Test
public void testRegionTooBusy() throws IOException {
  byte[] family = Bytes.toBytes("family");
  long defaultBusyWaitDuration = CONF.getLong("hbase.busy.wait.duration",
    HRegion.DEFAULT_BUSY_WAIT_DURATION);
  CONF.setLong("hbase.busy.wait.duration", 1000);
  region = initHRegion(tableName, method, CONF, family);
  final AtomicBoolean stopped = new AtomicBoolean(true);
  Thread t = new Thread(new Runnable() {
    @Override
    public void run() {
      try {
        // Hold the region write lock until the main thread flips `stopped` back to true.
        region.lock.writeLock().lock();
        stopped.set(false);
        while (!stopped.get()) {
          Thread.sleep(100);
        }
      } catch (InterruptedException ie) {
      } finally {
        region.lock.writeLock().unlock();
      }
    }
  });
  t.start();
  Get get = new Get(row);
  try {
    // Wait until the background thread has actually acquired the lock.
    while (stopped.get()) {
      Thread.sleep(100);
    }
    region.get(get);
    fail("Should throw RegionTooBusyException");
  } catch (InterruptedException ie) {
    fail("test interrupted");
  } catch (RegionTooBusyException e) {
    // Good, expected
  } finally {
    stopped.set(true);
    try {
      t.join();
    } catch (Throwable e) {
    }
    // Clean up locally since the lock holder interferes with the normal teardown path,
    // and restore the shared config for subsequent tests.
    HBaseTestingUtility.closeRegionAndWAL(region);
    region = null;
    CONF.setLong("hbase.busy.wait.duration", defaultBusyWaitDuration);
  }
}
/**
 * Exercises per-cell TTL tags against the family-level TTL (10s) using an injected
 * incrementing clock: four cells expire at T+5/10/15/20, then an increment written with a
 * 5-second cell TTL "disappears", reverting the counter to its pre-increment value.
 */
@Test
public void testCellTTLs() throws IOException {
  IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge();
  EnvironmentEdgeManager.injectEdge(edge);
  final byte[] row = Bytes.toBytes("testRow");
  final byte[] q1 = Bytes.toBytes("q1");
  final byte[] q2 = Bytes.toBytes("q2");
  final byte[] q3 = Bytes.toBytes("q3");
  final byte[] q4 = Bytes.toBytes("q4");
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  HColumnDescriptor hcd = new HColumnDescriptor(fam1);
  hcd.setTimeToLive(10); // 10 seconds
  htd.addFamily(hcd);
  Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
  // Cell tags require at least HFile v3.
  conf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MIN_FORMAT_VERSION_WITH_TAGS);
  region = HBaseTestingUtility.createRegionAndWAL(new HRegionInfo(htd.getTableName(),
      HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY),
    TEST_UTIL.getDataTestDir(), conf, htd);
  assertNotNull(region);
  long now = EnvironmentEdgeManager.currentTime();
  // Add a cell that will expire in 5 seconds via cell TTL
  region.put(new Put(row).add(new KeyValue(row, fam1, q1, now,
    HConstants.EMPTY_BYTE_ARRAY, new ArrayBackedTag[] {
      // TTL tags specify ts in milliseconds
      new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) })));
  // Add a cell that will expire after 10 seconds via family setting
  region.put(new Put(row).addColumn(fam1, q2, now, HConstants.EMPTY_BYTE_ARRAY));
  // Add a cell that will expire in 15 seconds via cell TTL
  region.put(new Put(row).add(new KeyValue(row, fam1, q3, now + 10000 - 1,
    HConstants.EMPTY_BYTE_ARRAY, new ArrayBackedTag[] {
      // TTL tags specify ts in milliseconds
      new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) })));
  // Add a cell that will expire in 20 seconds via family setting
  region.put(new Put(row).addColumn(fam1, q4, now + 10000 - 1, HConstants.EMPTY_BYTE_ARRAY));
  // Flush so we are sure store scanning gets this right
  region.flush(true);
  // A query at time T+0 should return all cells
  Result r = region.get(new Get(row));
  assertNotNull(r.getValue(fam1, q1));
  assertNotNull(r.getValue(fam1, q2));
  assertNotNull(r.getValue(fam1, q3));
  assertNotNull(r.getValue(fam1, q4));
  // Increment time to T+5 seconds
  edge.incrementTime(5000);
  r = region.get(new Get(row));
  assertNull(r.getValue(fam1, q1));
  assertNotNull(r.getValue(fam1, q2));
  assertNotNull(r.getValue(fam1, q3));
  assertNotNull(r.getValue(fam1, q4));
  // Increment time to T+10 seconds
  edge.incrementTime(5000);
  r = region.get(new Get(row));
  assertNull(r.getValue(fam1, q1));
  assertNull(r.getValue(fam1, q2));
  assertNotNull(r.getValue(fam1, q3));
  assertNotNull(r.getValue(fam1, q4));
  // Increment time to T+15 seconds
  edge.incrementTime(5000);
  r = region.get(new Get(row));
  assertNull(r.getValue(fam1, q1));
  assertNull(r.getValue(fam1, q2));
  assertNull(r.getValue(fam1, q3));
  assertNotNull(r.getValue(fam1, q4));
  // Increment time to T+20 seconds
  edge.incrementTime(10000);
  r = region.get(new Get(row));
  assertNull(r.getValue(fam1, q1));
  assertNull(r.getValue(fam1, q2));
  assertNull(r.getValue(fam1, q3));
  assertNull(r.getValue(fam1, q4));
  // Fun with disappearing increments
  // Start at 1
  region.put(new Put(row).addColumn(fam1, q1, Bytes.toBytes(1L)));
  r = region.get(new Get(row));
  byte[] val = r.getValue(fam1, q1);
  assertNotNull(val);
  assertEquals(1L, Bytes.toLong(val));
  // Increment with a TTL of 5 seconds
  Increment incr = new Increment(row).addColumn(fam1, q1, 1L);
  incr.setTTL(5000);
  region.increment(incr); // 2
  // New value should be 2
  r = region.get(new Get(row));
  val = r.getValue(fam1, q1);
  assertNotNull(val);
  assertEquals(2L, Bytes.toLong(val));
  // Increment time to T+25 seconds
  edge.incrementTime(5000);
  // Value should be back to 1
  r = region.get(new Get(row));
  val = r.getValue(fam1, q1);
  assertNotNull(val);
  assertEquals(1L, Bytes.toLong(val));
  // Increment time to T+30 seconds
  edge.incrementTime(5000);
  // Original value written at T+20 should be gone now via family TTL
  r = region.get(new Get(row));
  assertNull(r.getValue(fam1, q1));
}
/**
 * Increment timestamps must be monotonic: even when the injected clock moves backwards, the
 * second increment is stamped one millisecond after the first, and the value still
 * accumulates to 2.
 */
@Test
public void testIncrementTimestampsAreMonotonic() throws IOException {
  region = initHRegion(tableName, method, CONF, fam1);
  ManualEnvironmentEdge clock = new ManualEnvironmentEdge();
  EnvironmentEdgeManager.injectEdge(clock);
  clock.setValue(10);
  Increment increment = new Increment(row);
  increment.setDurability(Durability.SKIP_WAL);
  increment.addColumn(fam1, qual1, 1L);
  region.increment(increment);
  Cell latest = region.get(new Get(row)).getColumnLatestCell(fam1, qual1);
  assertNotNull(latest);
  assertEquals(10L, latest.getTimestamp());
  // Turn the clock back; the next increment must still get a strictly later timestamp.
  clock.setValue(1);
  region.increment(increment);
  latest = region.get(new Get(row)).getColumnLatestCell(fam1, qual1);
  assertEquals(11L, latest.getTimestamp());
  assertEquals(2L,
      Bytes.toLong(latest.getValueArray(), latest.getValueOffset(), latest.getValueLength()));
}
/**
 * Append timestamps must be monotonic: even when the injected clock moves backwards, the
 * second append is stamped one millisecond after the first, and the stored value is the
 * qualifier bytes appended to themselves.
 */
@Test
public void testAppendTimestampsAreMonotonic() throws IOException {
  region = initHRegion(tableName, method, CONF, fam1);
  ManualEnvironmentEdge clock = new ManualEnvironmentEdge();
  EnvironmentEdgeManager.injectEdge(clock);
  clock.setValue(10);
  Append append = new Append(row);
  append.setDurability(Durability.SKIP_WAL);
  append.addColumn(fam1, qual1, qual1);
  region.append(append);
  Cell latest = region.get(new Get(row)).getColumnLatestCell(fam1, qual1);
  assertNotNull(latest);
  assertEquals(10L, latest.getTimestamp());
  // Turn the clock back; the next append must still get a strictly later timestamp.
  clock.setValue(1);
  region.append(append);
  latest = region.get(new Get(row)).getColumnLatestCell(fam1, qual1);
  assertEquals(11L, latest.getTimestamp());
  // After two appends the cell holds qual1 twice in a row.
  byte[] expected = new byte[qual1.length * 2];
  System.arraycopy(qual1, 0, expected, 0, qual1.length);
  System.arraycopy(qual1, 0, expected, qual1.length, qual1.length);
  assertTrue(Bytes.equals(latest.getValueArray(), latest.getValueOffset(),
      latest.getValueLength(), expected, 0, expected.length));
}
/**
 * checkAndMutate must not move timestamps backwards: after the clock is turned back, the
 * mutated cell keeps the region's last timestamp (10) while carrying the new value.
 */
@Test
public void testCheckAndMutateTimestampsAreMonotonic() throws IOException {
  region = initHRegion(tableName, method, CONF, fam1);
  ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
  EnvironmentEdgeManager.injectEdge(edge);
  edge.setValue(10);
  Put p = new Put(row);
  p.setDurability(Durability.SKIP_WAL);
  p.addColumn(fam1, qual1, qual1);
  region.put(p);
  Result result = region.get(new Get(row));
  Cell c = result.getColumnLatestCell(fam1, qual1);
  assertNotNull(c);
  assertEquals(10L, c.getTimestamp());
  edge.setValue(1); // clock goes back
  p = new Put(row);
  p.setDurability(Durability.SKIP_WAL);
  p.addColumn(fam1, qual1, qual2);
  // Condition matches (cell still holds qual1), so the mutation is applied.
  region.checkAndMutate(row, fam1, qual1, CompareOperator.EQUAL, new BinaryComparator(qual1), p);
  result = region.get(new Get(row));
  c = result.getColumnLatestCell(fam1, qual1);
  assertEquals(10L, c.getTimestamp());
  assertTrue(Bytes.equals(c.getValueArray(), c.getValueOffset(), c.getValueLength(),
    qual2, 0, qual2.length));
}
/**
 * Batch mutations that straddle the region boundary ["a", "c"): in-range puts succeed while
 * the out-of-range one fails sanity checking. Also verifies that a batch blocked on a held
 * row lock eventually succeeds once the lock is released.
 */
@Test
public void testBatchMutateWithWrongRegionException() throws Exception {
  final byte[] a = Bytes.toBytes("a");
  final byte[] b = Bytes.toBytes("b");
  final byte[] c = Bytes.toBytes("c"); // exclusive
  int prevLockTimeout = CONF.getInt("hbase.rowlock.wait.duration", 30000);
  CONF.setInt("hbase.rowlock.wait.duration", 1000);
  region = initHRegion(tableName, a, c, method, CONF, false, fam1);
  Mutation[] mutations = new Mutation[] {
      new Put(a)
          .add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
            .setRow(a)
            .setFamily(fam1)
            .setTimestamp(HConstants.LATEST_TIMESTAMP)
            .setType(Cell.Type.Put)
            .build()),
      // this is outside the region boundary
      new Put(c).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
           .setRow(c)
           .setFamily(fam1)
           .setTimestamp(HConstants.LATEST_TIMESTAMP)
           .setType(Type.Put)
           .build()),
      new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
          .setRow(b)
          .setFamily(fam1)
          .setTimestamp(HConstants.LATEST_TIMESTAMP)
          .setType(Cell.Type.Put)
          .build())
  };
  OperationStatus[] status = region.batchMutate(mutations);
  assertEquals(OperationStatusCode.SUCCESS, status[0].getOperationStatusCode());
  assertEquals(OperationStatusCode.SANITY_CHECK_FAILURE, status[1].getOperationStatusCode());
  assertEquals(OperationStatusCode.SUCCESS, status[2].getOperationStatusCode());
  // test with a row lock held for a long time
  final CountDownLatch obtainedRowLock = new CountDownLatch(1);
  ExecutorService exec = Executors.newFixedThreadPool(2);
  try {
    Future<Void> f1 = exec.submit(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        LOG.info("Acquiring row lock");
        RowLock rl = region.getRowLock(b);
        obtainedRowLock.countDown();
        LOG.info("Waiting for 5 seconds before releasing lock");
        Threads.sleep(5000);
        LOG.info("Releasing row lock");
        rl.release();
        return null;
      }
    });
    obtainedRowLock.await(30, TimeUnit.SECONDS);
    Future<Void> f2 = exec.submit(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        Mutation[] mutations = new Mutation[] {
            new Put(a).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
                .setRow(a)
                .setFamily(fam1)
                .setTimestamp(HConstants.LATEST_TIMESTAMP)
                .setType(Cell.Type.Put)
                .build()),
            new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
                .setRow(b)
                .setFamily(fam1)
                .setTimestamp(HConstants.LATEST_TIMESTAMP)
                .setType(Cell.Type.Put)
                .build()),
        };
        // this will wait for the row lock, and it will eventually succeed
        OperationStatus[] status = region.batchMutate(mutations);
        assertEquals(OperationStatusCode.SUCCESS, status[0].getOperationStatusCode());
        assertEquals(OperationStatusCode.SUCCESS, status[1].getOperationStatusCode());
        return null;
      }
    });
    f1.get();
    f2.get();
  } finally {
    // Fix: the thread pool was previously leaked and the shared config was not restored
    // when an assertion failed above; do both unconditionally.
    exec.shutdownNow();
    CONF.setInt("hbase.rowlock.wait.duration", prevLockTimeout);
  }
}
/**
 * checkAndRowMutate must not move timestamps backwards: after the clock is turned back, the
 * mutated cell keeps the region's last timestamp (10) while carrying the new value.
 */
@Test
public void testCheckAndRowMutateTimestampsAreMonotonic() throws IOException {
  region = initHRegion(tableName, method, CONF, fam1);
  ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
  EnvironmentEdgeManager.injectEdge(edge);
  edge.setValue(10);
  Put p = new Put(row);
  p.setDurability(Durability.SKIP_WAL);
  p.addColumn(fam1, qual1, qual1);
  region.put(p);
  Result result = region.get(new Get(row));
  Cell c = result.getColumnLatestCell(fam1, qual1);
  assertNotNull(c);
  assertEquals(10L, c.getTimestamp());
  edge.setValue(1); // clock goes back
  p = new Put(row);
  p.setDurability(Durability.SKIP_WAL);
  p.addColumn(fam1, qual1, qual2);
  RowMutations rm = new RowMutations(row);
  rm.add(p);
  // Condition matches (cell still holds qual1), so the row mutation is applied.
  assertTrue(region.checkAndRowMutate(row, fam1, qual1, CompareOperator.EQUAL,
      new BinaryComparator(qual1), rm));
  result = region.get(new Get(row));
  c = result.getColumnLatestCell(fam1, qual1);
  assertEquals(10L, c.getTimestamp());
  LOG.info("c value " +
    Bytes.toStringBinary(c.getValueArray(), c.getValueOffset(), c.getValueLength()));
  assertTrue(Bytes.equals(c.getValueArray(), c.getValueOffset(), c.getValueLength(),
    qual2, 0, qual2.length));
}
/**
 * Convenience overload that initializes the test region with a fresh default configuration.
 * @param tableName table the region belongs to
 * @param callingMethod used to derive the region's test directory
 * @param families column families to create
 * @return the initialized {@link HRegion}
 * @throws IOException if region creation fails
 */
HRegion initHRegion(TableName tableName, String callingMethod,
    byte[]... families) throws IOException {
  return initHRegion(tableName, callingMethod, HBaseConfiguration.create(),
    families);
}
/**
 * HBASE-16429 Make sure no stuck if roll writer when ring buffer is filled with appends:
 * floods the WAL with concurrent puts while repeatedly rolling the writer, using a tiny
 * disruptor ring buffer (2 events) to force contention.
 * @throws IOException if IO error occurred during test
 */
@Test
public void testWritesWhileRollWriter() throws IOException {
  int testCount = 10;
  int numRows = 1024;
  int numFamilies = 2;
  int numQualifiers = 2;
  final byte[][] families = new byte[numFamilies][];
  for (int i = 0; i < numFamilies; i++) {
    families[i] = Bytes.toBytes("family" + i);
  }
  final byte[][] qualifiers = new byte[numQualifiers][];
  for (int i = 0; i < numQualifiers; i++) {
    qualifiers[i] = Bytes.toBytes("qual" + i);
  }
  // Shrink the WAL disruptor ring buffer so appends actually fill it during the test.
  CONF.setInt("hbase.regionserver.wal.disruptor.event.count", 2);
  this.region = initHRegion(tableName, method, CONF, families);
  try {
    List<Thread> threads = new ArrayList<>();
    for (int i = 0; i < numRows; i++) {
      final int count = i;
      Thread t = new Thread(new Runnable() {
        @Override
        public void run() {
          byte[] row = Bytes.toBytes("row" + count);
          Put put = new Put(row);
          put.setDurability(Durability.SYNC_WAL);
          byte[] value = Bytes.toBytes(String.valueOf(count));
          for (byte[] family : families) {
            for (byte[] qualifier : qualifiers) {
              put.addColumn(family, qualifier, count, value);
            }
          }
          try {
            region.put(put);
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        }
      });
      threads.add(t);
    }
    for (Thread t : threads) {
      t.start();
    }
    // Roll the writer repeatedly while the put threads are running.
    for (int i = 0; i < testCount; i++) {
      region.getWAL().rollWriter();
      Thread.yield();
    }
  } finally {
    try {
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      // Restore the default ring buffer size for subsequent tests.
      CONF.setInt("hbase.regionserver.wal.disruptor.event.count", 16 * 1024);
    } catch (DroppedSnapshotException dse) {
      // We could get this on way out because we interrupt the background flusher and it could
      // fail anywhere causing a DSE over in the background flusher... only it is not properly
      // dealt with so could still be memory hanging out when we get to here -- memory we can't
      // flush because the accounting is 'off' since original DSE.
    }
    this.region = null;
  }
}
/**
 * mutateRow must advance the write-request counter by exactly the number of mutations in
 * the batch.
 */
@Test
public void testMutateRow_WriteRequestCount() throws Exception {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] qf1 = Bytes.toBytes("qualifier");
  byte[] val1 = Bytes.toBytes("value1");
  Put put = new Put(row1);
  put.addColumn(fam1, qf1, val1);
  RowMutations mutations = new RowMutations(row1);
  mutations.add(put);
  this.region = initHRegion(tableName, method, CONF, fam1);
  long countBefore = this.region.writeRequestsCount.longValue();
  this.region.mutateRow(mutations);
  long countAfter = this.region.writeRequestsCount.longValue();
  Assert.assertEquals(countBefore + mutations.getMutations().size(), countAfter);
}
/**
 * When bulk-load replication is enabled in the configuration, opening a region must
 * auto-register the ReplicationObserver coprocessor on it.
 */
@Test
public void testBulkLoadReplicationEnabled() throws IOException {
  TEST_UTIL.getConfiguration().setBoolean(HConstants.REPLICATION_BULKLOAD_ENABLE_KEY, true);
  final ServerName serverName = ServerName.valueOf(name.getMethodName(), 100, 42);
  final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  htd.addFamily(new HColumnDescriptor(fam1));
  HRegionInfo hri = new HRegionInfo(htd.getTableName(),
    HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
  region = HRegion.openHRegion(hri, htd, rss.getWAL(hri), TEST_UTIL.getConfiguration(),
    rss, null);
  assertTrue(region.conf.getBoolean(HConstants.REPLICATION_BULKLOAD_ENABLE_KEY, false));
  // The observer must appear both in the coprocessor config and in the loaded host.
  String plugins = region.conf.get(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, "");
  String replicationCoprocessorClass = ReplicationObserver.class.getCanonicalName();
  assertTrue(plugins.contains(replicationCoprocessorClass));
  assertTrue(region.getCoprocessorHost().
    getCoprocessors().contains(ReplicationObserver.class.getSimpleName()));
}
/**
 * The same as HRegion class, the only difference is that instantiateHStore will
 * create a different HStore - HStoreForTesting. [HBASE-8518]
 */
public static class HRegionForTesting extends HRegion {
  /** Mirrors the corresponding HRegion constructor; delegates via an HRegionFileSystem. */
  public HRegionForTesting(final Path tableDir, final WAL wal, final FileSystem fs,
      final Configuration confParam, final RegionInfo regionInfo,
      final TableDescriptor htd, final RegionServerServices rsServices) {
    this(new HRegionFileSystem(confParam, fs, tableDir, regionInfo),
        wal, confParam, htd, rsServices);
  }
  public HRegionForTesting(HRegionFileSystem fs, WAL wal,
      Configuration confParam, TableDescriptor htd,
      RegionServerServices rsServices) {
    super(fs, wal, confParam, htd, rsServices);
  }
  /**
   * Create HStore instance.
   * @return If Mob is enabled, return HMobStore, otherwise return HStoreForTesting.
   */
  @Override
  protected HStore instantiateHStore(final ColumnFamilyDescriptor family, boolean warmup)
      throws IOException {
    if (family.isMobEnabled()) {
      // MOB stores carry cell tags, which need at least HFile v3.
      if (HFile.getFormatVersion(this.conf) < HFile.MIN_FORMAT_VERSION_WITH_TAGS) {
        throw new IOException("A minimum HFile version of " + HFile.MIN_FORMAT_VERSION_WITH_TAGS +
          " is required for MOB feature. Consider setting " + HFile.FORMAT_VERSION_KEY +
          " accordingly.");
      }
      return new HMobStore(this, family, this.conf, warmup);
    }
    return new HStoreForTesting(this, family, this.conf, warmup);
  }
}
/**
 * HStoreForTesting is merely the same as HStore, the difference is in the doCompaction method
 * of HStoreForTesting there is a checkpoint "hbase.hstore.compaction.complete" which
 * doesn't let hstore compaction complete. In the former edition, this config is set in
 * HStore class inside compact method, though this is just for testing, otherwise it
 * doesn't do any help. In HBASE-8518, we try to get rid of all "hbase.hstore.compaction.complete"
 * config (except for testing code).
 */
public static class HStoreForTesting extends HStore {
  protected HStoreForTesting(final HRegion region,
      final ColumnFamilyDescriptor family,
      final Configuration confParam, boolean warmup) throws IOException {
    super(region, family, confParam, warmup);
  }
  /**
   * When "hbase.hstore.compaction.complete" is false, stop after writing the new files:
   * open readers on them, close them again, and return without swapping them into the
   * store — leaving the compaction intentionally incomplete for the test to observe.
   */
  @Override
  protected List<HStoreFile> doCompaction(CompactionRequestImpl cr,
      Collection<HStoreFile> filesToCompact, User user, long compactionStartTime,
      List<Path> newFiles) throws IOException {
    // let compaction incomplete.
    if (!this.conf.getBoolean("hbase.hstore.compaction.complete", true)) {
      LOG.warn("hbase.hstore.compaction.complete is set to false");
      List<HStoreFile> sfs = new ArrayList<>(newFiles.size());
      final boolean evictOnClose =
          cacheConf != null? cacheConf.shouldEvictOnClose(): true;
      for (Path newFile : newFiles) {
        // Create storefile around what we wrote with a reader on it.
        HStoreFile sf = createStoreFileAndReader(newFile);
        sf.closeStoreFile(evictOnClose);
        sfs.add(sf);
      }
      return sfs;
    }
    return super.doCompaction(cr, filesToCompact, user, compactionStartTime, newFiles);
  }
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam3;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.math.BigDecimal;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Objects;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Cell.Type;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.MultithreadedTestUtil;
import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestThread;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.StartMiniClusterOption;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TagType;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.filter.BigDecimalComparator;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterBase;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.NullComparator;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueExcludeFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.regionserver.HRegion.MutationBatchOperation;
import org.apache.hadoop.hbase.regionserver.HRegion.RegionScannerImpl;
import org.apache.hadoop.hbase.regionserver.Region.RowLock;
import org.apache.hadoop.hbase.regionserver.TestHStore.FaultyFileSystem;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequestImpl;
import org.apache.hadoop.hbase.regionserver.wal.FSHLog;
import org.apache.hadoop.hbase.regionserver.wal.MetricsWALSource;
import org.apache.hadoop.hbase.regionserver.wal.WALUtil;
import org.apache.hadoop.hbase.replication.regionserver.ReplicationObserver;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.test.MetricsAssertHelper;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.VerySlowRegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.IncrementingEnvironmentEdge;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.FaultyFSLog;
import org.apache.hadoop.hbase.wal.NettyAsyncFSWALConfigHelper;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKeyImpl;
import org.apache.hadoop.hbase.wal.WALProvider;
import org.apache.hadoop.hbase.wal.WALProvider.Writer;
import org.apache.hadoop.hbase.wal.WALSplitUtil;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.ExpectedException;
import org.junit.rules.TestName;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
import org.apache.hbase.thirdparty.io.netty.channel.EventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup;
import org.apache.hbase.thirdparty.io.netty.channel.socket.nio.NioSocketChannel;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
/**
* Basic stand-alone testing of HRegion. No clusters!
*
* A lot of the meta information for an HRegion now lives inside other HRegions
* or in the HBaseMaster, so only basic testing is possible.
*/
@Category({VerySlowRegionServerTests.class, LargeTests.class})
@SuppressWarnings("deprecation")
public class TestHRegion {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHRegion.class);
// Do not spin up clusters in here. If you need to spin up a cluster, do it
// over in TestHRegionOnCluster.
private static final Logger LOG = LoggerFactory.getLogger(TestHRegion.class);
// JUnit rule exposing the current test method name; used to derive table names.
@Rule
public TestName name = new TestName();
@Rule public final ExpectedException thrown = ExpectedException.none();
// Default column family used by most tests in this class.
private static final String COLUMN_FAMILY = "MyCF";
private static final byte [] COLUMN_FAMILY_BYTES = Bytes.toBytes(COLUMN_FAMILY);
// Shared netty event loop; wired into CONF for async WAL providers in setup().
private static final EventLoopGroup GROUP = new NioEventLoopGroup();
// Region under test; closed (harmlessly even if already closed) in tearDown().
HRegion region = null;
// Do not run unit tests in parallel (? Why not? It don't work? Why not? St.Ack)
protected static HBaseTestingUtility TEST_UTIL;
public static Configuration CONF ;
// String form of the per-test data directory, set in setup().
private String dir;
private static FileSystem FILESYSTEM;
private final int MAX_VERSIONS = 2;
// Test names
protected TableName tableName;
protected String method;
// Common qualifier/value/row fixtures shared across tests.
protected final byte[] qual = Bytes.toBytes("qual");
protected final byte[] qual1 = Bytes.toBytes("qual1");
protected final byte[] qual2 = Bytes.toBytes("qual2");
protected final byte[] qual3 = Bytes.toBytes("qual3");
protected final byte[] value = Bytes.toBytes("value");
protected final byte[] value1 = Bytes.toBytes("value1");
protected final byte[] value2 = Bytes.toBytes("value2");
protected final byte[] row = Bytes.toBytes("rowA");
protected final byte[] row2 = Bytes.toBytes("rowB");
protected final MetricsAssertHelper metricsAssertHelper = CompatibilitySingletonFactory
    .getInstance(MetricsAssertHelper.class);
/**
 * Per-test setup: creates a fresh local testing utility, registers the shared
 * netty event loop for async WAL config, and derives the test table name from
 * the current test method name.
 * @throws IOException if test filesystem/directory setup fails
 */
@Before
public void setup() throws IOException {
  TEST_UTIL = HBaseTestingUtility.createLocalHTU();
  FILESYSTEM = TEST_UTIL.getTestFileSystem();
  CONF = TEST_UTIL.getConfiguration();
  NettyAsyncFSWALConfigHelper.setEventLoopConfig(CONF, GROUP, NioSocketChannel.class);
  dir = TEST_UTIL.getDataTestDir("TestHRegion").toString();
  method = name.getMethodName();
  tableName = TableName.valueOf(method);
  // NOTE(review): 0.09 lowers the in-memory flush threshold factor; presumably
  // to trigger in-memory flushes earlier in these tests — confirm.
  CONF.set(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, String.valueOf(0.09));
}
/**
 * Per-test cleanup: closes the region and its WAL (safe even if a test already
 * closed it), resets any injected environment edge, and wipes the test data dir.
 * @throws IOException if cleanup of the test directory fails
 */
@After
public void tearDown() throws IOException {
  // Region may have been closed, but it is still no harm if we close it again here using HTU.
  HBaseTestingUtility.closeRegionAndWAL(region);
  EnvironmentEdgeManagerTestHelper.reset();
  LOG.info("Cleaning test directory: " + TEST_UTIL.getDataTestDir());
  TEST_UTIL.cleanupTestDir();
}
/**
 * Verifies that the max flushed sequence id stays readable after the region is
 * closed, both for a never-flushed region and after a real flush.
 * @throws IOException
 */
@Test
public void testSequenceId() throws IOException {
  region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
  assertEquals(HConstants.NO_SEQNUM, region.getMaxFlushedSeqId());
  // Historical quirk: with no store files and no edits this reports 0, not NO_SEQNUM.
  assertEquals(0, (long) region.getMaxStoreSeqId().get(COLUMN_FAMILY_BYTES));
  HBaseTestingUtility.closeRegionAndWAL(this.region);
  // Both values must still be readable on the closed instance.
  assertEquals(HConstants.NO_SEQNUM, region.getMaxFlushedSeqId());
  assertEquals(0, (long) region.getMaxStoreSeqId().get(COLUMN_FAMILY_BYTES));
  // Reopen the region and write a single cell into the family.
  region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
  byte[] payload = Bytes.toBytes(method);
  Put p = new Put(payload);
  p.addColumn(COLUMN_FAMILY_BYTES, null, payload);
  region.put(p);
  // Nothing has been flushed yet, so the initial values are unchanged.
  assertEquals(HConstants.NO_SEQNUM, region.getMaxFlushedSeqId());
  assertEquals(0, (long) region.getMaxStoreSeqId().get(COLUMN_FAMILY_BYTES));
  region.flush(true);
  long flushedSeqId = region.getMaxFlushedSeqId();
  HBaseTestingUtility.closeRegionAndWAL(this.region);
  // The flushed sequence id survives the close.
  assertEquals(flushedSeqId, region.getMaxFlushedSeqId());
  this.region = null;
}
/**
 * Test for Bug 2 of HBASE-10466: region close must always perform both flushes
 * so that data sitting in an outstanding memstore snapshot (left by a failed or
 * pending flush) AND data in the active memstore are persisted; skipping the
 * pre-flush could lose the active memstore contents.
 * @throws IOException
 */
@Test
public void testCloseCarryingSnapshot() throws IOException {
  region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
  HStore store = region.getStore(COLUMN_FAMILY_BYTES);
  byte[] payload = Bytes.toBytes(method);
  Put p = new Put(payload);
  p.addColumn(COLUMN_FAMILY_BYTES, null, payload);
  // First cell lands in the active memstore, then is moved into a snapshot by
  // the prepare() step of a flush that never completes.
  region.put(p);
  StoreFlushContext flushCtx = store.createFlushContext(12345, FlushLifeCycleTracker.DUMMY);
  flushCtx.prepare();
  // Second write goes into the fresh active memstore.
  p.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), payload);
  region.put(p);
  // Close with data both in the snapshot and in the active memstore; everything
  // must be drained.
  HBaseTestingUtility.closeRegionAndWAL(region);
  assertEquals(0, region.getMemStoreDataSize());
  region = null;
}
/*
 * This test is for verifying memstore snapshot size is correctly updated in case of rollback
 * See HBASE-10845
 */
@Test
public void testMemstoreSnapshotSize() throws IOException {
  // WAL whose sync() takes a store flush snapshot (prepare()) before delegating,
  // so the failing sync below leaves a snapshot outstanding while the put rolls back.
  class MyFaultyFSLog extends FaultyFSLog {
    StoreFlushContext storeFlushCtx;
    public MyFaultyFSLog(FileSystem fs, Path rootDir, String logName, Configuration conf)
        throws IOException {
      super(fs, rootDir, logName, conf);
    }
    void setStoreFlushCtx(StoreFlushContext storeFlushCtx) {
      this.storeFlushCtx = storeFlushCtx;
    }
    @Override
    public void sync(long txid) throws IOException {
      // Snapshot the store first; super.sync() may then fail (FailureType.SYNC).
      storeFlushCtx.prepare();
      super.sync(txid);
    }
  }
  FileSystem fs = FileSystem.get(CONF);
  Path rootDir = new Path(dir + "testMemstoreSnapshotSize");
  MyFaultyFSLog faultyLog = new MyFaultyFSLog(fs, rootDir, "testMemstoreSnapshotSize", CONF);
  region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, faultyLog,
      COLUMN_FAMILY_BYTES);
  HStore store = region.getStore(COLUMN_FAMILY_BYTES);
  // Get some random bytes.
  byte [] value = Bytes.toBytes(method);
  faultyLog.setStoreFlushCtx(store.createFlushContext(12345, FlushLifeCycleTracker.DUMMY));
  Put put = new Put(value);
  put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
  // Make every WAL sync throw so the put below fails and must be rolled back.
  faultyLog.setFailureType(FaultyFSLog.FailureType.SYNC);
  boolean threwIOE = false;
  try {
    region.put(put);
  } catch (IOException ioe) {
    threwIOE = true;
  } finally {
    assertTrue("The regionserver should have thrown an exception", threwIOE);
  }
  // After rollback, the flushable size (which includes the snapshot) must be zero.
  MemStoreSize mss = store.getFlushableSize();
  assertTrue("flushable size should be zero, but it is " + mss,
      mss.getDataSize() == 0);
}
/**
 * Build a WAL without going through
 * {@link HBaseTestingUtility#createWal(Configuration, Path, RegionInfo)}, which
 * does not play nicely with FaultyFileSystem. Must be invoked BEFORE the test
 * overrides {@code fs.file.impl}.
 * @param callingMethod a unique component for the path, probably the name of the test method.
 */
private static WAL createWALCompatibleWithFaultyFileSystem(String callingMethod,
    Configuration conf, TableName tableName) throws IOException {
  // Dedicated conf whose root dir points at a per-test log directory.
  Configuration dedicatedWalConf = new Configuration(conf);
  FSUtils.setRootDir(dedicatedWalConf, TEST_UTIL.getDataTestDirOnTestFS(callingMethod + ".log"));
  WALFactory factory = new WALFactory(dedicatedWalConf, callingMethod);
  return factory.getWAL(RegionInfoBuilder.newBuilder(tableName).build());
}
/**
 * Verifies that when a coprocessor's postBatchMutate throws, the failed put's
 * memstore contribution is still accounted for (the edit was applied before the
 * hook ran), so both the region data size and the store's flushable size grow.
 */
@Test
public void testMemstoreSizeAccountingWithFailedPostBatchMutate() throws IOException {
  String testName = "testMemstoreSizeAccountingWithFailedPostBatchMutate";
  FileSystem fs = FileSystem.get(CONF);
  Path rootDir = new Path(dir + testName);
  ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
  FSHLog hLog = new FSHLog(fs, rootDir, testName, CONF);
  hLog.init();
  // NOTE(review): this local shadows the `region` field, so tearDown() will not
  // close this particular region — confirm that is intended.
  HRegion region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, hLog,
      COLUMN_FAMILY_BYTES);
  HStore store = region.getStore(COLUMN_FAMILY_BYTES);
  assertEquals(0, region.getMemStoreDataSize());
  // Put one value
  byte [] value = Bytes.toBytes(method);
  Put put = new Put(value);
  put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("abc"), value);
  region.put(put);
  // Baseline: size of a single successful put.
  long onePutSize = region.getMemStoreDataSize();
  assertTrue(onePutSize > 0);
  // Coprocessor host that fails every postBatchMutate call.
  RegionCoprocessorHost mockedCPHost = Mockito.mock(RegionCoprocessorHost.class);
  doThrow(new IOException())
      .when(mockedCPHost).postBatchMutate(Mockito.<MiniBatchOperationInProgress<Mutation>>any());
  region.setCoprocessorHost(mockedCPHost);
  put = new Put(value);
  put.addColumn(COLUMN_FAMILY_BYTES, Bytes.toBytes("dfg"), value);
  try {
    region.put(put);
    fail("Should have failed with IOException");
  } catch (IOException expected) {
  }
  // The failed put's data must still be accounted in both counters.
  long expectedSize = onePutSize * 2;
  assertEquals("memstoreSize should be incremented",
      expectedSize, region.getMemStoreDataSize());
  assertEquals("flushable size should be incremented",
      expectedSize, store.getFlushableSize().getDataSize());
  region.setCoprocessorHost(null);
}
/**
 * A test case of HBASE-21041
 * @throws Exception Exception
 */
@Test
public void testFlushAndMemstoreSizeCounting() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  // NOTE(review): this WALFactory is created and closed but never otherwise used
  // in this test; looks removable — confirm before cleaning up.
  final WALFactory wals = new WALFactory(CONF, method);
  try {
    // Load a fixed set of rows so there is something to flush.
    for (byte[] row : HBaseTestingUtility.ROWS) {
      Put put = new Put(row);
      put.addColumn(family, family, row);
      region.put(put);
    }
    region.flush(true);
    // After flush, data size should be zero
    assertEquals(0, region.getMemStoreDataSize());
    // After flush, a new active mutable segment is created, so the heap size
    // should equal to MutableSegment.DEEP_OVERHEAD
    assertEquals(MutableSegment.DEEP_OVERHEAD, region.getMemStoreHeapSize());
    // After flush, offheap should be zero
    assertEquals(0, region.getMemStoreOffHeapSize());
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
  }
}
/**
 * Test we do not lose data if we fail a flush and then close.
 * Part of HBase-10466. Tests the following from the issue description:
 * "Bug 1: Wrong calculation of HRegion.memstoreSize: When a flush fails, data to be flushed is
 * kept in each MemStore's snapshot and wait for next flush attempt to continue on it. But when
 * the next flush succeeds, the counter of total memstore size in HRegion is always deduced by
 * the sum of current memstore sizes instead of snapshots left from previous failed flush. This
 * calculation is problematic that almost every time there is failed flush, HRegion.memstoreSize
 * gets reduced by a wrong value. If region flush could not proceed for a couple cycles, the size
 * in current memstore could be much larger than the snapshot. It's likely to drift memstoreSize
 * much smaller than expected. In extreme case, if the error accumulates to even bigger than
 * HRegion's memstore size limit, any further flush is skipped because flush does not do anything
 * if memstoreSize is not larger than 0."
 * @throws Exception
 */
@Test
public void testFlushSizeAccounting() throws Exception {
  final Configuration conf = HBaseConfiguration.create(CONF);
  // WAL must be created before fs.file.impl is overridden below.
  final WAL wal = createWALCompatibleWithFaultyFileSystem(method, conf, tableName);
  // Only retry once.
  conf.setInt("hbase.hstore.flush.retries.number", 1);
  final User user =
      User.createUserForTesting(conf, method, new String[]{"foo"});
  // Inject our faulty LocalFileSystem
  conf.setClass("fs.file.impl", FaultyFileSystem.class, FileSystem.class);
  // Run as a dedicated test user; its cached FileSystem instances are closed at the end.
  user.runAs(new PrivilegedExceptionAction<Object>() {
    @Override
    public Object run() throws Exception {
      // Make sure it worked (above is sensitive to caching details in hadoop core)
      FileSystem fs = FileSystem.get(conf);
      Assert.assertEquals(FaultyFileSystem.class, fs.getClass());
      FaultyFileSystem ffs = (FaultyFileSystem)fs;
      HRegion region = null;
      try {
        // Initialize region
        region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, wal,
            COLUMN_FAMILY_BYTES);
        long size = region.getMemStoreDataSize();
        Assert.assertEquals(0, size);
        // Put one item into memstore. Measure the size of one item in memstore.
        Put p1 = new Put(row);
        p1.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual1, 1, (byte[]) null));
        region.put(p1);
        final long sizeOfOnePut = region.getMemStoreDataSize();
        // Fail a flush which means the current memstore will hang out as memstore 'snapshot'.
        try {
          LOG.info("Flushing");
          region.flush(true);
          Assert.fail("Didn't bubble up IOE!");
        } catch (DroppedSnapshotException dse) {
          // What we are expecting
          region.closing.set(false); // this is needed for the rest of the test to work
        }
        // Make it so all writes succeed from here on out
        ffs.fault.set(false);
        // Check sizes. Should still be the one entry.
        Assert.assertEquals(sizeOfOnePut, region.getMemStoreDataSize());
        // Now add two entries so that on this next flush that fails, we can see if we
        // subtract the right amount, the snapshot size only.
        Put p2 = new Put(row);
        p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual2, 2, (byte[])null));
        p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual3, 3, (byte[])null));
        region.put(p2);
        long expectedSize = sizeOfOnePut * 3;
        Assert.assertEquals(expectedSize, region.getMemStoreDataSize());
        // Do a successful flush. It will clear the snapshot only. Thats how flushes work.
        // If already a snapshot, we clear it else we move the memstore to be snapshot and flush
        // it
        region.flush(true);
        // Make sure our memory accounting is right.
        Assert.assertEquals(sizeOfOnePut * 2, region.getMemStoreDataSize());
      } finally {
        HBaseTestingUtility.closeRegionAndWAL(region);
      }
      return null;
    }
  });
  FileSystem.closeAllForUGI(user.getUGI());
}
/**
 * Verifies that closing a region whose flush fails (faulty filesystem plus an
 * outstanding prepared snapshot) surfaces DroppedSnapshotException rather than
 * silently losing data, and that the region can still be closed once writes
 * succeed again.
 */
@Test
public void testCloseWithFailingFlush() throws Exception {
  final Configuration conf = HBaseConfiguration.create(CONF);
  // WAL must be created before fs.file.impl is overridden below.
  final WAL wal = createWALCompatibleWithFaultyFileSystem(method, conf, tableName);
  // Only retry once.
  conf.setInt("hbase.hstore.flush.retries.number", 1);
  final User user =
      User.createUserForTesting(conf, this.method, new String[]{"foo"});
  // Inject our faulty LocalFileSystem
  conf.setClass("fs.file.impl", FaultyFileSystem.class, FileSystem.class);
  user.runAs(new PrivilegedExceptionAction<Object>() {
    @Override
    public Object run() throws Exception {
      // Make sure it worked (above is sensitive to caching details in hadoop core)
      FileSystem fs = FileSystem.get(conf);
      Assert.assertEquals(FaultyFileSystem.class, fs.getClass());
      FaultyFileSystem ffs = (FaultyFileSystem)fs;
      HRegion region = null;
      try {
        // Initialize region
        region = initHRegion(tableName, null, null, false,
            Durability.SYNC_WAL, wal, COLUMN_FAMILY_BYTES);
        long size = region.getMemStoreDataSize();
        Assert.assertEquals(0, size);
        // Put one item into memstore. Measure the size of one item in memstore.
        Put p1 = new Put(row);
        p1.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual1, 1, (byte[])null));
        region.put(p1);
        // Manufacture an outstanding snapshot -- fake a failed flush by doing prepare step only.
        HStore store = region.getStore(COLUMN_FAMILY_BYTES);
        StoreFlushContext storeFlushCtx =
            store.createFlushContext(12345, FlushLifeCycleTracker.DUMMY);
        storeFlushCtx.prepare();
        // Now add two entries to the foreground memstore.
        Put p2 = new Put(row);
        p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual2, 2, (byte[])null));
        p2.add(new KeyValue(row, COLUMN_FAMILY_BYTES, qual3, 3, (byte[])null));
        region.put(p2);
        // Now try close on top of a failing flush.
        HBaseTestingUtility.closeRegionAndWAL(region);
        region = null;
        fail();
      } catch (DroppedSnapshotException dse) {
        // Expected
        LOG.info("Expected DroppedSnapshotException");
      } finally {
        // Make it so all writes succeed from here on out so can close clean
        ffs.fault.set(false);
        HBaseTestingUtility.closeRegionAndWAL(region);
      }
      return null;
    }
  });
  FileSystem.closeAllForUGI(user.getUGI());
}
/**
 * Verifies that a major compaction does not disturb scanners opened before it:
 * each scanner keeps seeing data as of its own read point (scanner1 opened
 * before the delete sees one row; scanner2 and scanner3 see none).
 * Fix over the original: the three RegionScanners are now closed explicitly
 * instead of being leaked.
 */
@Test
public void testCompactionAffectedByScanners() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  Put put = new Put(Bytes.toBytes("r1"));
  put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
  region.put(put);
  region.flush(true);
  Scan scan = new Scan();
  scan.setMaxVersions(3);
  // open the first scanner (before the delete below)
  RegionScanner scanner1 = region.getScanner(scan);
  Delete delete = new Delete(Bytes.toBytes("r1"));
  region.delete(delete);
  region.flush(true);
  // open the second scanner (after the delete)
  RegionScanner scanner2 = region.getScanner(scan);
  List<Cell> results = new ArrayList<>();
  System.out.println("Smallest read point:" + region.getSmallestReadPoint());
  // make a major compaction
  region.compact(true);
  // open the third scanner (after the compaction)
  RegionScanner scanner3 = region.getScanner(scan);
  // get data from scanner 1, 2, 3 after major compaction
  scanner1.next(results);
  System.out.println(results);
  assertEquals(1, results.size());
  results.clear();
  scanner2.next(results);
  System.out.println(results);
  assertEquals(0, results.size());
  results.clear();
  scanner3.next(results);
  System.out.println(results);
  assertEquals(0, results.size());
  // Release scanner resources explicitly rather than leaking them.
  scanner1.close();
  scanner2.close();
  scanner3.close();
}
/**
 * Regression test: reseeking an already-open RegionScanner across a major
 * compaction must not NPE, and the reseek must land on the requested row.
 */
@Test
public void testToShowNPEOnRegionScannerReseek() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  // Two rows with the same column so the scanner has something to reseek over.
  for (String rowKey : new String[] { "r1", "r2" }) {
    Put p = new Put(Bytes.toBytes(rowKey));
    p.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
    region.put(p);
  }
  region.flush(true);
  Scan scan = new Scan();
  scan.setMaxVersions(3);
  // open the scanner before compacting
  RegionScanner scanner = region.getScanner(scan);
  System.out.println("Smallest read point:" + region.getSmallestReadPoint());
  region.compact(true);
  // Reseek past the compaction; must not throw and must position on "r2".
  scanner.reseek(Bytes.toBytes("r2"));
  List<Cell> cells = new ArrayList<>();
  scanner.next(cells);
  Cell firstCell = cells.get(0);
  assertEquals(0, Bytes.compareTo(CellUtil.cloneRow(firstCell), Bytes.toBytes("r2")));
  scanner.close();
}
/**
 * Writes a series of recovered-edits files and verifies that replay applies
 * every edit above the stores' max sequence id and reports the highest one.
 * Fix over the original: the FSDataOutputStream returned by fs.create() was
 * never closed (stream leak); it is now closed immediately.
 */
@Test
public void testSkipRecoveredEditsReplay() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  final WALFactory wals = new WALFactory(CONF, method);
  try {
    Path regiondir = region.getRegionFileSystem().getRegionDir();
    FileSystem fs = region.getRegionFileSystem().getFileSystem();
    byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
    Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
    long maxSeqId = 1050;
    long minSeqId = 1000;
    // One recovered-edits file per sequence id step, each holding a single edit.
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
      // Close the pre-created stream right away so it does not leak; the
      // recovered-edits writer below re-creates the file anyway.
      fs.create(recoveredEdits).close();
      WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
      long time = System.nanoTime();
      WALEdit edit = new WALEdit();
      edit.add(new KeyValue(row, family, Bytes.toBytes(i), time, KeyValue.Type.Put, Bytes
          .toBytes(i)));
      writer.append(new WAL.Entry(new WALKeyImpl(regionName, tableName, i, time,
          HConstants.DEFAULT_CLUSTER_ID), edit));
      writer.close();
    }
    MonitoredTask status = TaskMonitor.get().createStatus(method);
    Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (HStore store : region.getStores()) {
      maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), minSeqId - 1);
    }
    // All files are above the stores' max seq id, so every edit must be replayed.
    long seqId = region.replayRecoveredEditsIfAny(maxSeqIdInStores, null, status);
    assertEquals(maxSeqId, seqId);
    region.getMVCC().advanceTo(seqId);
    Get get = new Get(row);
    Result result = region.get(get);
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      List<Cell> kvs = result.getColumnCells(family, Bytes.toBytes(i));
      assertEquals(1, kvs.size());
      assertArrayEquals(Bytes.toBytes(i), CellUtil.cloneValue(kvs.get(0)));
    }
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
  }
}
/**
 * Same as testSkipRecoveredEditsReplay, but the stores' max sequence id is set
 * so that edits below recoverSeqId must be skipped while the rest are applied.
 * Fix over the original: the FSDataOutputStream returned by fs.create() was
 * never closed (stream leak); it is now closed immediately.
 */
@Test
public void testSkipRecoveredEditsReplaySomeIgnored() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  final WALFactory wals = new WALFactory(CONF, method);
  try {
    Path regiondir = region.getRegionFileSystem().getRegionDir();
    FileSystem fs = region.getRegionFileSystem().getFileSystem();
    byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
    Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
    long maxSeqId = 1050;
    long minSeqId = 1000;
    // One recovered-edits file per sequence id step, each holding a single edit.
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
      // Close the pre-created stream right away so it does not leak; the
      // recovered-edits writer below re-creates the file anyway.
      fs.create(recoveredEdits).close();
      WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
      long time = System.nanoTime();
      WALEdit edit = new WALEdit();
      edit.add(new KeyValue(row, family, Bytes.toBytes(i), time, KeyValue.Type.Put, Bytes
          .toBytes(i)));
      writer.append(new WAL.Entry(new WALKeyImpl(regionName, tableName, i, time,
          HConstants.DEFAULT_CLUSTER_ID), edit));
      writer.close();
    }
    long recoverSeqId = 1030;
    MonitoredTask status = TaskMonitor.get().createStatus(method);
    Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    for (HStore store : region.getStores()) {
      maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), recoverSeqId - 1);
    }
    long seqId = region.replayRecoveredEditsIfAny(maxSeqIdInStores, null, status);
    assertEquals(maxSeqId, seqId);
    region.getMVCC().advanceTo(seqId);
    Get get = new Get(row);
    Result result = region.get(get);
    // Edits below recoverSeqId must have been skipped; the rest applied.
    for (long i = minSeqId; i <= maxSeqId; i += 10) {
      List<Cell> kvs = result.getColumnCells(family, Bytes.toBytes(i));
      if (i < recoverSeqId) {
        assertEquals(0, kvs.size());
      } else {
        assertEquals(1, kvs.size());
        assertArrayEquals(Bytes.toBytes(i), CellUtil.cloneValue(kvs.get(0)));
      }
    }
  } finally {
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    this.region = null;
    wals.close();
  }
}
/**
 * Replay must ignore recovered-edits files that contain no valid WAL entries
 * and files named below the stores' max sequence id, returning the stores' own
 * minimum sequence id unchanged.
 */
@Test
public void testSkipRecoveredEditsReplayAllIgnored() throws Exception {
  byte[] family = Bytes.toBytes("family");
  this.region = initHRegion(tableName, method, CONF, family);
  Path regiondir = region.getRegionFileSystem().getRegionDir();
  FileSystem fs = region.getRegionFileSystem().getFileSystem();
  Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
  // Drop a handful of bogus (non-WAL-format) edit files; replay must skip them all.
  for (int i = 1000; i < 1050; i += 10) {
    Path bogusEdits = new Path(recoveredEditsDir, String.format("%019d", i));
    FSDataOutputStream out = fs.create(bogusEdits);
    out.writeInt(i);
    out.close();
  }
  long minSeqId = 2000;
  // An empty edits file named just below the stores' sequence id is ignored too.
  Path staleEdits = new Path(recoveredEditsDir, String.format("%019d", minSeqId - 1));
  fs.create(staleEdits).close();
  Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  for (HStore store : region.getStores()) {
    maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), minSeqId);
  }
  long seqId = region.replayRecoveredEditsIfAny(maxSeqIdInStores, null, null);
  assertEquals(minSeqId, seqId);
}
  /**
   * Replays recovered edits where the last (highest seqId) entry is a compaction
   * marker rather than a data edit: replay must still report {@code maxSeqId} and
   * must flush the replayed puts into exactly one store file.
   */
  @Test
  public void testSkipRecoveredEditsReplayTheLastFileIgnored() throws Exception {
    byte[] family = Bytes.toBytes("family");
    this.region = initHRegion(tableName, method, CONF, family);
    final WALFactory wals = new WALFactory(CONF, method);
    try {
      Path regiondir = region.getRegionFileSystem().getRegionDir();
      FileSystem fs = region.getRegionFileSystem().getFileSystem();
      byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
      byte[][] columns = region.getTableDescriptor().getColumnFamilyNames().toArray(new byte[0][]);
      assertEquals(0, region.getStoreFileList(columns).size());
      Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
      long maxSeqId = 1050;
      long minSeqId = 1000;
      // One recovered-edits file per seqId (1000..1050 step 10); the final file holds a
      // compaction marker instead of a put.
      for (long i = minSeqId; i <= maxSeqId; i += 10) {
        Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", i));
        fs.create(recoveredEdits);
        WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
        long time = System.nanoTime();
        WALEdit edit = null;
        if (i == maxSeqId) {
          // NOTE(review): familyName and encodedRegionName are both filled with the
          // encoded region name here — presumably the marker's exact content is
          // irrelevant to this test; confirm before reusing this pattern.
          edit = WALEdit.createCompaction(region.getRegionInfo(),
              CompactionDescriptor.newBuilder()
                  .setTableName(ByteString.copyFrom(tableName.getName()))
                  .setFamilyName(ByteString.copyFrom(regionName))
                  .setEncodedRegionName(ByteString.copyFrom(regionName))
                  .setStoreHomeDirBytes(ByteString.copyFrom(Bytes.toBytes(regiondir.toString())))
                  .setRegionName(ByteString.copyFrom(region.getRegionInfo().getRegionName()))
                  .build());
        } else {
          edit = new WALEdit();
          edit.add(new KeyValue(row, family, Bytes.toBytes(i), time, KeyValue.Type.Put, Bytes
              .toBytes(i)));
        }
        writer.append(new WAL.Entry(new WALKeyImpl(regionName, tableName, i, time,
            HConstants.DEFAULT_CLUSTER_ID), edit));
        writer.close();
      }
      long recoverSeqId = 1030;
      Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
      MonitoredTask status = TaskMonitor.get().createStatus(method);
      // Stores report recoverSeqId - 1 as already persisted, so older edits are skipped.
      for (HStore store : region.getStores()) {
        maxSeqIdInStores.put(Bytes.toBytes(store.getColumnFamilyName()), recoverSeqId - 1);
      }
      long seqId = region.replayRecoveredEditsIfAny(maxSeqIdInStores, null, status);
      assertEquals(maxSeqId, seqId);
      // assert that the files are flushed
      assertEquals(1, region.getStoreFileList(columns).size());
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      this.region = null;
      wals.close();
    }
  }
@Test
public void testRecoveredEditsReplayCompaction() throws Exception {
testRecoveredEditsReplayCompaction(false);
testRecoveredEditsReplayCompaction(true);
}
  /**
   * Writes a compaction marker (optionally carrying a mismatched encoded region name)
   * into recovered edits, reopens the region, and verifies replay completed the
   * interrupted compaction: only the compacted file remains (when names match) and
   * every original row is still readable.
   *
   * @param mismatchedRegionName when true, the marker's encoded region name is
   *          corrupted; reopening must still not throw WrongRegionException.
   */
  public void testRecoveredEditsReplayCompaction(boolean mismatchedRegionName) throws Exception {
    CONF.setClass(HConstants.REGION_IMPL, HRegionForTesting.class, Region.class);
    byte[] family = Bytes.toBytes("family");
    this.region = initHRegion(tableName, method, CONF, family);
    final WALFactory wals = new WALFactory(CONF, method);
    try {
      Path regiondir = region.getRegionFileSystem().getRegionDir();
      FileSystem fs = region.getRegionFileSystem().getFileSystem();
      byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
      long maxSeqId = 3;
      long minSeqId = 0;
      // Flush after every put so the region ends up with 3 store files to compact.
      for (long i = minSeqId; i < maxSeqId; i++) {
        Put put = new Put(Bytes.toBytes(i));
        put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
        region.put(put);
        region.flush(true);
      }
      // this will create a region with 3 files
      assertEquals(3, region.getStore(family).getStorefilesCount());
      List<Path> storeFiles = new ArrayList<>(3);
      for (HStoreFile sf : region.getStore(family).getStorefiles()) {
        storeFiles.add(sf.getPath());
      }
      // disable compaction completion
      CONF.setBoolean("hbase.hstore.compaction.complete", false);
      region.compactStores();
      // ensure that nothing changed
      assertEquals(3, region.getStore(family).getStorefilesCount());
      // now find the compacted file, and manually add it to the recovered edits
      Path tmpDir = new Path(region.getRegionFileSystem().getTempDir(), Bytes.toString(family));
      FileStatus[] files = FSUtils.listStatus(fs, tmpDir);
      String errorMsg = "Expected to find 1 file in the region temp directory "
          + "from the compaction, could not find any";
      assertNotNull(errorMsg, files);
      assertEquals(errorMsg, 1, files.length);
      // move the file inside region dir
      Path newFile = region.getRegionFileSystem().commitStoreFile(Bytes.toString(family),
          files[0].getPath());
      byte[] encodedNameAsBytes = this.region.getRegionInfo().getEncodedNameAsBytes();
      byte[] fakeEncodedNameAsBytes = new byte [encodedNameAsBytes.length];
      for (int i=0; i < encodedNameAsBytes.length; i++) {
        // Mix the byte array to have a new encodedName
        fakeEncodedNameAsBytes[i] = (byte) (encodedNameAsBytes[i] + 1);
      }
      CompactionDescriptor compactionDescriptor = ProtobufUtil.toCompactionDescriptor(this.region
        .getRegionInfo(), mismatchedRegionName ? fakeEncodedNameAsBytes : null, family,
          storeFiles, Lists.newArrayList(newFile),
          region.getRegionFileSystem().getStoreDir(Bytes.toString(family)));
      WALUtil.writeCompactionMarker(region.getWAL(), this.region.getReplicationScope(),
          this.region.getRegionInfo(), compactionDescriptor, region.getMVCC());
      // Duplicate the marker into recovered edits so region open replays it.
      Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
      Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", 1000));
      fs.create(recoveredEdits);
      WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
      long time = System.nanoTime();
      writer.append(new WAL.Entry(new WALKeyImpl(regionName, tableName, 10, time,
          HConstants.DEFAULT_CLUSTER_ID), WALEdit.createCompaction(region.getRegionInfo(),
          compactionDescriptor)));
      writer.close();
      // close the region now, and reopen again
      region.getTableDescriptor();
      region.getRegionInfo();
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      try {
        region = HRegion.openHRegion(region, null);
      } catch (WrongRegionException wre) {
        fail("Matching encoded region name should not have produced WrongRegionException");
      }
      // now check whether we have only one store file, the compacted one
      Collection<HStoreFile> sfs = region.getStore(family).getStorefiles();
      for (HStoreFile sf : sfs) {
        LOG.info(Objects.toString(sf.getPath()));
      }
      if (!mismatchedRegionName) {
        assertEquals(1, region.getStore(family).getStorefilesCount());
      }
      files = FSUtils.listStatus(fs, tmpDir);
      assertTrue("Expected to find 0 files inside " + tmpDir, files == null || files.length == 0);
      // Every original row must still be readable after the marker replay.
      for (long i = minSeqId; i < maxSeqId; i++) {
        Get get = new Get(Bytes.toBytes(i));
        Result result = region.get(get);
        byte[] value = result.getValue(family, Bytes.toBytes(i));
        assertArrayEquals(Bytes.toBytes(i), value);
      }
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      this.region = null;
      wals.close();
      // Restore the default region implementation for subsequent tests.
      CONF.setClass(HConstants.REGION_IMPL, HRegion.class, Region.class);
    }
  }
  /**
   * Verifies that each flush writes a START_FLUSH/COMMIT_FLUSH marker pair to the WAL
   * with the expected table, region, store, and output-file contents, and that
   * replaying those markers from recovered edits leaves the region's data readable.
   */
  @Test
  public void testFlushMarkers() throws Exception {
    // tests that flush markers are written to WAL and handled at recovered edits
    byte[] family = Bytes.toBytes("family");
    Path logDir = TEST_UTIL.getDataTestDirOnTestFS(method + ".log");
    final Configuration walConf = new Configuration(TEST_UTIL.getConfiguration());
    FSUtils.setRootDir(walConf, logDir);
    final WALFactory wals = new WALFactory(walConf, method);
    final WAL wal = wals.getWAL(RegionInfoBuilder.newBuilder(tableName).build());
    this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
      HConstants.EMPTY_END_ROW, false, Durability.USE_DEFAULT, wal, family);
    try {
      Path regiondir = region.getRegionFileSystem().getRegionDir();
      FileSystem fs = region.getRegionFileSystem().getFileSystem();
      byte[] regionName = region.getRegionInfo().getEncodedNameAsBytes();
      long maxSeqId = 3;
      long minSeqId = 0;
      // Three put+flush cycles -> three store files and three marker pairs in the WAL.
      for (long i = minSeqId; i < maxSeqId; i++) {
        Put put = new Put(Bytes.toBytes(i));
        put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
        region.put(put);
        region.flush(true);
      }
      // this will create a region with 3 files from flush
      assertEquals(3, region.getStore(family).getStorefilesCount());
      List<String> storeFiles = new ArrayList<>(3);
      for (HStoreFile sf : region.getStore(family).getStorefiles()) {
        storeFiles.add(sf.getPath().getName());
      }
      // now verify that the flush markers are written
      wal.shutdown();
      WAL.Reader reader = WALFactory.createReader(fs, AbstractFSWALProvider.getCurrentFileName(wal),
        TEST_UTIL.getConfiguration());
      try {
        List<WAL.Entry> flushDescriptors = new ArrayList<>();
        long lastFlushSeqId = -1;
        while (true) {
          WAL.Entry entry = reader.next();
          if (entry == null) {
            break;
          }
          Cell cell = entry.getEdit().getCells().get(0);
          if (WALEdit.isMetaEditFamily(cell)) {
            FlushDescriptor flushDesc = WALEdit.getFlushDescriptor(cell);
            assertNotNull(flushDesc);
            assertArrayEquals(tableName.getName(), flushDesc.getTableName().toByteArray());
            // START_FLUSH advances the flush seqId; its COMMIT_FLUSH repeats it.
            if (flushDesc.getAction() == FlushAction.START_FLUSH) {
              assertTrue(flushDesc.getFlushSequenceNumber() > lastFlushSeqId);
            } else if (flushDesc.getAction() == FlushAction.COMMIT_FLUSH) {
              assertTrue(flushDesc.getFlushSequenceNumber() == lastFlushSeqId);
            }
            lastFlushSeqId = flushDesc.getFlushSequenceNumber();
            assertArrayEquals(regionName, flushDesc.getEncodedRegionName().toByteArray());
            assertEquals(1, flushDesc.getStoreFlushesCount()); //only one store
            StoreFlushDescriptor storeFlushDesc = flushDesc.getStoreFlushes(0);
            assertArrayEquals(family, storeFlushDesc.getFamilyName().toByteArray());
            assertEquals("family", storeFlushDesc.getStoreHomeDir());
            // START_FLUSH has no outputs yet; COMMIT_FLUSH names the flushed file.
            if (flushDesc.getAction() == FlushAction.START_FLUSH) {
              assertEquals(0, storeFlushDesc.getFlushOutputCount());
            } else {
              assertEquals(1, storeFlushDesc.getFlushOutputCount()); //only one file from flush
              assertTrue(storeFiles.contains(storeFlushDesc.getFlushOutput(0)));
            }
            flushDescriptors.add(entry);
          }
        }
        assertEquals(3 * 2, flushDescriptors.size()); // START_FLUSH and COMMIT_FLUSH per flush
        // now write those markers to the recovered edits again.
        Path recoveredEditsDir = WALSplitUtil.getRegionDirRecoveredEditsDir(regiondir);
        Path recoveredEdits = new Path(recoveredEditsDir, String.format("%019d", 1000));
        fs.create(recoveredEdits);
        WALProvider.Writer writer = wals.createRecoveredEditsWriter(fs, recoveredEdits);
        for (WAL.Entry entry : flushDescriptors) {
          writer.append(entry);
        }
        writer.close();
      } finally {
        if (null != reader) {
          try {
            reader.close();
          } catch (IOException exception) {
            LOG.warn("Problem closing wal: " + exception.getMessage());
            LOG.debug("exception details", exception);
          }
        }
      }
      // close the region now, and reopen again
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      region = HRegion.openHRegion(region, null);
      // now check whether we have can read back the data from region
      for (long i = minSeqId; i < maxSeqId; i++) {
        Get get = new Get(Bytes.toBytes(i));
        Result result = region.get(get);
        byte[] value = result.getValue(family, Bytes.toBytes(i));
        assertArrayEquals(Bytes.toBytes(i), value);
      }
    } finally {
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      this.region = null;
      wals.close();
    }
  }
static class IsFlushWALMarker implements ArgumentMatcher<WALEdit> {
volatile FlushAction[] actions;
public IsFlushWALMarker(FlushAction... actions) {
this.actions = actions;
}
@Override
public boolean matches(WALEdit edit) {
List<Cell> cells = edit.getCells();
if (cells.isEmpty()) {
return false;
}
if (WALEdit.isMetaEditFamily(cells.get(0))) {
FlushDescriptor desc;
try {
desc = WALEdit.getFlushDescriptor(cells.get(0));
} catch (IOException e) {
LOG.warn(e.toString(), e);
return false;
}
if (desc != null) {
for (FlushAction action : actions) {
if (desc.getAction() == action) {
return true;
}
}
}
}
return false;
}
public IsFlushWALMarker set(FlushAction... actions) {
this.actions = actions;
return this;
}
}
  /**
   * Exercises flushes whose marker append to the WAL fails: a failed START_FLUSH must
   * surface as a plain IOException (not DroppedSnapshotException, so the RS does not
   * abort), while a failed COMMIT_FLUSH — with ABORT_FLUSH also failing — must surface
   * as DroppedSnapshotException.
   */
  @Test
  public void testFlushMarkersWALFail() throws Exception {
    // test the cases where the WAL append for flush markers fail.
    byte[] family = Bytes.toBytes("family");
    // spy an actual WAL implementation to throw exception (was not able to mock)
    Path logDir = TEST_UTIL.getDataTestDirOnTestFS(method + "log");
    final Configuration walConf = new Configuration(TEST_UTIL.getConfiguration());
    FSUtils.setRootDir(walConf, logDir);
    // Make up a WAL that we can manipulate at append time.
    class FailAppendFlushMarkerWAL extends FSHLog {
      // Marker actions whose append should throw; null means fail nothing.
      volatile FlushAction [] flushActions = null;
      public FailAppendFlushMarkerWAL(FileSystem fs, Path root, String logDir, Configuration conf)
          throws IOException {
        super(fs, root, logDir, conf);
      }
      @Override
      protected Writer createWriterInstance(Path path) throws IOException {
        final Writer w = super.createWriterInstance(path);
        return new Writer() {
          @Override
          public void close() throws IOException {
            w.close();
          }
          @Override
          public void sync(boolean forceSync) throws IOException {
            w.sync(forceSync);
          }
          @Override
          public void append(Entry entry) throws IOException {
            List<Cell> cells = entry.getEdit().getCells();
            // Throw only for flush-marker meta edits matching a configured action;
            // everything else is delegated to the real writer.
            if (WALEdit.isMetaEditFamily(cells.get(0))) {
              FlushDescriptor desc = WALEdit.getFlushDescriptor(cells.get(0));
              if (desc != null) {
                for (FlushAction flushAction: flushActions) {
                  if (desc.getAction().equals(flushAction)) {
                    throw new IOException("Failed to append flush marker! " + flushAction);
                  }
                }
              }
            }
            w.append(entry);
          }
          @Override
          public long getLength() {
            return w.getLength();
          }
        };
      }
    }
    FailAppendFlushMarkerWAL wal =
      new FailAppendFlushMarkerWAL(FileSystem.get(walConf), FSUtils.getRootDir(walConf),
        method, walConf);
    wal.init();
    this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
      HConstants.EMPTY_END_ROW, false, Durability.USE_DEFAULT, wal, family);
    int i = 0;
    Put put = new Put(Bytes.toBytes(i));
    put.setDurability(Durability.SKIP_WAL); // have to skip mocked wal
    put.addColumn(family, Bytes.toBytes(i), Bytes.toBytes(i));
    region.put(put);
    // 1. Test case where START_FLUSH throws exception
    wal.flushActions = new FlushAction [] {FlushAction.START_FLUSH};
    // start cache flush will throw exception
    try {
      region.flush(true);
      fail("This should have thrown exception");
    } catch (DroppedSnapshotException unexpected) {
      // this should not be a dropped snapshot exception. Meaning that RS will not abort
      throw unexpected;
    } catch (IOException expected) {
      // expected
    }
    // The WAL is hosed now. It has two edits appended. We cannot roll the log without it
    // throwing a DroppedSnapshotException to force an abort. Just clean up the mess.
    region.close(true);
    wal.close();
    // 2. Test case where START_FLUSH succeeds but COMMIT_FLUSH will throw exception
    // NOTE(review): this assignment targets the old, already-closed WAL; the
    // replacement created below starts with flushActions = null until it is
    // reassigned at case 3 — confirm whether this line is intentional.
    wal.flushActions = new FlushAction [] {FlushAction.COMMIT_FLUSH};
    wal = new FailAppendFlushMarkerWAL(FileSystem.get(walConf), FSUtils.getRootDir(walConf),
      method, walConf);
    wal.init();
    this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
      HConstants.EMPTY_END_ROW, false, Durability.USE_DEFAULT, wal, family);
    region.put(put);
    // 3. Test case where ABORT_FLUSH will throw exception.
    // Even if ABORT_FLUSH throws exception, we should not fail with IOE, but continue with
    // DroppedSnapshotException. Below COMMMIT_FLUSH will cause flush to abort
    wal.flushActions = new FlushAction [] {FlushAction.COMMIT_FLUSH, FlushAction.ABORT_FLUSH};
    try {
      region.flush(true);
      fail("This should have thrown exception");
    } catch (DroppedSnapshotException expected) {
      // we expect this exception, since we were able to write the snapshot, but failed to
      // write the flush marker to WAL
    } catch (IOException unexpected) {
      throw unexpected;
    }
  }
  /**
   * Regression test for gets racing a region close: ten getter threads run while the
   * region's closing flag is flipped. Getters may fail (the region is closing) but
   * none may die with a NullPointerException.
   */
  @Test
  public void testGetWhileRegionClose() throws IOException {
    Configuration hc = initSplit();
    int numRows = 100;
    byte[][] families = { fam1, fam2, fam3 };
    // Setting up region
    this.region = initHRegion(tableName, method, hc, families);
    // Put data in region
    final int startRow = 100;
    putData(startRow, numRows, qual1, families);
    putData(startRow, numRows, qual2, families);
    putData(startRow, numRows, qual3, families);
    final AtomicBoolean done = new AtomicBoolean(false);
    final AtomicInteger gets = new AtomicInteger(0);
    GetTillDoneOrException[] threads = new GetTillDoneOrException[10];
    try {
      // Set ten threads running concurrently getting from the region.
      for (int i = 0; i < threads.length / 2; i++) {
        threads[i] = new GetTillDoneOrException(i, Bytes.toBytes("" + startRow), done, gets);
        threads[i].setDaemon(true);
        threads[i].start();
      }
      // Artificially make the condition by setting closing flag explicitly.
      // I can't make the issue happen with a call to region.close().
      this.region.closing.set(true);
      // The second half of the getters start only after the closing flag is set.
      for (int i = threads.length / 2; i < threads.length; i++) {
        threads[i] = new GetTillDoneOrException(i, Bytes.toBytes("" + startRow), done, gets);
        threads[i].setDaemon(true);
        threads[i].start();
      }
    } finally {
      if (this.region != null) {
        HBaseTestingUtility.closeRegionAndWAL(this.region);
        this.region = null;
      }
    }
    done.set(true);
    // Join all getters and inspect any recorded exception: anything but an NPE is OK.
    for (GetTillDoneOrException t : threads) {
      try {
        t.join();
      } catch (InterruptedException e) {
        e.printStackTrace();
      }
      if (t.e != null) {
        LOG.info("Exception=" + t.e);
        assertFalse("Found a NPE in " + t.getName(), t.e instanceof NullPointerException);
      }
    }
  }
/*
* Thread that does get on single row until 'done' flag is flipped. If an
* exception causes us to fail, it records it.
*/
class GetTillDoneOrException extends Thread {
private final Get g;
private final AtomicBoolean done;
private final AtomicInteger count;
private Exception e;
GetTillDoneOrException(final int i, final byte[] r, final AtomicBoolean d,
final AtomicInteger c) {
super("getter." + i);
this.g = new Get(r);
this.done = d;
this.count = c;
}
@Override
public void run() {
while (!this.done.get()) {
try {
assertTrue(region.get(g).size() > 0);
this.count.incrementAndGet();
} catch (Exception e) {
this.e = e;
break;
}
}
}
}
/*
* An involved filter test. Has multiple column families and deletes in mix.
*/
@Test
public void testWeirdCacheBehaviour() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
byte[][] FAMILIES = new byte[][] { Bytes.toBytes("trans-blob"), Bytes.toBytes("trans-type"),
Bytes.toBytes("trans-date"), Bytes.toBytes("trans-tags"), Bytes.toBytes("trans-group") };
this.region = initHRegion(tableName, method, CONF, FAMILIES);
String value = "this is the value";
String value2 = "this is some other value";
String keyPrefix1 = "prefix1";
String keyPrefix2 = "prefix2";
String keyPrefix3 = "prefix3";
putRows(this.region, 3, value, keyPrefix1);
putRows(this.region, 3, value, keyPrefix2);
putRows(this.region, 3, value, keyPrefix3);
putRows(this.region, 3, value2, keyPrefix1);
putRows(this.region, 3, value2, keyPrefix2);
putRows(this.region, 3, value2, keyPrefix3);
System.out.println("Checking values for key: " + keyPrefix1);
assertEquals("Got back incorrect number of rows from scan", 3,
getNumberOfRows(keyPrefix1, value2, this.region));
System.out.println("Checking values for key: " + keyPrefix2);
assertEquals("Got back incorrect number of rows from scan", 3,
getNumberOfRows(keyPrefix2, value2, this.region));
System.out.println("Checking values for key: " + keyPrefix3);
assertEquals("Got back incorrect number of rows from scan", 3,
getNumberOfRows(keyPrefix3, value2, this.region));
deleteColumns(this.region, value2, keyPrefix1);
deleteColumns(this.region, value2, keyPrefix2);
deleteColumns(this.region, value2, keyPrefix3);
System.out.println("Starting important checks.....");
assertEquals("Got back incorrect number of rows from scan: " + keyPrefix1, 0,
getNumberOfRows(keyPrefix1, value2, this.region));
assertEquals("Got back incorrect number of rows from scan: " + keyPrefix2, 0,
getNumberOfRows(keyPrefix2, value2, this.region));
assertEquals("Got back incorrect number of rows from scan: " + keyPrefix3, 0,
getNumberOfRows(keyPrefix3, value2, this.region));
}
@Test
public void testAppendWithReadOnlyTable() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
this.region = initHRegion(tableName, method, CONF, true, Bytes.toBytes("somefamily"));
boolean exceptionCaught = false;
Append append = new Append(Bytes.toBytes("somerow"));
append.setDurability(Durability.SKIP_WAL);
append.addColumn(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"),
Bytes.toBytes("somevalue"));
try {
region.append(append);
} catch (IOException e) {
exceptionCaught = true;
}
assertTrue(exceptionCaught == true);
}
@Test
public void testIncrWithReadOnlyTable() throws Exception {
final TableName tableName = TableName.valueOf(name.getMethodName());
this.region = initHRegion(tableName, method, CONF, true, Bytes.toBytes("somefamily"));
boolean exceptionCaught = false;
Increment inc = new Increment(Bytes.toBytes("somerow"));
inc.setDurability(Durability.SKIP_WAL);
inc.addColumn(Bytes.toBytes("somefamily"), Bytes.toBytes("somequalifier"), 1L);
try {
region.increment(inc);
} catch (IOException e) {
exceptionCaught = true;
}
assertTrue(exceptionCaught == true);
}
private void deleteColumns(HRegion r, String value, String keyPrefix) throws IOException {
InternalScanner scanner = buildScanner(keyPrefix, value, r);
int count = 0;
boolean more = false;
List<Cell> results = new ArrayList<>();
do {
more = scanner.next(results);
if (results != null && !results.isEmpty())
count++;
else
break;
Delete delete = new Delete(CellUtil.cloneRow(results.get(0)));
delete.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"));
r.delete(delete);
results.clear();
} while (more);
assertEquals("Did not perform correct number of deletes", 3, count);
}
private int getNumberOfRows(String keyPrefix, String value, HRegion r) throws Exception {
InternalScanner resultScanner = buildScanner(keyPrefix, value, r);
int numberOfResults = 0;
List<Cell> results = new ArrayList<>();
boolean more = false;
do {
more = resultScanner.next(results);
if (results != null && !results.isEmpty())
numberOfResults++;
else
break;
for (Cell kv : results) {
System.out.println("kv=" + kv.toString() + ", " + Bytes.toString(CellUtil.cloneValue(kv)));
}
results.clear();
} while (more);
return numberOfResults;
}
private InternalScanner buildScanner(String keyPrefix, String value, HRegion r)
throws IOException {
// Defaults FilterList.Operator.MUST_PASS_ALL.
FilterList allFilters = new FilterList();
allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix)));
// Only return rows where this column value exists in the row.
SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"),
Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value));
filter.setFilterIfMissing(true);
allFilters.addFilter(filter);
Scan scan = new Scan();
scan.addFamily(Bytes.toBytes("trans-blob"));
scan.addFamily(Bytes.toBytes("trans-type"));
scan.addFamily(Bytes.toBytes("trans-date"));
scan.addFamily(Bytes.toBytes("trans-tags"));
scan.addFamily(Bytes.toBytes("trans-group"));
scan.setFilter(allFilters);
return r.getScanner(scan);
}
private void putRows(HRegion r, int numRows, String value, String key) throws IOException {
for (int i = 0; i < numRows; i++) {
String row = key + "_" + i/* UUID.randomUUID().toString() */;
System.out.println(String.format("Saving row: %s, with value %s", row, value));
Put put = new Put(Bytes.toBytes(row));
put.setDurability(Durability.SKIP_WAL);
put.addColumn(Bytes.toBytes("trans-blob"), null, Bytes.toBytes("value for blob"));
put.addColumn(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
put.addColumn(Bytes.toBytes("trans-date"), null, Bytes.toBytes("20090921010101999"));
put.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes.toBytes(value));
put.addColumn(Bytes.toBytes("trans-group"), null, Bytes.toBytes("adhocTransactionGroupId"));
r.put(put);
}
}
@Test
public void testFamilyWithAndWithoutColon() throws Exception {
byte[] cf = Bytes.toBytes(COLUMN_FAMILY);
this.region = initHRegion(tableName, method, CONF, cf);
Put p = new Put(tableName.toBytes());
byte[] cfwithcolon = Bytes.toBytes(COLUMN_FAMILY + ":");
p.addColumn(cfwithcolon, cfwithcolon, cfwithcolon);
boolean exception = false;
try {
this.region.put(p);
} catch (NoSuchColumnFamilyException e) {
exception = true;
}
assertTrue(exception);
}
  /**
   * Batch puts with no row locks held: a fully valid batch succeeds with exactly one
   * WAL sync; a batch containing one put against a bad family fails only that slot
   * (BAD_FAMILY) and still syncs once for the remaining puts.
   */
  @Test
  public void testBatchPut_whileNoRowLocksHeld() throws IOException {
    final Put[] puts = new Put[10];
    MetricsWALSource source = CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
    long syncs = prepareRegionForBachPut(puts, source, false);
    OperationStatus[] codes = this.region.batchMutate(puts);
    assertEquals(10, codes.length);
    for (int i = 0; i < 10; i++) {
      assertEquals(OperationStatusCode.SUCCESS, codes[i].getOperationStatusCode());
    }
    // Exactly one WAL sync for the whole batch.
    metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 1, source);
    LOG.info("Next a batch put with one invalid family");
    puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, value);
    codes = this.region.batchMutate(puts);
    assertEquals(10, codes.length);
    // Only slot 5 (the bad family) fails; everything else still succeeds.
    for (int i = 0; i < 10; i++) {
      assertEquals((i == 5) ? OperationStatusCode.BAD_FAMILY : OperationStatusCode.SUCCESS,
        codes[i].getOperationStatusCode());
    }
    metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 2, source);
  }
  /**
   * HBASE-12565 regression: a batchMutate racing a region close while several row
   * locks are held must not fail mid-batch; the closer blocks until the batch
   * finishes, and per-row statuses are reported (BAD_FAMILY only for the bad put).
   */
  @Test
  public void testBatchPut_whileMultipleRowLocksHeld() throws Exception {
    final Put[] puts = new Put[10];
    MetricsWALSource source = CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
    long syncs = prepareRegionForBachPut(puts, source, false);
    puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, value);
    LOG.info("batchPut will have to break into four batches to avoid row locks");
    // Hold locks on three distinct rows (plus a read lock on row_3) so the batch
    // cannot proceed in one go.
    RowLock rowLock1 = region.getRowLock(Bytes.toBytes("row_2"));
    RowLock rowLock2 = region.getRowLock(Bytes.toBytes("row_1"));
    RowLock rowLock3 = region.getRowLock(Bytes.toBytes("row_3"));
    RowLock rowLock4 = region.getRowLock(Bytes.toBytes("row_3"), true);
    MultithreadedTestUtil.TestContext ctx = new MultithreadedTestUtil.TestContext(CONF);
    final AtomicReference<OperationStatus[]> retFromThread = new AtomicReference<>();
    final CountDownLatch startingPuts = new CountDownLatch(1);
    final CountDownLatch startingClose = new CountDownLatch(1);
    TestThread putter = new TestThread(ctx) {
      @Override
      public void doWork() throws IOException {
        startingPuts.countDown();
        retFromThread.set(region.batchMutate(puts));
      }
    };
    LOG.info("...starting put thread while holding locks");
    ctx.addThread(putter);
    ctx.startThreads();
    // Now attempt to close the region from another thread. Prior to HBASE-12565
    // this would cause the in-progress batchMutate operation to to fail with
    // exception because it use to release and re-acquire the close-guard lock
    // between batches. Caller then didn't get status indicating which writes succeeded.
    // We now expect this thread to block until the batchMutate call finishes.
    Thread regionCloseThread = new TestThread(ctx) {
      @Override
      public void doWork() {
        try {
          startingPuts.await();
          // Give some time for the batch mutate to get in.
          // We don't want to race with the mutate
          Thread.sleep(10);
          startingClose.countDown();
          HBaseTestingUtility.closeRegionAndWAL(region);
          region = null;
        } catch (IOException e) {
          throw new RuntimeException(e);
        } catch (InterruptedException e) {
          throw new RuntimeException(e);
        }
      }
    };
    regionCloseThread.start();
    startingClose.await();
    startingPuts.await();
    Thread.sleep(100);
    LOG.info("...releasing row lock 1, which should let put thread continue");
    rowLock1.release();
    rowLock2.release();
    rowLock3.release();
    // The batch can now complete; wait for its single WAL sync to show in metrics.
    waitForCounter(source, "syncTimeNumOps", syncs + 1);
    LOG.info("...joining on put thread");
    ctx.stop();
    regionCloseThread.join();
    OperationStatus[] codes = retFromThread.get();
    for (int i = 0; i < codes.length; i++) {
      assertEquals((i == 5) ? OperationStatusCode.BAD_FAMILY : OperationStatusCode.SUCCESS,
        codes[i].getOperationStatusCode());
    }
    rowLock4.release();
  }
private void waitForCounter(MetricsWALSource source, String metricName, long expectedCount)
throws InterruptedException {
long startWait = System.currentTimeMillis();
long currentCount;
while ((currentCount = metricsAssertHelper.getCounter(metricName, source)) < expectedCount) {
Thread.sleep(100);
if (System.currentTimeMillis() - startWait > 10000) {
fail(String.format("Timed out waiting for '%s' >= '%s', currentCount=%s", metricName,
expectedCount, currentCount));
}
}
}
  /**
   * Atomic batch puts: (1) a clean batch succeeds with one WAL sync; (2) a batch that
   * cannot take a row lock held by another thread fails with an IOException and syncs
   * nothing; (3) a batch containing a bad family throws NoSuchColumnFamilyException
   * up front (atomic batches validate before applying).
   */
  @Test
  public void testAtomicBatchPut() throws IOException {
    final Put[] puts = new Put[10];
    MetricsWALSource source = CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
    long syncs = prepareRegionForBachPut(puts, source, false);
    // 1. Straight forward case, should succeed
    MutationBatchOperation batchOp = new MutationBatchOperation(region, puts, true,
        HConstants.NO_NONCE, HConstants.NO_NONCE);
    OperationStatus[] codes = this.region.batchMutate(batchOp);
    assertEquals(10, codes.length);
    for (int i = 0; i < 10; i++) {
      assertEquals(OperationStatusCode.SUCCESS, codes[i].getOperationStatusCode());
    }
    metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 1, source);
    // 2. Failed to get lock
    RowLock lock = region.getRowLock(Bytes.toBytes("row_" + 3));
    // Method {@link HRegion#getRowLock(byte[])} is reentrant. As 'row_3' is locked in this
    // thread, need to run {@link HRegion#batchMutate(HRegion.BatchOperation)} in different thread
    MultithreadedTestUtil.TestContext ctx = new MultithreadedTestUtil.TestContext(CONF);
    final AtomicReference<IOException> retFromThread = new AtomicReference<>();
    final CountDownLatch finishedPuts = new CountDownLatch(1);
    final MutationBatchOperation finalBatchOp = new MutationBatchOperation(region, puts, true,
        HConstants
        .NO_NONCE,
        HConstants.NO_NONCE);
    TestThread putter = new TestThread(ctx) {
      @Override
      public void doWork() throws IOException {
        try {
          region.batchMutate(finalBatchOp);
        } catch (IOException ioe) {
          LOG.error("test failed!", ioe);
          retFromThread.set(ioe);
        }
        finishedPuts.countDown();
      }
    };
    LOG.info("...starting put thread while holding locks");
    ctx.addThread(putter);
    ctx.startThreads();
    LOG.info("...waiting for batch puts while holding locks");
    try {
      finishedPuts.await();
    } catch (InterruptedException e) {
      LOG.error("Interrupted!", e);
    } finally {
      if (lock != null) {
        lock.release();
      }
    }
    // The atomic batch must have failed with an IOException, and no extra WAL sync.
    assertNotNull(retFromThread.get());
    metricsAssertHelper.assertCounter("syncTimeNumOps", syncs + 1, source);
    // 3. Exception thrown in validation
    LOG.info("Next a batch put with one invalid family");
    puts[5].addColumn(Bytes.toBytes("BAD_CF"), qual, value);
    batchOp = new MutationBatchOperation(region, puts, true, HConstants.NO_NONCE,
        HConstants.NO_NONCE);
    thrown.expect(NoSuchColumnFamilyException.class);
    this.region.batchMutate(batchOp);
  }
@Test
public void testBatchPutWithTsSlop() throws Exception {
// add data with a timestamp that is too recent for range. Ensure assert
CONF.setInt("hbase.hregion.keyvalue.timestamp.slop.millisecs", 1000);
final Put[] puts = new Put[10];
MetricsWALSource source = CompatibilitySingletonFactory.getInstance(MetricsWALSource.class);
long syncs = prepareRegionForBachPut(puts, source, true);
OperationStatus[] codes = this.region.batchMutate(puts);
assertEquals(10, codes.length);
for (int i = 0; i < 10; i++) {
assertEquals(OperationStatusCode.SANITY_CHECK_FAILURE, codes[i].getOperationStatusCode());
}
metricsAssertHelper.assertCounter("syncTimeNumOps", syncs, source);
}
/**
 * Initializes {@code this.region} and fills {@code puts} with one valid Put per slot,
 * without writing any of them.
 *
 * @param puts destination array; each element is replaced with a fresh Put
 * @param source WAL metrics source used to read the sync counter
 * @param slop when true, every Put is stamped with a far-future timestamp so that
 *     timestamp-slop checking will reject it
 * @return the current value of the "syncTimeNumOps" counter before any batch runs
 */
private long prepareRegionForBachPut(final Put[] puts, final MetricsWALSource source,
    boolean slop) throws IOException {
  this.region = initHRegion(tableName, method, CONF, COLUMN_FAMILY_BYTES);
  LOG.info("First a batch put with all valid puts");
  for (int idx = 0; idx < puts.length; idx++) {
    Put p = slop
        ? new Put(Bytes.toBytes("row_" + idx), Long.MAX_VALUE - 100)
        : new Put(Bytes.toBytes("row_" + idx));
    p.addColumn(COLUMN_FAMILY_BYTES, qual, value);
    puts[idx] = p;
  }
  long syncs = metricsAssertHelper.getCounter("syncTimeNumOps", source);
  metricsAssertHelper.assertCounter("syncTimeNumOps", syncs, source);
  return syncs;
}
// ////////////////////////////////////////////////////////////////////////////
// checkAndMutate tests
// ////////////////////////////////////////////////////////////////////////////
@Test
public void testCheckAndMutate_WithEmptyRowValue() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] qf1 = Bytes.toBytes("qualifier");
  byte[] emptyVal = new byte[] {};
  byte[] val1 = Bytes.toBytes("value1");
  byte[] val2 = Bytes.toBytes("value2");
  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);
  // Putting empty data in key
  Put put = new Put(row1);
  put.addColumn(fam1, qf1, emptyVal);
  // checkAndPut with empty value: no cell exists yet, and comparing against an
  // empty value matches a missing cell, so the put is applied.
  boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(emptyVal), put);
  assertTrue(res);
  // Putting data in key
  put = new Put(row1);
  put.addColumn(fam1, qf1, val1);
  // checkAndPut with correct value: the cell currently holds emptyVal, so the
  // check passes and val1 is written.
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(emptyVal), put);
  assertTrue(res);
  // not empty anymore: the cell now holds val1, so an emptyVal check fails.
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(emptyVal), put);
  assertFalse(res);
  // checkAndDelete against emptyVal must likewise fail while the cell holds val1.
  Delete delete = new Delete(row1);
  delete.addColumn(fam1, qf1);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(emptyVal), delete);
  assertFalse(res);
  put = new Put(row1);
  put.addColumn(fam1, qf1, val2);
  // checkAndPut with correct value
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(val1), put);
  assertTrue(res);
  // checkAndDelete with correct value
  delete = new Delete(row1);
  delete.addColumn(fam1, qf1);
  // NOTE(review): the same column is added twice; presumably this deletes the
  // two latest versions (compare testDelete_multiDeleteColumn) — confirm intent.
  delete.addColumn(fam1, qf1);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(val2), delete);
  assertTrue(res);
  // The cell is gone now, so an emptyVal check matches again and this
  // whole-row delete is applied.
  delete = new Delete(row1);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(emptyVal), delete);
  assertTrue(res);
  // checkAndPut looking for a null value: NullComparator matches the absent cell.
  put = new Put(row1);
  put.addColumn(fam1, qf1, val1);
  res = region
      .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new NullComparator(), put);
  assertTrue(res);
}
@Test
public void testCheckAndMutate_WithWrongValue() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] qf1 = Bytes.toBytes("qualifier");
  byte[] val1 = Bytes.toBytes("value1");
  byte[] val2 = Bytes.toBytes("value2");
  BigDecimal bd1 = new BigDecimal(Double.MAX_VALUE);
  BigDecimal bd2 = new BigDecimal(Double.MIN_VALUE);

  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);

  // Putting data in key
  Put put = new Put(row1);
  put.addColumn(fam1, qf1, val1);
  region.put(put);

  // checkAndPut with wrong value: stored val1 != val2, so nothing is applied.
  boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(val2), put);
  assertFalse(res);

  // checkAndDelete with wrong value. Bug fix: the original passed `put` here,
  // so the delete path was never exercised; pass the Delete instead (the
  // check still fails, so the row is untouched either way).
  Delete delete = new Delete(row1);
  delete.addFamily(fam1);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(val2), delete);
  assertFalse(res);

  // Putting data in key (BigDecimal payload)
  put = new Put(row1);
  put.addColumn(fam1, qf1, Bytes.toBytes(bd1));
  region.put(put);

  // checkAndPut with wrong value (BigDecimal comparator)
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BigDecimalComparator(bd2), put);
  assertFalse(res);

  // checkAndDelete with wrong value (BigDecimal comparator); same fix as above.
  delete = new Delete(row1);
  delete.addFamily(fam1);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BigDecimalComparator(bd2), delete);
  assertFalse(res);
}
@Test
public void testCheckAndMutate_WithCorrectValue() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] qf1 = Bytes.toBytes("qualifier");
  byte[] val1 = Bytes.toBytes("value1");
  BigDecimal bd1 = new BigDecimal(Double.MIN_VALUE);

  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);

  // Putting data in key
  Put put = new Put(row1);
  put.addColumn(fam1, qf1, val1);
  region.put(put);

  // checkAndPut with correct value: the stored value matches, so the put applies.
  // (assertTrue/assertFalse instead of assertEquals(true/false, ...) — clearer
  // failure output and consistent with the rest of this file.)
  boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(val1), put);
  assertTrue(res);

  // checkAndDelete with correct value
  Delete delete = new Delete(row1);
  delete.addColumn(fam1, qf1);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1),
      delete);
  assertTrue(res);

  // Same dance with a BigDecimal payload and BigDecimalComparator.
  put = new Put(row1);
  put.addColumn(fam1, qf1, Bytes.toBytes(bd1));
  region.put(put);

  // checkAndPut with correct value
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BigDecimalComparator(bd1), put);
  assertTrue(res);

  // checkAndDelete with correct value
  delete = new Delete(row1);
  delete.addColumn(fam1, qf1);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BigDecimalComparator(bd1), delete);
  assertTrue(res);
}
@Test
public void testCheckAndMutate_WithNonEqualCompareOp() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] qf1 = Bytes.toBytes("qualifier");
  byte[] val1 = Bytes.toBytes("value1");
  byte[] val2 = Bytes.toBytes("value2");
  byte[] val3 = Bytes.toBytes("value3");
  byte[] val4 = Bytes.toBytes("value4");

  // Setting up region
  this.region = initHRegion(tableName, method, CONF, fam1);

  // Putting val3 in key
  Put put = new Put(row1);
  put.addColumn(fam1, qf1, val3);
  region.put(put);

  // assertTrue/assertFalse instead of assertEquals(true/false, ...) throughout —
  // clearer failure output, same checks.

  // Test CompareOp.LESS: original = val3, compare with val3, fail
  boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
      new BinaryComparator(val3), put);
  assertFalse(res);

  // Test CompareOp.LESS: original = val3, compare with val4, fail
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
      new BinaryComparator(val4), put);
  assertFalse(res);

  // Test CompareOp.LESS: original = val3, compare with val2,
  // succeed (now value = val2)
  put = new Put(row1);
  put.addColumn(fam1, qf1, val2);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
      new BinaryComparator(val2), put);
  assertTrue(res);

  // Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val3, fail
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
      new BinaryComparator(val3), put);
  assertFalse(res);

  // Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val2,
  // succeed (value still = val2)
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
      new BinaryComparator(val2), put);
  assertTrue(res);

  // Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val1,
  // succeed (now value = val3)
  put = new Put(row1);
  put.addColumn(fam1, qf1, val3);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
      new BinaryComparator(val1), put);
  assertTrue(res);

  // Test CompareOp.GREATER: original = val3, compare with val3, fail
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
      new BinaryComparator(val3), put);
  assertFalse(res);

  // Test CompareOp.GREATER: original = val3, compare with val2, fail
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
      new BinaryComparator(val2), put);
  assertFalse(res);

  // Test CompareOp.GREATER: original = val3, compare with val4,
  // succeed (now value = val2)
  put = new Put(row1);
  put.addColumn(fam1, qf1, val2);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
      new BinaryComparator(val4), put);
  assertTrue(res);

  // Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val1, fail
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
      new BinaryComparator(val1), put);
  assertFalse(res);

  // Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val2,
  // succeed (value still = val2)
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
      new BinaryComparator(val2), put);
  assertTrue(res);

  // Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val3, succeed
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
      new BinaryComparator(val3), put);
  assertTrue(res);
}
@Test
public void testCheckAndPut_ThatPutWasWritten() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[] qf1 = Bytes.toBytes("qualifier");
  byte[] val1 = Bytes.toBytes("value1");
  byte[] val2 = Bytes.toBytes("value2");
  byte[][] families = { fam1, fam2 };

  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);

  // Putting data in the key to check
  Put put = new Put(row1);
  put.addColumn(fam1, qf1, val1);
  region.put(put);

  // Creating put to add
  long ts = System.currentTimeMillis();
  KeyValue kv = new KeyValue(row1, fam2, qf1, ts, KeyValue.Type.Put, val2);
  put = new Put(row1);
  put.add(kv);

  // checkAndPut with correct value (the original comment said "wrong value",
  // but the check compares val1 against the stored val1 and must succeed).
  boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(val1), put);
  assertTrue(res);

  // The new cell in fam2 must now be readable exactly as written.
  Get get = new Get(row1);
  get.addColumn(fam2, qf1);
  Cell[] actual = region.get(get).rawCells();
  Cell[] expected = { kv };
  assertEquals(expected.length, actual.length);
  for (int i = 0; i < actual.length; i++) {
    assertEquals(expected[i], actual[i]);
  }
}
@Test
public void testCheckAndPut_wrongRowInPut() throws IOException {
  this.region = initHRegion(tableName, method, CONF, COLUMNS);
  // The Put targets row2 while the check is made against row, which is illegal.
  Put mismatchedPut = new Put(row2);
  mismatchedPut.addColumn(fam1, qual1, value1);
  try {
    region.checkAndMutate(row, fam1, qual1, CompareOperator.EQUAL,
        new BinaryComparator(value2), mismatchedPut);
    fail();
  } catch (org.apache.hadoop.hbase.DoNotRetryIOException expected) {
    // The row mismatch must be rejected with a non-retriable exception.
  }
}
@Test
public void testCheckAndDelete_ThatDeleteWasWritten() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[] qf1 = Bytes.toBytes("qualifier1");
  byte[] qf2 = Bytes.toBytes("qualifier2");
  byte[] qf3 = Bytes.toBytes("qualifier3");
  byte[] val1 = Bytes.toBytes("value1");
  byte[] val2 = Bytes.toBytes("value2");
  byte[] val3 = Bytes.toBytes("value3");
  byte[] emptyVal = new byte[] {};
  byte[][] families = { fam1, fam2 };

  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);

  // Put content: two versions of fam1/qf1 (val1, then val2) plus cells in fam2.
  Put put = new Put(row1);
  put.addColumn(fam1, qf1, val1);
  region.put(put);
  Threads.sleep(2);

  put = new Put(row1);
  put.addColumn(fam1, qf1, val2);
  put.addColumn(fam2, qf1, val3);
  put.addColumn(fam2, qf2, val2);
  put.addColumn(fam2, qf3, val1);
  put.addColumn(fam1, qf3, val1);
  region.put(put);

  // Multi-column delete: removes the latest version of each named column.
  // (assertTrue instead of assertEquals(true, ...) — clearer failure output.)
  Delete delete = new Delete(row1);
  delete.addColumn(fam1, qf1);
  delete.addColumn(fam2, qf1);
  delete.addColumn(fam1, qf3);
  boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL,
      new BinaryComparator(val2), delete);
  assertTrue(res);

  // fam1/qf1 falls back to its older version (val1); fam2/qf2 is untouched.
  Get get = new Get(row1);
  get.addColumn(fam1, qf1);
  get.addColumn(fam1, qf3);
  get.addColumn(fam2, qf2);
  Result r = region.get(get);
  assertEquals(2, r.size());
  assertArrayEquals(val1, r.getValue(fam1, qf1));
  assertArrayEquals(val2, r.getValue(fam2, qf2));

  // Family delete: the emptyVal check passes because fam2/qf1 was deleted above.
  delete = new Delete(row1);
  delete.addFamily(fam2);
  res = region.checkAndMutate(row1, fam2, qf1, CompareOperator.EQUAL,
      new BinaryComparator(emptyVal), delete);
  assertTrue(res);

  get = new Get(row1);
  r = region.get(get);
  assertEquals(1, r.size());
  assertArrayEquals(val1, r.getValue(fam1, qf1));

  // Row delete
  delete = new Delete(row1);
  res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1),
      delete);
  assertTrue(res);

  get = new Get(row1);
  r = region.get(get);
  assertEquals(0, r.size());
}
// ////////////////////////////////////////////////////////////////////////////
// Delete tests
// ////////////////////////////////////////////////////////////////////////////
@Test
public void testDelete_multiDeleteColumn() throws IOException {
  byte[] rowKey = Bytes.toBytes("row1");
  byte[] family = Bytes.toBytes("fam1");
  byte[] qualifier = Bytes.toBytes("qualifier");
  byte[] cellValue = Bytes.toBytes("value");

  // Two versions (ts=1 and ts=2) of the same cell.
  Put p = new Put(rowKey);
  p.addColumn(family, qualifier, 1, cellValue);
  p.addColumn(family, qualifier, 2, cellValue);

  this.region = initHRegion(tableName, method, CONF, family);
  region.put(p);

  // Naming the same column twice in one Delete removes the two latest versions.
  Delete d = new Delete(rowKey);
  d.addColumn(family, qualifier);
  d.addColumn(family, qualifier);
  region.delete(d);

  // Both versions are gone, so the row reads back empty.
  Get g = new Get(rowKey);
  g.addFamily(family);
  Result r = region.get(g);
  assertEquals(0, r.size());
}
@Test
public void testDelete_CheckFamily() throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[] fam3 = Bytes.toBytes("fam3");
  byte[] fam4 = Bytes.toBytes("fam4");

  // Setting up region; fam4 is deliberately NOT part of the region.
  this.region = initHRegion(tableName, method, CONF, fam1, fam2, fam3);

  List<Cell> kvs = new ArrayList<>();
  kvs.add(new KeyValue(row1, fam4, null, null));

  // testing existing family
  byte[] family = fam2;
  NavigableMap<byte[], List<Cell>> deleteMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  deleteMap.put(family, kvs);
  region.delete(deleteMap, Durability.SYNC_WAL);

  // testing non existing family: the delete must throw.
  boolean ok = false;
  family = fam4;
  try {
    deleteMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
    deleteMap.put(family, kvs);
    region.delete(deleteMap, Durability.SYNC_WAL);
  } catch (Exception e) {
    ok = true;
  }
  // Fixed failure message: the original read "Family ... does exist", which is
  // the opposite of what a failure here means (the delete on a missing family
  // did NOT throw).
  assertTrue("Delete against non-existing family "
      + new String(family, StandardCharsets.UTF_8) + " should have thrown", ok);
}
@Test
public void testDelete_mixed() throws IOException, InterruptedException {
  byte[] cf = Bytes.toBytes("info");
  byte[][] families = { cf };
  this.region = initHRegion(tableName, method, CONF, families);
  EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());

  byte[] rowKey = Bytes.toBytes("table_name");
  // column names
  byte[] serverinfo = Bytes.toBytes("serverinfo");
  byte[] splitA = Bytes.toBytes("splitA");
  byte[] splitB = Bytes.toBytes("splitB");

  // Seed three columns, each in its own put.
  region.put(new Put(rowKey).addColumn(cf, splitA, Bytes.toBytes("reference_A")));
  region.put(new Put(rowKey).addColumn(cf, splitB, Bytes.toBytes("reference_B")));
  region.put(new Put(rowKey).addColumn(cf, serverinfo, Bytes.toBytes("ip_address")));

  // Delete only splitA (all versions).
  Delete del = new Delete(rowKey);
  del.addColumns(cf, splitA);
  region.delete(del);

  // splitA is gone; the other two columns survive.
  Result result = region.get(new Get(rowKey).addColumn(cf, serverinfo));
  assertEquals(1, result.size());
  result = region.get(new Get(rowKey).addColumn(cf, splitA));
  assertEquals(0, result.size());
  result = region.get(new Get(rowKey).addColumn(cf, splitB));
  assertEquals(1, result.size());

  // A put after the delete must be visible again.
  region.put(new Put(rowKey).addColumn(cf, splitA, Bytes.toBytes("reference_A")));
  Get wholeRow = new Get(rowKey);
  result = region.get(wholeRow);
  assertEquals(3, result.size());

  // Delete the whole row, then verify we can write into it once more.
  region.delete(new Delete(rowKey));
  assertEquals(0, region.get(wholeRow).size());
  region.put(new Put(rowKey).addColumn(cf, splitA, Bytes.toBytes("reference_A")));
  result = region.get(wholeRow);
  assertEquals(1, result.size());
}
@Test
public void testDeleteRowWithFutureTs() throws IOException {
  byte[] cf = Bytes.toBytes("info");
  byte[][] families = { cf };
  this.region = initHRegion(tableName, method, CONF, families);

  byte[] rowKey = Bytes.toBytes("table_name");
  // column names
  byte[] serverinfo = Bytes.toBytes("serverinfo");

  // Write a cell with a timestamp far in the future.
  Put p = new Put(rowKey);
  p.addColumn(cf, serverinfo, HConstants.LATEST_TIMESTAMP - 5, Bytes.toBytes("value"));
  region.put(p);

  // A row delete stamped "now" must not affect the future-dated cell.
  region.delete(new Delete(rowKey));
  Get g = new Get(rowKey).addColumn(cf, serverinfo);
  Result result = region.get(g);
  assertEquals(1, result.size());

  // A delete stamped after the cell's timestamp does remove it.
  region.delete(new Delete(rowKey, HConstants.LATEST_TIMESTAMP - 3));
  g = new Get(rowKey).addColumn(cf, serverinfo);
  result = region.get(g);
  assertEquals(0, result.size());
}
/**
 * Verifies that a Put stamped with the sentinel LATEST_TIMESTAMP has that
 * sentinel replaced with a real timestamp by the time it is stored.
 */
@Test
public void testPutWithLatestTS() throws IOException {
  byte[] cf = Bytes.toBytes("info");
  byte[][] families = { cf };
  this.region = initHRegion(tableName, method, CONF, families);

  byte[] rowKey = Bytes.toBytes("row1");
  byte[] qual = Bytes.toBytes("qual");

  // First pass.
  Put p = new Put(rowKey);
  p.addColumn(cf, qual, HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
  region.put(p);

  Get g = new Get(rowKey).addColumn(cf, qual);
  Result res = region.get(g);
  assertEquals(1, res.size());
  Cell kv = res.rawCells()[0];
  LOG.info("Got: " + kv);
  assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp",
      kv.getTimestamp() != HConstants.LATEST_TIMESTAMP);

  // Second pass on a different row: historically the two code paths differed,
  // so exercise both.
  rowKey = Bytes.toBytes("row2");
  p = new Put(rowKey);
  p.addColumn(cf, qual, HConstants.LATEST_TIMESTAMP, Bytes.toBytes("value"));
  region.put(p);

  g = new Get(rowKey).addColumn(cf, qual);
  res = region.get(g);
  assertEquals(1, res.size());
  kv = res.rawCells()[0];
  LOG.info("Got: " + kv);
  assertTrue("LATEST_TIMESTAMP was not replaced with real timestamp",
      kv.getTimestamp() != HConstants.LATEST_TIMESTAMP);
}
/**
 * Verifies server-side rejection of puts whose timestamp exceeds the configured
 * upper-bound slop. (The lower bound is handled by TTL.)
 */
@Test
public void testPutWithTsSlop() throws IOException {
  byte[] cf = Bytes.toBytes("info");
  byte[][] families = { cf };
  // Tighten the allowed clock skew to 1s before opening the region.
  CONF.setInt("hbase.hregion.keyvalue.timestamp.slop.millisecs", 1000);
  this.region = initHRegion(tableName, method, CONF, families);

  boolean caughtExcep = false;
  try {
    // No explicit timestamp == LATEST, which must be accepted.
    region.put(new Put(row).addColumn(cf, Bytes.toBytes("qual"), Bytes.toBytes("value")));
    // A timestamp 2s ahead of the wall clock exceeds the 1s slop and must fail.
    region.put(new Put(row).addColumn(cf, Bytes.toBytes("qual"),
        System.currentTimeMillis() + 2000, Bytes.toBytes("value")));
    fail("Expected IOE for TS out of configured timerange");
  } catch (FailedSanityCheckException ioe) {
    LOG.debug("Received expected exception", ioe);
    caughtExcep = true;
  }
  assertTrue("Should catch FailedSanityCheckException", caughtExcep);
}
@Test
public void testScanner_DeleteOneFamilyNotAnother() throws IOException {
  byte[] fam1 = Bytes.toBytes("columnA");
  byte[] fam2 = Bytes.toBytes("columnB");
  this.region = initHRegion(tableName, method, CONF, fam1, fam2);
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] value = Bytes.toBytes("value");

  // Delete fam1 of rowA before any data exists; this must not hide the
  // fam2 cell written afterwards.
  Delete delete = new Delete(rowA);
  delete.addFamily(fam1);
  region.delete(delete);

  // now create data.
  Put put = new Put(rowA);
  put.addColumn(fam2, null, value);
  region.put(put);

  put = new Put(rowB);
  put.addColumn(fam1, null, value);
  put.addColumn(fam2, null, value);
  region.put(put);

  Scan scan = new Scan();
  scan.addFamily(fam1).addFamily(fam2);
  // Fix: close the scanner (the original leaked it).
  InternalScanner s = region.getScanner(scan);
  try {
    List<Cell> results = new ArrayList<>();
    s.next(results);
    assertTrue(CellUtil.matchingRows(results.get(0), rowA));

    results.clear();
    s.next(results);
    assertTrue(CellUtil.matchingRows(results.get(0), rowB));
  } finally {
    s.close();
  }
}
@Test
public void testDataInMemoryWithoutWAL() throws IOException {
  // Stand up a real FSHLog so SKIP_WAL vs SYNC_WAL durability is meaningful.
  FileSystem fs = FileSystem.get(CONF);
  Path rootDir = new Path(dir + "testDataInMemoryWithoutWAL");
  FSHLog hLog = new FSHLog(fs, rootDir, "testDataInMemoryWithoutWAL", CONF);
  hLog.init();
  // This chunk creation is done throughout the code base. Do we want to move it into core?
  // It is missing from this test. W/o it we NPE.
  ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
  region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, hLog,
      COLUMN_FAMILY_BYTES);

  // Two cells for the same row/column; their serialized sizes drive the
  // expected "data in memory without WAL" deltas below.
  Cell originalCell = CellUtil.createCell(row, COLUMN_FAMILY_BYTES, qual1,
      System.currentTimeMillis(), KeyValue.Type.Put.getCode(), value1);
  final long originalSize = originalCell.getSerializedSize();

  Cell addCell = CellUtil.createCell(row, COLUMN_FAMILY_BYTES, qual1,
      System.currentTimeMillis(), KeyValue.Type.Put.getCode(), Bytes.toBytes("xxxxxxxxxx"));
  final long addSize = addCell.getSerializedSize();

  LOG.info("originalSize:" + originalSize
    + ", addSize:" + addSize);
  // start test. We expect that the addPut's durability will be replaced
  // by originalPut's durability.

  // case 1: both SKIP_WAL -> both cell sizes are counted.
  testDataInMemoryWithoutWAL(region,
      new Put(row).add(originalCell).setDurability(Durability.SKIP_WAL),
      new Put(row).add(addCell).setDurability(Durability.SKIP_WAL),
      originalSize + addSize);

  // case 2: the CP-added put asks for SYNC_WAL but inherits SKIP_WAL,
  // so both sizes are still counted.
  testDataInMemoryWithoutWAL(region,
      new Put(row).add(originalCell).setDurability(Durability.SKIP_WAL),
      new Put(row).add(addCell).setDurability(Durability.SYNC_WAL),
      originalSize + addSize);

  // case 3: the original put is SYNC_WAL -> nothing bypasses the WAL.
  testDataInMemoryWithoutWAL(region,
      new Put(row).add(originalCell).setDurability(Durability.SYNC_WAL),
      new Put(row).add(addCell).setDurability(Durability.SKIP_WAL),
      0);

  // case 4: both SYNC_WAL -> nothing bypasses the WAL.
  testDataInMemoryWithoutWAL(region,
      new Put(row).add(originalCell).setDurability(Durability.SYNC_WAL),
      new Put(row).add(addCell).setDurability(Durability.SYNC_WAL),
      0);
}
/**
 * Runs {@code originalPut} through the region while a mocked coprocessor host
 * injects {@code addPut} via preBatchMutate, then asserts that the region's
 * "data in memory without WAL" counter grew by exactly {@code delta}.
 */
private static void testDataInMemoryWithoutWAL(HRegion region, Put originalPut,
    final Put addPut, long delta) throws IOException {
  final long initSize = region.getDataInMemoryWithoutWAL();
  // save normalCPHost and replaced by mockedCPHost
  RegionCoprocessorHost normalCPHost = region.getCoprocessorHost();
  RegionCoprocessorHost mockedCPHost = Mockito.mock(RegionCoprocessorHost.class);
  // Because the preBatchMutate returns void, we can't do usual Mockito when...then form. Must
  // do below format (from Mockito doc).
  Mockito.doAnswer(new Answer() {
    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      MiniBatchOperationInProgress<Mutation> mb = invocation.getArgument(0);
      // Slip the extra Put into the in-flight batch, as a coprocessor would.
      mb.addOperationsFromCP(0, new Mutation[]{addPut});
      return null;
    }
  }).when(mockedCPHost).preBatchMutate(Mockito.isA(MiniBatchOperationInProgress.class));
  // The mocked host must still answer the flush hooks with usable values,
  // otherwise a flush during the put would see nulls.
  ColumnFamilyDescriptorBuilder builder = ColumnFamilyDescriptorBuilder.
      newBuilder(COLUMN_FAMILY_BYTES);
  ScanInfo info = new ScanInfo(CONF, builder.build(), Long.MAX_VALUE,
      Long.MAX_VALUE, region.getCellComparator());
  Mockito.when(mockedCPHost.preFlushScannerOpen(Mockito.any(HStore.class),
      Mockito.any())).thenReturn(info);
  Mockito.when(mockedCPHost.preFlush(Mockito.any(), Mockito.any(StoreScanner.class),
      Mockito.any())).thenAnswer(i -> i.getArgument(1));
  region.setCoprocessorHost(mockedCPHost);

  region.put(originalPut);
  // Restore the real coprocessor host before asserting.
  region.setCoprocessorHost(normalCPHost);
  final long finalSize = region.getDataInMemoryWithoutWAL();
  assertEquals("finalSize:" + finalSize + ", initSize:"
    + initSize + ", delta:" + delta,finalSize, initSize + delta);
}
@Test
public void testDeleteColumns_PostInsert() throws IOException, InterruptedException {
  // Delete all versions of a single column, then verify a later put is visible.
  Delete d = new Delete(row);
  d.addColumns(fam1, qual1);
  doTestDelete_AndPostInsert(d);
}
@Test
public void testaddFamily_PostInsert() throws IOException, InterruptedException {
  // Delete a whole family, then verify a later put is visible.
  Delete d = new Delete(row);
  d.addFamily(fam1);
  doTestDelete_AndPostInsert(d);
}
/**
 * Shared driver: put a value, apply {@code delete}, put a new value, and verify
 * the new value is visible via both Get and Scan.
 */
public void doTestDelete_AndPostInsert(Delete delete) throws IOException, InterruptedException {
  this.region = initHRegion(tableName, method, CONF, fam1);
  EnvironmentEdgeManagerTestHelper.injectEdge(new IncrementingEnvironmentEdge());
  Put put = new Put(row);
  put.addColumn(fam1, qual1, value1);
  region.put(put);

  // now delete the value:
  region.delete(delete);

  // ok put data:
  put = new Put(row);
  put.addColumn(fam1, qual1, value2);
  region.put(put);

  // ok get:
  Get get = new Get(row);
  get.addColumn(fam1, qual1);
  Result r = region.get(get);
  assertEquals(1, r.size());
  assertArrayEquals(value2, r.getValue(fam1, qual1));

  // next: scan must see only the post-delete value.
  Scan scan = new Scan(row);
  scan.addColumn(fam1, qual1);
  // Fix: close the scanner (the original leaked it).
  InternalScanner s = region.getScanner(scan);
  try {
    List<Cell> results = new ArrayList<>();
    // Single row, so next() reports there are no further rows.
    assertFalse(s.next(results));
    assertEquals(1, results.size());
    Cell kv = results.get(0);
    assertArrayEquals(value2, CellUtil.cloneValue(kv));
    assertArrayEquals(fam1, CellUtil.cloneFamily(kv));
    assertArrayEquals(qual1, CellUtil.cloneQualifier(kv));
    assertArrayEquals(row, CellUtil.cloneRow(kv));
  } finally {
    s.close();
  }
}
@Test
public void testDelete_CheckTimestampUpdated() throws IOException {
  byte[] rowKey = Bytes.toBytes("row1");
  byte[] colA = Bytes.toBytes("col1");
  byte[] colB = Bytes.toBytes("col2");
  byte[] colC = Bytes.toBytes("col3");

  this.region = initHRegion(tableName, method, CONF, fam1);

  // Delete cells created with no timestamp; the region must stamp them itself.
  List<Cell> cells = new ArrayList<>();
  cells.add(new KeyValue(rowKey, fam1, colA, null));
  cells.add(new KeyValue(rowKey, fam1, colB, null));
  cells.add(new KeyValue(rowKey, fam1, colC, null));
  NavigableMap<byte[], List<Cell>> familyCellMap = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  familyCellMap.put(fam1, cells);
  region.delete(familyCellMap, Durability.SYNC_WAL);

  // Peek into the memstore (hacky, but better than nothing): every delete
  // marker must carry a real timestamp no later than "now", in
  // non-increasing order through the cell set.
  long upperBound = System.currentTimeMillis();
  AbstractMemStore memstore = (AbstractMemStore) region.getStore(fam1).memstore;
  Cell head = memstore.getActive().first();
  assertTrue(head.getTimestamp() <= upperBound);
  upperBound = head.getTimestamp();
  for (Cell cell : memstore.getActive().getCellSet()) {
    assertTrue(cell.getTimestamp() <= upperBound);
    upperBound = cell.getTimestamp();
  }
}
// ////////////////////////////////////////////////////////////////////////////
// Get tests
// ////////////////////////////////////////////////////////////////////////////
@Test
public void testGet_FamilyChecker() throws IOException {
  byte[] rowKey = Bytes.toBytes("row1");
  byte[] existingFam = Bytes.toBytes("fam1");
  byte[] missingFam = Bytes.toBytes("False");
  byte[] col = Bytes.toBytes("col1");

  // Region only knows about existingFam.
  this.region = initHRegion(tableName, method, CONF, existingFam);

  // Asking for a family the region does not have must be rejected.
  Get get = new Get(rowKey);
  get.addColumn(missingFam, col);
  try {
    region.get(get);
    fail("Expecting DoNotRetryIOException in get but did not get any");
  } catch (org.apache.hadoop.hbase.DoNotRetryIOException e) {
    LOG.info("Got expected DoNotRetryIOException successfully");
  }
}
@Test
public void testGet_Basic() throws IOException {
  byte[] rowKey = Bytes.toBytes("row1");
  byte[] cf = Bytes.toBytes("fam1");
  byte[] col1 = Bytes.toBytes("col1");
  byte[] col2 = Bytes.toBytes("col2");
  byte[] col3 = Bytes.toBytes("col3");
  byte[] col4 = Bytes.toBytes("col4");
  byte[] col5 = Bytes.toBytes("col5");

  this.region = initHRegion(tableName, method, CONF, cf);

  // Five columns in the memstore, all with null values.
  Put p = new Put(rowKey);
  p.addColumn(cf, col1, null);
  p.addColumn(cf, col2, null);
  p.addColumn(cf, col3, null);
  p.addColumn(cf, col4, null);
  p.addColumn(cf, col5, null);
  region.put(p);

  // Ask for just col2 and col4 and expect exactly those cells back, in order.
  Get get = new Get(rowKey);
  get.addColumn(cf, col2);
  get.addColumn(cf, col4);
  KeyValue[] expected = { new KeyValue(rowKey, cf, col2), new KeyValue(rowKey, cf, col4) };

  Result res = region.get(get);
  assertEquals(expected.length, res.size());
  for (int i = 0; i < res.size(); i++) {
    assertTrue(CellUtil.matchingRows(expected[i], res.rawCells()[i]));
    assertTrue(CellUtil.matchingFamily(expected[i], res.rawCells()[i]));
    assertTrue(CellUtil.matchingQualifier(expected[i], res.rawCells()[i]));
  }

  // A ColumnCountGetFilter caps how many columns come back.
  Get g = new Get(rowKey);
  final int count = 2;
  g.setFilter(new ColumnCountGetFilter(count));
  res = region.get(g);
  assertEquals(count, res.size());
}
@Test
public void testGet_Empty() throws IOException {
  byte[] rowKey = Bytes.toBytes("row");
  byte[] cf = Bytes.toBytes("fam");
  this.region = initHRegion(tableName, method, CONF, cf);
  // Nothing was ever written, so the Get must come back empty.
  Get get = new Get(rowKey);
  get.addFamily(cf);
  Result r = region.get(get);
  assertTrue(r.isEmpty());
}
@Test
public void testGetWithFilter() throws IOException, InterruptedException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] col1 = Bytes.toBytes("col1");
  byte[] value1 = Bytes.toBytes("value1");
  byte[] value2 = Bytes.toBytes("value2");
  // Region is created by hand (rather than via initHRegion) so that
  // maxVersions can be pinned to 3.
  final int maxVersions = 3;
  HColumnDescriptor hcd = new HColumnDescriptor(fam1);
  hcd.setMaxVersions(maxVersions);
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("testFilterAndColumnTracker"));
  htd.addFamily(hcd);
  ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
  HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
  Path logDir = TEST_UTIL.getDataTestDirOnTestFS(method + ".log");
  final WAL wal = HBaseTestingUtility.createWal(TEST_UTIL.getConfiguration(), logDir, info);
  this.region = TEST_UTIL.createLocalHRegion(info, htd, wal);
  // Put 4 version to memstore (ts .. ts+3); with maxVersions=3 the oldest
  // version (value1 at ts) is no longer user-visible.
  long ts = 0;
  Put put = new Put(row1, ts);
  put.addColumn(fam1, col1, value1);
  region.put(put);
  put = new Put(row1, ts + 1);
  put.addColumn(fam1, col1, Bytes.toBytes("filter1"));
  region.put(put);
  put = new Put(row1, ts + 2);
  put.addColumn(fam1, col1, Bytes.toBytes("filter2"));
  region.put(put);
  put = new Put(row1, ts + 3);
  put.addColumn(fam1, col1, value2);
  region.put(put);
  Get get = new Get(row1);
  get.setMaxVersions();
  Result res = region.get(get);
  // Get 3 versions, the oldest version has gone from user view
  assertEquals(maxVersions, res.size());
  // Only value2 (at ts+3) contains the substring "value" among the visible
  // versions; value1 is already outside the version window.
  get.setFilter(new ValueFilter(CompareOp.EQUAL, new SubstringComparator("value")));
  res = region.get(get);
  // When use value filter, the oldest version should still gone from user view and it
  // should only return one key vaule
  assertEquals(1, res.size());
  assertTrue(CellUtil.matchingValue(new KeyValue(row1, fam1, col1, value2), res.rawCells()[0]));
  assertEquals(ts + 3, res.rawCells()[0].getTimestamp());
  region.flush(true);
  region.compact(true);
  // NOTE(review): presumably this sleep lets the compaction settle before
  // re-reading — confirm whether it is actually required.
  Thread.sleep(1000);
  res = region.get(get);
  // After flush and compact, the result should be consistent with previous result
  assertEquals(1, res.size());
  assertTrue(CellUtil.matchingValue(new KeyValue(row1, fam1, col1, value2), res.rawCells()[0]));
}
// ////////////////////////////////////////////////////////////////////////////
// Scanner tests
// ////////////////////////////////////////////////////////////////////////////
@Test
public void testGetScanner_WithOkFamilies() throws IOException {
  byte[] fam1 = Bytes.toBytes("fam1");
  byte[] fam2 = Bytes.toBytes("fam2");
  byte[][] families = { fam1, fam2 };

  // Setting up region
  this.region = initHRegion(tableName, method, CONF, families);
  Scan scan = new Scan();
  scan.addFamily(fam1);
  scan.addFamily(fam2);
  try {
    // Both families exist, so opening a scanner must succeed. Close it right
    // away: the original dropped the reference and leaked the scanner.
    region.getScanner(scan).close();
  } catch (Exception e) {
    // Fix: fail() instead of assertTrue(msg, false) — same outcome, clearer intent.
    fail("Families could not be found in Region");
  }
}
@Test
public void testGetScanner_WithNotOkFamilies() throws IOException {
  byte[] knownFam = Bytes.toBytes("fam1");
  byte[] unknownFam = Bytes.toBytes("fam2");
  byte[][] families = { knownFam };

  this.region = initHRegion(tableName, method, CONF, families);

  // Scanning a family the region does not have must be rejected.
  Scan scan = new Scan();
  scan.addFamily(unknownFam);
  boolean thrown = false;
  try {
    region.getScanner(scan);
  } catch (Exception e) {
    thrown = true;
  }
  assertTrue("Families could not be found in Region", thrown);
}
@Test
public void testGetScanner_WithNoFamilies() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] fam2 = Bytes.toBytes("fam2");
byte[] fam3 = Bytes.toBytes("fam3");
byte[] fam4 = Bytes.toBytes("fam4");
byte[][] families = { fam1, fam2, fam3, fam4 };
// Setting up region
this.region = initHRegion(tableName, method, CONF, families);
// Putting data in Region
Put put = new Put(row1);
put.addColumn(fam1, null, null);
put.addColumn(fam2, null, null);
put.addColumn(fam3, null, null);
put.addColumn(fam4, null, null);
region.put(put);
Scan scan = null;
HRegion.RegionScannerImpl is = null;
// Testing to see how many scanners that is produced by getScanner,
// starting
// with known number, 2 - current = 1
scan = new Scan();
scan.addFamily(fam2);
scan.addFamily(fam4);
is = region.getScanner(scan);
assertEquals(1, is.storeHeap.getHeap().size());
scan = new Scan();
is = region.getScanner(scan);
assertEquals(families.length - 1, is.storeHeap.getHeap().size());
}
/**
* This method tests https://issues.apache.org/jira/browse/HBASE-2516.
*
* @throws IOException
*/
@Test
public void testGetScanner_WithRegionClosed() throws IOException {
byte[] fam1 = Bytes.toBytes("fam1");
byte[] fam2 = Bytes.toBytes("fam2");
byte[][] families = { fam1, fam2 };
// Setting up region
try {
this.region = initHRegion(tableName, method, CONF, families);
} catch (IOException e) {
e.printStackTrace();
fail("Got IOException during initHRegion, " + e.getMessage());
}
region.closed.set(true);
try {
region.getScanner(null);
fail("Expected to get an exception during getScanner on a region that is closed");
} catch (NotServingRegionException e) {
// this is the correct exception that is expected
} catch (IOException e) {
fail("Got wrong type of exception - should be a NotServingRegionException, " +
"but was an IOException: "
+ e.getMessage());
}
}
@Test
public void testRegionScanner_Next() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] row2 = Bytes.toBytes("row2");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] fam2 = Bytes.toBytes("fam2");
byte[] fam3 = Bytes.toBytes("fam3");
byte[] fam4 = Bytes.toBytes("fam4");
byte[][] families = { fam1, fam2, fam3, fam4 };
long ts = System.currentTimeMillis();
// Setting up region
this.region = initHRegion(tableName, method, CONF, families);
// Putting data in Region
Put put = null;
put = new Put(row1);
put.addColumn(fam1, (byte[]) null, ts, null);
put.addColumn(fam2, (byte[]) null, ts, null);
put.addColumn(fam3, (byte[]) null, ts, null);
put.addColumn(fam4, (byte[]) null, ts, null);
region.put(put);
put = new Put(row2);
put.addColumn(fam1, (byte[]) null, ts, null);
put.addColumn(fam2, (byte[]) null, ts, null);
put.addColumn(fam3, (byte[]) null, ts, null);
put.addColumn(fam4, (byte[]) null, ts, null);
region.put(put);
Scan scan = new Scan();
scan.addFamily(fam2);
scan.addFamily(fam4);
InternalScanner is = region.getScanner(scan);
List<Cell> res = null;
// Result 1
List<Cell> expected1 = new ArrayList<>();
expected1.add(new KeyValue(row1, fam2, null, ts, KeyValue.Type.Put, null));
expected1.add(new KeyValue(row1, fam4, null, ts, KeyValue.Type.Put, null));
res = new ArrayList<>();
is.next(res);
for (int i = 0; i < res.size(); i++) {
assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected1.get(i), res.get(i)));
}
// Result 2
List<Cell> expected2 = new ArrayList<>();
expected2.add(new KeyValue(row2, fam2, null, ts, KeyValue.Type.Put, null));
expected2.add(new KeyValue(row2, fam4, null, ts, KeyValue.Type.Put, null));
res = new ArrayList<>();
is.next(res);
for (int i = 0; i < res.size(); i++) {
assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected2.get(i), res.get(i)));
}
}
@Test
public void testScanner_ExplicitColumns_FromMemStore_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("qualifier2");
byte[] fam1 = Bytes.toBytes("fam1");
byte[][] families = { fam1 };
long ts1 = System.currentTimeMillis();
long ts2 = ts1 + 1;
long ts3 = ts1 + 2;
// Setting up region
this.region = initHRegion(tableName, method, CONF, families);
// Putting data in Region
Put put = null;
KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
put = new Put(row1);
put.add(kv13);
put.add(kv12);
put.add(kv11);
put.add(kv23);
put.add(kv22);
put.add(kv21);
region.put(put);
// Expected
List<Cell> expected = new ArrayList<>();
expected.add(kv13);
expected.add(kv12);
Scan scan = new Scan(row1);
scan.addColumn(fam1, qf1);
scan.setMaxVersions(MAX_VERSIONS);
List<Cell> actual = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(actual);
assertEquals(false, hasNext);
// Verify result
for (int i = 0; i < expected.size(); i++) {
assertEquals(expected.get(i), actual.get(i));
}
}
@Test
public void testScanner_ExplicitColumns_FromFilesOnly_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("qualifier2");
byte[] fam1 = Bytes.toBytes("fam1");
byte[][] families = { fam1 };
long ts1 = 1; // System.currentTimeMillis();
long ts2 = ts1 + 1;
long ts3 = ts1 + 2;
// Setting up region
this.region = initHRegion(tableName, method, CONF, families);
// Putting data in Region
Put put = null;
KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
put = new Put(row1);
put.add(kv13);
put.add(kv12);
put.add(kv11);
put.add(kv23);
put.add(kv22);
put.add(kv21);
region.put(put);
region.flush(true);
// Expected
List<Cell> expected = new ArrayList<>();
expected.add(kv13);
expected.add(kv12);
expected.add(kv23);
expected.add(kv22);
Scan scan = new Scan(row1);
scan.addColumn(fam1, qf1);
scan.addColumn(fam1, qf2);
scan.setMaxVersions(MAX_VERSIONS);
List<Cell> actual = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(actual);
assertEquals(false, hasNext);
// Verify result
for (int i = 0; i < expected.size(); i++) {
assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
}
}
@Test
public void testScanner_ExplicitColumns_FromMemStoreAndFiles_EnforceVersions() throws
IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[][] families = { fam1 };
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("qualifier2");
long ts1 = 1;
long ts2 = ts1 + 1;
long ts3 = ts1 + 2;
long ts4 = ts1 + 3;
// Setting up region
this.region = initHRegion(tableName, method, CONF, families);
// Putting data in Region
KeyValue kv14 = new KeyValue(row1, fam1, qf1, ts4, KeyValue.Type.Put, null);
KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
KeyValue kv24 = new KeyValue(row1, fam1, qf2, ts4, KeyValue.Type.Put, null);
KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
Put put = null;
put = new Put(row1);
put.add(kv14);
put.add(kv24);
region.put(put);
region.flush(true);
put = new Put(row1);
put.add(kv23);
put.add(kv13);
region.put(put);
region.flush(true);
put = new Put(row1);
put.add(kv22);
put.add(kv12);
region.put(put);
region.flush(true);
put = new Put(row1);
put.add(kv21);
put.add(kv11);
region.put(put);
// Expected
List<Cell> expected = new ArrayList<>();
expected.add(kv14);
expected.add(kv13);
expected.add(kv12);
expected.add(kv24);
expected.add(kv23);
expected.add(kv22);
Scan scan = new Scan(row1);
scan.addColumn(fam1, qf1);
scan.addColumn(fam1, qf2);
int versions = 3;
scan.setMaxVersions(versions);
List<Cell> actual = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(actual);
assertEquals(false, hasNext);
// Verify result
for (int i = 0; i < expected.size(); i++) {
assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
}
}
@Test
public void testScanner_Wildcard_FromMemStore_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("qualifier2");
byte[] fam1 = Bytes.toBytes("fam1");
byte[][] families = { fam1 };
long ts1 = System.currentTimeMillis();
long ts2 = ts1 + 1;
long ts3 = ts1 + 2;
// Setting up region
this.region = initHRegion(tableName, method, CONF, families);
// Putting data in Region
Put put = null;
KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
put = new Put(row1);
put.add(kv13);
put.add(kv12);
put.add(kv11);
put.add(kv23);
put.add(kv22);
put.add(kv21);
region.put(put);
// Expected
List<Cell> expected = new ArrayList<>();
expected.add(kv13);
expected.add(kv12);
expected.add(kv23);
expected.add(kv22);
Scan scan = new Scan(row1);
scan.addFamily(fam1);
scan.setMaxVersions(MAX_VERSIONS);
List<Cell> actual = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(actual);
assertEquals(false, hasNext);
// Verify result
for (int i = 0; i < expected.size(); i++) {
assertEquals(expected.get(i), actual.get(i));
}
}
@Test
public void testScanner_Wildcard_FromFilesOnly_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("qualifier2");
byte[] fam1 = Bytes.toBytes("fam1");
long ts1 = 1; // System.currentTimeMillis();
long ts2 = ts1 + 1;
long ts3 = ts1 + 2;
// Setting up region
this.region = initHRegion(tableName, method, CONF, fam1);
// Putting data in Region
Put put = null;
KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
put = new Put(row1);
put.add(kv13);
put.add(kv12);
put.add(kv11);
put.add(kv23);
put.add(kv22);
put.add(kv21);
region.put(put);
region.flush(true);
// Expected
List<Cell> expected = new ArrayList<>();
expected.add(kv13);
expected.add(kv12);
expected.add(kv23);
expected.add(kv22);
Scan scan = new Scan(row1);
scan.addFamily(fam1);
scan.setMaxVersions(MAX_VERSIONS);
List<Cell> actual = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(actual);
assertEquals(false, hasNext);
// Verify result
for (int i = 0; i < expected.size(); i++) {
assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
}
}
@Test
public void testScanner_StopRow1542() throws IOException {
byte[] family = Bytes.toBytes("testFamily");
this.region = initHRegion(tableName, method, CONF, family);
byte[] row1 = Bytes.toBytes("row111");
byte[] row2 = Bytes.toBytes("row222");
byte[] row3 = Bytes.toBytes("row333");
byte[] row4 = Bytes.toBytes("row444");
byte[] row5 = Bytes.toBytes("row555");
byte[] col1 = Bytes.toBytes("Pub111");
byte[] col2 = Bytes.toBytes("Pub222");
Put put = new Put(row1);
put.addColumn(family, col1, Bytes.toBytes(10L));
region.put(put);
put = new Put(row2);
put.addColumn(family, col1, Bytes.toBytes(15L));
region.put(put);
put = new Put(row3);
put.addColumn(family, col2, Bytes.toBytes(20L));
region.put(put);
put = new Put(row4);
put.addColumn(family, col2, Bytes.toBytes(30L));
region.put(put);
put = new Put(row5);
put.addColumn(family, col1, Bytes.toBytes(40L));
region.put(put);
Scan scan = new Scan(row3, row4);
scan.setMaxVersions();
scan.addColumn(family, col1);
InternalScanner s = region.getScanner(scan);
List<Cell> results = new ArrayList<>();
assertEquals(false, s.next(results));
assertEquals(0, results.size());
}
@Test
public void testScanner_Wildcard_FromMemStoreAndFiles_EnforceVersions() throws IOException {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] qf1 = Bytes.toBytes("qualifier1");
byte[] qf2 = Bytes.toBytes("quateslifier2");
long ts1 = 1;
long ts2 = ts1 + 1;
long ts3 = ts1 + 2;
long ts4 = ts1 + 3;
// Setting up region
this.region = initHRegion(tableName, method, CONF, fam1);
// Putting data in Region
KeyValue kv14 = new KeyValue(row1, fam1, qf1, ts4, KeyValue.Type.Put, null);
KeyValue kv13 = new KeyValue(row1, fam1, qf1, ts3, KeyValue.Type.Put, null);
KeyValue kv12 = new KeyValue(row1, fam1, qf1, ts2, KeyValue.Type.Put, null);
KeyValue kv11 = new KeyValue(row1, fam1, qf1, ts1, KeyValue.Type.Put, null);
KeyValue kv24 = new KeyValue(row1, fam1, qf2, ts4, KeyValue.Type.Put, null);
KeyValue kv23 = new KeyValue(row1, fam1, qf2, ts3, KeyValue.Type.Put, null);
KeyValue kv22 = new KeyValue(row1, fam1, qf2, ts2, KeyValue.Type.Put, null);
KeyValue kv21 = new KeyValue(row1, fam1, qf2, ts1, KeyValue.Type.Put, null);
Put put = null;
put = new Put(row1);
put.add(kv14);
put.add(kv24);
region.put(put);
region.flush(true);
put = new Put(row1);
put.add(kv23);
put.add(kv13);
region.put(put);
region.flush(true);
put = new Put(row1);
put.add(kv22);
put.add(kv12);
region.put(put);
region.flush(true);
put = new Put(row1);
put.add(kv21);
put.add(kv11);
region.put(put);
// Expected
List<KeyValue> expected = new ArrayList<>();
expected.add(kv14);
expected.add(kv13);
expected.add(kv12);
expected.add(kv24);
expected.add(kv23);
expected.add(kv22);
Scan scan = new Scan(row1);
int versions = 3;
scan.setMaxVersions(versions);
List<Cell> actual = new ArrayList<>();
InternalScanner scanner = region.getScanner(scan);
boolean hasNext = scanner.next(actual);
assertEquals(false, hasNext);
// Verify result
for (int i = 0; i < expected.size(); i++) {
assertTrue(PrivateCellUtil.equalsIgnoreMvccVersion(expected.get(i), actual.get(i)));
}
}
  /**
   * Added for HBASE-5416
   *
   * Here we test scan optimization when only subset of CFs are used in filter
   * conditions.
   */
  @Test
  public void testScanner_JoinedScanners() throws IOException {
    // "essential" is referenced by the filter; "joined" and "alpha" are only
    // loaded on demand when a row passes the filter.
    byte[] cf_essential = Bytes.toBytes("essential");
    byte[] cf_joined = Bytes.toBytes("joined");
    byte[] cf_alpha = Bytes.toBytes("alpha");
    this.region = initHRegion(tableName, method, CONF, cf_essential, cf_joined, cf_alpha);
    byte[] row1 = Bytes.toBytes("row1");
    byte[] row2 = Bytes.toBytes("row2");
    byte[] row3 = Bytes.toBytes("row3");
    byte[] col_normal = Bytes.toBytes("d");
    byte[] col_alpha = Bytes.toBytes("a");
    // Rows whose essential:d value equals filtered_val are excluded below.
    byte[] filtered_val = Bytes.toBytes(3);
    Put put = new Put(row1);
    put.addColumn(cf_essential, col_normal, Bytes.toBytes(1));
    put.addColumn(cf_joined, col_alpha, Bytes.toBytes(1));
    region.put(put);
    put = new Put(row2);
    put.addColumn(cf_essential, col_alpha, Bytes.toBytes(2));
    put.addColumn(cf_joined, col_normal, Bytes.toBytes(2));
    put.addColumn(cf_alpha, col_alpha, Bytes.toBytes(2));
    region.put(put);
    // row3 carries filtered_val in essential:d, so the filter drops it.
    put = new Put(row3);
    put.addColumn(cf_essential, col_normal, filtered_val);
    put.addColumn(cf_joined, col_normal, filtered_val);
    region.put(put);
    // Check two things:
    // 1. result list contains expected values
    // 2. result list is sorted properly
    Scan scan = new Scan();
    Filter filter = new SingleColumnValueExcludeFilter(cf_essential, col_normal,
        CompareOp.NOT_EQUAL, filtered_val);
    scan.setFilter(filter);
    // Enable the joined-scanner optimization under test (HBASE-5416).
    scan.setLoadColumnFamiliesOnDemand(true);
    InternalScanner s = region.getScanner(scan);
    List<Cell> results = new ArrayList<>();
    // row1 passes; the "Exclude" filter drops the matched essential:d cell
    // itself, leaving only joined:a.
    assertTrue(s.next(results));
    assertEquals(1, results.size());
    results.clear();
    // row2 passes and yields cells from all three families, sorted by family.
    assertTrue(s.next(results));
    assertEquals(3, results.size());
    assertTrue("orderCheck", CellUtil.matchingFamily(results.get(0), cf_alpha));
    assertTrue("orderCheck", CellUtil.matchingFamily(results.get(1), cf_essential));
    assertTrue("orderCheck", CellUtil.matchingFamily(results.get(2), cf_joined));
    results.clear();
    // row3 was filtered out, so the scan is exhausted with no more results.
    assertFalse(s.next(results));
    assertEquals(0, results.size());
  }
  /**
   * HBASE-5416
   *
   * Test case when scan limits amount of KVs returned on each next() call.
   */
  @Test
  public void testScanner_JoinedScannersWithLimits() throws IOException {
    final byte[] cf_first = Bytes.toBytes("first");
    final byte[] cf_second = Bytes.toBytes("second");
    this.region = initHRegion(tableName, method, CONF, cf_first, cf_second);
    final byte[] col_a = Bytes.toBytes("a");
    final byte[] col_b = Bytes.toBytes("b");
    Put put;
    // Rows r0..r4 carry four cells each; rows r5..r9 carry only first:a.
    for (int i = 0; i < 10; i++) {
      put = new Put(Bytes.toBytes("r" + Integer.toString(i)));
      put.addColumn(cf_first, col_a, Bytes.toBytes(i));
      if (i < 5) {
        put.addColumn(cf_first, col_b, Bytes.toBytes(i));
        put.addColumn(cf_second, col_a, Bytes.toBytes(i));
        put.addColumn(cf_second, col_b, Bytes.toBytes(i));
      }
      region.put(put);
    }
    Scan scan = new Scan();
    scan.setLoadColumnFamiliesOnDemand(true);
    // Includes every cell, but declares only cf_first "essential" so
    // cf_second is read via the joined-scanner path (HBASE-5416).
    Filter bogusFilter = new FilterBase() {
      @Override
      public ReturnCode filterCell(final Cell ignored) throws IOException {
        return ReturnCode.INCLUDE;
      }

      @Override
      public boolean isFamilyEssential(byte[] name) {
        return Bytes.equals(name, cf_first);
      }
    };
    scan.setFilter(bogusFilter);
    InternalScanner s = region.getScanner(scan);
    // Our data looks like this:
    // r0: first:a, first:b, second:a, second:b
    // r1: first:a, first:b, second:a, second:b
    // r2: first:a, first:b, second:a, second:b
    // r3: first:a, first:b, second:a, second:b
    // r4: first:a, first:b, second:a, second:b
    // r5: first:a
    // r6: first:a
    // r7: first:a
    // r8: first:a
    // r9: first:a
    // But due to next's limit set to 3, we should get this:
    // r0: first:a, first:b, second:a
    // r0: second:b
    // r1: first:a, first:b, second:a
    // r1: second:b
    // r2: first:a, first:b, second:a
    // r2: second:b
    // r3: first:a, first:b, second:a
    // r3: second:b
    // r4: first:a, first:b, second:a
    // r4: second:b
    // r5: first:a
    // r6: first:a
    // r7: first:a
    // r8: first:a
    // r9: first:a
    List<Cell> results = new ArrayList<>();
    int index = 0;
    ScannerContext scannerContext = ScannerContext.newBuilder().setBatchLimit(3).build();
    while (true) {
      boolean more = s.next(results, scannerContext);
      // Four-cell rows (r0..r4) produce two batches each: 3 cells then 1;
      // one-cell rows (r5..r9) produce a single 1-cell batch.
      if ((index >> 1) < 5) {
        if (index % 2 == 0) {
          assertEquals(3, results.size());
        } else {
          assertEquals(1, results.size());
        }
      } else {
        assertEquals(1, results.size());
      }
      results.clear();
      index++;
      if (!more) {
        break;
      }
    }
  }
/**
* Write an HFile block full with Cells whose qualifier that are identical between
* 0 and Short.MAX_VALUE. See HBASE-13329.
* @throws Exception
*/
@Test
public void testLongQualifier() throws Exception {
byte[] family = Bytes.toBytes("family");
this.region = initHRegion(tableName, method, CONF, family);
byte[] q = new byte[Short.MAX_VALUE+2];
Arrays.fill(q, 0, q.length-1, (byte)42);
for (byte i=0; i<10; i++) {
Put p = new Put(Bytes.toBytes("row"));
// qualifiers that differ past Short.MAX_VALUE
q[q.length-1]=i;
p.addColumn(family, q, q);
region.put(p);
}
region.flush(false);
}
  /**
   * Flushes the cache in a thread while scanning. The tests verify that the
   * scan is coherent - e.g. the returned results are always of the same or
   * later update as the previous results.
   *
   * @throws IOException
   *           scan / compact
   * @throws InterruptedException
   *           thread join
   */
  @Test
  public void testFlushCacheWhileScanning() throws IOException, InterruptedException {
    byte[] family = Bytes.toBytes("family");
    int numRows = 1000;
    int flushAndScanInterval = 10;
    int compactInterval = 10 * flushAndScanInterval;
    this.region = initHRegion(tableName, method, CONF, family);
    FlushThread flushThread = new FlushThread();
    try {
      flushThread.start();
      Scan scan = new Scan();
      scan.addFamily(family);
      // Only rows whose qual1 value is 5 pass (written as i % 10 below).
      scan.setFilter(new SingleColumnValueFilter(family, qual1, CompareOp.EQUAL,
          new BinaryComparator(Bytes.toBytes(5L))));
      int expectedCount = 0;
      List<Cell> res = new ArrayList<>();
      boolean toggle = true;
      for (long i = 0; i < numRows; i++) {
        Put put = new Put(Bytes.toBytes(i));
        put.setDurability(Durability.SKIP_WAL);
        put.addColumn(family, qual1, Bytes.toBytes(i % 10));
        region.put(put);
        if (i != 0 && i % compactInterval == 0) {
          LOG.debug("iteration = " + i+ " ts="+System.currentTimeMillis());
          region.compact(true);
        }
        if (i % 10 == 5L) {
          expectedCount++;
        }
        if (i != 0 && i % flushAndScanInterval == 0) {
          res.clear();
          InternalScanner scanner = region.getScanner(scan);
          // Alternate flushing before vs. after draining the scanner so both
          // interleavings of flush and scan are exercised.
          if (toggle) {
            flushThread.flush();
          }
          while (scanner.next(res))
            ;
          if (!toggle) {
            flushThread.flush();
          }
          // Scan must see exactly the matching rows written so far.
          assertEquals("toggle="+toggle+"i=" + i + " ts="+System.currentTimeMillis(),
              expectedCount, res.size());
          toggle = !toggle;
        }
      }
    } finally {
      try {
        flushThread.done();
        flushThread.join();
        flushThread.checkNoError();
      } catch (InterruptedException ie) {
        LOG.warn("Caught exception when joining with flushThread", ie);
      }
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      this.region = null;
    }
  }
  /**
   * Helper thread that flushes {@code region} each time {@link #flush()} is
   * called, until {@link #done()} is invoked. Errors seen while flushing are
   * recorded and surfaced to the test via {@link #checkNoError()}.
   */
  protected class FlushThread extends Thread {
    // Set by done(); checked by the run loop so the thread exits cleanly.
    private volatile boolean done;
    // First error seen while flushing; null if none.
    private Throwable error = null;

    FlushThread() {
      super("FlushThread");
    }

    // Ask the thread to stop; the interrupt wakes it out of wait().
    public void done() {
      done = true;
      synchronized (this) {
        interrupt();
      }
    }

    // Fails the calling test if the flush loop recorded an error.
    public void checkNoError() {
      if (error != null) {
        assertNull(error);
      }
    }

    @Override
    public void run() {
      done = false;
      while (!done) {
        // Block until flush() notifies us (or done() interrupts us).
        synchronized (this) {
          try {
            wait();
          } catch (InterruptedException ignored) {
            if (done) {
              break;
            }
          }
        }
        try {
          region.flush(true);
        } catch (IOException e) {
          // An IOException after done() is expected shutdown noise; before
          // that it is a real failure and is reported to the test.
          if (!done) {
            LOG.error("Error while flushing cache", e);
            error = e;
          }
          break;
        } catch (Throwable t) {
          LOG.error("Uncaught exception", t);
          throw t;
        }
      }
    }

    // Wake the thread so it performs one flush of the test region.
    public void flush() {
      synchronized (this) {
        notify();
      }
    }
  }
  /**
   * Writes very wide records and scans for the latest every time. Flushes and
   * compacts the region every now and then to keep things realistic.
   *
   * @throws IOException
   *           by flush / scan / compaction
   * @throws InterruptedException
   *           when joining threads
   */
  @Test
  public void testWritesWhileScanning() throws IOException, InterruptedException {
    int testCount = 100;
    int numRows = 1;
    int numFamilies = 10;
    int numQualifiers = 100;
    int flushInterval = 7;
    int compactInterval = 5 * flushInterval;
    byte[][] families = new byte[numFamilies][];
    for (int i = 0; i < numFamilies; i++) {
      families[i] = Bytes.toBytes("family" + i);
    }
    byte[][] qualifiers = new byte[numQualifiers][];
    for (int i = 0; i < numQualifiers; i++) {
      qualifiers[i] = Bytes.toBytes("qual" + i);
    }
    this.region = initHRegion(tableName, method, CONF, families);
    FlushThread flushThread = new FlushThread();
    PutThread putThread = new PutThread(numRows, families, qualifiers);
    try {
      putThread.start();
      // Ensure at least one full row exists before we start scanning.
      putThread.waitForFirstPut();
      flushThread.start();
      Scan scan = new Scan(Bytes.toBytes("row0"), Bytes.toBytes("row1"));
      int expectedCount = numFamilies * numQualifiers;
      List<Cell> res = new ArrayList<>();
      long prevTimestamp = 0L;
      for (int i = 0; i < testCount; i++) {
        if (i != 0 && i % compactInterval == 0) {
          region.compact(true);
          for (HStore store : region.getStores()) {
            store.closeAndArchiveCompactedFiles();
          }
        }
        if (i != 0 && i % flushInterval == 0) {
          flushThread.flush();
        }
        boolean previousEmpty = res.isEmpty();
        res.clear();
        InternalScanner scanner = region.getScanner(scan);
        while (scanner.next(res))
          ;
        if (!res.isEmpty() || !previousEmpty || i > compactInterval) {
          // The scan must see a complete row, and timestamps must never go
          // backwards between consecutive scans.
          assertEquals("i=" + i, expectedCount, res.size());
          long timestamp = res.get(0).getTimestamp();
          assertTrue("Timestamps were broke: " + timestamp + " prev: " + prevTimestamp,
              timestamp >= prevTimestamp);
          prevTimestamp = timestamp;
        }
      }
      putThread.done();
      region.flush(true);
    } finally {
      try {
        flushThread.done();
        flushThread.join();
        flushThread.checkNoError();
        putThread.join();
        putThread.checkNoError();
      } catch (InterruptedException ie) {
        LOG.warn("Caught exception when joining with flushThread", ie);
      }
      try {
        HBaseTestingUtility.closeRegionAndWAL(this.region);
      } catch (DroppedSnapshotException dse) {
        // We could get this on way out because we interrupt the background flusher and it could
        // fail anywhere causing a DSE over in the background flusher... only it is not properly
        // dealt with so could still be memory hanging out when we get to here -- memory we can't
        // flush because the accounting is 'off' since original DSE.
      }
      this.region = null;
    }
  }
  /**
   * Helper thread that repeatedly rewrites {@code numRows} wide rows (all
   * given families x qualifiers) into {@code region} until {@link #done()}
   * is called. Errors are recorded and surfaced via {@link #checkNoError()}.
   */
  protected class PutThread extends Thread {
    // Set by done(); the run loop exits once true.
    private volatile boolean done;
    // Count of completed put iterations; also used as the write timestamp
    // and as the cell value, so readers can check row consistency.
    private volatile int numPutsFinished = 0;

    // First error seen while putting; null if none.
    private Throwable error = null;
    private int numRows;
    private byte[][] families;
    private byte[][] qualifiers;

    private PutThread(int numRows, byte[][] families, byte[][] qualifiers) {
      super("PutThread");
      this.numRows = numRows;
      this.families = families;
      this.qualifiers = qualifiers;
    }

    /**
     * Block calling thread until this instance of PutThread has put at least one row.
     */
    public void waitForFirstPut() throws InterruptedException {
      // wait until put thread actually puts some data
      while (isAlive() && numPutsFinished == 0) {
        checkNoError();
        Thread.sleep(50);
      }
    }

    // Ask the thread to stop; the interrupt aborts any in-flight put.
    public void done() {
      done = true;
      synchronized (this) {
        interrupt();
      }
    }

    // Fails the calling test if the put loop recorded an error.
    public void checkNoError() {
      if (error != null) {
        assertNull(error);
      }
    }

    @Override
    public void run() {
      done = false;
      while (!done) {
        try {
          for (int r = 0; r < numRows; r++) {
            byte[] row = Bytes.toBytes("row" + r);
            Put put = new Put(row);
            put.setDurability(Durability.SKIP_WAL);
            byte[] value = Bytes.toBytes(String.valueOf(numPutsFinished));
            for (byte[] family : families) {
              for (byte[] qualifier : qualifiers) {
                put.addColumn(family, qualifier, numPutsFinished, value);
              }
            }
            region.put(put);
            numPutsFinished++;
            if (numPutsFinished > 0 && numPutsFinished % 47 == 0) {
              System.out.println("put iteration = " + numPutsFinished);
              // Periodically delete versions older than (counter - 30).
              Delete delete = new Delete(row, (long) numPutsFinished - 30);
              region.delete(delete);
            }
            // NOTE(review): numPutsFinished is incremented a second time here,
            // so write timestamps advance by two per row — confirm intended.
            numPutsFinished++;
          }
        } catch (InterruptedIOException e) {
          // This is fine. It means we are done, or didn't get the lock on time
          LOG.info("Interrupted", e);
        } catch (IOException e) {
          LOG.error("Error while putting records", e);
          error = e;
          break;
        }
      }
    }

  }
  /**
   * Writes very wide records and gets the latest row every time. Flushes and
   * compacts the region aggressively to catch issues.
   *
   * @throws IOException
   *           by flush / scan / compaction
   * @throws InterruptedException
   *           when joining threads
   */
  @Test
  public void testWritesWhileGetting() throws Exception {
    int testCount = 50;
    int numRows = 1;
    int numFamilies = 10;
    int numQualifiers = 100;
    int compactInterval = 100;
    byte[][] families = new byte[numFamilies][];
    for (int i = 0; i < numFamilies; i++) {
      families[i] = Bytes.toBytes("family" + i);
    }
    byte[][] qualifiers = new byte[numQualifiers][];
    for (int i = 0; i < numQualifiers; i++) {
      qualifiers[i] = Bytes.toBytes("qual" + i);
    }
    // This test flushes constantly and can cause many files to be created,
    // possibly
    // extending over the ulimit. Make sure compactions are aggressive in
    // reducing
    // the number of HFiles created.
    Configuration conf = HBaseConfiguration.create(CONF);
    conf.setInt("hbase.hstore.compaction.min", 1);
    conf.setInt("hbase.hstore.compaction.max", 1000);
    this.region = initHRegion(tableName, method, conf, families);
    PutThread putThread = null;
    MultithreadedTestUtil.TestContext ctx = new MultithreadedTestUtil.TestContext(conf);
    try {
      putThread = new PutThread(numRows, families, qualifiers);
      putThread.start();
      // Ensure at least one full row exists before the first get.
      putThread.waitForFirstPut();
      // Add a thread that flushes as fast as possible
      ctx.addThread(new RepeatingTestThread(ctx) {
        @Override
        public void doAnAction() throws Exception {
          region.flush(true);
          // Compact regularly to avoid creating too many files and exceeding
          // the ulimit.
          region.compact(false);
          for (HStore store : region.getStores()) {
            store.closeAndArchiveCompactedFiles();
          }
        }
      });
      ctx.startThreads();
      Get get = new Get(Bytes.toBytes("row0"));
      Result result = null;
      int expectedCount = numFamilies * numQualifiers;
      long prevTimestamp = 0L;
      for (int i = 0; i < testCount; i++) {
        LOG.info("testWritesWhileGetting verify turn " + i);
        boolean previousEmpty = result == null || result.isEmpty();
        result = region.get(get);
        if (!result.isEmpty() || !previousEmpty || i > compactInterval) {
          assertEquals("i=" + i, expectedCount, result.size());
          // TODO this was removed, now what dangit?!
          // search looking for the qualifier in question?
          long timestamp = 0;
          for (Cell kv : result.rawCells()) {
            if (CellUtil.matchingFamily(kv, families[0])
                && CellUtil.matchingQualifier(kv, qualifiers[0])) {
              timestamp = kv.getTimestamp();
            }
          }
          // Timestamps seen by successive gets must never go backwards.
          assertTrue(timestamp >= prevTimestamp);
          prevTimestamp = timestamp;
          Cell previousKV = null;
          // All cells of the row were written with the same value by
          // PutThread, so any mismatch means a torn (inconsistent) read.
          for (Cell kv : result.rawCells()) {
            byte[] thisValue = CellUtil.cloneValue(kv);
            if (previousKV != null) {
              if (Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue) != 0) {
                LOG.warn("These two KV should have the same value." + " Previous KV:" + previousKV
                    + "(memStoreTS:" + previousKV.getSequenceId() + ")" + ", New KV: " + kv
                    + "(memStoreTS:" + kv.getSequenceId() + ")");
                assertEquals(0, Bytes.compareTo(CellUtil.cloneValue(previousKV), thisValue));
              }
            }
            previousKV = kv;
          }
        }
      }
    } finally {
      if (putThread != null)
        putThread.done();

      region.flush(true);

      if (putThread != null) {
        putThread.join();
        putThread.checkNoError();
      }

      ctx.stop();
      HBaseTestingUtility.closeRegionAndWAL(this.region);
      this.region = null;
    }
  }
@Test
public void testHolesInMeta() throws Exception {
byte[] family = Bytes.toBytes("family");
this.region = initHRegion(tableName, Bytes.toBytes("x"), Bytes.toBytes("z"), method, CONF,
false, family);
byte[] rowNotServed = Bytes.toBytes("a");
Get g = new Get(rowNotServed);
try {
region.get(g);
fail();
} catch (WrongRegionException x) {
// OK
}
byte[] row = Bytes.toBytes("y");
g = new Get(row);
region.get(g);
}
@Test
public void testIndexesScanWithOneDeletedRow() throws IOException {
byte[] family = Bytes.toBytes("family");
// Setting up region
this.region = initHRegion(tableName, method, CONF, family);
Put put = new Put(Bytes.toBytes(1L));
put.addColumn(family, qual1, 1L, Bytes.toBytes(1L));
region.put(put);
region.flush(true);
Delete delete = new Delete(Bytes.toBytes(1L), 1L);
region.delete(delete);
put = new Put(Bytes.toBytes(2L));
put.addColumn(family, qual1, 2L, Bytes.toBytes(2L));
region.put(put);
Scan idxScan = new Scan();
idxScan.addFamily(family);
idxScan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, Arrays.<Filter> asList(
new SingleColumnValueFilter(family, qual1, CompareOp.GREATER_OR_EQUAL,
new BinaryComparator(Bytes.toBytes(0L))), new SingleColumnValueFilter(family, qual1,
CompareOp.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes(3L))))));
InternalScanner scanner = region.getScanner(idxScan);
List<Cell> res = new ArrayList<>();
while (scanner.next(res)) {
// Ignore res value.
}
assertEquals(1L, res.size());
}
// ////////////////////////////////////////////////////////////////////////////
// Bloom filter test
// ////////////////////////////////////////////////////////////////////////////
  /**
   * Checks ROWCOL bloom filter bookkeeping: each of 4 store files holds
   * duplicate_multiplier versions of num_unique_rows row/col keys, so each
   * file's reader must report all cells as entries but only unique keys as
   * filter entries; after a major compaction the combined counts must hold.
   */
  @Test
  public void testBloomFilterSize() throws IOException {
    byte[] fam1 = Bytes.toBytes("fam1");
    byte[] qf1 = Bytes.toBytes("col");
    byte[] val1 = Bytes.toBytes("value1");
    // Create Table
    HColumnDescriptor hcd = new HColumnDescriptor(fam1).setMaxVersions(Integer.MAX_VALUE)
        .setBloomFilterType(BloomType.ROWCOL);
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(hcd);
    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
    this.region = TEST_UTIL.createLocalHRegion(info, htd);
    int num_unique_rows = 10;
    int duplicate_multiplier = 2;
    int num_storefiles = 4;
    // Monotonically increasing timestamp so every put creates a distinct version.
    int version = 0;
    for (int f = 0; f < num_storefiles; f++) {
      for (int i = 0; i < duplicate_multiplier; i++) {
        for (int j = 0; j < num_unique_rows; j++) {
          Put put = new Put(Bytes.toBytes("row" + j));
          put.setDurability(Durability.SKIP_WAL);
          long ts = version++;
          put.addColumn(fam1, qf1, ts, val1);
          region.put(put);
        }
      }
      // One store file per outer iteration.
      region.flush(true);
    }
    // before compaction
    HStore store = region.getStore(fam1);
    Collection<HStoreFile> storeFiles = store.getStorefiles();
    for (HStoreFile storefile : storeFiles) {
      StoreFileReader reader = storefile.getReader();
      reader.loadFileInfo();
      reader.loadBloomfilter();
      // All cells are entries, but the bloom filter only tracks unique keys.
      assertEquals(num_unique_rows * duplicate_multiplier, reader.getEntries());
      assertEquals(num_unique_rows, reader.getFilterEntries());
    }
    // Major-compact all store files into one.
    region.compact(true);
    // after compaction
    storeFiles = store.getStorefiles();
    for (HStoreFile storefile : storeFiles) {
      StoreFileReader reader = storefile.getReader();
      reader.loadFileInfo();
      reader.loadBloomfilter();
      assertEquals(num_unique_rows * duplicate_multiplier * num_storefiles, reader.getEntries());
      assertEquals(num_unique_rows, reader.getFilterEntries());
    }
  }
  /**
   * Writes 4 versions of a single cell to a ROWCOL-bloom family, flushes, and
   * verifies a max-versions Get returns all versions newest-first.
   */
  @Test
  public void testAllColumnsWithBloomFilter() throws IOException {
    byte[] TABLE = Bytes.toBytes(name.getMethodName());
    byte[] FAMILY = Bytes.toBytes("family");
    // Create table
    HColumnDescriptor hcd = new HColumnDescriptor(FAMILY).setMaxVersions(Integer.MAX_VALUE)
        .setBloomFilterType(BloomType.ROWCOL);
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE));
    htd.addFamily(hcd);
    HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
    this.region = TEST_UTIL.createLocalHRegion(info, htd);
    // For row:0, col:0: insert versions 1 through 4.
    byte[] row = Bytes.toBytes("row:" + 0);
    byte[] column = Bytes.toBytes("column:" + 0);
    Put put = new Put(row);
    put.setDurability(Durability.SKIP_WAL);
    for (long idx = 1; idx <= 4; idx++) {
      put.addColumn(FAMILY, column, idx, Bytes.toBytes("value-version-" + idx));
    }
    region.put(put);
    // Flush
    region.flush(true);
    // Get rows
    Get get = new Get(row);
    get.setMaxVersions();
    Cell[] kvs = region.get(get).rawCells();
    // Check if rows are correct: versions come back in descending timestamp order.
    assertEquals(4, kvs.length);
    checkOneCell(kvs[0], FAMILY, 0, 0, 4);
    checkOneCell(kvs[1], FAMILY, 0, 0, 3);
    checkOneCell(kvs[2], FAMILY, 0, 0, 2);
    checkOneCell(kvs[3], FAMILY, 0, 0, 1);
  }
/**
* Testcase to cover bug-fix for HBASE-2823 Ensures correct delete when
* issuing delete row on columns with bloom filter set to row+col
* (BloomType.ROWCOL)
*/
@Test
public void testDeleteRowWithBloomFilter() throws IOException {
byte[] familyName = Bytes.toBytes("familyName");
// Create Table
HColumnDescriptor hcd = new HColumnDescriptor(familyName).setMaxVersions(Integer.MAX_VALUE)
.setBloomFilterType(BloomType.ROWCOL);
HTableDescriptor htd = new HTableDescriptor(tableName);
htd.addFamily(hcd);
HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false);
this.region = TEST_UTIL.createLocalHRegion(info, htd);
// Insert some data
byte[] row = Bytes.toBytes("row1");
byte[] col = Bytes.toBytes("col1");
Put put = new Put(row);
put.addColumn(familyName, col, 1, Bytes.toBytes("SomeRandomValue"));
region.put(put);
region.flush(true);
Delete del = new Delete(row);
region.delete(del);
region.flush(true);
// Get remaining rows (should have none)
Get get = new Get(row);
get.addColumn(familyName, col);
Cell[] keyValues = region.get(get).rawCells();
assertEquals(0, keyValues.length);
}
@Test
public void testgetHDFSBlocksDistribution() throws Exception {
HBaseTestingUtility htu = new HBaseTestingUtility();
// Why do we set the block size in this test? If we set it smaller than the kvs, then we'll
// break up the file in to more pieces that can be distributed across the three nodes and we
// won't be able to have the condition this test asserts; that at least one node has
// a copy of all replicas -- if small block size, then blocks are spread evenly across the
// the three nodes. hfilev3 with tags seems to put us over the block size. St.Ack.
// final int DEFAULT_BLOCK_SIZE = 1024;
// htu.getConfiguration().setLong("dfs.blocksize", DEFAULT_BLOCK_SIZE);
htu.getConfiguration().setInt("dfs.replication", 2);
// set up a cluster with 3 nodes
MiniHBaseCluster cluster = null;
String dataNodeHosts[] = new String[] { "host1", "host2", "host3" };
int regionServersCount = 3;
try {
StartMiniClusterOption option = StartMiniClusterOption.builder()
.numRegionServers(regionServersCount).dataNodeHosts(dataNodeHosts).build();
cluster = htu.startMiniCluster(option);
byte[][] families = { fam1, fam2 };
Table ht = htu.createTable(tableName, families);
// Setting up region
byte row[] = Bytes.toBytes("row1");
byte col[] = Bytes.toBytes("col1");
Put put = new Put(row);
put.addColumn(fam1, col, 1, Bytes.toBytes("test1"));
put.addColumn(fam2, col, 1, Bytes.toBytes("test2"));
ht.put(put);
HRegion firstRegion = htu.getHBaseCluster().getRegions(tableName).get(0);
firstRegion.flush(true);
HDFSBlocksDistribution blocksDistribution1 = firstRegion.getHDFSBlocksDistribution();
// Given the default replication factor is 2 and we have 2 HFiles,
// we will have total of 4 replica of blocks on 3 datanodes; thus there
// must be at least one host that have replica for 2 HFiles. That host's
// weight will be equal to the unique block weight.
long uniqueBlocksWeight1 = blocksDistribution1.getUniqueBlocksTotalWeight();
StringBuilder sb = new StringBuilder();
for (String host: blocksDistribution1.getTopHosts()) {
if (sb.length() > 0) sb.append(", ");
sb.append(host);
sb.append("=");
sb.append(blocksDistribution1.getWeight(host));
}
String topHost = blocksDistribution1.getTopHosts().get(0);
long topHostWeight = blocksDistribution1.getWeight(topHost);
String msg = "uniqueBlocksWeight=" + uniqueBlocksWeight1 + ", topHostWeight=" +
topHostWeight + ", topHost=" + topHost + "; " + sb.toString();
LOG.info(msg);
assertTrue(msg, uniqueBlocksWeight1 == topHostWeight);
// use the static method to compute the value, it should be the same.
// static method is used by load balancer or other components
HDFSBlocksDistribution blocksDistribution2 = HRegion.computeHDFSBlocksDistribution(
htu.getConfiguration(), firstRegion.getTableDescriptor(), firstRegion.getRegionInfo());
long uniqueBlocksWeight2 = blocksDistribution2.getUniqueBlocksTotalWeight();
assertTrue(uniqueBlocksWeight1 == uniqueBlocksWeight2);
ht.close();
} finally {
if (cluster != null) {
htu.shutdownMiniCluster();
}
}
}
  /**
   * Checks that when region initialization fails with an exception, the
   * region's monitored task state is set to ABORTED.
   *
   * @throws Exception
   */
  @Test
  public void testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization() throws Exception {
    HRegionInfo info;
    try {
      // Filesystem mock whose exists() always throws, forcing initialize() to fail.
      FileSystem fs = Mockito.mock(FileSystem.class);
      Mockito.when(fs.exists((Path) Mockito.anyObject())).thenThrow(new IOException());
      HTableDescriptor htd = new HTableDescriptor(tableName);
      htd.addFamily(new HColumnDescriptor("cf"));
      info = new HRegionInfo(htd.getTableName(), HConstants.EMPTY_BYTE_ARRAY,
          HConstants.EMPTY_BYTE_ARRAY, false);
      Path path = new Path(dir + "testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization");
      region = HRegion.newHRegion(path, null, fs, CONF, info, htd, null);
      // region initialization throws IOException and set task state to ABORTED.
      region.initialize();
      fail("Region initialization should fail due to IOException");
    } catch (IOException io) {
      List<MonitoredTask> tasks = TaskMonitor.get().getTasks();
      // NOTE(review): if no matching task is found this loop asserts nothing
      // and the test passes vacuously — consider asserting a task was found.
      for (MonitoredTask monitoredTask : tasks) {
        if (!(monitoredTask instanceof MonitoredRPCHandler)
            && monitoredTask.getDescription().contains(region.toString())) {
          assertTrue("Region state should be ABORTED.",
              monitoredTask.getState().equals(MonitoredTask.State.ABORTED));
          break;
        }
      }
    }
  }
  /**
   * Verifies that the .regioninfo file is written on region creation and that
   * it is recreated if missing during region opening.
   */
  @Test
  public void testRegionInfoFileCreation() throws IOException {
    Path rootDir = new Path(dir + "testRegionInfoFileCreation");
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    htd.addFamily(new HColumnDescriptor("cf"));
    HRegionInfo hri = new HRegionInfo(htd.getTableName());
    // Create a region and skip the initialization (like CreateTableHandler)
    region = HBaseTestingUtility.createRegionAndWAL(hri, rootDir, CONF, htd, false);
    Path regionDir = region.getRegionFileSystem().getRegionDir();
    FileSystem fs = region.getRegionFileSystem().getFileSystem();
    HBaseTestingUtility.closeRegionAndWAL(region);
    Path regionInfoFile = new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE);
    // Verify that the .regioninfo file is present
    assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir",
        fs.exists(regionInfoFile));
    // Try to open the region
    region = HRegion.openHRegion(rootDir, hri, htd, null, CONF);
    assertEquals(regionDir, region.getRegionFileSystem().getRegionDir());
    HBaseTestingUtility.closeRegionAndWAL(region);
    // Verify that the .regioninfo file is still there
    assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir",
        fs.exists(regionInfoFile));
    // Remove the .regioninfo file and verify is recreated on region open
    fs.delete(regionInfoFile, true);
    assertFalse(HRegionFileSystem.REGION_INFO_FILE + " should be removed from the region dir",
        fs.exists(regionInfoFile));
    region = HRegion.openHRegion(rootDir, hri, htd, null, CONF);
    // region = TEST_UTIL.openHRegion(hri, htd);
    assertEquals(regionDir, region.getRegionFileSystem().getRegionDir());
    HBaseTestingUtility.closeRegionAndWAL(region);
    // Verify that the .regioninfo file is still there
    assertTrue(HRegionFileSystem.REGION_INFO_FILE + " should be present in the region dir",
        fs.exists(new Path(regionDir, HRegionFileSystem.REGION_INFO_FILE)));
    // Region was already closed above; null it so teardown does not close it
    // again — NOTE(review): assumes teardown closes this.region when non-null.
    region = null;
  }
/**
* TestCase for increment
*/
private static class Incrementer implements Runnable {
private HRegion region;
private final static byte[] incRow = Bytes.toBytes("incRow");
private final static byte[] family = Bytes.toBytes("family");
private final static byte[] qualifier = Bytes.toBytes("qualifier");
private final static long ONE = 1L;
private int incCounter;
public Incrementer(HRegion region, int incCounter) {
this.region = region;
this.incCounter = incCounter;
}
@Override
public void run() {
int count = 0;
while (count < incCounter) {
Increment inc = new Increment(incRow);
inc.addColumn(family, qualifier, ONE);
count++;
try {
region.increment(inc);
} catch (IOException e) {
LOG.info("Count=" + count + ", " + e);
break;
}
}
}
}
  /**
   * Runs 20 incrementer threads (100 increments each) concurrently with a
   * thread that flushes the memstore in a loop, then checks the final counter
   * equals 20 * 100 — i.e. no increment is lost across flushes.
   * @throws Exception
   */
  @Test
  public void testParallelIncrementWithMemStoreFlush() throws Exception {
    byte[] family = Incrementer.family;
    this.region = initHRegion(tableName, method, CONF, family);
    final HRegion region = this.region;
    final AtomicBoolean incrementDone = new AtomicBoolean(false);
    // Flushes continuously until all incrementer threads have finished.
    Runnable flusher = new Runnable() {
      @Override
      public void run() {
        while (!incrementDone.get()) {
          try {
            region.flush(true);
          } catch (Exception e) {
            e.printStackTrace();
          }
        }
      }
    };
    // after all increment finished, the row will increment to 20*100 = 2000
    int threadNum = 20;
    int incCounter = 100;
    long expected = (long) threadNum * incCounter;
    Thread[] incrementers = new Thread[threadNum];
    Thread flushThread = new Thread(flusher);
    for (int i = 0; i < threadNum; i++) {
      incrementers[i] = new Thread(new Incrementer(this.region, incCounter));
      incrementers[i].start();
    }
    flushThread.start();
    for (int i = 0; i < threadNum; i++) {
      incrementers[i].join();
    }
    // Signal the flusher to stop only after every incrementer has finished.
    incrementDone.set(true);
    flushThread.join();
    Get get = new Get(Incrementer.incRow);
    get.addColumn(Incrementer.family, Incrementer.qualifier);
    get.setMaxVersions(1);
    Result res = this.region.get(get);
    List<Cell> kvs = res.getColumnCells(Incrementer.family, Incrementer.qualifier);
    // we just got the latest version
    assertEquals(1, kvs.size());
    Cell kv = kvs.get(0);
    assertEquals(expected, Bytes.toLong(kv.getValueArray(), kv.getValueOffset()));
  }
/**
* TestCase for append
*/
private static class Appender implements Runnable {
private HRegion region;
private final static byte[] appendRow = Bytes.toBytes("appendRow");
private final static byte[] family = Bytes.toBytes("family");
private final static byte[] qualifier = Bytes.toBytes("qualifier");
private final static byte[] CHAR = Bytes.toBytes("a");
private int appendCounter;
public Appender(HRegion region, int appendCounter) {
this.region = region;
this.appendCounter = appendCounter;
}
@Override
public void run() {
int count = 0;
while (count < appendCounter) {
Append app = new Append(appendRow);
app.addColumn(family, qualifier, CHAR);
count++;
try {
region.append(app);
} catch (IOException e) {
LOG.info("Count=" + count + ", max=" + appendCounter + ", " + e);
break;
}
}
}
}
/**
* Test case to check append function with memstore flushing
* @throws Exception
*/
@Test
public void testParallelAppendWithMemStoreFlush() throws Exception {
byte[] family = Appender.family;
this.region = initHRegion(tableName, method, CONF, family);
final HRegion region = this.region;
final AtomicBoolean appendDone = new AtomicBoolean(false);
Runnable flusher = new Runnable() {
@Override
public void run() {
while (!appendDone.get()) {
try {
region.flush(true);
} catch (Exception e) {
e.printStackTrace();
}
}
}
};
// After all append finished, the value will append to threadNum *
// appendCounter Appender.CHAR
int threadNum = 20;
int appendCounter = 100;
byte[] expected = new byte[threadNum * appendCounter];
for (int i = 0; i < threadNum * appendCounter; i++) {
System.arraycopy(Appender.CHAR, 0, expected, i, 1);
}
Thread[] appenders = new Thread[threadNum];
Thread flushThread = new Thread(flusher);
for (int i = 0; i < threadNum; i++) {
appenders[i] = new Thread(new Appender(this.region, appendCounter));
appenders[i].start();
}
flushThread.start();
for (int i = 0; i < threadNum; i++) {
appenders[i].join();
}
appendDone.set(true);
flushThread.join();
Get get = new Get(Appender.appendRow);
get.addColumn(Appender.family, Appender.qualifier);
get.setMaxVersions(1);
Result res = this.region.get(get);
List<Cell> kvs = res.getColumnCells(Appender.family, Appender.qualifier);
// we just got the latest version
assertEquals(1, kvs.size());
Cell kv = kvs.get(0);
byte[] appendResult = new byte[kv.getValueLength()];
System.arraycopy(kv.getValueArray(), kv.getValueOffset(), appendResult, 0, kv.getValueLength());
assertArrayEquals(expected, appendResult);
}
/**
* Test case to check put function with memstore flushing for same row, same ts
* @throws Exception
*/
@Test
public void testPutWithMemStoreFlush() throws Exception {
byte[] family = Bytes.toBytes("family");
byte[] qualifier = Bytes.toBytes("qualifier");
byte[] row = Bytes.toBytes("putRow");
byte[] value = null;
this.region = initHRegion(tableName, method, CONF, family);
Put put = null;
Get get = null;
List<Cell> kvs = null;
Result res = null;
put = new Put(row);
value = Bytes.toBytes("value0");
put.addColumn(family, qualifier, 1234567L, value);
region.put(put);
get = new Get(row);
get.addColumn(family, qualifier);
get.setMaxVersions();
res = this.region.get(get);
kvs = res.getColumnCells(family, qualifier);
assertEquals(1, kvs.size());
assertArrayEquals(Bytes.toBytes("value0"), CellUtil.cloneValue(kvs.get(0)));
region.flush(true);
get = new Get(row);
get.addColumn(family, qualifier);
get.setMaxVersions();
res = this.region.get(get);
kvs = res.getColumnCells(family, qualifier);
assertEquals(1, kvs.size());
assertArrayEquals(Bytes.toBytes("value0"), CellUtil.cloneValue(kvs.get(0)));
put = new Put(row);
value = Bytes.toBytes("value1");
put.addColumn(family, qualifier, 1234567L, value);
region.put(put);
get = new Get(row);
get.addColumn(family, qualifier);
get.setMaxVersions();
res = this.region.get(get);
kvs = res.getColumnCells(family, qualifier);
assertEquals(1, kvs.size());
assertArrayEquals(Bytes.toBytes("value1"), CellUtil.cloneValue(kvs.get(0)));
region.flush(true);
get = new Get(row);
get.addColumn(family, qualifier);
get.setMaxVersions();
res = this.region.get(get);
kvs = res.getColumnCells(family, qualifier);
assertEquals(1, kvs.size());
assertArrayEquals(Bytes.toBytes("value1"), CellUtil.cloneValue(kvs.get(0)));
}
  /**
   * Exercises the 5 x 5 matrix of table durability x mutation durability and,
   * for each combination, checks whether the WAL sees an append and how it is
   * synced (directly, via the background log syncer, or not at all); see
   * {@link #durabilityTest}.
   */
  @Test
  public void testDurability() throws Exception {
    // there are 5 x 5 cases:
    // table durability(SYNC,FSYNC,ASYC,SKIP,USE_DEFAULT) x mutation
    // durability(SYNC,FSYNC,ASYC,SKIP,USE_DEFAULT)
    // expected cases for append and sync wal
    durabilityTest(method, Durability.SYNC_WAL, Durability.SYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.SYNC_WAL, Durability.FSYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.SYNC_WAL, Durability.USE_DEFAULT, 0, true, true, false);
    durabilityTest(method, Durability.FSYNC_WAL, Durability.SYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.FSYNC_WAL, Durability.FSYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.FSYNC_WAL, Durability.USE_DEFAULT, 0, true, true, false);
    durabilityTest(method, Durability.ASYNC_WAL, Durability.SYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.ASYNC_WAL, Durability.FSYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.SKIP_WAL, Durability.SYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.SKIP_WAL, Durability.FSYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.USE_DEFAULT, Durability.SYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.USE_DEFAULT, Durability.FSYNC_WAL, 0, true, true, false);
    durabilityTest(method, Durability.USE_DEFAULT, Durability.USE_DEFAULT, 0, true, true, false);
    // expected cases for async wal
    durabilityTest(method, Durability.SYNC_WAL, Durability.ASYNC_WAL, 0, true, false, false);
    durabilityTest(method, Durability.FSYNC_WAL, Durability.ASYNC_WAL, 0, true, false, false);
    durabilityTest(method, Durability.ASYNC_WAL, Durability.ASYNC_WAL, 0, true, false, false);
    durabilityTest(method, Durability.SKIP_WAL, Durability.ASYNC_WAL, 0, true, false, false);
    durabilityTest(method, Durability.USE_DEFAULT, Durability.ASYNC_WAL, 0, true, false, false);
    durabilityTest(method, Durability.ASYNC_WAL, Durability.USE_DEFAULT, 0, true, false, false);
    // With a 5s timeout the background log syncer is expected to perform the sync.
    durabilityTest(method, Durability.SYNC_WAL, Durability.ASYNC_WAL, 5000, true, false, true);
    durabilityTest(method, Durability.FSYNC_WAL, Durability.ASYNC_WAL, 5000, true, false, true);
    durabilityTest(method, Durability.ASYNC_WAL, Durability.ASYNC_WAL, 5000, true, false, true);
    durabilityTest(method, Durability.SKIP_WAL, Durability.ASYNC_WAL, 5000, true, false, true);
    durabilityTest(method, Durability.USE_DEFAULT, Durability.ASYNC_WAL, 5000, true, false, true);
    durabilityTest(method, Durability.ASYNC_WAL, Durability.USE_DEFAULT, 5000, true, false, true);
    // expect skip wal cases
    durabilityTest(method, Durability.SYNC_WAL, Durability.SKIP_WAL, 0, false, false, false);
    durabilityTest(method, Durability.FSYNC_WAL, Durability.SKIP_WAL, 0, false, false, false);
    durabilityTest(method, Durability.ASYNC_WAL, Durability.SKIP_WAL, 0, false, false, false);
    durabilityTest(method, Durability.SKIP_WAL, Durability.SKIP_WAL, 0, false, false, false);
    durabilityTest(method, Durability.USE_DEFAULT, Durability.SKIP_WAL, 0, false, false, false);
    durabilityTest(method, Durability.SKIP_WAL, Durability.USE_DEFAULT, 0, false, false, false);
  }
  /**
   * Runs one put with the given mutation durability against a region whose
   * table has the given table durability, using a spied WAL, and verifies
   * whether append and sync were invoked on the WAL.
   *
   * @param timeout ms to wait for a deferred (log-syncer) sync
   * @param expectAppend whether WAL.append must be called exactly once
   * @param expectSync whether HRegion itself must sync the WAL
   * @param expectSyncFromLogSyncer whether the background log syncer must sync
   */
  private void durabilityTest(String method, Durability tableDurability,
      Durability mutationDurability, long timeout, boolean expectAppend, final boolean expectSync,
      final boolean expectSyncFromLogSyncer) throws Exception {
    Configuration conf = HBaseConfiguration.create(CONF);
    // Unique name per combination so each run gets its own log directory.
    method = method + "_" + tableDurability.name() + "_" + mutationDurability.name();
    byte[] family = Bytes.toBytes("family");
    Path logDir = new Path(new Path(dir + method), "log");
    final Configuration walConf = new Configuration(conf);
    FSUtils.setRootDir(walConf, logDir);
    // XXX: The spied AsyncFSWAL can not work properly because of a Mockito defect that can not
    // deal with classes which have a field of an inner class. See discussions in HBASE-15536.
    walConf.set(WALFactory.WAL_PROVIDER, "filesystem");
    final WALFactory wals = new WALFactory(walConf, TEST_UTIL.getRandomUUID().toString());
    final WAL wal = spy(wals.getWAL(RegionInfoBuilder.newBuilder(tableName).build()));
    this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
        HConstants.EMPTY_END_ROW, false, tableDurability, wal,
        new byte[][] { family });
    Put put = new Put(Bytes.toBytes("r1"));
    put.addColumn(family, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
    put.setDurability(mutationDurability);
    region.put(put);
    //verify append called or not
    verify(wal, expectAppend ? times(1) : never())
        .append((HRegionInfo)any(), (WALKeyImpl)any(),
            (WALEdit)any(), Mockito.anyBoolean());
    // verify sync called or not
    if (expectSync || expectSyncFromLogSyncer) {
      TEST_UTIL.waitFor(timeout, new Waiter.Predicate<Exception>() {
        @Override
        public boolean evaluate() throws Exception {
          // Swallow verification failures until the waiter's timeout expires;
          // the last successful verify within the window satisfies the test.
          try {
            if (expectSync) {
              verify(wal, times(1)).sync(anyLong()); // Hregion calls this one
            } else if (expectSyncFromLogSyncer) {
              verify(wal, times(1)).sync(); // wal syncer calls this one
            }
          } catch (Throwable ignore) {
          }
          return true;
        }
      });
    } else {
      //verify(wal, never()).sync(anyLong());
      verify(wal, never()).sync();
    }
    HBaseTestingUtility.closeRegionAndWAL(this.region);
    wals.close();
    this.region = null;
  }
  /**
   * Writes 1000 rows to a primary region replica, flushes, then opens a
   * secondary replica (replicaId=1) over the same root dir and verifies the
   * flushed data is readable from the secondary.
   */
  @Test
  public void testRegionReplicaSecondary() throws IOException {
    // create a primary region, load some data and flush
    // create a secondary region, and do a get against that
    Path rootDir = new Path(dir + name.getMethodName());
    FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
    byte[][] families = new byte[][] {
        Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3")
    };
    byte[] cq = Bytes.toBytes("cq");
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    for (byte[] family : families) {
      htd.addFamily(new HColumnDescriptor(family));
    }
    long time = System.currentTimeMillis();
    // Same table, key range and regionId; only the replica id (last arg) differs.
    HRegionInfo primaryHri = new HRegionInfo(htd.getTableName(),
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
        false, time, 0);
    HRegionInfo secondaryHri = new HRegionInfo(htd.getTableName(),
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
        false, time, 1);
    HRegion primaryRegion = null, secondaryRegion = null;
    try {
      primaryRegion = HBaseTestingUtility.createRegionAndWAL(primaryHri,
          rootDir, TEST_UTIL.getConfiguration(), htd);
      // load some data
      putData(primaryRegion, 0, 1000, cq, families);
      // flush region
      primaryRegion.flush(true);
      // open secondary region
      secondaryRegion = HRegion.openHRegion(rootDir, secondaryHri, htd, null, CONF);
      verifyData(secondaryRegion, 0, 1000, cq, families);
    } finally {
      if (primaryRegion != null) {
        HBaseTestingUtility.closeRegionAndWAL(primaryRegion);
      }
      if (secondaryRegion != null) {
        HBaseTestingUtility.closeRegionAndWAL(secondaryRegion);
      }
    }
  }
  /**
   * Opens a secondary replica over a flushed primary and verifies that writes
   * against the secondary fail with an IOException — secondaries are
   * read-only.
   */
  @Test
  public void testRegionReplicaSecondaryIsReadOnly() throws IOException {
    // create a primary region, load some data and flush
    // create a secondary region, and do a put against that
    Path rootDir = new Path(dir + name.getMethodName());
    FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
    byte[][] families = new byte[][] {
        Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3")
    };
    byte[] cq = Bytes.toBytes("cq");
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    for (byte[] family : families) {
      htd.addFamily(new HColumnDescriptor(family));
    }
    long time = System.currentTimeMillis();
    // Same table, key range and regionId; only the replica id (last arg) differs.
    HRegionInfo primaryHri = new HRegionInfo(htd.getTableName(),
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
        false, time, 0);
    HRegionInfo secondaryHri = new HRegionInfo(htd.getTableName(),
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
        false, time, 1);
    HRegion primaryRegion = null, secondaryRegion = null;
    try {
      primaryRegion = HBaseTestingUtility.createRegionAndWAL(primaryHri,
          rootDir, TEST_UTIL.getConfiguration(), htd);
      // load some data
      putData(primaryRegion, 0, 1000, cq, families);
      // flush region
      primaryRegion.flush(true);
      // open secondary region
      secondaryRegion = HRegion.openHRegion(rootDir, secondaryHri, htd, null, CONF);
      try {
        putData(secondaryRegion, 0, 1000, cq, families);
        fail("Should have thrown exception");
      } catch (IOException ex) {
        // expected
      }
    } finally {
      if (primaryRegion != null) {
        HBaseTestingUtility.closeRegionAndWAL(primaryRegion);
      }
      if (secondaryRegion != null) {
        HBaseTestingUtility.closeRegionAndWAL(secondaryRegion);
      }
    }
  }
static WALFactory createWALFactory(Configuration conf, Path rootDir) throws IOException {
Configuration confForWAL = new Configuration(conf);
confForWAL.set(HConstants.HBASE_DIR, rootDir.toString());
return new WALFactory(confForWAL, "hregion-" + RandomStringUtils.randomNumeric(8));
}
  /**
   * Simulates a compaction on the primary by archiving its store files after a
   * secondary replica has been opened, then verifies the secondary still reads
   * every row. NOTE(review): relies on the secondary coping with the primary's
   * files being moved to the archive — confirm against store-file archiving
   * semantics.
   */
  @Test
  public void testCompactionFromPrimary() throws IOException {
    Path rootDir = new Path(dir + name.getMethodName());
    FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
    byte[][] families = new byte[][] {
        Bytes.toBytes("cf1"), Bytes.toBytes("cf2"), Bytes.toBytes("cf3")
    };
    byte[] cq = Bytes.toBytes("cq");
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
    for (byte[] family : families) {
      htd.addFamily(new HColumnDescriptor(family));
    }
    long time = System.currentTimeMillis();
    // Same table, key range and regionId; only the replica id (last arg) differs.
    HRegionInfo primaryHri = new HRegionInfo(htd.getTableName(),
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
        false, time, 0);
    HRegionInfo secondaryHri = new HRegionInfo(htd.getTableName(),
        HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW,
        false, time, 1);
    HRegion primaryRegion = null, secondaryRegion = null;
    try {
      primaryRegion = HBaseTestingUtility.createRegionAndWAL(primaryHri,
          rootDir, TEST_UTIL.getConfiguration(), htd);
      // load some data
      putData(primaryRegion, 0, 1000, cq, families);
      // flush region
      primaryRegion.flush(true);
      // open secondary region
      secondaryRegion = HRegion.openHRegion(rootDir, secondaryHri, htd, null, CONF);
      // move the file of the primary region to the archive, simulating a compaction
      Collection<HStoreFile> storeFiles = primaryRegion.getStore(families[0]).getStorefiles();
      primaryRegion.getRegionFileSystem().removeStoreFiles(Bytes.toString(families[0]), storeFiles);
      // The primary's store dir for the family must now be empty.
      Collection<StoreFileInfo> storeFileInfos = primaryRegion.getRegionFileSystem()
          .getStoreFiles(families[0]);
      Assert.assertTrue(storeFileInfos == null || storeFileInfos.isEmpty());
      verifyData(secondaryRegion, 0, 1000, cq, families);
    } finally {
      if (primaryRegion != null) {
        HBaseTestingUtility.closeRegionAndWAL(primaryRegion);
      }
      if (secondaryRegion != null) {
        HBaseTestingUtility.closeRegionAndWAL(secondaryRegion);
      }
    }
  }
  /** Loads rows [startRow, startRow + numRows) into {@link #region}. */
  private void putData(int startRow, int numRows, byte[] qf, byte[]... families) throws
      IOException {
    putData(this.region, startRow, numRows, qf, families);
  }
  /** Loads rows into the given region using SKIP_WAL durability. */
  private void putData(HRegion region,
      int startRow, int numRows, byte[] qf, byte[]... families) throws IOException {
    putData(region, Durability.SKIP_WAL, startRow, numRows, qf, families);
  }
static void putData(HRegion region, Durability durability,
int startRow, int numRows, byte[] qf, byte[]... families) throws IOException {
for (int i = startRow; i < startRow + numRows; i++) {
Put put = new Put(Bytes.toBytes("" + i));
put.setDurability(durability);
for (byte[] family : families) {
put.addColumn(family, qf, null);
}
region.put(put);
LOG.info(put.toString());
}
}
static void verifyData(HRegion newReg, int startRow, int numRows, byte[] qf, byte[]... families)
throws IOException {
for (int i = startRow; i < startRow + numRows; i++) {
byte[] row = Bytes.toBytes("" + i);
Get get = new Get(row);
for (byte[] family : families) {
get.addColumn(family, qf);
}
Result result = newReg.get(get);
Cell[] raw = result.rawCells();
assertEquals(families.length, result.size());
for (int j = 0; j < families.length; j++) {
assertTrue(CellUtil.matchingRows(raw[j], row));
assertTrue(CellUtil.matchingFamily(raw[j], families[j]));
assertTrue(CellUtil.matchingQualifier(raw[j], qf));
}
}
}
static void assertGet(final HRegion r, final byte[] family, final byte[] k) throws IOException {
// Now I have k, get values out and assert they are as expected.
Get get = new Get(k).addFamily(family).setMaxVersions();
Cell[] results = r.get(get).rawCells();
for (int j = 0; j < results.length; j++) {
byte[] tmp = CellUtil.cloneValue(results[j]);
// Row should be equal to value every time.
assertTrue(Bytes.equals(k, tmp));
}
}
/*
* Assert first value in the passed region is <code>firstValue</code>.
*
* @param r
*
* @param fs
*
* @param firstValue
*
* @throws IOException
*/
protected void assertScan(final HRegion r, final byte[] fs, final byte[] firstValue)
throws IOException {
byte[][] families = { fs };
Scan scan = new Scan();
for (int i = 0; i < families.length; i++)
scan.addFamily(families[i]);
InternalScanner s = r.getScanner(scan);
try {
List<Cell> curVals = new ArrayList<>();
boolean first = true;
OUTER_LOOP: while (s.next(curVals)) {
for (Cell kv : curVals) {
byte[] val = CellUtil.cloneValue(kv);
byte[] curval = val;
if (first) {
first = false;
assertTrue(Bytes.compareTo(curval, firstValue) == 0);
} else {
// Not asserting anything. Might as well break.
break OUTER_LOOP;
}
}
}
} finally {
s.close();
}
}
  /**
   * Test that we get the expected flush results back: an empty memstore does
   * not flush; early flushes do not request compaction; once the store-file
   * count crosses the compaction threshold, flushes report that a compaction
   * is needed.
   */
  @Test
  public void testFlushResult() throws IOException {
    byte[] family = Bytes.toBytes("family");
    this.region = initHRegion(tableName, method, family);
    // empty memstore, flush doesn't run
    HRegion.FlushResult fr = region.flush(true);
    assertFalse(fr.isFlushSucceeded());
    assertFalse(fr.isCompactionNeeded());
    // Flush enough files to get up to the threshold, doesn't need compactions
    for (int i = 0; i < 2; i++) {
      Put put = new Put(tableName.toBytes()).addColumn(family, family, tableName.toBytes());
      region.put(put);
      fr = region.flush(true);
      assertTrue(fr.isFlushSucceeded());
      assertFalse(fr.isCompactionNeeded());
    }
    // Two flushes after the threshold, compactions are needed
    for (int i = 0; i < 2; i++) {
      Put put = new Put(tableName.toBytes()).addColumn(family, family, tableName.toBytes());
      region.put(put);
      fr = region.flush(true);
      assertTrue(fr.isFlushSucceeded());
      assertTrue(fr.isCompactionNeeded());
    }
  }
  /**
   * Tunes the test configuration for split tests: compact at 2 store files,
   * long scanner timeout and client pause, and a small max region size so data
   * loading reliably triggers a split. NOTE(review): mutates the shared CONF
   * in place rather than a copy.
   */
  protected Configuration initSplit() {
    // Always compact if there is more than one store file.
    CONF.setInt("hbase.hstore.compactionThreshold", 2);
    CONF.setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 10 * 1000);
    // Increase the amount of time between client retries
    CONF.setLong("hbase.client.pause", 15 * 1000);
    // This size should make it so we always split using the addContent
    // below. After adding all data, the first region is 1.3M
    CONF.setLong(HConstants.HREGION_MAX_FILESIZE, 1024 * 128);
    return CONF;
  }
  /**
   * Creates a writable test region spanning the whole key space.
   *
   * @return A region on which you must call
   *         {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done.
   */
  protected HRegion initHRegion(TableName tableName, String callingMethod, Configuration conf,
      byte[]... families) throws IOException {
    return initHRegion(tableName, callingMethod, conf, false, families);
  }
  /**
   * Creates a test region spanning the whole key space, optionally read-only.
   *
   * @return A region on which you must call
   *         {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done.
   */
  protected HRegion initHRegion(TableName tableName, String callingMethod, Configuration conf,
      boolean isReadOnly, byte[]... families) throws IOException {
    return initHRegion(tableName, null, null, callingMethod, conf, isReadOnly, families);
  }
/**
 * Opens a test region over [startKey, stopKey) backed by a WAL created in a
 * per-test directory on the test filesystem.
 *
 * @return A region on which you must call
 *         {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done.
 */
protected HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,
    String callingMethod, Configuration conf, boolean isReadOnly, byte[]... families)
    throws IOException {
  Path logDir = TEST_UTIL.getDataTestDirOnTestFS(callingMethod + ".log");
  // Initialize the memstore chunk creator before any memstore is instantiated.
  ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
  HRegionInfo hri = new HRegionInfo(tableName, startKey, stopKey);
  final WAL wal = HBaseTestingUtility.createWal(conf, logDir, hri);
  // NOTE(review): conf is only used for WAL creation here; the delegate below
  // does not receive it and always opens with SYNC_WAL durability.
  return initHRegion(tableName, startKey, stopKey, isReadOnly,
      Durability.SYNC_WAL, wal, families);
}
/**
 * Opens a local test region with an explicit durability setting and a
 * caller-supplied WAL.
 *
 * @param durability durability to open the region with
 * @param wal write-ahead log the region should append to
 * @return A region on which you must call
 *         {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} when done.
 */
public HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,
    boolean isReadOnly, Durability durability, WAL wal, byte[]... families) throws IOException {
  return TEST_UTIL.createLocalHRegion(tableName, startKey, stopKey,
      isReadOnly, durability, wal, families);
}
/**
 * Asserts that the passed-in Cell has the expected row, column family,
 * qualifier, timestamp and value for the given (rowIdx, colIdx, ts)
 * coordinates. Expected encodings: row "row:&lt;rowIdx&gt;", qualifier
 * "column:&lt;colIdx&gt;", value "value-version-&lt;ts&gt;".
 *
 * @param kv the cell under test
 * @param cf expected column family bytes
 * @param rowIdx index encoded into the expected row key
 * @param colIdx index encoded into the expected column qualifier
 * @param ts expected timestamp, also encoded into the expected value
 */
private void checkOneCell(Cell kv, byte[] cf, int rowIdx, int colIdx, long ts) {
  String ctx = "rowIdx=" + rowIdx + "; colIdx=" + colIdx + "; ts=" + ts;
  // Fixed message typo ("which checking" -> "while checking") so it matches
  // the other assertion messages in this helper.
  assertEquals("Row mismatch while checking: " + ctx, "row:" + rowIdx,
      Bytes.toString(CellUtil.cloneRow(kv)));
  assertEquals("ColumnFamily mismatch while checking: " + ctx, Bytes.toString(cf),
      Bytes.toString(CellUtil.cloneFamily(kv)));
  assertEquals("Column qualifier mismatch while checking: " + ctx, "column:" + colIdx,
      Bytes.toString(CellUtil.cloneQualifier(kv)));
  assertEquals("Timestamp mismatch while checking: " + ctx, ts, kv.getTimestamp());
  assertEquals("Value mismatch while checking: " + ctx, "value-version-" + ts,
      Bytes.toString(CellUtil.cloneValue(kv)));
}
/**
 * Reverse scan over memstore-only data, single column family: starting at
 * rowC, rows come back in descending order (rowC, rowB, rowA), with both
 * versions of rowC returned because maxVersions is 5.
 */
@Test
public void testReverseScanner_FromMemStore_SingleCF_Normal()
    throws IOException {
  byte[] rowC = Bytes.toBytes("rowC");
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] cf = Bytes.toBytes("CF");
  byte[][] families = { cf };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  this.region = initHRegion(tableName, method, families);
  // rowC gets two versions (ts and ts + 1); rowA and rowB get one each.
  KeyValue kv1 = new KeyValue(rowC, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(rowC, cf, col, ts + 1, KeyValue.Type.Put,
      null);
  KeyValue kv2 = new KeyValue(rowA, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(rowB, cf, col, ts, KeyValue.Type.Put, null);
  Put put = null;
  put = new Put(rowC);
  put.add(kv1);
  put.add(kv11);
  region.put(put);
  put = new Put(rowA);
  put.add(kv2);
  region.put(put);
  put = new Put(rowB);
  put.add(kv3);
  region.put(put);
  // Reversed scan starting (inclusive) at rowC.
  Scan scan = new Scan(rowC);
  scan.setMaxVersions(5);
  scan.setReversed(true);
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext = scanner.next(currRow);
  // First row out: rowC with both versions.
  assertEquals(2, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowC, 0, rowC.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  // Then rowB.
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowB, 0, rowB.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  // Finally rowA, after which the scanner reports exhaustion.
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowA, 0, rowA.length));
  assertFalse(hasNext);
  scanner.close();
}
/**
 * Reverse scan whose start key (rowD) is larger than any existing row: the
 * scan must seek back to the closest existing row (rowC) and then continue
 * descending through rowB and rowA.
 */
@Test
public void testReverseScanner_FromMemStore_SingleCF_LargerKey()
    throws IOException {
  byte[] rowC = Bytes.toBytes("rowC");
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] rowD = Bytes.toBytes("rowD");   // does not exist in the region
  byte[] cf = Bytes.toBytes("CF");
  byte[][] families = { cf };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  this.region = initHRegion(tableName, method, families);
  // rowC gets two versions; rowA and rowB get one each.
  KeyValue kv1 = new KeyValue(rowC, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(rowC, cf, col, ts + 1, KeyValue.Type.Put,
      null);
  KeyValue kv2 = new KeyValue(rowA, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(rowB, cf, col, ts, KeyValue.Type.Put, null);
  Put put = null;
  put = new Put(rowC);
  put.add(kv1);
  put.add(kv11);
  region.put(put);
  put = new Put(rowA);
  put.add(kv2);
  region.put(put);
  put = new Put(rowB);
  put.add(kv3);
  region.put(put);
  // Start the reversed scan at the non-existent rowD.
  Scan scan = new Scan(rowD);
  List<Cell> currRow = new ArrayList<>();
  scan.setReversed(true);
  scan.setMaxVersions(5);
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(currRow);
  // First row out: rowC with both versions.
  assertEquals(2, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowC, 0, rowC.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  // Then rowB.
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowB, 0, rowB.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  // Finally rowA; scan is exhausted afterwards.
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowA, 0, rowA.length));
  assertFalse(hasNext);
  scanner.close();
}
/**
 * Unbounded reverse full scan over memstore-only data: with the default of
 * one version per column, each row yields a single cell, in descending row
 * order (rowC, rowB, rowA).
 */
@Test
public void testReverseScanner_FromMemStore_SingleCF_FullScan()
    throws IOException {
  byte[] rowC = Bytes.toBytes("rowC");
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] cf = Bytes.toBytes("CF");
  byte[][] families = { cf };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  this.region = initHRegion(tableName, method, families);
  // rowC gets two versions, but only the latest is expected back because the
  // scan below keeps the default maxVersions of 1.
  KeyValue kv1 = new KeyValue(rowC, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv11 = new KeyValue(rowC, cf, col, ts + 1, KeyValue.Type.Put,
      null);
  KeyValue kv2 = new KeyValue(rowA, cf, col, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(rowB, cf, col, ts, KeyValue.Type.Put, null);
  Put put = null;
  put = new Put(rowC);
  put.add(kv1);
  put.add(kv11);
  region.put(put);
  put = new Put(rowA);
  put.add(kv2);
  region.put(put);
  put = new Put(rowB);
  put.add(kv3);
  region.put(put);
  // Unbounded reversed scan over the whole region.
  Scan scan = new Scan();
  List<Cell> currRow = new ArrayList<>();
  scan.setReversed(true);
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(currRow);
  // rowC first (one cell: latest version only).
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowC, 0, rowC.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  // Then rowB.
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowB, 0, rowB.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  // Finally rowA; scan is exhausted afterwards.
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowA, 0, rowA.length));
  assertFalse(hasNext);
  scanner.close();
}
/**
 * Regression test for a reverse-scan endless loop in the
 * "INCLUDE_AND_SEEK_NEXT_ROW & SEEK_NEXT_ROW" matcher interaction. Scans
 * [rowD, rowA) in reverse over flushed data, first on col1 (hits every row
 * in range) and then on col2 (only rowD has it), and verifies termination
 * with the expected rows.
 */
@Test
public void testReverseScanner_moreRowsMayExistAfter() throws IOException {
  // case for "INCLUDE_AND_SEEK_NEXT_ROW & SEEK_NEXT_ROW" endless loop
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] rowC = Bytes.toBytes("rowC");
  byte[] rowD = Bytes.toBytes("rowD");
  byte[] rowE = Bytes.toBytes("rowE");
  byte[] cf = Bytes.toBytes("CF");
  byte[][] families = { cf };
  byte[] col1 = Bytes.toBytes("col1");
  byte[] col2 = Bytes.toBytes("col2");
  long ts = 1;
  this.region = initHRegion(tableName, method, families);
  // Every row has col1; only rowD additionally has col2.
  KeyValue kv1 = new KeyValue(rowA, cf, col1, ts, KeyValue.Type.Put, null);
  KeyValue kv2 = new KeyValue(rowB, cf, col1, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(rowC, cf, col1, ts, KeyValue.Type.Put, null);
  KeyValue kv4_1 = new KeyValue(rowD, cf, col1, ts, KeyValue.Type.Put, null);
  KeyValue kv4_2 = new KeyValue(rowD, cf, col2, ts, KeyValue.Type.Put, null);
  KeyValue kv5 = new KeyValue(rowE, cf, col1, ts, KeyValue.Type.Put, null);
  Put put = null;
  put = new Put(rowA);
  put.add(kv1);
  region.put(put);
  put = new Put(rowB);
  put.add(kv2);
  region.put(put);
  put = new Put(rowC);
  put.add(kv3);
  region.put(put);
  put = new Put(rowD);
  put.add(kv4_1);
  region.put(put);
  put = new Put(rowD);
  put.add(kv4_2);
  region.put(put);
  put = new Put(rowE);
  put.add(kv5);
  region.put(put);
  // Flush so the scan runs against a store file rather than the memstore.
  region.flush(true);
  // Reversed scan of [rowD, rowA): rowD inclusive, rowA exclusive.
  Scan scan = new Scan(rowD, rowA);
  scan.addColumn(families[0], col1);
  scan.setReversed(true);
  List<Cell> currRow = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowD, 0, rowD.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowC, 0, rowC.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  // rowB is the last row in range; rowA is excluded by the stop row.
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowB, 0, rowB.length));
  assertFalse(hasNext);
  scanner.close();
  // Second pass on col2: only rowD carries it, so the scan must return rowD
  // and then terminate instead of looping over the col2-less rows.
  scan = new Scan(rowD, rowA);
  scan.addColumn(families[0], col2);
  scan.setReversed(true);
  currRow.clear();
  scanner = region.getScanner(scan);
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowD, 0, rowD.length));
  scanner.close();
}
/**
 * Same scenario as {@code testReverseScanner_moreRowsMayExistAfter}, but with
 * "test.block.size" forced to 1 so that every cell lands in its own HFile
 * block — ensuring the reverse scan does not conflict with the HFile index
 * optimization.
 */
@Test
public void testReverseScanner_smaller_blocksize() throws IOException {
  // case to ensure no conflict with HFile index optimization
  byte[] rowA = Bytes.toBytes("rowA");
  byte[] rowB = Bytes.toBytes("rowB");
  byte[] rowC = Bytes.toBytes("rowC");
  byte[] rowD = Bytes.toBytes("rowD");
  byte[] rowE = Bytes.toBytes("rowE");
  byte[] cf = Bytes.toBytes("CF");
  byte[][] families = { cf };
  byte[] col1 = Bytes.toBytes("col1");
  byte[] col2 = Bytes.toBytes("col2");
  long ts = 1;
  HBaseConfiguration config = new HBaseConfiguration();
  // One-byte block size: each cell ends up in its own block.
  config.setInt("test.block.size", 1);
  this.region = initHRegion(tableName, method, config, families);
  // Every row has col1; only rowD additionally has col2.
  KeyValue kv1 = new KeyValue(rowA, cf, col1, ts, KeyValue.Type.Put, null);
  KeyValue kv2 = new KeyValue(rowB, cf, col1, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(rowC, cf, col1, ts, KeyValue.Type.Put, null);
  KeyValue kv4_1 = new KeyValue(rowD, cf, col1, ts, KeyValue.Type.Put, null);
  KeyValue kv4_2 = new KeyValue(rowD, cf, col2, ts, KeyValue.Type.Put, null);
  KeyValue kv5 = new KeyValue(rowE, cf, col1, ts, KeyValue.Type.Put, null);
  Put put = null;
  put = new Put(rowA);
  put.add(kv1);
  region.put(put);
  put = new Put(rowB);
  put.add(kv2);
  region.put(put);
  put = new Put(rowC);
  put.add(kv3);
  region.put(put);
  put = new Put(rowD);
  put.add(kv4_1);
  region.put(put);
  put = new Put(rowD);
  put.add(kv4_2);
  region.put(put);
  put = new Put(rowE);
  put.add(kv5);
  region.put(put);
  // Flush so the reverse scan reads from HFiles.
  region.flush(true);
  // Reversed scan of [rowD, rowA): rowD inclusive, rowA exclusive.
  Scan scan = new Scan(rowD, rowA);
  scan.addColumn(families[0], col1);
  scan.setReversed(true);
  List<Cell> currRow = new ArrayList<>();
  InternalScanner scanner = region.getScanner(scan);
  boolean hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowD, 0, rowD.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowC, 0, rowC.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  // rowB is the last row in range; rowA is excluded by the stop row.
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowB, 0, rowB.length));
  assertFalse(hasNext);
  scanner.close();
  // Second pass on col2: only rowD carries it.
  scan = new Scan(rowD, rowA);
  scan.addColumn(families[0], col2);
  scan.setReversed(true);
  currRow.clear();
  scanner = region.getScanner(scan);
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), rowD, 0, rowD.length));
  scanner.close();
}
/**
 * Reverse scan mixing three flushed store files and the memstore across three
 * column families. Data is spread so that individual rows span multiple
 * files/memstore; the scan starts at "row4" (skipping the larger "row5"),
 * uses batch=3 so wide rows take several next() calls, and must return rows
 * in descending order: row4, row3, row2, row1, row0.
 */
@Test
public void testReverseScanner_FromMemStoreAndHFiles_MultiCFs1()
    throws IOException {
  byte[] row0 = Bytes.toBytes("row0"); // 1 kv
  byte[] row1 = Bytes.toBytes("row1"); // 2 kv
  byte[] row2 = Bytes.toBytes("row2"); // 4 kv
  byte[] row3 = Bytes.toBytes("row3"); // 2 kv
  byte[] row4 = Bytes.toBytes("row4"); // 5 kv
  byte[] row5 = Bytes.toBytes("row5"); // 2 kv
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[] cf2 = Bytes.toBytes("CF2");
  byte[] cf3 = Bytes.toBytes("CF3");
  byte[][] families = { cf1, cf2, cf3 };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  HBaseConfiguration conf = new HBaseConfiguration();
  // disable compactions in this test so the hand-built file layout survives.
  conf.setInt("hbase.hstore.compactionThreshold", 10000);
  this.region = initHRegion(tableName, method, conf, families);
  // kv naming style: kv(row number) totalKvCountInThisRow seq no
  KeyValue kv0_1_1 = new KeyValue(row0, cf1, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv1_2_1 = new KeyValue(row1, cf2, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv1_2_2 = new KeyValue(row1, cf1, col, ts + 1,
      KeyValue.Type.Put, null);
  KeyValue kv2_4_1 = new KeyValue(row2, cf2, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv2_4_2 = new KeyValue(row2, cf1, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv2_4_3 = new KeyValue(row2, cf3, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv2_4_4 = new KeyValue(row2, cf1, col, ts + 4,
      KeyValue.Type.Put, null);
  KeyValue kv3_2_1 = new KeyValue(row3, cf2, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv3_2_2 = new KeyValue(row3, cf1, col, ts + 4,
      KeyValue.Type.Put, null);
  KeyValue kv4_5_1 = new KeyValue(row4, cf1, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv4_5_2 = new KeyValue(row4, cf3, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv4_5_3 = new KeyValue(row4, cf3, col, ts + 5,
      KeyValue.Type.Put, null);
  KeyValue kv4_5_4 = new KeyValue(row4, cf2, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv4_5_5 = new KeyValue(row4, cf1, col, ts + 3,
      KeyValue.Type.Put, null);
  KeyValue kv5_2_1 = new KeyValue(row5, cf2, col, ts, KeyValue.Type.Put,
      null);
  KeyValue kv5_2_2 = new KeyValue(row5, cf3, col, ts, KeyValue.Type.Put,
      null);
  // hfiles(cf1/cf2) :"row1"(1 kv) / "row2"(1 kv) / "row4"(2 kv)
  Put put = null;
  put = new Put(row1);
  put.add(kv1_2_1);
  region.put(put);
  put = new Put(row2);
  put.add(kv2_4_1);
  region.put(put);
  put = new Put(row4);
  put.add(kv4_5_4);
  put.add(kv4_5_5);
  region.put(put);
  region.flush(true);
  // hfiles(cf1/cf3) : "row1" (1 kvs) / "row2" (1 kv) / "row4" (2 kv)
  put = new Put(row4);
  put.add(kv4_5_1);
  put.add(kv4_5_3);
  region.put(put);
  put = new Put(row1);
  put.add(kv1_2_2);
  region.put(put);
  put = new Put(row2);
  put.add(kv2_4_4);
  region.put(put);
  region.flush(true);
  // hfiles(cf1/cf3) : "row2"(2 kv) / "row3"(1 kvs) / "row4" (1 kv)
  put = new Put(row4);
  put.add(kv4_5_2);
  region.put(put);
  put = new Put(row2);
  put.add(kv2_4_2);
  put.add(kv2_4_3);
  region.put(put);
  put = new Put(row3);
  put.add(kv3_2_2);
  region.put(put);
  region.flush(true);
  // memstore(cf1/cf2/cf3) : "row0" (1 kvs) / "row3" ( 1 kv) / "row5" (max)
  // ( 2 kv)
  put = new Put(row0);
  put.add(kv0_1_1);
  region.put(put);
  put = new Put(row3);
  put.add(kv3_2_1);
  region.put(put);
  put = new Put(row5);
  put.add(kv5_2_1);
  put.add(kv5_2_2);
  region.put(put);
  // scan range = ["row4", min), skip the max "row5"
  Scan scan = new Scan(row4);
  scan.setMaxVersions(5);
  scan.setBatch(3);
  scan.setReversed(true);
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext = false;
  // 1. scan out "row4" (5 kvs), "row5" can't be scanned out since not
  // included in scan range
  // "row4" takes 2 next() calls since batch=3
  hasNext = scanner.next(currRow);
  assertEquals(3, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row4, 0, row4.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(2, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(),
      currRow.get(0).getRowLength(), row4, 0,
      row4.length));
  assertTrue(hasNext);
  // 2. scan out "row3" (2 kv)
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(2, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row3, 0, row3.length));
  assertTrue(hasNext);
  // 3. scan out "row2" (4 kvs)
  // "row2" takes 2 next() calls since batch=3
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(3, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row2, 0, row2.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row2, 0, row2.length));
  assertTrue(hasNext);
  // 4. scan out "row1" (2 kv)
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(2, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row1, 0, row1.length));
  assertTrue(hasNext);
  // 5. scan out "row0" (1 kv)
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row0, 0, row0.length));
  assertFalse(hasNext);
  scanner.close();
}
/**
 * Reverse scan across three flushed store files (one row each, in different
 * column families) plus one row in the memstore. Starting at "row4", rows
 * must come back in descending order: row4, row3, row2, row1.
 */
@Test
public void testReverseScanner_FromMemStoreAndHFiles_MultiCFs2()
    throws IOException {
  byte[] row1 = Bytes.toBytes("row1");
  byte[] row2 = Bytes.toBytes("row2");
  byte[] row3 = Bytes.toBytes("row3");
  byte[] row4 = Bytes.toBytes("row4");
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[] cf2 = Bytes.toBytes("CF2");
  byte[] cf3 = Bytes.toBytes("CF3");
  byte[] cf4 = Bytes.toBytes("CF4");
  byte[][] families = { cf1, cf2, cf3, cf4 };
  byte[] col = Bytes.toBytes("C");
  long ts = 1;
  HBaseConfiguration conf = new HBaseConfiguration();
  // disable compactions in this test so each flush keeps its own store file.
  conf.setInt("hbase.hstore.compactionThreshold", 10000);
  this.region = initHRegion(tableName, method, conf, families);
  // One cell per row, each in a distinct column family.
  KeyValue kv1 = new KeyValue(row1, cf1, col, ts, KeyValue.Type.Put, null);
  KeyValue kv2 = new KeyValue(row2, cf2, col, ts, KeyValue.Type.Put, null);
  KeyValue kv3 = new KeyValue(row3, cf3, col, ts, KeyValue.Type.Put, null);
  KeyValue kv4 = new KeyValue(row4, cf4, col, ts, KeyValue.Type.Put, null);
  // storefile1
  Put put = new Put(row1);
  put.add(kv1);
  region.put(put);
  region.flush(true);
  // storefile2
  put = new Put(row2);
  put.add(kv2);
  region.put(put);
  region.flush(true);
  // storefile3
  put = new Put(row3);
  put.add(kv3);
  region.put(put);
  region.flush(true);
  // memstore
  put = new Put(row4);
  put.add(kv4);
  region.put(put);
  // scan range = ["row4", min)
  Scan scan = new Scan(row4);
  scan.setReversed(true);
  scan.setBatch(10);
  InternalScanner scanner = region.getScanner(scan);
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row4, 0, row4.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row3, 0, row3.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row2, 0, row2.length));
  assertTrue(hasNext);
  currRow.clear();
  hasNext = scanner.next(currRow);
  assertEquals(1, currRow.size());
  assertTrue(Bytes.equals(currRow.get(0).getRowArray(), currRow.get(0).getRowOffset(), currRow
      .get(0).getRowLength(), row1, 0, row1.length));
  assertFalse(hasNext);
  // Fix: release the scanner like every sibling reverse-scan test does; the
  // original leaked it.
  scanner.close();
}
/**
 * Test for HBASE-14497: a reverse scan used to throw StackOverflowError
 * caused by readPt checking. The scanner is opened early (small readPt),
 * then a store file and a memstore are filled with 10k rows each that were
 * written after the read point; seekToPreviousRow must skip them without
 * blowing the stack, and only the two pre-scanner rows may be returned.
 */
@Test
public void testReverseScanner_StackOverflow() throws IOException {
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[][] families = {cf1};
  byte[] col = Bytes.toBytes("C");
  HBaseConfiguration conf = new HBaseConfiguration();
  this.region = initHRegion(tableName, method, conf, families);
  // setup with one storefile and one memstore, to create scanner and get an earlier readPt
  Put put = new Put(Bytes.toBytes("19998"));
  put.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put);
  region.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
  Put put2 = new Put(Bytes.toBytes("19997"));
  put2.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put2);
  // Opening the scanner now pins its read point before the bulk writes below.
  Scan scan = new Scan(Bytes.toBytes("19998"));
  scan.setReversed(true);
  InternalScanner scanner = region.getScanner(scan);
  // create one storefile contains many rows will be skipped
  // to check StoreFileScanner.seekToPreviousRow
  for (int i = 10000; i < 20000; i++) {
    Put p = new Put(Bytes.toBytes(""+i));
    p.addColumn(cf1, col, Bytes.toBytes("" + i));
    region.put(p);
  }
  region.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
  // create one memstore contains many rows will be skipped
  // to check MemStoreScanner.seekToPreviousRow
  for (int i = 10000; i < 20000; i++) {
    Put p = new Put(Bytes.toBytes(""+i));
    p.addColumn(cf1, col, Bytes.toBytes("" + i));
    region.put(p);
  }
  // Drain the scan; currRow accumulates across iterations (no clear()), so
  // at the end it must hold exactly the two rows visible at the read point.
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext;
  do {
    hasNext = scanner.next(currRow);
  } while (hasNext);
  assertEquals(2, currRow.size());
  assertEquals("19998", Bytes.toString(currRow.get(0).getRowArray(),
      currRow.get(0).getRowOffset(), currRow.get(0).getRowLength()));
  assertEquals("19997", Bytes.toString(currRow.get(1).getRowArray(),
      currRow.get(1).getRowOffset(), currRow.get(1).getRowLength()));
}
/**
 * HBASE-15871 regression: after a flush resets the store scanner of an
 * already-open reverse scan, the memstore scanner (whose data is all newer
 * than the scan's read point) must NOT be re-added — the store should be
 * left with only the store-file scanner, and only the two pre-scan rows may
 * be returned.
 */
@Test
public void testReverseScanShouldNotScanMemstoreIfReadPtLesser() throws Exception {
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[][] families = { cf1 };
  byte[] col = Bytes.toBytes("C");
  HBaseConfiguration conf = new HBaseConfiguration();
  this.region = initHRegion(tableName, method, conf, families);
  // setup with one storefile and one memstore, to create scanner and get an earlier readPt
  Put put = new Put(Bytes.toBytes("19996"));
  put.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put);
  Put put2 = new Put(Bytes.toBytes("19995"));
  put2.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put2);
  // create a reverse scan
  Scan scan = new Scan(Bytes.toBytes("19996"));
  scan.setReversed(true);
  RegionScannerImpl scanner = region.getScanner(scan);
  // flush the cache. This will reset the store scanner
  region.flushcache(true, true, FlushLifeCycleTracker.DUMMY);
  // create one memstore contains many rows will be skipped
  // to check MemStoreScanner.seekToPreviousRow
  for (int i = 10000; i < 20000; i++) {
    Put p = new Put(Bytes.toBytes("" + i));
    p.addColumn(cf1, col, Bytes.toBytes("" + i));
    region.put(p);
  }
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext;
  boolean assertDone = false;
  do {
    hasNext = scanner.next(currRow);
    // With HBASE-15871, after the scanner is reset the memstore scanner should not be
    // added here
    if (!assertDone) {
      // Inspect the store heap once, on the first iteration only.
      StoreScanner current =
          (StoreScanner) (scanner.storeHeap).getCurrentForTesting();
      List<KeyValueScanner> scanners = current.getAllScannersForTesting();
      assertEquals("There should be only one scanner the store file scanner", 1,
        scanners.size());
      assertDone = true;
    }
  } while (hasNext);
  // currRow accumulates across iterations, so it ends with exactly the two
  // rows that existed before the scanner was opened.
  assertEquals(2, currRow.size());
  assertEquals("19996", Bytes.toString(currRow.get(0).getRowArray(),
      currRow.get(0).getRowOffset(), currRow.get(0).getRowLength()));
  assertEquals("19995", Bytes.toString(currRow.get(1).getRowArray(),
      currRow.get(1).getRowOffset(), currRow.get(1).getRowLength()));
}
/**
 * Verifies that cells written after a reverse scan is opened — and therefore
 * carrying sequence ids greater than the scan's read point — are invisible to
 * that scan: only the two rows written beforehand may come back.
 */
@Test
public void testReverseScanWhenPutCellsAfterOpenReverseScan() throws Exception {
  byte[] cf1 = Bytes.toBytes("CF1");
  byte[][] families = { cf1 };
  byte[] col = Bytes.toBytes("C");
  HBaseConfiguration conf = new HBaseConfiguration();
  this.region = initHRegion(tableName, method, conf, families);
  // Two rows written before the scanner opens; these are the only visible ones.
  Put put = new Put(Bytes.toBytes("199996"));
  put.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put);
  Put put2 = new Put(Bytes.toBytes("199995"));
  put2.addColumn(cf1, col, Bytes.toBytes("val"));
  region.put(put2);
  // Create a reverse scan
  Scan scan = new Scan(Bytes.toBytes("199996"));
  scan.setReversed(true);
  RegionScannerImpl scanner = region.getScanner(scan);
  // Put a lot of cells that have sequenceIDs grater than the readPt of the reverse scan
  for (int i = 100000; i < 200000; i++) {
    Put p = new Put(Bytes.toBytes("" + i));
    p.addColumn(cf1, col, Bytes.toBytes("" + i));
    region.put(p);
  }
  // Drain the scan; currRow accumulates across iterations (no clear()).
  List<Cell> currRow = new ArrayList<>();
  boolean hasNext;
  do {
    hasNext = scanner.next(currRow);
  } while (hasNext);
  assertEquals(2, currRow.size());
  assertEquals("199996", Bytes.toString(currRow.get(0).getRowArray(),
      currRow.get(0).getRowOffset(), currRow.get(0).getRowLength()));
  assertEquals("199995", Bytes.toString(currRow.get(1).getRowArray(),
      currRow.get(1).getRowOffset(), currRow.get(1).getRowLength()));
}
/**
 * Verifies that the per-region write-request counter advances by exactly one
 * for every put and delete, and that merely constructing a mutation does not
 * bump it.
 */
@Test
public void testWriteRequestsCounter() throws IOException {
  byte[] fam = Bytes.toBytes("info");
  byte[][] families = { fam };
  this.region = initHRegion(tableName, method, CONF, families);
  // A freshly opened region has served no writes yet.
  Assert.assertEquals(0L, region.getWriteRequestsCount());
  Put put = new Put(row);
  put.addColumn(fam, fam, fam);
  // Building the Put alone must not touch the counter.
  Assert.assertEquals(0L, region.getWriteRequestsCount());
  // Each applied put increments the counter by exactly one.
  for (long expected = 1L; expected <= 3L; expected++) {
    region.put(put);
    Assert.assertEquals(expected, region.getWriteRequestsCount());
  }
  // A delete counts as a write request as well.
  region.delete(new Delete(row));
  Assert.assertEquals(4L, region.getWriteRequestsCount());
}
/**
 * Verifies that opening a region appends exactly one REGION_OPEN event to the
 * WAL, and that the RegionEventDescriptor in that edit correctly reports the
 * table, encoded region name, sequence number, server, and the store-file
 * state of both families (one flushed file in fam1, none in fam2).
 */
@Test
public void testOpenRegionWrittenToWAL() throws Exception {
  final ServerName serverName = ServerName.valueOf(name.getMethodName(), 100, 42);
  final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  htd.addFamily(new HColumnDescriptor(fam1));
  htd.addFamily(new HColumnDescriptor(fam2));
  HRegionInfo hri = new HRegionInfo(htd.getTableName(),
    HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
  // open the region w/o rss and wal and flush some files
  region =
       HBaseTestingUtility.createRegionAndWAL(hri, TEST_UTIL.getDataTestDir(), TEST_UTIL
          .getConfiguration(), htd);
  assertNotNull(region);
  // create a file in fam1 for the region before opening in OpenRegionHandler
  region.put(new Put(Bytes.toBytes("a")).addColumn(fam1, fam1, fam1));
  region.flush(true);
  HBaseTestingUtility.closeRegionAndWAL(region);
  ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
  // capture append() calls
  WAL wal = mockWAL();
  when(rss.getWAL((HRegionInfo) any())).thenReturn(wal);
  // Re-open the region through the mocked services so the open event goes to
  // the mock WAL.
  region = HRegion.openHRegion(hri, htd, rss.getWAL(hri),
    TEST_UTIL.getConfiguration(), rss, null);
  verify(wal, times(1)).append((HRegionInfo)any(), (WALKeyImpl)any()
    , editCaptor.capture(), anyBoolean());
  WALEdit edit = editCaptor.getValue();
  assertNotNull(edit);
  assertNotNull(edit.getCells());
  // The open event is a single marker cell carrying a RegionEventDescriptor.
  assertEquals(1, edit.getCells().size());
  RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
  assertNotNull(desc);
  LOG.info("RegionEventDescriptor from WAL: " + desc);
  assertEquals(RegionEventDescriptor.EventType.REGION_OPEN, desc.getEventType());
  assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getTableName().toBytes()));
  assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(),
    hri.getEncodedNameAsBytes()));
  assertTrue(desc.getLogSequenceNumber() > 0);
  assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
  assertEquals(2, desc.getStoresCount());
  StoreDescriptor store = desc.getStores(0);
  assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
  assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
  assertEquals(1, store.getStoreFileCount()); // 1store file
  assertFalse(store.getStoreFile(0).contains("/")); // ensure path is relative
  store = desc.getStores(1);
  assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
  assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
  assertEquals(0, store.getStoreFileCount()); // no store files
}
// Helper for test testOpenRegionWrittenToWALForLogReplay
/**
 * HRegion subclass whose next sequence id is pinned to a constant, so tests
 * can assert against a predictable sequence number.
 */
static class HRegionWithSeqId extends HRegion {
  public HRegionWithSeqId(final Path tableDir, final WAL wal, final FileSystem fs,
      final Configuration confParam, final RegionInfo regionInfo,
      final TableDescriptor htd, final RegionServerServices rsServices) {
    super(tableDir, wal, fs, confParam, regionInfo, htd, rsServices);
  }
  /** Always returns 42 instead of consulting the WAL. */
  @Override
  protected long getNextSequenceId(WAL wal) throws IOException {
    return 42;
  }
}
/**
 * Verifies that flushing a put without any cell tags produces store files
 * whose HFile context does not claim to include tags.
 */
@Test
public void testFlushedFileWithNoTags() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  HTableDescriptor htd = new HTableDescriptor(tableName);
  htd.addFamily(new HColumnDescriptor(fam1));
  HRegionInfo info = new HRegionInfo(tableName, null, null, false);
  Path path = TEST_UTIL.getDataTestDir(getClass().getSimpleName());
  region = HBaseTestingUtility.createRegionAndWAL(info, path, TEST_UTIL.getConfiguration(), htd);
  // A plain put with no tags attached.
  Put put = new Put(Bytes.toBytes("a-b-0-0"));
  put.addColumn(fam1, qual1, Bytes.toBytes("c1-value"));
  region.put(put);
  region.flush(true);
  // Every flushed file must report includesTags == false.
  HStore store = region.getStore(fam1);
  Collection<HStoreFile> storefiles = store.getStorefiles();
  for (HStoreFile sf : storefiles) {
    assertFalse("Tags should not be present "
      ,sf.getReader().getHFileReader().getFileContext().isIncludesTags());
  }
}
/**
 * Utility method to set up a WAL mock.
 * Needs to do the bit where we complete the MVCC write entry on the
 * WALKeyImpl on append, else the appending code blocks waiting on the entry
 * and the test hangs.
 * @return a mock WAL whose append() stubs out MVCC completion and returns 1
 * @throws IOException declared for signature compatibility; never thrown here
 */
private WAL mockWAL() throws IOException {
  WAL wal = mock(WAL.class);
  Mockito.when(wal.append((HRegionInfo)Mockito.any(),
      (WALKeyImpl)Mockito.any(), (WALEdit)Mockito.any(), Mockito.anyBoolean())).
    thenAnswer(new Answer<Long>() {
      @Override
      public Long answer(InvocationOnMock invocation) throws Throwable {
        // Pull the WALKeyImpl (second argument) out of the append() call and
        // begin/attach an MVCC write entry, mimicking what a real WAL does.
        WALKeyImpl key = invocation.getArgument(1);
        MultiVersionConcurrencyControl.WriteEntry we = key.getMvcc().begin();
        key.setWriteEntry(we);
        // Arbitrary non-zero sequence id for the appended entry.
        return 1L;
      }
    });
  return wal;
}
/**
 * Verifies that closing a region appends a REGION_CLOSE event to the WAL
 * (the second append after the open event), and that the descriptor in that
 * edit correctly reports the table, encoded region name, sequence number,
 * server, and that neither family has store files.
 */
@Test
public void testCloseRegionWrittenToWAL() throws Exception {
  Path rootDir = new Path(dir + name.getMethodName());
  FSUtils.setRootDir(TEST_UTIL.getConfiguration(), rootDir);
  final ServerName serverName = ServerName.valueOf("testCloseRegionWrittenToWAL", 100, 42);
  final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
  HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
  htd.addFamily(new HColumnDescriptor(fam1));
  htd.addFamily(new HColumnDescriptor(fam2));
  final HRegionInfo hri = new HRegionInfo(htd.getTableName(),
    HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
  ArgumentCaptor<WALEdit> editCaptor = ArgumentCaptor.forClass(WALEdit.class);
  // capture append() calls
  WAL wal = mockWAL();
  when(rss.getWAL((HRegionInfo) any())).thenReturn(wal);
  // create and then open a region first so that it can be closed later
  region = HRegion.createHRegion(hri, rootDir, TEST_UTIL.getConfiguration(), htd, rss.getWAL(hri));
  region = HRegion.openHRegion(hri, htd, rss.getWAL(hri),
    TEST_UTIL.getConfiguration(), rss, null);
  // close the region
  region.close(false);
  // 2 times, one for region open, the other close region
  verify(wal, times(2)).append((HRegionInfo)any(), (WALKeyImpl)any(),
    editCaptor.capture(), anyBoolean());
  // The second captured edit is the close event.
  WALEdit edit = editCaptor.getAllValues().get(1);
  assertNotNull(edit);
  assertNotNull(edit.getCells());
  // The close event is a single marker cell carrying a RegionEventDescriptor.
  assertEquals(1, edit.getCells().size());
  RegionEventDescriptor desc = WALEdit.getRegionEventDescriptor(edit.getCells().get(0));
  assertNotNull(desc);
  LOG.info("RegionEventDescriptor from WAL: " + desc);
  assertEquals(RegionEventDescriptor.EventType.REGION_CLOSE, desc.getEventType());
  assertTrue(Bytes.equals(desc.getTableName().toByteArray(), htd.getTableName().toBytes()));
  assertTrue(Bytes.equals(desc.getEncodedRegionName().toByteArray(),
    hri.getEncodedNameAsBytes()));
  assertTrue(desc.getLogSequenceNumber() > 0);
  assertEquals(serverName, ProtobufUtil.toServerName(desc.getServer()));
  assertEquals(2, desc.getStoresCount());
  StoreDescriptor store = desc.getStores(0);
  assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam1));
  assertEquals(store.getStoreHomeDir(), Bytes.toString(fam1));
  assertEquals(0, store.getStoreFileCount()); // no store files
  store = desc.getStores(1);
  assertTrue(Bytes.equals(store.getFamilyName().toByteArray(), fam2));
  assertEquals(store.getStoreHomeDir(), Bytes.toString(fam2));
  assertEquals(0, store.getStoreFileCount()); // no store files
}
/**
* Test RegionTooBusyException thrown when region is busy
*/
@Test
public void testRegionTooBusy() throws IOException {
byte[] family = Bytes.toBytes("family");
long defaultBusyWaitDuration = CONF.getLong("hbase.busy.wait.duration",
HRegion.DEFAULT_BUSY_WAIT_DURATION);
CONF.setLong("hbase.busy.wait.duration", 1000);
region = initHRegion(tableName, method, CONF, family);
final AtomicBoolean stopped = new AtomicBoolean(true);
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
region.lock.writeLock().lock();
stopped.set(false);
while (!stopped.get()) {
Thread.sleep(100);
}
} catch (InterruptedException ie) {
} finally {
region.lock.writeLock().unlock();
}
}
});
t.start();
Get get = new Get(row);
try {
while (stopped.get()) {
Thread.sleep(100);
}
region.get(get);
fail("Should throw RegionTooBusyException");
} catch (InterruptedException ie) {
fail("test interrupted");
} catch (RegionTooBusyException e) {
// Good, expected
} finally {
stopped.set(true);
try {
t.join();
} catch (Throwable e) {
}
HBaseTestingUtility.closeRegionAndWAL(region);
region = null;
CONF.setLong("hbase.busy.wait.duration", defaultBusyWaitDuration);
}
}
@Test
public void testCellTTLs() throws IOException {
IncrementingEnvironmentEdge edge = new IncrementingEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(edge);
final byte[] row = Bytes.toBytes("testRow");
final byte[] q1 = Bytes.toBytes("q1");
final byte[] q2 = Bytes.toBytes("q2");
final byte[] q3 = Bytes.toBytes("q3");
final byte[] q4 = Bytes.toBytes("q4");
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
HColumnDescriptor hcd = new HColumnDescriptor(fam1);
hcd.setTimeToLive(10); // 10 seconds
htd.addFamily(hcd);
Configuration conf = new Configuration(TEST_UTIL.getConfiguration());
conf.setInt(HFile.FORMAT_VERSION_KEY, HFile.MIN_FORMAT_VERSION_WITH_TAGS);
region = HBaseTestingUtility.createRegionAndWAL(new HRegionInfo(htd.getTableName(),
HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY),
TEST_UTIL.getDataTestDir(), conf, htd);
assertNotNull(region);
long now = EnvironmentEdgeManager.currentTime();
// Add a cell that will expire in 5 seconds via cell TTL
region.put(new Put(row).add(new KeyValue(row, fam1, q1, now,
HConstants.EMPTY_BYTE_ARRAY, new ArrayBackedTag[] {
// TTL tags specify ts in milliseconds
new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) })));
// Add a cell that will expire after 10 seconds via family setting
region.put(new Put(row).addColumn(fam1, q2, now, HConstants.EMPTY_BYTE_ARRAY));
// Add a cell that will expire in 15 seconds via cell TTL
region.put(new Put(row).add(new KeyValue(row, fam1, q3, now + 10000 - 1,
HConstants.EMPTY_BYTE_ARRAY, new ArrayBackedTag[] {
// TTL tags specify ts in milliseconds
new ArrayBackedTag(TagType.TTL_TAG_TYPE, Bytes.toBytes(5000L)) })));
// Add a cell that will expire in 20 seconds via family setting
region.put(new Put(row).addColumn(fam1, q4, now + 10000 - 1, HConstants.EMPTY_BYTE_ARRAY));
// Flush so we are sure store scanning gets this right
region.flush(true);
// A query at time T+0 should return all cells
Result r = region.get(new Get(row));
assertNotNull(r.getValue(fam1, q1));
assertNotNull(r.getValue(fam1, q2));
assertNotNull(r.getValue(fam1, q3));
assertNotNull(r.getValue(fam1, q4));
// Increment time to T+5 seconds
edge.incrementTime(5000);
r = region.get(new Get(row));
assertNull(r.getValue(fam1, q1));
assertNotNull(r.getValue(fam1, q2));
assertNotNull(r.getValue(fam1, q3));
assertNotNull(r.getValue(fam1, q4));
// Increment time to T+10 seconds
edge.incrementTime(5000);
r = region.get(new Get(row));
assertNull(r.getValue(fam1, q1));
assertNull(r.getValue(fam1, q2));
assertNotNull(r.getValue(fam1, q3));
assertNotNull(r.getValue(fam1, q4));
// Increment time to T+15 seconds
edge.incrementTime(5000);
r = region.get(new Get(row));
assertNull(r.getValue(fam1, q1));
assertNull(r.getValue(fam1, q2));
assertNull(r.getValue(fam1, q3));
assertNotNull(r.getValue(fam1, q4));
// Increment time to T+20 seconds
edge.incrementTime(10000);
r = region.get(new Get(row));
assertNull(r.getValue(fam1, q1));
assertNull(r.getValue(fam1, q2));
assertNull(r.getValue(fam1, q3));
assertNull(r.getValue(fam1, q4));
// Fun with disappearing increments
// Start at 1
region.put(new Put(row).addColumn(fam1, q1, Bytes.toBytes(1L)));
r = region.get(new Get(row));
byte[] val = r.getValue(fam1, q1);
assertNotNull(val);
assertEquals(1L, Bytes.toLong(val));
// Increment with a TTL of 5 seconds
Increment incr = new Increment(row).addColumn(fam1, q1, 1L);
incr.setTTL(5000);
region.increment(incr); // 2
// New value should be 2
r = region.get(new Get(row));
val = r.getValue(fam1, q1);
assertNotNull(val);
assertEquals(2L, Bytes.toLong(val));
// Increment time to T+25 seconds
edge.incrementTime(5000);
// Value should be back to 1
r = region.get(new Get(row));
val = r.getValue(fam1, q1);
assertNotNull(val);
assertEquals(1L, Bytes.toLong(val));
// Increment time to T+30 seconds
edge.incrementTime(5000);
// Original value written at T+20 should be gone now via family TTL
r = region.get(new Get(row));
assertNull(r.getValue(fam1, q1));
}
@Test
public void testIncrementTimestampsAreMonotonic() throws IOException {
region = initHRegion(tableName, method, CONF, fam1);
ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(edge);
edge.setValue(10);
Increment inc = new Increment(row);
inc.setDurability(Durability.SKIP_WAL);
inc.addColumn(fam1, qual1, 1L);
region.increment(inc);
Result result = region.get(new Get(row));
Cell c = result.getColumnLatestCell(fam1, qual1);
assertNotNull(c);
assertEquals(10L, c.getTimestamp());
edge.setValue(1); // clock goes back
region.increment(inc);
result = region.get(new Get(row));
c = result.getColumnLatestCell(fam1, qual1);
assertEquals(11L, c.getTimestamp());
assertEquals(2L, Bytes.toLong(c.getValueArray(), c.getValueOffset(), c.getValueLength()));
}
@Test
public void testAppendTimestampsAreMonotonic() throws IOException {
region = initHRegion(tableName, method, CONF, fam1);
ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(edge);
edge.setValue(10);
Append a = new Append(row);
a.setDurability(Durability.SKIP_WAL);
a.addColumn(fam1, qual1, qual1);
region.append(a);
Result result = region.get(new Get(row));
Cell c = result.getColumnLatestCell(fam1, qual1);
assertNotNull(c);
assertEquals(10L, c.getTimestamp());
edge.setValue(1); // clock goes back
region.append(a);
result = region.get(new Get(row));
c = result.getColumnLatestCell(fam1, qual1);
assertEquals(11L, c.getTimestamp());
byte[] expected = new byte[qual1.length*2];
System.arraycopy(qual1, 0, expected, 0, qual1.length);
System.arraycopy(qual1, 0, expected, qual1.length, qual1.length);
assertTrue(Bytes.equals(c.getValueArray(), c.getValueOffset(), c.getValueLength(),
expected, 0, expected.length));
}
@Test
public void testCheckAndMutateTimestampsAreMonotonic() throws IOException {
region = initHRegion(tableName, method, CONF, fam1);
ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(edge);
edge.setValue(10);
Put p = new Put(row);
p.setDurability(Durability.SKIP_WAL);
p.addColumn(fam1, qual1, qual1);
region.put(p);
Result result = region.get(new Get(row));
Cell c = result.getColumnLatestCell(fam1, qual1);
assertNotNull(c);
assertEquals(10L, c.getTimestamp());
edge.setValue(1); // clock goes back
p = new Put(row);
p.setDurability(Durability.SKIP_WAL);
p.addColumn(fam1, qual1, qual2);
region.checkAndMutate(row, fam1, qual1, CompareOperator.EQUAL, new BinaryComparator(qual1), p);
result = region.get(new Get(row));
c = result.getColumnLatestCell(fam1, qual1);
assertEquals(10L, c.getTimestamp());
assertTrue(Bytes.equals(c.getValueArray(), c.getValueOffset(), c.getValueLength(),
qual2, 0, qual2.length));
}
@Test
public void testBatchMutateWithWrongRegionException() throws Exception {
final byte[] a = Bytes.toBytes("a");
final byte[] b = Bytes.toBytes("b");
final byte[] c = Bytes.toBytes("c"); // exclusive
int prevLockTimeout = CONF.getInt("hbase.rowlock.wait.duration", 30000);
CONF.setInt("hbase.rowlock.wait.duration", 1000);
region = initHRegion(tableName, a, c, method, CONF, false, fam1);
Mutation[] mutations = new Mutation[] {
new Put(a)
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(a)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(Cell.Type.Put)
.build()),
// this is outside the region boundary
new Put(c).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(c)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(Type.Put)
.build()),
new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(b)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(Cell.Type.Put)
.build())
};
OperationStatus[] status = region.batchMutate(mutations);
assertEquals(OperationStatusCode.SUCCESS, status[0].getOperationStatusCode());
assertEquals(OperationStatusCode.SANITY_CHECK_FAILURE, status[1].getOperationStatusCode());
assertEquals(OperationStatusCode.SUCCESS, status[2].getOperationStatusCode());
// test with a row lock held for a long time
final CountDownLatch obtainedRowLock = new CountDownLatch(1);
ExecutorService exec = Executors.newFixedThreadPool(2);
Future<Void> f1 = exec.submit(new Callable<Void>() {
@Override
public Void call() throws Exception {
LOG.info("Acquiring row lock");
RowLock rl = region.getRowLock(b);
obtainedRowLock.countDown();
LOG.info("Waiting for 5 seconds before releasing lock");
Threads.sleep(5000);
LOG.info("Releasing row lock");
rl.release();
return null;
}
});
obtainedRowLock.await(30, TimeUnit.SECONDS);
Future<Void> f2 = exec.submit(new Callable<Void>() {
@Override
public Void call() throws Exception {
Mutation[] mutations = new Mutation[] {
new Put(a).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(a)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(Cell.Type.Put)
.build()),
new Put(b).add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
.setRow(b)
.setFamily(fam1)
.setTimestamp(HConstants.LATEST_TIMESTAMP)
.setType(Cell.Type.Put)
.build()),
};
// this will wait for the row lock, and it will eventually succeed
OperationStatus[] status = region.batchMutate(mutations);
assertEquals(OperationStatusCode.SUCCESS, status[0].getOperationStatusCode());
assertEquals(OperationStatusCode.SUCCESS, status[1].getOperationStatusCode());
return null;
}
});
f1.get();
f2.get();
CONF.setInt("hbase.rowlock.wait.duration", prevLockTimeout);
}
@Test
public void testCheckAndRowMutateTimestampsAreMonotonic() throws IOException {
region = initHRegion(tableName, method, CONF, fam1);
ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
EnvironmentEdgeManager.injectEdge(edge);
edge.setValue(10);
Put p = new Put(row);
p.setDurability(Durability.SKIP_WAL);
p.addColumn(fam1, qual1, qual1);
region.put(p);
Result result = region.get(new Get(row));
Cell c = result.getColumnLatestCell(fam1, qual1);
assertNotNull(c);
assertEquals(10L, c.getTimestamp());
edge.setValue(1); // clock goes back
p = new Put(row);
p.setDurability(Durability.SKIP_WAL);
p.addColumn(fam1, qual1, qual2);
RowMutations rm = new RowMutations(row);
rm.add(p);
assertTrue(region.checkAndRowMutate(row, fam1, qual1, CompareOperator.EQUAL,
new BinaryComparator(qual1), rm));
result = region.get(new Get(row));
c = result.getColumnLatestCell(fam1, qual1);
assertEquals(10L, c.getTimestamp());
LOG.info("c value " +
Bytes.toStringBinary(c.getValueArray(), c.getValueOffset(), c.getValueLength()));
assertTrue(Bytes.equals(c.getValueArray(), c.getValueOffset(), c.getValueLength(),
qual2, 0, qual2.length));
}
HRegion initHRegion(TableName tableName, String callingMethod,
byte[]... families) throws IOException {
return initHRegion(tableName, callingMethod, HBaseConfiguration.create(),
families);
}
/**
* HBASE-16429 Make sure no stuck if roll writer when ring buffer is filled with appends
* @throws IOException if IO error occurred during test
*/
@Test
public void testWritesWhileRollWriter() throws IOException {
int testCount = 10;
int numRows = 1024;
int numFamilies = 2;
int numQualifiers = 2;
final byte[][] families = new byte[numFamilies][];
for (int i = 0; i < numFamilies; i++) {
families[i] = Bytes.toBytes("family" + i);
}
final byte[][] qualifiers = new byte[numQualifiers][];
for (int i = 0; i < numQualifiers; i++) {
qualifiers[i] = Bytes.toBytes("qual" + i);
}
CONF.setInt("hbase.regionserver.wal.disruptor.event.count", 2);
this.region = initHRegion(tableName, method, CONF, families);
try {
List<Thread> threads = new ArrayList<>();
for (int i = 0; i < numRows; i++) {
final int count = i;
Thread t = new Thread(new Runnable() {
@Override
public void run() {
byte[] row = Bytes.toBytes("row" + count);
Put put = new Put(row);
put.setDurability(Durability.SYNC_WAL);
byte[] value = Bytes.toBytes(String.valueOf(count));
for (byte[] family : families) {
for (byte[] qualifier : qualifiers) {
put.addColumn(family, qualifier, count, value);
}
}
try {
region.put(put);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
});
threads.add(t);
}
for (Thread t : threads) {
t.start();
}
for (int i = 0; i < testCount; i++) {
region.getWAL().rollWriter();
Thread.yield();
}
} finally {
try {
HBaseTestingUtility.closeRegionAndWAL(this.region);
CONF.setInt("hbase.regionserver.wal.disruptor.event.count", 16 * 1024);
} catch (DroppedSnapshotException dse) {
// We could get this on way out because we interrupt the background flusher and it could
// fail anywhere causing a DSE over in the background flusher... only it is not properly
// dealt with so could still be memory hanging out when we get to here -- memory we can't
// flush because the accounting is 'off' since original DSE.
}
this.region = null;
}
}
@Test
public void testMutateRow_WriteRequestCount() throws Exception {
byte[] row1 = Bytes.toBytes("row1");
byte[] fam1 = Bytes.toBytes("fam1");
byte[] qf1 = Bytes.toBytes("qualifier");
byte[] val1 = Bytes.toBytes("value1");
RowMutations rm = new RowMutations(row1);
Put put = new Put(row1);
put.addColumn(fam1, qf1, val1);
rm.add(put);
this.region = initHRegion(tableName, method, CONF, fam1);
long wrcBeforeMutate = this.region.writeRequestsCount.longValue();
this.region.mutateRow(rm);
long wrcAfterMutate = this.region.writeRequestsCount.longValue();
Assert.assertEquals(wrcBeforeMutate + rm.getMutations().size(), wrcAfterMutate);
}
@Test
public void testBulkLoadReplicationEnabled() throws IOException {
TEST_UTIL.getConfiguration().setBoolean(HConstants.REPLICATION_BULKLOAD_ENABLE_KEY, true);
final ServerName serverName = ServerName.valueOf(name.getMethodName(), 100, 42);
final RegionServerServices rss = spy(TEST_UTIL.createMockRegionServerService(serverName));
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName()));
htd.addFamily(new HColumnDescriptor(fam1));
HRegionInfo hri = new HRegionInfo(htd.getTableName(),
HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY);
region = HRegion.openHRegion(hri, htd, rss.getWAL(hri), TEST_UTIL.getConfiguration(),
rss, null);
assertTrue(region.conf.getBoolean(HConstants.REPLICATION_BULKLOAD_ENABLE_KEY, false));
String plugins = region.conf.get(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, "");
String replicationCoprocessorClass = ReplicationObserver.class.getCanonicalName();
assertTrue(plugins.contains(replicationCoprocessorClass));
assertTrue(region.getCoprocessorHost().
getCoprocessors().contains(ReplicationObserver.class.getSimpleName()));
}
/**
* The same as HRegion class, the only difference is that instantiateHStore will
* create a different HStore - HStoreForTesting. [HBASE-8518]
*/
public static class HRegionForTesting extends HRegion {
public HRegionForTesting(final Path tableDir, final WAL wal, final FileSystem fs,
final Configuration confParam, final RegionInfo regionInfo,
final TableDescriptor htd, final RegionServerServices rsServices) {
this(new HRegionFileSystem(confParam, fs, tableDir, regionInfo),
wal, confParam, htd, rsServices);
}
public HRegionForTesting(HRegionFileSystem fs, WAL wal,
Configuration confParam, TableDescriptor htd,
RegionServerServices rsServices) {
super(fs, wal, confParam, htd, rsServices);
}
/**
* Create HStore instance.
* @return If Mob is enabled, return HMobStore, otherwise return HStoreForTesting.
*/
@Override
protected HStore instantiateHStore(final ColumnFamilyDescriptor family, boolean warmup)
throws IOException {
if (family.isMobEnabled()) {
if (HFile.getFormatVersion(this.conf) < HFile.MIN_FORMAT_VERSION_WITH_TAGS) {
throw new IOException("A minimum HFile version of " + HFile.MIN_FORMAT_VERSION_WITH_TAGS +
" is required for MOB feature. Consider setting " + HFile.FORMAT_VERSION_KEY +
" accordingly.");
}
return new HMobStore(this, family, this.conf, warmup);
}
return new HStoreForTesting(this, family, this.conf, warmup);
}
}
/**
* HStoreForTesting is merely the same as HStore, the difference is in the doCompaction method
* of HStoreForTesting there is a checkpoint "hbase.hstore.compaction.complete" which
* doesn't let hstore compaction complete. In the former edition, this config is set in
* HStore class inside compact method, though this is just for testing, otherwise it
* doesn't do any help. In HBASE-8518, we try to get rid of all "hbase.hstore.compaction.complete"
* config (except for testing code).
*/
public static class HStoreForTesting extends HStore {
protected HStoreForTesting(final HRegion region,
final ColumnFamilyDescriptor family,
final Configuration confParam, boolean warmup) throws IOException {
super(region, family, confParam, warmup);
}
@Override
protected List<HStoreFile> doCompaction(CompactionRequestImpl cr,
Collection<HStoreFile> filesToCompact, User user, long compactionStartTime,
List<Path> newFiles) throws IOException {
// let compaction incomplete.
if (!this.conf.getBoolean("hbase.hstore.compaction.complete", true)) {
LOG.warn("hbase.hstore.compaction.complete is set to false");
List<HStoreFile> sfs = new ArrayList<>(newFiles.size());
final boolean evictOnClose =
cacheConf != null? cacheConf.shouldEvictOnClose(): true;
for (Path newFile : newFiles) {
// Create storefile around what we wrote with a reader on it.
HStoreFile sf = createStoreFileAndReader(newFile);
sf.closeStoreFile(evictOnClose);
sfs.add(sf);
}
return sfs;
}
return super.doCompaction(cr, filesToCompact, user, compactionStartTime, newFiles);
}
}
}
| HBASE-22896 TestHRegion.testFlushMarkersWALFail is flaky (#551)
* HBASE-22896 TestHRegion.testFlushMarkersWALFail is flaky
* delete blank line
| hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java | HBASE-22896 TestHRegion.testFlushMarkersWALFail is flaky (#551) | <ide><path>base-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
<ide> String testName = "testMemstoreSizeAccountingWithFailedPostBatchMutate";
<ide> FileSystem fs = FileSystem.get(CONF);
<ide> Path rootDir = new Path(dir + testName);
<del> ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
<ide> FSHLog hLog = new FSHLog(fs, rootDir, testName, CONF);
<ide> hLog.init();
<ide> HRegion region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, hLog,
<ide> this.region = initHRegion(tableName, HConstants.EMPTY_START_ROW,
<ide> HConstants.EMPTY_END_ROW, false, Durability.USE_DEFAULT, wal, family);
<ide> region.put(put);
<del>
<ide> // 3. Test case where ABORT_FLUSH will throw exception.
<ide> // Even if ABORT_FLUSH throws exception, we should not fail with IOE, but continue with
<del> // DroppedSnapshotException. Below COMMMIT_FLUSH will cause flush to abort
<add> // DroppedSnapshotException. Below COMMIT_FLUSH will cause flush to abort
<ide> wal.flushActions = new FlushAction [] {FlushAction.COMMIT_FLUSH, FlushAction.ABORT_FLUSH};
<ide>
<ide> try {
<ide> hLog.init();
<ide> // This chunk creation is done throughout the code base. Do we want to move it into core?
<ide> // It is missing from this test. W/o it we NPE.
<del> ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
<ide> region = initHRegion(tableName, null, null, false, Durability.SYNC_WAL, hLog,
<ide> COLUMN_FAMILY_BYTES);
<ide>
<ide> String callingMethod, Configuration conf, boolean isReadOnly, byte[]... families)
<ide> throws IOException {
<ide> Path logDir = TEST_UTIL.getDataTestDirOnTestFS(callingMethod + ".log");
<del> ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
<ide> HRegionInfo hri = new HRegionInfo(tableName, startKey, stopKey);
<ide> final WAL wal = HBaseTestingUtility.createWal(conf, logDir, hri);
<ide> return initHRegion(tableName, startKey, stopKey, isReadOnly,
<ide> */
<ide> public HRegion initHRegion(TableName tableName, byte[] startKey, byte[] stopKey,
<ide> boolean isReadOnly, Durability durability, WAL wal, byte[]... families) throws IOException {
<add> ChunkCreator.initialize(MemStoreLABImpl.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null);
<ide> return TEST_UTIL.createLocalHRegion(tableName, startKey, stopKey,
<ide> isReadOnly, durability, wal, families);
<ide> } |
|
Java | apache-2.0 | d0c686a2773bbe6e151ddcf69c8c69f150b7a60c | 0 | WinRoad-NET/wrdocletbase | package net.winroad.wrdoclet.builder;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import net.winroad.wrdoclet.data.APIParameter;
import net.winroad.wrdoclet.data.ModificationHistory;
import net.winroad.wrdoclet.data.ModificationRecord;
import net.winroad.wrdoclet.data.OpenAPI;
import net.winroad.wrdoclet.data.ParameterOccurs;
import net.winroad.wrdoclet.data.ParameterType;
import net.winroad.wrdoclet.data.RequestMapping;
import net.winroad.wrdoclet.data.WRDoc;
import net.winroad.wrdoclet.taglets.WRBriefTaglet;
import net.winroad.wrdoclet.taglets.WRMemoTaglet;
import net.winroad.wrdoclet.taglets.WRMqConsumerTaglet;
import net.winroad.wrdoclet.taglets.WRMqProducerTaglet;
import net.winroad.wrdoclet.taglets.WROccursTaglet;
import net.winroad.wrdoclet.taglets.WRParamTaglet;
import net.winroad.wrdoclet.taglets.WRRefReqTaglet;
import net.winroad.wrdoclet.taglets.WRReturnCodeTaglet;
import net.winroad.wrdoclet.taglets.WRReturnTaglet;
import net.winroad.wrdoclet.taglets.WRTagTaglet;
import net.winroad.wrdoclet.utils.ApplicationContextConfig;
import net.winroad.wrdoclet.utils.Logger;
import net.winroad.wrdoclet.utils.LoggerFactory;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
import com.sun.javadoc.AnnotationDesc;
import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.FieldDoc;
import com.sun.javadoc.MemberDoc;
import com.sun.javadoc.MethodDoc;
import com.sun.javadoc.ParamTag;
import com.sun.javadoc.ParameterizedType;
import com.sun.javadoc.ProgramElementDoc;
import com.sun.javadoc.Tag;
import com.sun.javadoc.Type;
import com.sun.javadoc.AnnotationDesc.ElementValuePair;
import com.sun.tools.doclets.internal.toolkit.Configuration;
import com.sun.tools.doclets.internal.toolkit.util.Util;
public abstract class AbstractDocBuilder {
protected Logger logger;
protected WRDoc wrDoc;
protected Map<String, Set<MethodDoc>> taggedOpenAPIMethods = new HashMap<String, Set<MethodDoc>>();
protected Map<String, Set<ClassDoc>> taggedOpenAPIClasses = new HashMap<String, Set<ClassDoc>>();
public AbstractDocBuilder(WRDoc wrDoc) {
this.wrDoc = wrDoc;
this.logger = LoggerFactory.getLogger(this.getClass());
}
public WRDoc getWrDoc() {
return wrDoc;
}
public void setWrDoc(WRDoc wrDoc) {
this.wrDoc = wrDoc;
}
public Map<String, Set<MethodDoc>> getTaggedOpenAPIMethods() {
return taggedOpenAPIMethods;
}
/**
* methodDoc in implementation classes map to methodDoc in interfaces or
* methodDoc in interfaces map to methodDoc in implementation classes
*/
protected HashMap<MethodDoc, MethodDoc> methodMap = new HashMap<>();
public void setTaggedOpenAPIMethods(Map<String, Set<MethodDoc>> taggedOpenAPIMethods) {
this.taggedOpenAPIMethods = taggedOpenAPIMethods;
}
public void buildWRDoc() {
this.processOpenAPIClasses(this.wrDoc.getConfiguration().root.classes(), this.wrDoc.getConfiguration());
this.buildOpenAPIs(this.wrDoc.getConfiguration());
this.buildOpenAPIByClasses(this.wrDoc.getConfiguration());
}
protected abstract void processOpenAPIClasses(ClassDoc[] classDocs, Configuration configuration);
protected Tag[] getTagTaglets(MethodDoc methodDoc) {
return (Tag[]) ArrayUtils.addAll(methodDoc.tags(WRTagTaglet.NAME),
methodDoc.containingClass().tags(WRTagTaglet.NAME));
}
protected void processOpenAPIMethod(MethodDoc methodDoc, Configuration configuration) {
if ((configuration.nodeprecated && Util.isDeprecated(methodDoc)) || !isOpenAPIMethod(methodDoc)) {
return;
}
Tag[] methodTagArray = getTagTaglets(methodDoc);
if (methodTagArray.length == 0) {
String tag = methodDoc.containingClass().simpleTypeName();
this.wrDoc.getWRTags().add(tag);
if (!this.taggedOpenAPIMethods.containsKey(tag)) {
this.taggedOpenAPIMethods.put(tag, new HashSet<MethodDoc>());
}
this.taggedOpenAPIMethods.get(tag).add(methodDoc);
} else {
for (int i = 0; i < methodTagArray.length; i++) {
Set<String> methodTags = WRTagTaglet.getTagSet(methodTagArray[i].text());
this.wrDoc.getWRTags().addAll(methodTags);
for (Iterator<String> iter = methodTags.iterator(); iter.hasNext();) {
String tag = iter.next();
if (!this.taggedOpenAPIMethods.containsKey(tag)) {
this.taggedOpenAPIMethods.put(tag, new HashSet<MethodDoc>());
}
this.taggedOpenAPIMethods.get(tag).add(methodDoc);
}
}
}
}
protected String getBriefFromCommentText(String commentText) {
int index = StringUtils.indexOf(commentText, '\n');
if (index != -1) {
commentText = StringUtils.substring(commentText, 0, index);
}
index = StringUtils.indexOfAny(commentText, ".!?。!?…");
if (index > 0) {
commentText = StringUtils.substring(commentText, 0, index);
}
if (StringUtils.length(commentText) > 8) {
commentText = StringUtils.substring(commentText, 0, 8) + "…";
}
return commentText;
}
protected void buildOpenAPIByClasses(Configuration configuration) {
Set<Entry<String, Set<ClassDoc>>> classes = this.taggedOpenAPIClasses.entrySet();
for (Iterator<Entry<String, Set<ClassDoc>>> tagClsIter = classes.iterator(); tagClsIter.hasNext();) {
Entry<String, Set<ClassDoc>> kv = tagClsIter.next();
String tagName = kv.getKey();
if (!this.wrDoc.getTaggedOpenAPIs().containsKey(tagName)) {
this.wrDoc.getTaggedOpenAPIs().put(tagName, new LinkedList<OpenAPI>());
}
Set<ClassDoc> classDocSet = kv.getValue();
for (Iterator<ClassDoc> clsIter = classDocSet.iterator(); clsIter.hasNext();) {
ClassDoc classDoc = clsIter.next();
OpenAPI openAPI = new OpenAPI();
openAPI.setDeprecated(Util.isDeprecated(classDoc) || Util.isDeprecated(classDoc.containingPackage()));
Tag[] tags = classDoc.tags(WRTagTaglet.NAME);
if (tags.length == 0) {
openAPI.addTag(tagName);
} else {
for (Tag t : tags) {
openAPI.addTags(WRTagTaglet.getTagSet(t.text()));
}
}
openAPI.setQualifiedName(classDoc.qualifiedName());
if (StringUtils.isNotBlank(classDoc.commentText())) {
openAPI.setDescription(classDoc.commentText());
}
String brief;
if (classDoc.tags(WRBriefTaglet.NAME).length == 0) {
brief = getBriefFromCommentText(classDoc.commentText());
} else {
brief = classDoc.tags(WRBriefTaglet.NAME)[0].text();
}
openAPI.setBrief(brief);
if (StringUtils.isBlank(openAPI.getDescription())) {
openAPI.setDescription(openAPI.getBrief());
}
openAPI.setModificationHistory(this.getModificationHistory(classDoc));
openAPI.setRequestMapping(this.parseRequestMapping(classDoc));
openAPI.addInParameter(this.getInputParams(classDoc));
openAPI.setOutParameter(this.getOutputParam(classDoc));
this.wrDoc.getTaggedOpenAPIs().get(tagName).add(openAPI);
}
}
}
protected void buildOpenAPIs(Configuration configuration) {
Set<Entry<String, Set<MethodDoc>>> methods = this.taggedOpenAPIMethods.entrySet();
for (Iterator<Entry<String, Set<MethodDoc>>> tagMthIter = methods.iterator(); tagMthIter.hasNext();) {
Entry<String, Set<MethodDoc>> kv = tagMthIter.next();
String tagName = kv.getKey();
if (!this.wrDoc.getTaggedOpenAPIs().containsKey(tagName)) {
this.wrDoc.getTaggedOpenAPIs().put(tagName, new LinkedList<OpenAPI>());
}
Set<MethodDoc> methodDocSet = kv.getValue();
for (Iterator<MethodDoc> mthIter = methodDocSet.iterator(); mthIter.hasNext();) {
MethodDoc methodDoc = mthIter.next();
OpenAPI openAPI = new OpenAPI();
openAPI.setDeprecated(Util.isDeprecated(methodDoc) || Util.isDeprecated(methodDoc.containingClass())
|| Util.isDeprecated(methodDoc.containingPackage()));
Tag[] tags = this.getTagTaglets(methodDoc);
if (tags.length == 0 && this.methodMap.containsKey(methodDoc)) {
tags = this.getTagTaglets(this.methodMap.get(methodDoc));
}
if (tags.length == 0) {
openAPI.addTag(methodDoc.containingClass().simpleTypeName());
} else {
for (Tag t : tags) {
openAPI.addTags(WRTagTaglet.getTagSet(t.text()));
}
}
openAPI.setQualifiedName(methodDoc.qualifiedName());
if (StringUtils.isNotBlank(methodDoc.commentText())) {
openAPI.setDescription(methodDoc.commentText());
} else if (this.methodMap.containsKey(methodDoc)) {
openAPI.setDescription(this.methodMap.get(methodDoc).commentText());
}
String brief;
if (methodDoc.tags(WRBriefTaglet.NAME).length == 0) {
brief = getBriefFromCommentText(methodDoc.commentText());
} else {
brief = methodDoc.tags(WRBriefTaglet.NAME)[0].text();
}
if (StringUtils.isBlank(brief) && this.methodMap.containsKey(methodDoc)) {
if (this.methodMap.get(methodDoc).tags(WRBriefTaglet.NAME).length == 0) {
brief = getBriefFromCommentText(this.methodMap.get(methodDoc).commentText());
} else {
brief = this.methodMap.get(methodDoc).tags(WRBriefTaglet.NAME)[0].text();
}
}
openAPI.setBrief(brief);
if (StringUtils.isBlank(openAPI.getDescription())) {
openAPI.setDescription(openAPI.getBrief());
}
openAPI.setModificationHistory(this.getModificationHistory(methodDoc));
openAPI.setRequestMapping(this.parseRequestMapping(methodDoc));
if (openAPI.getRequestMapping() != null) {
openAPI.setAuthNeeded(this.isAPIAuthNeeded(openAPI.getRequestMapping().getUrl()));
}
openAPI.addInParameters(this.getInputParams(methodDoc));
openAPI.setOutParameter(this.getOutputParam(methodDoc));
openAPI.setReturnCode(this.getReturnCode(methodDoc));
this.wrDoc.getTaggedOpenAPIs().get(tagName).add(openAPI);
}
}
}
/**
 * @param url
 *            url of API.
 * @return 0 for anonymous allowed, 1 for authentication needed, others for not
 *         specified.
 */
protected abstract int isAPIAuthNeeded(String url);

// Whether the given method should be documented as an open API.
protected abstract boolean isOpenAPIMethod(MethodDoc methodDoc);

// Framework-specific extraction of the request mapping (URL etc.) for a
// method-level or class-level API.
protected abstract RequestMapping parseRequestMapping(MethodDoc methodDoc);

protected abstract RequestMapping parseRequestMapping(ClassDoc classDoc);

// Output (response) parameter description of an API.
protected abstract APIParameter getOutputParam(MethodDoc methodDoc);

protected abstract APIParameter getOutputParam(ClassDoc classDoc);

// Input (request) parameter descriptions of an API. NOTE: the ClassDoc
// overload returns a single APIParameter (consumed via addInParameter),
// while the MethodDoc overload returns a list.
protected abstract List<APIParameter> getInputParams(MethodDoc methodDoc);

protected abstract APIParameter getInputParams(ClassDoc classDoc);
/*
 * Looks up the javadoc @param comment written for the given parameter name;
 * returns null when the method has no matching @param tag.
 */
protected String getParamComment(MethodDoc method, String paramName) {
    for (ParamTag paramTag : method.paramTags()) {
        if (paramTag.parameterName().equals(paramName)) {
            return paramTag.parameterComment();
        }
    }
    return null;
}
/*
 * Whether the element carries an annotation whose fully qualified type name
 * equals the given string.
 */
protected boolean isProgramElementDocAnnotatedWith(ProgramElementDoc elementDoc, String annotation) {
    for (AnnotationDesc desc : elementDoc.annotations()) {
        if (desc.annotationType().qualifiedTypeName().equals(annotation)) {
            return true;
        }
    }
    return false;
}
/*
 * Builds the modification history of the given class; empty when classDoc is
 * null.
 */
protected ModificationHistory getModificationHistory(ClassDoc classDoc) {
    ModificationHistory history = new ModificationHistory();
    if (classDoc == null) {
        return history;
    }
    history.addModificationRecords(this.getModificationRecords(classDoc));
    return history;
}
/*
 * Gets the modification history of the class named by the given type; empty
 * when the type is not part of this javadoc run.
 */
protected ModificationHistory getModificationHistory(Type type) {
    ModificationHistory history = new ModificationHistory();
    ClassDoc resolved = this.wrDoc.getConfiguration().root.classNamed(type.qualifiedTypeName());
    if (resolved != null) {
        history.addModificationRecords(this.getModificationRecords(resolved));
    }
    return history;
}
/*
 * Gets the modification history of the method, parsed from its own tags.
 */
protected ModificationHistory getModificationHistory(MethodDoc methodDoc) {
    ModificationHistory history = new ModificationHistory();
    LinkedList<ModificationRecord> records = this.parseModificationRecords(methodDoc.tags());
    history.addModificationRecords(records);
    return history;
}
/*
 * Collects the modification records of the class, superclass records first.
 * NOTE: when the class has no superclass at all, its own tags are NOT parsed
 * either — this mirrors the original behavior.
 */
protected LinkedList<ModificationRecord> getModificationRecords(ClassDoc classDoc) {
    LinkedList<ModificationRecord> records = new LinkedList<ModificationRecord>();
    ClassDoc parent = classDoc.superclass();
    if (parent != null) {
        records.addAll(this.getModificationRecords(parent));
        records.addAll(this.parseModificationRecords(classDoc.tags()));
    }
    return records;
}
/*
 * Parse tags to get customized parameters, one per taglet occurrence.
 */
protected LinkedList<APIParameter> parseCustomizedParameters(MethodDoc methodDoc) {
    LinkedList<APIParameter> params = new LinkedList<APIParameter>();
    for (Tag tag : methodDoc.tags(WRParamTaglet.NAME)) {
        params.add(WRParamTaglet.parse(tag.text()));
    }
    return params;
}
/*
 * Parse tags to get the customized return description; null when the method
 * has no such taglet. Only the first occurrence is used.
 */
protected APIParameter parseCustomizedReturn(MethodDoc methodDoc) {
    Tag[] tags = methodDoc.tags(WRReturnTaglet.NAME);
    return tags.length > 0 ? WRReturnTaglet.parse(tags[0].text()) : null;
}
/*
 * Parse tags to get modification records.
 *
 * The parser is position-sensitive: each "@author" tag starts a record; a
 * "@version" tag directly following it (optionally followed by a memo
 * taglet) or a memo taglet directly following it is attached to the same
 * record. Tags appearing in any other order are ignored.
 */
protected LinkedList<ModificationRecord> parseModificationRecords(Tag[] tags) {
    LinkedList<ModificationRecord> result = new LinkedList<ModificationRecord>();
    for (int i = 0; i < tags.length; i++) {
        if ("@author".equalsIgnoreCase(tags[i].name())) {
            ModificationRecord record = new ModificationRecord();
            record.setModifier(tags[i].text());
            if (i + 1 < tags.length) {
                if ("@version".equalsIgnoreCase(tags[i + 1].name())) {
                    record.setVersion(tags[i + 1].text());
                    // author + version + memo triple
                    if (i + 2 < tags.length && ("@" + WRMemoTaglet.NAME).equalsIgnoreCase(tags[i + 2].name())) {
                        record.setMemo(tags[i + 2].text());
                    }
                } else if (("@" + WRMemoTaglet.NAME).equalsIgnoreCase(tags[i + 1].name())) {
                    // author + memo pair, no version
                    record.setMemo(tags[i + 1].text());
                }
            }
            result.add(record);
        }
    }
    return result;
}
// Concatenates all return-code taglets of the method into one string.
protected String getReturnCode(MethodDoc methodDoc) {
    return WRReturnCodeTaglet.concat(methodDoc.tags(WRReturnCodeTaglet.NAME));
}
// First line of the first MQ-consumer taglet on the class, or "" when absent.
protected String getMQConsumerTopic(ClassDoc classDoc) {
    Tag[] consumerTags = classDoc.tags(WRMqConsumerTaglet.NAME);
    if (consumerTags.length == 0) {
        return "";
    }
    return StringUtils.substringBefore(consumerTags[0].text(), "\n");
}
// First line of the first MQ-producer taglet on the class, or "" when absent.
protected String getMQProducerTopic(ClassDoc classDoc) {
    Tag[] producerTags = classDoc.tags(WRMqProducerTaglet.NAME);
    if (producerTags.length == 0) {
        return "";
    }
    return StringUtils.substringBefore(producerTags[0].text(), "\n");
}
/*
 * Whether the class matches the comma-separated "stop classes" configuration.
 * Each configured pattern is compared against the class's qualified name
 * part-by-part (split on '.', case-insensitive); a "*" part acts as a
 * wildcard matching everything from that point on, and an exact-length match
 * with no differing part also matches. Stop classes are treated as opaque
 * types whose fields are not expanded into the doc.
 */
protected boolean isInStopClasses(ClassDoc classDoc) {
    String property = ApplicationContextConfig.getStopClasses();
    if (property != null) {
        String[] stopClasses = property.split(",");
        String[] cdParts = classDoc.qualifiedTypeName().split("\\.");
        for (String stopClass : stopClasses) {
            String[] scParts = stopClass.trim().split("\\.");
            // a pattern longer than the class name can never match
            if (scParts.length <= cdParts.length) {
                boolean hasDiffPart = false;
                for (int i = 0; i < scParts.length; i++) {
                    if (scParts[i].equals("*")) {
                        return true;
                    } else if (!scParts[i].equalsIgnoreCase(cdParts[i])) {
                        hasDiffPart = true;
                        break;
                    }
                }
                if (scParts.length == cdParts.length && !hasDiffPart) {
                    return true;
                }
            }
        }
    }
    return false;
}
/*
 * Whether the type AND all of its type arguments (recursively) are stop
 * classes; used to decide that a parameterized type needs no expansion.
 *
 * Fix: type.asClassDoc() can return null (e.g. for type variables or
 * wildcards used as type arguments); the original passed that null straight
 * into isInStopClasses(), which dereferences it when a stop-classes property
 * is configured (NPE). Such types are now treated as "not in stop classes".
 */
protected boolean isParameterizedTypeInStopClasses(Type type) {
    ClassDoc classDoc = type.asClassDoc();
    if (classDoc == null || !this.isInStopClasses(classDoc)) {
        return false;
    }
    ParameterizedType pt = type.asParameterizedType();
    if (pt != null) {
        for (Type arg : pt.typeArguments()) {
            if (!this.isParameterizedTypeInStopClasses(arg)) {
                return false;
            }
        }
    }
    return true;
}
/*
 * Collects parameter descriptions for the given type: one entry per type
 * argument of a parameterized type (flagged as parent type argument, and
 * recursively expanded), plus the fields of the type's own ClassDoc when the
 * class is part of this javadoc run. processingClasses guards against
 * infinite recursion on self-referencing types; primitives yield an empty
 * list.
 */
protected List<APIParameter> getFields(Type type, ParameterType paramType, HashSet<String> processingClasses) {
    processingClasses.add(type.toString());
    List<APIParameter> result = new LinkedList<APIParameter>();
    if (!type.isPrimitive()) {
        ParameterizedType pt = type.asParameterizedType();
        if (pt != null && pt.typeArguments().length > 0) {
            for (Type arg : pt.typeArguments()) {
                // type arguments entirely made of stop classes are skipped
                if (!this.isParameterizedTypeInStopClasses(arg)) {
                    APIParameter tmp = new APIParameter();
                    tmp.setName(arg.simpleTypeName());
                    tmp.setType(this.getTypeName(arg, false));
                    tmp.setDescription("");
                    tmp.setParentTypeArgument(true);
                    if (!processingClasses.contains(arg.qualifiedTypeName())) {
                        tmp.setFields(this.getFields(arg, paramType, processingClasses));
                    }
                    result.add(tmp);
                }
            }
        }
        ClassDoc classDoc = this.wrDoc.getConfiguration().root.classNamed(type.qualifiedTypeName());
        if (classDoc != null) {
            result.addAll(this.getFields(classDoc, paramType, processingClasses));
        }
    }
    return result;
}
/*
 * Collects the API parameter descriptions for the fields of classDoc.
 * A field is documented directly when it is public or when Lombok generates
 * the relevant accessor (@Data, or @Getter for responses / @Setter for
 * requests, on the class or on the field). All other fields are documented
 * through their getter (responses) or setter (requests); their comments,
 * validator annotations and JSON names are remembered in the private* maps
 * so the accessor-based entry can reuse them. processingClasses prevents
 * infinite recursion on cyclic types; fields of stop classes are not
 * expanded.
 */
protected List<APIParameter> getFields(ClassDoc classDoc, ParameterType paramType,
        HashSet<String> processingClasses) {
    processingClasses.add(classDoc.toString());
    List<APIParameter> result = new LinkedList<APIParameter>();
    boolean isLomBokClass = this.isProgramElementDocAnnotatedWith(classDoc, "lombok.Data")
            || (paramType == ParameterType.Response
                    && this.isProgramElementDocAnnotatedWith(classDoc, "lombok.Getter"))
            || (paramType == ParameterType.Request
                    && this.isProgramElementDocAnnotatedWith(classDoc, "lombok.Setter"));
    // todo
    // this.wrDoc.getConfiguration().root.classNamed(type.qualifiedTypeName()).typeParameters()[0].qualifiedTypeName()
    ClassDoc superClassDoc = classDoc.superclass();
    // inherited fields first, unless the superclass is a stop class or is
    // already being processed (cycle guard)
    if (superClassDoc != null && !this.isInStopClasses(superClassDoc)
            && !processingClasses.contains(superClassDoc.qualifiedTypeName())) {
        result.addAll(this.getFields(superClassDoc, paramType, processingClasses));
    }
    if (this.isInStopClasses(classDoc)) {
        return result;
    }
    FieldDoc[] fieldDocs = classDoc.fields(false);
    HashMap<String, String> privateFieldValidator = new HashMap<>();
    HashMap<String, String> privateFieldDesc = new HashMap<String, String>();
    HashMap<String, String> privateJsonField = new HashMap<String, String>();
    Set<String> transientFieldSet = new HashSet<>();
    for (FieldDoc fieldDoc : fieldDocs) {
        if (!fieldDoc.isTransient() && !fieldDoc.isStatic()
                && (fieldDoc.isPublic() || isLomBokClass
                        || (this.isProgramElementDocAnnotatedWith(fieldDoc, "lombok.Getter")
                                && paramType == ParameterType.Response)
                        || (this.isProgramElementDocAnnotatedWith(fieldDoc, "lombok.Setter")
                                && paramType == ParameterType.Request))) {
            // directly visible field: document it as-is
            APIParameter param = new APIParameter();
            param.setName(fieldDoc.name());
            param.setType(this.getTypeName(fieldDoc.type(), false));
            if (!processingClasses.contains(fieldDoc.type().qualifiedTypeName())) {
                param.setFields(this.getFields(fieldDoc.type(), paramType, processingClasses));
            }
            param.setDescription(this.getFieldDescription(fieldDoc));
            param.setHistory(new ModificationHistory(this.parseModificationRecords(fieldDoc.tags())));
            param.setParameterOccurs(this.parseParameterOccurs(fieldDoc.tags(WROccursTaglet.NAME)));
            result.add(param);
        } else {
            // not directly visible: remember its metadata for the
            // accessor-based pass below
            privateFieldDesc.put(fieldDoc.name(), fieldDoc.commentText());
            String jsonField = this.getJsonField(fieldDoc);
            if (jsonField != null) {
                privateJsonField.put(fieldDoc.name(), jsonField);
            }
            privateFieldValidator.put(fieldDoc.name(), this.getFieldValidatorDesc(fieldDoc));
            if (fieldDoc.isTransient()) {
                transientFieldSet.add(fieldDoc.name());
            }
        }
    }
    MethodDoc[] methodDocs = classDoc.methods(false);
    for (MethodDoc methodDoc : methodDocs) {
        // accessors of transient fields are skipped entirely
        if (transientFieldSet.contains(this.getFieldNameOfAccesser(methodDoc.name()))) {
            continue;
        }
        if ((paramType == ParameterType.Response && this.isGetterMethod(methodDoc))
                || (paramType == ParameterType.Request && this.isSetterMethod(methodDoc))) {
            APIParameter param = new APIParameter();
            String fieldNameOfAccesser = this.getFieldNameOfAccesser(methodDoc.name());
            param.setName(fieldNameOfAccesser);
            // a JSON name on the accessor wins over one on the backing field
            String jsonField = this.getJsonField(methodDoc);
            if (jsonField != null) {
                param.setName(jsonField);
            } else if (privateJsonField.containsKey(param.getName())) {
                param.setName(privateJsonField.get(param.getName()));
            }
            Type typeToProcess = null;
            if (paramType == ParameterType.Request) {
                // set method only has one parameter.
                typeToProcess = methodDoc.parameters()[0].type();
            } else {
                typeToProcess = methodDoc.returnType();
            }
            param.setType(this.getTypeName(typeToProcess, false));
            if (!processingClasses.contains(typeToProcess.qualifiedTypeName())) {
                param.setFields(this.getFields(typeToProcess, paramType, processingClasses));
            }
            param.setHistory(new ModificationHistory(this.parseModificationRecords(methodDoc.tags())));
            // description fallback chain: accessor comment -> @param/@return
            // text -> backing field comment
            if (StringUtils.isEmpty(methodDoc.commentText())) {
                if (paramType == ParameterType.Request) {
                    param.setDescription(this.getParamComment(methodDoc, methodDoc.parameters()[0].name()));
                } else {
                    for (Tag tag : methodDoc.tags("return")) {
                        param.setDescription(tag.text());
                    }
                }
            } else {
                param.setDescription(methodDoc.commentText());
            }
            if (StringUtils.isEmpty(param.getDescription())) {
                String temp = privateFieldDesc.get(param.getName());
                if (temp == null) {
                    if (typeToProcess.typeName().equals("boolean")) {
                        // boolean accessor "isFoo" may back a field literally
                        // named "isFoo" — try that spelling too
                        temp = privateFieldDesc.get(param.getName());
                        if (temp == null) {
                            param.setDescription(privateFieldDesc
                                    .get("is" + net.winroad.wrdoclet.utils.Util.capitalize(param.getName())));
                        }
                    }
                } else {
                    param.setDescription(temp);
                }
            }
            // append the backing field's validator description, if any
            if (privateFieldValidator.get(fieldNameOfAccesser) != null) {
                param.setDescription(param.getDescription() == null ? privateFieldValidator.get(fieldNameOfAccesser)
                        : param.getDescription() + " " + privateFieldValidator.get(fieldNameOfAccesser));
            }
            param.setParameterOccurs(this.parseParameterOccurs(methodDoc.tags(WROccursTaglet.NAME)));
            result.add(param);
        }
    }
    return result;
}
/*
 * Field description: javadoc comment text plus the validator-annotation
 * summary.
 *
 * Fix: the original unconditionally appended " " + validator text, leaving a
 * trailing space on every field that has no constraint annotations. The
 * separator is now only added when there is validator text.
 */
protected String getFieldDescription(FieldDoc fieldDoc) {
    String validatorDesc = this.getFieldValidatorDesc(fieldDoc);
    if (StringUtils.isEmpty(validatorDesc)) {
        return fieldDoc.commentText();
    }
    return fieldDoc.commentText() + " " + validatorDesc;
}
/*
 * Returns the serialized JSON name declared for the member via Jackson's
 * @JsonProperty ("value" element) or fastjson's @JSONField ("name" element),
 * or null when neither declares a non-empty name.
 *
 * Fix: the original used startsWith() on the annotation's qualified name, so
 * "com.fasterxml.jackson.annotation.JsonProperty" also matched unrelated
 * annotations such as JsonPropertyDescription/JsonPropertyOrder, whose
 * "value" element would then be misread as the JSON name. Exact equality is
 * used now.
 */
protected String getJsonField(MemberDoc memberDoc) {
    for (AnnotationDesc annotationDesc : memberDoc.annotations()) {
        String qualifiedName = annotationDesc.annotationType().qualifiedTypeName();
        if ("com.fasterxml.jackson.annotation.JsonProperty".equals(qualifiedName)) {
            String value = getAnnotationStringElement(annotationDesc, "value");
            if (value != null) {
                return value;
            }
        }
        if ("com.alibaba.fastjson.annotation.JSONField".equals(qualifiedName)) {
            String name = getAnnotationStringElement(annotationDesc, "name");
            if (name != null) {
                return name;
            }
        }
    }
    return null;
}

// Returns the unquoted string value of the named annotation element, or null
// when the element is absent, empty, or the literal empty string "".
private static String getAnnotationStringElement(AnnotationDesc annotationDesc, String elementName) {
    if (annotationDesc.elementValues().length > 0) {
        for (AnnotationDesc.ElementValuePair elementValuePair : annotationDesc.elementValues()) {
            if (elementValuePair.element().name().equals(elementName)
                    && !StringUtils.isEmpty(elementValuePair.value().toString())
                    && !"\"\"".equals(elementValuePair.value().toString())) {
                return elementValuePair.value().toString().replace("\"", "");
            }
        }
    }
    return null;
}
/*
 * Renders the field's validation constraints (hibernate-validator, bean
 * validation, and lombok.NonNull annotations) as a display string of the
 * form "@Name(elem=value,...) " — one entry per annotation, each followed by
 * a space.
 */
protected String getFieldValidatorDesc(FieldDoc fieldDoc) {
    StringBuilder strBuilder = new StringBuilder();
    for (AnnotationDesc annotationDesc : fieldDoc.annotations()) {
        if (annotationDesc.annotationType().qualifiedTypeName().startsWith("org.hibernate.validator.constraints")
                || annotationDesc.annotationType().qualifiedTypeName().startsWith("javax.validation.constraints")
                || annotationDesc.annotationType().qualifiedTypeName().startsWith("lombok.NonNull")) {
            strBuilder.append("@");
            strBuilder.append(annotationDesc.annotationType().name());
            if (annotationDesc.elementValues().length > 0) {
                strBuilder.append("(");
                boolean isFirstElement = true;
                for (AnnotationDesc.ElementValuePair elementValuePair : annotationDesc.elementValues()) {
                    if (!isFirstElement) {
                        strBuilder.append(",");
                    }
                    strBuilder.append(elementValuePair.element().name());
                    strBuilder.append("=");
                    // element values may contain \-u escapes, e.g. Chinese
                    // message texts — decode them for display
                    strBuilder.append(
                            net.winroad.wrdoclet.utils.Util.decodeUnicode(elementValuePair.value().toString()));
                    isFirstElement = false;
                }
                strBuilder.append(")");
            }
            strBuilder.append(" ");
        }
    }
    return strBuilder.toString();
}
/*
 * Renders a human-readable type name for the doc output:
 * - parameterized types are expanded recursively, e.g. "pkg.Map<K,V>",
 * - enum types are rendered with their constants as "Enum[A,B,...]",
 * - otherwise, when ignoreSuperType is false and the superclass is not a
 *   stop class, the name is rendered as "Sub extends Super".
 * ignoreSuperType is set for type arguments to keep them compact.
 */
protected String getTypeName(Type typeToProcess, boolean ignoreSuperType) {
    // special type to process e.g. java.util.Map.Entry<Address,Person>
    ParameterizedType pt = typeToProcess.asParameterizedType();
    if (pt != null && pt.typeArguments().length > 0) {
        StringBuilder strBuilder = new StringBuilder();
        strBuilder.append(typeToProcess.qualifiedTypeName());
        strBuilder.append("<");
        for (Type arg : pt.typeArguments()) {
            strBuilder.append(this.getTypeName(arg, true));
            strBuilder.append(",");
        }
        int len = strBuilder.length();
        // trim the last ","
        strBuilder.deleteCharAt(len - 1);
        strBuilder.append(">");
        return strBuilder.toString();
    }
    if (typeToProcess.asClassDoc() != null) {
        ClassDoc superClass = typeToProcess.asClassDoc().superclass();
        if (superClass != null) {
            // handle enum to output enum values into doc
            if ("java.lang.Enum".equals(superClass.qualifiedTypeName())) {
                FieldDoc[] enumConstants = typeToProcess.asClassDoc().enumConstants();
                StringBuilder strBuilder = new StringBuilder();
                strBuilder.append("Enum[");
                for (FieldDoc enumConstant : enumConstants) {
                    strBuilder.append(enumConstant.name());
                    strBuilder.append(",");
                }
                int len = strBuilder.length();
                // trim the last ","
                strBuilder.deleteCharAt(len - 1);
                strBuilder.append("]");
                return strBuilder.toString();
            } else if (!ignoreSuperType && !this.isInStopClasses(superClass)) {
                return typeToProcess.qualifiedTypeName() + " extends "
                        + this.getTypeName(typeToProcess.asClassDoc().superclassType(), false);
            }
        }
    }
    return typeToProcess.toString();
}
/*
 * Parse the ParameterOccurs from the occurs taglets; unexpected values are
 * logged and skipped, and null is returned when no taglet resolves.
 */
protected ParameterOccurs parseParameterOccurs(Tag[] tags) {
    String occursTagName = "@" + WROccursTaglet.NAME;
    for (Tag tag : tags) {
        if (!occursTagName.equalsIgnoreCase(tag.name())) {
            continue;
        }
        String text = tag.text();
        if (WROccursTaglet.REQUIRED.equalsIgnoreCase(text)) {
            return ParameterOccurs.REQUIRED;
        }
        if (WROccursTaglet.OPTIONAL.equalsIgnoreCase(text)) {
            return ParameterOccurs.OPTIONAL;
        }
        if (WROccursTaglet.DEPENDS.equalsIgnoreCase(text)) {
            return ParameterOccurs.DEPENDS;
        }
        this.logger.warn("Unexpected WROccursTaglet: " + text);
    }
    return null;
}
/*
 * is the method a getter method of a field: zero parameters and a name of
 * the form "getXxx" (non-boolean return) or "isXxx" (boolean return).
 *
 * Fix: the original mixed && and || without grouping, so the
 * zero-parameter check only applied to the "getXxx" branch; a boolean
 * "isXxx(args...)" method WITH parameters was wrongly classified as a
 * getter. The parameter check now applies to both branches.
 */
protected boolean isGetterMethod(MethodDoc methodDoc) {
    if (methodDoc.parameters() == null || methodDoc.parameters().length != 0) {
        return false;
    }
    if ("boolean".equalsIgnoreCase(methodDoc.returnType().qualifiedTypeName())) {
        return methodDoc.name().matches("^is.+");
    }
    return methodDoc.name().matches("^get.+");
}
/*
 * is the method a setter method of a field: exactly one parameter and a
 * name of the form "setXxx".
 */
protected boolean isSetterMethod(MethodDoc methodDoc) {
    return methodDoc.parameters() != null
            && methodDoc.parameters().length == 1
            && methodDoc.name().matches("^set.+");
}
/*
 * get the field name which the getter or setter method to access. NOTE: the
 * getter or setter method name should follow the naming convention.
 *
 * Fix: the fallback branch used replaceFirst("is", ""), which deletes the
 * first "is" occurrence ANYWHERE in the name — mangling non-accessor names
 * (e.g. "register" -> "regter") that reach this method via the transient
 * field check in getFields(). Prefixes are now stripped only when actually
 * present, and other names are returned uncapitalized but otherwise intact.
 */
protected String getFieldNameOfAccesser(String methodName) {
    if (methodName.startsWith("get")) {
        return net.winroad.wrdoclet.utils.Util.uncapitalize(methodName.substring(3));
    } else if (methodName.startsWith("set")) {
        return net.winroad.wrdoclet.utils.Util.uncapitalize(methodName.substring(3));
    } else if (methodName.startsWith("is")) {
        return net.winroad.wrdoclet.utils.Util.uncapitalize(methodName.substring(2));
    } else {
        return net.winroad.wrdoclet.utils.Util.uncapitalize(methodName);
    }
}
/**
 * Parses the XML configuration file at the given path into a namespace-aware
 * DOM document.
 *
 * Fix: the parser factory is hardened against XXE — secure processing is
 * enabled and external general/parameter entities, XInclude, and entity
 * expansion are disabled, since the file path may point at XML from outside
 * the build.
 *
 * @param filePath path of the XML config file (e.g. a dubbo config).
 */
public static Document readXMLConfig(String filePath)
        throws ParserConfigurationException, SAXException, IOException {
    DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
    builderFactory.setNamespaceAware(true);
    builderFactory.setFeature(javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING, true);
    builderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false);
    builderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
    builderFactory.setXIncludeAware(false);
    builderFactory.setExpandEntityReferences(false);
    DocumentBuilder builder = builderFactory.newDocumentBuilder();
    File dubboConfig = new File(filePath);
    return builder.parse(dubboConfig);
}
/**
 * Returns the value of the named attribute on the node, or null when the
 * node has no attributes or no attribute with that name.
 */
public static String getAttributeValue(Node node, String attributeName) {
    NamedNodeMap attrs = node.getAttributes();
    if (attrs == null) {
        return null;
    }
    Node attr = attrs.getNamedItem(attributeName);
    return attr == null ? null : attr.getNodeValue();
}
/*
 * Applies Spring web-binding annotation elements (@RequestParam etc.) to the
 * parameter description: "value"/"name" override the parameter name, and
 * "required" sets the occurs flag.
 */
protected void processAnnotations(AnnotationDesc annotation, APIParameter apiParameter) {
    if (annotation.annotationType().qualifiedName().startsWith("org.springframework.web.bind.annotation.")) {
        for (ElementValuePair pair : annotation.elementValues()) {
            if (pair.element().name().equals("value") || pair.element().name().equals("name")) {
                if (pair.value() != null) {
                    // strip the quotes the doclet keeps around string literals
                    apiParameter.setName(pair.value().toString().replace("\"", ""));
                }
            }
            if (pair.element().name().equals("required")) {
                if (pair.value().value().equals(true)) {
                    apiParameter.setParameterOccurs(ParameterOccurs.REQUIRED);
                } else {
                    apiParameter.setParameterOccurs(ParameterOccurs.OPTIONAL);
                }
            }
        }
    }
}
/*
 * Adds one APIParameter to paramList per ref-request taglet on the method.
 * The taglet text is space-separated positional fields:
 * "<name> <type> <description> <occurs>"; extra fields are warned about and
 * ignored. When <type> resolves to a class in this javadoc run, its fields
 * are expanded as request parameters.
 */
protected void handleRefReq(MethodDoc method, List<APIParameter> paramList) {
    Tag[] tags = method.tags(WRRefReqTaglet.NAME);
    for (int i = 0; i < tags.length; i++) {
        APIParameter apiParameter = new APIParameter();
        String[] strArr = tags[i].text().split(" ");
        for (int j = 0; j < strArr.length; j++) {
            switch (j) {
            case 0:
                apiParameter.setName(strArr[j]);
                break;
            case 1:
                apiParameter.setType(strArr[j]);
                break;
            case 2:
                apiParameter.setDescription(strArr[j]);
                break;
            case 3:
                if (StringUtils.equalsIgnoreCase(strArr[j], WROccursTaglet.REQUIRED)) {
                    apiParameter.setParameterOccurs(ParameterOccurs.REQUIRED);
                } else if (StringUtils.equalsIgnoreCase(strArr[j], WROccursTaglet.OPTIONAL)) {
                    apiParameter.setParameterOccurs(ParameterOccurs.OPTIONAL);
                }
                break;
            default:
                logger.warn("Unexpected tag:" + tags[i].text());
            }
        }
        HashSet<String> processingClasses = new HashSet<String>();
        ClassDoc c = this.wrDoc.getConfiguration().root.classNamed(apiParameter.getType());
        if (c != null) {
            apiParameter.setFields(this.getFields(c, ParameterType.Request, processingClasses));
        }
        paramList.add(apiParameter);
    }
}
}
| src/main/java/net/winroad/wrdoclet/builder/AbstractDocBuilder.java | package net.winroad.wrdoclet.builder;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import net.winroad.wrdoclet.data.APIParameter;
import net.winroad.wrdoclet.data.ModificationHistory;
import net.winroad.wrdoclet.data.ModificationRecord;
import net.winroad.wrdoclet.data.OpenAPI;
import net.winroad.wrdoclet.data.ParameterOccurs;
import net.winroad.wrdoclet.data.ParameterType;
import net.winroad.wrdoclet.data.RequestMapping;
import net.winroad.wrdoclet.data.WRDoc;
import net.winroad.wrdoclet.taglets.WRBriefTaglet;
import net.winroad.wrdoclet.taglets.WRMemoTaglet;
import net.winroad.wrdoclet.taglets.WRMqConsumerTaglet;
import net.winroad.wrdoclet.taglets.WRMqProducerTaglet;
import net.winroad.wrdoclet.taglets.WROccursTaglet;
import net.winroad.wrdoclet.taglets.WRParamTaglet;
import net.winroad.wrdoclet.taglets.WRRefReqTaglet;
import net.winroad.wrdoclet.taglets.WRReturnCodeTaglet;
import net.winroad.wrdoclet.taglets.WRReturnTaglet;
import net.winroad.wrdoclet.taglets.WRTagTaglet;
import net.winroad.wrdoclet.utils.ApplicationContextConfig;
import net.winroad.wrdoclet.utils.Logger;
import net.winroad.wrdoclet.utils.LoggerFactory;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
import com.sun.javadoc.AnnotationDesc;
import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.FieldDoc;
import com.sun.javadoc.MemberDoc;
import com.sun.javadoc.MethodDoc;
import com.sun.javadoc.ParamTag;
import com.sun.javadoc.ParameterizedType;
import com.sun.javadoc.ProgramElementDoc;
import com.sun.javadoc.Tag;
import com.sun.javadoc.Type;
import com.sun.javadoc.AnnotationDesc.ElementValuePair;
import com.sun.tools.doclets.internal.toolkit.Configuration;
import com.sun.tools.doclets.internal.toolkit.util.Util;
public abstract class AbstractDocBuilder {
protected Logger logger;
protected WRDoc wrDoc;
// open API methods grouped by tag name
protected Map<String, Set<MethodDoc>> taggedOpenAPIMethods = new HashMap<String, Set<MethodDoc>>();
// open API classes grouped by tag name
protected Map<String, Set<ClassDoc>> taggedOpenAPIClasses = new HashMap<String, Set<ClassDoc>>();

/**
 * @param wrDoc
 *            the WRDoc under construction; also supplies the doclet
 *            configuration used throughout the builder.
 */
public AbstractDocBuilder(WRDoc wrDoc) {
    this.wrDoc = wrDoc;
    this.logger = LoggerFactory.getLogger(this.getClass());
}
public WRDoc getWrDoc() {
    return wrDoc;
}

public void setWrDoc(WRDoc wrDoc) {
    this.wrDoc = wrDoc;
}

// NOTE: returns the live internal map, not a copy.
public Map<String, Set<MethodDoc>> getTaggedOpenAPIMethods() {
    return taggedOpenAPIMethods;
}

/**
 * methodDoc in implementation classes map to methodDoc in interfaces or
 * methodDoc in interfaces map to methodDoc in implementation classes
 */
protected HashMap<MethodDoc, MethodDoc> methodMap = new HashMap<>();

public void setTaggedOpenAPIMethods(Map<String, Set<MethodDoc>> taggedOpenAPIMethods) {
    this.taggedOpenAPIMethods = taggedOpenAPIMethods;
}
/**
 * Entry point: scans every class known to the doclet root to register the
 * tagged open-API methods/classes, then builds the OpenAPI entries for both.
 */
public void buildWRDoc() {
    this.processOpenAPIClasses(this.wrDoc.getConfiguration().root.classes(), this.wrDoc.getConfiguration());
    this.buildOpenAPIs(this.wrDoc.getConfiguration());
    this.buildOpenAPIByClasses(this.wrDoc.getConfiguration());
}

// Subclasses decide which classes/methods count as open APIs and register
// them in taggedOpenAPIMethods / taggedOpenAPIClasses.
protected abstract void processOpenAPIClasses(ClassDoc[] classDocs, Configuration configuration);
// Tag taglets declared on the method, followed by those on its class.
protected Tag[] getTagTaglets(MethodDoc methodDoc) {
    Tag[] methodLevel = methodDoc.tags(WRTagTaglet.NAME);
    Tag[] classLevel = methodDoc.containingClass().tags(WRTagTaglet.NAME);
    return (Tag[]) ArrayUtils.addAll(methodLevel, classLevel);
}
/*
 * Registers an open-API method under every tag it declares (method- or
 * class-level tag taglets); when no tag taglet exists the simple class name
 * is used as the tag. Deprecated methods are skipped when -nodeprecated is
 * set.
 */
protected void processOpenAPIMethod(MethodDoc methodDoc, Configuration configuration) {
    if ((configuration.nodeprecated && Util.isDeprecated(methodDoc)) || !isOpenAPIMethod(methodDoc)) {
        return;
    }
    Tag[] methodTagArray = getTagTaglets(methodDoc);
    if (methodTagArray.length == 0) {
        // no explicit tag: fall back to the containing class's simple name
        String tag = methodDoc.containingClass().simpleTypeName();
        this.wrDoc.getWRTags().add(tag);
        if (!this.taggedOpenAPIMethods.containsKey(tag)) {
            this.taggedOpenAPIMethods.put(tag, new HashSet<MethodDoc>());
        }
        this.taggedOpenAPIMethods.get(tag).add(methodDoc);
    } else {
        for (int i = 0; i < methodTagArray.length; i++) {
            // one taglet may declare several comma-separated tags
            Set<String> methodTags = WRTagTaglet.getTagSet(methodTagArray[i].text());
            this.wrDoc.getWRTags().addAll(methodTags);
            for (Iterator<String> iter = methodTags.iterator(); iter.hasNext();) {
                String tag = iter.next();
                if (!this.taggedOpenAPIMethods.containsKey(tag)) {
                    this.taggedOpenAPIMethods.put(tag, new HashSet<MethodDoc>());
                }
                this.taggedOpenAPIMethods.get(tag).add(methodDoc);
            }
        }
    }
}
/*
 * Derives a short "brief" from comment text: first line only, cut at the
 * first sentence terminator (ASCII or full-width, unless it is the very
 * first character), hard-capped at 8 characters with an ellipsis.
 */
protected String getBriefFromCommentText(String commentText) {
    int newlineIdx = StringUtils.indexOf(commentText, '\n');
    if (newlineIdx != -1) {
        commentText = StringUtils.substring(commentText, 0, newlineIdx);
    }
    int sentenceEnd = StringUtils.indexOfAny(commentText, ".!?。!?…");
    if (sentenceEnd > 0) {
        commentText = StringUtils.substring(commentText, 0, sentenceEnd);
    }
    if (StringUtils.length(commentText) > 8) {
        commentText = StringUtils.substring(commentText, 0, 8) + "…";
    }
    return commentText;
}
/*
 * Builds one OpenAPI entry per registered class (class-level APIs, e.g. MQ
 * style), grouped under each tag the class declares — falling back to the
 * registration tag when the class has no tag taglet of its own.
 */
protected void buildOpenAPIByClasses(Configuration configuration) {
    Set<Entry<String, Set<ClassDoc>>> classes = this.taggedOpenAPIClasses.entrySet();
    for (Iterator<Entry<String, Set<ClassDoc>>> tagClsIter = classes.iterator(); tagClsIter.hasNext();) {
        Entry<String, Set<ClassDoc>> kv = tagClsIter.next();
        String tagName = kv.getKey();
        if (!this.wrDoc.getTaggedOpenAPIs().containsKey(tagName)) {
            this.wrDoc.getTaggedOpenAPIs().put(tagName, new LinkedList<OpenAPI>());
        }
        Set<ClassDoc> classDocSet = kv.getValue();
        for (Iterator<ClassDoc> clsIter = classDocSet.iterator(); clsIter.hasNext();) {
            ClassDoc classDoc = clsIter.next();
            OpenAPI openAPI = new OpenAPI();
            openAPI.setDeprecated(Util.isDeprecated(classDoc) || Util.isDeprecated(classDoc.containingPackage()));
            Tag[] tags = classDoc.tags(WRTagTaglet.NAME);
            if (tags.length == 0) {
                openAPI.addTag(tagName);
            } else {
                for (Tag t : tags) {
                    openAPI.addTags(WRTagTaglet.getTagSet(t.text()));
                }
            }
            openAPI.setQualifiedName(classDoc.qualifiedName());
            if (StringUtils.isNotBlank(classDoc.commentText())) {
                openAPI.setDescription(classDoc.commentText());
            }
            // brief: explicit brief taglet wins over the derived one
            String brief;
            if (classDoc.tags(WRBriefTaglet.NAME).length == 0) {
                brief = getBriefFromCommentText(classDoc.commentText());
            } else {
                brief = classDoc.tags(WRBriefTaglet.NAME)[0].text();
            }
            openAPI.setBrief(brief);
            if (StringUtils.isBlank(openAPI.getDescription())) {
                openAPI.setDescription(openAPI.getBrief());
            }
            openAPI.setModificationHistory(this.getModificationHistory(classDoc));
            openAPI.setRequestMapping(this.parseRequestMapping(classDoc));
            // class-level APIs carry a single input parameter description
            openAPI.addInParameter(this.getInputParams(classDoc));
            openAPI.setOutParameter(this.getOutputParam(classDoc));
            this.wrDoc.getTaggedOpenAPIs().get(tagName).add(openAPI);
        }
    }
}
/*
 * Builds one OpenAPI entry per registered method. Where the method itself
 * lacks a tag/description/brief, the counterpart method from methodMap
 * (interface ⇄ implementation) is consulted as a fallback.
 */
protected void buildOpenAPIs(Configuration configuration) {
    Set<Entry<String, Set<MethodDoc>>> methods = this.taggedOpenAPIMethods.entrySet();
    for (Iterator<Entry<String, Set<MethodDoc>>> tagMthIter = methods.iterator(); tagMthIter.hasNext();) {
        Entry<String, Set<MethodDoc>> kv = tagMthIter.next();
        String tagName = kv.getKey();
        if (!this.wrDoc.getTaggedOpenAPIs().containsKey(tagName)) {
            this.wrDoc.getTaggedOpenAPIs().put(tagName, new LinkedList<OpenAPI>());
        }
        Set<MethodDoc> methodDocSet = kv.getValue();
        for (Iterator<MethodDoc> mthIter = methodDocSet.iterator(); mthIter.hasNext();) {
            MethodDoc methodDoc = mthIter.next();
            OpenAPI openAPI = new OpenAPI();
            // deprecated if the method, its class, or its package is
            openAPI.setDeprecated(Util.isDeprecated(methodDoc) || Util.isDeprecated(methodDoc.containingClass())
                    || Util.isDeprecated(methodDoc.containingPackage()));
            Tag[] tags = this.getTagTaglets(methodDoc);
            if (tags.length == 0 && this.methodMap.containsKey(methodDoc)) {
                tags = this.getTagTaglets(this.methodMap.get(methodDoc));
            }
            if (tags.length == 0) {
                openAPI.addTag(methodDoc.containingClass().simpleTypeName());
            } else {
                for (Tag t : tags) {
                    openAPI.addTags(WRTagTaglet.getTagSet(t.text()));
                }
            }
            openAPI.setQualifiedName(methodDoc.qualifiedName());
            if (StringUtils.isNotBlank(methodDoc.commentText())) {
                openAPI.setDescription(methodDoc.commentText());
            } else if (this.methodMap.containsKey(methodDoc)) {
                openAPI.setDescription(this.methodMap.get(methodDoc).commentText());
            }
            // brief: explicit brief taglet wins; then derived; then the
            // counterpart method's taglet/comment
            String brief;
            if (methodDoc.tags(WRBriefTaglet.NAME).length == 0) {
                brief = getBriefFromCommentText(methodDoc.commentText());
            } else {
                brief = methodDoc.tags(WRBriefTaglet.NAME)[0].text();
            }
            if (StringUtils.isBlank(brief) && this.methodMap.containsKey(methodDoc)) {
                if (this.methodMap.get(methodDoc).tags(WRBriefTaglet.NAME).length == 0) {
                    brief = getBriefFromCommentText(this.methodMap.get(methodDoc).commentText());
                } else {
                    brief = this.methodMap.get(methodDoc).tags(WRBriefTaglet.NAME)[0].text();
                }
            }
            openAPI.setBrief(brief);
            if (StringUtils.isBlank(openAPI.getDescription())) {
                openAPI.setDescription(openAPI.getBrief());
            }
            openAPI.setModificationHistory(this.getModificationHistory(methodDoc));
            openAPI.setRequestMapping(this.parseRequestMapping(methodDoc));
            if (openAPI.getRequestMapping() != null) {
                openAPI.setAuthNeeded(this.isAPIAuthNeeded(openAPI.getRequestMapping().getUrl()));
            }
            openAPI.addInParameters(this.getInputParams(methodDoc));
            openAPI.setOutParameter(this.getOutputParam(methodDoc));
            openAPI.setReturnCode(this.getReturnCode(methodDoc));
            this.wrDoc.getTaggedOpenAPIs().get(tagName).add(openAPI);
        }
    }
}
/**
 * @param url
 *            url of API.
 * @return 0 for anonymous allowed, 1 for authentication needed, others for not
 *         specified.
 */
protected abstract int isAPIAuthNeeded(String url);

// Whether the given method should be documented as an open API.
protected abstract boolean isOpenAPIMethod(MethodDoc methodDoc);

// Framework-specific extraction of the request mapping (URL etc.) for a
// method-level or class-level API.
protected abstract RequestMapping parseRequestMapping(MethodDoc methodDoc);

protected abstract RequestMapping parseRequestMapping(ClassDoc classDoc);

// Output (response) parameter description of an API.
protected abstract APIParameter getOutputParam(MethodDoc methodDoc);

protected abstract APIParameter getOutputParam(ClassDoc classDoc);

// Input (request) parameter descriptions of an API. NOTE: the ClassDoc
// overload returns a single APIParameter (consumed via addInParameter),
// while the MethodDoc overload returns a list.
protected abstract List<APIParameter> getInputParams(MethodDoc methodDoc);

protected abstract APIParameter getInputParams(ClassDoc classDoc);
/*
 * Looks up the javadoc @param comment written for the given parameter name;
 * returns null when the method has no matching @param tag.
 */
protected String getParamComment(MethodDoc method, String paramName) {
    for (ParamTag paramTag : method.paramTags()) {
        if (paramTag.parameterName().equals(paramName)) {
            return paramTag.parameterComment();
        }
    }
    return null;
}
/*
 * Whether the element carries an annotation whose fully qualified type name
 * equals the given string.
 */
protected boolean isProgramElementDocAnnotatedWith(ProgramElementDoc elementDoc, String annotation) {
    for (AnnotationDesc desc : elementDoc.annotations()) {
        if (desc.annotationType().qualifiedTypeName().equals(annotation)) {
            return true;
        }
    }
    return false;
}
/*
 * Builds the modification history of the given class; empty when classDoc is
 * null.
 */
protected ModificationHistory getModificationHistory(ClassDoc classDoc) {
    ModificationHistory history = new ModificationHistory();
    if (classDoc == null) {
        return history;
    }
    history.addModificationRecords(this.getModificationRecords(classDoc));
    return history;
}
/*
 * Gets the modification history of the class named by the given type; empty
 * when the type is not part of this javadoc run.
 */
protected ModificationHistory getModificationHistory(Type type) {
    ModificationHistory history = new ModificationHistory();
    ClassDoc resolved = this.wrDoc.getConfiguration().root.classNamed(type.qualifiedTypeName());
    if (resolved != null) {
        history.addModificationRecords(this.getModificationRecords(resolved));
    }
    return history;
}
/*
 * Gets the modification history of the method, parsed from its own tags.
 */
protected ModificationHistory getModificationHistory(MethodDoc methodDoc) {
    ModificationHistory history = new ModificationHistory();
    LinkedList<ModificationRecord> records = this.parseModificationRecords(methodDoc.tags());
    history.addModificationRecords(records);
    return history;
}
/*
 * Collects the modification records of the class, superclass records first.
 * NOTE: when the class has no superclass at all, its own tags are NOT parsed
 * either — this mirrors the original behavior.
 */
protected LinkedList<ModificationRecord> getModificationRecords(ClassDoc classDoc) {
    LinkedList<ModificationRecord> records = new LinkedList<ModificationRecord>();
    ClassDoc parent = classDoc.superclass();
    if (parent != null) {
        records.addAll(this.getModificationRecords(parent));
        records.addAll(this.parseModificationRecords(classDoc.tags()));
    }
    return records;
}
/*
 * Parse tags to get customized parameters, one per taglet occurrence.
 */
protected LinkedList<APIParameter> parseCustomizedParameters(MethodDoc methodDoc) {
    LinkedList<APIParameter> params = new LinkedList<APIParameter>();
    for (Tag tag : methodDoc.tags(WRParamTaglet.NAME)) {
        params.add(WRParamTaglet.parse(tag.text()));
    }
    return params;
}
/*
 * Parse tags to get customized return.
 */
protected APIParameter parseCustomizedReturn(MethodDoc methodDoc) {
    Tag[] tags = methodDoc.tags(WRReturnTaglet.NAME);
    // Only the first customized-return tag is honored; null when absent.
    return tags.length > 0 ? WRReturnTaglet.parse(tags[0].text()) : null;
}
/*
 * Parse tags to get modification records.
 *
 * Records are encoded positionally: each "@author" tag starts a record, and
 * the record's version/memo are taken from the tag(s) immediately following
 * it — either "@version" (optionally followed by the memo tag) or the memo
 * tag directly. Tags that are not part of such a sequence are ignored.
 */
protected LinkedList<ModificationRecord> parseModificationRecords(Tag[] tags) {
    LinkedList<ModificationRecord> result = new LinkedList<ModificationRecord>();
    for (int i = 0; i < tags.length; i++) {
        if ("@author".equalsIgnoreCase(tags[i].name())) {
            ModificationRecord record = new ModificationRecord();
            record.setModifier(tags[i].text());
            // Look ahead one tag for the version (and one more for the memo),
            // or directly for the memo when no version is given.
            if (i + 1 < tags.length) {
                if ("@version".equalsIgnoreCase(tags[i + 1].name())) {
                    record.setVersion(tags[i + 1].text());
                    if (i + 2 < tags.length && ("@" + WRMemoTaglet.NAME).equalsIgnoreCase(tags[i + 2].name())) {
                        record.setMemo(tags[i + 2].text());
                    }
                } else if (("@" + WRMemoTaglet.NAME).equalsIgnoreCase(tags[i + 1].name())) {
                    record.setMemo(tags[i + 1].text());
                }
            }
            result.add(record);
        }
    }
    return result;
}
/**
 * Concatenates all custom return-code tag bodies of the method into one
 * string (delegated to {@link WRReturnCodeTaglet#concat}).
 */
protected String getReturnCode(MethodDoc methodDoc) {
    return WRReturnCodeTaglet.concat(methodDoc.tags(WRReturnCodeTaglet.NAME));
}
/**
 * Returns the MQ consumer topic declared on the class: the first line of the
 * first consumer tag, or "" when the tag is absent.
 */
protected String getMQConsumerTopic(ClassDoc classDoc) {
    Tag[] tags = classDoc.tags(WRMqConsumerTaglet.NAME);
    return tags.length == 0 ? "" : StringUtils.substringBefore(tags[0].text(), "\n");
}
/**
 * Returns the MQ producer topic declared on the class: the first line of the
 * first producer tag, or "" when the tag is absent.
 */
protected String getMQProducerTopic(ClassDoc classDoc) {
    Tag[] tags = classDoc.tags(WRMqProducerTaglet.NAME);
    return tags.length == 0 ? "" : StringUtils.substringBefore(tags[0].text(), "\n");
}
/**
 * Tells whether the class is excluded from field expansion via the
 * configured comma-separated "stop classes" list.
 *
 * Matching is per package segment, case-insensitive. A "*" segment in a stop
 * entry matches the rest of the class name (so "java.*" stops everything
 * under java). Without a wildcard, the entry must match every segment and
 * have the same segment count as the class name.
 */
protected boolean isInStopClasses(ClassDoc classDoc) {
    String property = ApplicationContextConfig.getStopClasses();
    if (property != null) {
        String[] stopClasses = property.split(",");
        // Split both names into dot-separated segments for positional compare.
        String[] cdParts = classDoc.qualifiedTypeName().split("\\.");
        for (String stopClass : stopClasses) {
            String[] scParts = stopClass.trim().split("\\.");
            if (scParts.length <= cdParts.length) {
                boolean hasDiffPart = false;
                for (int i = 0; i < scParts.length; i++) {
                    if (scParts[i].equals("*")) {
                        // Wildcard: everything matched so far, accept.
                        return true;
                    } else if (!scParts[i].equalsIgnoreCase(cdParts[i])) {
                        hasDiffPart = true;
                        break;
                    }
                }
                // Exact (non-wildcard) entries must consume the whole name.
                if (scParts.length == cdParts.length && !hasDiffPart) {
                    return true;
                }
            }
        }
    }
    return false;
}
/**
 * Tells whether a type AND all of its generic type arguments (recursively)
 * are in the stop-class list. Used to decide whether a type argument can be
 * skipped entirely when expanding fields.
 */
protected boolean isParameterizedTypeInStopClasses(Type type) {
    // The raw type itself must be stopped, otherwise the answer is no.
    if (!this.isInStopClasses(type.asClassDoc())) {
        return false;
    }
    // For parameterized types every argument must be stopped as well,
    // e.g. Map<String, MyDto> is NOT stopped if MyDto is documented.
    ParameterizedType pt = type.asParameterizedType();
    if (pt != null) {
        for (Type arg : pt.typeArguments()) {
            if (!this.isParameterizedTypeInStopClasses(arg)) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Expands a type into its documented fields.
 *
 * Primitives have no fields. For parameterized types each non-stopped type
 * argument is emitted as a synthetic parameter (marked parentTypeArgument)
 * and expanded in turn; then the raw type's own fields are appended.
 *
 * processingClasses tracks types already being expanded so self-referencing
 * types cannot recurse forever.
 */
protected List<APIParameter> getFields(Type type, ParameterType paramType, HashSet<String> processingClasses) {
    // Mark this type as in-flight before recursing (cycle guard).
    processingClasses.add(type.toString());
    List<APIParameter> result = new LinkedList<APIParameter>();
    if (!type.isPrimitive()) {
        ParameterizedType pt = type.asParameterizedType();
        if (pt != null && pt.typeArguments().length > 0) {
            for (Type arg : pt.typeArguments()) {
                if (!this.isParameterizedTypeInStopClasses(arg)) {
                    APIParameter tmp = new APIParameter();
                    tmp.setName(arg.simpleTypeName());
                    tmp.setType(this.getTypeName(arg, false));
                    tmp.setDescription("");
                    tmp.setParentTypeArgument(true);
                    // Only descend when the argument is not already on the
                    // expansion stack.
                    if (!processingClasses.contains(arg.qualifiedTypeName())) {
                        tmp.setFields(this.getFields(arg, paramType, processingClasses));
                    }
                    result.add(tmp);
                }
            }
        }
        // Expand the raw type's own fields when it is part of the
        // documented sources.
        ClassDoc classDoc = this.wrDoc.getConfiguration().root.classNamed(type.qualifiedTypeName());
        if (classDoc != null) {
            result.addAll(this.getFields(classDoc, paramType, processingClasses));
        }
    }
    return result;
}
/**
 * Collects the API parameters exposed by a class: superclass fields first,
 * then public (or lombok-generated) fields, then properties discovered via
 * getter/setter methods.
 *
 * Which accessors count depends on direction: getters for Response,
 * setters for Request. Descriptions fall back from the accessor's javadoc
 * to the backing private field's javadoc; names honor Jackson/fastjson
 * annotations on either the accessor or the field.
 */
protected List<APIParameter> getFields(ClassDoc classDoc, ParameterType paramType,
        HashSet<String> processingClasses) {
    // Cycle guard: remember that this class is being expanded.
    processingClasses.add(classDoc.toString());
    List<APIParameter> result = new LinkedList<APIParameter>();
    // Lombok generates accessors, so annotated classes expose even their
    // private fields in the relevant direction.
    boolean isLomBokClass = this.isProgramElementDocAnnotatedWith(classDoc, "lombok.Data")
            || (paramType == ParameterType.Response
                    && this.isProgramElementDocAnnotatedWith(classDoc, "lombok.Getter"))
            || (paramType == ParameterType.Request
                    && this.isProgramElementDocAnnotatedWith(classDoc, "lombok.Setter"));
    // todo
    // this.wrDoc.getConfiguration().root.classNamed(type.qualifiedTypeName()).typeParameters()[0].qualifiedTypeName()
    // Inherited fields come first, unless the superclass is stopped or
    // already being expanded.
    ClassDoc superClassDoc = classDoc.superclass();
    if (superClassDoc != null && !this.isInStopClasses(superClassDoc)
            && !processingClasses.contains(superClassDoc.qualifiedTypeName())) {
        result.addAll(this.getFields(superClassDoc, paramType, processingClasses));
    }
    if (this.isInStopClasses(classDoc)) {
        return result;
    }
    FieldDoc[] fieldDocs = classDoc.fields(false);
    // Side tables built from non-exposed (private) fields so accessor-based
    // parameters can inherit their description / JSON name / validators.
    HashMap<String, String> privateFieldValidator = new HashMap<>();
    HashMap<String, String> privateFieldDesc = new HashMap<String, String>();
    HashMap<String, String> privateJsonField = new HashMap<String, String>();
    Set<String> transientFieldSet = new HashSet<>();
    for (FieldDoc fieldDoc : fieldDocs) {
        // A field is directly exposed when it is non-transient, non-static
        // and either public or surfaced by lombok in this direction.
        if (!fieldDoc.isTransient() && !fieldDoc.isStatic()
                && (fieldDoc.isPublic() || isLomBokClass
                        || (this.isProgramElementDocAnnotatedWith(fieldDoc, "lombok.Getter")
                                && paramType == ParameterType.Response)
                        || (this.isProgramElementDocAnnotatedWith(fieldDoc, "lombok.Setter")
                                && paramType == ParameterType.Request))) {
            APIParameter param = new APIParameter();
            param.setName(fieldDoc.name());
            param.setType(this.getTypeName(fieldDoc.type(), false));
            if (!processingClasses.contains(fieldDoc.type().qualifiedTypeName())) {
                param.setFields(this.getFields(fieldDoc.type(), paramType, processingClasses));
            }
            param.setDescription(this.getFieldDescription(fieldDoc));
            param.setHistory(new ModificationHistory(this.parseModificationRecords(fieldDoc.tags())));
            param.setParameterOccurs(this.parseParameterOccurs(fieldDoc.tags(WROccursTaglet.NAME)));
            result.add(param);
        } else {
            // Not directly exposed: remember its metadata for the accessor
            // pass below.
            privateFieldDesc.put(fieldDoc.name(), fieldDoc.commentText());
            String jsonField = this.getJsonField(fieldDoc);
            if (jsonField != null) {
                privateJsonField.put(fieldDoc.name(), jsonField);
            }
            privateFieldValidator.put(fieldDoc.name(), this.getFieldValidatorDesc(fieldDoc));
            if (fieldDoc.isTransient()) {
                transientFieldSet.add(fieldDoc.name());
            }
        }
    }
    MethodDoc[] methodDocs = classDoc.methods(false);
    for (MethodDoc methodDoc : methodDocs) {
        // Accessors of transient fields are skipped entirely.
        if (transientFieldSet.contains(this.getFieldNameOfAccesser(methodDoc.name()))) {
            continue;
        }
        if ((paramType == ParameterType.Response && this.isGetterMethod(methodDoc))
                || (paramType == ParameterType.Request && this.isSetterMethod(methodDoc))) {
            APIParameter param = new APIParameter();
            String fieldNameOfAccesser = this.getFieldNameOfAccesser(methodDoc.name());
            param.setName(fieldNameOfAccesser);
            // JSON-name override: accessor annotation wins over the backing
            // field's annotation.
            String jsonField = this.getJsonField(methodDoc);
            if (jsonField != null) {
                param.setName(jsonField);
            } else if (privateJsonField.containsKey(param.getName())) {
                param.setName(privateJsonField.get(param.getName()));
            }
            Type typeToProcess = null;
            if (paramType == ParameterType.Request) {
                // set method only has one parameter.
                typeToProcess = methodDoc.parameters()[0].type();
            } else {
                typeToProcess = methodDoc.returnType();
            }
            param.setType(this.getTypeName(typeToProcess, false));
            if (!processingClasses.contains(typeToProcess.qualifiedTypeName())) {
                param.setFields(this.getFields(typeToProcess, paramType, processingClasses));
            }
            param.setHistory(new ModificationHistory(this.parseModificationRecords(methodDoc.tags())));
            // Description fallback chain: accessor comment -> @param/@return
            // tag -> backing field comment (boolean "isXxx" fields checked
            // under their "is"-prefixed name).
            if (StringUtils.isEmpty(methodDoc.commentText())) {
                if (paramType == ParameterType.Request) {
                    param.setDescription(this.getParamComment(methodDoc, methodDoc.parameters()[0].name()));
                } else {
                    for (Tag tag : methodDoc.tags("return")) {
                        param.setDescription(tag.text());
                    }
                }
            } else {
                param.setDescription(methodDoc.commentText());
            }
            if (StringUtils.isEmpty(param.getDescription())) {
                String temp = privateFieldDesc.get(param.getName());
                if (temp == null) {
                    if (typeToProcess.typeName().equals("boolean")) {
                        // NOTE(review): this repeated lookup always yields
                        // null again; only the "is"-prefixed lookup below can
                        // succeed here — looks redundant, confirm intent.
                        temp = privateFieldDesc.get(param.getName());
                        if (temp == null) {
                            param.setDescription(privateFieldDesc
                                    .get("is" + net.winroad.wrdoclet.utils.Util.capitalize(param.getName())));
                        }
                    }
                } else {
                    param.setDescription(temp);
                }
            }
            // Append validator annotations rendered from the backing field.
            if (privateFieldValidator.get(fieldNameOfAccesser) != null) {
                param.setDescription(param.getDescription() == null ? privateFieldValidator.get(fieldNameOfAccesser)
                        : param.getDescription() + " " + privateFieldValidator.get(fieldNameOfAccesser));
            }
            param.setParameterOccurs(this.parseParameterOccurs(methodDoc.tags(WROccursTaglet.NAME)));
            result.add(param);
        }
    }
    return result;
}
/**
 * Renders a field's description: its javadoc comment followed by a space and
 * the rendered validator annotations.
 */
protected String getFieldDescription(FieldDoc fieldDoc) {
    return fieldDoc.commentText() + " " + this.getFieldValidatorDesc(fieldDoc);
}
/**
 * Resolves the JSON property name declared on a member via Jackson's
 * {@code @JsonProperty} or fastjson's {@code @JSONField}.
 *
 * @param memberDoc the documented field or method to inspect
 * @return the declared (non-empty) JSON name with surrounding quotes
 *         stripped, or {@code null} when no annotation declares one
 */
protected String getJsonField(MemberDoc memberDoc) {
    for (AnnotationDesc annotationDesc : memberDoc.annotations()) {
        String qualifiedName = annotationDesc.annotationType().qualifiedTypeName();
        // Jackson uses the "value" element, fastjson uses "name"; the
        // extraction logic is otherwise identical, so it is shared below.
        if (qualifiedName.startsWith("com.fasterxml.jackson.annotation.JsonProperty")) {
            String value = this.getAnnotationElementValue(annotationDesc, "value");
            if (value != null) {
                return value;
            }
        }
        if (qualifiedName.startsWith("com.alibaba.fastjson.annotation.JSONField")) {
            String value = this.getAnnotationElementValue(annotationDesc, "name");
            if (value != null) {
                return value;
            }
        }
    }
    return null;
}

/**
 * Returns the non-empty string value of the named annotation element with
 * surrounding quotes stripped, or {@code null} when absent or empty.
 */
private String getAnnotationElementValue(AnnotationDesc annotationDesc, String elementName) {
    for (AnnotationDesc.ElementValuePair elementValuePair : annotationDesc.elementValues()) {
        String raw = elementValuePair.value().toString();
        // The doclet API renders string values quoted; "\"\"" is an
        // explicitly empty name and is ignored.
        if (elementValuePair.element().name().equals(elementName)
                && !StringUtils.isEmpty(raw)
                && !"\"\"".equals(raw)) {
            return raw.replace("\"", "");
        }
    }
    return null;
}
/**
 * Renders the bean-validation annotations on a field as a readable string,
 * e.g. {@code @NotNull @Size(min=1,max=10) }. Only Hibernate Validator,
 * javax.validation and lombok.NonNull annotations are included; each
 * rendered annotation is followed by a single space.
 */
protected String getFieldValidatorDesc(FieldDoc fieldDoc) {
    StringBuilder desc = new StringBuilder();
    for (AnnotationDesc annotation : fieldDoc.annotations()) {
        String qualifiedName = annotation.annotationType().qualifiedTypeName();
        boolean isValidator = qualifiedName.startsWith("org.hibernate.validator.constraints")
                || qualifiedName.startsWith("javax.validation.constraints")
                || qualifiedName.startsWith("lombok.NonNull");
        if (!isValidator) {
            continue;
        }
        desc.append("@").append(annotation.annotationType().name());
        AnnotationDesc.ElementValuePair[] pairs = annotation.elementValues();
        if (pairs.length > 0) {
            desc.append("(");
            for (int i = 0; i < pairs.length; i++) {
                if (i > 0) {
                    desc.append(",");
                }
                desc.append(pairs[i].element().name());
                desc.append("=");
                // Element values may contain \NNNN escapes; decode for display.
                desc.append(net.winroad.wrdoclet.utils.Util.decodeUnicode(pairs[i].value().toString()));
            }
            desc.append(")");
        }
        desc.append(" ");
    }
    return desc.toString();
}
/**
 * Renders a display name for the given type.
 *
 * Parameterized types are rendered as {@code Raw<Arg1,Arg2>}, enums as
 * {@code Enum[A,B,...]}, and classes with a documented (non-stopped)
 * superclass as {@code Sub extends Super} unless {@code ignoreSuperType}.
 *
 * @param typeToProcess   the type to render
 * @param ignoreSuperType when true, do not append the "extends ..." suffix
 * @return the rendered type name
 */
protected String getTypeName(Type typeToProcess, boolean ignoreSuperType) {
    // special type to process e.g. java.util.Map.Entry<Address,Person>
    ParameterizedType pt = typeToProcess.asParameterizedType();
    if (pt != null && pt.typeArguments().length > 0) {
        StringBuilder strBuilder = new StringBuilder();
        strBuilder.append(typeToProcess.qualifiedTypeName());
        strBuilder.append("<");
        for (Type arg : pt.typeArguments()) {
            strBuilder.append(this.getTypeName(arg, true));
            strBuilder.append(",");
        }
        // trim the last ","
        strBuilder.deleteCharAt(strBuilder.length() - 1);
        strBuilder.append(">");
        return strBuilder.toString();
    }
    if (typeToProcess.asClassDoc() != null) {
        ClassDoc superClass = typeToProcess.asClassDoc().superclass();
        if (superClass != null) {
            // handle enum to output enum values into doc
            if ("java.lang.Enum".equals(superClass.qualifiedTypeName())) {
                FieldDoc[] enumConstants = typeToProcess.asClassDoc().enumConstants();
                StringBuilder strBuilder = new StringBuilder();
                strBuilder.append("Enum[");
                for (FieldDoc enumConstant : enumConstants) {
                    strBuilder.append(enumConstant.name());
                    strBuilder.append(",");
                }
                // trim the last ","
                strBuilder.deleteCharAt(strBuilder.length() - 1);
                strBuilder.append("]");
                return strBuilder.toString();
            } else if (!ignoreSuperType && !this.isInStopClasses(superClass)) {
                return typeToProcess.qualifiedTypeName() + " extends "
                        + this.getTypeName(typeToProcess.asClassDoc().superclassType(), false);
            }
        }
    }
    // BUG FIX: use toString() instead of qualifiedTypeName(). For array
    // types qualifiedTypeName() returns only the element type, so "byte[]"
    // was rendered as "byte"; toString() keeps the array dimension.
    return typeToProcess.toString();
}
/*
 * Parse the ParameterOccurs from the tags.
 */
protected ParameterOccurs parseParameterOccurs(Tag[] tags) {
    String occursTagName = "@" + WROccursTaglet.NAME;
    for (Tag tag : tags) {
        if (!occursTagName.equalsIgnoreCase(tag.name())) {
            continue;
        }
        String text = tag.text();
        if (WROccursTaglet.REQUIRED.equalsIgnoreCase(text)) {
            return ParameterOccurs.REQUIRED;
        }
        if (WROccursTaglet.OPTIONAL.equalsIgnoreCase(text)) {
            return ParameterOccurs.OPTIONAL;
        }
        if (WROccursTaglet.DEPENDS.equalsIgnoreCase(text)) {
            return ParameterOccurs.DEPENDS;
        }
        // Unknown occurs value: warn and keep scanning remaining tags.
        this.logger.warn("Unexpected WROccursTaglet: " + text);
    }
    return null;
}
/*
 * is the method a getter method of a field.
 *
 * A getter takes no arguments and is named "getXxx" (non-boolean return)
 * or "isXxx" (boolean return).
 */
protected boolean isGetterMethod(MethodDoc methodDoc) {
    // BUG FIX: in the original expression the boolean/"is" clause sat
    // outside the parameter-count check due to ||/&& precedence, so an
    // "isXxx(...)" method WITH parameters was wrongly treated as a getter.
    // Require zero parameters for both naming styles.
    if (methodDoc.parameters() == null || methodDoc.parameters().length != 0) {
        return false;
    }
    boolean returnsBoolean = "boolean".equalsIgnoreCase(methodDoc.returnType().qualifiedTypeName());
    return returnsBoolean
            ? methodDoc.name().matches("^is.+")
            : methodDoc.name().matches("^get.+");
}
/*
 * is the method a setter method of a field.
 */
protected boolean isSetterMethod(MethodDoc methodDoc) {
    // A setter takes exactly one argument and is named "setXxx".
    return methodDoc.parameters() != null
            && methodDoc.parameters().length == 1
            && methodDoc.name().matches("^set.+");
}
/*
 * get the field name which the getter or setter method to access. NOTE: the
 * getter or setter method name should follow the naming convention.
 */
protected String getFieldNameOfAccesser(String methodName) {
    // BUG FIX: the original used replaceFirst("get"/"set"/"is", ""), which
    // treats the prefix as a regex and removes its FIRST occurrence anywhere
    // in the name — e.g. a method named "raiseFlag" lost its inner "is"
    // ("raeFlag"). Strip the prefix positionally instead.
    if (methodName.startsWith("get")) {
        return net.winroad.wrdoclet.utils.Util.uncapitalize(methodName.substring(3));
    } else if (methodName.startsWith("set")) {
        return net.winroad.wrdoclet.utils.Util.uncapitalize(methodName.substring(3));
    } else if (methodName.startsWith("is")) {
        return net.winroad.wrdoclet.utils.Util.uncapitalize(methodName.substring(2));
    }
    // Not a conventional accessor name: return it uncapitalized but
    // otherwise unchanged rather than mangling an arbitrary inner match.
    return net.winroad.wrdoclet.utils.Util.uncapitalize(methodName);
}
/**
 * Parses an XML configuration file (e.g. a dubbo/spring config) into a DOM
 * {@link Document} with a namespace-aware parser.
 *
 * @param filePath path of the XML file to parse
 * @return the parsed document
 * @throws ParserConfigurationException when the parser cannot be configured
 * @throws SAXException when the file is not well-formed XML
 * @throws IOException when the file cannot be read
 */
public static Document readXMLConfig(String filePath)
        throws ParserConfigurationException, SAXException, IOException {
    DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
    builderFactory.setNamespaceAware(true);
    // Harden against XXE: never resolve external entities or fetch external
    // DTDs — the config is parsed for attributes only, neither is needed.
    builderFactory.setFeature("http://xml.org/sax/features/external-general-entities", false);
    builderFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
    builderFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    DocumentBuilder builder = builderFactory.newDocumentBuilder();
    File dubboConfig = new File(filePath);
    return builder.parse(dubboConfig);
}
/**
 * Reads the value of a named attribute from a DOM node.
 *
 * @param node          the node to inspect
 * @param attributeName the attribute's name
 * @return the attribute value, or {@code null} when the node has no
 *         attributes or no attribute with that name
 */
public static String getAttributeValue(Node node, String attributeName) {
    NamedNodeMap attributes = node.getAttributes();
    if (attributes == null) {
        return null;
    }
    Node attribute = attributes.getNamedItem(attributeName);
    return attribute == null ? null : attribute.getNodeValue();
}
/**
 * Applies a Spring Web binding annotation (e.g. @RequestParam) to an
 * APIParameter: the "value"/"name" element overrides the parameter name and
 * the "required" element sets the occurrence. Non-Spring annotations are
 * ignored.
 */
protected void processAnnotations(AnnotationDesc annotation, APIParameter apiParameter) {
    if (!annotation.annotationType().qualifiedName().startsWith("org.springframework.web.bind.annotation.")) {
        return;
    }
    for (ElementValuePair pair : annotation.elementValues()) {
        String elementName = pair.element().name();
        if (("value".equals(elementName) || "name".equals(elementName)) && pair.value() != null) {
            // Annotation string values are rendered quoted; strip the quotes.
            apiParameter.setName(pair.value().toString().replace("\"", ""));
        }
        if ("required".equals(elementName)) {
            apiParameter.setParameterOccurs(pair.value().value().equals(true)
                    ? ParameterOccurs.REQUIRED
                    : ParameterOccurs.OPTIONAL);
        }
    }
}
/**
 * Expands custom "referenced request" tags on a method into APIParameters.
 *
 * Each tag body is a space-separated tuple:
 *   name type [description] [occurs]
 * where occurs is the REQUIRED/OPTIONAL keyword. When the declared type is a
 * documented class, its fields are expanded as a Request parameter tree.
 */
protected void handleRefReq(MethodDoc method, List<APIParameter> paramList) {
    Tag[] tags = method.tags(WRRefReqTaglet.NAME);
    for (int i = 0; i < tags.length; i++) {
        APIParameter apiParameter = new APIParameter();
        String[] strArr = tags[i].text().split(" ");
        // Positional parse: token index decides which attribute it fills.
        for (int j = 0; j < strArr.length; j++) {
            switch (j) {
            case 0:
                apiParameter.setName(strArr[j]);
                break;
            case 1:
                apiParameter.setType(strArr[j]);
                break;
            case 2:
                apiParameter.setDescription(strArr[j]);
                break;
            case 3:
                if (StringUtils.equalsIgnoreCase(strArr[j], WROccursTaglet.REQUIRED)) {
                    apiParameter.setParameterOccurs(ParameterOccurs.REQUIRED);
                } else if (StringUtils.equalsIgnoreCase(strArr[j], WROccursTaglet.OPTIONAL)) {
                    apiParameter.setParameterOccurs(ParameterOccurs.OPTIONAL);
                }
                break;
            default:
                // More than four tokens is malformed; warn but keep the
                // attributes parsed so far.
                logger.warn("Unexpected tag:" + tags[i].text());
            }
        }
        // Expand the declared type's fields when it belongs to the
        // documented sources (fresh cycle-guard set per tag).
        HashSet<String> processingClasses = new HashSet<String>();
        ClassDoc c = this.wrDoc.getConfiguration().root.classNamed(apiParameter.getType());
        if (c != null) {
            apiParameter.setFields(this.getFields(c, ParameterType.Request, processingClasses));
        }
        paramList.add(apiParameter);
    }
}
}
fix array type issue
bug: an array type such as "byte[]" was previously processed as its element type "byte"
<ide> }
<ide> }
<ide>
<del> return typeToProcess.qualifiedTypeName();
<add> return typeToProcess.toString();
<ide> }
<ide>
<ide> /* |
|
Java | apache-2.0 | 21fd7a996a019a06d3d3c33407286bad1884adb7 | 0 | reactor/reactor-netty,reactor/reactor-netty | /*
* Copyright (c) 2011-2017 Pivotal Software Inc, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.ipc.netty.channel;
import java.net.InetSocketAddress;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.socket.DatagramChannel;
import io.netty.channel.socket.SocketChannel;
import io.netty.util.Attribute;
import io.netty.util.AttributeKey;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscription;
import reactor.core.CoreSubscriber;
import reactor.core.publisher.DirectProcessor;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Operators;
import reactor.ipc.netty.NettyConnector;
import reactor.ipc.netty.NettyContext;
import reactor.ipc.netty.NettyInbound;
import reactor.ipc.netty.NettyOutbound;
import reactor.ipc.netty.NettyPipeline;
import reactor.util.Logger;
import reactor.util.Loggers;
import reactor.util.context.Context;
/**
* A bridge between an immutable {@link Channel} and {@link NettyInbound} /
* {@link NettyOutbound} semantics exposed to user
* {@link NettyConnector#newHandler(BiFunction)}
*
* @author Stephane Maldini
* @since 0.6
*/
public class ChannelOperations<INBOUND extends NettyInbound, OUTBOUND extends NettyOutbound>
        implements NettyInbound, NettyOutbound, NettyContext, CoreSubscriber<Void> {

    /**
     * Create a new {@link ChannelOperations} attached to the {@link Channel} attribute
     * {@link #OPERATIONS_KEY}.
     * Attach the {@link NettyPipeline#ReactiveBridge} handle.
     *
     * @param channel the new {@link Channel} connection
     * @param handler the user-provided {@link BiFunction} i/o handler
     * @param context the dispose callback
     * @param <INBOUND> the {@link NettyInbound} type
     * @param <OUTBOUND> the {@link NettyOutbound} type
     *
     * @return the created {@link ChannelOperations} bridge
     */
    public static <INBOUND extends NettyInbound, OUTBOUND extends NettyOutbound> ChannelOperations<INBOUND, OUTBOUND> bind(
            Channel channel,
            BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> handler,
            ContextHandler<?> context) {
        @SuppressWarnings("unchecked") ChannelOperations<INBOUND, OUTBOUND> ops =
                new ChannelOperations<>(channel, handler, context);
        return ops;
    }

    /**
     * Return a Noop {@link BiFunction} handler
     *
     * @param <INBOUND> reified inbound type
     * @param <OUTBOUND> reified outbound type
     *
     * @return a Noop {@link BiFunction} handler
     */
    @SuppressWarnings("unchecked")
    public static <INBOUND extends NettyInbound, OUTBOUND extends NettyOutbound> BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> noopHandler() {
        return PING;
    }

    /**
     * Return the current {@link Channel} bound
     * {@link ChannelOperations} or null if none
     *
     * @param ch the current {@link Channel}
     *
     * @return the current {@link Channel} bound
     * {@link ChannelOperations} or null if none
     */
    public static ChannelOperations<?, ?> get(Channel ch) {
        return ch.attr(OPERATIONS_KEY)
                 .get();
    }

    // CAS-attach ops to the channel attribute: returns the already-installed
    // operations when one exists, or null when ops was installed by this call.
    // The loop retries when a concurrent writer wins the compareAndSet race.
    static ChannelOperations<?, ?> tryGetAndSet(Channel ch, ChannelOperations<?, ?> ops) {
        Attribute<ChannelOperations> attr = ch.attr(ChannelOperations.OPERATIONS_KEY);
        for (; ; ) {
            ChannelOperations<?, ?> op = attr.get();
            if (op != null) {
                return op;
            }
            if (attr.compareAndSet(null, ops)) {
                return null;
            }
        }
    }

    final BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>>
            handler;
    final Channel channel;
    final FluxReceive inbound;
    final DirectProcessor<Void> onInactive;
    final ContextHandler<?> context;
    @SuppressWarnings("unchecked")
    volatile Subscription outboundSubscription;

    protected ChannelOperations(Channel channel,
            ChannelOperations<INBOUND, OUTBOUND> replaced) {
        // Reuse the replaced operations' lifecycle processor so onClose
        // subscribers survive the replacement.
        this(channel, replaced.handler, replaced.context, replaced.onInactive);
    }

    protected ChannelOperations(Channel channel,
            BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> handler,
            ContextHandler<?> context) {
        this(channel, handler, context, DirectProcessor.create());
    }

    protected ChannelOperations(Channel channel,
            BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> handler,
            ContextHandler<?> context, DirectProcessor<Void> processor) {
        this.handler = Objects.requireNonNull(handler, "handler");
        this.channel = Objects.requireNonNull(channel, "channel");
        this.context = Objects.requireNonNull(context, "context");
        this.inbound = new FluxReceive(this);
        this.onInactive = processor;
        // Capture the subscription to the close/release signal so it can be
        // cancelled once onInactive terminates (see comment below).
        Subscription[] _s = new Subscription[1];
        Mono.fromDirect(context.onCloseOrRelease(channel))
            .doOnSubscribe(s -> _s[0] = s)
            .subscribe(onInactive);
        if(_s[0] != null) { //remove closeFuture listener ref by onCloseOrRelease
            // subscription when onInactive is called for any reason from
            // onHandlerTerminate
            onInactive.subscribe(null, null, _s[0]::cancel);
        }
    }

    @Override
    public InetSocketAddress address() {
        // TCP exposes the remote peer, UDP the local bind address.
        Channel c = channel();
        if (c instanceof SocketChannel) {
            return ((SocketChannel) c).remoteAddress();
        }
        if (c instanceof DatagramChannel) {
            return ((DatagramChannel) c).localAddress();
        }
        throw new IllegalStateException("Does not have an InetSocketAddress");
    }

    @Override
    public final Channel channel() {
        return channel;
    }

    @Override
    public final NettyContext context() {
        return this;
    }

    @Override
    public ChannelOperations<INBOUND, OUTBOUND> context(Consumer<NettyContext> contextCallback) {
        contextCallback.accept(context());
        return this;
    }

    @Override
    public void dispose() {
        // Stop consuming inbound traffic, then close the underlying channel.
        inbound.cancel();
        channel.close();
    }

    @Override
    public final boolean isDisposed() {
        // Disposed once the channel attribute no longer points at this
        // instance (replaced or terminated).
        return get(channel()) != this;
    }

    @Override
    public final Mono<Void> onClose() {
        return Mono.fromDirect(onInactive);
    }

    @Override
    public NettyContext onClose(final Runnable onClose) {
        // Run the callback on either error or completion of the lifecycle.
        onInactive.subscribe(null, e -> onClose.run(), onClose);
        return this;
    }

    @Override
    public final void onComplete() {
        // One-shot guard: swap the outbound subscription for the cancelled
        // marker; a second terminal signal (or a disposed state) is ignored.
        Subscription s =
                OUTBOUND_CLOSE.getAndSet(this, Operators.cancelledSubscription());
        if (s == Operators.cancelledSubscription() || isDisposed()) {
            return;
        }
        onOutboundComplete();
    }

    @Override
    public final void onError(Throwable t) {
        // Same one-shot guard as onComplete; late errors are only logged.
        Subscription s =
                OUTBOUND_CLOSE.getAndSet(this, Operators.cancelledSubscription());
        if (s == Operators.cancelledSubscription() || isDisposed()) {
            // NOTE(review): log.error guarded by isDebugEnabled() — the late
            // error is dropped unless debug logging is on; confirm intent.
            if(log.isDebugEnabled()){
                log.error("An outbound error could not be processed", t);
            }
            return;
        }
        onOutboundError(t);
    }

    @Override
    public final void onNext(Void aVoid) {
    }

    @Override
    public final void onSubscribe(Subscription s) {
        if (Operators.setOnce(OUTBOUND_CLOSE, this, s)) {
            s.request(Long.MAX_VALUE);
        }
    }

    @Override
    public Flux<?> receiveObject() {
        return inbound;
    }

    @Override
    public final InetSocketAddress remoteAddress() {
        return (InetSocketAddress) channel.remoteAddress();
    }

    @Override
    public String toString() {
        return channel.toString();
    }

    /**
     * Return true if inbound traffic is not expected anymore
     *
     * @return true if inbound traffic is not expected anymore
     */
    protected final boolean isInboundDone() {
        return inbound.inboundDone || !channel.isActive();
    }

    /**
     * Return true if inbound traffic is not expected anymore
     *
     * @return true if inbound traffic is not expected anymore
     */
    protected final boolean isInboundCancelled() {
        return inbound.isCancelled() || !channel.isActive();
    }

    /**
     * Return true if inbound traffic is not expected anymore
     *
     * @return true if inbound traffic is not expected anymore
     */
    protected final boolean isOutboundDone() {
        return outboundSubscription == Operators.cancelledSubscription() || !channel.isActive();
    }

    protected boolean shouldEmitEmptyContext() {
        return false;
    }

    /**
     * Connector handler provided by user
     *
     * @return Connector handler provided by user
     */
    protected final BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> handler() {
        return handler;
    }

    /**
     * React on input initialization
     *
     */
    @SuppressWarnings("unchecked")
    protected void onHandlerStart() {
        applyHandler();
        context.fireContextActive(this);
    }

    /**
     * React on inbound {@link Channel#read}
     *
     * @param ctx the context
     * @param msg the read payload
     */
    protected void onInboundNext(ChannelHandlerContext ctx, Object msg) {
        if (msg == null) {
            onInboundError(new NullPointerException("msg is null"));
            return;
        }
        inbound.onInboundNext(msg);
    }

    /**
     * Replace and complete previous operation inbound
     *
     * @param ops a new operations
     *
     * @return true if replaced
     */
    protected final boolean replace(ChannelOperations<?, ?> ops) {
        // CAS so a concurrent replace/terminate cannot double-swap.
        return channel.attr(OPERATIONS_KEY)
                      .compareAndSet(this, ops);
    }

    /**
     * React on inbound cancel (receive() subscriber cancelled)
     */
    protected void onInboundCancel() {

    }

    /**
     * React on inbound completion (last packet)
     */
    protected void onInboundComplete() {
        if (inbound.onInboundComplete()) {
            context.fireContextActive(this);
        }
    }

    /**
     * React on inbound/outbound completion (last packet)
     */
    protected void onOutboundComplete() {
        if (log.isDebugEnabled()) {
            log.debug("[{}] {} User Handler requesting close connection", formatName(), channel());
        }
        markPersistent(false);
        onHandlerTerminate();
    }

    /**
     * React on inbound/outbound error
     *
     * @param err the {@link Throwable} cause
     */
    protected void onOutboundError(Throwable err) {
        discreteRemoteClose(err);
        markPersistent(false);
        onHandlerTerminate();
    }

    /**
     * Apply the user-provided {@link NettyConnector} handler
     */
    @SuppressWarnings("unchecked")
    protected final void applyHandler() {
//		channel.pipeline()
//		       .fireUserEventTriggered(NettyPipeline.handlerStartedEvent());
        if (log.isDebugEnabled()) {
            log.debug("[{}] {} handler is being applied: {}", formatName(), channel
                    (), handler);
        }
        // The handler's completion/error feeds back into this class via the
        // CoreSubscriber<Void> callbacks (onComplete/onError above).
        Mono.fromDirect(handler.apply((INBOUND) this, (OUTBOUND) this))
            .subscribe(this);
    }

    /**
     * Try filtering out remote close unless traced, return true if filtered
     *
     * @param err the error to check
     *
     * @return true if filtered
     */
    protected final boolean discreteRemoteClose(Throwable err) {
        // Connection resets are expected churn: log at debug only.
        if (AbortedException.isConnectionReset(err)) {
            if (log.isDebugEnabled()) {
                log.debug("{} [{}] Connection closed remotely", channel.toString(),
                        formatName(),
                        err);
            }
            return true;
        }

        log.error("[" + formatName() + "] Error processing connection. Requesting close the channel",
                err);
        return false;
    }

    /**
     * Final release/close (last packet)
     */
    protected final void onHandlerTerminate() {
        // Only the winner of replace(null) performs the teardown sequence.
        if (replace(null)) {
            if(log.isTraceEnabled()){
                log.trace("{} Disposing ChannelOperation from a channel", channel(), new Exception
                        ("ChannelOperation terminal stack"));
            }
            try {
                Operators.terminate(OUTBOUND_CLOSE, this);
                onInactive.onComplete(); //signal senders and other interests
                onInboundComplete(); // signal receiver
            }
            finally {
                channel.pipeline()
                       .fireUserEventTriggered(NettyPipeline.handlerTerminatedEvent());
            }
        }
    }

    /**
     * React on inbound error
     *
     * @param err the {@link Throwable} cause
     */
    protected final void onInboundError(Throwable err) {
        discreteRemoteClose(err);
        if (inbound.onInboundError(err)) {
            context.fireContextError(err);
        }
    }

    /**
     * Return the available parent {@link ContextHandler} for user-facing lifecycle
     * handling
     *
     * @return the available parent {@link ContextHandler}for user-facing lifecycle
     * handling
     */
    protected final ContextHandler<?> parentContext() {
        return context;
    }

    /**
     * Return formatted name of this operation
     *
     * @return formatted name of this operation
     */
    protected final String formatName() {
        return getClass().getSimpleName()
                         .replace("Operations", "");
    }

    @Override
    public Context currentContext() {
        return context.sink.currentContext();
    }

    /**
     * A {@link ChannelOperations} factory
     */
    @FunctionalInterface
    public interface OnNew<CHANNEL extends Channel> {

        /**
         * Create a new {@link ChannelOperations} given a netty channel, a parent
         * {@link ContextHandler} and an optional message (nullable).
         *
         * @param c a {@link Channel}
         * @param contextHandler a {@link ContextHandler}
         * @param msg an optional message
         *
         * @return a new {@link ChannelOperations}
         */
        ChannelOperations<?, ?> create(CHANNEL c, ContextHandler<?> contextHandler, Object msg);
    }

    /**
     * The attribute in {@link Channel} to store the current {@link ChannelOperations}
     */
    protected static final AttributeKey<ChannelOperations> OPERATIONS_KEY = AttributeKey.newInstance("nettyOperations");
    static final Logger log = Loggers.getLogger(ChannelOperations.class);
    static final BiFunction PING = (i, o) -> Flux.empty();

    static final AtomicReferenceFieldUpdater<ChannelOperations, Subscription>
            OUTBOUND_CLOSE = AtomicReferenceFieldUpdater.newUpdater(ChannelOperations.class,
            Subscription.class,
            "outboundSubscription");
}
* Copyright (c) 2011-2017 Pivotal Software Inc, All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactor.ipc.netty.channel;
import java.net.InetSocketAddress;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import io.netty.channel.Channel;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.socket.DatagramChannel;
import io.netty.channel.socket.SocketChannel;
import io.netty.util.Attribute;
import io.netty.util.AttributeKey;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscription;
import reactor.core.CoreSubscriber;
import reactor.core.publisher.DirectProcessor;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Operators;
import reactor.ipc.netty.NettyConnector;
import reactor.ipc.netty.NettyContext;
import reactor.ipc.netty.NettyInbound;
import reactor.ipc.netty.NettyOutbound;
import reactor.ipc.netty.NettyPipeline;
import reactor.util.Logger;
import reactor.util.Loggers;
import reactor.util.context.Context;
/**
* A bridge between an immutable {@link Channel} and {@link NettyInbound} /
* {@link NettyOutbound} semantics exposed to user
* {@link NettyConnector#newHandler(BiFunction)}
*
* @author Stephane Maldini
* @since 0.6
*/
public class ChannelOperations<INBOUND extends NettyInbound, OUTBOUND extends NettyOutbound>
implements NettyInbound, NettyOutbound, NettyContext, CoreSubscriber<Void> {
/**
* Create a new {@link ChannelOperations} attached to the {@link Channel} attribute
* {@link #OPERATIONS_KEY}.
* Attach the {@link NettyPipeline#ReactiveBridge} handle.
*
* @param channel the new {@link Channel} connection
* @param handler the user-provided {@link BiFunction} i/o handler
* @param context the dispose callback
* @param <INBOUND> the {@link NettyInbound} type
* @param <OUTBOUND> the {@link NettyOutbound} type
*
* @return the created {@link ChannelOperations} bridge
*/
public static <INBOUND extends NettyInbound, OUTBOUND extends NettyOutbound> ChannelOperations<INBOUND, OUTBOUND> bind(
Channel channel,
BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> handler,
ContextHandler<?> context) {
@SuppressWarnings("unchecked") ChannelOperations<INBOUND, OUTBOUND> ops =
new ChannelOperations<>(channel, handler, context);
return ops;
}
/**
* Return a Noop {@link BiFunction} handler
*
* @param <INBOUND> reified inbound type
* @param <OUTBOUND> reified outbound type
*
* @return a Noop {@link BiFunction} handler
*/
@SuppressWarnings("unchecked")
public static <INBOUND extends NettyInbound, OUTBOUND extends NettyOutbound> BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> noopHandler() {
return PING;
}
/**
* Return the current {@link Channel} bound
* {@link ChannelOperations} or null if none
*
* @param ch the current {@link Channel}
*
* @return the current {@link Channel} bound
* {@link ChannelOperations} or null if none
*/
public static ChannelOperations<?, ?> get(Channel ch) {
return ch.attr(OPERATIONS_KEY)
.get();
}
static ChannelOperations<?, ?> tryGetAndSet(Channel ch, ChannelOperations<?, ?> ops) {
Attribute<ChannelOperations> attr = ch.attr(ChannelOperations.OPERATIONS_KEY);
for (; ; ) {
ChannelOperations<?, ?> op = attr.get();
if (op != null) {
return op;
}
if (attr.compareAndSet(null, ops)) {
return null;
}
}
}
final BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>>
handler;
final Channel channel;
final FluxReceive inbound;
final DirectProcessor<Void> onInactive;
final ContextHandler<?> context;
@SuppressWarnings("unchecked")
volatile Subscription outboundSubscription;
protected ChannelOperations(Channel channel,
ChannelOperations<INBOUND, OUTBOUND> replaced) {
this(channel, replaced.handler, replaced.context, replaced.onInactive);
}
protected ChannelOperations(Channel channel,
BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> handler,
ContextHandler<?> context) {
this(channel, handler, context, DirectProcessor.create());
}
protected ChannelOperations(Channel channel,
BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> handler,
ContextHandler<?> context, DirectProcessor<Void> processor) {
this.handler = Objects.requireNonNull(handler, "handler");
this.channel = Objects.requireNonNull(channel, "channel");
this.context = Objects.requireNonNull(context, "context");
this.inbound = new FluxReceive(this);
this.onInactive = processor;
Mono.fromDirect(context.onCloseOrRelease(channel))
.subscribe(onInactive);
}
@Override
public InetSocketAddress address() {
Channel c = channel();
if (c instanceof SocketChannel) {
return ((SocketChannel) c).remoteAddress();
}
if (c instanceof DatagramChannel) {
return ((DatagramChannel) c).localAddress();
}
throw new IllegalStateException("Does not have an InetSocketAddress");
}
@Override
public final Channel channel() {
return channel;
}
@Override
public final NettyContext context() {
return this;
}
@Override
public ChannelOperations<INBOUND, OUTBOUND> context(Consumer<NettyContext> contextCallback) {
contextCallback.accept(context());
return this;
}
@Override
public void dispose() {
inbound.cancel();
channel.close();
}
@Override
public final boolean isDisposed() {
return get(channel()) != this;
}
@Override
public final Mono<Void> onClose() {
return Mono.fromDirect(onInactive);
}
@Override
public NettyContext onClose(final Runnable onClose) {
onInactive.subscribe(null, e -> onClose.run(), onClose);
return this;
}
@Override
public final void onComplete() {
Subscription s =
OUTBOUND_CLOSE.getAndSet(this, Operators.cancelledSubscription());
if (s == Operators.cancelledSubscription() || isDisposed()) {
return;
}
onOutboundComplete();
}
@Override
public final void onError(Throwable t) {
Subscription s =
OUTBOUND_CLOSE.getAndSet(this, Operators.cancelledSubscription());
if (s == Operators.cancelledSubscription() || isDisposed()) {
if(log.isDebugEnabled()){
log.error("An outbound error could not be processed", t);
}
return;
}
onOutboundError(t);
}
@Override
public final void onNext(Void aVoid) {
}
@Override
public final void onSubscribe(Subscription s) {
if (Operators.setOnce(OUTBOUND_CLOSE, this, s)) {
s.request(Long.MAX_VALUE);
}
}
@Override
public Flux<?> receiveObject() {
return inbound;
}
@Override
public final InetSocketAddress remoteAddress() {
return (InetSocketAddress) channel.remoteAddress();
}
@Override
public String toString() {
return channel.toString();
}
/**
* Return true if inbound traffic is not expected anymore
*
* @return true if inbound traffic is not expected anymore
*/
protected final boolean isInboundDone() {
return inbound.inboundDone || !channel.isActive();
}
/**
* Return true if inbound traffic is not expected anymore
*
* @return true if inbound traffic is not expected anymore
*/
protected final boolean isInboundCancelled() {
return inbound.isCancelled() || !channel.isActive();
}
/**
* Return true if inbound traffic is not expected anymore
*
* @return true if inbound traffic is not expected anymore
*/
protected final boolean isOutboundDone() {
return outboundSubscription == Operators.cancelledSubscription() || !channel.isActive();
}
protected boolean shouldEmitEmptyContext() {
return false;
}
/**
* Connector handler provided by user
*
* @return Connector handler provided by user
*/
protected final BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>> handler() {
return handler;
}
/**
* React on input initialization
*
*/
@SuppressWarnings("unchecked")
protected void onHandlerStart() {
applyHandler();
context.fireContextActive(this);
}
/**
* React on inbound {@link Channel#read}
*
* @param ctx the context
* @param msg the read payload
*/
protected void onInboundNext(ChannelHandlerContext ctx, Object msg) {
if (msg == null) {
onInboundError(new NullPointerException("msg is null"));
return;
}
inbound.onInboundNext(msg);
}
/**
* Replace and complete previous operation inbound
*
* @param ops a new operations
*
* @return true if replaced
*/
protected final boolean replace(ChannelOperations<?, ?> ops) {
return channel.attr(OPERATIONS_KEY)
.compareAndSet(this, ops);
}
/**
* React on inbound cancel (receive() subscriber cancelled)
*/
protected void onInboundCancel() {
}
/**
* React on inbound completion (last packet)
*/
protected void onInboundComplete() {
if (inbound.onInboundComplete()) {
context.fireContextActive(this);
}
}
/**
* React on inbound/outbound completion (last packet)
*/
protected void onOutboundComplete() {
if (log.isDebugEnabled()) {
log.debug("[{}] {} User Handler requesting close connection", formatName(), channel());
}
markPersistent(false);
onHandlerTerminate();
}
/**
* React on inbound/outbound error
*
* @param err the {@link Throwable} cause
*/
protected void onOutboundError(Throwable err) {
discreteRemoteClose(err);
markPersistent(false);
onHandlerTerminate();
}
/**
* Apply the user-provided {@link NettyConnector} handler
*/
@SuppressWarnings("unchecked")
protected final void applyHandler() {
// channel.pipeline()
// .fireUserEventTriggered(NettyPipeline.handlerStartedEvent());
if (log.isDebugEnabled()) {
log.debug("[{}] {} handler is being applied: {}", formatName(), channel
(), handler);
}
Mono.fromDirect(handler.apply((INBOUND) this, (OUTBOUND) this))
.subscribe(this);
}
/**
* Try filtering out remote close unless traced, return true if filtered
*
* @param err the error to check
*
* @return true if filtered
*/
protected final boolean discreteRemoteClose(Throwable err) {
if (AbortedException.isConnectionReset(err)) {
if (log.isDebugEnabled()) {
log.debug("{} [{}] Connection closed remotely", channel.toString(),
formatName(),
err);
}
return true;
}
log.error("[" + formatName() + "] Error processing connection. Requesting close the channel",
err);
return false;
}
/**
* Final release/close (last packet)
*/
protected final void onHandlerTerminate() {
if (replace(null)) {
if(log.isTraceEnabled()){
log.trace("{} Disposing ChannelOperation from a channel", channel(), new Exception
("ChannelOperation terminal stack"));
}
try {
Operators.terminate(OUTBOUND_CLOSE, this);
onInactive.onComplete(); //signal senders and other interests
onInboundComplete(); // signal receiver
}
finally {
channel.pipeline()
.fireUserEventTriggered(NettyPipeline.handlerTerminatedEvent());
}
}
}
/**
* React on inbound error
*
* @param err the {@link Throwable} cause
*/
protected final void onInboundError(Throwable err) {
discreteRemoteClose(err);
if (inbound.onInboundError(err)) {
context.fireContextError(err);
}
}
/**
* Return the available parent {@link ContextHandler} for user-facing lifecycle
* handling
*
* @return the available parent {@link ContextHandler}for user-facing lifecycle
* handling
*/
protected final ContextHandler<?> parentContext() {
return context;
}
/**
* Return formatted name of this operation
*
* @return formatted name of this operation
*/
protected final String formatName() {
return getClass().getSimpleName()
.replace("Operations", "");
}
@Override
public Context currentContext() {
return context.sink.currentContext();
}
/**
* A {@link ChannelOperations} factory
*/
@FunctionalInterface
public interface OnNew<CHANNEL extends Channel> {
/**
* Create a new {@link ChannelOperations} given a netty channel, a parent
* {@link ContextHandler} and an optional message (nullable).
*
* @param c a {@link Channel}
* @param contextHandler a {@link ContextHandler}
* @param msg an optional message
*
* @return a new {@link ChannelOperations}
*/
ChannelOperations<?, ?> create(CHANNEL c, ContextHandler<?> contextHandler, Object msg);
}
/**
* The attribute in {@link Channel} to store the current {@link ChannelOperations}
*/
protected static final AttributeKey<ChannelOperations> OPERATIONS_KEY = AttributeKey.newInstance("nettyOperations");
static final Logger log = Loggers.getLogger(ChannelOperations.class);
static final BiFunction PING = (i, o) -> Flux.empty();
static final AtomicReferenceFieldUpdater<ChannelOperations, Subscription>
OUTBOUND_CLOSE = AtomicReferenceFieldUpdater.newUpdater(ChannelOperations.class,
Subscription.class,
"outboundSubscription");
} | rework #176 with a simpler iteration until further API refining
| src/main/java/reactor/ipc/netty/channel/ChannelOperations.java | rework #176 with a simpler iteration until further API refining | <ide><path>rc/main/java/reactor/ipc/netty/channel/ChannelOperations.java
<ide> }
<ide> }
<ide>
<del> final BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>>
<del> handler;
<del> final Channel channel;
<del> final FluxReceive inbound;
<del> final DirectProcessor<Void> onInactive;
<del> final ContextHandler<?> context;
<add> final BiFunction<? super INBOUND, ? super OUTBOUND, ? extends Publisher<Void>>
<add> handler;
<add> final Channel channel;
<add> final FluxReceive inbound;
<add> final DirectProcessor<Void> onInactive;
<add> final ContextHandler<?> context;
<ide> @SuppressWarnings("unchecked")
<del> volatile Subscription outboundSubscription;
<add> volatile Subscription outboundSubscription;
<ide> protected ChannelOperations(Channel channel,
<ide> ChannelOperations<INBOUND, OUTBOUND> replaced) {
<ide> this(channel, replaced.handler, replaced.context, replaced.onInactive);
<ide> this.context = Objects.requireNonNull(context, "context");
<ide> this.inbound = new FluxReceive(this);
<ide> this.onInactive = processor;
<add> Subscription[] _s = new Subscription[1];
<ide> Mono.fromDirect(context.onCloseOrRelease(channel))
<add> .doOnSubscribe(s -> _s[0] = s)
<ide> .subscribe(onInactive);
<add>
<add> if(_s[0] != null) { //remove closeFuture listener ref by onCloseOrRelease
<add> // subscription when onInactive is called for any reason from
<add> // onHandlerTerminate
<add> onInactive.subscribe(null, null, _s[0]::cancel);
<add> }
<ide> }
<ide>
<ide> @Override |
|
Java | mit | e47e0235ba9509c5e6a5749c99efe79bc0d61efa | 0 | drexel-cs451-rbbtd/apologies | package edu.drexel.cs451_rbbtd.apologies.gui;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.ArrayList;
public class PlayerSetup extends JFrame {
private JFrame mainFrame;
private JPanel controlPanel;
private JButton start;
private JButton cancel;
private JCheckBox checkRed;
private JCheckBox checkYellow;
private JCheckBox checkGreen;
JCheckBox checkBlue;
private JRadioButton firstRed;
private JRadioButton firstYellow;
private JRadioButton firstGreen;
private JRadioButton firstBlue;
private ButtonGroup radioGroup;
private ArrayList<JCheckBox> checkGroup = new ArrayList<JCheckBox>();
private ArrayList<JRadioButton> rads = new ArrayList<JRadioButton>();
private ArrayList<JTextField> names = new ArrayList<JTextField>();
private JTextField nameRed;
private JTextField nameYellow;
private JTextField nameGreen;
private JTextField nameBlue;
private PlayerSetupController c = new PlayerSetupController();
public PlayerSetup() {
prepareGUI();
setupButtons();
addComponents();
}
private void prepareGUI() {
this.setTitle("Player Setup");
this.setSize(500,375);
this.setResizable(false);
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
controlPanel = new JPanel();
controlPanel.setLayout(new GridLayout(6, 1));
this.add(controlPanel);
this.setVisible(true);
}
private void setupButtons(){
start = new JButton("Start");
cancel = new JButton("Cancel");
start.setPreferredSize(new Dimension(150,45));
cancel.setPreferredSize(new Dimension(150,45));
start.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
Boolean isSetup = c.check(checkGroup, rads, names);
if (isSetup) {
ArrayList<PlayerColor> colors = c.getChecked(checkGroup);
new Apologies(colors);
dispose();
}
}});
cancel.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
clearForm();
dispose();
}});
//initialize check boxes and add to groups
checkRed = new JCheckBox();
checkYellow = new JCheckBox();
checkGreen = new JCheckBox();
checkBlue = new JCheckBox();
checkGroup.add(checkRed);
checkGroup.add(checkYellow);
checkGroup.add(checkGreen);
checkGroup.add(checkBlue);
//initialize radio buttons and add to groups
firstRed = new JRadioButton();
firstYellow = new JRadioButton();
firstGreen = new JRadioButton();
firstBlue = new JRadioButton();
rads.add(firstRed);
rads.add(firstYellow);
rads.add(firstGreen);
rads.add(firstBlue);
radioGroup = new ButtonGroup();
radioGroup.add(firstRed);
radioGroup.add(firstYellow);
radioGroup.add(firstGreen);
radioGroup.add(firstBlue);
}
private JPanel addLayout(JCheckBox check, JRadioButton radio, Color color, JTextField text) {
JPanel p = new JPanel();
p.setLayout(new FlowLayout(FlowLayout.LEADING, 50, 20));
p.add(check);
p.add(radio);
JLabel label = new JLabel(" ");
label.setOpaque(true);
label.setBackground(color);
p.add(label);
p.add(text);
return p;
}
private JPanel addLabels() {
JPanel panel = new JPanel();
panel.setLayout(new FlowLayout(FlowLayout.LEADING, 45, 20));
panel.add(new JLabel("Playing"));
panel.add(new JLabel("First"));
panel.add(new JLabel("Color"));
panel.add(new JLabel("Name"));
return panel;
}
private JPanel addButtons() {
JPanel panel = new JPanel();
panel.setLayout(new FlowLayout(FlowLayout.CENTER, 75, 0));
panel.add(start);
panel.add(cancel);
return panel;
}
private void addComponents() {
nameRed = new JTextField(15);
nameYellow = new JTextField(15);
nameGreen = new JTextField(15);
nameBlue = new JTextField(15);
names.add(nameRed);
names.add(nameYellow);
names.add(nameGreen);
names.add(nameBlue);
controlPanel.add(addLabels());
controlPanel.add(addLayout(checkRed, firstRed, Color.RED, nameRed));
controlPanel.add(addLayout(checkYellow, firstYellow, Color.YELLOW, nameYellow));
controlPanel.add(addLayout(checkGreen, firstGreen, Color.GREEN, nameGreen));
controlPanel.add(addLayout(checkBlue, firstBlue, Color.BLUE, nameBlue));
controlPanel.add(addButtons());
this.setVisible(true);
}
private void clearForm() {
// clear checkboxes
for (JCheckBox check : checkGroup) {
check.setSelected(false);
}
// clear text fields
for (JTextField name : names) {
name.setText("");
}
// clear radio buttons
radioGroup.clearSelection();
}
}
| src/main/java/edu/drexel/cs451_rbbtd/apologies/gui/PlayerSetup.java | package edu.drexel.cs451_rbbtd.apologies.gui;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import java.util.ArrayList;
public class PlayerSetup extends JFrame {
private JFrame mainFrame;
private JPanel controlPanel;
private JButton start;
private JButton cancel;
private JCheckBox checkRed;
private JCheckBox checkYellow;
private JCheckBox checkGreen;
JCheckBox checkBlue;
private JRadioButton firstRed;
private JRadioButton firstYellow;
private JRadioButton firstGreen;
private JRadioButton firstBlue;
private ButtonGroup radioGroup;
private ArrayList<JCheckBox> checkGroup = new ArrayList<JCheckBox>();
private ArrayList<JRadioButton> rads = new ArrayList<JRadioButton>();
private ArrayList<JTextField> names = new ArrayList<JTextField>();
private JTextField nameRed;
private JTextField nameYellow;
private JTextField nameGreen;
private JTextField nameBlue;
private PlayerSetupController c = new PlayerSetupController();
public PlayerSetup() {
prepareGUI();
setupButtons();
addComponents();
}
private void prepareGUI() {
this.setTitle("Player Setup");
this.setSize(500,375);
this.setResizable(false);
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
controlPanel = new JPanel();
controlPanel.setLayout(new GridLayout(6, 1));
this.add(controlPanel);
this.setVisible(true);
}
private void setupButtons(){
start = new JButton("Start");
cancel = new JButton("Cancel");
start.setPreferredSize(new Dimension(150,45));
cancel.setPreferredSize(new Dimension(150,45));
start.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
System.out.println("START");
System.out.println(c.check(checkGroup, rads, names));
}});
cancel.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
clearForm();
dispose();
System.out.println("CANCEL");
}});
//initialize check boxes and add to groups
checkRed = new JCheckBox();
checkYellow = new JCheckBox();
checkGreen = new JCheckBox();
checkBlue = new JCheckBox();
checkGroup.add(checkRed);
checkGroup.add(checkYellow);
checkGroup.add(checkGreen);
checkGroup.add(checkBlue);
//initialize radio buttons and add to groups
firstRed = new JRadioButton();
firstYellow = new JRadioButton();
firstGreen = new JRadioButton();
firstBlue = new JRadioButton();
rads.add(firstRed);
rads.add(firstYellow);
rads.add(firstGreen);
rads.add(firstBlue);
radioGroup = new ButtonGroup();
radioGroup.add(firstRed);
radioGroup.add(firstYellow);
radioGroup.add(firstGreen);
radioGroup.add(firstBlue);
}
private JPanel addLayout(JCheckBox check, JRadioButton radio, Color color, JTextField text) {
JPanel p = new JPanel();
p.setLayout(new FlowLayout(FlowLayout.LEADING, 50, 20));
p.add(check);
p.add(radio);
JLabel label = new JLabel(" ");
label.setOpaque(true);
label.setBackground(color);
p.add(label);
p.add(text);
return p;
}
private JPanel addLabels() {
JPanel panel = new JPanel();
panel.setLayout(new FlowLayout(FlowLayout.LEADING, 45, 20));
panel.add(new JLabel("Playing"));
panel.add(new JLabel("First"));
panel.add(new JLabel("Color"));
panel.add(new JLabel("Name"));
return panel;
}
private JPanel addButtons() {
JPanel panel = new JPanel();
panel.setLayout(new FlowLayout(FlowLayout.CENTER, 75, 0));
panel.add(start);
panel.add(cancel);
return panel;
}
private void addComponents() {
nameRed = new JTextField(15);
nameYellow = new JTextField(15);
nameGreen = new JTextField(15);
nameBlue = new JTextField(15);
names.add(nameRed);
names.add(nameYellow);
names.add(nameGreen);
names.add(nameBlue);
controlPanel.add(addLabels());
controlPanel.add(addLayout(checkRed, firstRed, Color.RED, nameRed));
controlPanel.add(addLayout(checkYellow, firstYellow, Color.YELLOW, nameYellow));
controlPanel.add(addLayout(checkGreen, firstGreen, Color.GREEN, nameGreen));
controlPanel.add(addLayout(checkBlue, firstBlue, Color.BLUE, nameBlue));
controlPanel.add(addButtons());
this.setVisible(true);
}
private void clearForm() {
// clear checkboxes
for (JCheckBox check : checkGroup) {
check.setSelected(false);
}
// clear text fields
for (JTextField name : names) {
name.setText("");
}
// clear radio buttons
radioGroup.clearSelection();
}
}
| modify start button so it launches the game board screen with selected pawns
| src/main/java/edu/drexel/cs451_rbbtd/apologies/gui/PlayerSetup.java | modify start button so it launches the game board screen with selected pawns | <ide><path>rc/main/java/edu/drexel/cs451_rbbtd/apologies/gui/PlayerSetup.java
<ide>
<ide> start.addActionListener(new ActionListener() {
<ide> public void actionPerformed(ActionEvent e) {
<del>
<del> System.out.println("START");
<del> System.out.println(c.check(checkGroup, rads, names));
<add> Boolean isSetup = c.check(checkGroup, rads, names);
<add> if (isSetup) {
<add> ArrayList<PlayerColor> colors = c.getChecked(checkGroup);
<add> new Apologies(colors);
<add> dispose();
<add> }
<ide> }});
<ide>
<ide> cancel.addActionListener(new ActionListener() {
<ide> public void actionPerformed(ActionEvent e) {
<ide> clearForm();
<ide> dispose();
<del> System.out.println("CANCEL");
<ide> }});
<ide>
<ide> //initialize check boxes and add to groups |
|
JavaScript | mit | e23120f24dc89a6401b58cb02679df8534d11057 | 0 | ritchie46/M-N-Kappa,ritchie46/M-N-Kappa | 'use strict';
var DEBUG = false;
// mkap namespace
var mkap = (function () {
//class
function MomentKappa(cross_section, compressive_diagram, tensile_diagram) {
this.cross_section = cross_section;
this.compressive_diagram = compressive_diagram;
this.tensile_diagram = tensile_diagram;
// sum of the forces in the cross section
this.force_tensile = 0;
this.force_compression = 0;
this.normal_force = 0;
/**
rebar
*/
this.rebar_As = [];
// distance rebar from the bottom of the master cross section
this.rebar_z = [];
// objects from the StressStrain class
this.rebar_diagram = [];
// phased rebar
this.m0 = [];
this.rebar_strain0 = [];
this.rebar_diam = null; // for the plotter
// Applied at t=0. 'werkvoorspanning'
this.prestress = [];
// Stress and strain in the reinforcement after Mp has been applied and the deformation is zero.
this.d_stress = [];
this.d_strain = [];
this.mp = 0;
this.original_rebar_diagrams = [];
/**
results
*/
this.solution = null;
this.rebar_force = [];
this.rebar_strain = [];
this.stress = [];
this.moment = null;
this.kappa = null;
this.strain_top = null;
this.strain_btm = null;
this.zero_line = null ; // xu is height - zero line
this.xu = null;
// solver settings
this.iterations = 120;
this.div = 4;
this.reduce_rebar = false
}
MomentKappa.prototype.det_force_distribution = function (strain_top, strain_btm, reduce_rebar) {
this.force_compression = 0;
this.force_tensile = 0;
this.stress = [];
this.rebar_strain = [];
this.strain_top = strain_top;
this.strain_btm = strain_btm;
// default parameter
reduce_rebar = (typeof reduce_rebar !== "undefined") ? reduce_rebar : false;
this.reduce_rebar = reduce_rebar;
if (this.normal_force < 0) {
this.force_tensile += Math.abs(this.normal_force)
}
else {
this.force_compression += Math.abs(this.normal_force)
}
// height of the sections
var dh = this.cross_section.y_val[1];
//cross section
var crs_btm = this.cross_section.y_val[0];
var crs_top = this.cross_section.y_val[this.cross_section.y_val.length - 1];
// iterate over the y-axis of the master cross section and determine the stresses.
// y-axis starts at bottom.
for (var i = 0; i < this.cross_section.y_val.length; i++) {
// interpolate the strain at this y-value
var strain_y = std.interpolate(crs_btm, strain_btm,
crs_top, strain_top, this.cross_section.y_val[i]);
// Send the strain value as parameter in the stress strain diagram
if (strain_y < 0) {
stress = -this.compressive_diagram.det_stress(Math.abs(strain_y));
this.force_compression -= stress * this.cross_section.width_array[i] * dh
}
else {
stress = this.tensile_diagram.det_stress(strain_y);
this.force_tensile += stress* this.cross_section.width_array[i] * dh
}
this.stress.push(stress)
}
// determine reinforcement forces
this.rebar_force = [];
for (i = 0; i < this.rebar_As.length; i++) {
var strain = std.interpolate(crs_btm, strain_btm, crs_top, strain_top, this.rebar_z[i]);
this.rebar_strain.push(strain + this.d_strain[i]);
var stress = this.rebar_diagram[i].det_stress(Math.abs(strain));
// absolute value
var force = this.rebar_As[i] * stress;
var stress_reduct;
if (strain < 0 && this.prestress[i] == 0) {
this.force_compression += force;
this.rebar_force.push(-force);
if (reduce_rebar) {
// Subtract reinforcement area from master element
stress_reduct = this.compressive_diagram.det_stress(Math.abs(strain));
this.force_compression -= this.rebar_As[i] * stress_reduct
}
}
else {
this.force_tensile += force;
this.rebar_force.push(force);
if (reduce_rebar) {
// Subtract reinforcement area from master element
stress_reduct = this.tensile_diagram.det_stress(strain);
this.force_tensile -= this.rebar_As[i] * stress_reduct
}
}
}
};
MomentKappa.prototype.solver = function (strain_top, strain, print) {
/**
* Return the .det_stress method several times and adapt the input until the convergence criteria is met.
* @param strain_top: (bool) Constant strain at the top.
* If the strain_top == true, the strain at the top will remain constant and the strain at the bottom will
* be iterated over. If false vice versa for strain_bottom.
*/
// default parameter
strain_top = (typeof strain_top !== "undefined") ? strain_top : true;
print = (typeof print !== "undefined") ? print : true;
this.solution = false;
if (this.normal_force != 0) {
this.div = 2.5;
this.iterations = 150
}
else {
this.div = 2.5;
this.iterations = 120
}
// first iteration
var btm_str = strain;
var top_str = -strain;
this.det_force_distribution(top_str, btm_str);
var count = 0;
var factor;
if (strain_top) { // top strain remains constant
// iterate until the convergence criteria is met
while (1) {
if (std.convergence_conditions(this.force_compression, this.force_tensile)) {
this.solution = true;
if (print) {
if (window.DEBUG) {
console.log("convergence after %s iterations".replace("%s", count))
}
}
break
}
// if the rebar is above the zero line, there will sometimes be no tensile force
var low = Math.min.apply(null, this.rebar_z);
for (var i = 0; i < this.rebar_As.length; i++) {
if (this.rebar_z[i] == low) {
var str_rbr = this.rebar_strain[i];
var rbr_index = i
}
}
if (this.force_tensile === 0 && str_rbr <= 0) {
// Extrapolate the from the first significant rebar strain point, to the bottom strain.
// Needed when the rebar is above the neutral line.
btm_str = std.interpolate(this.cross_section.top, top_str, low, this.rebar_diagram[rbr_index].strain[1], this.cross_section.bottom)
}
else if (isNaN(this.force_tensile)) {
btm_str = std.interpolate(this.cross_section.top, top_str, low, this.rebar_diagram[rbr_index].strain[1], this.cross_section.bottom)
}
else {
factor = std.convergence(this.force_tensile, this.force_compression, this.div);
btm_str = btm_str * factor;
}
this.det_force_distribution(top_str, btm_str);
if (count > this.iterations) {
if (print) {
if (window.DEBUG) {
console.log("no convergence found after %s iterations".replace("%s", count))
}
}
break
}
count += 1
}
}
else { // bottom strain remains constant
// iterate until the convergence criteria is met
while (1) {
if (std.convergence_conditions(this.force_compression, this.force_tensile)) {
this.solution = true;
if (print) {
if (window.DEBUG) {
console.log("convergence after %s iterations".replace("%s", count))
}
}
break
}
factor = std.convergence(this.force_compression, this.force_tensile, this.div);
top_str = top_str * factor;
this.det_force_distribution(top_str, btm_str);
if (count > this.iterations) {
if (print) {
if (window.DEBUG) {
console.log("no convergence found after %s iterations".replace("%s", count))
}
}
break
}
count += 1
}
}
this.zero_line = std.interpolate(this.strain_top, this.cross_section.top, this.strain_btm,
this.cross_section.bottom, 0);
};
MomentKappa.prototype.det_m_kappa = function () {
/**
Determines the moment and kappa values.
For each sections center of gravity the moment around the origin is determined.
______ <---- - F compression
| |
| | |y
| | |
|_____| ----> + F tensile |0____x
*/
// center of gravity offset of a section
this.kappa = (-this.strain_top + this.strain_btm) / (this.cross_section.top - this.cross_section.bottom); //this.strain_btm / (this.zero_line - this.cross_section.bottom)
this.moment = this.mp;
var offset = this.cross_section.y_val[1] * 0.5;
// height of the sections
var dh = this.cross_section.y_val[1];
for (var i = 0; i < this.cross_section.y_val.length; i++) {
var arm = this.cross_section.y_val[i] + offset;
var force = this.stress[i] * this.cross_section.width_array[i] * dh;
this.moment += arm * force;
}
// N normal force share
this.moment -= this.normal_force * (this.cross_section.top - this.cross_section.bottom) * 0.5;
// rebar share
for (i = 0; i < this.rebar_As.length; i++) {
this.moment += this.rebar_force[i] * this.rebar_z[i];
// reduction of master cross section at place of rebar
if (this.reduce_rebar) {
if (this.rebar_force[i] > 0) { // tensile stress
var stress_reduct = this.tensile_diagram.det_stress(this.rebar_strain[i]);
//this.moment -= stress_reduct * this.rebar_As[i] * this.rebar_z[i]
}
else { // compression stress
stress_reduct = -this.compressive_diagram.det_stress(Math.abs(this.rebar_strain[i]));
//this.moment -= stress_reduct * this.rebar_As[i] * this.rebar_z[i]
}
}
}
// zero line
this.zero_line = std.interpolate(this.strain_btm, this.cross_section.bottom, this.strain_top, this.cross_section.top, 0);
this.xu = this.cross_section.top - this.zero_line
};
MomentKappa.prototype.validity = function () {
if (std.is_number(this.moment)
&& std.is_number(this.kappa)
&& this.solution
&& this.strain_top >= -this.compressive_diagram.strain[this.compressive_diagram.strain.length - 1]
&& this.strain_top < 0
) {
var valid = true;
// for (var i in this.rebar_strain) {
// if (this.rebar_strain[i] > Math.max.apply(null, this.rebar_diagram[i].strain)) {
// valid = false;
// }
// }
return valid
}
else {
return false
}
};
//end class
//class
function StressStrain(strain, stress) {
/**
Class for creating stress strain diagrams.
*/
/// <param name="strain" type="array">Contains strain values corresponding with this.stress</param>
/// <param name="stress" type="array">Contains stress values corresponding with this.strain</param>
this.strain = strain;
this.stress = stress
}
StressStrain.prototype.det_stress = function (strain) {
/**
* @param strain: (float) Strain for which the stress needs to be determined.
* Iterate through the strain array until iterated value exceeds the requested strain.
* At that the point the two values will interpolated.
*/
for (var i = 0; i < this.strain.length; i++) {
if (strain > this.strain[this.strain.length - 1]) {
return 0;
}
else if (this.strain[i] == strain) {
return this.stress[i]
}
else if (this.strain[i] > strain) {
return std.interpolate(this.strain[i - 1], this.stress[i - 1],
this.strain[i], this.stress[i], strain);
}
}
};
StressStrain.prototype.det_strain = function (stress) {
/**
* @param stress: (float) Strain for which the stress needs to be determined.
* Iterate through the stress array until iterated value exceeds the requested strain.
* At that the point the two values will interpolated.
*/
for (var i = 0; i < this.stress.length; i++) {
if (stress > this.stress[this.stress.length - 1]) {
return 0;
}
else if (this.stress[i] == stress) {
return this.strain[i]
}
else if (this.stress[i] > stress) {
return std.interpolate(this.stress[i - 1], this.strain[i - 1],
this.stress[i], this.strain[i], stress);
}
}
};
//end class
return { MomentKappa: MomentKappa,
StressStrain: StressStrain
}
})(); // mkap namespace | M-N-Kappa/js/moment_kappa.js | 'use strict';
var DEBUG = false;
// mkap namespace
var mkap = (function () {
//class
function MomentKappa(cross_section, compressive_diagram, tensile_diagram) {
this.cross_section = cross_section;
this.compressive_diagram = compressive_diagram;
this.tensile_diagram = tensile_diagram;
// sum of the forces in the cross section
this.force_tensile = 0;
this.force_compression = 0;
this.normal_force = 0;
/**
rebar
*/
this.rebar_As = [];
// distance rebar from the bottom of the master cross section
this.rebar_z = [];
// objects from the StressStrain class
this.rebar_diagram = [];
// phased rebar
this.m0 = [];
this.rebar_strain0 = [];
this.rebar_diam = null; // for the plotter
// Applied at t=0. 'werkvoorspanning'
this.prestress = [];
// Stress and strain in the reinforcement after Mp has been applied and the deformation is zero.
this.d_stress = [];
this.d_strain = [];
this.mp = 0;
this.original_rebar_diagrams = [];
/**
results
*/
this.solution = null;
this.rebar_force = [];
this.rebar_strain = [];
this.stress = [];
this.moment = null;
this.kappa = null;
this.strain_top = null;
this.strain_btm = null;
this.zero_line = null ; // xu is height - zero line
this.xu = null;
// solver settings
this.iterations = 120;
this.div = 4;
this.reduce_rebar = false
}
MomentKappa.prototype.det_force_distribution = function (strain_top, strain_btm, reduce_rebar) {
this.force_compression = 0;
this.force_tensile = 0;
this.stress = [];
this.rebar_strain = [];
this.strain_top = strain_top;
this.strain_btm = strain_btm;
// default parameter
reduce_rebar = (typeof reduce_rebar !== "undefined") ? reduce_rebar : false;
this.reduce_rebar = reduce_rebar;
if (this.normal_force < 0) {
this.force_tensile += Math.abs(this.normal_force)
}
else {
this.force_compression += Math.abs(this.normal_force)
}
// height of the sections
var dh = this.cross_section.y_val[1];
//cross section
var crs_btm = this.cross_section.y_val[0];
var crs_top = this.cross_section.y_val[this.cross_section.y_val.length - 1];
// iterate over the y-axis of the master cross section and determine the stresses.
// y-axis starts at bottom.
for (var i = 0; i < this.cross_section.y_val.length; i++) {
// interpolate the strain at this y-value
var strain_y = std.interpolate(crs_btm, strain_btm,
crs_top, strain_top, this.cross_section.y_val[i]);
// Send the strain value as parameter in the stress strain diagram
if (strain_y < 0) {
stress = -this.compressive_diagram.det_stress(Math.abs(strain_y));
this.force_compression -= stress * this.cross_section.width_array[i] * dh
}
else {
stress = this.tensile_diagram.det_stress(strain_y);
this.force_tensile += stress* this.cross_section.width_array[i] * dh
}
this.stress.push(stress)
}
// determine reinforcement forces
this.rebar_force = [];
for (i = 0; i < this.rebar_As.length; i++) {
var strain = std.interpolate(crs_btm, strain_btm, crs_top, strain_top, this.rebar_z[i]);
this.rebar_strain.push(strain + this.d_strain[i]);
var stress = this.rebar_diagram[i].det_stress(Math.abs(strain));
// absolute value
var force = this.rebar_As[i] * stress;
var stress_reduct;
if (strain < 0 && this.prestress[i] == 0) {
this.force_compression += force;
this.rebar_force.push(-force);
if (reduce_rebar) {
// Subtract reinforcement area from master element
stress_reduct = this.compressive_diagram.det_stress(Math.abs(strain));
this.force_compression -= this.rebar_As[i] * stress_reduct
}
}
else {
this.force_tensile += force;
this.rebar_force.push(force);
if (reduce_rebar) {
// Subtract reinforcement area from master element
stress_reduct = this.tensile_diagram.det_stress(strain);
this.force_tensile -= this.rebar_As[i] * stress_reduct
}
}
}
};
MomentKappa.prototype.solver = function (strain_top, strain, print) {
/**
Return the .det_stress method several times and adapt the input untill the convergence criteria is met.
/// <param name="strain_top" type="bool">constant strain</param>
If the strain_top == true, the strain at the top will remain constant and the strain at the bottom will be iterated
over. If false vice versa for strain_bottom.
*/
// default parameter
strain_top = (typeof strain_top !== "undefined") ? strain_top : true;
print = (typeof print !== "undefined") ? print : true;
this.solution = false;
if (this.normal_force != 0) {
this.div = 2.5;
this.iterations = 150
}
else {
this.div = 2.5;
this.iterations = 120
}
// first iteration
var btm_str = strain;
var top_str = -strain;
this.det_force_distribution(top_str, btm_str);
var count = 0;
var factor;
if (strain_top) { // top strain remains constant
// iterate until the convergence criteria is met
while (1) {
if (std.convergence_conditions(this.force_compression, this.force_tensile)) {
this.solution = true;
if (print) {
if (window.DEBUG) {
console.log("convergence after %s iterations".replace("%s", count))
}
}
break
}
// if the rebar is above the zero line, there will sometimes be no tensile force
var low = Math.min.apply(null, this.rebar_z);
for (var i = 0; i < this.rebar_As.length; i++) {
if (this.rebar_z[i] == low) {
var str_rbr = this.rebar_strain[i];
var rbr_index = i
}
}
if (this.force_tensile === 0 && str_rbr <= 0) {
// Extrapolate the from the first significant rebar strain point, to the bottom strain.
// Needed when the rebar is above the neutral line.
btm_str = std.interpolate(this.cross_section.top, top_str, low, this.rebar_diagram[rbr_index].strain[1], this.cross_section.bottom)
}
else if (isNaN(this.force_tensile)) {
btm_str = std.interpolate(this.cross_section.top, top_str, low, this.rebar_diagram[rbr_index].strain[1], this.cross_section.bottom)
}
else {
factor = std.convergence(this.force_tensile, this.force_compression, this.div);
btm_str = btm_str * factor;
}
this.det_force_distribution(top_str, btm_str);
if (count > this.iterations) {
if (print) {
if (window.DEBUG) {
console.log("no convergence found after %s iterations".replace("%s", count))
}
}
break
}
count += 1
}
}
else { // bottom strain remains constant
// iterate until the convergence criteria is met
while (1) {
if (std.convergence_conditions(this.force_compression, this.force_tensile)) {
this.solution = true;
if (print) {
if (window.DEBUG) {
console.log("convergence after %s iterations".replace("%s", count))
}
}
break
}
factor = std.convergence(this.force_compression, this.force_tensile, this.div);
top_str = top_str * factor;
this.det_force_distribution(top_str, btm_str);
if (count > this.iterations) {
if (print) {
if (window.DEBUG) {
console.log("no convergence found after %s iterations".replace("%s", count))
}
}
break
}
count += 1
}
}
this.zero_line = std.interpolate(this.strain_top, this.cross_section.top, this.strain_btm,
this.cross_section.bottom, 0);
};
MomentKappa.prototype.det_m_kappa = function () {
/**
Determines the moment and kappa values.
For each sections center of gravity the moment around the origin is determined.
______ <---- - F compression
| |
| | |y
| | |
|_____| ----> + F tensile |0____x
*/
// center of gravity offset of a section
this.kappa = (-this.strain_top + this.strain_btm) / (this.cross_section.top - this.cross_section.bottom); //this.strain_btm / (this.zero_line - this.cross_section.bottom)
this.moment = this.mp;
var offset = this.cross_section.y_val[1] * 0.5;
// height of the sections
var dh = this.cross_section.y_val[1];
for (var i = 0; i < this.cross_section.y_val.length; i++) {
var arm = this.cross_section.y_val[i] + offset;
var force = this.stress[i] * this.cross_section.width_array[i] * dh;
this.moment += arm * force;
}
// N normal force share
this.moment -= this.normal_force * (this.cross_section.top - this.cross_section.bottom) * 0.5;
// rebar share
for (i = 0; i < this.rebar_As.length; i++) {
this.moment += this.rebar_force[i] * this.rebar_z[i];
// reduction of master cross section at place of rebar
if (this.reduce_rebar) {
if (this.rebar_force[i] > 0) { // tensile stress
var stress_reduct = this.tensile_diagram.det_stress(this.rebar_strain[i]);
//this.moment -= stress_reduct * this.rebar_As[i] * this.rebar_z[i]
}
else { // compression stress
stress_reduct = -this.compressive_diagram.det_stress(Math.abs(this.rebar_strain[i]));
//this.moment -= stress_reduct * this.rebar_As[i] * this.rebar_z[i]
}
}
}
// zero line
this.zero_line = std.interpolate(this.strain_btm, this.cross_section.bottom, this.strain_top, this.cross_section.top, 0);
this.xu = this.cross_section.top - this.zero_line
};
MomentKappa.prototype.validity = function () {
if (std.is_number(this.moment)
&& std.is_number(this.kappa)
&& this.solution
&& this.strain_top >= -this.compressive_diagram.strain[this.compressive_diagram.strain.length - 1]
&& this.strain_top < 0
) {
var valid = true;
// for (var i in this.rebar_strain) {
// if (this.rebar_strain[i] > Math.max.apply(null, this.rebar_diagram[i].strain)) {
// valid = false;
// }
// }
return valid
}
else {
return false
}
};
//end class
//class
function StressStrain(strain, stress) {
/**
Class for creating stress strain diagrams.
*/
/// <param name="strain" type="array">Contains strain values corresponding with this.stress</param>
/// <param name="stress" type="array">Contains stress values corresponding with this.strain</param>
this.strain = strain;
this.stress = stress
}
StressStrain.prototype.det_stress = function (strain) {
/**
* @param strain: (float) Strain for which the stress needs to be determined.
* Iterate through the strain array until iterated value exceeds the requested strain.
* At that the point the two values will interpolated.
*/
for (var i = 0; i < this.strain.length; i++) {
if (strain > this.strain[this.strain.length - 1]) {
return 0;
}
else if (this.strain[i] == strain) {
return this.stress[i]
}
else if (this.strain[i] > strain) {
return std.interpolate(this.strain[i - 1], this.stress[i - 1],
this.strain[i], this.stress[i], strain);
}
}
};
StressStrain.prototype.det_strain = function (stress) {
/**
* @param stress: (float) Strain for which the stress needs to be determined.
* Iterate through the stress array until iterated value exceeds the requested strain.
* At that the point the two values will interpolated.
*/
for (var i = 0; i < this.stress.length; i++) {
if (stress > this.stress[this.stress.length - 1]) {
return 0;
}
else if (this.stress[i] == stress) {
return this.strain[i]
}
else if (this.stress[i] > stress) {
return std.interpolate(this.stress[i - 1], this.strain[i - 1],
this.stress[i], this.strain[i], stress);
}
}
};
//end class
return { MomentKappa: MomentKappa,
StressStrain: StressStrain
}
})(); // mkap namespace | set up for solver change
| M-N-Kappa/js/moment_kappa.js | set up for solver change | <ide><path>-N-Kappa/js/moment_kappa.js
<ide>
<ide> MomentKappa.prototype.solver = function (strain_top, strain, print) {
<ide> /**
<del> Return the .det_stress method several times and adapt the input untill the convergence criteria is met.
<del>
<del> /// <param name="strain_top" type="bool">constant strain</param>
<del> If the strain_top == true, the strain at the top will remain constant and the strain at the bottom will be iterated
<del> over. If false vice versa for strain_bottom.
<del> */
<add> * Return the .det_stress method several times and adapt the input until the convergence criteria is met.
<add> * @param strain_top: (bool) Constant strain at the top.
<add> * If the strain_top == true, the strain at the top will remain constant and the strain at the bottom will
<add> * be iterated over. If false vice versa for strain_bottom.
<add> */
<add>
<ide> // default parameter
<ide> strain_top = (typeof strain_top !== "undefined") ? strain_top : true;
<ide> print = (typeof print !== "undefined") ? print : true; |
|
JavaScript | mit | f2b6ef0e43664180922aaaa0abdb2b7f03cf0bbb | 0 | neocotic/jquery-jsonx | /**
* Structure:
*
* element
* = '[' tag-name ',' attributes ',' element-list ']'
* | '[' tag-name ',' attributes ']'
* | '[' tag-name ',' element-list ']'
* | '[' tag-name ']'
* | string
* ;
* tag-name
* = string
* ;
* attributes
* = '{' attribute-list '}'
* | '{' '}'
* ;
* attribute-list
* = attribute ',' attribute-list
* | attribute
* ;
* attribute
* = attribute-name ':' attribute-value
* ;
* attribute-name
* = string
* ;
* attribute-value
* = string
* | number
* | 'true'
* | 'false'
* | 'null'
* ;
* element-list
* = element ',' element-list
* | element
* ;
*
* Usage:
*
* (Object) $(*).jsonx();
* (Object) $.jsonx(*);
* (jQuery) $(*).jsonx('parse', ''|{});
* (jQuery) $.jsonx('parse', ''|{});
* (String) $(*).jsonx('stringify');
* (String) $.jsonx('stringify', {});
* @author <a href="http://github.com/neocotic">Alasdair Mercer</a>
*/
(function ($) {
var methods = {
init: function (value) {
function convertJQuery(obj) {
var ret = [];
obj.each(function () {
var $this = $(this),
attrs = {},
contents = $this.contents(),
i;
ret.push(this.nodeName.toLowerCase());
if (this.attributes.length) {
for (i = 0; i < this.attributes.length; i++) {
attrs[this.attributes[i].name] = this.attributes[i].value;
}
ret.push(attrs);
}
if (contents.length) {
for (i = 0; i < contents.length; i++) {
if (contents[i].nodeType === 3) {
ret.push(contents[i].textContent);
} else {
ret.push(convertJQuery($(contents[i])));
}
}
}
});
return ret;
}
if (typeof value !== 'undefined') {
return convertJQuery($(value));
}
return convertJQuery(this);
},
parse: function (value) {
function convertJsonx(obj, parent) {
var arr = [],
ele = {};
if ($.isArray(obj)) {
if (!obj.length || typeof obj[0] !== 'string') {
throw new SyntaxError('JSONX.parse');
}
ele = $(document.createElement(obj[0]));
if (obj.length > 1) {
if ($.isPlainObject(obj[1])) {
ele.attr(obj[1]);
if (obj.length > 2) {
arr = Array.prototype.slice.call(obj, 2);
}
} else {
arr = Array.prototype.slice.call(obj, 1);
}
if (arr.length) {
convertJsonx(arr, ele);
}
}
} else if (typeof obj === 'string') {
parent.append(obj);
}
return parent;
}
if (typeof value === 'string') {
value = JSON.parse(value);
}
// TODO: Should this be children()?
return convertJsonx(value, $('<x/>')).contents();
},
stringify: function (value) {
if (typeof value === 'undefined') {
value = this.jsonx();
}
return JSON.stringify(value);
}
};
$.fn.jsonx = function (method) {
if (methods[method]) {
return methods[method].apply(this, Array.prototype.slice.call(arguments, 1));
}
return methods.init.apply(this, arguments);
};
})(jQuery); | jquery.jsonx.js | /**
* Structure:
*
*
*
* Usage:
*
* (Object) $(*).jsonx();
* (Object) $.jsonx(*);
* (jQuery) $(*).jsonx('parse', ''|{});
* (jQuery) $.jsonx('parse', ''|{});
* (String) $(*).jsonx('stringify');
* (String) $.jsonx('stringify', {});
*/
(function ($) {
var methods = {
init: function (value) {
var j, obj;
if (typeof value !== 'undefined') {
obj = $(value);
} else {
obj = this;
}
if (obj.length) {
// TODO: Create JSON(x) from jQuery object
j = {};
}
return j;
},
parse: function (value) {
var obj;
if (typeof value === 'string') {
value = JSON.parse(value);
}
// TODO: Create jQuery object from JSON(x)
return obj;
},
stringify: function (value) {
if (typeof value === 'undefined') {
value = this.jsonx();
}
return JSON.stringify(value);
}
};
$.fn.jsonx = function (method) {
if (methods[method]) {
return methods[method].apply(this, Array.prototype.slice.call(arguments, 1));
}
return methods.init.apply(this, arguments);
};
})(jQuery); | first draft logic and code
| jquery.jsonx.js | first draft logic and code | <ide><path>query.jsonx.js
<ide> /**
<ide> * Structure:
<ide> *
<del> *
<add> * element
<add> * = '[' tag-name ',' attributes ',' element-list ']'
<add> * | '[' tag-name ',' attributes ']'
<add> * | '[' tag-name ',' element-list ']'
<add> * | '[' tag-name ']'
<add> * | string
<add> * ;
<add> * tag-name
<add> * = string
<add> * ;
<add> * attributes
<add> * = '{' attribute-list '}'
<add> * | '{' '}'
<add> * ;
<add> * attribute-list
<add> * = attribute ',' attribute-list
<add> * | attribute
<add> * ;
<add> * attribute
<add> * = attribute-name ':' attribute-value
<add> * ;
<add> * attribute-name
<add> * = string
<add> * ;
<add> * attribute-value
<add> * = string
<add> * | number
<add> * | 'true'
<add> * | 'false'
<add> * | 'null'
<add> * ;
<add> * element-list
<add> * = element ',' element-list
<add> * | element
<add> * ;
<ide> *
<ide> * Usage:
<ide> *
<ide> * (jQuery) $.jsonx('parse', ''|{});
<ide> * (String) $(*).jsonx('stringify');
<ide> * (String) $.jsonx('stringify', {});
<add> * @author <a href="http://github.com/neocotic">Alasdair Mercer</a>
<ide> */
<ide> (function ($) {
<ide> var methods = {
<ide> init: function (value) {
<del> var j, obj;
<add> function convertJQuery(obj) {
<add> var ret = [];
<add> obj.each(function () {
<add> var $this = $(this),
<add> attrs = {},
<add> contents = $this.contents(),
<add> i;
<add> ret.push(this.nodeName.toLowerCase());
<add> if (this.attributes.length) {
<add> for (i = 0; i < this.attributes.length; i++) {
<add> attrs[this.attributes[i].name] = this.attributes[i].value;
<add> }
<add> ret.push(attrs);
<add> }
<add> if (contents.length) {
<add> for (i = 0; i < contents.length; i++) {
<add> if (contents[i].nodeType === 3) {
<add> ret.push(contents[i].textContent);
<add> } else {
<add> ret.push(convertJQuery($(contents[i])));
<add> }
<add> }
<add> }
<add> });
<add> return ret;
<add> }
<ide> if (typeof value !== 'undefined') {
<del> obj = $(value);
<del> } else {
<del> obj = this;
<add> return convertJQuery($(value));
<ide> }
<del> if (obj.length) {
<del> // TODO: Create JSON(x) from jQuery object
<del> j = {};
<del> }
<del> return j;
<add> return convertJQuery(this);
<ide> },
<ide> parse: function (value) {
<del> var obj;
<add> function convertJsonx(obj, parent) {
<add> var arr = [],
<add> ele = {};
<add> if ($.isArray(obj)) {
<add> if (!obj.length || typeof obj[0] !== 'string') {
<add> throw new SyntaxError('JSONX.parse');
<add> }
<add> ele = $(document.createElement(obj[0]));
<add> if (obj.length > 1) {
<add> if ($.isPlainObject(obj[1])) {
<add> ele.attr(obj[1]);
<add> if (obj.length > 2) {
<add> arr = Array.prototype.slice.call(obj, 2);
<add> }
<add> } else {
<add> arr = Array.prototype.slice.call(obj, 1);
<add> }
<add> if (arr.length) {
<add> convertJsonx(arr, ele);
<add> }
<add> }
<add> } else if (typeof obj === 'string') {
<add> parent.append(obj);
<add> }
<add> return parent;
<add> }
<ide> if (typeof value === 'string') {
<ide> value = JSON.parse(value);
<ide> }
<del> // TODO: Create jQuery object from JSON(x)
<del> return obj;
<add> // TODO: Should this be children()?
<add> return convertJsonx(value, $('<x/>')).contents();
<ide> },
<ide> stringify: function (value) {
<ide> if (typeof value === 'undefined') { |
|
JavaScript | mit | 1cbde8ee91a087c9f9e0ba91357efcc64d6aa7fd | 0 | Nodeventures/Nodeventures | "use strict";
var mongoose = require('mongoose');
// auto increment is used for the current hero id
var autoIncrement = require('mongoose-auto-increment');
// If we have more different kind of heroes can add type.
// Different heroes will start with different health, attack and defense points.
var heroSchema = new mongoose.Schema({
name: {type: String, required: true, unique: true, index: true},
status: {type: String, required: true, index: true, default: 'offline'},
position: {
x: {type: Number, default: 320},
y: {type: Number, default: 320},
map: {type: String, default: 'fields'}
},
currentHealth: {type: Number, required: true, default: 100},
health: {type: Number, required: true, default: 100},
attack: {type: Number, required: true, default: 1},
defense: {type: Number, required: true, default: 0},
userId: {type: mongoose.Schema.ObjectId, ref: 'User'},
heroSprite: {type: String, default: 'assets/tileset/space_guy.png'},
animations: {
idle: {type: [[Number]], default: [[1, 2], [1, 2]]},
walk: {type: [[Number]], default: [[0, 2], [1, 2], [2, 2]]}
},
id: Number
}, {
collection: 'heroes',
id: false
});
// configure autoIncrement to increment id by 1
heroSchema.plugin(autoIncrement.plugin, {
model: 'Hero',
field: 'id',
startAt: 1,
incrementBy: 1
});
var Hero = mongoose.model('Hero', heroSchema);
module.exports = Hero; | data/models/Hero.js | "use strict";
var mongoose = require('mongoose');
// auto increment is used for the current hero id
var autoIncrement = require('mongoose-auto-increment');
// If we have more different kind of heroes can add type.
// Different heroes will start with different health, attack and defense points.
var heroSchema = new mongoose.Schema({
name: {type: String, required: true, unique: true, index: true},
status: {type: String, required: true, index: true, default: 'offline'},
position: {
x: {type: Number, default: 320},
y: {type: Number, default: 320},
map: {type: String, default: 'fields'}
},
health: {type: Number, required: true, default: 100},
attack: {type: Number, required: true, default: 0},
defense: {type: Number, required: true, default: 0},
userId: {type: mongoose.Schema.ObjectId, ref: 'User'},
heroSprite: {type: String, default: 'assets/tileset/space_guy.png'},
animations: {
idle: {type: [[Number]], default: [[1, 2], [1, 2]]},
walk: {type: [[Number]], default: [[0, 2], [1, 2], [2, 2]]}
},
id: Number
}, {
collection: 'heroes',
id: false
});
// configure autoIncrement to increment id by 1
heroSchema.plugin(autoIncrement.plugin, {
model: 'Hero',
field: 'id',
startAt: 1,
incrementBy: 1
});
var Hero = mongoose.model('Hero', heroSchema);
module.exports = Hero; | Added currentHealth; Set base attack to 1 (fists)
| data/models/Hero.js | Added currentHealth; Set base attack to 1 (fists) | <ide><path>ata/models/Hero.js
<ide> y: {type: Number, default: 320},
<ide> map: {type: String, default: 'fields'}
<ide> },
<add> currentHealth: {type: Number, required: true, default: 100},
<ide> health: {type: Number, required: true, default: 100},
<del> attack: {type: Number, required: true, default: 0},
<add> attack: {type: Number, required: true, default: 1},
<ide> defense: {type: Number, required: true, default: 0},
<ide> userId: {type: mongoose.Schema.ObjectId, ref: 'User'},
<ide> heroSprite: {type: String, default: 'assets/tileset/space_guy.png'}, |
|
Java | mit | error: pathspec 'tinustris-parent/tinustris/src/test/java/nl/mvdr/tinustris/gui/GridGroupTest.java' did not match any file(s) known to git
| 504cf7125790e7a268d0287d9fb2383c1569bc6b | 1 | TinusTinus/game-engine | package nl.mvdr.tinustris.gui;
import javafx.scene.control.Label;
import nl.mvdr.tinustris.model.GameState;
import org.junit.Assert;
import org.junit.Test;
/**
* Test class for {@link GridGroup}.
*
* @author Martijn van de Rijdt
*/
public class GridGroupTest {
/** Tests {@link GridGroup#render(Label, GameState)}. */
@Test
public void testRender() {
GridGroup renderer = createGridGroup();
GameState state = new GameState();
renderer.render(state);
Assert.assertFalse(renderer.getChildren().isEmpty());
}
/** Tests {@link GridGroup#render(Label, GameState)} when a null value of GameState is passed in. */
@Test(expected = NullPointerException.class)
public void testNullState() {
GridGroup renderer = createGridGroup();
renderer.render(null);
}
/**
* Creates a new renderer.
*
* @return renderer
*/
private GridGroup createGridGroup() {
return new GridGroup() {
/**
* Mock implementation which just executes the runnable on the current thread.
*
* @param runnable runnable to be executed
*/
@Override
protected void runOnJavaFXThread(Runnable runnable) {
runnable.run();
}
};
}
}
| tinustris-parent/tinustris/src/test/java/nl/mvdr/tinustris/gui/GridGroupTest.java | Added a test case for GridGroup. | tinustris-parent/tinustris/src/test/java/nl/mvdr/tinustris/gui/GridGroupTest.java | Added a test case for GridGroup. | <ide><path>inustris-parent/tinustris/src/test/java/nl/mvdr/tinustris/gui/GridGroupTest.java
<add>package nl.mvdr.tinustris.gui;
<add>
<add>import javafx.scene.control.Label;
<add>import nl.mvdr.tinustris.model.GameState;
<add>
<add>import org.junit.Assert;
<add>import org.junit.Test;
<add>
<add>/**
<add> * Test class for {@link GridGroup}.
<add> *
<add> * @author Martijn van de Rijdt
<add> */
<add>public class GridGroupTest {
<add> /** Tests {@link GridGroup#render(Label, GameState)}. */
<add> @Test
<add> public void testRender() {
<add> GridGroup renderer = createGridGroup();
<add> GameState state = new GameState();
<add>
<add> renderer.render(state);
<add>
<add> Assert.assertFalse(renderer.getChildren().isEmpty());
<add> }
<add>
<add> /** Tests {@link GridGroup#render(Label, GameState)} when a null value of GameState is passed in. */
<add> @Test(expected = NullPointerException.class)
<add> public void testNullState() {
<add> GridGroup renderer = createGridGroup();
<add>
<add> renderer.render(null);
<add> }
<add>
<add> /**
<add> * Creates a new renderer.
<add> *
<add> * @return renderer
<add> */
<add> private GridGroup createGridGroup() {
<add> return new GridGroup() {
<add> /**
<add> * Mock implementation which just executes the runnable on the current thread.
<add> *
<add> * @param runnable runnable to be executed
<add> */
<add> @Override
<add> protected void runOnJavaFXThread(Runnable runnable) {
<add> runnable.run();
<add> }
<add> };
<add> }
<add>} |
|
Java | apache-2.0 | f5b3ca6367f756f364394d033f4c55d982da352e | 0 | Mishiranu/Dashchan,Mishiranu/Dashchan,Mishiranu/Dashchan,Mishiranu/Dashchan | /*
* Copyright 2014-2016 Fukurou Mishiranu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mishiranu.dashchan.ui.navigator.page;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Locale;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Typeface;
import android.net.Uri;
import android.os.Handler;
import android.os.Parcel;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Pair;
import android.view.ActionMode;
import android.view.ContextThemeWrapper;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import chan.content.ChanConfiguration;
import chan.content.ChanLocator;
import chan.content.ChanManager;
import chan.content.model.Posts;
import chan.util.CommonUtils;
import chan.util.StringUtils;
import com.mishiranu.dashchan.C;
import com.mishiranu.dashchan.R;
import com.mishiranu.dashchan.content.CacheManager;
import com.mishiranu.dashchan.content.HidePerformer;
import com.mishiranu.dashchan.content.ImageLoader;
import com.mishiranu.dashchan.content.StatisticsManager;
import com.mishiranu.dashchan.content.async.DeserializePostsTask;
import com.mishiranu.dashchan.content.async.ReadPostsTask;
import com.mishiranu.dashchan.content.model.AttachmentItem;
import com.mishiranu.dashchan.content.model.ErrorItem;
import com.mishiranu.dashchan.content.model.GalleryItem;
import com.mishiranu.dashchan.content.model.PostItem;
import com.mishiranu.dashchan.content.service.PostingService;
import com.mishiranu.dashchan.content.storage.FavoritesStorage;
import com.mishiranu.dashchan.content.storage.HistoryDatabase;
import com.mishiranu.dashchan.preference.Preferences;
import com.mishiranu.dashchan.preference.SeekBarPreference;
import com.mishiranu.dashchan.ui.navigator.DrawerForm;
import com.mishiranu.dashchan.ui.navigator.adapter.PostsAdapter;
import com.mishiranu.dashchan.ui.navigator.manager.ThreadshotPerformer;
import com.mishiranu.dashchan.ui.navigator.manager.UiManager;
import com.mishiranu.dashchan.ui.posting.Replyable;
import com.mishiranu.dashchan.util.NavigationUtils;
import com.mishiranu.dashchan.util.ResourceUtils;
import com.mishiranu.dashchan.util.SearchHelper;
import com.mishiranu.dashchan.util.ToastUtils;
import com.mishiranu.dashchan.widget.ClickableToast;
import com.mishiranu.dashchan.widget.ListPosition;
import com.mishiranu.dashchan.widget.ListScroller;
import com.mishiranu.dashchan.widget.PullableListView;
import com.mishiranu.dashchan.widget.PullableWrapper;
public class PostsPage extends ListPage<PostsAdapter> implements FavoritesStorage.Observer, UiManager.Observer,
DeserializePostsTask.Callback, ReadPostsTask.Callback, ActionMode.Callback
{
private DeserializePostsTask mDeserializeTask;
private ReadPostsTask mReadTask;
private Replyable mReplyable;
private HidePerformer mHidePerformer;
private Pair<String, Uri> mOriginalThreadData;
private String mScrollToPostNumber;
private ActionMode mSelectionMode;
private LinearLayout mSearchController;
private TextView mSearchTextResult;
private final ArrayList<Integer> mSearchFoundPosts = new ArrayList<>();
private boolean mSearching = false;
private int mSearchLastPosition;
private int mAutoRefreshInterval = 30;
private boolean mAutoRefreshEnabled = false;
private final ArrayList<String> mLastEditedPostNumbers = new ArrayList<>();
private final BroadcastReceiver mGalleryPagerReceiver = new BroadcastReceiver()
{
@Override
public void onReceive(Context context, Intent intent)
{
String chanName = intent.getStringExtra(C.EXTRA_CHAN_NAME);
String boardName = intent.getStringExtra(C.EXTRA_BOARD_NAME);
String threadNumber = intent.getStringExtra(C.EXTRA_THREAD_NUMBER);
PageHolder pageHolder = getPageHolder();
if (pageHolder.chanName.equals(chanName) && StringUtils.equals(pageHolder.boardName, boardName)
&& pageHolder.threadNumber.equals(threadNumber))
{
String postNumber = intent.getStringExtra(C.EXTRA_POST_NUMBER);
int position = getAdapter().findPositionByPostNumber(postNumber);
if (position >= 0) ListScroller.scrollTo(getListView(), position);
}
}
};
	/**
	 * Sets up the page: adapter, reply handler, search controller views,
	 * favorites/gallery observers, and kicks off either a deserialize task
	 * (cold start) or reuses already cached post items.
	 */
	@Override
	protected void onCreate()
	{
		Activity activity = getActivity();
		PullableListView listView = getListView();
		PageHolder pageHolder = getPageHolder();
		UiManager uiManager = getUiManager();
		mHidePerformer = new HidePerformer();
		PostsExtra extra = getExtra();
		listView.setDivider(ResourceUtils.getDrawable(activity, R.attr.postsDivider, 0));
		ChanConfiguration.Board board = getChanConfiguration().safe().obtainBoard(pageHolder.boardName);
		// Replying is only wired up when the board allows posting.
		if (board.allowPosting)
		{
			mReplyable = (data) ->
			{
				getUiManager().navigator().navigatePosting(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber, data);
			};
		}
		PostsAdapter adapter = new PostsAdapter(activity, pageHolder.chanName, pageHolder.boardName, uiManager,
				mReplyable, mHidePerformer, extra.userPostNumbers, listView);
		initAdapter(adapter, adapter);
		listView.getWrapper().setPullSides(PullableWrapper.Side.BOTH);
		uiManager.observable().register(this);
		applyTitle(pageHolder.threadTitle, true);
		// Build the search controller (counter + back/forward buttons) shown as an
		// action view while searching. Uses the dark theme to match the action bar.
		Context darkStyledContext = new ContextThemeWrapper(activity, R.style.Theme_General_Main_Dark);
		mSearchController = new LinearLayout(darkStyledContext);
		mSearchController.setOrientation(LinearLayout.HORIZONTAL);
		mSearchController.setGravity(Gravity.CENTER_VERTICAL);
		float density = ResourceUtils.obtainDensity(getResources());
		int padding = (int) (10f * density);
		mSearchTextResult = new TextView(darkStyledContext, null, android.R.attr.textAppearanceLarge);
		mSearchTextResult.setTextSize(11f);
		mSearchTextResult.setTypeface(null, Typeface.BOLD);
		mSearchTextResult.setPadding((int) (4f * density), 0, (int) (4f * density), 0);
		mSearchController.addView(mSearchTextResult, LinearLayout.LayoutParams.WRAP_CONTENT,
				LinearLayout.LayoutParams.WRAP_CONTENT);
		ImageView backButtonView = new ImageView(darkStyledContext, null, android.R.attr.borderlessButtonStyle);
		backButtonView.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
		backButtonView.setImageResource(obtainIcon(R.attr.actionBack));
		backButtonView.setPadding(padding, padding, padding, padding);
		backButtonView.setOnClickListener(v -> findBack());
		mSearchController.addView(backButtonView, (int) (48f * density), (int) (48f * density));
		// Tighter margins on Lollipop+ to match the platform action bar metrics.
		if (C.API_LOLLIPOP)
		{
			LinearLayout.LayoutParams layoutParams = (LinearLayout.LayoutParams) backButtonView.getLayoutParams();
			layoutParams.leftMargin = (int) (2f * density);
			layoutParams.rightMargin = -(int) (8f * density);
		}
		ImageView forwardButtonView = new ImageView(darkStyledContext, null, android.R.attr.borderlessButtonStyle);
		forwardButtonView.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
		forwardButtonView.setImageResource(obtainIcon(R.attr.actionForward));
		forwardButtonView.setPadding(padding, padding, padding, padding);
		forwardButtonView.setOnClickListener(v -> findForward());
		mSearchController.addView(forwardButtonView, (int) (48f * density), (int) (48f * density));
		mScrollToPostNumber = pageHolder.initialPostNumber;
		FavoritesStorage.getInstance().getObservable().register(this);
		LocalBroadcastManager.getInstance(activity).registerReceiver(mGalleryPagerReceiver,
				new IntentFilter(C.ACTION_GALLERY_GO_TO_POST));
		boolean hasNewPostDatas = handleNewPostDatas();
		// Force a network refresh when there are pending own posts or the page
		// was not opened from cache.
		extra.forceRefresh = hasNewPostDatas || !pageHolder.initialFromCache;
		if (extra.cachedPosts != null && extra.cachedPostItems.size() > 0)
		{
			// Posts already in memory: populate the adapter without a background task.
			onDeserializePostsCompleteInternal(true, extra.cachedPosts, new ArrayList<>(extra.cachedPostItems), true);
		}
		else
		{
			mDeserializeTask = new DeserializePostsTask(this, pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber, extra.cachedPosts);
			mDeserializeTask.executeOnExecutor(DeserializePostsTask.THREAD_POOL_EXECUTOR);
			getListView().getWrapper().startBusyState(PullableWrapper.Side.BOTH);
			switchView(ViewType.PROGRESS, null);
		}
		pageHolder.setInitialPostsData(false, null);
	}
	// Resume auto refresh scheduling; "true" requests an immediate refresh pass.
	@Override
	protected void onResume()
	{
		queueNextRefresh(true);
	}
	// Stop the auto refresh runnable while the page is not visible.
	@Override
	protected void onPause()
	{
		stopRefresh();
	}
	/**
	 * Tears the page down: cancels pending tasks, unregisters all observers and
	 * receivers registered in onCreate(), and clears pending image loads.
	 */
	@Override
	protected void onDestroy()
	{
		getAdapter().cleanup();
		LocalBroadcastManager.getInstance(getActivity()).unregisterReceiver(mGalleryPagerReceiver);
		getUiManager().observable().unregister(this);
		if (mDeserializeTask != null)
		{
			mDeserializeTask.cancel();
			mDeserializeTask = null;
		}
		if (mReadTask != null)
		{
			mReadTask.cancel();
			mReadTask = null;
		}
		ImageLoader.getInstance().clearTasks(getPageHolder().chanName);
		FavoritesStorage.getInstance().getObservable().unregister(this);
	}
	// Called when new own-post data arrives for this thread; triggers a refresh
	// so the pending user posts get picked up.
	@Override
	protected void onHandleNewPostDatas()
	{
		boolean hasNewPostDatas = handleNewPostDatas();
		if (hasNewPostDatas) refreshPosts(true, false);
	}
	// In selection mode a tap toggles selection and updates the counter title;
	// otherwise the click is forwarded to the post interaction handler.
	@Override
	public void onItemClick(View view, int position, long id)
	{
		if (mSelectionMode != null)
		{
			getAdapter().toggleItemSelected(getListView(), position);
			mSelectionMode.setTitle(getString(R.string.text_selected_format, getAdapter().getSelectedCount()));
			return;
		}
		PostsAdapter adapter = getAdapter();
		PostItem postItem = adapter.getItem(position);
		if (postItem != null) getUiManager().interaction().handlePostClick(view, postItem, adapter.getItems());
	}
	// Long press opens the post context menu, unless selection mode is active.
	@Override
	public boolean onItemLongClick(View view, int position, long id)
	{
		if (mSelectionMode != null) return false;
		PostsAdapter adapter = getAdapter();
		PostItem postItem = adapter.getItem(position);
		return postItem != null && getUiManager().interaction().handlePostContextMenu(postItem, mReplyable, true, true);
	}
	// Options menu item ids.
	private static final int OPTIONS_MENU_ADD_POST = 0;
	private static final int OPTIONS_MENU_GALLERY = 1;
	private static final int OPTIONS_MENU_SELECT = 2;
	private static final int OPTIONS_MENU_REFRESH = 3;
	private static final int OPTIONS_MENU_THREAD_OPTIONS = 4;
	// Favorites actions exist in text and icon variants; visibility is chosen in
	// onPrepareOptionsMenu based on screen configuration.
	private static final int OPTIONS_MENU_ADD_TO_FAVORITES_TEXT = 5;
	private static final int OPTIONS_MENU_REMOVE_FROM_FAVORITES_TEXT = 6;
	private static final int OPTIONS_MENU_ADD_TO_FAVORITES_ICON = 7;
	private static final int OPTIONS_MENU_REMOVE_FROM_FAVORITES_ICON = 8;
	private static final int OPTIONS_MENU_OPEN_ORIGINAL_THREAD = 9;
	private static final int OPTIONS_MENU_ARCHIVE = 10;
	private static final int OPTIONS_MENU_SEARCH_CONTROLLER = 11;
	// "Thread options" submenu item ids (offset to avoid clashing with the above).
	private static final int THREAD_OPTIONS_MENU_RELOAD = 200;
	private static final int THREAD_OPTIONS_MENU_AUTO_REFRESH = 201;
	private static final int THREAD_OPTIONS_MENU_HIDDEN_POSTS = 202;
	private static final int THREAD_OPTIONS_MENU_CLEAR_DELETED = 203;
	private static final int THREAD_OPTIONS_MENU_SUMMARY = 204;
	// Builds the full options menu once; onPrepareOptionsMenu later toggles
	// visibility of individual items depending on state.
	@Override
	public void onCreateOptionsMenu(Menu menu)
	{
		menu.add(0, OPTIONS_MENU_ADD_POST, 0, R.string.action_add_post).setIcon(obtainIcon(R.attr.actionAddPost))
				.setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
		menu.add(0, OPTIONS_MENU_SEARCH, 0, R.string.action_search)
				.setShowAsAction(MenuItem.SHOW_AS_ACTION_COLLAPSE_ACTION_VIEW);
		menu.add(0, OPTIONS_MENU_GALLERY, 0, R.string.action_gallery);
		menu.add(0, OPTIONS_MENU_SELECT, 0, R.string.action_select);
		menu.add(0, OPTIONS_MENU_REFRESH, 0, R.string.action_refresh).setIcon(obtainIcon(R.attr.actionRefresh))
				.setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
		menu.addSubMenu(0, OPTIONS_MENU_APPEARANCE, 0, R.string.action_appearance);
		SubMenu threadOptions = menu.addSubMenu(0, OPTIONS_MENU_THREAD_OPTIONS, 0, R.string.action_thread_options);
		menu.add(0, OPTIONS_MENU_ADD_TO_FAVORITES_TEXT, 0, R.string.action_add_to_favorites);
		menu.add(0, OPTIONS_MENU_REMOVE_FROM_FAVORITES_TEXT, 0, R.string.action_remove_from_favorites);
		menu.add(0, OPTIONS_MENU_ADD_TO_FAVORITES_ICON, 0, R.string.action_add_to_favorites)
				.setIcon(obtainIcon(R.attr.actionAddToFavorites)).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
		menu.add(0, OPTIONS_MENU_REMOVE_FROM_FAVORITES_ICON, 0, R.string.action_remove_from_favorites)
				.setIcon(obtainIcon(R.attr.actionRemoveFromFavorites)).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
		menu.add(0, OPTIONS_MENU_OPEN_ORIGINAL_THREAD, 0, R.string.action_open_the_original);
		menu.add(0, OPTIONS_MENU_ARCHIVE, 0, R.string.action_archive_add);
		// The search controller is a custom action view; shown only while searching.
		menu.add(0, OPTIONS_MENU_SEARCH_CONTROLLER, 0, null).setActionView(mSearchController)
				.setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
		threadOptions.add(0, THREAD_OPTIONS_MENU_RELOAD, 0, R.string.action_reload);
		threadOptions.add(0, THREAD_OPTIONS_MENU_AUTO_REFRESH, 0, R.string.action_auto_refresh).setCheckable(true);
		threadOptions.add(0, THREAD_OPTIONS_MENU_HIDDEN_POSTS, 0, R.string.action_hidden_posts);
		threadOptions.add(0, THREAD_OPTIONS_MENU_CLEAR_DELETED, 0, R.string.action_clear_deleted);
		threadOptions.add(0, THREAD_OPTIONS_MENU_SUMMARY, 0, R.string.action_summary);
	}
	/**
	 * Shows only the search-related items while searching; otherwise restores the
	 * full menu and adjusts visibility/enabled state from the current page state.
	 */
	@Override
	public void onPrepareOptionsMenu(Menu menu)
	{
		if (mSearching)
		{
			for (int i = 0; i < menu.size(); i++) menu.getItem(i).setVisible(false);
			menu.findItem(OPTIONS_MENU_SEARCH).setVisible(true);
			menu.findItem(OPTIONS_MENU_SEARCH_CONTROLLER).setVisible(true);
		}
		else
		{
			for (int i = 0; i < menu.size(); i++) menu.getItem(i).setVisible(true);
			PageHolder pageHolder = getPageHolder();
			menu.findItem(OPTIONS_MENU_ADD_POST).setVisible(mReplyable != null);
			boolean isFavorite = FavoritesStorage.getInstance().hasFavorite(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber);
			// Tablets/landscape get the icon variant of the favorites action;
			// other configurations get the text variant.
			boolean iconFavorite = ResourceUtils.isTabletOrLandscape(getResources().getConfiguration());
			menu.findItem(OPTIONS_MENU_ADD_TO_FAVORITES_TEXT).setVisible(!iconFavorite && !isFavorite);
			menu.findItem(OPTIONS_MENU_REMOVE_FROM_FAVORITES_TEXT).setVisible(!iconFavorite && isFavorite);
			menu.findItem(OPTIONS_MENU_ADD_TO_FAVORITES_ICON).setVisible(iconFavorite && !isFavorite);
			menu.findItem(OPTIONS_MENU_REMOVE_FROM_FAVORITES_ICON).setVisible(iconFavorite && isFavorite);
			menu.findItem(OPTIONS_MENU_OPEN_ORIGINAL_THREAD).setVisible(mOriginalThreadData != null);
			menu.findItem(OPTIONS_MENU_ARCHIVE).setVisible(ChanManager.getInstance()
					.canBeArchived(pageHolder.chanName));
			menu.findItem(OPTIONS_MENU_SEARCH_CONTROLLER).setVisible(false);
			menu.findItem(THREAD_OPTIONS_MENU_AUTO_REFRESH).setVisible(Preferences.getAutoRefreshMode()
					== Preferences.AUTO_REFRESH_MODE_SEPARATE).setEnabled(!getAdapter().isEmpty())
					.setChecked(mAutoRefreshEnabled);
			menu.findItem(THREAD_OPTIONS_MENU_HIDDEN_POSTS).setEnabled(mHidePerformer.hasLocalAutohide());
			menu.findItem(THREAD_OPTIONS_MENU_CLEAR_DELETED).setEnabled(getAdapter().hasDeletedPosts());
		}
	}
	/**
	 * Dispatches options menu selections. Returns true when the item was handled.
	 * Each case is self-contained; several cases show confirmation dialogs whose
	 * positive-button callbacks perform the actual mutation.
	 */
	@Override
	public boolean onOptionsItemSelected(MenuItem item)
	{
		Activity activity = getActivity();
		PageHolder pageHolder = getPageHolder();
		PostsAdapter adapter = getAdapter();
		switch (item.getItemId())
		{
			case OPTIONS_MENU_ADD_POST:
			{
				getUiManager().navigator().navigatePosting(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber);
				return true;
			}
			case OPTIONS_MENU_GALLERY:
			{
				// Open the gallery starting from the image nearest to the first
				// visible post: search backwards from the current position first,
				// then forwards.
				int imageIndex = -1;
				ListView listView = getListView();
				View child = listView.getChildAt(0);
				if (child != null)
				{
					UiManager uiManager = getUiManager();
					ArrayList<GalleryItem> galleryItems = getAdapter().getGallerySet().getItems();
					int position = listView.getPositionForView(child);
					OUTER: for (int v = 0; v <= 1; v++)
					{
						for (PostItem postItem : adapter.iterate(v == 0, position))
						{
							imageIndex = uiManager.view().findImageIndex(galleryItems, postItem);
							if (imageIndex != -1) break OUTER;
						}
					}
				}
				NavigationUtils.openGallery(getActivity(), null, pageHolder.chanName, imageIndex,
						adapter.getGallerySet(), true, true);
				return true;
			}
			case OPTIONS_MENU_SELECT:
			{
				// Enter multi-selection; this object is the ActionMode.Callback.
				mSelectionMode = getActivity().startActionMode(this);
				return true;
			}
			case OPTIONS_MENU_REFRESH:
			{
				refreshPosts(true, false);
				return true;
			}
			case OPTIONS_MENU_ADD_TO_FAVORITES_TEXT:
			case OPTIONS_MENU_ADD_TO_FAVORITES_ICON:
			{
				FavoritesStorage.getInstance().add(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber, pageHolder.threadTitle, adapter.getExistingPostsCount());
				updateOptionsMenu(false);
				return true;
			}
			case OPTIONS_MENU_REMOVE_FROM_FAVORITES_TEXT:
			case OPTIONS_MENU_REMOVE_FROM_FAVORITES_ICON:
			{
				FavoritesStorage.getInstance().remove(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber);
				updateOptionsMenu(false);
				return true;
			}
			case OPTIONS_MENU_OPEN_ORIGINAL_THREAD:
			{
				// mOriginalThreadData is non-null here; the item is only visible
				// when it was set in onAfterPostsLoad().
				String chanName = mOriginalThreadData.first;
				Uri uri = mOriginalThreadData.second;
				ChanLocator locator = ChanLocator.get(chanName);
				String boardName = locator.safe(true).getBoardName(uri);
				String threadNumber = locator.safe(true).getThreadNumber(uri);
				if (threadNumber != null)
				{
					String threadTitle = getAdapter().getItem(0).getSubjectOrComment();
					getUiManager().navigator().navigatePosts(chanName, boardName, threadNumber, null,
							threadTitle, false);
				}
				return true;
			}
			case OPTIONS_MENU_ARCHIVE:
			{
				String threadTitle = null;
				if (adapter.getCount() > 0) threadTitle = adapter.getItem(0).getSubjectOrComment();
				getUiManager().dialog().performSendArchiveThread(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber, threadTitle, getExtra().cachedPosts);
				return true;
			}
			case THREAD_OPTIONS_MENU_RELOAD:
			{
				// reload=true forces a full re-download instead of a partial update.
				refreshPosts(true, true);
				return true;
			}
			case THREAD_OPTIONS_MENU_AUTO_REFRESH:
			{
				// Dialog with a switch + interval slider; on OK the settings are
				// persisted with the cached posts and the refresh timer restarted.
				final SeekBarPreference.Holder holder = new SeekBarPreference.Holder(true,
						Preferences.MIN_AUTO_REFRESH_INTERVAL, Preferences.MAX_AUTO_REFRESH_INTERVAL,
						Preferences.STEP_AUTO_REFRESH_INTERVAL, 1f,
						getString(R.string.preference_auto_refresh_interval_summary_format));
				holder.setCurrentValue(mAutoRefreshInterval);
				holder.setSwitchValue(mAutoRefreshEnabled);
				new AlertDialog.Builder(activity).setTitle(R.string.action_auto_refresh).setView(holder.create
						(getActivity())).setPositiveButton(android.R.string.ok, (dialog, which1) ->
				{
					mAutoRefreshEnabled = holder.getSwitchValue();
					mAutoRefreshInterval = holder.getCurrentValue();
					Posts posts = getExtra().cachedPosts;
					boolean changed = posts.setAutoRefreshData(mAutoRefreshEnabled, mAutoRefreshInterval);
					if (changed) serializePosts();
					queueNextRefresh(true);
				}).setNegativeButton(android.R.string.cancel, null).show();
				return true;
			}
			case THREAD_OPTIONS_MENU_HIDDEN_POSTS:
			{
				// Multi-choice dialog to delete selected local autohide rules.
				ArrayList<String> localAutohide = mHidePerformer.getReadableLocalAutohide();
				final boolean[] checked = new boolean[localAutohide.size()];
				new AlertDialog.Builder(activity).setMultiChoiceItems(CommonUtils.toArray(localAutohide, String.class),
						checked, (dialog, which, isChecked) -> checked[which] = isChecked)
						.setPositiveButton(android.R.string.ok, (dialog, which) ->
				{
					boolean hasDeleted = false;
					// j tracks the live index, which shifts as rules are removed.
					for (int i = 0, j = 0; i < checked.length; i++, j++)
					{
						if (checked[i])
						{
							mHidePerformer.removeLocalAutohide(j--);
							hasDeleted = true;
						}
					}
					if (hasDeleted)
					{
						adapter.invalidateHidden();
						notifyAllAdaptersChanged();
						mHidePerformer.encodeLocalAutohide(getExtra().cachedPosts);
						serializePosts();
						adapter.preloadPosts(getListView().getFirstVisiblePosition());
					}
				}).setNegativeButton(android.R.string.cancel, null).setTitle(R.string.text_remove_rules).show();
				return true;
			}
			case THREAD_OPTIONS_MENU_CLEAR_DELETED:
			{
				// Confirmation dialog; on OK removes deleted posts from the cache,
				// the adapter and the user-post bookkeeping.
				new AlertDialog.Builder(getActivity()).setMessage(R.string.message_clear_deleted_warning)
						.setPositiveButton(android.R.string.ok, (dialog, which) ->
				{
					PostsExtra extra = getExtra();
					Posts cachedPosts = extra.cachedPosts;
					cachedPosts.clearDeletedPosts();
					ArrayList<PostItem> deletedPostItems = adapter.clearDeletedPosts();
					if (deletedPostItems != null)
					{
						extra.cachedPostItems.removeAll(deletedPostItems);
						for (PostItem postItem : deletedPostItems)
						{
							extra.userPostNumbers.remove(postItem.getPostNumber());
						}
						notifyAllAdaptersChanged();
					}
					updateOptionsMenu(false);
					serializePosts();
				}).setNegativeButton(android.R.string.cancel, null).show();
				return true;
			}
			case THREAD_OPTIONS_MENU_SUMMARY:
			{
				// Counts attachments/links across all posts and shows a summary dialog.
				PostsExtra extra = getExtra();
				int files = 0;
				int postsWithFiles = 0;
				int links = 0;
				for (PostItem postItem : getAdapter())
				{
					ArrayList<AttachmentItem> attachmentItems = postItem.getAttachmentItems();
					if (attachmentItems != null)
					{
						int itFiles = 0;
						for (AttachmentItem attachmentItem : attachmentItems)
						{
							int generalType = attachmentItem.getGeneralType();
							switch (generalType)
							{
								case AttachmentItem.GENERAL_TYPE_FILE:
								case AttachmentItem.GENERAL_TYPE_EMBEDDED:
								{
									itFiles++;
									break;
								}
								case AttachmentItem.GENERAL_TYPE_LINK:
								{
									links++;
									break;
								}
							}
						}
						if (itFiles > 0)
						{
							postsWithFiles++;
							files += itFiles;
						}
					}
				}
				int uniquePosters = extra.cachedPosts!= null ? extra.cachedPosts.getUniquePosters() : -1;
				StringBuilder builder = new StringBuilder();
				String boardName = pageHolder.boardName;
				if (boardName != null)
				{
					builder.append(getString(R.string.text_board)).append(": ");
					String title = getChanConfiguration().getBoardTitle(boardName);
					builder.append(StringUtils.formatBoardTitle(pageHolder.chanName, boardName, title));
					builder.append('\n');
				}
				builder.append(getString(R.string.text_files_format, files));
				builder.append('\n').append(getString(R.string.text_posts_with_files_format, postsWithFiles));
				builder.append('\n').append(getString(R.string.text_links_attachments_format, links));
				if (uniquePosters > 0)
				{
					builder.append('\n').append(getString(R.string.text_unique_posters_format, uniquePosters));
				}
				new AlertDialog.Builder(getActivity()).setTitle(R.string.action_summary).setMessage(builder)
						.setPositiveButton(android.R.string.ok, null).show();
				return true;
			}
		}
		return false;
	}
@Override
public void onFavoritesUpdate(FavoritesStorage.FavoriteItem favoriteItem, int action)
{
switch (action)
{
case FavoritesStorage.ACTION_ADD:
case FavoritesStorage.ACTION_REMOVE:
{
PageHolder pageHolder = getPageHolder();
if (favoriteItem.equals(pageHolder.chanName, pageHolder.boardName, pageHolder.threadNumber))
{
updateOptionsMenu(false);
}
break;
}
}
}
@Override
public void onAppearanceOptionChanged(int what)
{
switch (what)
{
case APPEARANCE_MENU_SPOILERS:
case APPEARANCE_MENU_MY_POSTS:
case APPEARANCE_MENU_SFW_MODE:
{
notifyAllAdaptersChanged();
break;
}
}
}
	// Action mode (multi-selection) menu item ids.
	private static final int ACTION_MENU_MAKE_THREADSHOT = 0;
	private static final int ACTION_MENU_REPLY = 1;
	private static final int ACTION_MENU_DELETE_POSTS = 2;
	private static final int ACTION_MENU_SEND_REPORT = 3;
	/**
	 * Enters selection mode: enables selection on the adapter and builds the
	 * action mode menu. Reply/delete/report items appear only when the board
	 * configuration supports them for multiple posts.
	 */
	@Override
	public boolean onCreateActionMode(ActionMode mode, Menu menu)
	{
		PageHolder pageHolder = getPageHolder();
		ChanConfiguration configuration = getChanConfiguration();
		getAdapter().setSelectionModeEnabled(true);
		mode.setTitle(getString(R.string.text_selected_format, 0));
		// Reuse the platform "paste" selection icon for the reply action.
		int pasteResId = ResourceUtils.getSystemSelectionIcon(getActivity(), "actionModePasteDrawable",
				"ic_menu_paste_holo_dark");
		int flags = MenuItem.SHOW_AS_ACTION_ALWAYS | MenuItem.SHOW_AS_ACTION_WITH_TEXT;
		ChanConfiguration.Board board = configuration.safe().obtainBoard(pageHolder.boardName);
		menu.add(0, ACTION_MENU_MAKE_THREADSHOT, 0, R.string.action_make_threadshot)
				.setIcon(obtainIcon(R.attr.actionMakeThreadshot)).setShowAsAction(flags);
		if (mReplyable != null)
		{
			menu.add(0, ACTION_MENU_REPLY, 0, R.string.action_reply).setIcon(pasteResId).setShowAsAction(flags);
		}
		if (board.allowDeleting)
		{
			ChanConfiguration.Deleting deleting = configuration.safe().obtainDeleting(pageHolder.boardName);
			if (deleting != null && deleting.multiplePosts)
			{
				menu.add(0, ACTION_MENU_DELETE_POSTS, 0, R.string.action_delete)
						.setIcon(obtainIcon(R.attr.actionDelete)).setShowAsAction(flags);
			}
		}
		if (board.allowReporting)
		{
			ChanConfiguration.Reporting reporting = configuration.safe().obtainReporting(pageHolder.boardName);
			if (reporting != null && reporting.multiplePosts)
			{
				menu.add(0, ACTION_MENU_SEND_REPORT, 0, R.string.action_report)
						.setIcon(obtainIcon(R.attr.actionReport)).setShowAsAction(flags);
			}
		}
		return true;
	}
	// The action mode menu never changes after creation, so no re-preparation.
	@Override
	public boolean onPrepareActionMode(ActionMode mode, Menu menu)
	{
		return false;
	}
@Override
public boolean onActionItemClicked(ActionMode mode, MenuItem item)
{
switch (item.getItemId())
{
case ACTION_MENU_MAKE_THREADSHOT:
{
ArrayList<PostItem> postItems = getAdapter().getSelectedItems();
if (postItems.size() > 0)
{
PageHolder pageHolder = getPageHolder();
new ThreadshotPerformer(getListView(), getUiManager(), pageHolder.chanName, pageHolder.boardName,
pageHolder.threadNumber, getAdapter().getGallerySet().getThreadTitle(), postItems);
}
mode.finish();
return true;
}
case ACTION_MENU_REPLY:
{
ArrayList<Replyable.ReplyData> data = new ArrayList<>();
for (PostItem postItem : getAdapter().getSelectedItems())
{
data.add(new Replyable.ReplyData(postItem.getPostNumber(), null));
}
if (data.size() > 0) mReplyable.onRequestReply(CommonUtils.toArray(data, Replyable.ReplyData.class));
mode.finish();
return true;
}
case ACTION_MENU_DELETE_POSTS:
{
ArrayList<PostItem> postItems = getAdapter().getSelectedItems();
ArrayList<String> postNumbers = new ArrayList<>();
for (PostItem postItem : postItems)
{
if (!postItem.isDeleted()) postNumbers.add(postItem.getPostNumber());
}
if (postNumbers.size() > 0)
{
PageHolder pageHolder = getPageHolder();
getUiManager().dialog().performSendDeletePosts(pageHolder.chanName, pageHolder.boardName,
pageHolder.threadNumber, postNumbers);
}
mode.finish();
return true;
}
case ACTION_MENU_SEND_REPORT:
{
ArrayList<PostItem> postItems = getAdapter().getSelectedItems();
ArrayList<String> postNumbers = new ArrayList<>();
for (PostItem postItem : postItems)
{
if (!postItem.isDeleted()) postNumbers.add(postItem.getPostNumber());
}
if (postNumbers.size() > 0)
{
PageHolder pageHolder = getPageHolder();
getUiManager().dialog().performSendReportPosts(pageHolder.chanName, pageHolder.boardName,
pageHolder.threadNumber, postNumbers);
}
mode.finish();
return true;
}
}
return false;
}
	// Leaves selection mode and drops the ActionMode reference checked elsewhere.
	@Override
	public void onDestroyActionMode(ActionMode mode)
	{
		getAdapter().setSelectionModeEnabled(false);
		mSelectionMode = null;
	}
	/**
	 * Runs an in-thread search over all visible (non-hidden) posts. Supports
	 * flag filters ("m" own posts, "r" replies to own posts, "a" attachments,
	 * "d" deleted, "e" edited, "op" original poster) plus included/excluded
	 * query words matched against comment, subject, name and attachment file
	 * names. Fills mSearchFoundPosts with matching adapter positions and jumps
	 * to the first match at or after the current list position.
	 */
	@Override
	public boolean onStartSearch(String query)
	{
		PostsAdapter adapter = getAdapter();
		if (adapter.isEmpty()) return false;
		mSearchFoundPosts.clear();
		int listPosition = ListPosition.obtain(getListView()).position;
		mSearchLastPosition = 0;
		boolean positionDefined = false;
		Locale locale = Locale.getDefault();
		SearchHelper helper = new SearchHelper();
		helper.setFlags("m", "r", "a", "d", "e", "op");
		HashSet<String> queries = helper.handleQueries(locale, query);
		HashSet<String> fileNames = new HashSet<>();
		PostsExtra extra = getExtra();
		OUTER: for (int i = 0; i < adapter.getCount(); i++)
		{
			PostItem postItem = adapter.getItem(i);
			if (postItem != null && !postItem.isHidden(mHidePerformer))
			{
				String postNumber = postItem.getPostNumber();
				String comment = postItem.getComment().toString().toLowerCase(locale);
				boolean userPost = postItem.isUserPost();
				// "reply" means the post references at least one of the user's own posts.
				boolean reply = false;
				HashSet<String> referencesTo = postItem.getReferencesTo();
				if (referencesTo != null)
				{
					for (String referenceTo : referencesTo)
					{
						if (extra.userPostNumbers.contains(referenceTo))
						{
							reply = true;
							break;
						}
					}
				}
				boolean hasAttachments = postItem.hasAttachments();
				boolean deleted = postItem.isDeleted();
				boolean edited = mLastEditedPostNumbers.contains(postNumber);
				boolean originalPoster = postItem.isOriginalPoster();
				if (!helper.checkFlags("m", userPost, "r", reply, "a", hasAttachments, "d", deleted, "e", edited,
						"op", originalPoster))
				{
					continue;
				}
				// Any excluded word appearing in the comment disqualifies the post.
				for (String lowQuery : helper.getExcluded())
				{
					if (comment.contains(lowQuery)) continue OUTER;
				}
				String subject = postItem.getSubject().toLowerCase(locale);
				String name = postItem.getFullName().toString().toLowerCase(locale);
				// Collect both server and original attachment file names for matching.
				fileNames.clear();
				ArrayList<AttachmentItem> attachmentItems = postItem.getAttachmentItems();
				if (attachmentItems != null)
				{
					for (AttachmentItem attachmentItem : attachmentItems)
					{
						String fileName = attachmentItem.getFileName();
						if (fileName != null)
						{
							fileNames.add(fileName.toLowerCase(locale));
							String originalName = attachmentItem.getOriginalName();
							if (originalName != null) fileNames.add(originalName.toLowerCase(locale));
						}
					}
				}
				// With included words, at least one must match any searched field;
				// with none, flag filters alone decide.
				boolean found = false;
				if (helper.hasIncluded())
				{
					QUERIES: for (String lowQuery : helper.getIncluded())
					{
						if (comment.contains(lowQuery))
						{
							found = true;
							break;
						}
						else if (subject.contains(lowQuery))
						{
							found = true;
							break;
						}
						else if (name.contains(lowQuery))
						{
							found = true;
							break;
						}
						else
						{
							for (String fileName : fileNames)
							{
								if (fileName.contains(lowQuery))
								{
									found = true;
									break QUERIES;
								}
							}
						}
					}
				}
				else found = true;
				if (found)
				{
					// Remember the first match below the current scroll position.
					if (!positionDefined && i > listPosition)
					{
						mSearchLastPosition = mSearchFoundPosts.size();
						positionDefined = true;
					}
					mSearchFoundPosts.add(i);
				}
			}
		}
		boolean found = mSearchFoundPosts.size() > 0;
		setActionBarLocked(true);
		getUiManager().view().setHighlightText(found ? queries : null);
		adapter.notifyDataSetChanged();
		mSearching = true;
		if (found)
		{
			updateOptionsMenu(true);
			// findForward() pre-increments, so step back one first.
			mSearchLastPosition--;
			findForward();
		}
		else
		{
			ToastUtils.show(getActivity(), R.string.message_not_found);
			mSearchLastPosition = -1;
			updateSearchTitle();
		}
		return true;
	}
	// Framework callback; the shared internal variant also serves onBackPressed().
	@Override
	public void onStopSearch()
	{
		onStopSearchInternal();
	}
private boolean onStopSearchInternal()
{
if (mSearching)
{
mSearching = false;
updateOptionsMenu(true);
getUiManager().view().setHighlightText(null);
getAdapter().notifyDataSetChanged();
setActionBarLocked(false);
return true;
}
else return false;
}
private void findBack()
{
int count = mSearchFoundPosts.size();
if (count > 0)
{
mSearchLastPosition--;
if (mSearchLastPosition < 0) mSearchLastPosition += count;
ListScroller.scrollTo(getListView(), mSearchFoundPosts.get(mSearchLastPosition));
updateSearchTitle();
}
}
private void findForward()
{
int count = mSearchFoundPosts.size();
if (count > 0)
{
mSearchLastPosition++;
if (mSearchLastPosition >= count) mSearchLastPosition -= count;
ListScroller.scrollTo(getListView(), mSearchFoundPosts.get(mSearchLastPosition));
updateSearchTitle();
}
}
	// Shows "current/total" in the search controller; mSearchLastPosition is
	// zero-based, so it is displayed incremented (0/N when nothing is found).
	private void updateSearchTitle()
	{
		mSearchTextResult.setText((mSearchLastPosition + 1) + "/" + mSearchFoundPosts.size());
	}
	// Back first dismisses an active search; otherwise falls through to the page.
	@Override
	public boolean onBackPressed()
	{
		return onStopSearchInternal() || super.onBackPressed();
	}
	/**
	 * Converts queued own-post results for this thread into pending user-post
	 * markers. Posts with a known post number that are already loaded get marked
	 * directly; the rest are queued for matching during the next read.
	 * Returns true when any new pending marker was added (i.e. a refresh is needed).
	 */
	private boolean handleNewPostDatas()
	{
		PageHolder pageHolder = getPageHolder();
		ArrayList<PostingService.NewPostData> newPostDatas = PostingService.getNewPostDatas(getActivity(),
				pageHolder.chanName, pageHolder.boardName, pageHolder.threadNumber);
		if (newPostDatas != null)
		{
			boolean hasNewPostDatas = false;
			PostsExtra extra = getExtra();
			OUTER: for (PostingService.NewPostData newPostData : newPostDatas)
			{
				ReadPostsTask.UserPostPending userPostPending;
				if (newPostData.newThread)
				{
					userPostPending = new ReadPostsTask.NewThreadUserPostPending();
				}
				else if (newPostData.postNumber != null)
				{
					userPostPending = new ReadPostsTask.PostNumberUserPostPending(newPostData.postNumber);
					// Check this post had loaded before this callback was called
					// This can be unequivocally checked only for this type of UserPostPending
					for (PostItem postItem : getAdapter())
					{
						if (userPostPending.isUserPost(postItem.getPost()))
						{
							// Already loaded: mark it as a user post right away and
							// skip adding a pending entry for it.
							postItem.setUserPost(true);
							extra.userPostNumbers.add(postItem.getPostNumber());
							getUiManager().sendPostItemMessage(postItem, UiManager.MESSAGE_INVALIDATE_VIEW);
							serializePosts();
							continue OUTER;
						}
					}
				}
				else
				{
					// No post number known; match by comment text during the next read.
					userPostPending = new ReadPostsTask.CommentUserPostPending(newPostData.comment);
				}
				extra.userPostPendings.add(userPostPending);
				hasNewPostDatas = true;
			}
			return hasNewPostDatas;
		}
		return false;
	}
	/**
	 * Jumps to a post by number entered in the drawer. Tries the ordinal index
	 * (Nth post in the thread) first, then an exact post-number match; shows a
	 * toast when neither resolves. Returns DrawerForm result flags.
	 */
	@Override
	public int onDrawerNumberEntered(int number)
	{
		PostsAdapter adapter = getAdapter();
		int count = adapter.getCount();
		boolean success = false;
		if (count > 0 && number > 0)
		{
			if (number <= count)
			{
				// Interpret the number as a 1-based ordinal index first.
				int position = adapter.findPositionByOrdinalIndex(number - 1);
				if (position >= 0)
				{
					ListScroller.scrollTo(getListView(), position);
					success = true;
				}
			}
			if (!success)
			{
				// Fall back to treating the number as a literal post number.
				int position = adapter.findPositionByPostNumber(Integer.toString(number));
				if (position >= 0)
				{
					ListScroller.scrollTo(getListView(), position);
					success = true;
				}
				else ToastUtils.show(getActivity(), R.string.message_post_not_found);
			}
		}
		int result = DrawerForm.RESULT_REMOVE_ERROR_MESSAGE;
		if (success) result |= DrawerForm.RESULT_SUCCESS;
		return result;
	}
	// Snapshots which posts are expanded so the state can be restored when the
	// page is recreated (see onDeserializePostsCompleteInternal).
	@Override
	public void onRequestStoreExtra()
	{
		PostsExtra extra = getExtra();
		extra.expandedPosts.clear();
		for (PostItem postItem : getAdapter())
		{
			if (postItem.isExpanded()) extra.expandedPosts.add(postItem.getPostNumber());
		}
	}
	// Requests a scroll to the given post. If no load is in progress and the
	// post is not yet present, a refresh is started to fetch it.
	@Override
	public void updatePageConfiguration(String postNumber, String threadTitle)
	{
		mScrollToPostNumber = postNumber;
		if (mReadTask == null && mDeserializeTask == null)
		{
			if (!scrollToSpecifiedPost(false)) refreshPosts(true, false);
		}
	}
	// Pull-to-refresh gesture: refresh, always showing the pull indicator.
	@Override
	public void onListPulled(PullableWrapper wrapper, PullableWrapper.Side side)
	{
		refreshPosts(true, false, true);
	}
	/**
	 * Scrolls to the post requested via mScrollToPostNumber, if it is present.
	 * The pending number is cleared only on success; returns true when no scroll
	 * remains pending (either done now or never requested).
	 *
	 * @param instantly true jumps without animation; false scrolls smoothly.
	 */
	private boolean scrollToSpecifiedPost(boolean instantly)
	{
		if (mScrollToPostNumber != null)
		{
			int position = getAdapter().findPositionByPostNumber(mScrollToPostNumber);
			if (position >= 0)
			{
				if (instantly) getListView().setSelection(position);
				else ListScroller.scrollTo(getListView(), position);
				mScrollToPostNumber = null;
			}
		}
		return mScrollToPostNumber == null;
	}
	// After the first load, restore the saved list position — but only when no
	// explicit scroll-to-post request takes precedence.
	private void onFirstPostsLoad()
	{
		if (mScrollToPostNumber == null)
		{
			PageHolder pageHolder = getPageHolder();
			if (pageHolder.position != null) pageHolder.position.apply(getListView());
		}
	}
	/**
	 * Post-load bookkeeping: records the thread in history (once), resolves the
	 * original-thread link for archived threads, and refreshes the page title
	 * from the opening post.
	 */
	private void onAfterPostsLoad()
	{
		PostsExtra extra = getExtra();
		if (!extra.isAddedToHistory)
		{
			extra.isAddedToHistory = true;
			PageHolder pageHolder = getPageHolder();
			HistoryDatabase.getInstance().addHistory(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber, pageHolder.threadTitle);
		}
		if (extra.cachedPosts != null)
		{
			// An archived thread may point back to its original; resolve the chan
			// from the URI host to enable the "open the original" menu item.
			Pair<String, Uri> originalThreadData = null;
			Uri archivedThreadUri = extra.cachedPosts.getArchivedThreadUri();
			if (archivedThreadUri != null)
			{
				String chanName = ChanManager.getInstance().getChanNameByHost(archivedThreadUri.getAuthority());
				if (chanName != null) originalThreadData = new Pair<>(chanName, archivedThreadUri);
			}
			// Only refresh the menu when availability flips (null <-> non-null).
			if ((mOriginalThreadData == null) != (originalThreadData == null))
			{
				mOriginalThreadData = originalThreadData;
				updateOptionsMenu(false);
			}
		}
		Iterator<PostItem> iterator = getAdapter().iterator();
		if (iterator.hasNext()) applyTitle(iterator.next().getSubjectOrComment(), false);
	}
	/**
	 * Sets the activity title to the thread title, falling back to a formatted
	 * chan/board/thread string when the title is blank.
	 *
	 * @param activityOnly when false, also propagates the title to the page
	 *        holder, favorites, drawer and history database.
	 */
	private void applyTitle(String title, boolean activityOnly)
	{
		PageHolder pageHolder = getPageHolder();
		if (!StringUtils.isEmptyOrWhitespace(title))
		{
			if (!activityOnly)
			{
				pageHolder.threadTitle = title;
				FavoritesStorage.getInstance().modifyTitle(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber, title, false);
				invalidateDrawerItems(true, true);
				HistoryDatabase.getInstance().refreshTitles(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber, title);
			}
			getActivity().setTitle(title);
		}
		else
		{
			getActivity().setTitle(StringUtils.formatThreadTitle(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber));
		}
	}
	// Shared main-thread handler used to schedule periodic auto refresh.
	private static final Handler HANDLER = new Handler();
	// Refreshes posts (unless a load is already running) and reschedules itself
	// via queueNextRefresh(false).
	private final Runnable mRefreshRunnable = () ->
	{
		if (mDeserializeTask == null && mReadTask == null) refreshPosts(true, false);
		queueNextRefresh(false);
	};
	/**
	 * (Re)schedules the auto refresh runnable according to the preference mode:
	 * globally enabled, or per-thread ("separate") with this page's own settings.
	 * Any previously queued run is cancelled first.
	 *
	 * @param instant true runs the refresh immediately instead of after the interval.
	 */
	private void queueNextRefresh(boolean instant)
	{
		HANDLER.removeCallbacks(mRefreshRunnable);
		int mode = Preferences.getAutoRefreshMode();
		boolean enabled = mode == Preferences.AUTO_REFRESH_MODE_SEPARATE && mAutoRefreshEnabled ||
				mode == Preferences.AUTO_REFRESH_MODE_ENABLED;
		if (enabled)
		{
			// Interval source depends on the mode: per-thread value vs global preference.
			int interval = mode == Preferences.AUTO_REFRESH_MODE_SEPARATE ? mAutoRefreshInterval
					: Preferences.getAutoRefreshInterval();
			if (instant) HANDLER.post(mRefreshRunnable);
			else HANDLER.postDelayed(mRefreshRunnable, interval * 1000);
		}
	}
	// Cancels any scheduled auto refresh run.
	private void stopRefresh()
	{
		HANDLER.removeCallbacks(mRefreshRunnable);
	}
	// Convenience overload: show the bottom pull indicator only when the list
	// already has content (otherwise a full progress view is shown).
	private void refreshPosts(boolean checkModified, boolean reload)
	{
		refreshPosts(checkModified, reload, !getAdapter().isEmpty());
	}
	/**
	 * Starts a ReadPostsTask to (re)load posts, cancelling any read in progress.
	 * While deserialization is still running the refresh is deferred via
	 * extra.forceRefresh instead.
	 *
	 * @param checkModified when partially updating, use a validator to skip
	 *        unchanged responses.
	 * @param reload force a full re-download instead of a partial update.
	 * @param showPull true shows the bottom pull indicator over the existing
	 *        list; false switches to the full progress view.
	 */
	private void refreshPosts(boolean checkModified, boolean reload, boolean showPull)
	{
		PostsExtra extra = getExtra();
		if (mDeserializeTask != null)
		{
			if (!reload) extra.forceRefresh = true;
			return;
		}
		if (mReadTask != null) mReadTask.cancel();
		PageHolder pageHolder = getPageHolder();
		PostsAdapter adapter = getAdapter();
		boolean partialLoading = !adapter.isEmpty();
		boolean useValidator = checkModified && partialLoading && !reload;
		mReadTask = new ReadPostsTask(this, pageHolder.chanName, pageHolder.boardName, pageHolder.threadNumber,
				extra.cachedPosts, useValidator, reload, adapter.getLastPostNumber(), extra.userPostPendings);
		mReadTask.executeOnExecutor(ReadPostsTask.THREAD_POOL_EXECUTOR);
		if (showPull)
		{
			getListView().getWrapper().startBusyState(PullableWrapper.Side.BOTTOM);
			switchView(ViewType.LIST, null);
		}
		else
		{
			getListView().getWrapper().startBusyState(PullableWrapper.Side.BOTH);
			switchView(ViewType.PROGRESS, null);
		}
	}
// Warms up expensive lazy state (comment parsing, hidden-state evaluation)
// for posts near the merge boundary, so scrolling stays smooth: the first
// few incoming posts and the most recent posts already in the adapter.
@Override
public void onRequestPreloadPosts(PostItem[] postItems)
{
	PostsAdapter adapter = getAdapter();
	int existingCount = adapter.getCount();
	int threshold = ListScroller.getJumpThreshold(getActivity());
	int preloadNewCount = Math.min(threshold / 4, postItems.length);
	int preloadOldCount = Math.min(threshold, existingCount);
	// Prepare the first of the freshly received posts.
	for (int index = 0; index < preloadNewCount; index++)
	{
		PostItem preloadItem = postItems[index];
		preloadItem.getComment();
		preloadItem.isHidden(mHidePerformer);
	}
	// Prepare the last posts already shown, walking backwards from the end.
	for (int index = 0; index < preloadOldCount; index++)
	{
		PostItem preloadItem = adapter.getItem(existingCount - index - 1);
		if (preloadItem != null)
		{
			preloadItem.getComment();
			preloadItem.isHidden(mHidePerformer);
		}
	}
}
// Callback from DeserializePostsTask: posts were restored from the cache.
@Override
public void onDeserializePostsComplete(boolean success, Posts posts, ArrayList<PostItem> postItems)
{
	mDeserializeTask = null;
	getListView().getWrapper().cancelBusyState();
	switchView(ViewType.LIST, null);
	if (success && postItems != null)
	{
		// Rebuild the set of the user's own post numbers from the cached items.
		PostsExtra extra = getExtra();
		extra.userPostNumbers.clear();
		for (PostItem postItem : postItems)
		{
			if (postItem.isUserPost()) extra.userPostNumbers.add(postItem.getPostNumber());
		}
	}
	onDeserializePostsCompleteInternal(success, posts, postItems, false);
}
// Applies deserialized (or initially provided) posts to the adapter and
// restores per-thread UI state. isLoadedExplicitly is true when the data came
// directly from the page extra rather than from the background task. On
// failure falls back to a network refresh.
private void onDeserializePostsCompleteInternal(boolean success, Posts posts, ArrayList<PostItem> postItems,
		boolean isLoadedExplicitly)
{
	PostsAdapter adapter = getAdapter();
	PostsExtra extra = getExtra();
	extra.cachedPosts = null;
	extra.cachedPostItems.clear();
	if (success)
	{
		mHidePerformer.decodeLocalAutohide(posts);
		extra.cachedPosts = posts;
		extra.cachedPostItems.addAll(postItems);
		// Wrap every item in a patch so the adapter treats this as a full set.
		ArrayList<ReadPostsTask.Patch> patches = new ArrayList<>();
		for (int i = 0; i < postItems.size(); i++) patches.add(new ReadPostsTask.Patch(postItems.get(i), i));
		adapter.setItems(patches, isLoadedExplicitly);
		// Restore which posts were expanded before recreation.
		for (PostItem postItem : adapter)
		{
			if (extra.expandedPosts.contains(postItem.getPostNumber())) postItem.setExpanded(true);
		}
		Pair<Boolean, Integer> autoRefreshData = posts.getAutoRefreshData();
		mAutoRefreshEnabled = autoRefreshData.first;
		// Clamp the stored interval into the allowed preference range.
		mAutoRefreshInterval = Math.min(Math.max(autoRefreshData.second, Preferences.MIN_AUTO_REFRESH_INTERVAL),
				Preferences.MAX_AUTO_REFRESH_INTERVAL);
		onFirstPostsLoad();
		onAfterPostsLoad();
		showScaleAnimation();
		scrollToSpecifiedPost(true);
		// Perform the refresh that was requested while deserialization ran.
		if (extra.forceRefresh)
		{
			extra.forceRefresh = false;
			refreshPosts(true, false);
		}
		queueNextRefresh(false);
	}
	else refreshPosts(false, false);
	updateOptionsMenu(false);
}
// Callback from ReadPostsTask: fresh thread data arrived from the server.
// fullThread — the whole thread was loaded (first open / reload) instead of an
// incremental merge; removedUserPostPendings — pending user posts that were
// matched against server posts by this read and can be dropped.
@Override
public void onReadPostsSuccess(ReadPostsTask.Result result, boolean fullThread,
		ArrayList<ReadPostsTask.UserPostPending> removedUserPostPendings)
{
	mReadTask = null;
	getListView().getWrapper().cancelBusyState();
	switchView(ViewType.LIST, null);
	PostsAdapter adapter = getAdapter();
	PageHolder pageHolder = getPageHolder();
	// First successful load of this thread counts as a view in statistics.
	if (adapter.isEmpty()) StatisticsManager.getInstance().incrementViews(pageHolder.chanName);
	PostsExtra extra = getExtra();
	boolean wasEmpty = adapter.isEmpty();
	// Position of the first newly appended post, used by the "show" toast below.
	final int newPostPosition = adapter.getCount();
	if (removedUserPostPendings != null)
	{
		for (ReadPostsTask.UserPostPending userPostPending : removedUserPostPendings)
		{
			extra.userPostPendings.remove(userPostPending);
		}
	}
	if (fullThread)
	{
		// Thread was opened for the first time
		extra.cachedPosts = result.posts;
		extra.cachedPostItems.clear();
		extra.userPostNumbers.clear();
		for (ReadPostsTask.Patch patch : result.patches)
		{
			extra.cachedPostItems.add(patch.postItem);
			if (patch.newPost.isUserPost()) extra.userPostNumbers.add(patch.newPost.getPostNumber());
		}
		adapter.setItems(result.patches, false);
		boolean allowCache = CacheManager.getInstance().allowPagesCache(pageHolder.chanName);
		if (allowCache)
		{
			for (PostItem postItem : extra.cachedPostItems) postItem.setUnread(true);
		}
		onFirstPostsLoad();
	}
	else
	{
		if (extra.cachedPosts != null)
		{
			// Copy data from old model to new model
			Pair<Boolean, Integer> autoRefreshData = extra.cachedPosts.getAutoRefreshData();
			result.posts.setAutoRefreshData(autoRefreshData.first, autoRefreshData.second);
			result.posts.setLocalAutohide(extra.cachedPosts.getLocalAutohide());
		}
		extra.cachedPosts = result.posts;
		// Number of new posts that reply to one of the user's own posts.
		int repliesCount = 0;
		if (!result.patches.isEmpty())
		{
			// Copy data from old model to new model
			for (ReadPostsTask.Patch patch : result.patches)
			{
				if (patch.oldPost != null)
				{
					if (patch.oldPost.isUserPost()) patch.newPost.setUserPost(true);
					if (patch.oldPost.isHidden()) patch.newPost.setHidden(true);
					if (patch.oldPost.isShown()) patch.newPost.setHidden(false);
				}
			}
			// Track user posts and count replies to them among appended posts.
			for (ReadPostsTask.Patch patch : result.patches)
			{
				if (patch.newPost.isUserPost()) extra.userPostNumbers.add(patch.newPost.getPostNumber());
				if (patch.newPostAddedToEnd)
				{
					HashSet<String> referencesTo = patch.postItem.getReferencesTo();
					if (referencesTo != null)
					{
						for (String postNumber : referencesTo)
						{
							if (extra.userPostNumbers.contains(postNumber))
							{
								repliesCount++;
								break;
							}
						}
					}
				}
			}
			adapter.mergeItems(result.patches);
			extra.cachedPostItems.clear();
			for (PostItem postItem : adapter) extra.cachedPostItems.add(postItem);
			// Mark changed posts as unread
			for (ReadPostsTask.Patch patch : result.patches) patch.postItem.setUnread(true);
		}
		// Build and show a summary toast about new/replied/deleted/edited posts.
		if (result.newCount > 0 || repliesCount > 0 || result.deletedCount > 0 || result.hasEdited)
		{
			StringBuilder message = new StringBuilder();
			if (repliesCount > 0 || result.deletedCount > 0)
			{
				message.append(getQuantityString(R.plurals.text_new_posts_count_short_format,
						result.newCount, result.newCount));
				if (repliesCount > 0)
				{
					message.append(", ").append(getQuantityString(R.plurals.text_replies_count_format,
							repliesCount, repliesCount));
				}
				if (result.deletedCount > 0)
				{
					message.append(", ").append(getQuantityString(R.plurals.text_deleted_count_format,
							result.deletedCount, result.deletedCount));
				}
			}
			else if (result.newCount > 0)
			{
				message.append(getQuantityString(R.plurals.text_new_posts_count_format,
						result.newCount, result.newCount));
			}
			else
			{
				message.append(getString(R.string.message_edited_posts));
			}
			if (result.newCount > 0)
			{
				// Offer a one-tap jump to the first new post.
				ClickableToast.show(getActivity(), message, getString(R.string.action_show), () ->
				{
					if (!isDestroyed()) ListScroller.scrollTo(getListView(), newPostPosition);
				}, true);
			}
			else ClickableToast.show(getActivity(), message);
		}
	}
	boolean updateAdapters = result.newCount > 0 || result.deletedCount > 0 || result.hasEdited;
	serializePosts();
	if (result.hasEdited)
	{
		// Remember which posts were edited (excluding freshly appended ones).
		mLastEditedPostNumbers.clear();
		for (ReadPostsTask.Patch patch : result.patches)
		{
			if (!patch.newPostAddedToEnd) mLastEditedPostNumbers.add(patch.newPost.getPostNumber());
		}
	}
	if (FavoritesStorage.getInstance().hasFavorite(pageHolder.chanName, pageHolder.boardName,
			pageHolder.threadNumber))
	{
		FavoritesStorage.getInstance().modifyPostsCount(pageHolder.chanName, pageHolder.boardName,
				pageHolder.threadNumber, adapter.getExistingPostsCount());
		// Invalidate for ThreadsWatcher
		invalidateDrawerItems(false, true);
	}
	if (updateAdapters)
	{
		getUiManager().dialog().updateAdapters();
		notifyAllAdaptersChanged();
	}
	onAfterPostsLoad();
	if (wasEmpty && !adapter.isEmpty()) showScaleAnimation();
	scrollToSpecifiedPost(wasEmpty);
	mScrollToPostNumber = null;
	updateOptionsMenu(false);
}
// Callback from ReadPostsTask: the server returned no posts. Only treated as
// an error when nothing is displayed yet.
@Override
public void onReadPostsEmpty()
{
	mReadTask = null;
	getListView().getWrapper().cancelBusyState();
	switchView(ViewType.LIST, null);
	if (getAdapter().isEmpty()) displayDownloadError(true, getString(R.string.message_empty_response));
}
// Callback from ReadPostsTask: the thread moved. Close this page and open the
// redirect target instead.
@Override
public void onReadPostsRedirect(String boardName, String threadNumber, String postNumber)
{
	mReadTask = null;
	getListView().getWrapper().cancelBusyState();
	PageHolder pageHolder = getPageHolder();
	removeCurrentPage();
	getUiManager().navigator().navigatePosts(pageHolder.chanName, boardName, threadNumber, postNumber, null, false);
}
// Callback from ReadPostsTask: the load failed; show the error and drop any
// pending scroll target.
@Override
public void onReadPostsFail(ErrorItem errorItem)
{
	mReadTask = null;
	getListView().getWrapper().cancelBusyState();
	displayDownloadError(true, errorItem.toString());
	mScrollToPostNumber = null;
}
// Reports a download error: as a toast when posts are already visible (so the
// list is not replaced), otherwise as the full-screen error view.
private void displayDownloadError(boolean show, String message)
{
	boolean hasVisiblePosts = getAdapter().getCount() > 0;
	if (show && hasVisiblePosts)
	{
		// Keep the posts on screen and report the failure unobtrusively.
		ClickableToast.show(getActivity(), message);
	}
	else
	{
		switchView(ViewType.ERROR, message);
	}
}
// Recursively hides the given post and every post replying to it (directly or
// transitively). Newly hidden items are collected into postItemsToInvalidate
// so the caller can refresh their views afterwards.
private void hidePostAndReplies(PostItem postItem, ArrayList<PostItem> postItemsToInvalidate)
{
	// Hide this post unless it is already hidden at the model level.
	if (!postItem.getPost().isHidden())
	{
		postItem.setHidden(true);
		postItemsToInvalidate.add(postItem);
	}
	LinkedHashSet<String> referencesFrom = postItem.getReferencesFrom();
	if (referencesFrom == null)
	{
		return;
	}
	// Descend into each post that references (replies to) this one.
	PostsAdapter adapter = getAdapter();
	for (String replyPostNumber : referencesFrom)
	{
		PostItem replyItem = adapter.findPostItem(replyPostNumber);
		if (replyItem != null) hidePostAndReplies(replyItem, postItemsToInvalidate);
	}
}
// UiManager observer entry point: dispatches per-post messages (invalidate,
// serialize, mark/hide operations, thumbnail loading) for a single PostItem.
@Override
public void onPostItemMessage(PostItem postItem, int message)
{
	// Index of the post's view in the list, or ListView.INVALID_POSITION when
	// the post is not currently displayed.
	int position = getUiManager().view().findViewIndex(getListView(), postItem);
	switch (message)
	{
		case UiManager.MESSAGE_INVALIDATE_VIEW:
		{
			getAdapter().notifyDataSetChanged();
			break;
		}
		case UiManager.MESSAGE_INVALIDATE_COMMENT_VIEW:
		{
			getUiManager().view().invalidateComment(getListView(), position);
			break;
		}
		case UiManager.MESSAGE_PERFORM_SERIALIZE:
		{
			// Persist only when the post is actually part of this page.
			if (position != ListView.INVALID_POSITION) serializePosts();
			break;
		}
		case UiManager.MESSAGE_PERFORM_USER_MARK_UPDATE:
		{
			// Keep the set of the user's own post numbers in sync with the flag.
			PostsExtra extra = getExtra();
			if (postItem.isUserPost()) extra.userPostNumbers.add(postItem.getPostNumber());
			else extra.userPostNumbers.remove(postItem.getPostNumber());
			break;
		}
		case UiManager.MESSAGE_PERFORM_CASCADE_HIDE:
		{
			if (position != ListView.INVALID_POSITION)
			{
				// Hide the post plus its reply tree, then refresh affected views.
				ArrayList<PostItem> postItemsToInvalidate = new ArrayList<>();
				hidePostAndReplies(postItem, postItemsToInvalidate);
				UiManager uiManager = getUiManager();
				for (PostItem invalidatePostItem : postItemsToInvalidate)
				{
					uiManager.sendPostItemMessage(invalidatePostItem, UiManager.MESSAGE_INVALIDATE_VIEW);
				}
			}
			break;
		}
		case UiManager.MESSAGE_PERFORM_HIDE_NAME:
		case UiManager.MESSAGE_PERFORM_HIDE_SIMILAR:
		{
			// Add an autohide rule (by name or by similarity) and re-evaluate
			// hidden states; preloading is paused while the adapter is rebuilt.
			PostsAdapter adapter = getAdapter();
			adapter.cancelPreloading();
			boolean success;
			if (message == UiManager.MESSAGE_PERFORM_HIDE_NAME) success = mHidePerformer.addHideByName(postItem);
			else success = mHidePerformer.addHideSimilar(postItem);
			if (success)
			{
				postItem.resetHidden();
				adapter.invalidateHidden();
				notifyAllAdaptersChanged();
				mHidePerformer.encodeLocalAutohide(getExtra().cachedPosts);
				serializePosts();
			}
			adapter.preloadPosts(getListView().getFirstVisiblePosition());
			break;
		}
		case UiManager.MESSAGE_PERFORM_LOAD_THUMBNAIL:
		{
			getUiManager().view().displayThumbnail(getListView(), position, postItem.getAttachmentItems(), true);
			break;
		}
	}
}
// Persists the cached posts model for this thread to the page cache.
private void serializePosts()
{
	PageHolder pageHolder = getPageHolder();
	CacheManager.getInstance().serializePosts(pageHolder.chanName, pageHolder.boardName,
			pageHolder.threadNumber, getExtra().cachedPosts);
}
// Per-page state that survives page recreation: cached posts/items, the
// user's own posts bookkeeping, expanded posts and pending flags. Only a
// subset is parceled; cachedPosts/cachedPostItems are restored from the
// serialized cache instead.
public static class PostsExtra implements PageHolder.ParcelableExtra
{
	// Last deserialized/merged posts model (not parceled).
	public Posts cachedPosts;
	// Display items built from cachedPosts (not parceled).
	public final ArrayList<PostItem> cachedPostItems = new ArrayList<>();
	// Numbers of posts written by the user in this thread.
	public final HashSet<String> userPostNumbers = new HashSet<>();
	// Sent posts not yet matched against server data.
	public final ArrayList<ReadPostsTask.UserPostPending> userPostPendings = new ArrayList<>();
	// Numbers of posts whose comment is expanded in the UI.
	public final HashSet<String> expandedPosts = new HashSet<>();
	public boolean isAddedToHistory = false;
	// Set when a refresh was requested while deserialization was in progress.
	public boolean forceRefresh = false;

	// NOTE: readFromParcel must consume values in exactly this write order.
	@Override
	public void writeToParcel(Parcel dest)
	{
		dest.writeList(userPostPendings);
		dest.writeStringArray(CommonUtils.toArray(expandedPosts, String.class));
		dest.writeInt(isAddedToHistory ? 1 : 0);
		dest.writeInt(forceRefresh ? 1 : 0);
	}

	@Override
	public void readFromParcel(Parcel source)
	{
		@SuppressWarnings("unchecked")
		ArrayList<ReadPostsTask.UserPostPending> userPostPendings = source
				.readArrayList(PostsExtra.class.getClassLoader());
		if (userPostPendings.size() > 0) this.userPostPendings.addAll(userPostPendings);
		String[] data = source.createStringArray();
		if (data != null) Collections.addAll(expandedPosts, data);
		isAddedToHistory = source.readInt() != 0;
		forceRefresh = source.readInt() != 0;
	}
}
// Returns this page's PostsExtra, lazily replacing a missing or foreign
// extra object with a fresh instance.
private PostsExtra getExtra()
{
	PageHolder pageHolder = getPageHolder();
	if (!(pageHolder.extra instanceof PostsExtra))
	{
		pageHolder.extra = new PostsExtra();
	}
	return (PostsExtra) pageHolder.extra;
}
} | src/com/mishiranu/dashchan/ui/navigator/page/PostsPage.java | /*
* Copyright 2014-2016 Fukurou Mishiranu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mishiranu.dashchan.ui.navigator.page;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Locale;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Typeface;
import android.net.Uri;
import android.os.Handler;
import android.os.Parcel;
import android.support.v4.content.LocalBroadcastManager;
import android.util.Pair;
import android.view.ActionMode;
import android.view.ContextThemeWrapper;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SubMenu;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import chan.content.ChanConfiguration;
import chan.content.ChanLocator;
import chan.content.ChanManager;
import chan.content.model.Posts;
import chan.util.CommonUtils;
import chan.util.StringUtils;
import com.mishiranu.dashchan.C;
import com.mishiranu.dashchan.R;
import com.mishiranu.dashchan.content.CacheManager;
import com.mishiranu.dashchan.content.HidePerformer;
import com.mishiranu.dashchan.content.ImageLoader;
import com.mishiranu.dashchan.content.StatisticsManager;
import com.mishiranu.dashchan.content.async.DeserializePostsTask;
import com.mishiranu.dashchan.content.async.ReadPostsTask;
import com.mishiranu.dashchan.content.model.AttachmentItem;
import com.mishiranu.dashchan.content.model.ErrorItem;
import com.mishiranu.dashchan.content.model.GalleryItem;
import com.mishiranu.dashchan.content.model.PostItem;
import com.mishiranu.dashchan.content.service.PostingService;
import com.mishiranu.dashchan.content.storage.FavoritesStorage;
import com.mishiranu.dashchan.content.storage.HistoryDatabase;
import com.mishiranu.dashchan.preference.Preferences;
import com.mishiranu.dashchan.preference.SeekBarPreference;
import com.mishiranu.dashchan.ui.navigator.DrawerForm;
import com.mishiranu.dashchan.ui.navigator.adapter.PostsAdapter;
import com.mishiranu.dashchan.ui.navigator.manager.ThreadshotPerformer;
import com.mishiranu.dashchan.ui.navigator.manager.UiManager;
import com.mishiranu.dashchan.ui.posting.Replyable;
import com.mishiranu.dashchan.util.NavigationUtils;
import com.mishiranu.dashchan.util.ResourceUtils;
import com.mishiranu.dashchan.util.SearchHelper;
import com.mishiranu.dashchan.util.ToastUtils;
import com.mishiranu.dashchan.widget.ClickableToast;
import com.mishiranu.dashchan.widget.ListPosition;
import com.mishiranu.dashchan.widget.ListScroller;
import com.mishiranu.dashchan.widget.PullableListView;
import com.mishiranu.dashchan.widget.PullableWrapper;
public class PostsPage extends ListPage<PostsAdapter> implements FavoritesStorage.Observer, UiManager.Observer,
DeserializePostsTask.Callback, ReadPostsTask.Callback, ActionMode.Callback
{
// Background task restoring posts from the serialized cache (null when idle).
private DeserializePostsTask mDeserializeTask;
// Background task loading posts from the network (null when idle).
private ReadPostsTask mReadTask;

// Non-null when posting is allowed on this board.
private Replyable mReplyable;
// Evaluates autohide rules for posts on this page.
private HidePerformer mHidePerformer;
// Chan name and URI of the original thread (for mirrored/archived copies).
private Pair<String, Uri> mOriginalThreadData;

// Post number to scroll to once posts are displayed.
private String mScrollToPostNumber;

// Active multi-select action mode; null when not selecting.
private ActionMode mSelectionMode;

// Action-bar view hosting the search result counter and back/forward buttons.
private LinearLayout mSearchController;
private TextView mSearchTextResult;
// Adapter positions of posts matching the current search query.
private final ArrayList<Integer> mSearchFoundPosts = new ArrayList<>();
private boolean mSearching = false;
private int mSearchLastPosition;

// Auto-refresh state used in AUTO_REFRESH_MODE_SEPARATE (per-thread) mode;
// interval is in seconds.
private int mAutoRefreshInterval = 30;
private boolean mAutoRefreshEnabled = false;

// Post numbers edited during the last incremental update.
private final ArrayList<String> mLastEditedPostNumbers = new ArrayList<>();
// Local broadcast receiver: when the gallery asks to jump to a post, scroll
// this page's list there — but only if the broadcast targets this exact
// chan/board/thread.
private final BroadcastReceiver mGalleryPagerReceiver = new BroadcastReceiver()
{
	@Override
	public void onReceive(Context context, Intent intent)
	{
		String chanName = intent.getStringExtra(C.EXTRA_CHAN_NAME);
		String boardName = intent.getStringExtra(C.EXTRA_BOARD_NAME);
		String threadNumber = intent.getStringExtra(C.EXTRA_THREAD_NUMBER);
		PageHolder pageHolder = getPageHolder();
		if (pageHolder.chanName.equals(chanName) && StringUtils.equals(pageHolder.boardName, boardName)
				&& pageHolder.threadNumber.equals(threadNumber))
		{
			String postNumber = intent.getStringExtra(C.EXTRA_POST_NUMBER);
			int position = getAdapter().findPositionByPostNumber(postNumber);
			if (position >= 0) ListScroller.scrollTo(getListView(), position);
		}
	}
};
// Page setup: builds the adapter and search controller, registers observers
// and receivers, then either applies already-available cached posts or starts
// background deserialization.
@Override
protected void onCreate()
{
	Activity activity = getActivity();
	PullableListView listView = getListView();
	PageHolder pageHolder = getPageHolder();
	UiManager uiManager = getUiManager();
	mHidePerformer = new HidePerformer();
	PostsExtra extra = getExtra();
	listView.setDivider(ResourceUtils.getDrawable(activity, R.attr.postsDivider, 0));
	ChanConfiguration.Board board = getChanConfiguration().safe().obtainBoard(pageHolder.boardName);
	// Posting is only wired up when the board allows it.
	if (board.allowPosting)
	{
		mReplyable = (data) ->
		{
			getUiManager().navigator().navigatePosting(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber, data);
		};
	}
	PostsAdapter adapter = new PostsAdapter(activity, pageHolder.chanName, pageHolder.boardName, uiManager,
			mReplyable, mHidePerformer, extra.userPostNumbers, listView);
	initAdapter(adapter, adapter);
	listView.getWrapper().setPullSides(PullableWrapper.Side.BOTH);
	uiManager.observable().register(this);
	applyTitle(pageHolder.threadTitle, true);
	// Build the action-bar search controller (result counter + back/forward
	// buttons) using the dark theme so it matches the action bar.
	Context darkStyledContext = new ContextThemeWrapper(activity, R.style.Theme_General_Main_Dark);
	mSearchController = new LinearLayout(darkStyledContext);
	mSearchController.setOrientation(LinearLayout.HORIZONTAL);
	mSearchController.setGravity(Gravity.CENTER_VERTICAL);
	float density = ResourceUtils.obtainDensity(getResources());
	int padding = (int) (10f * density);
	mSearchTextResult = new TextView(darkStyledContext, null, android.R.attr.textAppearanceLarge);
	mSearchTextResult.setTextSize(11f);
	mSearchTextResult.setTypeface(null, Typeface.BOLD);
	mSearchTextResult.setPadding((int) (4f * density), 0, (int) (4f * density), 0);
	mSearchController.addView(mSearchTextResult, LinearLayout.LayoutParams.WRAP_CONTENT,
			LinearLayout.LayoutParams.WRAP_CONTENT);
	ImageView backButtonView = new ImageView(darkStyledContext, null, android.R.attr.borderlessButtonStyle);
	backButtonView.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
	backButtonView.setImageResource(obtainIcon(R.attr.actionBack));
	backButtonView.setPadding(padding, padding, padding, padding);
	backButtonView.setOnClickListener(v -> findBack());
	mSearchController.addView(backButtonView, (int) (48f * density), (int) (48f * density));
	if (C.API_LOLLIPOP)
	{
		// Tighten spacing between the two buttons on Lollipop+.
		LinearLayout.LayoutParams layoutParams = (LinearLayout.LayoutParams) backButtonView.getLayoutParams();
		layoutParams.leftMargin = (int) (2f * density);
		layoutParams.rightMargin = -(int) (8f * density);
	}
	ImageView forwardButtonView = new ImageView(darkStyledContext, null, android.R.attr.borderlessButtonStyle);
	forwardButtonView.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
	forwardButtonView.setImageResource(obtainIcon(R.attr.actionForward));
	forwardButtonView.setPadding(padding, padding, padding, padding);
	forwardButtonView.setOnClickListener(v -> findForward());
	mSearchController.addView(forwardButtonView, (int) (48f * density), (int) (48f * density));
	mScrollToPostNumber = pageHolder.initialPostNumber;
	FavoritesStorage.getInstance().getObservable().register(this);
	LocalBroadcastManager.getInstance(activity).registerReceiver(mGalleryPagerReceiver,
			new IntentFilter(C.ACTION_GALLERY_GO_TO_POST));
	boolean hasNewPostDatas = handleNewPostDatas();
	// A refresh is forced when new posts were sent or cache use was declined.
	extra.forceRefresh = hasNewPostDatas || !pageHolder.initialFromCache;
	if (extra.cachedPosts != null && extra.cachedPostItems.size() > 0)
	{
		// Cached data already in memory: apply it directly.
		onDeserializePostsCompleteInternal(true, extra.cachedPosts, new ArrayList<>(extra.cachedPostItems), true);
	}
	else
	{
		// Restore posts from the serialized cache in the background.
		mDeserializeTask = new DeserializePostsTask(this, pageHolder.chanName, pageHolder.boardName,
				pageHolder.threadNumber, extra.cachedPosts);
		mDeserializeTask.executeOnExecutor(DeserializePostsTask.THREAD_POOL_EXECUTOR);
		getListView().getWrapper().startBusyState(PullableWrapper.Side.BOTH);
		switchView(ViewType.PROGRESS, null);
	}
	pageHolder.setInitialPostsData(false, null);
}
// Resume auto-refresh immediately when the page becomes visible.
@Override
protected void onResume()
{
	queueNextRefresh(true);
}
// Stop auto-refresh while the page is not visible.
@Override
protected void onPause()
{
	stopRefresh();
}
// Page teardown: cancel background tasks and unregister all observers and
// receivers registered in onCreate.
@Override
protected void onDestroy()
{
	getAdapter().cleanup();
	LocalBroadcastManager.getInstance(getActivity()).unregisterReceiver(mGalleryPagerReceiver);
	getUiManager().observable().unregister(this);
	if (mDeserializeTask != null)
	{
		mDeserializeTask.cancel();
		mDeserializeTask = null;
	}
	if (mReadTask != null)
	{
		mReadTask.cancel();
		mReadTask = null;
	}
	// Drop queued image loads for this chan; they are no longer needed.
	ImageLoader.getInstance().clearTasks(getPageHolder().chanName);
	FavoritesStorage.getInstance().getObservable().unregister(this);
}
// Called when new posts were sent from this client; refresh to pick them up.
@Override
protected void onHandleNewPostDatas()
{
	boolean hasNewPostDatas = handleNewPostDatas();
	if (hasNewPostDatas) refreshPosts(true, false);
}
// List item click: toggles selection while in multi-select mode, otherwise
// delegates to the shared post click handler.
@Override
public void onItemClick(View view, int position, long id)
{
	if (mSelectionMode != null)
	{
		getAdapter().toggleItemSelected(getListView(), position);
		// Reflect the updated selection count in the action mode title.
		mSelectionMode.setTitle(getString(R.string.text_selected_format, getAdapter().getSelectedCount()));
		return;
	}
	PostsAdapter adapter = getAdapter();
	PostItem postItem = adapter.getItem(position);
	if (postItem != null) getUiManager().interaction().handlePostClick(view, postItem, adapter.getItems());
}
// List item long click: opens the post context menu, unless multi-select
// action mode is active (then long clicks are ignored).
@Override
public boolean onItemLongClick(View view, int position, long id)
{
	if (mSelectionMode != null)
	{
		return false;
	}
	PostItem clickedItem = getAdapter().getItem(position);
	if (clickedItem == null)
	{
		return false;
	}
	// Delegate to the shared context-menu handler.
	return getUiManager().interaction().handlePostContextMenu(clickedItem, mReplyable, true, true);
}
// Options menu item identifiers. Text/icon favorite pairs exist because the
// action is shown as text on phones and as an icon on tablets/landscape.
private static final int OPTIONS_MENU_ADD_POST = 0;
private static final int OPTIONS_MENU_GALLERY = 1;
private static final int OPTIONS_MENU_SELECT = 2;
private static final int OPTIONS_MENU_REFRESH = 3;
private static final int OPTIONS_MENU_THREAD_OPTIONS = 4;
private static final int OPTIONS_MENU_ADD_TO_FAVORITES_TEXT = 5;
private static final int OPTIONS_MENU_REMOVE_FROM_FAVORITES_TEXT = 6;
private static final int OPTIONS_MENU_ADD_TO_FAVORITES_ICON = 7;
private static final int OPTIONS_MENU_REMOVE_FROM_FAVORITES_ICON = 8;
private static final int OPTIONS_MENU_OPEN_ORIGINAL_THREAD = 9;
private static final int OPTIONS_MENU_ARCHIVE = 10;
private static final int OPTIONS_MENU_SEARCH_CONTROLLER = 11;

// "Thread options" submenu item identifiers (offset to avoid clashes).
private static final int THREAD_OPTIONS_MENU_RELOAD = 200;
private static final int THREAD_OPTIONS_MENU_AUTO_REFRESH = 201;
private static final int THREAD_OPTIONS_MENU_HIDDEN_POSTS = 202;
private static final int THREAD_OPTIONS_MENU_CLEAR_DELETED = 203;
private static final int THREAD_OPTIONS_MENU_SUMMARY = 204;
// Builds the full options menu; visibility of individual items is decided
// later in onPrepareOptionsMenu.
@Override
public void onCreateOptionsMenu(Menu menu)
{
	menu.add(0, OPTIONS_MENU_ADD_POST, 0, R.string.action_add_post).setIcon(obtainIcon(R.attr.actionAddPost))
			.setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
	menu.add(0, OPTIONS_MENU_SEARCH, 0, R.string.action_search)
			.setShowAsAction(MenuItem.SHOW_AS_ACTION_COLLAPSE_ACTION_VIEW);
	menu.add(0, OPTIONS_MENU_GALLERY, 0, R.string.action_gallery);
	menu.add(0, OPTIONS_MENU_SELECT, 0, R.string.action_select);
	menu.add(0, OPTIONS_MENU_REFRESH, 0, R.string.action_refresh).setIcon(obtainIcon(R.attr.actionRefresh))
			.setShowAsAction(MenuItem.SHOW_AS_ACTION_IF_ROOM);
	menu.addSubMenu(0, OPTIONS_MENU_APPEARANCE, 0, R.string.action_appearance);
	SubMenu threadOptions = menu.addSubMenu(0, OPTIONS_MENU_THREAD_OPTIONS, 0, R.string.action_thread_options);
	// Both text and icon variants are added; one of each pair is hidden later.
	menu.add(0, OPTIONS_MENU_ADD_TO_FAVORITES_TEXT, 0, R.string.action_add_to_favorites);
	menu.add(0, OPTIONS_MENU_REMOVE_FROM_FAVORITES_TEXT, 0, R.string.action_remove_from_favorites);
	menu.add(0, OPTIONS_MENU_ADD_TO_FAVORITES_ICON, 0, R.string.action_add_to_favorites)
			.setIcon(obtainIcon(R.attr.actionAddToFavorites)).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
	menu.add(0, OPTIONS_MENU_REMOVE_FROM_FAVORITES_ICON, 0, R.string.action_remove_from_favorites)
			.setIcon(obtainIcon(R.attr.actionRemoveFromFavorites)).setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
	menu.add(0, OPTIONS_MENU_OPEN_ORIGINAL_THREAD, 0, R.string.action_open_the_original);
	menu.add(0, OPTIONS_MENU_ARCHIVE, 0, R.string.action_archive_add);
	// Hosts the custom search controller view built in onCreate.
	menu.add(0, OPTIONS_MENU_SEARCH_CONTROLLER, 0, null).setActionView(mSearchController)
			.setShowAsAction(MenuItem.SHOW_AS_ACTION_ALWAYS);
	threadOptions.add(0, THREAD_OPTIONS_MENU_RELOAD, 0, R.string.action_reload);
	threadOptions.add(0, THREAD_OPTIONS_MENU_AUTO_REFRESH, 0, R.string.action_auto_refresh).setCheckable(true);
	threadOptions.add(0, THREAD_OPTIONS_MENU_HIDDEN_POSTS, 0, R.string.action_hidden_posts);
	threadOptions.add(0, THREAD_OPTIONS_MENU_CLEAR_DELETED, 0, R.string.action_clear_deleted);
	threadOptions.add(0, THREAD_OPTIONS_MENU_SUMMARY, 0, R.string.action_summary);
}
// Adjusts menu item visibility for the current state: in search mode only the
// search items are shown; otherwise items are toggled based on favorites
// state, layout, archive support and auto-refresh preferences.
@Override
public void onPrepareOptionsMenu(Menu menu)
{
	if (mSearching)
	{
		// Hide everything except the search entry and the search controller.
		for (int i = 0; i < menu.size(); i++) menu.getItem(i).setVisible(false);
		menu.findItem(OPTIONS_MENU_SEARCH).setVisible(true);
		menu.findItem(OPTIONS_MENU_SEARCH_CONTROLLER).setVisible(true);
	}
	else
	{
		for (int i = 0; i < menu.size(); i++) menu.getItem(i).setVisible(true);
		PageHolder pageHolder = getPageHolder();
		menu.findItem(OPTIONS_MENU_ADD_POST).setVisible(mReplyable != null);
		boolean isFavorite = FavoritesStorage.getInstance().hasFavorite(pageHolder.chanName, pageHolder.boardName,
				pageHolder.threadNumber);
		// Tablets/landscape show the favorite toggle as an icon, phones as text.
		boolean iconFavorite = ResourceUtils.isTabletOrLandscape(getResources().getConfiguration());
		menu.findItem(OPTIONS_MENU_ADD_TO_FAVORITES_TEXT).setVisible(!iconFavorite && !isFavorite);
		menu.findItem(OPTIONS_MENU_REMOVE_FROM_FAVORITES_TEXT).setVisible(!iconFavorite && isFavorite);
		menu.findItem(OPTIONS_MENU_ADD_TO_FAVORITES_ICON).setVisible(iconFavorite && !isFavorite);
		menu.findItem(OPTIONS_MENU_REMOVE_FROM_FAVORITES_ICON).setVisible(iconFavorite && isFavorite);
		menu.findItem(OPTIONS_MENU_OPEN_ORIGINAL_THREAD).setVisible(mOriginalThreadData != null);
		menu.findItem(OPTIONS_MENU_ARCHIVE).setVisible(ChanManager.getInstance()
				.canBeArchived(pageHolder.chanName));
		menu.findItem(OPTIONS_MENU_SEARCH_CONTROLLER).setVisible(false);
		// The per-thread auto-refresh toggle only exists in SEPARATE mode and
		// needs loaded posts to be meaningful.
		menu.findItem(THREAD_OPTIONS_MENU_AUTO_REFRESH).setVisible(Preferences.getAutoRefreshMode()
				== Preferences.AUTO_REFRESH_MODE_SEPARATE).setEnabled(!getAdapter().isEmpty())
				.setChecked(mAutoRefreshEnabled);
		menu.findItem(THREAD_OPTIONS_MENU_HIDDEN_POSTS).setEnabled(mHidePerformer.hasLocalAutohide());
		menu.findItem(THREAD_OPTIONS_MENU_CLEAR_DELETED).setEnabled(getAdapter().hasDeletedPosts());
	}
}
// Handles all options-menu and thread-options-submenu selections. Returns
// true when the item was consumed.
@Override
public boolean onOptionsItemSelected(MenuItem item)
{
	Activity activity = getActivity();
	PageHolder pageHolder = getPageHolder();
	PostsAdapter adapter = getAdapter();
	switch (item.getItemId())
	{
		case OPTIONS_MENU_ADD_POST:
		{
			getUiManager().navigator().navigatePosting(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber);
			return true;
		}
		case OPTIONS_MENU_GALLERY:
		{
			// Open the gallery at the image closest to the current scroll
			// position: search outward from the first visible post in both
			// directions for the first post with an image.
			int imageIndex = -1;
			ListView listView = getListView();
			View child = listView.getChildAt(0);
			if (child != null)
			{
				UiManager uiManager = getUiManager();
				ArrayList<GalleryItem> galleryItems = getAdapter().getGallerySet().getItems();
				int position = listView.getPositionForView(child);
				OUTER: for (int v = 0; v <= 1; v++)
				{
					for (PostItem postItem : adapter.iterate(v == 0, position))
					{
						imageIndex = uiManager.view().findImageIndex(galleryItems, postItem);
						if (imageIndex != -1) break OUTER;
					}
				}
			}
			NavigationUtils.openGallery(getActivity(), null, pageHolder.chanName, imageIndex,
					adapter.getGallerySet(), true, true);
			return true;
		}
		case OPTIONS_MENU_SELECT:
		{
			// Enter multi-select action mode (this page is the callback).
			mSelectionMode = getActivity().startActionMode(this);
			return true;
		}
		case OPTIONS_MENU_REFRESH:
		{
			refreshPosts(true, false);
			return true;
		}
		case OPTIONS_MENU_ADD_TO_FAVORITES_TEXT:
		case OPTIONS_MENU_ADD_TO_FAVORITES_ICON:
		{
			FavoritesStorage.getInstance().add(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber, pageHolder.threadTitle, adapter.getExistingPostsCount());
			updateOptionsMenu(false);
			return true;
		}
		case OPTIONS_MENU_REMOVE_FROM_FAVORITES_TEXT:
		case OPTIONS_MENU_REMOVE_FROM_FAVORITES_ICON:
		{
			FavoritesStorage.getInstance().remove(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber);
			updateOptionsMenu(false);
			return true;
		}
		case OPTIONS_MENU_OPEN_ORIGINAL_THREAD:
		{
			// Resolve board/thread from the original thread URI and navigate
			// there (used for archived/mirrored threads).
			String chanName = mOriginalThreadData.first;
			Uri uri = mOriginalThreadData.second;
			ChanLocator locator = ChanLocator.get(chanName);
			String boardName = locator.safe(true).getBoardName(uri);
			String threadNumber = locator.safe(true).getThreadNumber(uri);
			if (threadNumber != null)
			{
				String threadTitle = getAdapter().getItem(0).getSubjectOrComment();
				getUiManager().navigator().navigatePosts(chanName, boardName, threadNumber, null,
						threadTitle, false);
			}
			return true;
		}
		case OPTIONS_MENU_ARCHIVE:
		{
			String threadTitle = null;
			if (adapter.getCount() > 0) threadTitle = adapter.getItem(0).getSubjectOrComment();
			getUiManager().dialog().performSendArchiveThread(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber, threadTitle, getExtra().cachedPosts);
			return true;
		}
		case THREAD_OPTIONS_MENU_RELOAD:
		{
			refreshPosts(true, true);
			return true;
		}
		case THREAD_OPTIONS_MENU_AUTO_REFRESH:
		{
			// Dialog with a switch + seek bar to configure this thread's own
			// auto-refresh; the result is stored in the posts model.
			final SeekBarPreference.Holder holder = new SeekBarPreference.Holder(true,
					Preferences.MIN_AUTO_REFRESH_INTERVAL, Preferences.MAX_AUTO_REFRESH_INTERVAL,
					Preferences.STEP_AUTO_REFRESH_INTERVAL, 1f,
					getString(R.string.preference_auto_refresh_interval_summary_format));
			holder.setCurrentValue(mAutoRefreshInterval);
			holder.setSwitchValue(mAutoRefreshEnabled);
			new AlertDialog.Builder(activity).setTitle(R.string.action_auto_refresh).setView(holder.create
					(getActivity())).setPositiveButton(android.R.string.ok, (dialog, which1) ->
			{
				mAutoRefreshEnabled = holder.getSwitchValue();
				mAutoRefreshInterval = holder.getCurrentValue();
				Posts posts = getExtra().cachedPosts;
				boolean changed = posts.setAutoRefreshData(mAutoRefreshEnabled, mAutoRefreshInterval);
				// Persist only when the stored values actually changed.
				if (changed) serializePosts();
				queueNextRefresh(true);
			}).setNegativeButton(android.R.string.cancel, null).show();
			return true;
		}
		case THREAD_OPTIONS_MENU_HIDDEN_POSTS:
		{
			// Multi-choice dialog listing local autohide rules; checked rules
			// are removed on confirmation.
			ArrayList<String> localAutohide = mHidePerformer.getReadableLocalAutohide();
			final boolean[] checked = new boolean[localAutohide.size()];
			new AlertDialog.Builder(activity).setMultiChoiceItems(CommonUtils.toArray(localAutohide, String.class),
					checked, (dialog, which, isChecked) -> checked[which] = isChecked)
					.setPositiveButton(android.R.string.ok, (dialog, which) ->
			{
				boolean hasDeleted = false;
				// j tracks the live index, which shifts as rules are removed.
				for (int i = 0, j = 0; i < checked.length; i++, j++)
				{
					if (checked[i])
					{
						mHidePerformer.removeLocalAutohide(j--);
						hasDeleted = true;
					}
				}
				if (hasDeleted)
				{
					adapter.invalidateHidden();
					notifyAllAdaptersChanged();
					mHidePerformer.encodeLocalAutohide(getExtra().cachedPosts);
					serializePosts();
					adapter.preloadPosts(getListView().getFirstVisiblePosition());
				}
			}).setNegativeButton(android.R.string.cancel, null).setTitle(R.string.text_remove_rules).show();
			return true;
		}
		case THREAD_OPTIONS_MENU_CLEAR_DELETED:
		{
			// Confirmation dialog, then remove deleted posts from both the
			// model and the adapter and persist the result.
			new AlertDialog.Builder(getActivity()).setMessage(R.string.message_clear_deleted_warning)
					.setPositiveButton(android.R.string.ok, (dialog, which) ->
			{
				PostsExtra extra = getExtra();
				Posts cachedPosts = extra.cachedPosts;
				cachedPosts.clearDeletedPosts();
				ArrayList<PostItem> deletedPostItems = adapter.clearDeletedPosts();
				if (deletedPostItems != null)
				{
					extra.cachedPostItems.removeAll(deletedPostItems);
					for (PostItem postItem : deletedPostItems)
					{
						extra.userPostNumbers.remove(postItem.getPostNumber());
					}
					notifyAllAdaptersChanged();
				}
				updateOptionsMenu(false);
				serializePosts();
			}).setNegativeButton(android.R.string.cancel, null).show();
			return true;
		}
		case THREAD_OPTIONS_MENU_SUMMARY:
		{
			// Count files, posts with files and plain link attachments, then
			// show the totals (plus unique posters when known) in a dialog.
			PostsExtra extra = getExtra();
			int files = 0;
			int postsWithFiles = 0;
			int links = 0;
			for (PostItem postItem : getAdapter())
			{
				ArrayList<AttachmentItem> attachmentItems = postItem.getAttachmentItems();
				if (attachmentItems != null)
				{
					int itFiles = 0;
					for (AttachmentItem attachmentItem : attachmentItems)
					{
						int generalType = attachmentItem.getGeneralType();
						switch (generalType)
						{
							case AttachmentItem.GENERAL_TYPE_FILE:
							case AttachmentItem.GENERAL_TYPE_EMBEDDED:
							{
								itFiles++;
								break;
							}
							case AttachmentItem.GENERAL_TYPE_LINK:
							{
								links++;
								break;
							}
						}
					}
					if (itFiles > 0)
					{
						postsWithFiles++;
						files += itFiles;
					}
				}
			}
			int uniquePosters = extra.cachedPosts!= null ? extra.cachedPosts.getUniquePosters() : -1;
			StringBuilder builder = new StringBuilder();
			String boardName = pageHolder.boardName;
			if (boardName != null)
			{
				builder.append(getString(R.string.text_board)).append(": ");
				String title = getChanConfiguration().getBoardTitle(boardName);
				builder.append(StringUtils.formatBoardTitle(pageHolder.chanName, boardName, title));
				builder.append('\n');
			}
			builder.append(getString(R.string.text_files_format, files));
			builder.append('\n').append(getString(R.string.text_posts_with_files_format, postsWithFiles));
			builder.append('\n').append(getString(R.string.text_links_attachments_format, links));
			if (uniquePosters > 0)
			{
				builder.append('\n').append(getString(R.string.text_unique_posters_format, uniquePosters));
			}
			new AlertDialog.Builder(getActivity()).setTitle(R.string.action_summary).setMessage(builder)
					.setPositiveButton(android.R.string.ok, null).show();
			return true;
		}
	}
	return false;
}
@Override
public void onFavoritesUpdate(FavoritesStorage.FavoriteItem favoriteItem, int action)
{
switch (action)
{
case FavoritesStorage.ACTION_ADD:
case FavoritesStorage.ACTION_REMOVE:
{
PageHolder pageHolder = getPageHolder();
if (favoriteItem.equals(pageHolder.chanName, pageHolder.boardName, pageHolder.threadNumber))
{
updateOptionsMenu(false);
}
break;
}
}
}
@Override
public void onAppearanceOptionChanged(int what)
{
switch (what)
{
case APPEARANCE_MENU_SPOILERS:
case APPEARANCE_MENU_MY_POSTS:
case APPEARANCE_MENU_SFW_MODE:
{
notifyAllAdaptersChanged();
break;
}
}
}
	// Item identifiers for the contextual action mode (multi-post selection) menu.
	private static final int ACTION_MENU_MAKE_THREADSHOT = 0;
	private static final int ACTION_MENU_REPLY = 1;
	private static final int ACTION_MENU_DELETE_POSTS = 2;
	private static final int ACTION_MENU_SEND_REPORT = 3;
	@Override
	public boolean onCreateActionMode(ActionMode mode, Menu menu)
	{
		// Entered when the user starts selecting posts. Builds the contextual menu;
		// the delete and report items appear only when the board configuration
		// allows the corresponding operation on multiple posts at once.
		PageHolder pageHolder = getPageHolder();
		ChanConfiguration configuration = getChanConfiguration();
		getAdapter().setSelectionModeEnabled(true);
		mode.setTitle(getString(R.string.text_selected_format, 0));
		// Reuse the platform's action-mode "paste" icon for the reply item.
		int pasteResId = ResourceUtils.getSystemSelectionIcon(getActivity(), "actionModePasteDrawable",
				"ic_menu_paste_holo_dark");
		int flags = MenuItem.SHOW_AS_ACTION_ALWAYS | MenuItem.SHOW_AS_ACTION_WITH_TEXT;
		ChanConfiguration.Board board = configuration.safe().obtainBoard(pageHolder.boardName);
		menu.add(0, ACTION_MENU_MAKE_THREADSHOT, 0, R.string.action_make_threadshot)
				.setIcon(obtainIcon(R.attr.actionMakeThreadshot)).setShowAsAction(flags);
		if (mReplyable != null)
		{
			menu.add(0, ACTION_MENU_REPLY, 0, R.string.action_reply).setIcon(pasteResId).setShowAsAction(flags);
		}
		if (board.allowDeleting)
		{
			ChanConfiguration.Deleting deleting = configuration.safe().obtainDeleting(pageHolder.boardName);
			if (deleting != null && deleting.multiplePosts)
			{
				menu.add(0, ACTION_MENU_DELETE_POSTS, 0, R.string.action_delete)
						.setIcon(obtainIcon(R.attr.actionDelete)).setShowAsAction(flags);
			}
		}
		if (board.allowReporting)
		{
			ChanConfiguration.Reporting reporting = configuration.safe().obtainReporting(pageHolder.boardName);
			if (reporting != null && reporting.multiplePosts)
			{
				menu.add(0, ACTION_MENU_SEND_REPORT, 0, R.string.action_report)
						.setIcon(obtainIcon(R.attr.actionReport)).setShowAsAction(flags);
			}
		}
		return true;
	}
	@Override
	public boolean onPrepareActionMode(ActionMode mode, Menu menu)
	{
		// The menu is fully built in onCreateActionMode and never changes afterwards.
		return false;
	}
	@Override
	public boolean onActionItemClicked(ActionMode mode, MenuItem item)
	{
		// Handles a click on one of the selection-mode menu items. Every branch
		// finishes the action mode, which clears the selection (onDestroyActionMode).
		switch (item.getItemId())
		{
			case ACTION_MENU_MAKE_THREADSHOT:
			{
				ArrayList<PostItem> postItems = getAdapter().getSelectedItems();
				if (postItems.size() > 0)
				{
					PageHolder pageHolder = getPageHolder();
					new ThreadshotPerformer(getListView(), getUiManager(), pageHolder.chanName, pageHolder.boardName,
							pageHolder.threadNumber, getAdapter().getGallerySet().getThreadTitle(), postItems);
				}
				mode.finish();
				return true;
			}
			case ACTION_MENU_REPLY:
			{
				// Quote all selected posts (post number only, no text fragment).
				ArrayList<Replyable.ReplyData> data = new ArrayList<>();
				for (PostItem postItem : getAdapter().getSelectedItems())
				{
					data.add(new Replyable.ReplyData(postItem.getPostNumber(), null));
				}
				if (data.size() > 0) mReplyable.onRequestReply(CommonUtils.toArray(data, Replyable.ReplyData.class));
				mode.finish();
				return true;
			}
			case ACTION_MENU_DELETE_POSTS:
			{
				// Already-deleted posts are filtered out before sending the request.
				ArrayList<PostItem> postItems = getAdapter().getSelectedItems();
				ArrayList<String> postNumbers = new ArrayList<>();
				for (PostItem postItem : postItems)
				{
					if (!postItem.isDeleted()) postNumbers.add(postItem.getPostNumber());
				}
				if (postNumbers.size() > 0)
				{
					PageHolder pageHolder = getPageHolder();
					getUiManager().dialog().performSendDeletePosts(pageHolder.chanName, pageHolder.boardName,
							pageHolder.threadNumber, postNumbers);
				}
				mode.finish();
				return true;
			}
			case ACTION_MENU_SEND_REPORT:
			{
				// Same filtering as for deletion: deleted posts cannot be reported.
				ArrayList<PostItem> postItems = getAdapter().getSelectedItems();
				ArrayList<String> postNumbers = new ArrayList<>();
				for (PostItem postItem : postItems)
				{
					if (!postItem.isDeleted()) postNumbers.add(postItem.getPostNumber());
				}
				if (postNumbers.size() > 0)
				{
					PageHolder pageHolder = getPageHolder();
					getUiManager().dialog().performSendReportPosts(pageHolder.chanName, pageHolder.boardName,
							pageHolder.threadNumber, postNumbers);
				}
				mode.finish();
				return true;
			}
		}
		return false;
	}
	@Override
	public void onDestroyActionMode(ActionMode mode)
	{
		// Selection mode finished: restore normal rendering and drop the mode reference.
		getAdapter().setSelectionModeEnabled(false);
		mSelectionMode = null;
	}
	@Override
	public boolean onStartSearch(String query)
	{
		// Performs an in-thread search. The query may contain inclusion/exclusion
		// terms and flags ("m" my posts, "r" replies to me, "a" attachments,
		// "d" deleted, "e" edited, "op" original poster) handled by SearchHelper.
		// Matching post positions are collected into mSearchFoundPosts and the
		// view jumps to the first hit at or below the current scroll position.
		PostsAdapter adapter = getAdapter();
		if (adapter.isEmpty()) return false;
		mSearchFoundPosts.clear();
		int listPosition = ListPosition.obtain(getListView()).position;
		mSearchLastPosition = 0;
		boolean positionDefined = false;
		Locale locale = Locale.getDefault();
		SearchHelper helper = new SearchHelper();
		helper.setFlags("m", "r", "a", "d", "e", "op");
		HashSet<String> queries = helper.handleQueries(locale, query);
		HashSet<String> fileNames = new HashSet<>();
		PostsExtra extra = getExtra();
		OUTER: for (int i = 0; i < adapter.getCount(); i++)
		{
			PostItem postItem = adapter.getItem(i);
			// Hidden posts never participate in search.
			if (postItem != null && !postItem.isHidden(mHidePerformer))
			{
				String postNumber = postItem.getPostNumber();
				String comment = postItem.getComment().toString().toLowerCase(locale);
				boolean userPost = postItem.isUserPost();
				// "Reply" means the post references at least one of the user's own posts.
				boolean reply = false;
				HashSet<String> referencesTo = postItem.getReferencesTo();
				if (referencesTo != null)
				{
					for (String referenceTo : referencesTo)
					{
						if (extra.userPostNumbers.contains(referenceTo))
						{
							reply = true;
							break;
						}
					}
				}
				boolean hasAttachments = postItem.hasAttachments();
				boolean deleted = postItem.isDeleted();
				boolean edited = mLastEditedPostNumbers.contains(postNumber);
				boolean originalPoster = postItem.isOriginalPoster();
				if (!helper.checkFlags("m", userPost, "r", reply, "a", hasAttachments, "d", deleted, "e", edited,
						"op", originalPoster))
				{
					continue;
				}
				// Any excluded term found in the comment rejects the whole post.
				for (String lowQuery : helper.getExcluded())
				{
					if (comment.contains(lowQuery)) continue OUTER;
				}
				String subject = postItem.getSubject().toLowerCase(locale);
				String name = postItem.getFullName().toString().toLowerCase(locale);
				// Collect lower-cased attachment file names (stored and original) to search in.
				fileNames.clear();
				ArrayList<AttachmentItem> attachmentItems = postItem.getAttachmentItems();
				if (attachmentItems != null)
				{
					for (AttachmentItem attachmentItem : attachmentItems)
					{
						String fileName = attachmentItem.getFileName();
						if (fileName != null)
						{
							fileNames.add(fileName.toLowerCase(locale));
							String originalName = attachmentItem.getOriginalName();
							if (originalName != null) fileNames.add(originalName.toLowerCase(locale));
						}
					}
				}
				// A post matches when any included term occurs in its comment,
				// subject, author name or one of its file names; with no included
				// terms (flags-only query) every post that passed the flags matches.
				boolean found = false;
				if (helper.hasIncluded())
				{
					QUERIES: for (String lowQuery : helper.getIncluded())
					{
						if (comment.contains(lowQuery))
						{
							found = true;
							break;
						}
						else if (subject.contains(lowQuery))
						{
							found = true;
							break;
						}
						else if (name.contains(lowQuery))
						{
							found = true;
							break;
						}
						else
						{
							for (String fileName : fileNames)
							{
								if (fileName.contains(lowQuery))
								{
									found = true;
									break QUERIES;
								}
							}
						}
					}
				}
				else found = true;
				if (found)
				{
					// Remember the first hit located below the current scroll position.
					if (!positionDefined && i > listPosition)
					{
						mSearchLastPosition = mSearchFoundPosts.size();
						positionDefined = true;
					}
					mSearchFoundPosts.add(i);
				}
			}
		}
		boolean found = mSearchFoundPosts.size() > 0;
		setActionBarLocked(true);
		getUiManager().view().setHighlightText(found ? queries : null);
		adapter.notifyDataSetChanged();
		mSearching = true;
		if (found)
		{
			updateOptionsMenu(true);
			// Compensate the increment performed by findForward.
			mSearchLastPosition--;
			findForward();
		}
		else
		{
			ToastUtils.show(getActivity(), R.string.message_not_found);
			mSearchLastPosition = -1;
			updateSearchTitle();
		}
		return true;
	}
	@Override
	public void onStopSearch()
	{
		// Delegates to the internal variant, which reports whether search was active.
		onStopSearchInternal();
	}
private boolean onStopSearchInternal()
{
if (mSearching)
{
mSearching = false;
updateOptionsMenu(true);
getUiManager().view().setHighlightText(null);
getAdapter().notifyDataSetChanged();
setActionBarLocked(false);
return true;
}
else return false;
}
private void findBack()
{
int count = mSearchFoundPosts.size();
if (count > 0)
{
mSearchLastPosition--;
if (mSearchLastPosition < 0) mSearchLastPosition += count;
ListScroller.scrollTo(getListView(), mSearchFoundPosts.get(mSearchLastPosition));
updateSearchTitle();
}
}
private void findForward()
{
int count = mSearchFoundPosts.size();
if (count > 0)
{
mSearchLastPosition++;
if (mSearchLastPosition >= count) mSearchLastPosition -= count;
ListScroller.scrollTo(getListView(), mSearchFoundPosts.get(mSearchLastPosition));
updateSearchTitle();
}
}
private void updateSearchTitle()
{
mSearchTextResult.setText((mSearchLastPosition + 1) + "/" + mSearchFoundPosts.size());
}
@Override
public boolean onBackPressed()
{
return onStopSearchInternal() || super.onBackPressed();
}
	// Picks up the results of the user's own postings from PostingService and
	// converts them into UserPostPending entries to be matched against incoming
	// posts. Returns true when at least one new pending entry was added (meaning
	// a refresh is worthwhile).
	private boolean handleNewPostDatas()
	{
		PageHolder pageHolder = getPageHolder();
		ArrayList<PostingService.NewPostData> newPostDatas = PostingService.getNewPostDatas(getActivity(),
				pageHolder.chanName, pageHolder.boardName, pageHolder.threadNumber);
		if (newPostDatas != null)
		{
			boolean hasNewPostDatas = false;
			PostsExtra extra = getExtra();
			OUTER: for (PostingService.NewPostData newPostData : newPostDatas)
			{
				ReadPostsTask.UserPostPending userPostPending;
				if (newPostData.newThread)
				{
					userPostPending = new ReadPostsTask.NewThreadUserPostPending();
				}
				else if (newPostData.postNumber != null)
				{
					userPostPending = new ReadPostsTask.PostNumberUserPostPending(newPostData.postNumber);
					// Check this post had loaded before this callback was called
					// This can be unequivocally checked only for this type of UserPostPending
					for (PostItem postItem : getAdapter())
					{
						if (userPostPending.isUserPost(postItem.getPost()))
						{
							// Already present: mark it immediately and skip the pending entry.
							postItem.setUserPost(true);
							extra.userPostNumbers.add(postItem.getPostNumber());
							getUiManager().sendPostItemMessage(postItem, UiManager.MESSAGE_INVALIDATE_VIEW);
							serializePosts();
							continue OUTER;
						}
					}
				}
				else
				{
					// No post number known: match later by the comment text.
					userPostPending = new ReadPostsTask.CommentUserPostPending(newPostData.comment);
				}
				extra.userPostPendings.add(userPostPending);
				hasNewPostDatas = true;
			}
			return hasNewPostDatas;
		}
		return false;
	}
	@Override
	public int onDrawerNumberEntered(int number)
	{
		// A number typed into the drawer is treated first as a 1-based ordinal
		// post index and, failing that, as a literal post number.
		PostsAdapter adapter = getAdapter();
		int count = adapter.getCount();
		boolean success = false;
		if (count > 0 && number > 0)
		{
			if (number <= count)
			{
				int position = adapter.findPositionByOrdinalIndex(number - 1);
				if (position >= 0)
				{
					ListScroller.scrollTo(getListView(), position);
					success = true;
				}
			}
			if (!success)
			{
				int position = adapter.findPositionByPostNumber(Integer.toString(number));
				if (position >= 0)
				{
					ListScroller.scrollTo(getListView(), position);
					success = true;
				}
				else ToastUtils.show(getActivity(), R.string.message_post_not_found);
			}
		}
		int result = DrawerForm.RESULT_REMOVE_ERROR_MESSAGE;
		if (success) result |= DrawerForm.RESULT_SUCCESS;
		return result;
	}
@Override
public void onRequestStoreExtra()
{
PostsExtra extra = getExtra();
extra.expandedPosts.clear();
for (PostItem postItem : getAdapter())
{
if (postItem.isExpanded()) extra.expandedPosts.add(postItem.getPostNumber());
}
}
@Override
public void updatePageConfiguration(String postNumber, String threadTitle)
{
mScrollToPostNumber = postNumber;
if (mReadTask == null && mDeserializeTask == null)
{
if (!scrollToSpecifiedPost(false)) refreshPosts(true, false);
}
}
	@Override
	public void onListPulled(PullableWrapper wrapper, PullableWrapper.Side side)
	{
		// Pull-to-refresh gesture: reload showing the pull indicator rather than
		// the full-screen progress view.
		refreshPosts(true, false, true);
	}
private boolean scrollToSpecifiedPost(boolean instantly)
{
if (mScrollToPostNumber != null)
{
int position = getAdapter().findPositionByPostNumber(mScrollToPostNumber);
if (position >= 0)
{
if (instantly) getListView().setSelection(position);
else ListScroller.scrollTo(getListView(), position);
mScrollToPostNumber = null;
}
}
return mScrollToPostNumber == null;
}
private void onFirstPostsLoad()
{
if (mScrollToPostNumber == null)
{
PageHolder pageHolder = getPageHolder();
if (pageHolder.position != null) pageHolder.position.apply(getListView());
}
}
	// Runs after every successful posts load: registers the thread in history
	// (once), detects an archived thread's original location, and applies the
	// thread title taken from the opening post.
	private void onAfterPostsLoad()
	{
		PostsExtra extra = getExtra();
		if (!extra.isAddedToHistory)
		{
			extra.isAddedToHistory = true;
			PageHolder pageHolder = getPageHolder();
			HistoryDatabase.getInstance().addHistory(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber, pageHolder.threadTitle);
		}
		if (extra.cachedPosts != null)
		{
			Pair<String, Uri> originalThreadData = null;
			Uri archivedThreadUri = extra.cachedPosts.getArchivedThreadUri();
			if (archivedThreadUri != null)
			{
				String chanName = ChanManager.getInstance().getChanNameByHost(archivedThreadUri.getAuthority());
				if (chanName != null) originalThreadData = new Pair<>(chanName, archivedThreadUri);
			}
			// Refresh the menu only when the presence of the original-thread link changed.
			if ((mOriginalThreadData == null) != (originalThreadData == null))
			{
				mOriginalThreadData = originalThreadData;
				updateOptionsMenu(false);
			}
		}
		// The first post's subject (or comment) defines the thread title.
		Iterator<PostItem> iterator = getAdapter().iterator();
		if (iterator.hasNext()) applyTitle(iterator.next().getSubjectOrComment(), false);
	}
	// Applies the thread title to the activity and, unless activityOnly is set,
	// propagates it to the page holder, favorites, the drawer and the history
	// database. Falls back to a generated board/thread caption for blank titles.
	private void applyTitle(String title, boolean activityOnly)
	{
		PageHolder pageHolder = getPageHolder();
		if (!StringUtils.isEmptyOrWhitespace(title))
		{
			if (!activityOnly)
			{
				pageHolder.threadTitle = title;
				FavoritesStorage.getInstance().modifyTitle(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber, title, false);
				invalidateDrawerItems(true, true);
				HistoryDatabase.getInstance().refreshTitles(pageHolder.chanName, pageHolder.boardName,
						pageHolder.threadNumber, title);
			}
			getActivity().setTitle(title);
		}
		else
		{
			getActivity().setTitle(StringUtils.formatThreadTitle(pageHolder.chanName, pageHolder.boardName,
					pageHolder.threadNumber));
		}
	}
	// Main-thread handler driving the periodic auto-refresh timer.
	private static final Handler HANDLER = new Handler();
	private final Runnable mRefreshRunnable = () ->
	{
		// Skip this tick while a load is already running, but keep the timer alive.
		if (mDeserializeTask == null && mReadTask == null) refreshPosts(true, false);
		queueNextRefresh(false);
	};
	// (Re)schedules the auto-refresh runnable according to the preferences:
	// either the per-thread interval (SEPARATE mode with the thread's own toggle)
	// or the globally configured one. With instant set the refresh runs at once.
	private void queueNextRefresh(boolean instant)
	{
		HANDLER.removeCallbacks(mRefreshRunnable);
		int mode = Preferences.getAutoRefreshMode();
		boolean enabled = mode == Preferences.AUTO_REFRESH_MODE_SEPARATE && mAutoRefreshEnabled ||
				mode == Preferences.AUTO_REFRESH_MODE_ENABLED;
		if (enabled)
		{
			int interval = mode == Preferences.AUTO_REFRESH_MODE_SEPARATE ? mAutoRefreshInterval
					: Preferences.getAutoRefreshInterval();
			// The interval is stored in seconds; the handler expects milliseconds.
			if (instant) HANDLER.post(mRefreshRunnable);
			else HANDLER.postDelayed(mRefreshRunnable, interval * 1000);
		}
	}
	// Cancels any pending auto-refresh tick.
	private void stopRefresh()
	{
		HANDLER.removeCallbacks(mRefreshRunnable);
	}
	// Convenience overload: show the pull indicator only when some posts are
	// already displayed; otherwise show the full-screen progress view.
	private void refreshPosts(boolean checkModified, boolean reload)
	{
		refreshPosts(checkModified, reload, !getAdapter().isEmpty());
	}
	// Starts (or restarts) the network read task. While cache deserialization is
	// still running, only a deferred-refresh flag is raised; the actual refresh
	// happens once deserialization completes (onDeserializePostsCompleteInternal).
	private void refreshPosts(boolean checkModified, boolean reload, boolean showPull)
	{
		PostsExtra extra = getExtra();
		if (mDeserializeTask != null)
		{
			if (!reload) extra.forceRefresh = true;
			return;
		}
		if (mReadTask != null) mReadTask.cancel();
		PageHolder pageHolder = getPageHolder();
		PostsAdapter adapter = getAdapter();
		boolean partialLoading = !adapter.isEmpty();
		// A modification validator only makes sense for incremental (non-reload) loads.
		boolean useValidator = checkModified && partialLoading && !reload;
		mReadTask = new ReadPostsTask(this, pageHolder.chanName, pageHolder.boardName, pageHolder.threadNumber,
				extra.cachedPosts, useValidator, reload, adapter.getLastPostNumber(), extra.userPostPendings);
		mReadTask.executeOnExecutor(ReadPostsTask.THREAD_POOL_EXECUTOR);
		if (showPull)
		{
			getListView().getWrapper().startBusyState(PullableWrapper.Side.BOTTOM);
			switchView(ViewType.LIST, null);
		}
		else
		{
			getListView().getWrapper().startBusyState(PullableWrapper.Side.BOTH);
			switchView(ViewType.PROGRESS, null);
		}
	}
	@Override
	public void onRequestPreloadPosts(PostItem[] postItems)
	{
		// Warms up lazily computed data (parsed comment, hidden state) for a slice
		// of the newest incoming posts and the most recent already-displayed ones,
		// keeping scrolling smooth after a merge.
		PostsAdapter adapter = getAdapter();
		int count = adapter.getCount();
		int threshold = ListScroller.getJumpThreshold(getActivity());
		int handleNewCount = Math.min(threshold / 4, postItems.length);
		int handleOldCount = Math.min(threshold, count);
		for (int i = 0; i < handleNewCount; i++)
		{
			PostItem postItem = postItems[i];
			postItem.getComment();
			postItem.isHidden(mHidePerformer);
		}
		// Walk the existing posts from the end of the list (newest first).
		for (int i = 0; i < handleOldCount; i++)
		{
			PostItem postItem = adapter.getItem(count - i - 1);
			if (postItem != null)
			{
				postItem.getComment();
				postItem.isHidden(mHidePerformer);
			}
		}
	}
	@Override
	public void onDeserializePostsComplete(boolean success, Posts posts, ArrayList<PostItem> postItems)
	{
		// Cache deserialization finished: rebuild the set of the user's own post
		// numbers from the restored items, then run the shared completion logic.
		mDeserializeTask = null;
		getListView().getWrapper().cancelBusyState();
		switchView(ViewType.LIST, null);
		if (success && postItems != null)
		{
			PostsExtra extra = getExtra();
			extra.userPostNumbers.clear();
			for (PostItem postItem : postItems)
			{
				if (postItem.isUserPost()) extra.userPostNumbers.add(postItem.getPostNumber());
			}
		}
		onDeserializePostsCompleteInternal(success, posts, postItems, false);
	}
	// Applies deserialized posts to the adapter and restores per-thread state
	// (expanded posts, auto-refresh settings). On failure falls back to a
	// network load instead.
	private void onDeserializePostsCompleteInternal(boolean success, Posts posts, ArrayList<PostItem> postItems,
			boolean isLoadedExplicitly)
	{
		PostsAdapter adapter = getAdapter();
		PostsExtra extra = getExtra();
		extra.cachedPosts = null;
		extra.cachedPostItems.clear();
		if (success)
		{
			mHidePerformer.decodeLocalAutohide(posts);
			extra.cachedPosts = posts;
			extra.cachedPostItems.addAll(postItems);
			ArrayList<ReadPostsTask.Patch> patches = new ArrayList<>();
			for (int i = 0; i < postItems.size(); i++) patches.add(new ReadPostsTask.Patch(postItems.get(i), i));
			adapter.setItems(patches, isLoadedExplicitly);
			// Restore the expanded state captured in onRequestStoreExtra.
			for (PostItem postItem : adapter)
			{
				if (extra.expandedPosts.contains(postItem.getPostNumber())) postItem.setExpanded(true);
			}
			Pair<Boolean, Integer> autoRefreshData = posts.getAutoRefreshData();
			mAutoRefreshEnabled = autoRefreshData.first;
			// Clamp the stored interval into the allowed preference range.
			mAutoRefreshInterval = Math.min(Math.max(autoRefreshData.second, Preferences.MIN_AUTO_REFRESH_INTERVAL),
					Preferences.MAX_AUTO_REFRESH_INTERVAL);
			onFirstPostsLoad();
			onAfterPostsLoad();
			showScaleAnimation();
			scrollToSpecifiedPost(true);
			// A refresh may have been requested while deserialization was running.
			if (extra.forceRefresh)
			{
				extra.forceRefresh = false;
				refreshPosts(true, false);
			}
			queueNextRefresh(false);
		}
		else refreshPosts(false, false);
		updateOptionsMenu(false);
	}
@Override
public void onReadPostsSuccess(ReadPostsTask.Result result, boolean fullThread,
ArrayList<ReadPostsTask.UserPostPending> removedUserPostPendings)
{
mReadTask = null;
getListView().getWrapper().cancelBusyState();
switchView(ViewType.LIST, null);
PostsAdapter adapter = getAdapter();
PageHolder pageHolder = getPageHolder();
if (adapter.isEmpty()) StatisticsManager.getInstance().incrementViews(pageHolder.chanName);
PostsExtra extra = getExtra();
boolean wasEmpty = adapter.isEmpty();
final int newPostPosition = adapter.getCount();
if (removedUserPostPendings != null)
{
for (ReadPostsTask.UserPostPending userPostPending : removedUserPostPendings)
{
extra.userPostPendings.remove(userPostPending);
}
}
if (fullThread)
{
// Thread was opened for the first time
extra.cachedPosts = result.posts;
extra.cachedPostItems.clear();
for (ReadPostsTask.Patch patch : result.patches) extra.cachedPostItems.add(patch.postItem);
adapter.setItems(result.patches, false);
boolean allowCache = CacheManager.getInstance().allowPagesCache(pageHolder.chanName);
if (allowCache)
{
for (PostItem postItem : extra.cachedPostItems) postItem.setUnread(true);
}
onFirstPostsLoad();
}
else
{
if (extra.cachedPosts != null)
{
// Copy data from old model to new model
Pair<Boolean, Integer> autoRefreshData = extra.cachedPosts.getAutoRefreshData();
result.posts.setAutoRefreshData(autoRefreshData.first, autoRefreshData.second);
result.posts.setLocalAutohide(extra.cachedPosts.getLocalAutohide());
}
extra.cachedPosts = result.posts;
int repliesCount = 0;
if (!result.patches.isEmpty())
{
// Copy data from old model to new model
for (ReadPostsTask.Patch patch : result.patches)
{
if (patch.oldPost != null)
{
if (patch.oldPost.isUserPost()) patch.newPost.setUserPost(true);
if (patch.oldPost.isHidden()) patch.newPost.setHidden(true);
if (patch.oldPost.isShown()) patch.newPost.setHidden(false);
}
}
for (ReadPostsTask.Patch patch : result.patches)
{
if (patch.newPost.isUserPost()) extra.userPostNumbers.add(patch.newPost.getPostNumber());
if (patch.newPostAddedToEnd)
{
HashSet<String> referencesTo = patch.postItem.getReferencesTo();
if (referencesTo != null)
{
for (String postNumber : referencesTo)
{
if (extra.userPostNumbers.contains(postNumber))
{
repliesCount++;
break;
}
}
}
}
}
adapter.mergeItems(result.patches);
extra.cachedPostItems.clear();
for (PostItem postItem : adapter) extra.cachedPostItems.add(postItem);
// Mark changed posts as unread
for (ReadPostsTask.Patch patch : result.patches) patch.postItem.setUnread(true);
}
if (result.newCount > 0 || repliesCount > 0 || result.deletedCount > 0 || result.hasEdited)
{
StringBuilder message = new StringBuilder();
if (repliesCount > 0 || result.deletedCount > 0)
{
message.append(getQuantityString(R.plurals.text_new_posts_count_short_format,
result.newCount, result.newCount));
if (repliesCount > 0)
{
message.append(", ").append(getQuantityString(R.plurals.text_replies_count_format,
repliesCount, repliesCount));
}
if (result.deletedCount > 0)
{
message.append(", ").append(getQuantityString(R.plurals.text_deleted_count_format,
result.deletedCount, result.deletedCount));
}
}
else if (result.newCount > 0)
{
message.append(getQuantityString(R.plurals.text_new_posts_count_format,
result.newCount, result.newCount));
}
else
{
message.append(getString(R.string.message_edited_posts));
}
if (result.newCount > 0)
{
ClickableToast.show(getActivity(), message, getString(R.string.action_show), () ->
{
if (!isDestroyed()) ListScroller.scrollTo(getListView(), newPostPosition);
}, true);
}
else ClickableToast.show(getActivity(), message);
}
}
boolean updateAdapters = result.newCount > 0 || result.deletedCount > 0 || result.hasEdited;
serializePosts();
if (result.hasEdited)
{
mLastEditedPostNumbers.clear();
for (ReadPostsTask.Patch patch : result.patches)
{
if (!patch.newPostAddedToEnd) mLastEditedPostNumbers.add(patch.newPost.getPostNumber());
}
}
if (FavoritesStorage.getInstance().hasFavorite(pageHolder.chanName, pageHolder.boardName,
pageHolder.threadNumber))
{
FavoritesStorage.getInstance().modifyPostsCount(pageHolder.chanName, pageHolder.boardName,
pageHolder.threadNumber, adapter.getExistingPostsCount());
// Invalidate for ThreadsWatcher
invalidateDrawerItems(false, true);
}
if (updateAdapters)
{
getUiManager().dialog().updateAdapters();
notifyAllAdaptersChanged();
}
onAfterPostsLoad();
if (wasEmpty && !adapter.isEmpty()) showScaleAnimation();
scrollToSpecifiedPost(wasEmpty);
mScrollToPostNumber = null;
updateOptionsMenu(false);
}
@Override
public void onReadPostsEmpty()
{
mReadTask = null;
getListView().getWrapper().cancelBusyState();
switchView(ViewType.LIST, null);
if (getAdapter().isEmpty()) displayDownloadError(true, getString(R.string.message_empty_response));
}
	@Override
	public void onReadPostsRedirect(String boardName, String threadNumber, String postNumber)
	{
		// The server redirected to another thread: close this page and open the target.
		mReadTask = null;
		getListView().getWrapper().cancelBusyState();
		PageHolder pageHolder = getPageHolder();
		removeCurrentPage();
		getUiManager().navigator().navigatePosts(pageHolder.chanName, boardName, threadNumber, postNumber, null, false);
	}
	@Override
	public void onReadPostsFail(ErrorItem errorItem)
	{
		// Network load failed: display the error and drop any pending scroll request.
		mReadTask = null;
		getListView().getWrapper().cancelBusyState();
		displayDownloadError(true, errorItem.toString());
		mScrollToPostNumber = null;
	}
private void displayDownloadError(boolean show, String message)
{
if (show && getAdapter().getCount() > 0)
{
ClickableToast.show(getActivity(), message);
return;
}
switchView(ViewType.ERROR, message);
}
	// Recursively hides a post together with the whole tree of replies to it,
	// collecting every post whose view needs redrawing into postItemsToInvalidate.
	// Already-hidden posts are not re-added, but their replies are still traversed.
	// NOTE(review): assumes the reply graph is acyclic (normally guaranteed by
	// post ordering) — a reference cycle would cause unbounded recursion; confirm.
	private void hidePostAndReplies(PostItem postItem, ArrayList<PostItem> postItemsToInvalidate)
	{
		if (!postItem.getPost().isHidden())
		{
			postItem.setHidden(true);
			postItemsToInvalidate.add(postItem);
		}
		LinkedHashSet<String> referencesFrom = postItem.getReferencesFrom();
		if (referencesFrom != null)
		{
			PostsAdapter adapter = getAdapter();
			for (String postNumber : referencesFrom)
			{
				PostItem foundPostItem = adapter.findPostItem(postNumber);
				if (foundPostItem != null) hidePostAndReplies(foundPostItem, postItemsToInvalidate);
			}
		}
	}
	@Override
	public void onPostItemMessage(PostItem postItem, int message)
	{
		// Dispatches UiManager messages targeted at a single post.
		int position = getUiManager().view().findViewIndex(getListView(), postItem);
		switch (message)
		{
			case UiManager.MESSAGE_INVALIDATE_VIEW:
			{
				getAdapter().notifyDataSetChanged();
				break;
			}
			case UiManager.MESSAGE_INVALIDATE_COMMENT_VIEW:
			{
				getUiManager().view().invalidateComment(getListView(), position);
				break;
			}
			case UiManager.MESSAGE_PERFORM_SERIALIZE:
			{
				// Serialize only when the post belongs to this page's list view.
				if (position != ListView.INVALID_POSITION) serializePosts();
				break;
			}
			case UiManager.MESSAGE_PERFORM_USER_MARK_UPDATE:
			{
				// Keep the user-post numbers set in sync with the toggled mark.
				PostsExtra extra = getExtra();
				if (postItem.isUserPost()) extra.userPostNumbers.add(postItem.getPostNumber());
				else extra.userPostNumbers.remove(postItem.getPostNumber());
				break;
			}
			case UiManager.MESSAGE_PERFORM_CASCADE_HIDE:
			{
				if (position != ListView.INVALID_POSITION)
				{
					// Hide the post with its reply tree, then redraw every affected view.
					ArrayList<PostItem> postItemsToInvalidate = new ArrayList<>();
					hidePostAndReplies(postItem, postItemsToInvalidate);
					UiManager uiManager = getUiManager();
					for (PostItem invalidatePostItem : postItemsToInvalidate)
					{
						uiManager.sendPostItemMessage(invalidatePostItem, UiManager.MESSAGE_INVALIDATE_VIEW);
					}
				}
				break;
			}
			case UiManager.MESSAGE_PERFORM_HIDE_NAME:
			case UiManager.MESSAGE_PERFORM_HIDE_SIMILAR:
			{
				// Add a new autohide rule; on success the hidden state of every
				// post is recomputed and the updated rules are persisted.
				PostsAdapter adapter = getAdapter();
				adapter.cancelPreloading();
				boolean success;
				if (message == UiManager.MESSAGE_PERFORM_HIDE_NAME) success = mHidePerformer.addHideByName(postItem);
				else success = mHidePerformer.addHideSimilar(postItem);
				if (success)
				{
					postItem.resetHidden();
					adapter.invalidateHidden();
					notifyAllAdaptersChanged();
					mHidePerformer.encodeLocalAutohide(getExtra().cachedPosts);
					serializePosts();
				}
				adapter.preloadPosts(getListView().getFirstVisiblePosition());
				break;
			}
			case UiManager.MESSAGE_PERFORM_LOAD_THUMBNAIL:
			{
				getUiManager().view().displayThumbnail(getListView(), position, postItem.getAttachmentItems(), true);
				break;
			}
		}
	}
	// Persists the current posts model into the pages cache for this thread.
	private void serializePosts()
	{
		PageHolder pageHolder = getPageHolder();
		CacheManager.getInstance().serializePosts(pageHolder.chanName, pageHolder.boardName,
				pageHolder.threadNumber, getExtra().cachedPosts);
	}
	// Page-scoped state surviving page recreation. Note that cachedPosts,
	// cachedPostItems and userPostNumbers are deliberately NOT parcelled: they
	// are rebuilt from the serialized posts cache on deserialization.
	public static class PostsExtra implements PageHolder.ParcelableExtra
	{
		public Posts cachedPosts;
		public final ArrayList<PostItem> cachedPostItems = new ArrayList<>();
		// Numbers of posts written by this user in this thread.
		public final HashSet<String> userPostNumbers = new HashSet<>();
		// Own postings not yet matched against loaded posts.
		public final ArrayList<ReadPostsTask.UserPostPending> userPostPendings = new ArrayList<>();
		// Post numbers whose comment is currently expanded.
		public final HashSet<String> expandedPosts = new HashSet<>();
		public boolean isAddedToHistory = false;
		public boolean forceRefresh = false;
		@Override
		public void writeToParcel(Parcel dest)
		{
			// Field order here must match readFromParcel exactly.
			dest.writeList(userPostPendings);
			dest.writeStringArray(CommonUtils.toArray(expandedPosts, String.class));
			dest.writeInt(isAddedToHistory ? 1 : 0);
			dest.writeInt(forceRefresh ? 1 : 0);
		}
		@Override
		public void readFromParcel(Parcel source)
		{
			@SuppressWarnings("unchecked")
			ArrayList<ReadPostsTask.UserPostPending> userPostPendings = source
					.readArrayList(PostsExtra.class.getClassLoader());
			if (userPostPendings.size() > 0) this.userPostPendings.addAll(userPostPendings);
			String[] data = source.createStringArray();
			if (data != null) Collections.addAll(expandedPosts, data);
			isAddedToHistory = source.readInt() != 0;
			forceRefresh = source.readInt() != 0;
		}
	}
private PostsExtra getExtra()
{
PageHolder pageHolder = getPageHolder();
if (!(pageHolder.extra instanceof PostsExtra)) pageHolder.extra = new PostsExtra();
return (PostsExtra) pageHolder.extra;
}
} | Bug fix: update user post numbers set when thread loaded first time
| src/com/mishiranu/dashchan/ui/navigator/page/PostsPage.java | Bug fix: update user post numbers set when thread loaded first time | <ide><path>rc/com/mishiranu/dashchan/ui/navigator/page/PostsPage.java
<ide> // Thread was opened for the first time
<ide> extra.cachedPosts = result.posts;
<ide> extra.cachedPostItems.clear();
<del> for (ReadPostsTask.Patch patch : result.patches) extra.cachedPostItems.add(patch.postItem);
<add> extra.userPostNumbers.clear();
<add> for (ReadPostsTask.Patch patch : result.patches)
<add> {
<add> extra.cachedPostItems.add(patch.postItem);
<add> if (patch.newPost.isUserPost()) extra.userPostNumbers.add(patch.newPost.getPostNumber());
<add> }
<ide> adapter.setItems(result.patches, false);
<ide> boolean allowCache = CacheManager.getInstance().allowPagesCache(pageHolder.chanName);
<ide> if (allowCache) |
|
JavaScript | mit | 59a794a4101206756c54f9a367c074f6a3555564 | 0 | SAKryukov/vscode-markdown-to-html | "use strict";
module.exports = (md, options) => {
const utility = require("./utility");
const autoNumberingParser = require("./autoNumbering.optionParser");
const autoNumbering = require("./autoNumbering");
const defaultOptions = {
autoNumbering: {
enable: false,
pattern: [],
defaultSuffix: ". ",
defaultPrefix: "",
defaultStart: 1,
defaultSeparator: "."
},
}; //defaultOptions
let renderedHtml, usedIds, headingSet, tocLocations;
const cleanUp = () => {
renderedHtml = null;
usedIds = { headings: {}, toc: {}, excludeFromToc: {} };
headingSet = {};
tocLocations = [];
};
const tocIncludeLevelSet = new Set(options.thisExtensionSettings.TOC.includeLevels);
const tocRegex = new RegExp(options.thisExtensionSettings.TOC.regex);
const excludeFromTocRegex = new RegExp(options.thisExtensionSettings.TOC.excludeHeaderRegex);
const enumerationRuleSetRegexp = new RegExp(options.thisExtensionSettings.TOC.autoNumberingRegex);
md.core.ruler.before("block", "detectAutoNumbering", state => {
options.autoNumbering = defaultOptions.autoNumbering;
const match = enumerationRuleSetRegexp.exec(state.src);
if (!match) return;
try {
const privilegedOptions = autoNumberingParser(match[1]);
if (privilegedOptions) {
utility.populateWithDefault(privilegedOptions, defaultOptions.autoNumbering);
options.autoNumbering = privilegedOptions;
} // if
} finally {
if (match)
state.src = state.src.slice(match[0].length, state.src.length);
} //exception
}); //md.core.ruler.before
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
const autoNumberGenerator = {
init: function() {
this.broken = false;
this.stack = [];
this.current = { level: undefined, indexIterator: undefined, parentPrefix: "", prefix: undefined };
this.levelOptionDictionary = {};
},
newCurrent: function(level) {
const effectiveLevelOptions = this.getEffectiveLevelOptions(level);
return { level: level, indexIterator: new autoNumbering.Iterator(effectiveLevelOptions.start), parentPrefix: this.current.prefix, prefix: undefined, standAlong: false };
},
brokenContent: function(content) { return `${options.thisExtensionSettings.TOC.autoNumbering.brokenHierarchy}${content}`; },
getEffectiveLevelOptions: function(level) {
if (level in this.levelOptionDictionary)
return this.levelOptionDictionary[level];
const effectiveOptions = autoNumbering.getEffectiveLevelOptions(options, level);
this.levelOptionDictionary[level] = effectiveOptions;
return effectiveOptions;
},
formPrefix: function(effectiveLevelOptions) {
this.current.prefix = this.current.indexIterator.toString();
this.current.indexIterator.next();
if (this.current.parentPrefix && (!effectiveLevelOptions.standAlong))
this.current.prefix = `${this.current.parentPrefix}${effectiveLevelOptions.separator}${this.current.prefix}`;
},
numberedContent: function(content, effectiveLevelOptions) {
return `${effectiveLevelOptions.prefix}${this.current.prefix}${effectiveLevelOptions.suffix}${content}`;
},
generate: function (tocLevel, content) {
if (!options.autoNumbering.enable) return content;
if (!tocIncludeLevelSet.has(tocLevel + 1)) return content;
if (this.broken) return this.brokenContent(content);
const effectiveLevelOptions = this.getEffectiveLevelOptions(tocLevel);
if (this.current.level == undefined) {
this.current.level = tocLevel;
this.current.indexIterator = new autoNumbering.Iterator(effectiveLevelOptions.start);
} else if (tocLevel == this.current.level) {
++this.current.index;
} else if (tocLevel == this.current.level + 1) {
this.stack.push(this.current);
this.current = this.newCurrent(tocLevel);
} else if (tocLevel < this.current.level) {
const popCount = this.current.level - tocLevel;
if (popCount > this.stack.length) {
this.broken = true;
return this.brokenContent(content);
} //if
let last = undefined;
for (let index = 0; index < popCount; ++index)
last = this.stack.pop();
this.current = last;
++this.current.index;
} else {
this.broken == true;
return this.brokenContent(content);
} //if
this.formPrefix(effectiveLevelOptions);
return this.numberedContent(content, effectiveLevelOptions);
},
}; //autoNumberGenerator
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
const buildToc = () => {
if (renderedHtml) return renderedHtml;
(() => {
let zeroIndent = Number.MAX_VALUE;
for (let index in headingSet) {
const level = headingSet[index].level;
if (level < zeroIndent) zeroIndent = level;
} //loop zeroIndent
for (let index in headingSet) {
headingSet[index].tocLevel = headingSet[index].level;
headingSet[index].level -= zeroIndent;
}
})();
renderedHtml = `\n`;
for (let index in headingSet) {
let element = headingSet[index];
if (!tocIncludeLevelSet.has(element.tocLevel + 1)) continue;
renderedHtml += `<a style="margin-left: ${element.level * options.thisExtensionSettings.TOC.itemIndentInEm}em;" href="#${element.id}">${element.content}</a><br/>\n`;
} //loop
return renderedHtml;
}; //buildToc
md.core.ruler.after("block", "buildToc", state => {
cleanUp();
autoNumberGenerator.init();
for (let index = 0; index < state.tokens.length; ++index) {
const token = state.tokens[index];
const isParagraph = token.type == "paragraph_open";
const isHeading = token.type == "heading_open";
if (!isParagraph && !isHeading) continue;
const contentToken = state.tokens[index + 1];
if (isParagraph)
if (tocRegex.exec(contentToken.content)) {
tocLocations.push(index);
utility.cleanInline(contentToken, tocRegex);
}
if (!isHeading) continue;
if (excludeFromTocRegex.exec(contentToken.content)) {
utility.cleanInline(contentToken, excludeFromTocRegex);
continue;
}
const id = utility.slugify(contentToken.content, usedIds, options.thisExtensionSettings.headingIdPrefix);
const level = utility.htmlHeadingLevel(token.tag);
const content = autoNumberGenerator.generate(level, contentToken.content);
const prefix = content.slice(0, content.length - contentToken.content.length);
headingSet[index] = { index: index, id: id, content: content, prefix: prefix, level: level, tag: token.tag };
} // loop state.tokens
}); //md.core.ruler.after
const previousRenderHeadingOpen = md.renderer.rules.heading_open;
md.renderer.rules.heading_open = (tokens, index, ruleOptions, object, renderer) => {
const heading = headingSet[index];
if (!heading)
return utility.renderDefault(tokens, index, ruleOptions, object, renderer, previousRenderHeadingOpen, `<${tokens[index].tag}>`);
return `<${headingSet[index].tag} id="${headingSet[index].id}">${headingSet[index].prefix} `;
}; //md.renderer.rules.heading_open
// to remove data-... from links
const previousLinkOpen = md.renderer.rules.link_open;
md.renderer.rules.link_open = (tokens, index, ruleOptions, object, renderer) => {
let attributes = [];
if (tokens[index].type == "link_open") {
for (let attribute of tokens[index].attrs) {
if (!attribute[0].startsWith("data-"))
attributes.push(`${attribute[0]}="${attribute[1]}"`);
} //loop
} //if
let open = "<a>";
if (attributes.length > 0)
open = `<a ${attributes.join(' ')}>`;
return utility.renderDefault(tokens, index, ruleOptions, object, renderer, previousLinkOpen, open);
}; //md.renderer.rules.heading_open
const previousRenderParagraphOpen = md.renderer.rules.paragraph_open;
md.renderer.rules.paragraph_open = (tokens, index, ruleOptions, object, renderer) => {
for (let tocLocation of tocLocations)
if (index == tocLocation)
return `<p class="${options.thisExtensionSettings.TOC.containerClass}">${buildToc()}`;
return utility.renderDefault(tokens, index, ruleOptions, object, renderer, previousRenderParagraphOpen, `<p>`);
}; //md.renderer.rules.paragraph_open
}; //module.exports
| extension/id.toc.js | "use strict";
module.exports = (md, options) => {
const utility = require("./utility");
const autoNumberingParser = require("./autoNumbering.optionParser");
const autoNumbering = require("./autoNumbering");
const defaultOptions = {
autoNumbering: {
enable: false,
pattern: [],
defaultSuffix: ". ",
defaultPrefix: "",
defaultStart: 1,
defaultSeparator: "."
},
}; //defaultOptions
let renderedHtml, usedIds, headingSet, tocLocations;
const cleanUp = () => {
renderedHtml = null;
usedIds = { headings: {}, toc: {}, excludeFromToc: {} };
headingSet = {};
tocLocations = [];
};
const tocIncludeLevelSet = new Set(options.thisExtensionSettings.TOC.includeLevels);
const tocRegex = new RegExp(options.thisExtensionSettings.TOC.regex);
const excludeFromTocRegex = new RegExp(options.thisExtensionSettings.TOC.excludeHeaderRegex);
const enumerationRuleSetRegexp = new RegExp(options.thisExtensionSettings.TOC.autoNumberingRegex);
md.core.ruler.before("block", "detectAutoNumbering", state => {
options.autoNumbering = defaultOptions.autoNumbering;
const match = enumerationRuleSetRegexp.exec(state.src);
if (!match) return;
try {
const privilegedOptions = autoNumberingParser(match[1]);
if (privilegedOptions) {
utility.populateWithDefault(privilegedOptions, defaultOptions.autoNumbering);
options.autoNumbering = privilegedOptions;
} // if
} finally {
if (match)
state.src = state.src.slice(match[0].length, state.src.length);
} //exception
}); //md.core.ruler.before
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
const autoNumberGenerator = {
init: function() {
this.broken = false;
this.stack = [];
this.current = { level: undefined, indexIterator: undefined, parentPrefix: "", prefix: undefined };
this.levelOptionDictionary = {};
},
newCurrent: function(level) {
const effectiveLevelOptions = this.getEffectiveLevelOptions(level);
return { level: level, indexIterator: new autoNumbering.Iterator(effectiveLevelOptions.start), parentPrefix: this.current.prefix, prefix: undefined, standAlong: false };
},
brokenContent: function(content) { return `${options.thisExtensionSettings.TOC.autoNumbering.brokenHierarchy}${content}`; },
getEffectiveLevelOptions: function(level) {
if (level in this.levelOptionDictionary)
return this.levelOptionDictionary[level];
const effectiveOptions = autoNumbering.getEffectiveLevelOptions(options, level);
this.levelOptionDictionary[level] = effectiveOptions;
return effectiveOptions;
},
formPrefix: function(effectiveLevelOptions) {
this.current.prefix = this.current.indexIterator.toString();
this.current.indexIterator.next();
if (this.current.parentPrefix && (!effectiveLevelOptions.standAlong))
this.current.prefix = `${this.current.parentPrefix}${effectiveLevelOptions.separator}${this.current.prefix}`;
},
numberedContent: function(content, effectiveLevelOptions) {
return `${effectiveLevelOptions.prefix}${this.current.prefix}${effectiveLevelOptions.suffix}${content}`;
},
generate: function (tocLevel, content) {
if (!options.autoNumbering.enable) return content;
if (!tocIncludeLevelSet.has(tocLevel + 1)) return content;
if (this.broken) return this.brokenContent(content);
const effectiveLevelOptions = this.getEffectiveLevelOptions(tocLevel);
if (this.current.level == undefined) {
this.current.level = tocLevel;
this.current.indexIterator = new autoNumbering.Iterator(effectiveLevelOptions.start);
} else if (tocLevel == this.current.level) {
++this.current.index;
} else if (tocLevel == this.current.level + 1) {
this.stack.push(this.current);
this.current = this.newCurrent(tocLevel);
} else if (tocLevel < this.current.level) {
const popCount = this.current.level - tocLevel;
if (popCount > this.stack.length) {
this.broken = true;
return this.brokenContent(content);
} //if
let last = undefined;
for (let index = 0; index < popCount; ++index)
last = this.stack.pop();
this.current = last;
++this.current.index;
} else {
this.broken == true;
return this.brokenContent(content);
} //if
this.formPrefix(effectiveLevelOptions);
return this.numberedContent(content, effectiveLevelOptions);
},
}; //autoNumberGenerator
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
const buildToc = () => {
if (renderedHtml) return renderedHtml;
(() => {
let zeroIndent = Number.MAX_VALUE;
for (let index in headingSet) {
const level = headingSet[index].level;
if (level < zeroIndent) zeroIndent = level;
} //loop zeroIndent
for (let index in headingSet) {
headingSet[index].tocLevel = headingSet[index].level;
headingSet[index].level -= zeroIndent;
}
})();
renderedHtml = `\n`;
for (let index in headingSet) {
let element = headingSet[index];
if (!tocIncludeLevelSet.has(element.tocLevel + 1)) continue;
renderedHtml += `<a style="margin-left: ${element.level * options.thisExtensionSettings.TOC.itemIndentInEm}em;" href="#${element.id}">${element.content}</a><br/>\n`;
} //loop
return renderedHtml;
}; //buildToc
md.core.ruler.after("block", "buildToc", state => {
cleanUp();
autoNumberGenerator.init();
for (let index = 0; index < state.tokens.length; ++index) {
const token = state.tokens[index];
const isParagraph = token.type == "paragraph_open";
const isHeading = token.type == "heading_open";
if (!isParagraph && !isHeading) continue;
const contentToken = state.tokens[index + 1];
if (isParagraph)
if (tocRegex.exec(contentToken.content)) {
tocLocations.push(index);
utility.cleanInline(contentToken, tocRegex);
}
if (!isHeading) continue;
if (excludeFromTocRegex.exec(contentToken.content)) {
utility.cleanInline(contentToken, excludeFromTocRegex);
continue;
}
const id = utility.slugify(contentToken.content, usedIds, options.thisExtensionSettings.headingIdPrefix);
const level = utility.htmlHeadingLevel(token.tag);
const content = autoNumberGenerator.generate(level, contentToken.content);
headingSet[index] = { index: index, id: id, content: content, level: level, tag: token.tag };
} // loop state.tokens
}); //md.core.ruler.after
const previousRenderHeadingOpen = md.renderer.rules.heading_open;
md.renderer.rules.heading_open = (tokens, index, ruleOptions, object, renderer) => {
const heading = headingSet[index];
if (!heading)
return utility.renderDefault(tokens, index, ruleOptions, object, renderer, previousRenderHeadingOpen, `<${tokens[index].tag}>`);
return `<${headingSet[index].tag} id="${headingSet[index].id}">`;
}; //md.renderer.rules.heading_open
// to remove data-... from links
const previousLinkOpen = md.renderer.rules.link_open;
md.renderer.rules.link_open = (tokens, index, ruleOptions, object, renderer) => {
let attributes = [];
if (tokens[index].type == "link_open") {
for (let attribute of tokens[index].attrs) {
if (!attribute[0].startsWith("data-"))
attributes.push(`${attribute[0]}="${attribute[1]}"`);
} //loop
} //if
let open = "<a>";
if (attributes.length > 0)
open = `<a ${attributes.join(' ')}>`;
return utility.renderDefault(tokens, index, ruleOptions, object, renderer, previousLinkOpen, open);
}; //md.renderer.rules.heading_open
const previousRenderParagraphOpen = md.renderer.rules.paragraph_open;
md.renderer.rules.paragraph_open = (tokens, index, ruleOptions, object, renderer) => {
for (let tocLocation of tocLocations)
if (index == tocLocation)
return `<p class="${options.thisExtensionSettings.TOC.containerClass}">${buildToc()}`;
return utility.renderDefault(tokens, index, ruleOptions, object, renderer, previousRenderParagraphOpen, `<p>`);
}; //md.renderer.rules.paragraph_open
}; //module.exports
| Auto-numbering: added numbering to headings
| extension/id.toc.js | Auto-numbering: added numbering to headings | <ide><path>xtension/id.toc.js
<ide> const id = utility.slugify(contentToken.content, usedIds, options.thisExtensionSettings.headingIdPrefix);
<ide> const level = utility.htmlHeadingLevel(token.tag);
<ide> const content = autoNumberGenerator.generate(level, contentToken.content);
<del> headingSet[index] = { index: index, id: id, content: content, level: level, tag: token.tag };
<add> const prefix = content.slice(0, content.length - contentToken.content.length);
<add> headingSet[index] = { index: index, id: id, content: content, prefix: prefix, level: level, tag: token.tag };
<ide> } // loop state.tokens
<ide> }); //md.core.ruler.after
<ide>
<ide> const heading = headingSet[index];
<ide> if (!heading)
<ide> return utility.renderDefault(tokens, index, ruleOptions, object, renderer, previousRenderHeadingOpen, `<${tokens[index].tag}>`);
<del> return `<${headingSet[index].tag} id="${headingSet[index].id}">`;
<add> return `<${headingSet[index].tag} id="${headingSet[index].id}">${headingSet[index].prefix} `;
<ide> }; //md.renderer.rules.heading_open
<ide>
<ide> // to remove data-... from links |
|
Java | agpl-3.0 | d2481fd77f39ad9af526026806d7e15013a214ad | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | a22d4878-2e5f-11e5-9284-b827eb9e62be | hello.java | a227dea6-2e5f-11e5-9284-b827eb9e62be | a22d4878-2e5f-11e5-9284-b827eb9e62be | hello.java | a22d4878-2e5f-11e5-9284-b827eb9e62be | <ide><path>ello.java
<del>a227dea6-2e5f-11e5-9284-b827eb9e62be
<add>a22d4878-2e5f-11e5-9284-b827eb9e62be |
|
Java | apache-2.0 | 958091ee8ab089d273674f7e0c221dab336febf9 | 0 | napp-abledea/notbed-util | /**
*
*/
package com.notbed.util;
import java.io.Closeable;
import java.sql.Connection;
import org.apache.commons.logging.LogFactory;
import com.notbed.util.mass.VoidEvaluator;
/**
* @author Alexandru Bledea
* @since Sep 22, 2013
*/
public class UClose {
public static VoidEvaluator<Closeable> CLOSEABLE = new CloseEvaluator<Closeable>() {
/* (non-Javadoc)
* @see com.notbed.util.UClose.CloseEvaluator#close(java.lang.Object)
*/
@Override
protected void close(Closeable object) throws Exception {
object.close();
}
};
public static VoidEvaluator<Connection> CONNECTION = new CloseEvaluator<Connection>() {
/* (non-Javadoc)
* @see com.notbed.util.UClose.CloseEvaluator#close(java.lang.Object)
*/
@Override
protected void close(Connection object) throws Exception {
if (!object.isClosed()) {
object.close();
}
}
};
/**
* @author Alexandru Bledea
* @since Sep 22, 2013
* @param <O>
*/
private static abstract class CloseEvaluator<O> extends VoidEvaluator<O> {
/* (non-Javadoc)
* @see com.notbed.util.VoidEvaluator#evaluateNoResult(java.lang.Object)
*/
@Override
public void evaluateNoResult(O obj) {
if (obj != null) {
try {
close(obj);
} catch (Throwable t) {
LogFactory.getLog(getClass()).error("Failed to close", t);
}
}
};
/**
* @param object
*/
protected abstract void close(O object) throws Exception;
}
}
| com.notbed.util/src/com/notbed/util/UClose.java | /**
*
*/
package com.notbed.util;
import java.io.Closeable;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import org.apache.commons.logging.LogFactory;
import com.notbed.util.mass.VoidEvaluator;
/**
* @author Alexandru Bledea
* @since Sep 22, 2013
*/
public class UClose {
public static VoidEvaluator<Closeable> CLOSEABLE = new CloseEvaluator<Closeable>() {
/* (non-Javadoc)
* @see com.notbed.util.UClose.CloseEvaluator#close(java.lang.Object)
*/
@Override
protected void close(Closeable object) throws Exception {
object.close();
}
};
public static VoidEvaluator<ResultSet> RESULT_SET = new CloseEvaluator<ResultSet>() {
/* (non-Javadoc)
* @see com.notbed.util.UClose.CloseEvaluator#close(java.lang.Object)
*/
@Override
protected void close(ResultSet object) throws Exception {
if (!object.isClosed()) {
object.close();
}
}
};
public static VoidEvaluator<Statement> STATEMENT = new CloseEvaluator<Statement>() {
/* (non-Javadoc)
* @see com.notbed.util.UClose.CloseEvaluator#close(java.lang.Object)
*/
@Override
protected void close(Statement object) throws Exception {
if (!object.isClosed()) {
object.close();
}
}
};
public static VoidEvaluator<Connection> CONNECTION = new CloseEvaluator<Connection>() {
/* (non-Javadoc)
* @see com.notbed.util.UClose.CloseEvaluator#close(java.lang.Object)
*/
@Override
protected void close(Connection object) throws Exception {
if (!object.isClosed()) {
object.close();
}
}
};
/**
* @author Alexandru Bledea
* @since Sep 22, 2013
* @param <O>
*/
private static abstract class CloseEvaluator<O> extends VoidEvaluator<O> {
/* (non-Javadoc)
* @see com.notbed.util.VoidEvaluator#evaluateNoResult(java.lang.Object)
*/
@Override
public void evaluateNoResult(O obj) {
if (obj != null) {
try {
close(obj);
} catch (Throwable t) {
LogFactory.getLog(getClass()).error("Failed to close", t);
}
}
};
/**
* @param object
*/
protected abstract void close(O object) throws Exception;
}
}
| not needed in java 7
| com.notbed.util/src/com/notbed/util/UClose.java | not needed in java 7 | <ide><path>om.notbed.util/src/com/notbed/util/UClose.java
<ide>
<ide> import java.io.Closeable;
<ide> import java.sql.Connection;
<del>import java.sql.ResultSet;
<del>import java.sql.Statement;
<ide>
<ide> import org.apache.commons.logging.LogFactory;
<ide>
<ide> @Override
<ide> protected void close(Closeable object) throws Exception {
<ide> object.close();
<del> }
<del> };
<del>
<del> public static VoidEvaluator<ResultSet> RESULT_SET = new CloseEvaluator<ResultSet>() {
<del>
<del> /* (non-Javadoc)
<del> * @see com.notbed.util.UClose.CloseEvaluator#close(java.lang.Object)
<del> */
<del> @Override
<del> protected void close(ResultSet object) throws Exception {
<del> if (!object.isClosed()) {
<del> object.close();
<del> }
<del> }
<del> };
<del>
<del> public static VoidEvaluator<Statement> STATEMENT = new CloseEvaluator<Statement>() {
<del>
<del> /* (non-Javadoc)
<del> * @see com.notbed.util.UClose.CloseEvaluator#close(java.lang.Object)
<del> */
<del> @Override
<del> protected void close(Statement object) throws Exception {
<del> if (!object.isClosed()) {
<del> object.close();
<del> }
<ide> }
<ide> };
<ide> |
|
Java | mpl-2.0 | c94df2d2630877d135c68fc88e3016e9c7ced341 | 0 | CalebSLane/openelisglobal-core,pfschwartz/openelisglobal-core,openelisglobal/openelisglobal-core,CalebSLane/openelisglobal-core,pfschwartz/openelisglobal-core,phassoa/openelisglobal-core,openelisglobal/openelisglobal-core,phassoa/openelisglobal-core,phassoa/openelisglobal-core,pfschwartz/openelisglobal-core,openelisglobal/openelisglobal-core,CalebSLane/openelisglobal-core,pfschwartz/openelisglobal-core,pfschwartz/openelisglobal-core,openelisglobal/openelisglobal-core,phassoa/openelisglobal-core,CalebSLane/openelisglobal-core,phassoa/openelisglobal-core,CalebSLane/openelisglobal-core,openelisglobal/openelisglobal-core | /*
* The contents of this file are subject to the Mozilla Public License
* Version 1.1 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations under
* the License.
*
* The Original Code is OpenELIS code.
*
* Copyright (C) ITECH, University of Washington, Seattle WA. All Rights Reserved.
*/
package us.mn.state.health.lims.plugin;
import org.apache.commons.io.IOUtils;
import org.dom4j.*;
import us.mn.state.health.lims.common.exception.LIMSException;
import us.mn.state.health.lims.common.log.LogEvent;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Enumeration;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
public class PluginLoader {
private static final String PLUGIN_ANALYZER = "plugin" + File.separator;
private static final String VERSION = "version";
private static final String SUPPORTED_VERSION = "1.0";
private static final String PATH = "path";
private static final String ANALYZER_IMPORTER = "analyzerImporter";
private static final String MENU = "menu";
private static final String PERMISSION = "permission";
private static final String EXTENSION_POINT = "extension_point";
private static final String EXTENSION = "extension";
private static final String DESCRIPTION = "description";
private static final String VALUE = "value";
private ServletContext context;
public PluginLoader(ServletContextEvent event) {
context = event.getServletContext();
}
public void load() {
File pluginDir = new File(context.getRealPath(PLUGIN_ANALYZER));
loadDirectory( pluginDir );
}
private void loadDirectory( File pluginDir ){
File[] files = pluginDir.listFiles();
if (files != null) {
for (File file : files) {
if (file.getName().endsWith("jar")) {
loadPlugin(file);
}else if(file.isDirectory()){
System.out.println("Checking plugin subfolder: " + file.getName());
loadDirectory( file );
}
}
}
}
private void loadPlugin(File pluginFile) {
try {
JarFile jar = new JarFile(pluginFile);
final Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
final JarEntry entry = entries.nextElement();
if (entry.getName().contains(".xml")) {
boolean valid = loadFromXML(jar, entry);
if (valid) {
break;
}
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
private boolean loadFromXML(JarFile jar, JarEntry entry) {
Attribute description = null;
try {
URL url = new URL("jar:file:///" + jar.getName() + "!/");
InputStream input = jar.getInputStream(entry);
String xml = IOUtils.toString(input, "UTF-8");
//System.out.println(xml);
Document doc = DocumentHelper.parseText(xml);
Element versionElement = doc.getRootElement().element(VERSION);
if( versionElement == null){
LogEvent.logError("PluginLoader", "load", "Missing version number in plugin");
System.out.println("Missing version number in plugin");
return false;
}
if (!SUPPORTED_VERSION.equals(versionElement.getData())) {
LogEvent.logError("PluginLoader", "load", "Unsupported version number. Expected " + SUPPORTED_VERSION + " got " + versionElement.getData());
System.out.println("Unsupported version number. Expected " + SUPPORTED_VERSION + " got " + versionElement.getData());
return false;
}
Element analyzerImporter = doc.getRootElement().element(ANALYZER_IMPORTER);
if (analyzerImporter != null) {
description = analyzerImporter.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
Attribute path = analyzerImporter.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
loadActualPlugin(url, path.getValue());
System.out.println("Loaded: " + description.getValue());
}
Element menu = doc.getRootElement().element(MENU);
if (menu != null) {
description = menu.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
Attribute path = menu.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
loadActualPlugin(url, path.getValue());
System.out.println("Loaded: " + description.getValue());
}
Element permissions = doc.getRootElement().element(PERMISSION);
if (permissions != null) {
description = permissions.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
Attribute path = permissions.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
loadActualPlugin(url, path.getValue());
System.out.println( "Loaded: " + description.getValue());
}
} catch (MalformedURLException e) {
e.printStackTrace();
return false;
} catch (IOException e) {
e.printStackTrace();
return false;
} catch (DocumentException e) {
e.printStackTrace();
return false;
} catch (LIMSException e){
if( description != null) {
LogEvent.logError("PluginLoader", "load", "Failed Loading: " + description.getValue());
System.out.println("Failed Loading: " + description.getValue());
}
return false;
}
return true;
}
@SuppressWarnings("unchecked")
private void loadActualPlugin(URL url, String classPath) throws LIMSException {
try {
URL[] urls = {url};
ClassLoader classLoader = new URLClassLoader(urls, this.getClass().getClassLoader());
Class<APlugin> aClass = (Class<APlugin>) classLoader.loadClass(classPath);
APlugin instance = aClass.newInstance();
instance.connect();
} catch (ClassNotFoundException e) {
e.printStackTrace();
throw new LIMSException("See previous stack trace");
} catch (InstantiationException e) {
e.printStackTrace();
throw new LIMSException("See previous stack trace");
} catch (IllegalAccessException e) {
e.printStackTrace();
throw new LIMSException("See previous stack trace");
}
}
}
| app/src/us/mn/state/health/lims/plugin/PluginLoader.java | /*
* The contents of this file are subject to the Mozilla Public License
* Version 1.1 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
* License for the specific language governing rights and limitations under
* the License.
*
* The Original Code is OpenELIS code.
*
* Copyright (C) ITECH, University of Washington, Seattle WA. All Rights Reserved.
*/
package us.mn.state.health.lims.plugin;
import org.apache.commons.io.IOUtils;
import org.dom4j.*;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Enumeration;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
public class PluginLoader {
private static final String PLUGIN_ANALYZER = "plugin" + File.separator;
private static final String VERSION = "version";
private static final String SUPPORTED_VERSION = "1.0";
private static final String PATH = "path";
private static final String ANALYZER_IMPORTER = "analyzerImporter";
private static final String MENU = "menu";
private static final String PERMISSION = "permission";
private static final String EXTENSION_POINT = "extension_point";
private static final String EXTENSION = "extension";
private static final String DESCRIPTION = "description";
private static final String VALUE = "value";
private ServletContext context;
public PluginLoader(ServletContextEvent event) {
context = event.getServletContext();
}
public void load() {
File pluginDir = new File(context.getRealPath(PLUGIN_ANALYZER));
loadDirectory( pluginDir );
}
private void loadDirectory( File pluginDir ){
File[] files = pluginDir.listFiles();
if (files != null) {
for (File file : files) {
if (file.getName().endsWith("jar")) {
loadPlugin(file);
}else if(file.isDirectory()){
System.out.println("Checking plugin subfolder: " + file.getName());
loadDirectory( file );
}
}
}
}
private void loadPlugin(File pluginFile) {
try {
JarFile jar = new JarFile(pluginFile);
final Enumeration<JarEntry> entries = jar.entries();
while (entries.hasMoreElements()) {
final JarEntry entry = entries.nextElement();
if (entry.getName().contains(".xml")) {
boolean valid = loadFromXML(jar, entry);
if (valid) {
break;
}
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
private boolean loadFromXML(JarFile jar, JarEntry entry) {
try {
URL url = new URL("jar:file:///" + jar.getName() + "!/");
InputStream input = jar.getInputStream(entry);
String xml = IOUtils.toString(input, "UTF-8");
//System.out.println(xml);
Document doc = DocumentHelper.parseText(xml);
Element versionElement = doc.getRootElement().element(VERSION);
if (!SUPPORTED_VERSION.equals(versionElement.getData())) {
System.out.println("Unsupported version number. Expected " + SUPPORTED_VERSION + " got " + versionElement.getData());
return false;
}
Element analyzerImporter = doc.getRootElement().element(ANALYZER_IMPORTER);
if (analyzerImporter != null) {
Attribute description = analyzerImporter.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
System.out.println( "Loading: " + description.getValue());
Attribute path = analyzerImporter.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
loadActualPlugin(url, path.getValue());
}
Element menu = doc.getRootElement().element(MENU);
if (menu != null) {
Attribute description = menu.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
System.out.println( "Loading: " + description.getValue());
Attribute path = menu.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
loadActualPlugin(url, path.getValue());
}
Element permissions = doc.getRootElement().element(PERMISSION);
if (permissions != null) {
Attribute description = permissions.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
Attribute path = permissions.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
boolean loaded = loadActualPlugin(url, path.getValue());
if( loaded ){
System.out.println( "Loading: " + description.getValue());
}else{
System.out.println( "Failed Loading: " + description.getValue());
}
}
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (DocumentException e) {
e.printStackTrace();
}
return true;
}
@SuppressWarnings("unchecked")
private boolean loadActualPlugin(URL url, String classPath) {
try {
URL[] urls = {url};
ClassLoader classLoader = new URLClassLoader(urls, this.getClass().getClassLoader());
Class<APlugin> aClass = (Class<APlugin>) classLoader.loadClass(classPath);
APlugin instance = aClass.newInstance();
return instance.connect();
} catch (ClassNotFoundException e) {
e.printStackTrace();
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
return false;
}
}
| Plugin loader does a better job of catching exceptions and logging errors
https://www.pivotaltracker.com/story/show/94035198
| app/src/us/mn/state/health/lims/plugin/PluginLoader.java | Plugin loader does a better job of catching exceptions and logging errors | <ide><path>pp/src/us/mn/state/health/lims/plugin/PluginLoader.java
<ide>
<ide> import org.apache.commons.io.IOUtils;
<ide> import org.dom4j.*;
<add>import us.mn.state.health.lims.common.exception.LIMSException;
<add>import us.mn.state.health.lims.common.log.LogEvent;
<ide>
<ide> import javax.servlet.ServletContext;
<ide> import javax.servlet.ServletContextEvent;
<ide> }
<ide>
<ide> private boolean loadFromXML(JarFile jar, JarEntry entry) {
<del>
<add> Attribute description = null;
<ide> try {
<ide> URL url = new URL("jar:file:///" + jar.getName() + "!/");
<ide> InputStream input = jar.getInputStream(entry);
<ide>
<ide> Element versionElement = doc.getRootElement().element(VERSION);
<ide>
<add> if( versionElement == null){
<add> LogEvent.logError("PluginLoader", "load", "Missing version number in plugin");
<add> System.out.println("Missing version number in plugin");
<add> return false;
<add> }
<ide> if (!SUPPORTED_VERSION.equals(versionElement.getData())) {
<add> LogEvent.logError("PluginLoader", "load", "Unsupported version number. Expected " + SUPPORTED_VERSION + " got " + versionElement.getData());
<ide> System.out.println("Unsupported version number. Expected " + SUPPORTED_VERSION + " got " + versionElement.getData());
<ide> return false;
<ide> }
<ide> Element analyzerImporter = doc.getRootElement().element(ANALYZER_IMPORTER);
<ide>
<ide> if (analyzerImporter != null) {
<del> Attribute description = analyzerImporter.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
<del> System.out.println( "Loading: " + description.getValue());
<add> description = analyzerImporter.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
<ide> Attribute path = analyzerImporter.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
<ide> loadActualPlugin(url, path.getValue());
<add> System.out.println("Loaded: " + description.getValue());
<ide> }
<ide>
<ide> Element menu = doc.getRootElement().element(MENU);
<ide>
<ide> if (menu != null) {
<del> Attribute description = menu.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
<del> System.out.println( "Loading: " + description.getValue());
<add> description = menu.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
<ide> Attribute path = menu.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
<ide> loadActualPlugin(url, path.getValue());
<add> System.out.println("Loaded: " + description.getValue());
<ide> }
<ide>
<ide> Element permissions = doc.getRootElement().element(PERMISSION);
<ide>
<ide> if (permissions != null) {
<del> Attribute description = permissions.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
<add> description = permissions.element(EXTENSION_POINT).element(DESCRIPTION).attribute(VALUE);
<ide> Attribute path = permissions.element(EXTENSION_POINT).element(EXTENSION).attribute(PATH);
<del> boolean loaded = loadActualPlugin(url, path.getValue());
<del> if( loaded ){
<del> System.out.println( "Loading: " + description.getValue());
<del> }else{
<del> System.out.println( "Failed Loading: " + description.getValue());
<del> }
<add> loadActualPlugin(url, path.getValue());
<add> System.out.println( "Loaded: " + description.getValue());
<ide> }
<ide>
<ide> } catch (MalformedURLException e) {
<ide> e.printStackTrace();
<add> return false;
<ide> } catch (IOException e) {
<ide> e.printStackTrace();
<add> return false;
<ide> } catch (DocumentException e) {
<ide> e.printStackTrace();
<add> return false;
<add> } catch (LIMSException e){
<add> if( description != null) {
<add> LogEvent.logError("PluginLoader", "load", "Failed Loading: " + description.getValue());
<add> System.out.println("Failed Loading: " + description.getValue());
<add> }
<add> return false;
<ide> }
<ide>
<ide>
<ide>
<ide>
<ide> @SuppressWarnings("unchecked")
<del> private boolean loadActualPlugin(URL url, String classPath) {
<add> private void loadActualPlugin(URL url, String classPath) throws LIMSException {
<ide> try {
<ide> URL[] urls = {url};
<ide> ClassLoader classLoader = new URLClassLoader(urls, this.getClass().getClassLoader());
<ide>
<ide> Class<APlugin> aClass = (Class<APlugin>) classLoader.loadClass(classPath);
<ide> APlugin instance = aClass.newInstance();
<del> return instance.connect();
<add> instance.connect();
<ide> } catch (ClassNotFoundException e) {
<ide> e.printStackTrace();
<add> throw new LIMSException("See previous stack trace");
<ide> } catch (InstantiationException e) {
<ide> e.printStackTrace();
<add> throw new LIMSException("See previous stack trace");
<ide> } catch (IllegalAccessException e) {
<ide> e.printStackTrace();
<add> throw new LIMSException("See previous stack trace");
<ide> }
<ide>
<del> return false;
<ide> }
<ide> } |
|
Java | apache-2.0 | 59fe335f5fe8f6cc2500f141d0d78d584ee674f3 | 0 | Skarlso/gocd,marques-work/gocd,varshavaradarajan/gocd,marques-work/gocd,varshavaradarajan/gocd,arvindsv/gocd,varshavaradarajan/gocd,tomzo/gocd,kierarad/gocd,bdpiparva/gocd,kierarad/gocd,naveenbhaskar/gocd,bdpiparva/gocd,arvindsv/gocd,arvindsv/gocd,ibnc/gocd,gocd/gocd,tomzo/gocd,naveenbhaskar/gocd,ibnc/gocd,tomzo/gocd,ketan/gocd,marques-work/gocd,ind9/gocd,ibnc/gocd,arvindsv/gocd,naveenbhaskar/gocd,Skarlso/gocd,GaneshSPatil/gocd,tomzo/gocd,bdpiparva/gocd,ind9/gocd,naveenbhaskar/gocd,gocd/gocd,tomzo/gocd,varshavaradarajan/gocd,GaneshSPatil/gocd,kierarad/gocd,ketan/gocd,kierarad/gocd,naveenbhaskar/gocd,Skarlso/gocd,kierarad/gocd,kierarad/gocd,ibnc/gocd,tomzo/gocd,gocd/gocd,Skarlso/gocd,ind9/gocd,marques-work/gocd,bdpiparva/gocd,ind9/gocd,gocd/gocd,gocd/gocd,ibnc/gocd,marques-work/gocd,gocd/gocd,ibnc/gocd,varshavaradarajan/gocd,bdpiparva/gocd,GaneshSPatil/gocd,GaneshSPatil/gocd,Skarlso/gocd,arvindsv/gocd,marques-work/gocd,Skarlso/gocd,ketan/gocd,varshavaradarajan/gocd,GaneshSPatil/gocd,ketan/gocd,naveenbhaskar/gocd,arvindsv/gocd,ketan/gocd,ind9/gocd,GaneshSPatil/gocd,bdpiparva/gocd,ketan/gocd | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service.materials;
import com.thoughtworks.go.config.GoConfigDao;
import com.thoughtworks.go.config.UpdateConfigCommand;
import com.thoughtworks.go.config.exceptions.EntityType;
import com.thoughtworks.go.domain.config.*;
import com.thoughtworks.go.domain.packagerepository.PackageRepositories;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.helper.ConfigFileFixture;
import com.thoughtworks.go.plugin.infra.PluginManager;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.presentation.TriStateSelection;
import com.thoughtworks.go.server.dao.PluginSqlMapDao;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import com.thoughtworks.go.util.GoConfigFileHelper;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static com.thoughtworks.go.serverhealth.HealthStateType.forbidden;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:testPropertyConfigurer.xml",
"classpath:WEB-INF/spring-all-servlet.xml",
})
public class PackageRepositoryServiceIntegrationTest {
@Autowired
private GoConfigService goConfigService;
@Autowired
private PackageRepositoryService service;
@Autowired
private PluginSqlMapDao pluginSqlMapDao;
@Autowired
private GoConfigDao goConfigDao;
private GoConfigFileHelper configHelper;
@Mock
private PluginManager pluginManager;
private Username username;
@Before
public void setUp() throws Exception {
initMocks(this);
String content = ConfigFileFixture.configWithSecurity("<security>\n" +
" <authConfigs>\n" +
" <authConfig id=\"9cad79b0-4d9e-4a62-829c-eb4d9488062f\" pluginId=\"cd.go.authentication.passwordfile\">\n" +
" <property>\n" +
" <key>PasswordFilePath</key>\n" +
" <value>../manual-testing/ant_hg/password.properties</value>\n" +
" </property>\n" +
" </authConfig>\n" +
" </authConfigs>" +
"</security>");
configHelper = new GoConfigFileHelper(content);
configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
configHelper.onSetUp();
goConfigService.forceNotifyListeners();
service.setPluginManager(pluginManager);
username = new Username("CurrentUser");
UpdateConfigCommand command = goConfigService.modifyAdminPrivilegesCommand(asList(username.getUsername().toString()), new TriStateSelection(Admin.GO_SYSTEM_ADMIN, TriStateSelection.Action.add));
goConfigService.updateConfig(command);
}
@After
public void tearDown() throws Exception {
configHelper.onTearDown();
pluginSqlMapDao.deleteAllPlugins();
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories());
}
@Test
public void shouldDeleteTheSpecifiedPackageRepository() throws Exception {
HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
String repoId = "npm";
PackageRepository npmRepo = new PackageRepository();
npmRepo.setId(repoId);
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(npmRepo));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
service.deleteRepository(username, npmRepo, result);
HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
expectedResult.setMessage(EntityType.PackageRepository.deleteSuccessful(npmRepo.getId()));
assertThat(result, is(expectedResult));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(0));
assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToDeletePackageRepository() throws Exception {
HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
String repoId = "npm";
PackageRepository npmRepo = new PackageRepository();
npmRepo.setId(repoId);
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(npmRepo));
HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
expectedResult.forbidden(EntityType.PackageRepository.forbiddenToDelete("npm", "UnauthorizedUser"), forbidden());
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
service.deleteRepository(new Username("UnauthorizedUser"), npmRepo, result);
assertThat(result, is(expectedResult));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToCreatePackageRepository() throws Exception {
HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
String repoId = "npm";
PackageRepository npmRepo = new PackageRepository();
npmRepo.setId(repoId);
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(npmRepo));
HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
expectedResult.forbidden(EntityType.PackageRepository.forbiddenToEdit("npm", "UnauthorizedUser"), forbidden());
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
service.createPackageRepository(npmRepo, new Username("UnauthorizedUser"), result);
assertThat(result, is(expectedResult));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToUpdatePackageRepository() throws Exception {
HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
expectedResult.forbidden(EntityType.PackageRepository.forbiddenToEdit("npm.org", "UnauthorizedUser"), forbidden());
HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
String oldRepoId = "npmOrg";
String newRepoId = "npm.org";
PackageRepository oldPackageRepo = new PackageRepository();
PackageRepository newPackageRepo = new PackageRepository();
PluginConfiguration pluginConfiguration = new PluginConfiguration();
pluginConfiguration.setId("npm");
oldPackageRepo.setPluginConfiguration(pluginConfiguration);
oldPackageRepo.setId(oldRepoId);
oldPackageRepo.setName(oldRepoId);
newPackageRepo.setPluginConfiguration(pluginConfiguration);
newPackageRepo.setId(newRepoId);
newPackageRepo.setName(newRepoId);
Configuration configuration = new Configuration();
configuration.add(new ConfigurationProperty(new ConfigurationKey("foo"), new ConfigurationValue("bar")));
oldPackageRepo.setConfiguration(configuration);
newPackageRepo.setConfiguration(configuration);
when(pluginManager.getPluginDescriptorFor("npm")).thenReturn(new GoPluginDescriptor("npm", "1", null, null, null, false));
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(oldPackageRepo));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(oldRepoId), is(oldPackageRepo));
assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(newRepoId));
service.updatePackageRepository(newPackageRepo, new Username("UnauthorizedUser"), "md5", result, oldRepoId);
assertThat(result, is(expectedResult));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(oldRepoId), is(oldPackageRepo));
assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(newRepoId));
}
}
| server/src/test-integration/java/com/thoughtworks/go/server/service/materials/PackageRepositoryServiceIntegrationTest.java | /*
* Copyright 2018 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service.materials;
import com.thoughtworks.go.config.UpdateConfigCommand;
import com.thoughtworks.go.config.exceptions.EntityType;
import com.thoughtworks.go.domain.config.*;
import com.thoughtworks.go.domain.packagerepository.PackageRepositories;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.plugin.infra.PluginManager;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.presentation.TriStateSelection;
import com.thoughtworks.go.server.dao.PluginSqlMapDao;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static com.thoughtworks.go.serverhealth.HealthStateType.forbidden;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:testPropertyConfigurer.xml",
"classpath:WEB-INF/spring-all-servlet.xml",
})
public class PackageRepositoryServiceIntegrationTest {
@Autowired
private GoConfigService goConfigService;
@Autowired
private PackageRepositoryService service;
@Autowired
private PluginSqlMapDao pluginSqlMapDao;
@Mock
private PluginManager pluginManager;
private Username username;
@Before
public void setUp() throws Exception {
initMocks(this);
service.setPluginManager(pluginManager);
username = new Username("CurrentUser");
UpdateConfigCommand command = goConfigService.modifyAdminPrivilegesCommand(asList(username.getUsername().toString()), new TriStateSelection(Admin.GO_SYSTEM_ADMIN, TriStateSelection.Action.add));
goConfigService.updateConfig(command);
}
@After
public void tearDown() throws Exception {
pluginSqlMapDao.deleteAllPlugins();
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories());
}
@Test
public void shouldDeleteTheSpecifiedPackageRepository() throws Exception {
HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
String repoId = "npm";
PackageRepository npmRepo = new PackageRepository();
npmRepo.setId(repoId);
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(npmRepo));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
service.deleteRepository(username, npmRepo, result);
HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
expectedResult.setMessage(EntityType.PackageRepository.deleteSuccessful(npmRepo.getId()));
assertThat(result, is(expectedResult));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(0));
assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToDeletePackageRepository() throws Exception {
HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
String repoId = "npm";
PackageRepository npmRepo = new PackageRepository();
npmRepo.setId(repoId);
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(npmRepo));
HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
expectedResult.forbidden(EntityType.PackageRepository.forbiddenToDelete("npm", "UnauthorizedUser"), forbidden());
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
service.deleteRepository(new Username("UnauthorizedUser"), npmRepo, result);
assertThat(result, is(expectedResult));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToCreatePackageRepository() throws Exception {
HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
String repoId = "npm";
PackageRepository npmRepo = new PackageRepository();
npmRepo.setId(repoId);
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(npmRepo));
HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
expectedResult.forbidden(EntityType.PackageRepository.forbiddenToEdit("npm", "UnauthorizedUser"), forbidden());
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
service.createPackageRepository(npmRepo, new Username("UnauthorizedUser"), result);
assertThat(result, is(expectedResult));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(repoId), is(npmRepo));
}
@Test
public void shouldReturnTheExactLocalizeMessageIfItFailsToUpdatePackageRepository() throws Exception {
HttpLocalizedOperationResult expectedResult = new HttpLocalizedOperationResult();
expectedResult.forbidden(EntityType.PackageRepository.forbiddenToEdit("npm.org", "UnauthorizedUser"), forbidden());
HttpLocalizedOperationResult result = new HttpLocalizedOperationResult();
String oldRepoId = "npmOrg";
String newRepoId = "npm.org";
PackageRepository oldPackageRepo = new PackageRepository();
PackageRepository newPackageRepo = new PackageRepository();
PluginConfiguration pluginConfiguration = new PluginConfiguration();
pluginConfiguration.setId("npm");
oldPackageRepo.setPluginConfiguration(pluginConfiguration);
oldPackageRepo.setId(oldRepoId);
oldPackageRepo.setName(oldRepoId);
newPackageRepo.setPluginConfiguration(pluginConfiguration);
newPackageRepo.setId(newRepoId);
newPackageRepo.setName(newRepoId);
Configuration configuration = new Configuration();
configuration.add(new ConfigurationProperty(new ConfigurationKey("foo"), new ConfigurationValue("bar")));
oldPackageRepo.setConfiguration(configuration);
newPackageRepo.setConfiguration(configuration);
when(pluginManager.getPluginDescriptorFor("npm")).thenReturn(new GoPluginDescriptor("npm", "1", null, null, null, false));
goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories(oldPackageRepo));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(oldRepoId), is(oldPackageRepo));
assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(newRepoId));
service.updatePackageRepository(newPackageRepo, new Username("UnauthorizedUser"), "md5", result, oldRepoId);
assertThat(result, is(expectedResult));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().size(), is(1));
assertThat(goConfigService.getConfigForEditing().getPackageRepositories().find(oldRepoId), is(oldPackageRepo));
assertNull(goConfigService.getConfigForEditing().getPackageRepositories().find(newRepoId));
}
}
| Fixing flaky test. Test expected security to have been enabled was did not do so as part of its setup, hence was dependent on the order of execution of test suites
| server/src/test-integration/java/com/thoughtworks/go/server/service/materials/PackageRepositoryServiceIntegrationTest.java | Fixing flaky test. Test expected security to have been enabled was did not do so as part of its setup, hence was dependent on the order of execution of test suites | <ide><path>erver/src/test-integration/java/com/thoughtworks/go/server/service/materials/PackageRepositoryServiceIntegrationTest.java
<ide> /*
<del> * Copyright 2018 ThoughtWorks, Inc.
<add> * Copyright 2019 ThoughtWorks, Inc.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> package com.thoughtworks.go.server.service.materials;
<ide>
<add>import com.thoughtworks.go.config.GoConfigDao;
<ide> import com.thoughtworks.go.config.UpdateConfigCommand;
<ide> import com.thoughtworks.go.config.exceptions.EntityType;
<ide> import com.thoughtworks.go.domain.config.*;
<ide> import com.thoughtworks.go.domain.packagerepository.PackageRepositories;
<ide> import com.thoughtworks.go.domain.packagerepository.PackageRepository;
<add>import com.thoughtworks.go.helper.ConfigFileFixture;
<ide> import com.thoughtworks.go.plugin.infra.PluginManager;
<ide> import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
<ide> import com.thoughtworks.go.presentation.TriStateSelection;
<ide> import com.thoughtworks.go.server.domain.Username;
<ide> import com.thoughtworks.go.server.service.GoConfigService;
<ide> import com.thoughtworks.go.server.service.result.HttpLocalizedOperationResult;
<add>import com.thoughtworks.go.util.GoConfigFileHelper;
<ide> import org.junit.After;
<ide> import org.junit.Before;
<ide> import org.junit.Test;
<ide> private PackageRepositoryService service;
<ide> @Autowired
<ide> private PluginSqlMapDao pluginSqlMapDao;
<add> @Autowired
<add> private GoConfigDao goConfigDao;
<add> private GoConfigFileHelper configHelper;
<ide>
<ide> @Mock
<ide> private PluginManager pluginManager;
<ide> @Before
<ide> public void setUp() throws Exception {
<ide> initMocks(this);
<add> String content = ConfigFileFixture.configWithSecurity("<security>\n" +
<add> " <authConfigs>\n" +
<add> " <authConfig id=\"9cad79b0-4d9e-4a62-829c-eb4d9488062f\" pluginId=\"cd.go.authentication.passwordfile\">\n" +
<add> " <property>\n" +
<add> " <key>PasswordFilePath</key>\n" +
<add> " <value>../manual-testing/ant_hg/password.properties</value>\n" +
<add> " </property>\n" +
<add> " </authConfig>\n" +
<add> " </authConfigs>" +
<add> "</security>");
<add>
<add> configHelper = new GoConfigFileHelper(content);
<add> configHelper.usingCruiseConfigDao(goConfigDao).initializeConfigFile();
<add> configHelper.onSetUp();
<add> goConfigService.forceNotifyListeners();
<ide> service.setPluginManager(pluginManager);
<ide> username = new Username("CurrentUser");
<ide> UpdateConfigCommand command = goConfigService.modifyAdminPrivilegesCommand(asList(username.getUsername().toString()), new TriStateSelection(Admin.GO_SYSTEM_ADMIN, TriStateSelection.Action.add));
<ide>
<ide> @After
<ide> public void tearDown() throws Exception {
<add> configHelper.onTearDown();
<ide> pluginSqlMapDao.deleteAllPlugins();
<ide> goConfigService.getConfigForEditing().setPackageRepositories(new PackageRepositories());
<ide> } |
|
Java | apache-2.0 | d39f6b53104dcbb3bcdd015b73f4f5bb50486360 | 0 | dreedyman/Rio,dreedyman/Rio,dreedyman/Rio | /*
* Copyright to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.opstring;
import org.rioproject.util.PropertyHelper;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* ClassBundle provides a mechanism to define the resources needed to load and
* instantiate a class.
*
* @author Dennis Reedy
*/
public class ClassBundle implements Serializable {
static final long serialVersionUID = 1L;
/**
* The classname
*/
private String className;
/**
* The URL path used to load the class. The path will be applied to all
* JARs in this ClassBundle
*/
private String codebase;
/**
* Collection of jar names
*/
private List<String> jarNames = Collections.synchronizedList(new ArrayList<String>());
/**
* An artifact ID
*/
private String artifact;
/**
* A table of method names to Class[] objects
*/
private Map<String, Object[]> methodObjectTable = Collections.synchronizedMap(new HashMap<String, Object[]>());
private static Logger logger = Logger.getLogger(ClassBundle.class.getName());
/**
* Create a new ClassBundle
*/
public ClassBundle() {
}
/**
* Create a new ClassBundle
*
* @param className The className
*/
public ClassBundle(String className) {
if(className == null)
throw new IllegalArgumentException("className is null");
this.className = className;
}
/**
* Create a new ClassBundle
*
* @param className The className
* @param jarNames Array of Strings identifying resource names used to load
* the className
* @param codebase The URL path used to load the class. The path will be
* applied to all JARs in this ClassBundle
*/
public ClassBundle(String className, String[] jarNames, String codebase) {
if(className == null)
throw new IllegalArgumentException("className cannot be null");
this.className = className;
if(jarNames!=null)
addJARs(jarNames);
this.codebase = codebase;
}
/**
* Set the codebase used to load the class. The path will be applied to all
* JARs in this ClassBundle
*
* @param codebase The codebase to set
*/
public void setCodebase(String codebase) {
this.codebase = codebase;
if(this.codebase!=null) {
if(!this.codebase.endsWith("/"))
this.codebase = this.codebase+"/";
}
}
/**
* Get the codebase used to load the class.
*
* @return The codebase that has been set. If the codebase has properties
* declared (in the form <tt>$[property]</tt>), return a formatted string
* with the properties expanded. If there are no property elements
* declared, return the original string.
*/
public String getCodebase() {
return(translateCodebase());
}
/**
* Get the codebase without any translation
*
* @return The codebase that has been set
*/
public String getRawCodebase() {
return(codebase);
}
/**
* Set the className
*
* @param className The className, suitable for use with Class.forName()
*/
public void setClassName(String className) {
this.className = className;
}
/**
* Get the className
*
* @return The className, suitable for use with Class.forName()
*/
public String getClassName() {
return (className);
}
/**
* Get the artifact associated with the className
*
* @return The artifact associated with the className
*/
public String getArtifact() {
return artifact;
}
/**
* Set the artifact
*
* @param artifact The artifact associated with the className
*/
    public void setArtifact(String artifact) {
        // No validation: a null artifact is accepted and stored as-is.
        this.artifact = artifact;
    }
/**
* Set JARs to the ClassBundle.
*
* @param jars Jar names to set
*/
    public void setJARs(String... jars) {
        // Replaces (rather than appends to) the current JAR name list.
        jarNames.clear();
        addJARs(jars);
    }
/**
* Add JARs to the ClassBundle.
*
* @param jars Jar names to add.
*/
public void addJARs(String... jars) {
if(jars == null)
throw new IllegalArgumentException("jars cannot be null");
for(String jar : jars)
addJAR(jar);
}
/**
* Add a JAR to the Collection of JAR resources.
*
* @param jar Name of the JAR to add
*/
public void addJAR(String jar) {
if(jar == null)
throw new IllegalArgumentException("jar cannot be null");
if(!jarNames.contains(jar))
jarNames.add(jar);
}
/**
* Get the JAR names.
*
* @return A String array of the JAR names. This method will return a new
* array each time. If there are no JAR names, this method will return an
* empty array
*/
    public String[] getJARNames() {
        // Defensive copy: callers receive a fresh array on every invocation.
        return jarNames.toArray(new String[jarNames.size()]);
    }
/**
* Get the JAR resources
*
* @return An URL array of the JARs that can be used as a classpath to
* load the class. This method will return a new array each time. If
* there are no JARs configured, this method will return an empty array.
*
* @throws MalformedURLException if the codebase has not been set, or if
* the provided codebase contains an invalid protocol
*/
    public URL[] getJARs() throws MalformedURLException {
        // Resolve each configured JAR name against the (property-expanded)
        // codebase to produce a loadable classpath.
        return (urlsFromJARs(getJARNames()));
    }
/**
* Add a method name and the parameters to use for when reflecting on
* specified public member method of the class or interface represented by
* this ClassBundle object. The array of parameter types will be determined
* by the Class object for the Object types provided
*
* @param methodName The public member method of the Class or interface
* represented by this ClassBundle
* @param parameters Array of Object parameters for use when reflecting on
* the method
*/
public void addMethod(String methodName, Object[] parameters) {
if(methodName == null)
throw new IllegalArgumentException("methodName is null");
if(parameters == null) {
methodObjectTable.put(methodName, null);
return;
}
methodObjectTable.put(methodName, parameters);
}
/**
* Get all method names to reflect on
*
* @return Array of String method names to reflect on. If there
* are no method names to reflect on this method will return an empty array
*/
    public String[] getMethodNames() {
        // NOTE(review): copying the keySet of a synchronizedMap is not atomic;
        // a concurrent addMethod() could interleave with the toArray() call --
        // confirm whether concurrent use is expected here.
        Set<String> keys = methodObjectTable.keySet();
        return (keys.toArray(new String[keys.size()]));
    }
/**
* Get the corresponding Class[] parameters to reflect on a method
*
* @param methodName The name of the public method to reflect on
* @return Array of Class objects to use when reflecting on the
* public method
*/
public Class[] getMethodClasses(String methodName) {
Object[] args = getMethodObjects(methodName);
Class[] classes = new Class[args.length];
for(int i = 0; i < classes.length; i++) {
classes[i] = args[i].getClass();
}
return classes;
}
/**
* Get the corresponding Class[] parameters to reflect on a method
*
* @param methodName The name of the public method to reflect on
* @return Array of Object objects to use when reflecting on the
* public method
*/
    public Object[] getMethodObjects(String methodName) {
        if(methodName == null)
            throw new IllegalArgumentException("methodName is null");
        // May return null: addMethod() stores null for no-arg methods, and
        // unknown method names also map to null.
        return (methodObjectTable.get(methodName));
    }
/**
* Utility method to reflect on all added methods using an object
* instantiated from the Class loaded by the ClassBundle
*
* @param object An instantiated Object from the Class loaded by the
* ClassBundle
*
* @throws Exception If there are errors running the known methods
*/
    public void runKnownMethods(Object object) throws Exception {
        String[] methods = getMethodNames();
        for (String method : methods) {
            // Resolve the public method by name and recorded parameter types,
            // then invoke it with the recorded argument values.
            Method m = object.getClass()
                .getMethod(method, getMethodClasses(method));
            m.invoke(object, getMethodObjects(method));
        }
    }
/**
* Override hashCode
*/
public int hashCode() {
int hc = 17;
hc = 37*hc+className.hashCode();
hc = 37*hc+getJARList().hashCode();
return(hc);
}
/**
* Override equals
*/
public boolean equals(Object obj) {
if(this == obj)
return(true);
if(!(obj instanceof ClassBundle))
return(false);
ClassBundle that = (ClassBundle)obj;
if(this.className.equals(that.className)) {
if(this.artifact!=null && that.artifact!=null) {
return this.artifact.equals(that.artifact);
} else {
return this.getJARList().equals(that.getJARList());
}
}
return(false);
}
/*
* Get URLs for jarNames based on the codebase
*/
private URL[] urlsFromJARs(String[] jarNames) throws MalformedURLException {
URL[] urls = new URL[jarNames.length];
StringBuilder sb = new StringBuilder();
for(int i = 0; i < urls.length; i++) {
if(i>0)
sb.append(", ");
sb.append(translateCodebase()).append(jarNames[i]);
urls[i] = new URL(translateCodebase()+jarNames[i]);
}
if(logger.isLoggable(Level.FINE)) {
logger.fine("Translated JARs=["+sb.toString()+"]");
}
return (urls);
}
/*
* Return an ArrayList of JAR names
*/
private ArrayList<String> getJARList() {
ArrayList<String> list = new ArrayList<String>();
String[] jars = getJARNames();
list.addAll(Arrays.asList(jars));
return(list);
}
@Override
public String toString() {
StringBuilder buffer = new StringBuilder();
buffer.append("ClassName=").append(className).append("\n");
buffer.append("Artifact=").append(artifact).append("\n");
buffer.append("Codebase=").append(codebase).append("\n");
String[] jars = getJARNames();
if(jars.length>0) {
buffer.append("Searchpath={");
for(int i=0; i<jars.length; i++) {
if(i>0)
buffer.append(", ");
buffer.append(jars[i]);
}
buffer.append("}");
}
return(buffer.toString());
}
/*
* Expand any properties in the codebase String. Properties are declared
* with the pattern of : <code>$[property]</code>
*
* @return If the codebase has properties declared (in the form
* <code>$[property]</code>), return a formatted string with the
* properties expanded. If there are no property elements declared, return
* the original string.
*
* @throws IllegalArgumentException If a property value cannot be obtained
* an IllegalArgument will be thrown
*/
    private String translateCodebase() {
        if(codebase==null)
            return(codebase);
        // Expand $[property] references using runtime property values.
        String translated = PropertyHelper.expandProperties(codebase, PropertyHelper.RUNTIME);
        // On Windows, rewrite "file://..." to the single-slash "file:/..."
        // form -- presumably so drive-letter paths resolve as URLs; confirm
        // against the consumers of this codebase.
        if(System.getProperty("os.name").startsWith("Win") && translated.startsWith("file://")) {
            translated = "file:/"+translated.substring(7);
        }
        return(translated);
    }
/**
* Merge two ClassBundles
*
* @param bundles ClassBundle instances to merge
*
* @return A merged ClassBundle.
*
* @throws IllegalArgumentException For all ClassBundles that have a
* non-null classname, that classname must be equal. If this is not the
* case then an IllegalArgumentException is thrown.
*/
public static ClassBundle merge(ClassBundle... bundles) {
String className = null;
for(ClassBundle bundle : bundles) {
if(bundle.getClassName()!=null) {
if(className==null)
className = bundle.getClassName();
else if(!className.equals(bundle.getClassName()))
throw new IllegalArgumentException("bundles must have same classname");
}
}
ClassBundle cb = new ClassBundle();
for(ClassBundle bundle : bundles) {
cb.setArtifact(bundle.getArtifact());
cb.addJARs(bundle.getJARNames());
cb.setCodebase(bundle.getCodebase());
cb.setClassName(bundle.getClassName());
cb.methodObjectTable.putAll(bundle.methodObjectTable);
}
return cb;
}
}
| rio-api/src/main/java/org/rioproject/opstring/ClassBundle.java | /*
* Copyright to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.opstring;
import org.rioproject.util.PropertyHelper;
import java.io.Serializable;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* ClassBundle provides a mechanism to define the resources needed to load and
* instantiate a class.
*
* @author Dennis Reedy
*/
public class ClassBundle implements Serializable {
static final long serialVersionUID = 1L;
/**
* The classname
*/
private String className;
/**
* The URL path used to load the class. The path will be applied to all
* JARs in this ClassBundle
*/
private String codebase;
/**
* Collection of jar names
*/
private List<String> jarNames = Collections.synchronizedList(new ArrayList<String>());
/**
* An artifact ID
*/
private String artifact;
/**
* Collection of shared components.
*/
private final Map<String, String[]> sharedComponents =
Collections.synchronizedMap(new HashMap<String, String[]>());
/**
* A table of method names to Class[] objects
*/
private Map<String, Object[]> methodObjectTable = Collections.synchronizedMap(new HashMap<String, Object[]>());
private static Logger logger = Logger.getLogger(ClassBundle.class.getName());
/**
* Create a new ClassBundle
*/
public ClassBundle() {
}
/**
* Create a new ClassBundle
*
* @param className The className
*/
public ClassBundle(String className) {
if(className == null)
throw new IllegalArgumentException("className is null");
this.className = className;
}
/**
* Create a new ClassBundle
*
* @param className The className
* @param jarNames Array of Strings identifying resource names used to load
* the className
* @param shComponents Map of class names and jar names to load the class
* from. A shared component will be loaded by the common loader for all
* services making it (and the resources it uses) available to all services
* @param codebase The URL path used to load the class. The path will be
* applied to all JARs in this ClassBundle
*/
public ClassBundle(String className,
String[] jarNames,
Map<String, String[]> shComponents,
String codebase) {
if(className == null)
throw new IllegalArgumentException("className cannot be null");
this.className = className;
if(jarNames!=null)
addJARs(jarNames);
if(shComponents!=null)
addSharedComponents(shComponents);
this.codebase = codebase;
}
/**
* Set the codebase used to load the class. The path will be applied to all
* JARs in this ClassBundle
*
* @param codebase The codebase to set
*/
public void setCodebase(String codebase) {
this.codebase = codebase;
if(this.codebase!=null) {
if(!this.codebase.endsWith("/"))
this.codebase = this.codebase+"/";
}
}
/**
* Get the codebase used to load the class.
*
* @return The codebase that has been set. If the codebase has properties
* declared (in the form <tt>$[property]</tt>), return a formatted string
* with the properties expanded. If there are no property elements
* declared, return the original string.
*/
public String getCodebase() {
return(translateCodebase());
}
/**
* Get the codebase without any translation
*
* @return The codebase that has been set
*/
public String getRawCodebase() {
return(codebase);
}
/**
* Set the className
*
* @param className The className, suitable for use with Class.forName()
*/
public void setClassName(String className) {
this.className = className;
}
/**
* Get the className
*
* @return The className, suitable for use with Class.forName()
*/
public String getClassName() {
return (className);
}
/**
* Get the artifact associated with the className
*
* @return The artifact associated with the className
*/
public String getArtifact() {
return artifact;
}
/**
* Set the artifact
*
* @param artifact The artifact associated with the className
*/
public void setArtifact(String artifact) {
this.artifact = artifact;
}
/**
* Set JARs to the ClassBundle.
*
* @param jars Jar names to set
*/
public void setJARs(String... jars) {
jarNames.clear();
addJARs(jars);
}
/**
* Add JARs to the ClassBundle.
*
* @param jars Jar names to add.
*/
public void addJARs(String... jars) {
if(jars == null)
throw new IllegalArgumentException("jars cannot be null");
for(String jar : jars)
addJAR(jar);
}
/**
* Add a JAR to the Collection of JAR resources.
*
* @param jar Name of the JAR to add
*/
public void addJAR(String jar) {
if(jar == null)
throw new IllegalArgumentException("jar cannot be null");
if(!jarNames.contains(jar))
jarNames.add(jar);
}
/**
* Add a Map of shared components
*
* @param m Map of shared components to add
*/
public void addSharedComponents(Map<String, String[]> m) {
if(m == null)
return;
sharedComponents.putAll(m);
}
/**
* Get the JAR names.
*
* @return A String array of the JAR names. This method will return a new
* array each time. If there are no JAR names, this method will return an
* empty array
*/
public String[] getJARNames() {
return jarNames.toArray(new String[jarNames.size()]);
}
/**
* Get the JAR resources
*
* @return An URL array of the JARs that can be used as a classpath to
* load the class. This method will return a new array each time. If
* there are no JARs configured, this method will return an empty array.
*
* @throws MalformedURLException if the codebase has not been set, or if
* the provided codebase contains an invalid protocol
*/
public URL[] getJARs() throws MalformedURLException {
return (urlsFromJARs(getJARNames()));
}
/**
* Get shared component information
*
* @return A Map of the shared component information, transforming
* configured string urls to URLs. This method will return a new Map
* each time. If there are no shared components, this method will return
* an empty Map
*
* @throws MalformedURLException If URLs cannot be created
*/
public Map<String, URL[]> getSharedComponents() throws MalformedURLException {
Map<String, URL[]> map = new HashMap<String, URL[]>();
for(Map.Entry<String, String[]> entry : sharedComponents.entrySet()) {
String className = entry.getKey();
String[] jarNames = entry.getValue();
URL[] urls = urlsFromJARs(jarNames);
map.put(className, urls);
}
return map;
}
/**
* Add a method name and the parameters to use for when reflecting on
* specified public member method of the class or interface represented by
* this ClassBundle object. The array of parameter types will be determined
* by the Class object for the Object types provided
*
* @param methodName The public member method of the Class or interface
* represented by this ClassBundle
* @param parameters Array of Object parameters for use when reflecting on
* the method
*/
public void addMethod(String methodName, Object[] parameters) {
if(methodName == null)
throw new IllegalArgumentException("methodName is null");
if(parameters == null) {
methodObjectTable.put(methodName, null);
return;
}
methodObjectTable.put(methodName, parameters);
}
/**
* Get all method names to reflect on
*
* @return Array of String method names to reflect on. If there
* are no method names to reflect on this method will return an empty array
*/
public String[] getMethodNames() {
Set<String> keys = methodObjectTable.keySet();
return (keys.toArray(new String[keys.size()]));
}
/**
* Get the corresponding Class[] parameters to reflect on a method
*
* @param methodName The name of the public method to reflect on
* @return Array of Class objects to use when reflecting on the
* public method
*/
public Class[] getMethodClasses(String methodName) {
Object[] args = getMethodObjects(methodName);
Class[] classes = new Class[args.length];
for(int i = 0; i < classes.length; i++) {
classes[i] = args[i].getClass();
}
return classes;
}
/**
* Get the corresponding Class[] parameters to reflect on a method
*
* @param methodName The name of the public method to reflect on
* @return Array of Object objects to use when reflecting on the
* public method
*/
public Object[] getMethodObjects(String methodName) {
if(methodName == null)
throw new IllegalArgumentException("methodName is null");
return (methodObjectTable.get(methodName));
}
/**
* Utility method to reflect on all added methods using an object
* instantiated from the Class loaded by the ClassBundle
*
* @param object An instantiated Object from the Class loaded by the
* ClassBundle
*
* @throws Exception If there are errors running the known methods
*/
public void runKnownMethods(Object object) throws Exception {
String[] methods = getMethodNames();
for (String method : methods) {
Method m = object.getClass()
.getMethod(method, getMethodClasses(method));
m.invoke(object, getMethodObjects(method));
}
}
/**
* Override hashCode
*/
public int hashCode() {
int hc = 17;
hc = 37*hc+className.hashCode();
hc = 37*hc+getJARList().hashCode();
return(hc);
}
/**
* Override equals
*/
public boolean equals(Object obj) {
if(this == obj)
return(true);
if(!(obj instanceof ClassBundle))
return(false);
ClassBundle that = (ClassBundle)obj;
if(this.className.equals(that.className)) {
if(this.artifact!=null && that.artifact!=null) {
return this.artifact.equals(that.artifact);
} else {
return this.getJARList().equals(that.getJARList());
}
}
return(false);
}
/*
* Get URLs for jarNames based on the codebase
*/
private URL[] urlsFromJARs(String[] jarNames) throws MalformedURLException {
URL[] urls = new URL[jarNames.length];
StringBuilder sb = new StringBuilder();
for(int i = 0; i < urls.length; i++) {
if(i>0)
sb.append(", ");
sb.append(translateCodebase()).append(jarNames[i]);
urls[i] = new URL(translateCodebase()+jarNames[i]);
}
if(logger.isLoggable(Level.FINE)) {
logger.fine("Translated JARs=["+sb.toString()+"]");
}
return (urls);
}
/*
* Return an ArrayList of JAR names
*/
private ArrayList<String> getJARList() {
ArrayList<String> list = new ArrayList<String>();
String[] jars = getJARNames();
list.addAll(Arrays.asList(jars));
return(list);
}
@Override
public String toString() {
StringBuilder buffer = new StringBuilder();
buffer.append("ClassName=").append(className).append("\n");
buffer.append("Artifact=").append(artifact).append("\n");
buffer.append("Codebase=").append(codebase).append("\n");
String[] jars = getJARNames();
if(jars.length>0) {
buffer.append("Searchpath={");
for(int i=0; i<jars.length; i++) {
if(i>0)
buffer.append(", ");
buffer.append(jars[i]);
}
buffer.append("}");
}
return(buffer.toString());
}
/*
* Expand any properties in the codebase String. Properties are declared
* with the pattern of : <code>$[property]</code>
*
* @return If the codebase has properties declared (in the form
* <code>$[property]</code>), return a formatted string with the
* properties expanded. If there are no property elements declared, return
* the original string.
*
* @throws IllegalArgumentException If a property value cannot be obtained
* an IllegalArgument will be thrown
*/
private String translateCodebase() {
if(codebase==null)
return(codebase);
String translated = PropertyHelper.expandProperties(codebase, PropertyHelper.RUNTIME);
if(System.getProperty("os.name").startsWith("Win") && translated.startsWith("file://")) {
translated = "file:/"+translated.substring(7);
}
return(translated);
}
/**
* Merge two ClassBundles
*
* @param bundles ClassBundle instances to merge
*
* @return A merged ClassBundle.
*
* @throws IllegalArgumentException For all ClassBundles that have a
* non-null classname, that classname must be equal. If this is not the
* case then an IllegalArgumentException is thrown.
*/
public static ClassBundle merge(ClassBundle... bundles) {
String className = null;
for(ClassBundle bundle : bundles) {
if(bundle.getClassName()!=null) {
if(className==null)
className = bundle.getClassName();
else if(!className.equals(bundle.getClassName()))
throw new IllegalArgumentException("bundles must have same classname");
}
}
ClassBundle cb = new ClassBundle();
for(ClassBundle bundle : bundles) {
cb.setArtifact(bundle.getArtifact());
cb.addJARs(bundle.getJARNames());
cb.addSharedComponents(bundle.sharedComponents);
cb.setCodebase(bundle.getCodebase());
cb.setClassName(bundle.getClassName());
cb.methodObjectTable.putAll(bundle.methodObjectTable);
}
return cb;
}
}
| Remove support for obsolete shared components
| rio-api/src/main/java/org/rioproject/opstring/ClassBundle.java | Remove support for obsolete shared components | <ide><path>io-api/src/main/java/org/rioproject/opstring/ClassBundle.java
<ide> * An artifact ID
<ide> */
<ide> private String artifact;
<del>
<del> /**
<del> * Collection of shared components.
<del> */
<del> private final Map<String, String[]> sharedComponents =
<del> Collections.synchronizedMap(new HashMap<String, String[]>());
<ide> /**
<ide> * A table of method names to Class[] objects
<ide> */
<ide> * @param className The className
<ide> * @param jarNames Array of Strings identifying resource names used to load
<ide> * the className
<del> * @param shComponents Map of class names and jar names to load the class
<del> * from. A shared component will be loaded by the common loader for all
<del> * services making it (and the resources it uses) available to all services
<ide> * @param codebase The URL path used to load the class. The path will be
<ide> * applied to all JARs in this ClassBundle
<ide> */
<del> public ClassBundle(String className,
<del> String[] jarNames,
<del> Map<String, String[]> shComponents,
<del> String codebase) {
<add> public ClassBundle(String className, String[] jarNames, String codebase) {
<ide> if(className == null)
<ide> throw new IllegalArgumentException("className cannot be null");
<ide> this.className = className;
<ide> if(jarNames!=null)
<ide> addJARs(jarNames);
<del> if(shComponents!=null)
<del> addSharedComponents(shComponents);
<ide> this.codebase = codebase;
<ide> }
<ide>
<ide> }
<ide>
<ide> /**
<del> * Add a Map of shared components
<del> *
<del> * @param m Map of shared components to add
<del> */
<del> public void addSharedComponents(Map<String, String[]> m) {
<del> if(m == null)
<del> return;
<del> sharedComponents.putAll(m);
<del> }
<del>
<del> /**
<ide> * Get the JAR names.
<ide> *
<ide> * @return A String array of the JAR names. This method will return a new
<ide> */
<ide> public URL[] getJARs() throws MalformedURLException {
<ide> return (urlsFromJARs(getJARNames()));
<del> }
<del>
<del> /**
<del> * Get shared component information
<del> *
<del> * @return A Map of the shared component information, transforming
<del> * configured string urls to URLs. This method will return a new Map
<del> * each time. If there are no shared components, this method will return
<del> * an empty Map
<del> *
<del> * @throws MalformedURLException If URLs cannot be created
<del> */
<del> public Map<String, URL[]> getSharedComponents() throws MalformedURLException {
<del> Map<String, URL[]> map = new HashMap<String, URL[]>();
<del> for(Map.Entry<String, String[]> entry : sharedComponents.entrySet()) {
<del> String className = entry.getKey();
<del> String[] jarNames = entry.getValue();
<del> URL[] urls = urlsFromJARs(jarNames);
<del> map.put(className, urls);
<del> }
<del> return map;
<ide> }
<ide>
<ide> /**
<ide> for(ClassBundle bundle : bundles) {
<ide> cb.setArtifact(bundle.getArtifact());
<ide> cb.addJARs(bundle.getJARNames());
<del> cb.addSharedComponents(bundle.sharedComponents);
<ide> cb.setCodebase(bundle.getCodebase());
<ide> cb.setClassName(bundle.getClassName());
<ide> cb.methodObjectTable.putAll(bundle.methodObjectTable); |
|
Java | lgpl-2.1 | 4a36607b88c40641510819481d70f70c8486792b | 0 | xwiki/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform | /*
* Copyright 2006, XpertNet SARL, and individual contributors as indicated
* by the contributors.txt.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*
* @author sdumitriu
*/
package com.xpn.xwiki.plugin.charts;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.api.Api;
import com.xpn.xwiki.plugin.XWikiDefaultPlugin;
import com.xpn.xwiki.plugin.XWikiPluginInterface;
import com.xpn.xwiki.plugin.charts.exceptions.DataSourceException;
import com.xpn.xwiki.plugin.charts.exceptions.GenerateException;
import com.xpn.xwiki.plugin.charts.params.ChartParams;
import com.xpn.xwiki.plugin.charts.plots.PlotFactory;
import com.xpn.xwiki.plugin.charts.source.DataSource;
import com.xpn.xwiki.plugin.charts.source.MainDataSourceFactory;
import com.xpn.xwiki.plugin.svg.SVGPlugin;
import com.xpn.xwiki.web.XWikiResponse;
import org.apache.batik.apps.rasterizer.SVGConverterException;
import org.apache.batik.dom.GenericDOMImplementation;
import org.apache.batik.svggen.SVGGraphics2D;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.plot.Plot;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import java.awt.geom.Rectangle2D;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
* TODO: Document this
* @author hritcu
*
*/
public class ChartingPlugin extends XWikiDefaultPlugin implements
        XWikiPluginInterface {

    private static Log log = LogFactory.getFactory().getInstance(ChartingPlugin.class);

    /** Directory (under the servlet container temp dir) where chart images are written. */
    private File tempDir;

    /**
     * Create the plugin and initialize its temporary directory.
     *
     * @param name plugin name as registered with the wiki
     * @param className implementation class name
     * @param context the current XWiki context
     */
    public ChartingPlugin(String name, String className, XWikiContext context) {
        super(name, className, context);
        init(context);
    }

    /**
     * Initialize the plugin: create a "charts" directory under the servlet
     * container's temporary directory, falling back to the wiki's configured
     * <code>xwiki.upload.tempdir</code> when that fails.
     *
     * @param context the current XWiki context
     */
    public void init(XWikiContext context) {
        super.init(context);
        log.info("Charting Plugin - init");
        File dir = (File) context.getEngineContext().getAttribute("javax.servlet.context.tempdir");
        tempDir = new File(dir, "charts");
        try {
            tempDir.mkdirs();
        } catch (Exception e1) {
            log.warn("Could not create charts temporary directory: " + tempDir, e1);
            // Fall back to the wiki's configured upload temp directory.
            // (Removed a redundant (File) cast on the new File(...) call.)
            dir = new File(context.getWiki().Param("xwiki.upload.tempdir"));
            try {
                tempDir = new File(dir, "charts");
            } catch (Exception e2) {
                log.error("Could not create charts temporary directory: " + tempDir, e2);
            }
        }
    }

    public String getName() {
        return "charting";
    }

    /**
     * Generate a chart image from the given parameters.
     *
     * <p>The data source named by {@link ChartParams#SOURCE} is resolved, a
     * plot factory is located by naming convention
     * (<code>plots.&lt;Type&gt;PlotFactory</code> derived from the
     * {@link ChartParams#TYPE} parameter) and invoked reflectively, and the
     * resulting chart is rendered to a PNG file.
     *
     * @param params chart parameters (type, source, size, styling)
     * @param context the current XWiki context
     * @return a Chart holding the image and page URLs
     * @throws GenerateException on data source, factory or rendering failure
     */
    public Chart generateChart(ChartParams params, XWikiContext context) throws GenerateException {
        try {
            // Obtain the corresponding data source and wrap it into a data source object
            DataSource dataSource = MainDataSourceFactory.getInstance()
                    .create(params.getMap(ChartParams.SOURCE), context);

            String type = params.getString(ChartParams.TYPE);

            Plot plot;
            try {
                // Locate the factory by convention, e.g. type "bar" ->
                // com.xpn.xwiki.plugin.charts.plots.BarPlotFactory
                String factoryClassName = ChartingPlugin.class.getPackage().getName()
                        + ".plots." + Character.toUpperCase(type.charAt(0)) +
                        type.toLowerCase().substring(1) + "PlotFactory";
                Class factoryClass = Class.forName(factoryClassName);
                Method method = factoryClass.getMethod("getInstance", new Class[] {});
                PlotFactory factory = (PlotFactory) method.invoke(null, new Object[] {});
                plot = factory.create(dataSource, params);
            } catch (InvocationTargetException e) {
                // Surface the factory's own failure, not the reflection wrapper.
                throw new GenerateException(e.getTargetException());
            } catch (Throwable e) {
                throw new GenerateException(e);
            }
            ChartCustomizer.customizePlot(plot, params);

            JFreeChart jfchart = new JFreeChart(plot);

            ChartCustomizer.customizeChart(jfchart, params);

            return generatePngChart(jfchart, params, context);
        } catch (IOException ioe) {
            throw new GenerateException(ioe);
        } catch (DataSourceException dse) {
            throw new GenerateException(dse);
        }
    }

    /**
     * Render a chart as SVG and hand it to the SVG plugin for rasterization.
     * Currently not referenced by {@link #generateChart}; kept as an
     * alternative renderer.
     *
     * @param jfchart the chart to render
     * @param params chart parameters (width/height are used)
     * @param context the current XWiki context
     * @return a Chart holding the image URL produced by the SVG plugin
     * @throws IOException on stream errors
     * @throws GenerateException when the SVG plugin is missing or conversion fails
     */
    private Chart generateSvgChart(JFreeChart jfchart,
                                   ChartParams params, XWikiContext context)
            throws IOException, GenerateException {
        // Get a DOMImplementation
        DOMImplementation domImpl = GenericDOMImplementation.getDOMImplementation();

        // Create an instance of org.w3c.dom.Document
        Document document = domImpl.createDocument("http://www.w3.org/2000/svg", "svg", null);

        // Create an instance of the SVG Generator
        SVGGraphics2D svgGenerator = new SVGGraphics2D(document);

        // Ask the chart to render into the SVG Graphics2D implementation
        Rectangle2D.Double rect = new Rectangle2D.Double(0, 0,
                params.getInteger(ChartParams.WIDTH).intValue(),
                params.getInteger(ChartParams.HEIGHT).intValue());
        jfchart.draw(svgGenerator, rect);

        boolean useCSS = false;
        StringWriter swriter = new StringWriter();
        svgGenerator.stream(swriter, useCSS);
        String svgText = swriter.toString();

        String pageURL = null;

        SVGPlugin svgPlugin = (SVGPlugin) context.getWiki().getPlugin("svg", context);
        if (svgPlugin == null) {
            throw new GenerateException("SVGPlugin not loaded");
        }

        String imageURL;
        try {
            imageURL = svgPlugin.getSVGImageURL(svgText,
                    params.getInteger(ChartParams.HEIGHT).intValue(),
                    params.getInteger(ChartParams.WIDTH).intValue(), context);
        } catch (SVGConverterException sce) {
            throw new GenerateException(sce);
        }

        return new ChartImpl(params, imageURL, pageURL);
    }

    /**
     * Render a chart to a PNG file in the temp directory and build its URLs.
     *
     * @param jfchart the chart to render
     * @param params chart parameters (width/height are used; hashCode names the file)
     * @param context the current XWiki context
     * @return a Chart holding the attachment URLs
     * @throws IOException if the PNG cannot be written
     * @throws GenerateException declared for symmetry with other generators
     */
    private Chart generatePngChart(JFreeChart jfchart,
                                   ChartParams params, XWikiContext context) throws IOException, GenerateException {
        // File name is derived from the parameter hash, so identical requests
        // reuse the same file.
        File file = getTempFile(params.hashCode(), "png");

        ChartUtilities.saveChartAsPNG(file, jfchart,
                params.getInteger(ChartParams.WIDTH).intValue(),
                params.getInteger(ChartParams.HEIGHT).intValue());

        String imageURL = context.getDoc().getAttachmentURL(
                file.getName(), "charting", context);
        String pageURL = imageURL;
        return new ChartImpl(params, imageURL, pageURL);
    }

    /**
     * Stream a previously generated chart file to the HTTP response.
     *
     * @param filename name of the file in the charts temp directory
     * @param context the current XWiki context
     * @throws IOException if the file cannot be read or the response written
     */
    public void outputFile(String filename, XWikiContext context) throws IOException {
        File ofile = getTempFile(filename);
        byte[] bytes = readFile(ofile);
        XWikiResponse response = context.getResponse();
        // Mark the request handled so the wiki does not render a page too.
        context.setFinished(true);
        response.setDateHeader("Last-Modified", ofile.lastModified());
        response.setContentLength(bytes.length);
        response.setContentType(context.getEngineContext().getMimeType(filename));
        // The container owns the response stream's lifecycle; it is not
        // closed here.
        OutputStream os = response.getOutputStream();
        os.write(bytes);
    }

    /**
     * Read a file fully into a byte array.
     *
     * @param ofile the file to read
     * @return the complete file contents
     * @throws FileNotFoundException if the file does not exist
     * @throws IOException if the file cannot be read fully
     */
    public byte[] readFile(File ofile) throws FileNotFoundException, IOException {
        FileInputStream fis = new FileInputStream(ofile);
        try {
            byte[] result = new byte[(int) ofile.length()];
            // InputStream.read() may return fewer bytes than requested; loop
            // until the buffer is full. The previous code issued a single
            // read() and could silently return a partially filled array, and
            // it also never closed the stream (resource leak).
            int off = 0;
            while (off < result.length) {
                int read = fis.read(result, off, result.length - off);
                if (read < 0) {
                    throw new IOException("Unexpected end of file: " + ofile);
                }
                off += read;
            }
            return result;
        } finally {
            fis.close();
        }
    }

    public Api getPluginApi(XWikiPluginInterface plugin, XWikiContext context) {
        return new ChartingPluginApi((ChartingPlugin) plugin, context);
    }

    /** Build the temp file name for a parameter hash and extension. */
    private File getTempFile(int hashcode, String extension) {
        return getTempFile(hashcode + "." + extension);
    }

    /** Resolve a file name inside the charts temp directory. */
    private File getTempFile(String filename) {
        return new File(tempDir, filename);
    }
}
| src/main/java/com/xpn/xwiki/plugin/charts/ChartingPlugin.java | /*
* Copyright 2006, XpertNet SARL, and individual contributors as indicated
* by the contributors.txt.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*
* @author sdumitriu
*/
package com.xpn.xwiki.plugin.charts;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.api.Api;
import com.xpn.xwiki.plugin.XWikiDefaultPlugin;
import com.xpn.xwiki.plugin.XWikiPluginInterface;
import com.xpn.xwiki.plugin.charts.exceptions.DataSourceException;
import com.xpn.xwiki.plugin.charts.exceptions.GenerateException;
import com.xpn.xwiki.plugin.charts.params.ChartParams;
import com.xpn.xwiki.plugin.charts.plots.PlotFactory;
import com.xpn.xwiki.plugin.charts.source.DataSource;
import com.xpn.xwiki.plugin.charts.source.MainDataSourceFactory;
import com.xpn.xwiki.plugin.svg.SVGPlugin;
import com.xpn.xwiki.web.XWikiResponse;
import org.apache.batik.apps.rasterizer.SVGConverterException;
import org.apache.batik.dom.GenericDOMImplementation;
import org.apache.batik.svggen.SVGGraphics2D;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.plot.Plot;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import java.awt.geom.Rectangle2D;
import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
public class ChartingPlugin extends XWikiDefaultPlugin implements
XWikiPluginInterface {
public ChartingPlugin(String name, String className, XWikiContext context) {
super(name, className, context);
init(context);
}
public void init(XWikiContext context) {
super.init(context);
log.info("Charting Plugin - init");
File dir = (File) context.getEngineContext().getAttribute("javax.servlet.context.tempdir");
tempDir = new File(dir, "charts");
try {
tempDir.mkdirs();
} catch (Exception e1) {
log.warn("Could not create charts temporary directory: "+tempDir, e1);
dir = (File) new File(context.getWiki().Param("xwiki.upload.tempdir"));
try {
tempDir = new File(dir, "charts");
} catch (Exception e2) {
log.error("Could not create charts temporary directory: "+tempDir, e2);
}
};
}
public String getName() {
return "charting";
}
public Chart generateChart(ChartParams params, XWikiContext context) throws GenerateException {
try {
// Obtain the corresponding data source and wrap it into a data source object
DataSource dataSource = MainDataSourceFactory.getInstance()
.create(params.getMap(ChartParams.SOURCE), context);
String type = params.getString(ChartParams.TYPE);
Plot plot;
try {
String factoryClassName = ChartingPlugin.class.getPackage().getName()
+ ".plots." + Character.toUpperCase(type.charAt(0)) +
type.toLowerCase().substring(1) + "PlotFactory";
Class factoryClass = Class.forName(factoryClassName);
Method method = factoryClass.getMethod("getInstance", new Class[] {});
PlotFactory factory = (PlotFactory)method.invoke(null, new Object[] {});
plot = factory.create(dataSource, params);
} catch (InvocationTargetException e){
throw new GenerateException(e.getTargetException());
} catch (Throwable e) {
throw new GenerateException(e);
}
ChartCustomizer.customizePlot(plot, params);
JFreeChart jfchart = new JFreeChart(plot);
ChartCustomizer.customizeChart(jfchart, params);
return generatePngChart(jfchart, params, context);
} catch (IOException ioe) {
throw new GenerateException(ioe);
} catch (DataSourceException dse) {
throw new GenerateException(dse);
}
}
private Chart generateSvgChart(JFreeChart jfchart,
ChartParams params, XWikiContext context)
throws IOException, GenerateException {
// Get a DOMImplementation
DOMImplementation domImpl = GenericDOMImplementation.getDOMImplementation();
// Create an instance of org.w3c.dom.Document
Document document = domImpl.createDocument("http://www.w3.org/2000/svg", "svg", null);
// Create an instance of the SVG Generator
SVGGraphics2D svgGenerator = new SVGGraphics2D(document);
// Ask the chart to render into the SVG Graphics2D implementation
Rectangle2D.Double rect = new Rectangle2D.Double(0, 0,
params.getInteger(ChartParams.WIDTH).intValue(),
params.getInteger(ChartParams.HEIGHT).intValue());
jfchart.draw(svgGenerator, rect);
boolean useCSS = false;
StringWriter swriter = new StringWriter();
svgGenerator.stream(swriter, useCSS);
String svgText = swriter.toString();
String pageURL = null;
SVGPlugin svgPlugin = (SVGPlugin) context.getWiki().getPlugin("svg", context);
if (svgPlugin == null) {
throw new GenerateException("SVGPlugin not loaded");
}
String imageURL;
try {
imageURL = svgPlugin.getSVGImageURL(svgText,
params.getInteger(ChartParams.HEIGHT).intValue(),
params.getInteger(ChartParams.WIDTH).intValue(), context);
} catch (SVGConverterException sce) {
throw new GenerateException(sce);
}
return new ChartImpl(params, imageURL, pageURL);
}
private Chart generatePngChart(JFreeChart jfchart,
ChartParams params, XWikiContext context) throws IOException, GenerateException {
File file = getTempFile(params.hashCode(), "png");
ChartUtilities.saveChartAsPNG(file, jfchart,
params.getInteger(ChartParams.WIDTH).intValue(),
params.getInteger(ChartParams.HEIGHT).intValue());
String imageURL = context.getDoc().getAttachmentURL(
file.getName(), "charting", context);
String pageURL = imageURL;
return new ChartImpl(params, imageURL, pageURL);
}
public void outputFile(String filename, XWikiContext context) throws IOException {
File ofile = getTempFile(filename);
byte[] bytes = readFile(ofile);
XWikiResponse response = context.getResponse();
context.setFinished(true);
response.setDateHeader("Last-Modified", ofile.lastModified());
response.setContentLength(bytes.length);
response.setContentType(context.getEngineContext().getMimeType(filename));
OutputStream os = response.getOutputStream();
os.write(bytes);
}
public byte[] readFile(File ofile) throws FileNotFoundException, IOException {
FileInputStream fis = new FileInputStream(ofile);
byte[] result = new byte[(int)ofile.length()];
fis.read(result);
return result;
}
public Api getPluginApi(XWikiPluginInterface plugin, XWikiContext context) {
return new ChartingPluginApi((ChartingPlugin) plugin, context);
}
private File getTempFile(int hashcode, String extension) {
return getTempFile(hashcode + "." + extension);
}
private File getTempFile(String filename) {
return new File(tempDir, filename);
}
private static Log log = LogFactory.getFactory().getInstance(ChartingPlugin.class);
private File tempDir;
}
| just a test
git-svn-id: 50c93b9c2d9041e46610830dd7836bad08be8b2e@2224 f329d543-caf0-0310-9063-dda96c69346f
| src/main/java/com/xpn/xwiki/plugin/charts/ChartingPlugin.java | just a test | <ide><path>rc/main/java/com/xpn/xwiki/plugin/charts/ChartingPlugin.java
<ide> import java.lang.reflect.InvocationTargetException;
<ide> import java.lang.reflect.Method;
<ide>
<add>/**
<add> * TODO: Document this
<add> * @author hritcu
<add> *
<add> */
<ide> public class ChartingPlugin extends XWikiDefaultPlugin implements
<ide> XWikiPluginInterface {
<ide> public ChartingPlugin(String name, String className, XWikiContext context) { |
|
Java | epl-1.0 | e81fed24e6cf005b0eeb6c6ceb19fdfba98e2f83 | 0 | rrimmana/birt-1,rrimmana/birt-1,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt,Charling-Huang/birt,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,sguan-actuate/birt,sguan-actuate/birt,Charling-Huang/birt,rrimmana/birt-1,sguan-actuate/birt,Charling-Huang/birt | /*******************************************************************************
* Copyright (c) 2010 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.chart.reportitem.ui;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.eclipse.birt.chart.reportitem.ui.dialogs.ChartExpressionProvider;
import org.eclipse.birt.chart.ui.swt.interfaces.IUIServiceProvider;
import org.eclipse.birt.chart.util.ChartUtil;
import org.eclipse.birt.report.designer.core.model.DesignElementHandleAdapter;
import org.eclipse.birt.report.designer.internal.ui.editors.schematic.figures.ReportElementFigure;
import org.eclipse.birt.report.designer.internal.ui.util.DataUtil;
import org.eclipse.birt.report.designer.internal.ui.util.ExpressionUtility;
import org.eclipse.birt.report.designer.util.ColorManager;
import org.eclipse.birt.report.designer.util.DEUtil;
import org.eclipse.birt.report.designer.util.ImageManager;
import org.eclipse.birt.report.model.api.DataSetHandle;
import org.eclipse.birt.report.model.api.DesignElementHandle;
import org.eclipse.birt.report.model.api.ExtendedItemHandle;
import org.eclipse.birt.report.model.api.ReportItemHandle;
import org.eclipse.birt.report.model.api.ResultSetColumnHandle;
import org.eclipse.birt.report.model.api.StructureFactory;
import org.eclipse.birt.report.model.api.StyleHandle;
import org.eclipse.birt.report.model.api.activity.SemanticException;
import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants;
import org.eclipse.birt.report.model.api.elements.structures.ComputedColumn;
import org.eclipse.birt.report.model.api.extension.ExtendedElementException;
import org.eclipse.birt.report.model.api.extension.IReportItem;
import org.eclipse.birt.report.model.api.metadata.DimensionValue;
import org.eclipse.birt.report.model.api.util.ColorUtil;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.swt.SWTException;
import org.eclipse.swt.graphics.Image;
/**
* ChartReportItemUIUtil
*
* @since 2.5.3
*/
public class ChartReportItemUIUtil
{
/**
* Creates chart filter factory instance according to specified item handle.
*
* @param item
* @return filter factory
* @throws ExtendedElementException
*/
public static ChartFilterFactory createChartFilterFactory( Object item )
throws ExtendedElementException
{
if ( item instanceof ExtendedItemHandle )
{
return getChartFilterFactory( ( (ExtendedItemHandle) item ).getReportItem( ) );
}
else if ( item instanceof IReportItem )
{
return createChartFilterFactory( item );
}
return new ChartFilterFactory( );
}
private static ChartFilterFactory getChartFilterFactory(
IReportItem adaptableObj )
{
ChartFilterFactory factory = ChartUtil.getAdapter( adaptableObj,
ChartFilterFactory.class );
if ( factory != null )
{
return factory;
}
return new ChartFilterFactory( );
}
/**
* Returns the categories list in BIRT chart expression builder
*
* @param builderCommand
* @return category style
*/
public static int getExpressionBuilderStyle( int builderCommand )
{
if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_DATA_BINDINGS )
{
return ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES
| ChartExpressionProvider.CATEGORY_WITH_COLUMN_BINDINGS
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_CHART_DATAPOINTS )
{
return ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_SCRIPT_DATAPOINTS )
{
// Script doesn't support column binding expression.
return ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_JAVASCRIPT;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_TRIGGERS_SIMPLE )
{
// Bugzilla#202386: Tooltips never support chart
// variables. Use COMMAND_EXPRESSION_TRIGGERS_SIMPLE for un-dp
return ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_JAVASCRIPT;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_TOOLTIPS_DATAPOINTS )
{
// Bugzilla#202386: Tooltips never support chart
// variables. Use COMMAND_EXPRESSION_TOOLTIPS_DATAPOINTS for dp
return ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_COLUMN_BINDINGS
| ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_CUBE_EXPRESSION_TOOLTIPS_DATAPOINTS )
{
return ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_HYPERLINK )
{
return ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_HYPERLINK_DATAPOINTS )
{
return ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES
| ChartExpressionProvider.CATEGORY_WITH_COLUMN_BINDINGS
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_HYPERLINK_DATAPOINTS_SIMPLE )
{
// Used for data cube case, no column bindings allowed
return ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_HYPERLINK_LEGEND )
{
// Add Legend item variables and remove column bindings
return ChartExpressionProvider.CATEGORY_WITH_LEGEND_ITEMS
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_JAVASCRIPT
| ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES;
}
return ChartExpressionProvider.CATEGORY_BASE;
}
/**
* Get background image setting from design element handle.
*
* @param handle
* The handle of design element.
* @return background image
*/
public static String getBackgroundImage( DesignElementHandle handle )
{
return handle.getStringProperty( StyleHandle.BACKGROUND_IMAGE_PROP );
}
/**
* Get background position settings from design element handle.
*
* @param handle
* The handle of design element.
* @return background position
*/
public static Object[] getBackgroundPosition( DesignElementHandle handle )
{
Object x = null;
Object y = null;
if ( handle != null )
{
Object px = handle.getProperty( StyleHandle.BACKGROUND_POSITION_X_PROP );
Object py = handle.getProperty( StyleHandle.BACKGROUND_POSITION_Y_PROP );
if ( px instanceof String )
{
x = px;
}
else if ( px instanceof DimensionValue )
{
// {0%,0%}
if ( DesignChoiceConstants.UNITS_PERCENTAGE.equals( ( (DimensionValue) px ).getUnits( ) ) )
{
x = px;
}
else
{
// {1cm,1cm}
x = Integer.valueOf( (int) DEUtil.convertoToPixel( px ) );
}
}
if ( py instanceof String )
{
y = py;
}
else if ( py instanceof DimensionValue )
{
// {0%,0%}
if ( DesignChoiceConstants.UNITS_PERCENTAGE.equals( ( (DimensionValue) py ).getUnits( ) ) )
{
y = py;
}
else
{
// {1cm,1cm}
y = Integer.valueOf( (int) DEUtil.convertoToPixel( py ) );
}
}
}
return new Object[]{
x, y
};
}
/**
* Get background repeat property from design element handle.
*
* @param handle
* The handle of design element.
* @return background repeat property
*/
public static int getBackgroundRepeat( DesignElementHandle handle )
{
return getRepeat( handle.getStringProperty( StyleHandle.BACKGROUND_REPEAT_PROP ) );
}
/**
* Get repeat identifier according to its value
*
* @param repeat
* Given string
* @return
* The repeat value
*/
public static int getRepeat( String repeat )
{
if ( DesignChoiceConstants.BACKGROUND_REPEAT_REPEAT_X.equals( repeat ) )
{
return 1;
}
else if ( DesignChoiceConstants.BACKGROUND_REPEAT_REPEAT_Y.equals( repeat ) )
{
return 2;
}
else if ( DesignChoiceConstants.BACKGROUND_REPEAT_REPEAT.equals( repeat ) )
{
return 3;
}
return 0;
}
/**
* Generate computed columns for the given report item with the closest data
* set available.
*
* @param dataSetHandle
* Data Set. No aggregation created.
*
* @return true if succeed,or fail if no column generated.
* @see DataUtil#generateComputedColumns(ReportItemHandle)
*
*/
@SuppressWarnings("unchecked")
public static List<ComputedColumn> generateComputedColumns(
ReportItemHandle handle, DataSetHandle dataSetHandle )
throws SemanticException
{
if ( dataSetHandle != null )
{
List<ResultSetColumnHandle> resultSetColumnList = DataUtil.getColumnList( dataSetHandle );
List<ComputedColumn> columnList = new ArrayList<ComputedColumn>( );
for ( ResultSetColumnHandle resultSetColumn : resultSetColumnList )
{
ComputedColumn column = StructureFactory.newComputedColumn( handle,
resultSetColumn.getColumnName( ) );
column.setDataType( resultSetColumn.getDataType( ) );
ExpressionUtility.setBindingColumnExpression( resultSetColumn,
column );
columnList.add( column );
}
return columnList;
}
return Collections.emptyList( );
}
/**
* Refresh background including color and image.
*
* @param handle
* Item handle
* @param figure
* Element figure
*/
public static void refreshBackground( ExtendedItemHandle handle,
ReportElementFigure figure )
{
refreshBackgroundColor( handle, figure );
refreshBackgroundImage( handle, figure );
}
/**
* Refresh background image.
*
* @param handle
* Item handle
* @param figure
* Element figure
*/
public static void refreshBackgroundImage( ExtendedItemHandle handle,
ReportElementFigure figure )
{
String backGroundImage = ChartReportItemUIUtil.getBackgroundImage( handle );
if ( backGroundImage == null )
{
figure.setImage( null );
}
else
{
Image image = null;
try
{
image = ImageManager.getInstance( )
.getImage( handle.getModuleHandle( ), backGroundImage );
}
catch ( SWTException e )
{
// Should not be ExceptionHandler.handle(e), see SCR#73730
image = null;
}
if ( image == null )
{
figure.setImage( null );
return;
}
figure.setImage( image );
Object[] backGroundPosition = ChartReportItemUIUtil.getBackgroundPosition( handle );
int backGroundRepeat = ChartReportItemUIUtil.getBackgroundRepeat( handle );
figure.setRepeat( backGroundRepeat );
Object xPosition = backGroundPosition[0];
Object yPosition = backGroundPosition[1];
Rectangle area = figure.getClientArea( );
org.eclipse.swt.graphics.Rectangle imageArea = image.getBounds( );
Point position = new Point( -1, -1 );
int alignment = 0;
if ( xPosition instanceof Integer )
{
position.x = ( (Integer) xPosition ).intValue( );
}
else if ( xPosition instanceof DimensionValue )
{
int percentX = (int) ( (DimensionValue) xPosition ).getMeasure( );
position.x = ( area.width - imageArea.width ) * percentX / 100;
}
else if ( xPosition instanceof String )
{
alignment |= DesignElementHandleAdapter.getPosition( (String) xPosition );
}
if ( yPosition instanceof Integer )
{
position.y = ( (Integer) yPosition ).intValue( );
}
else if ( yPosition instanceof DimensionValue )
{
int percentY = (int) ( (DimensionValue) yPosition ).getMeasure( );
position.y = ( area.width - imageArea.width ) * percentY / 100;
}
else if ( yPosition instanceof String )
{
alignment |= DesignElementHandleAdapter.getPosition( (String) yPosition );
}
figure.setAlignment( alignment );
figure.setPosition( position );
}
}
/**
* Refresh background color.
*
* @param handle
* Item handle
* @param figure
* Figure
*/
public static void refreshBackgroundColor( ExtendedItemHandle handle,
IFigure figure )
{
Object obj = handle.getProperty( StyleHandle.BACKGROUND_COLOR_PROP );
figure.setOpaque( false );
if ( obj != null )
{
int color = 0xFFFFFF;
if ( obj instanceof String )
{
color = ColorUtil.parseColor( (String) obj );
}
else
{
color = ( (Integer) obj ).intValue( );
}
figure.setBackgroundColor( ColorManager.getColor( color ) );
figure.setOpaque( true );
}
}
}
| chart/org.eclipse.birt.chart.reportitem.ui/src/org/eclipse/birt/chart/reportitem/ui/ChartReportItemUIUtil.java | /*******************************************************************************
* Copyright (c) 2010 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.chart.reportitem.ui;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.eclipse.birt.chart.reportitem.ui.dialogs.ChartExpressionProvider;
import org.eclipse.birt.chart.ui.swt.interfaces.IUIServiceProvider;
import org.eclipse.birt.chart.util.ChartUtil;
import org.eclipse.birt.report.designer.core.model.DesignElementHandleAdapter;
import org.eclipse.birt.report.designer.internal.ui.editors.schematic.figures.ReportElementFigure;
import org.eclipse.birt.report.designer.internal.ui.util.DataUtil;
import org.eclipse.birt.report.designer.internal.ui.util.ExpressionUtility;
import org.eclipse.birt.report.designer.util.ColorManager;
import org.eclipse.birt.report.designer.util.DEUtil;
import org.eclipse.birt.report.designer.util.ImageManager;
import org.eclipse.birt.report.model.api.DataSetHandle;
import org.eclipse.birt.report.model.api.DesignElementHandle;
import org.eclipse.birt.report.model.api.ExtendedItemHandle;
import org.eclipse.birt.report.model.api.ReportItemHandle;
import org.eclipse.birt.report.model.api.ResultSetColumnHandle;
import org.eclipse.birt.report.model.api.StructureFactory;
import org.eclipse.birt.report.model.api.StyleHandle;
import org.eclipse.birt.report.model.api.activity.SemanticException;
import org.eclipse.birt.report.model.api.elements.DesignChoiceConstants;
import org.eclipse.birt.report.model.api.elements.structures.ComputedColumn;
import org.eclipse.birt.report.model.api.extension.ExtendedElementException;
import org.eclipse.birt.report.model.api.extension.IReportItem;
import org.eclipse.birt.report.model.api.metadata.DimensionValue;
import org.eclipse.birt.report.model.api.util.ColorUtil;
import org.eclipse.draw2d.IFigure;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.swt.SWTException;
import org.eclipse.swt.graphics.Image;
/**
* ChartReportItemUIUtil
*
* @since 2.5.3
*/
public class ChartReportItemUIUtil
{
/**
* Creates chart filter factory instance according to specified item handle.
*
* @param item
* @return filter factory
* @throws ExtendedElementException
*/
public static ChartFilterFactory createChartFilterFactory( Object item )
throws ExtendedElementException
{
if ( item instanceof ExtendedItemHandle )
{
return getChartFilterFactory( ( (ExtendedItemHandle) item ).getReportItem( ) );
}
else if ( item instanceof IReportItem )
{
return createChartFilterFactory( item );
}
return new ChartFilterFactory( );
}
private static ChartFilterFactory getChartFilterFactory(
IReportItem adaptableObj )
{
ChartFilterFactory factory = ChartUtil.getAdapter( adaptableObj,
ChartFilterFactory.class );
if ( factory != null )
{
return factory;
}
return new ChartFilterFactory( );
}
/**
* Returns the categories list in BIRT chart expression builder
*
* @param builderCommand
* @return category style
*/
public static int getExpressionBuilderStyle( int builderCommand )
{
if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_DATA_BINDINGS )
{
return ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES
| ChartExpressionProvider.CATEGORY_WITH_COLUMN_BINDINGS
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_CHART_DATAPOINTS )
{
return ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_SCRIPT_DATAPOINTS )
{
// Script doesn't support column binding expression.
return ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_JAVASCRIPT;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_TRIGGERS_SIMPLE )
{
// Bugzilla#202386: Tooltips never support chart
// variables. Use COMMAND_EXPRESSION_TRIGGERS_SIMPLE for un-dp
return ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_JAVASCRIPT;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_EXPRESSION_TOOLTIPS_DATAPOINTS )
{
// Bugzilla#202386: Tooltips never support chart
// variables. Use COMMAND_EXPRESSION_TOOLTIPS_DATAPOINTS for dp
return ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_COLUMN_BINDINGS
| ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_CUBE_EXPRESSION_TOOLTIPS_DATAPOINTS )
{
return ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_HYPERLINK )
{
return ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_HYPERLINK_DATAPOINTS )
{
return ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES
| ChartExpressionProvider.CATEGORY_WITH_COLUMN_BINDINGS
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_HYPERLINK_DATAPOINTS_SIMPLE )
{
// Used for data cube case, no column bindings allowed
return ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_DATA_POINTS;
}
else if ( builderCommand == IUIServiceProvider.COMMAND_HYPERLINK_LEGEND )
{
// Add Legend item variables and remove column bindings
return ChartExpressionProvider.CATEGORY_WITH_LEGEND_ITEMS
| ChartExpressionProvider.CATEGORY_WITH_REPORT_PARAMS
| ChartExpressionProvider.CATEGORY_WITH_JAVASCRIPT
| ChartExpressionProvider.CATEGORY_WITH_BIRT_VARIABLES;
}
return ChartExpressionProvider.CATEGORY_BASE;
}
/**
* Get background image setting from design element handle.
*
* @param handle
* The handle of design element.
* @return background image
*/
public static String getBackgroundImage( DesignElementHandle handle )
{
return handle.getStringProperty( StyleHandle.BACKGROUND_IMAGE_PROP );
}
/**
* Get background position settings from design element handle.
*
* @param handle
* The handle of design element.
* @return background position
*/
public static Object[] getBackgroundPosition( DesignElementHandle handle )
{
Object x = null;
Object y = null;
if ( handle != null )
{
Object px = handle.getProperty( StyleHandle.BACKGROUND_POSITION_X_PROP );
Object py = handle.getProperty( StyleHandle.BACKGROUND_POSITION_Y_PROP );
if ( px instanceof String )
{
x = px;
}
else if ( px instanceof DimensionValue )
{
// {0%,0%}
if ( DesignChoiceConstants.UNITS_PERCENTAGE.equals( ( (DimensionValue) px ).getUnits( ) ) )
{
x = px;
}
else
{
// {1cm,1cm}
x = Integer.valueOf( (int) DEUtil.convertoToPixel( px ) );
}
}
if ( py instanceof String )
{
y = py;
}
else if ( py instanceof DimensionValue )
{
// {0%,0%}
if ( DesignChoiceConstants.UNITS_PERCENTAGE.equals( ( (DimensionValue) py ).getUnits( ) ) )
{
y = py;
}
else
{
// {1cm,1cm}
y = Integer.valueOf( (int) DEUtil.convertoToPixel( py ) );
}
}
}
return new Object[]{
x, y
};
}
/**
* Get background repeat property from design element handle.
*
* @param handle
* The handle of design element.
* @return background repeat property
*/
public static int getBackgroundRepeat( DesignElementHandle handle )
{
return getRepeat( handle.getStringProperty( StyleHandle.BACKGROUND_REPEAT_PROP ) );
}
/**
* Get repeat identifier according to its value
*
* @param repeat
* Given string
* @return
* The repeat value
*/
public static int getRepeat( String repeat )
{
if ( DesignChoiceConstants.BACKGROUND_REPEAT_REPEAT_X.equals( repeat ) )
{
return 1;
}
else if ( DesignChoiceConstants.BACKGROUND_REPEAT_REPEAT_Y.equals( repeat ) )
{
return 2;
}
else if ( DesignChoiceConstants.BACKGROUND_REPEAT_REPEAT.equals( repeat ) )
{
return 3;
}
return 0;
}
/**
* Generate computed columns for the given report item with the closest data
* set available.
*
* @param dataSetHandle
* Data Set. No aggregation created.
*
* @return true if succeed,or fail if no column generated.
* @see DataUtil#generateComputedColumns(ReportItemHandle)
*
*/
@SuppressWarnings("unchecked")
public static List<ComputedColumn> generateComputedColumns(
ReportItemHandle handle, DataSetHandle dataSetHandle )
throws SemanticException
{
if ( dataSetHandle != null )
{
List<ResultSetColumnHandle> resultSetColumnList = DataUtil.getColumnList( dataSetHandle );
List<ComputedColumn> columnList = new ArrayList<ComputedColumn>( );
for ( ResultSetColumnHandle resultSetColumn : resultSetColumnList )
{
ComputedColumn column = StructureFactory.newComputedColumn( handle,
resultSetColumn.getColumnName( ) );
column.setDataType( resultSetColumn.getDataType( ) );
ExpressionUtility.setBindingColumnExpression( resultSetColumn,
column );
columnList.add( column );
}
return columnList;
}
return Collections.emptyList( );
}
public static void refreshBackground( ExtendedItemHandle handle,
ReportElementFigure figure )
{
refreshBackgroundColor( handle, figure );
refreshBackgroundImage( handle, figure );
}
/*
* Refresh Background: Color, Image, Repeat, PositionX, PositionY.
*/
public static void refreshBackgroundImage( ExtendedItemHandle handle,
ReportElementFigure figure )
{
String backGroundImage = ChartReportItemUIUtil.getBackgroundImage( handle );
if ( backGroundImage == null )
{
figure.setImage( null );
}
else
{
Image image = null;
try
{
image = ImageManager.getInstance( )
.getImage( handle.getModuleHandle( ), backGroundImage );
}
catch ( SWTException e )
{
// Should not be ExceptionHandler.handle(e), see SCR#73730
image = null;
}
if ( image == null )
{
figure.setImage( null );
return;
}
figure.setImage( image );
Object[] backGroundPosition = ChartReportItemUIUtil.getBackgroundPosition( handle );
int backGroundRepeat = ChartReportItemUIUtil.getBackgroundRepeat( handle );
figure.setRepeat( backGroundRepeat );
Object xPosition = backGroundPosition[0];
Object yPosition = backGroundPosition[1];
Rectangle area = figure.getClientArea( );
org.eclipse.swt.graphics.Rectangle imageArea = image.getBounds( );
Point position = new Point( -1, -1 );
int alignment = 0;
if ( xPosition instanceof Integer )
{
position.x = ( (Integer) xPosition ).intValue( );
}
else if ( xPosition instanceof DimensionValue )
{
int percentX = (int) ( (DimensionValue) xPosition ).getMeasure( );
position.x = ( area.width - imageArea.width ) * percentX / 100;
}
else if ( xPosition instanceof String )
{
alignment |= DesignElementHandleAdapter.getPosition( (String) xPosition );
}
if ( yPosition instanceof Integer )
{
position.y = ( (Integer) yPosition ).intValue( );
}
else if ( yPosition instanceof DimensionValue )
{
int percentY = (int) ( (DimensionValue) yPosition ).getMeasure( );
position.y = ( area.width - imageArea.width ) * percentY / 100;
}
else if ( yPosition instanceof String )
{
alignment |= DesignElementHandleAdapter.getPosition( (String) yPosition );
}
figure.setAlignment( alignment );
figure.setPosition( position );
}
}
public static void refreshBackgroundColor( ExtendedItemHandle handle,
IFigure figure )
{
Object obj = handle.getProperty( StyleHandle.BACKGROUND_COLOR_PROP );
figure.setOpaque( false );
if ( obj != null )
{
int color = 0xFFFFFF;
if ( obj instanceof String )
{
color = ColorUtil.parseColor( (String) obj );
}
else
{
color = ( (Integer) obj ).intValue( );
}
figure.setBackgroundColor( ColorManager.getColor( color ) );
figure.setOpaque( true );
}
}
}
| Description: Add Java doc. | chart/org.eclipse.birt.chart.reportitem.ui/src/org/eclipse/birt/chart/reportitem/ui/ChartReportItemUIUtil.java | Description: Add Java doc. | <ide><path>hart/org.eclipse.birt.chart.reportitem.ui/src/org/eclipse/birt/chart/reportitem/ui/ChartReportItemUIUtil.java
<ide> return Collections.emptyList( );
<ide> }
<ide>
<add> /**
<add> * Refresh background including color and image.
<add> *
<add> * @param handle
<add> * Item handle
<add> * @param figure
<add> * Element figure
<add> */
<ide> public static void refreshBackground( ExtendedItemHandle handle,
<ide> ReportElementFigure figure )
<ide> {
<ide> refreshBackgroundImage( handle, figure );
<ide> }
<ide>
<del> /*
<del> * Refresh Background: Color, Image, Repeat, PositionX, PositionY.
<add> /**
<add> * Refresh background image.
<add> *
<add> * @param handle
<add> * Item handle
<add> * @param figure
<add> * Element figure
<ide> */
<ide> public static void refreshBackgroundImage( ExtendedItemHandle handle,
<ide> ReportElementFigure figure )
<ide> }
<ide> }
<ide>
<add> /**
<add> * Refresh background color.
<add> *
<add> * @param handle
<add> * Item handle
<add> * @param figure
<add> * Figure
<add> */
<ide> public static void refreshBackgroundColor( ExtendedItemHandle handle,
<ide> IFigure figure )
<ide> { |
|
JavaScript | apache-2.0 | 245e63f5a3d971402b984286a071234db7868fee | 0 | jdwfly/slc-ios,jdwfly/slc-android | var _osname = Ti.Platform.osname;
var _debug = false;
var _baseUrl = "http://www.lancasterbaptist.org/slc/json";
var _eventsUrl = "/events";
var _speakersUrl = "/speakers";
var _liveUrl = "/liveevents";
var _slcDB = Ti.Database.open('slcdb');
_slcDB.execute('CREATE TABLE IF NOT EXISTS events (nid INTEGER, title TEXT, eventtype TEXT, day TEXT, datefrom TEXT, dateto TEXT, speaker TEXT, room TEXT, track TEXT, weight TEXT, download TEXT, notes TEXT)');
_slcDB.close();
var _speakerData = "";
var _liveData = "";
exports.osname = _osname;
exports.baseUrl = _baseUrl;
exports.eventsUrl = _eventsUrl;
exports.speakersUrl = _speakersUrl;
exports.liveUrl = _liveUrl;
// Sets whether we are in debug or not
// value = Boolean
exports.setDebug = function(value) {
if (value) {
_debug = true;
return _debug;
}
_debug = false;
return _debug;
}
exports.debug = function() {
return _debug;
}
exports.setSpeakerData = function(value) {
if (value) {
_speakerData = value;
return _speakerData;
}
return false;
}
exports.speakerData = function() {
return _speakerData;
}
exports.setLiveData = function(value) {
if (value) {
_liveData = value;
return _liveData;
}
return false;
}
exports.liveData = function() {
return _liveData;
}
exports.slcdbSaveEvents = function(events) {
_slcDB = Ti.Database.open('slcdb');
_slcDB.execute('DROP TABLE IF EXISTS events');
_slcDB.execute('CREATE TABLE IF NOT EXISTS events (nid INTEGER, title TEXT, eventtype TEXT, day TEXT, datefrom TEXT, dateto TEXT, speaker TEXT, room TEXT, track TEXT, weight TEXT, download TEXT, notes TEXT)');
// Remove all data first
_slcDB.execute('DELETE FROM events');
var parseEvents = JSON.parse(events), i = 0;
for (i in parseEvents.nodes) {
_slcDB.execute('INSERT INTO events (nid, title, eventtype, day, datefrom, dateto, speaker, room, track, weight, download, notes) VALUES(?,?,?,?,?,?,?,?,?,?,?,?)',
parseEvents.nodes[i].node.nid,
parseEvents.nodes[i].node.title,
parseEvents.nodes[i].node.type,
parseEvents.nodes[i].node.day,
parseEvents.nodes[i].node.from,
parseEvents.nodes[i].node.to,
parseEvents.nodes[i].node.speaker,
parseEvents.nodes[i].node.room,
parseEvents.nodes[i].node.track,
parseEvents.nodes[i].node.weight,
parseEvents.nodes[i].node.download,
parseEvents.nodes[i].node.notes
);
}
Ti.API.info('DB:LAST ROW INSERTED, lastInsertRowId = ' + _slcDB.lastInsertRowId);
Ti.App.fireEvent('schedule.updateTableView');
_slcDB.close();
}
exports.slcdbGetEvents = function(dateString) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype<>"Session" AND day="'+dateString+'" ORDER BY datefrom ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
exports.dbGetEvents = function() {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype<>"Session" ORDER BY day ASC, datefrom ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
// nid = Node ID of event requesting
// returns node data of certain node ID
exports.dbGetSingleEvent = function(nid) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT * FROM events WHERE nid='+nid);
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
// nid = Node of Parent Workshop
// returns result array of children sessions during parent workshop time
exports.dbGetWorkshopEvents = function(nid) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var parentNode = _slcDB.execute('SELECT * FROM events WHERE nid='+nid);
var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype="Session" AND day="'+parentNode.fieldByName('day')+'" AND datefrom='+parentNode.fieldByName('datefrom')+' ORDER BY weight ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
};
/**
* This will sessions from a certain time and day.
* dateFrom = time value in seconds
* day = date string formatted like 2011-07-11
* Returns a result set of session events only.
*/
exports.slcdbGetSessions = function(dateFrom, day) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT DISTINCT * FROM events WHERE eventtype="Session" AND datefrom="'+dateFrom+'" AND day="'+day+'" ORDER BY weight ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
/**
* This will get sessions by speaker
* snid = Speaker Node ID
*/
exports.slcdbGetSessionsSpeaker = function(snid) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype="Session" AND speaker="'+snid+'" ORDER BY day ASC, datefrom ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
// Helper function that converts seconds into a readable time string
exports.secondsToTime = function(seconds) {
var hours = parseInt(seconds / 3600);
var minutes = (seconds % 3600) / 60;
var meridiem = '';
if (hours > 12) {
hours = hours - 12;
meridiem = 'pm';
} else if (hours == 12) {
meridiem = 'pm';
} else if (hours == 24) {
hours = 12;
meridiem = 'am';
} else if (hours == 0) {
hours = 12;
meridiem = 'am';
} else {
meridiem = 'am';
}
if (minutes < 10) {
minutes = "0" + minutes;
}
time = hours + ':' + minutes + ' ' + meridiem;
return time;
}
// Helper function to replace certain encoded characters
exports.html_decode = function(string) {
if (string == undefined) {
return;
}
string = string.replace(/'/gi, "'");
string = string.replace(/&/gi, "&");
return string;
}
// Helper function to give the textual representation of the day of the week
// BOO on you javascript for only giving me a number
// daynum = a date string formatted 2011-07-11
exports.DayofWeek = function(daynum) {
if (daynum == null) {
return;
}
var d = new Date();
var e = daynum.split("-");
d.setFullYear(parseInt(e[0]), (parseInt(e[1])-1), parseInt(e[2]));
switch (d.getDay()) {
case 0:
return "Sunday";
case 1:
return "Monday";
case 2:
return "Tuesday";
case 3:
return "Wednesday";
case 4:
return "Thursday";
case 5:
return "Friday";
case 6:
return "Saturday";
}
}
// PHPjs strtotime function
exports.strtotime = function(str, now) {
// http://kevin.vanzonneveld.net
// + original by: Caio Ariede (http://caioariede.com)
// + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + input by: David
// + improved by: Caio Ariede (http://caioariede.com)
// + improved by: Brett Zamir (http://brett-zamir.me)
// + bugfixed by: Wagner B. Soares
// + bugfixed by: Artur Tchernychev
// % note 1: Examples all have a fixed timestamp to prevent tests to fail because of variable time(zones)
// * example 1: strtotime('+1 day', 1129633200);
// * returns 1: 1129719600
// * example 2: strtotime('+1 week 2 days 4 hours 2 seconds', 1129633200);
// * returns 2: 1130425202
// * example 3: strtotime('last month', 1129633200);
// * returns 3: 1127041200
// * example 4: strtotime('2009-05-04 08:30:00');
// * returns 4: 1241418600
var i, l, match, s, parse = '';
str = str.replace(/\s{2,}|^\s|\s$/g, ' '); // unecessary spaces
str = str.replace(/[\t\r\n]/g, ''); // unecessary chars
if (str === 'now') {
return now === null || isNaN(now) ? new Date().getTime() / 1000 | 0 : now | 0;
} else if (!isNaN(parse = Date.parse(str))) {
return parse / 1000 | 0;
} else if (now) {
now = new Date(now * 1000); // Accept PHP-style seconds
} else {
now = new Date();
}
str = str.toLowerCase();
var __is = {
day: {
'sun': 0,
'mon': 1,
'tue': 2,
'wed': 3,
'thu': 4,
'fri': 5,
'sat': 6
},
mon: [
'jan',
'feb',
'mar',
'apr',
'may',
'jun',
'jul',
'aug',
'sep',
'oct',
'nov',
'dec'
]
};
var process = function (m) {
var ago = (m[2] && m[2] === 'ago');
var num = (num = m[0] === 'last' ? -1 : 1) * (ago ? -1 : 1);
switch (m[0]) {
case 'last':
case 'next':
switch (m[1].substring(0, 3)) {
case 'yea':
now.setFullYear(now.getFullYear() + num);
break;
case 'wee':
now.setDate(now.getDate() + (num * 7));
break;
case 'day':
now.setDate(now.getDate() + num);
break;
case 'hou':
now.setHours(now.getHours() + num);
break;
case 'min':
now.setMinutes(now.getMinutes() + num);
break;
case 'sec':
now.setSeconds(now.getSeconds() + num);
break;
case 'mon':
if (m[1] === "month") {
now.setMonth(now.getMonth() + num);
break;
}
// fall through
default:
var day = __is.day[m[1].substring(0, 3)];
if (typeof day !== 'undefined') {
var diff = day - now.getDay();
if (diff === 0) {
diff = 7 * num;
} else if (diff > 0) {
if (m[0] === 'last') {
diff -= 7;
}
} else {
if (m[0] === 'next') {
diff += 7;
}
}
now.setDate(now.getDate() + diff);
now.setHours(0, 0, 0, 0); // when jumping to a specific last/previous day of week, PHP sets the time to 00:00:00
}
}
break;
default:
if (/\d+/.test(m[0])) {
num *= parseInt(m[0], 10);
switch (m[1].substring(0, 3)) {
case 'yea':
now.setFullYear(now.getFullYear() + num);
break;
case 'mon':
now.setMonth(now.getMonth() + num);
break;
case 'wee':
now.setDate(now.getDate() + (num * 7));
break;
case 'day':
now.setDate(now.getDate() + num);
break;
case 'hou':
now.setHours(now.getHours() + num);
break;
case 'min':
now.setMinutes(now.getMinutes() + num);
break;
case 'sec':
now.setSeconds(now.getSeconds() + num);
break;
}
} else {
return false;
}
break;
}
return true;
};
match = str.match(/^(\d{2,4}-\d{2}-\d{2})(?:\s(\d{1,2}:\d{2}(:\d{2})?)?(?:\.(\d+))?)?$/);
if (match !== null) {
if (!match[2]) {
match[2] = '00:00:00';
} else if (!match[3]) {
match[2] += ':00';
}
s = match[1].split(/-/g);
s[1] = __is.mon[s[1] - 1] || s[1];
s[0] = +s[0];
s[0] = (s[0] >= 0 && s[0] <= 69) ? '20' + (s[0] < 10 ? '0' + s[0] : s[0] + '') : (s[0] >= 70 && s[0] <= 99) ? '19' + s[0] : s[0] + '';
return parseInt(this.strtotime(s[2] + ' ' + s[1] + ' ' + s[0] + ' ' + match[2]) + (match[4] ? match[4] / 1000 : ''), 10);
}
var regex = '([+-]?\\d+\\s' + '(years?|months?|weeks?|days?|hours?|min|minutes?|sec|seconds?' + '|sun\\.?|sunday|mon\\.?|monday|tue\\.?|tuesday|wed\\.?|wednesday' + '|thu\\.?|thursday|fri\\.?|friday|sat\\.?|saturday)' + '|(last|next)\\s' + '(years?|months?|weeks?|days?|hours?|min|minutes?|sec|seconds?' + '|sun\\.?|sunday|mon\\.?|monday|tue\\.?|tuesday|wed\\.?|wednesday' + '|thu\\.?|thursday|fri\\.?|friday|sat\\.?|saturday))' + '(\\sago)?';
match = str.match(new RegExp(regex, 'gi')); // Brett: seems should be case insensitive per docs, so added 'i'
if (match === null) {
return false;
}
for (i = 0, l = match.length; i < l; i++) {
if (!process(match[i].split(' '))) {
return false;
}
}
return now.getTime() / 1000 | 0;
}
exports.date = function date (format, timestamp) {
var that = this,
jsdate, f, formatChr = /\\?([a-z])/gi,
formatChrCb,
// Keep this here (works, but for code commented-out
// below for file size reasons)
//, tal= [],
_pad = function (n, c) {
if ((n = n + '').length < c) {
return new Array((++c) - n.length).join('0') + n;
}
return n;
},
txt_words = ["Sun", "Mon", "Tues", "Wednes", "Thurs", "Fri", "Satur", "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"];
formatChrCb = function (t, s) {
return f[t] ? f[t]() : s;
};
f = {
// Day
d: function () { // Day of month w/leading 0; 01..31
return _pad(f.j(), 2);
},
D: function () { // Shorthand day name; Mon...Sun
return f.l().slice(0, 3);
},
j: function () { // Day of month; 1..31
return jsdate.getDate();
},
l: function () { // Full day name; Monday...Sunday
return txt_words[f.w()] + 'day';
},
N: function () { // ISO-8601 day of week; 1[Mon]..7[Sun]
return f.w() || 7;
},
S: function () { // Ordinal suffix for day of month; st, nd, rd, th
var j = f.j();
return j < 4 | j > 20 && ['st', 'nd', 'rd'][j%10 - 1] || 'th';
},
w: function () { // Day of week; 0[Sun]..6[Sat]
return jsdate.getDay();
},
z: function () { // Day of year; 0..365
var a = new Date(f.Y(), f.n() - 1, f.j()),
b = new Date(f.Y(), 0, 1);
return Math.round((a - b) / 864e5) + 1;
},
// Week
W: function () { // ISO-8601 week number
var a = new Date(f.Y(), f.n() - 1, f.j() - f.N() + 3),
b = new Date(a.getFullYear(), 0, 4);
return _pad(1 + Math.round((a - b) / 864e5 / 7), 2);
},
// Month
F: function () { // Full month name; January...December
return txt_words[6 + f.n()];
},
m: function () { // Month w/leading 0; 01...12
return _pad(f.n(), 2);
},
M: function () { // Shorthand month name; Jan...Dec
return f.F().slice(0, 3);
},
n: function () { // Month; 1...12
return jsdate.getMonth() + 1;
},
t: function () { // Days in month; 28...31
return (new Date(f.Y(), f.n(), 0)).getDate();
},
// Year
L: function () { // Is leap year?; 0 or 1
var j = f.Y();
return j%4==0 & j%100!=0 | j%400==0;
},
o: function () { // ISO-8601 year
var n = f.n(),
W = f.W(),
Y = f.Y();
return Y + (n === 12 && W < 9 ? -1 : n === 1 && W > 9);
},
Y: function () { // Full year; e.g. 1980...2010
return jsdate.getFullYear();
},
y: function () { // Last two digits of year; 00...99
return (f.Y() + "").slice(-2);
},
// Time
a: function () { // am or pm
return jsdate.getHours() > 11 ? "pm" : "am";
},
A: function () { // AM or PM
return f.a().toUpperCase();
},
B: function () { // Swatch Internet time; 000..999
var H = jsdate.getUTCHours() * 36e2,
// Hours
i = jsdate.getUTCMinutes() * 60,
// Minutes
s = jsdate.getUTCSeconds(); // Seconds
return _pad(Math.floor((H + i + s + 36e2) / 86.4) % 1e3, 3);
},
g: function () { // 12-Hours; 1..12
return f.G() % 12 || 12;
},
G: function () { // 24-Hours; 0..23
return jsdate.getHours();
},
h: function () { // 12-Hours w/leading 0; 01..12
return _pad(f.g(), 2);
},
H: function () { // 24-Hours w/leading 0; 00..23
return _pad(f.G(), 2);
},
i: function () { // Minutes w/leading 0; 00..59
return _pad(jsdate.getMinutes(), 2);
},
s: function () { // Seconds w/leading 0; 00..59
return _pad(jsdate.getSeconds(), 2);
},
u: function () { // Microseconds; 000000-999000
return _pad(jsdate.getMilliseconds() * 1000, 6);
},
// Timezone
e: function () {
throw 'Not supported (see source code of date() for timezone on how to add support)';
},
I: function () { // DST observed?; 0 or 1
// Compares Jan 1 minus Jan 1 UTC to Jul 1 minus Jul 1 UTC.
// If they are not equal, then DST is observed.
var a = new Date(f.Y(), 0),
// Jan 1
c = Date.UTC(f.Y(), 0),
// Jan 1 UTC
b = new Date(f.Y(), 6),
// Jul 1
d = Date.UTC(f.Y(), 6); // Jul 1 UTC
return 0 + ((a - c) !== (b - d));
},
O: function () { // Difference to GMT in hour format; e.g. +0200
var tzo = jsdate.getTimezoneOffset(),
a = Math.abs(tzo);
return (tzo > 0 ? "-" : "+") + _pad(Math.floor(a / 60) * 100 + a % 60, 4);
},
P: function () { // Difference to GMT w/colon; e.g. +02:00
var O = f.O();
return (O.substr(0, 3) + ":" + O.substr(3, 2));
},
T: function () {
return 'UTC';
},
Z: function () { // Timezone offset in seconds (-43200...50400)
return -jsdate.getTimezoneOffset() * 60;
},
// Full Date/Time
c: function () { // ISO-8601 date.
return 'Y-m-d\\Th:i:sP'.replace(formatChr, formatChrCb);
},
r: function () { // RFC 2822
return 'D, d M Y H:i:s O'.replace(formatChr, formatChrCb);
},
U: function () { // Seconds since UNIX epoch
return jsdate / 1000 | 0;
}
};
this.date = function (format, timestamp) {
that = this;
jsdate = (timestamp == null ? new Date() : // Not provided
(timestamp instanceof Date) ? new Date(timestamp) : // JS Date()
new Date(timestamp * 1000) // UNIX timestamp (auto-convert to int)
);
return format.replace(formatChr, formatChrCb);
};
return this.date(format, timestamp);
}
exports.prettyDate = function(time) {
var monthname = ["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"];
var date = new Date(time*1000),
diff = (((new Date()).getTime() - date.getTime()) / 1000),
day_diff = Math.floor(diff / 86400);
if ( isNaN(day_diff) || day_diff < 0 ){
return '';
}
if(day_diff >= 31){
var date_year = date.getFullYear();
var month_name = monthname[date.getMonth()];
var date_month = date.getMonth() + 1;
if(date_month < 10){
date_month = "0"+date_month;
}
var date_monthday = date.getDate();
if(date_monthday < 10){
date_monthday = "0"+date_monthday;
}
return date_monthday + " " + month_name + " " + date_year;
}
return day_diff == 0 && (
diff < 60 && "just now" ||
diff < 120 && "1 minute ago" ||
diff < 3600 && Math.floor( diff / 60 ) + " minutes ago" ||
diff < 7200 && "1 hour ago" ||
diff < 86400 && "about " + Math.floor( diff / 3600 ) + " hours ago") ||
day_diff == 1 && "Yesterday" ||
day_diff < 7 && day_diff + " days ago" ||
day_diff < 31 && Math.ceil( day_diff / 7 ) + " week" + ((Math.ceil( day_diff / 7 )) == 1 ? "" : "s") + " ago";
} | Resources/lib/globals.js | var _osname = Ti.Platform.osname;
var _debug = false;
var _baseUrl = "http://www.lancasterbaptist.org/slc/json";
var _eventsUrl = "/events";
var _speakersUrl = "/speakers";
var _liveUrl = "/liveevents";
var _slcDB = Ti.Database.open('slcdb');
_slcDB.execute('CREATE TABLE IF NOT EXISTS events (nid INTEGER, title TEXT, eventtype TEXT, day TEXT, datefrom TEXT, dateto TEXT, speaker TEXT, room TEXT, track TEXT, weight TEXT, download TEXT, notes TEXT)');
_slcDB.close();
var _speakerData = "";
var _liveData = "";
exports.osname = _osname;
exports.baseUrl = _baseUrl;
exports.eventsUrl = _eventsUrl;
exports.speakersUrl = _speakersUrl;
exports.liveUrl = _liveUrl;
// Sets whether we are in debug or not
// value = Boolean
exports.setDebug = function(value) {
if (value) {
_debug = true;
return _debug;
}
_debug = false;
return _debug;
}
exports.debug = function() {
return _debug;
}
exports.setSpeakerData = function(value) {
if (value) {
_speakerData = value;
return _speakerData;
}
return false;
}
exports.speakerData = function() {
return _speakerData;
}
exports.setLiveData = function(value) {
if (value) {
_liveData = value;
return _liveData;
}
return false;
}
exports.liveData = function() {
return _liveData;
}
exports.slcdbSaveEvents = function(events) {
_slcDB = Ti.Database.open('slcdb');
_slcDB.execute('DROP TABLE IF EXISTS events');
_slcDB.execute('CREATE TABLE IF NOT EXISTS events (nid INTEGER, title TEXT, eventtype TEXT, day TEXT, datefrom TEXT, dateto TEXT, speaker TEXT, room TEXT, track TEXT, weight TEXT, download TEXT, notes TEXT)');
// Remove all data first
_slcDB.execute('DELETE FROM events');
var parseEvents = JSON.parse(events), i = 0;
for (i in parseEvents.nodes) {
_slcDB.execute('INSERT INTO events (nid, title, eventtype, day, datefrom, dateto, speaker, room, track, weight, download, notes) VALUES(?,?,?,?,?,?,?,?,?,?,?,?)',
parseEvents.nodes[i].node.nid,
parseEvents.nodes[i].node.title,
parseEvents.nodes[i].node.type,
parseEvents.nodes[i].node.day,
parseEvents.nodes[i].node.from,
parseEvents.nodes[i].node.to,
parseEvents.nodes[i].node.speaker,
parseEvents.nodes[i].node.room,
parseEvents.nodes[i].node.track,
parseEvents.nodes[i].node.weight,
parseEvents.nodes[i].node.download,
parseEvents.nodes[i].node.notes
);
}
Ti.API.info('DB:LAST ROW INSERTED, lastInsertRowId = ' + _slcDB.lastInsertRowId);
Ti.App.fireEvent('schedule.updateTableView');
_slcDB.close();
}
exports.slcdbGetEvents = function(dateString) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype<>"Session" AND day="'+dateString+'" ORDER BY datefrom ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
exports.dbGetEvents = function() {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype<>"Session" ORDER BY day ASC, datefrom ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
// nid = Node ID of event requesting
// returns node data of certain node ID
exports.dbGetSingleEvent = function(nid) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT * FROM events WHERE nid='+nid);
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
// nid = Node of Parent Workshop
// returns result array of children sessions during parent workshop time
exports.dbGetWorkshopEvents = function(nid) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var parentNode = _slcDB.execute('SELECT * FROM events WHERE nid='+nid);
var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype="Session" AND day="'+parentNode.fieldByName('day')+'" AND datefrom='+parentNode.fieldByName('datefrom')+' ORDER BY weight ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
};
/**
* This will sessions from a certain time and day.
* dateFrom = time value in seconds
* day = date string formatted like 2011-07-11
* Returns a result set of session events only.
*/
exports.slcdbGetSessions = function(dateFrom, day) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT DISTINCT * FROM events WHERE eventtype="Session" AND datefrom="'+dateFrom+'" AND day="'+day+'" ORDER BY weight ASC');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
/**
* This will get sessions by speaker
* snid = Speaker Node ID
*/
exports.slcdbGetSessionsSpeaker = function(snid) {
var results = [];
_slcDB = Ti.Database.open('slcdb');
var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype="Session" AND speaker="'+snid+'"');
Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
while (resultSet.isValidRow()) {
results.push({
nid: resultSet.fieldByName('nid'),
title: resultSet.fieldByName('title'),
eventtype: resultSet.fieldByName('eventtype'),
day: resultSet.fieldByName('day'),
datefrom: resultSet.fieldByName('datefrom'),
dateto: resultSet.fieldByName('dateto'),
speaker: resultSet.fieldByName('speaker'),
room: resultSet.fieldByName('room'),
track: resultSet.fieldByName('track'),
weight: resultSet.fieldByName('weight'),
download: resultSet.fieldByName('download'),
notes: resultSet.fieldByName('notes')
});
resultSet.next();
}
_slcDB.close();
return results;
}
// Helper function that converts seconds into a readable time string
exports.secondsToTime = function(seconds) {
var hours = parseInt(seconds / 3600);
var minutes = (seconds % 3600) / 60;
var meridiem = '';
if (hours > 12) {
hours = hours - 12;
meridiem = 'pm';
} else if (hours == 12) {
meridiem = 'pm';
} else if (hours == 24) {
hours = 12;
meridiem = 'am';
} else if (hours == 0) {
hours = 12;
meridiem = 'am';
} else {
meridiem = 'am';
}
if (minutes < 10) {
minutes = "0" + minutes;
}
time = hours + ':' + minutes + ' ' + meridiem;
return time;
}
// Helper function to replace certain encoded characters
exports.html_decode = function(string) {
if (string == undefined) {
return;
}
string = string.replace(/'/gi, "'");
string = string.replace(/&/gi, "&");
return string;
}
// Helper function to give the textual representation of the day of the week
// BOO on you javascript for only giving me a number
// daynum = a date string formatted 2011-07-11
exports.DayofWeek = function(daynum) {
if (daynum == null) {
return;
}
var d = new Date();
var e = daynum.split("-");
d.setFullYear(parseInt(e[0]), (parseInt(e[1])-1), parseInt(e[2]));
switch (d.getDay()) {
case 0:
return "Sunday";
case 1:
return "Monday";
case 2:
return "Tuesday";
case 3:
return "Wednesday";
case 4:
return "Thursday";
case 5:
return "Friday";
case 6:
return "Saturday";
}
}
// PHPjs strtotime function
exports.strtotime = function(str, now) {
// http://kevin.vanzonneveld.net
// + original by: Caio Ariede (http://caioariede.com)
// + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + input by: David
// + improved by: Caio Ariede (http://caioariede.com)
// + improved by: Brett Zamir (http://brett-zamir.me)
// + bugfixed by: Wagner B. Soares
// + bugfixed by: Artur Tchernychev
// % note 1: Examples all have a fixed timestamp to prevent tests to fail because of variable time(zones)
// * example 1: strtotime('+1 day', 1129633200);
// * returns 1: 1129719600
// * example 2: strtotime('+1 week 2 days 4 hours 2 seconds', 1129633200);
// * returns 2: 1130425202
// * example 3: strtotime('last month', 1129633200);
// * returns 3: 1127041200
// * example 4: strtotime('2009-05-04 08:30:00');
// * returns 4: 1241418600
var i, l, match, s, parse = '';
str = str.replace(/\s{2,}|^\s|\s$/g, ' '); // unecessary spaces
str = str.replace(/[\t\r\n]/g, ''); // unecessary chars
if (str === 'now') {
return now === null || isNaN(now) ? new Date().getTime() / 1000 | 0 : now | 0;
} else if (!isNaN(parse = Date.parse(str))) {
return parse / 1000 | 0;
} else if (now) {
now = new Date(now * 1000); // Accept PHP-style seconds
} else {
now = new Date();
}
str = str.toLowerCase();
var __is = {
day: {
'sun': 0,
'mon': 1,
'tue': 2,
'wed': 3,
'thu': 4,
'fri': 5,
'sat': 6
},
mon: [
'jan',
'feb',
'mar',
'apr',
'may',
'jun',
'jul',
'aug',
'sep',
'oct',
'nov',
'dec'
]
};
var process = function (m) {
var ago = (m[2] && m[2] === 'ago');
var num = (num = m[0] === 'last' ? -1 : 1) * (ago ? -1 : 1);
switch (m[0]) {
case 'last':
case 'next':
switch (m[1].substring(0, 3)) {
case 'yea':
now.setFullYear(now.getFullYear() + num);
break;
case 'wee':
now.setDate(now.getDate() + (num * 7));
break;
case 'day':
now.setDate(now.getDate() + num);
break;
case 'hou':
now.setHours(now.getHours() + num);
break;
case 'min':
now.setMinutes(now.getMinutes() + num);
break;
case 'sec':
now.setSeconds(now.getSeconds() + num);
break;
case 'mon':
if (m[1] === "month") {
now.setMonth(now.getMonth() + num);
break;
}
// fall through
default:
var day = __is.day[m[1].substring(0, 3)];
if (typeof day !== 'undefined') {
var diff = day - now.getDay();
if (diff === 0) {
diff = 7 * num;
} else if (diff > 0) {
if (m[0] === 'last') {
diff -= 7;
}
} else {
if (m[0] === 'next') {
diff += 7;
}
}
now.setDate(now.getDate() + diff);
now.setHours(0, 0, 0, 0); // when jumping to a specific last/previous day of week, PHP sets the time to 00:00:00
}
}
break;
default:
if (/\d+/.test(m[0])) {
num *= parseInt(m[0], 10);
switch (m[1].substring(0, 3)) {
case 'yea':
now.setFullYear(now.getFullYear() + num);
break;
case 'mon':
now.setMonth(now.getMonth() + num);
break;
case 'wee':
now.setDate(now.getDate() + (num * 7));
break;
case 'day':
now.setDate(now.getDate() + num);
break;
case 'hou':
now.setHours(now.getHours() + num);
break;
case 'min':
now.setMinutes(now.getMinutes() + num);
break;
case 'sec':
now.setSeconds(now.getSeconds() + num);
break;
}
} else {
return false;
}
break;
}
return true;
};
match = str.match(/^(\d{2,4}-\d{2}-\d{2})(?:\s(\d{1,2}:\d{2}(:\d{2})?)?(?:\.(\d+))?)?$/);
if (match !== null) {
if (!match[2]) {
match[2] = '00:00:00';
} else if (!match[3]) {
match[2] += ':00';
}
s = match[1].split(/-/g);
s[1] = __is.mon[s[1] - 1] || s[1];
s[0] = +s[0];
s[0] = (s[0] >= 0 && s[0] <= 69) ? '20' + (s[0] < 10 ? '0' + s[0] : s[0] + '') : (s[0] >= 70 && s[0] <= 99) ? '19' + s[0] : s[0] + '';
return parseInt(this.strtotime(s[2] + ' ' + s[1] + ' ' + s[0] + ' ' + match[2]) + (match[4] ? match[4] / 1000 : ''), 10);
}
var regex = '([+-]?\\d+\\s' + '(years?|months?|weeks?|days?|hours?|min|minutes?|sec|seconds?' + '|sun\\.?|sunday|mon\\.?|monday|tue\\.?|tuesday|wed\\.?|wednesday' + '|thu\\.?|thursday|fri\\.?|friday|sat\\.?|saturday)' + '|(last|next)\\s' + '(years?|months?|weeks?|days?|hours?|min|minutes?|sec|seconds?' + '|sun\\.?|sunday|mon\\.?|monday|tue\\.?|tuesday|wed\\.?|wednesday' + '|thu\\.?|thursday|fri\\.?|friday|sat\\.?|saturday))' + '(\\sago)?';
match = str.match(new RegExp(regex, 'gi')); // Brett: seems should be case insensitive per docs, so added 'i'
if (match === null) {
return false;
}
for (i = 0, l = match.length; i < l; i++) {
if (!process(match[i].split(' '))) {
return false;
}
}
return now.getTime() / 1000 | 0;
}
exports.date = function date (format, timestamp) {
var that = this,
jsdate, f, formatChr = /\\?([a-z])/gi,
formatChrCb,
// Keep this here (works, but for code commented-out
// below for file size reasons)
//, tal= [],
_pad = function (n, c) {
if ((n = n + '').length < c) {
return new Array((++c) - n.length).join('0') + n;
}
return n;
},
txt_words = ["Sun", "Mon", "Tues", "Wednes", "Thurs", "Fri", "Satur", "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"];
formatChrCb = function (t, s) {
return f[t] ? f[t]() : s;
};
f = {
// Day
d: function () { // Day of month w/leading 0; 01..31
return _pad(f.j(), 2);
},
D: function () { // Shorthand day name; Mon...Sun
return f.l().slice(0, 3);
},
j: function () { // Day of month; 1..31
return jsdate.getDate();
},
l: function () { // Full day name; Monday...Sunday
return txt_words[f.w()] + 'day';
},
N: function () { // ISO-8601 day of week; 1[Mon]..7[Sun]
return f.w() || 7;
},
S: function () { // Ordinal suffix for day of month; st, nd, rd, th
var j = f.j();
return j < 4 | j > 20 && ['st', 'nd', 'rd'][j%10 - 1] || 'th';
},
w: function () { // Day of week; 0[Sun]..6[Sat]
return jsdate.getDay();
},
z: function () { // Day of year; 0..365
var a = new Date(f.Y(), f.n() - 1, f.j()),
b = new Date(f.Y(), 0, 1);
return Math.round((a - b) / 864e5) + 1;
},
// Week
W: function () { // ISO-8601 week number
var a = new Date(f.Y(), f.n() - 1, f.j() - f.N() + 3),
b = new Date(a.getFullYear(), 0, 4);
return _pad(1 + Math.round((a - b) / 864e5 / 7), 2);
},
// Month
F: function () { // Full month name; January...December
return txt_words[6 + f.n()];
},
m: function () { // Month w/leading 0; 01...12
return _pad(f.n(), 2);
},
M: function () { // Shorthand month name; Jan...Dec
return f.F().slice(0, 3);
},
n: function () { // Month; 1...12
return jsdate.getMonth() + 1;
},
t: function () { // Days in month; 28...31
return (new Date(f.Y(), f.n(), 0)).getDate();
},
// Year
L: function () { // Is leap year?; 0 or 1
var j = f.Y();
return j%4==0 & j%100!=0 | j%400==0;
},
o: function () { // ISO-8601 year
var n = f.n(),
W = f.W(),
Y = f.Y();
return Y + (n === 12 && W < 9 ? -1 : n === 1 && W > 9);
},
Y: function () { // Full year; e.g. 1980...2010
return jsdate.getFullYear();
},
y: function () { // Last two digits of year; 00...99
return (f.Y() + "").slice(-2);
},
// Time
a: function () { // am or pm
return jsdate.getHours() > 11 ? "pm" : "am";
},
A: function () { // AM or PM
return f.a().toUpperCase();
},
B: function () { // Swatch Internet time; 000..999
var H = jsdate.getUTCHours() * 36e2,
// Hours
i = jsdate.getUTCMinutes() * 60,
// Minutes
s = jsdate.getUTCSeconds(); // Seconds
return _pad(Math.floor((H + i + s + 36e2) / 86.4) % 1e3, 3);
},
g: function () { // 12-Hours; 1..12
return f.G() % 12 || 12;
},
G: function () { // 24-Hours; 0..23
return jsdate.getHours();
},
h: function () { // 12-Hours w/leading 0; 01..12
return _pad(f.g(), 2);
},
H: function () { // 24-Hours w/leading 0; 00..23
return _pad(f.G(), 2);
},
i: function () { // Minutes w/leading 0; 00..59
return _pad(jsdate.getMinutes(), 2);
},
s: function () { // Seconds w/leading 0; 00..59
return _pad(jsdate.getSeconds(), 2);
},
u: function () { // Microseconds; 000000-999000
return _pad(jsdate.getMilliseconds() * 1000, 6);
},
// Timezone
e: function () {
throw 'Not supported (see source code of date() for timezone on how to add support)';
},
I: function () { // DST observed?; 0 or 1
// Compares Jan 1 minus Jan 1 UTC to Jul 1 minus Jul 1 UTC.
// If they are not equal, then DST is observed.
var a = new Date(f.Y(), 0),
// Jan 1
c = Date.UTC(f.Y(), 0),
// Jan 1 UTC
b = new Date(f.Y(), 6),
// Jul 1
d = Date.UTC(f.Y(), 6); // Jul 1 UTC
return 0 + ((a - c) !== (b - d));
},
O: function () { // Difference to GMT in hour format; e.g. +0200
var tzo = jsdate.getTimezoneOffset(),
a = Math.abs(tzo);
return (tzo > 0 ? "-" : "+") + _pad(Math.floor(a / 60) * 100 + a % 60, 4);
},
P: function () { // Difference to GMT w/colon; e.g. +02:00
var O = f.O();
return (O.substr(0, 3) + ":" + O.substr(3, 2));
},
T: function () {
return 'UTC';
},
Z: function () { // Timezone offset in seconds (-43200...50400)
return -jsdate.getTimezoneOffset() * 60;
},
// Full Date/Time
c: function () { // ISO-8601 date.
return 'Y-m-d\\Th:i:sP'.replace(formatChr, formatChrCb);
},
r: function () { // RFC 2822
return 'D, d M Y H:i:s O'.replace(formatChr, formatChrCb);
},
U: function () { // Seconds since UNIX epoch
return jsdate / 1000 | 0;
}
};
this.date = function (format, timestamp) {
that = this;
jsdate = (timestamp == null ? new Date() : // Not provided
(timestamp instanceof Date) ? new Date(timestamp) : // JS Date()
new Date(timestamp * 1000) // UNIX timestamp (auto-convert to int)
);
return format.replace(formatChr, formatChrCb);
};
return this.date(format, timestamp);
}
exports.prettyDate = function(time) {
var monthname = ["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"];
var date = new Date(time*1000),
diff = (((new Date()).getTime() - date.getTime()) / 1000),
day_diff = Math.floor(diff / 86400);
if ( isNaN(day_diff) || day_diff < 0 ){
return '';
}
if(day_diff >= 31){
var date_year = date.getFullYear();
var month_name = monthname[date.getMonth()];
var date_month = date.getMonth() + 1;
if(date_month < 10){
date_month = "0"+date_month;
}
var date_monthday = date.getDate();
if(date_monthday < 10){
date_monthday = "0"+date_monthday;
}
return date_monthday + " " + month_name + " " + date_year;
}
return day_diff == 0 && (
diff < 60 && "just now" ||
diff < 120 && "1 minute ago" ||
diff < 3600 && Math.floor( diff / 60 ) + " minutes ago" ||
diff < 7200 && "1 hour ago" ||
diff < 86400 && "about " + Math.floor( diff / 3600 ) + " hours ago") ||
day_diff == 1 && "Yesterday" ||
day_diff < 7 && day_diff + " days ago" ||
day_diff < 31 && Math.ceil( day_diff / 7 ) + " week" + ((Math.ceil( day_diff / 7 )) == 1 ? "" : "s") + " ago";
} | fixed incorrect session order when multiple sessions are taught by a speaker.
| Resources/lib/globals.js | fixed incorrect session order when multiple sessions are taught by a speaker. | <ide><path>esources/lib/globals.js
<ide> exports.slcdbGetSessionsSpeaker = function(snid) {
<ide> var results = [];
<ide> _slcDB = Ti.Database.open('slcdb');
<del> var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype="Session" AND speaker="'+snid+'"');
<add> var resultSet = _slcDB.execute('SELECT * FROM events WHERE eventtype="Session" AND speaker="'+snid+'" ORDER BY day ASC, datefrom ASC');
<ide> Ti.API.info('ROWS FETCHED = ' + resultSet.getRowCount());
<ide> while (resultSet.isValidRow()) {
<ide> results.push({ |
|
Java | apache-2.0 | 13d38adcd63f81eb4eae33803971156aefe7e96e | 0 | BankingBoys/amos-ss17-proj7,BankingBoys/amos-ss17-proj7 | package de.fau.amos.virtualledger.android.views.savings;
import android.app.Activity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.List;
import de.fau.amos.virtualledger.R;
import de.fau.amos.virtualledger.dtos.SavingsAccountSubGoal;
public class SubgoalAdapter extends ArrayAdapter<SavingsAccountSubGoal> {
public SubgoalAdapter(Activity activity, int layout, List<SavingsAccountSubGoal> data) {
super(activity, layout, data);
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if (convertView == null) {
convertView = LayoutInflater.from(getContext()).inflate(R.layout.subgoals_list_item, parent, false);
}
final SavingsAccountSubGoal subgoal = this.getItem(position);
updateText(convertView, R.id.subgoal_name, subgoal.getName());
updateText(convertView, R.id.subgoal_amount, String.valueOf(Math.round(subgoal.getAmount())));
return convertView;
}
private void updateText(View convertView, int id, String text) {
TextView goalBalance = (TextView) convertView.findViewById(id);
goalBalance.setText(text);
}
}
| app/src/main/java/de/fau/amos/virtualledger/android/views/savings/SubgoalAdapter.java | package de.fau.amos.virtualledger.android.views.savings;
import android.app.Activity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.List;
import de.fau.amos.virtualledger.R;
import de.fau.amos.virtualledger.dtos.SavingsAccountSubGoal;
public class SubgoalAdapter extends ArrayAdapter<SavingsAccountSubGoal> {
private Activity activity;
public SubgoalAdapter(Activity activity, int layout, List<SavingsAccountSubGoal> data) {
super(activity, layout, data);
this.activity = activity;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if (convertView == null) {
convertView = LayoutInflater.from(getContext()).inflate(R.layout.subgoals_list_item, parent, false);
}
final SavingsAccountSubGoal subgoal = this.getItem(position);
updateText(convertView, R.id.subgoal_name, subgoal.getName());
updateText(convertView, R.id.subgoal_amount, String.valueOf(Math.round(subgoal.getAmount())));
return convertView;
}
private void updateText(View convertView, int id, String text) {
TextView goalBalance = (TextView) convertView.findViewById(id);
goalBalance.setText(text);
}
}
| fixed adorsys-todo: unused member
| app/src/main/java/de/fau/amos/virtualledger/android/views/savings/SubgoalAdapter.java | fixed adorsys-todo: unused member | <ide><path>pp/src/main/java/de/fau/amos/virtualledger/android/views/savings/SubgoalAdapter.java
<ide>
<ide> public class SubgoalAdapter extends ArrayAdapter<SavingsAccountSubGoal> {
<ide>
<del> private Activity activity;
<del>
<ide> public SubgoalAdapter(Activity activity, int layout, List<SavingsAccountSubGoal> data) {
<ide> super(activity, layout, data);
<del> this.activity = activity;
<ide> }
<ide>
<ide> @Override |
|
JavaScript | mit | ee05eb51265eb5a558ccc57bd31f6f66a4a38bca | 0 | spreadshirt/rAppid.js-sprd | define(["js/ui/View", "js/core/Bus", "sprd/manager/ProductManager", "sprd/data/ImageService"], function (View, Bus, ProductManager, ImageService) {
return View.inherit('sprd.view.ProductViewerClass', {
defaults: {
view: null,
product: null,
width: 300,
height: 300,
selectedConfiguration: null,
editable: true,
focused: true,
productViewerSvg: null,
textArea: null,
textAreaPosition: null,
removeEmptyTextConfiguration: true,
imageService: null,
errorKey: "{product.configurationsOnViewErrorKey(view)}",
componentClass: "product-viewer"
},
inject: {
bus: Bus,
productManager: ProductManager,
imageService: ImageService
},
events: ['on:configurationSelect'],
ctor: function () {
this.callBase();
this.bind('productViewerSvg', 'add:configurationViewer', this._onConfigurationViewerAdded, this);
this.bind('product.configurations', 'reset', this._onConfigurationsReset, this);
this.bind('product.configurations', 'remove', function (e) {
if (e.$.item === this.$.selectedConfiguration) {
this.set('selectedConfiguration', null);
}
}, this);
this.bind('selectedConfiguration', 'change:scale', this._positionTextArea, this);
this.bind('selectedConfiguration', 'change:offset', this._positionTextArea, this);
},
_commitSelectedConfiguration: function (selectedConfiguration, oldSelectedConfiguration) {
if (this.$.removeEmptyTextConfiguration && oldSelectedConfiguration &&
oldSelectedConfiguration.type === "text" && oldSelectedConfiguration.$.textFlow &&
this.$.product) {
var text = oldSelectedConfiguration.$.textFlow.text(0, -1);
if (/^[\s\n\r]*$/.test(text)) {
this.$.product.$.configurations.remove(oldSelectedConfiguration);
}
}
this._positionTextArea();
},
keyUp: function (e) {
if (!this.$stage.$browser.isIOS) {
return;
}
if (this.$keyPressHandled) {
return;
}
// this is a work-a-round because keypress event isn't available on android
var value = this.$.textArea.$el.value;
if (this.$lastValue !== value && value && value.length !== 0) {
// input
var c = value.substr(-1),
viewer = this.$.selectedConfigurationViewer;
if (c && viewer) {
viewer.addChar(c);
}
}
this.$lastValue = value;
this.$.textArea.$el.value = "";
},
keyPress: function (e) {
if (!this.$stage.$browser.isIOS) {
return;
}
this.$keyPressHandled = true;
this.$.textArea.$el.value = "";
this._keyPressHandler(e.domEvent);
},
keyDown: function (e) {
if (!this.$stage.$browser.isIOS) {
return;
}
this.$keyPressHandled = false;
this._keyDownHandler(e.domEvent);
},
_positionTextArea: function () {
try {
var position = null,
selectedConfiguration = this.$.selectedConfiguration;
if (!this.$textAreaFocused && this.$.editable && selectedConfiguration && selectedConfiguration.type === "text" && this.$.productViewerSvg && this.$.productViewerSvg.$currentProductTypeViewViewer) {
var factor = this.$.productViewerSvg.localToGlobalFactor(),
view = this.$.productViewerSvg.$currentProductTypeViewViewer.$._view,
viewMap;
for (var i = 0; i < view.$.viewMaps.$items.length; i++) {
if (view.$.viewMaps.$items[i].$.printArea === selectedConfiguration.$.printArea) {
viewMap = view.$.viewMaps.$items[i];
break;
}
}
if (viewMap) {
position = {
x: (viewMap.get("offset.x") + selectedConfiguration.get("offset.x")) * factor.x + 14,
y: (viewMap.get("offset.y") + selectedConfiguration.get("offset.y")) * factor.y - 2,
width: selectedConfiguration.width() * factor.x - 25,
height: selectedConfiguration.height() * factor.y - 10
};
}
}
this.set("textAreaPosition", position);
} catch (e) {
if (this.$.bus) {
this.$.bus.trigger("Application.Error", e);
} else {
throw e;
}
}
},
_onConfigurationsReset: function () {
this.set('selectedConfiguration', null);
},
_onConfigurationViewerAdded: function (e) {
var viewer = e.$;
if (viewer) {
if (viewer.$.configuration === this.$.selectedConfiguration) {
this.set('selectedConfigurationViewer', viewer);
}
this.trigger('add:configurationViewer', viewer);
}
},
_clickHandler: function (e) {
if (this.$.editable && !(e.isDefaultPrevented || e.defaultPrevented) && e.domEvent && e.domEvent.target !== this.$.textArea.$el) {
this.set('selectedConfiguration', null);
}
this.set('focused', true);
},
_commitChangedAttributes: function ($) {
this.callBase();
if ($ && $.hasOwnProperty('selectedConfiguration')) {
var configuration = $['selectedConfiguration'],
viewer = null;
if (!configuration) {
viewer = null;
} else {
viewer = this.getViewerForConfiguration(configuration);
}
this.trigger('on:configurationSelect', configuration);
this.set('selectedConfigurationViewer', viewer);
}
},
getViewerForConfiguration: function (configuration) {
if (this.$.productViewerSvg) {
return this.$.productViewerSvg.getViewerForConfiguration(configuration);
}
return null;
},
_keyDownHandler: function (e) {
var self = this,
product = self.$.product;
var viewer = this.$.selectedConfigurationViewer;
if (viewer) {
viewer._keyDown(e);
if (e.defaultPrevented) {
return;
}
}
var selectedConfiguration = self.$.selectedConfiguration;
if (selectedConfiguration && product) {
var deltaX = 0,
deltaY = 0;
switch (e.keyCode) {
case 40:
deltaY = 1;
break;
case 38:
deltaY = -1;
break;
case 37:
deltaX = -1;
break;
case 39:
deltaX = 1;
}
if (deltaX || deltaY) {
if (e.shiftKey) {
deltaX *= 10;
deltaY *= 10;
}
var offset = selectedConfiguration.$.offset.clone();
offset.set({
x: offset.$.x + deltaX,
y: offset.$.y + deltaY
});
selectedConfiguration.set('offset', offset);
e.preventDefault();
e.stopPropagation();
}
if (e.keyCode === 8 || e.keyCode === 46) {
// backspace || delete --> remove selected configuration
product.$.configurations.remove(selectedConfiguration);
self.set('selectedConfiguration', null);
e.preventDefault();
e.stopPropagation();
}
}
},
_keyPressHandler: function (e) {
var viewer = this.$.selectedConfigurationViewer;
if (viewer) {
viewer._keyPress(e);
}
},
_bindDomEvents: function () {
if (this.runsInBrowser() && this.$.editable) {
var self = this;
this.bind("on:click", this._clickHandler, this);
this.$stage.bind('on:blur', function () {
self.set('focused', false);
});
this.$stage.bind('on:focus', function () {
self.set('focused', true);
});
this.callBase();
}
},
textAreaFocused: function () {
this.set('focused', true);
if (this.$stage.$browser.isIOS) {
this.$.textArea.set('visibility', 'hidden');
} else {
// android hack
this.$stage.set('height', this.$stage.$el.offsetHeight);
var self = this;
setTimeout(function () {
self._positionTextArea();
self.$.textArea.set({
opacity: 1.0,
value: self.$.selectedConfiguration.$.textFlow.text(0, -1, "\n").replace(/\n$/, "")
});
self.$.textArea.set('opacity', 1.0);
}, 1000);
}
this.addClass("text-area-active");
},
_delegateEvent: function (e) {
if(this.$stage.$browser.isIOS || this.$.textArea.get('opacity') == 0) {
var viewer = this.$.selectedConfigurationViewer;
if (viewer) {
viewer._down(e.domEvent, viewer._isGesture(e.domEvent) ? "gesture" : "move");
}
this.$pointerMoveEventTriggerd = false;
}
},
_textAreaMove: function () {
this.$pointerMoveEventTriggerd = true;
},
_endTextAreaMove: function (e) {
if (!this.$pointerMoveEventTriggerd) {
e.target.focus();
}
},
textAreaBlured: function () {
this.set('focused', false);
if (this.$stage.$browser.isIOS) {
this.$.textArea.set('visibility', 'visible');
var viewer = this.$.selectedConfigurationViewer;
if (viewer) {
viewer.set('focused', false);
}
} else {
// android hack
var self = this;
setTimeout(function () {
self.$stage.set('height', '100%');
}, 200);
this.$.productManager.setTextForConfiguration(this.$.textArea.$.value, this.$.selectedConfiguration);
self.$.textArea.set('opacity', 0);
}
this.removeClass("text-area-active");
},
showTextAreaOverlay: function () {
return this.$.editable &&
this.$.selectedConfiguration && this.$.selectedConfiguration.type === "text" &&
this.runsInBrowser() && ('ontouchstart' in window);
}.onChange("selectedConfiguration", "editable")
});
}); | sprd/view/ProductViewerClass.js | define(["js/ui/View", "js/core/Bus", "sprd/manager/ProductManager", "sprd/data/ImageService"], function (View, Bus, ProductManager, ImageService) {
return View.inherit('sprd.view.ProductViewerClass', {
defaults: {
view: null,
product: null,
width: 300,
height: 300,
selectedConfiguration: null,
editable: true,
focused: true,
productViewerSvg: null,
textArea: null,
textAreaPosition: null,
removeEmptyTextConfiguration: true,
imageService: null,
errorKey: "{product.configurationsOnViewErrorKey(view)}",
componentClass: "product-viewer"
},
inject: {
bus: Bus,
productManager: ProductManager,
imageService: ImageService
},
events: ['on:configurationSelect'],
ctor: function () {
this.callBase();
this.bind('productViewerSvg', 'add:configurationViewer', this._onConfigurationViewerAdded, this);
this.bind('product.configurations', 'reset', this._onConfigurationsReset, this);
this.bind('product.configurations', 'remove', function (e) {
if (e.$.item === this.$.selectedConfiguration) {
this.set('selectedConfiguration', null);
}
}, this);
this.bind('selectedConfiguration', 'change:scale', this._positionTextArea, this);
this.bind('selectedConfiguration', 'change:offset', this._positionTextArea, this);
},
_commitSelectedConfiguration: function (selectedConfiguration, oldSelectedConfiguration) {
if (this.$.removeEmptyTextConfiguration && oldSelectedConfiguration &&
oldSelectedConfiguration.type === "text" && oldSelectedConfiguration.$.textFlow &&
this.$.product) {
var text = oldSelectedConfiguration.$.textFlow.text(0, -1);
if (/^[\s\n\r]*$/.test(text)) {
this.$.product.$.configurations.remove(oldSelectedConfiguration);
}
}
this._positionTextArea();
},
keyUp: function (e) {
if (!this.$stage.$browser.isIOS) {
return;
}
if (this.$keyPressHandled) {
return;
}
// this is a work-a-round because keypress event isn't available on android
var value = this.$.textArea.$el.value;
if (this.$lastValue !== value && value && value.length !== 0) {
// input
var c = value.substr(-1),
viewer = this.$.selectedConfigurationViewer;
if (c && viewer) {
viewer.addChar(c);
}
}
this.$lastValue = value;
this.$.textArea.$el.value = "";
},
keyPress: function (e) {
if (!this.$stage.$browser.isIOS) {
return;
}
this.$keyPressHandled = true;
this.$.textArea.$el.value = "";
this._keyPressHandler(e.domEvent);
},
keyDown: function (e) {
if (!this.$stage.$browser.isIOS) {
return;
}
this.$keyPressHandled = false;
this._keyDownHandler(e.domEvent);
},
_positionTextArea: function () {
try {
var position = null,
selectedConfiguration = this.$.selectedConfiguration;
if (!this.$textAreaFocused && this.$.editable && selectedConfiguration && selectedConfiguration.type === "text" && this.$.productViewerSvg && this.$.productViewerSvg.$currentProductTypeViewViewer) {
var factor = this.$.productViewerSvg.localToGlobalFactor(),
view = this.$.productViewerSvg.$currentProductTypeViewViewer.$._view,
viewMap;
for (var i = 0; i < view.$.viewMaps.$items.length; i++) {
if (view.$.viewMaps.$items[i].$.printArea === selectedConfiguration.$.printArea) {
viewMap = view.$.viewMaps.$items[i];
break;
}
}
if (viewMap) {
position = {
x: (viewMap.get("offset.x") + selectedConfiguration.get("offset.x")) * factor.x + 14,
y: (viewMap.get("offset.y") + selectedConfiguration.get("offset.y")) * factor.y - 2,
width: selectedConfiguration.width() * factor.x - 25,
height: selectedConfiguration.height() * factor.y - 10
};
}
}
this.set("textAreaPosition", position);
} catch (e) {
if (this.$.bus) {
this.$.bus.trigger("Application.Error", e);
} else {
throw e;
}
}
},
_onConfigurationsReset: function () {
this.set('selectedConfiguration', null);
},
_onConfigurationViewerAdded: function (e) {
var viewer = e.$;
if (viewer) {
if (viewer.$.configuration === this.$.selectedConfiguration) {
this.set('selectedConfigurationViewer', viewer);
}
this.trigger('add:configurationViewer', viewer);
}
},
_clickHandler: function (e) {
if (this.$.editable && !(e.isDefaultPrevented || e.defaultPrevented) && e.domEvent && e.domEvent.target !== this.$.textArea.$el) {
this.set('selectedConfiguration', null);
}
this.set('focused', true);
},
_commitChangedAttributes: function ($) {
this.callBase();
if ($ && $.hasOwnProperty('selectedConfiguration')) {
var configuration = $['selectedConfiguration'],
viewer = null;
if (!configuration) {
viewer = null;
} else {
viewer = this.getViewerForConfiguration(configuration);
}
this.trigger('on:configurationSelect', configuration);
this.set('selectedConfigurationViewer', viewer);
}
},
getViewerForConfiguration: function (configuration) {
if (this.$.productViewerSvg) {
return this.$.productViewerSvg.getViewerForConfiguration(configuration);
}
return null;
},
_keyDownHandler: function (e) {
var self = this,
product = self.$.product;
var viewer = this.$.selectedConfigurationViewer;
if (viewer) {
viewer._keyDown(e);
if (e.defaultPrevented) {
return;
}
}
var selectedConfiguration = self.$.selectedConfiguration;
if (selectedConfiguration && product) {
var deltaX = 0,
deltaY = 0;
switch (e.keyCode) {
case 40:
deltaY = 1;
break;
case 38:
deltaY = -1;
break;
case 37:
deltaX = -1;
break;
case 39:
deltaX = 1;
}
if (deltaX || deltaY) {
if (e.shiftKey) {
deltaX *= 10;
deltaY *= 10;
}
var offset = selectedConfiguration.$.offset.clone();
offset.set({
x: offset.$.x + deltaX,
y: offset.$.y + deltaY
});
selectedConfiguration.set('offset', offset);
e.preventDefault();
e.stopPropagation();
}
if (e.keyCode === 8 || e.keyCode === 46) {
// backspace || delete --> remove selected configuration
product.$.configurations.remove(selectedConfiguration);
self.set('selectedConfiguration', null);
e.preventDefault();
e.stopPropagation();
}
}
},
_keyPressHandler: function (e) {
var viewer = this.$.selectedConfigurationViewer;
if (viewer) {
viewer._keyPress(e);
}
},
_bindDomEvents: function () {
if (this.runsInBrowser() && this.$.editable) {
var self = this;
this.bind("on:click", this._clickHandler, this);
this.$stage.bind('on:blur', function () {
self.set('focused', false);
});
this.$stage.bind('on:focus', function () {
self.set('focused', true);
});
this.callBase();
}
},
textAreaFocused: function () {
this.set('focused', true);
if (this.$stage.$browser.isIOS) {
this.$.textArea.set('visibility', 'hidden');
} else {
// android hack
this.$stage.set('height', this.$stage.$el.offsetHeight);
var self = this;
setTimeout(function () {
self._positionTextArea();
self.$.textArea.set({
opacity: 1.0,
value: self.$.selectedConfiguration.$.textFlow.text(0, -1, "\n").replace(/\n$/, "")
});
self.$.textArea.set('opacity', 1.0);
}, 1000);
}
this.addClass("text-area-active");
},
_delegateEvent: function (e) {
var viewer = this.$.selectedConfigurationViewer;
if (viewer) {
viewer._down(e.domEvent, viewer._isGesture(e.domEvent) ? "gesture" : "move");
}
this.$pointerMoveEventTriggerd = false;
},
_textAreaMove: function () {
this.$pointerMoveEventTriggerd = true;
},
_endTextAreaMove: function (e) {
if (!this.$pointerMoveEventTriggerd) {
e.target.focus();
}
},
textAreaBlured: function () {
this.set('focused', false);
if (this.$stage.$browser.isIOS) {
this.$.textArea.set('visibility', 'visible');
var viewer = this.$.selectedConfigurationViewer;
if (viewer) {
viewer.set('focused', false);
}
} else {
// android hack
var self = this;
setTimeout(function () {
self.$stage.set('height', '100%');
}, 200);
this.$.productManager.setTextForConfiguration(this.$.textArea.$.value, this.$.selectedConfiguration);
self.$.textArea.set('opacity', 0);
}
this.removeClass("text-area-active");
},
showTextAreaOverlay: function () {
return this.$.editable &&
this.$.selectedConfiguration && this.$.selectedConfiguration.type === "text" &&
this.runsInBrowser() && ('ontouchstart' in window);
}.onChange("selectedConfiguration", "editable")
});
}); | fixed text editing for android
| sprd/view/ProductViewerClass.js | fixed text editing for android | <ide><path>prd/view/ProductViewerClass.js
<ide>
<ide> _delegateEvent: function (e) {
<ide>
<del> var viewer = this.$.selectedConfigurationViewer;
<del> if (viewer) {
<del> viewer._down(e.domEvent, viewer._isGesture(e.domEvent) ? "gesture" : "move");
<del> }
<del>
<del> this.$pointerMoveEventTriggerd = false;
<add> if(this.$stage.$browser.isIOS || this.$.textArea.get('opacity') == 0) {
<add> var viewer = this.$.selectedConfigurationViewer;
<add> if (viewer) {
<add> viewer._down(e.domEvent, viewer._isGesture(e.domEvent) ? "gesture" : "move");
<add> }
<add>
<add> this.$pointerMoveEventTriggerd = false;
<add> }
<ide>
<ide> },
<ide> _textAreaMove: function () { |
|
Java | mit | 9f81170f02a64b14b54808478dbce51bf2837573 | 0 | mmcdermo/cordova-pushnotifications,mmcdermo/cordova-pushnotifications,mmcdermo/cordova-pushnotifications,mmcdermo/cordova-pushnotifications,mmcdermo/cordova-pushnotifications | package com.plugin.gcm;
import org.json.JSONException;
import org.json.JSONObject;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import com.google.android.gcm.GCMBaseIntentService;
@SuppressLint("NewApi")
public class GCMIntentService extends GCMBaseIntentService {
private static final String TAG = "GCMIntentService";
public GCMIntentService() {
super("GCMIntentService");
}
@Override
public void onRegistered(Context context, String regId) {
Log.v(TAG, "onRegistered: "+ regId);
JSONObject json;
try
{
json = new JSONObject().put("event", "registered");
json.put("regid", regId);
Log.v(TAG, "onRegistered: " + json.toString());
// Send this JSON data to the JavaScript application above EVENT should be set to the msg type
// In this case this is the registration ID
PushPlugin.sendJavascript( json );
}
catch( JSONException e)
{
// No message to the user is sent, JSON failed
Log.e(TAG, "onRegistered: JSON exception");
}
}
@Override
public void onUnregistered(Context context, String regId) {
Log.d(TAG, "onUnregistered - regId: " + regId);
}
@Override
protected void onMessage(Context context, Intent intent) {
Log.d(TAG, "onMessage - context: " + context);
// Extract the payload from the message
Bundle extras = intent.getExtras();
if (extras != null)
{
// if we are in the foreground, just surface the payload, else post it to the statusbar
if (PushPlugin.isInForeground()) {
extras.putBoolean("foreground", true);
PushPlugin.sendExtras(extras);
}
else {
extras.putBoolean("foreground", false);
// Send a notification if there is a message
if (extras.getString("message") != null && extras.getString("message").length() != 0) {
createNotification(context, extras);
}
}
}
}
public void createNotification(Context context, Bundle extras)
{
NotificationManager mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
String appName = getAppName(this);
Intent notificationIntent = new Intent(this, PushHandlerActivity.class);
notificationIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
notificationIntent.putExtra("pushBundle", extras);
PendingIntent contentIntent = PendingIntent.getActivity(this, 0, notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);
int defaults = Notification.DEFAULT_ALL;
if (extras.getString("defaults") != null) {
try {
defaults = Integer.parseInt(extras.getString("defaults"));
} catch (NumberFormatException e) {}
}
NotificationCompat.Builder mBuilder =
new NotificationCompat.Builder(context)
.setDefaults(defaults)
.setSmallIcon(context.getApplicationInfo().icon)
.setWhen(System.currentTimeMillis())
.setContentTitle(extras.getString("title"))
.setTicker(extras.getString("title"))
.setContentIntent(contentIntent)
.setAutoCancel(true);
if (true || extras.getString("image_url") != null){
URL url = new URL("https://https://hearths3-kaleidosllc.netdna-ssl.com/profile/4.jpg"); //extras.getString("image_url"));
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
connection.setDoInput(true);
connection.connect();
InputStream input = connection.getInputStream();
Bitmap bitmap = BitmapFactory.decodeStream(input);
mBuilder.setLargeIcon(bitmap);
}
String message = extras.getString("message");
if (message != null) {
mBuilder.setContentText(message);
} else {
mBuilder.setContentText("<missing message content>");
}
String msgcnt = extras.getString("msgcnt");
if (msgcnt != null) {
mBuilder.setNumber(Integer.parseInt(msgcnt));
}
int notId = 0;
try {
notId = Integer.parseInt(extras.getString("notId"));
}
catch(NumberFormatException e) {
Log.e(TAG, "Number format exception - Error parsing Notification ID: " + e.getMessage());
}
catch(Exception e) {
Log.e(TAG, "Number format exception - Error parsing Notification ID" + e.getMessage());
}
mNotificationManager.notify((String) appName, notId, mBuilder.build());
}
private static String getAppName(Context context)
{
CharSequence appName =
context
.getPackageManager()
.getApplicationLabel(context.getApplicationInfo());
return (String)appName;
}
@Override
public void onError(Context context, String errorId) {
Log.e(TAG, "onError - errorId: " + errorId);
}
}
| src/android/com/plugin/gcm/GCMIntentService.java | package com.plugin.gcm;
import org.json.JSONException;
import org.json.JSONObject;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.NotificationCompat;
import android.util.Log;
import com.google.android.gcm.GCMBaseIntentService;
@SuppressLint("NewApi")
public class GCMIntentService extends GCMBaseIntentService {
private static final String TAG = "GCMIntentService";
public GCMIntentService() {
super("GCMIntentService");
}
@Override
public void onRegistered(Context context, String regId) {
Log.v(TAG, "onRegistered: "+ regId);
JSONObject json;
try
{
json = new JSONObject().put("event", "registered");
json.put("regid", regId);
Log.v(TAG, "onRegistered: " + json.toString());
// Send this JSON data to the JavaScript application above EVENT should be set to the msg type
// In this case this is the registration ID
PushPlugin.sendJavascript( json );
}
catch( JSONException e)
{
// No message to the user is sent, JSON failed
Log.e(TAG, "onRegistered: JSON exception");
}
}
@Override
public void onUnregistered(Context context, String regId) {
Log.d(TAG, "onUnregistered - regId: " + regId);
}
@Override
protected void onMessage(Context context, Intent intent) {
Log.d(TAG, "onMessage - context: " + context);
// Extract the payload from the message
Bundle extras = intent.getExtras();
if (extras != null)
{
// if we are in the foreground, just surface the payload, else post it to the statusbar
if (PushPlugin.isInForeground()) {
extras.putBoolean("foreground", true);
PushPlugin.sendExtras(extras);
}
else {
extras.putBoolean("foreground", false);
// Send a notification if there is a message
if (extras.getString("message") != null && extras.getString("message").length() != 0) {
createNotification(context, extras);
}
}
}
}
public void createNotification(Context context, Bundle extras)
{
NotificationManager mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
String appName = getAppName(this);
Intent notificationIntent = new Intent(this, PushHandlerActivity.class);
notificationIntent.addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP | Intent.FLAG_ACTIVITY_CLEAR_TOP);
notificationIntent.putExtra("pushBundle", extras);
PendingIntent contentIntent = PendingIntent.getActivity(this, 0, notificationIntent, PendingIntent.FLAG_UPDATE_CURRENT);
int defaults = Notification.DEFAULT_ALL;
if (extras.getString("defaults") != null) {
try {
defaults = Integer.parseInt(extras.getString("defaults"));
} catch (NumberFormatException e) {}
}
NotificationCompat.Builder mBuilder =
new NotificationCompat.Builder(context)
.setDefaults(defaults)
.setSmallIcon(context.getApplicationInfo().icon)
.setGroup(extras.getString("group"))
.setWhen(System.currentTimeMillis())
.setContentTitle(extras.getString("title"))
.setTicker(extras.getString("title"))
.setContentIntent(contentIntent)
.setAutoCancel(true);
String message = extras.getString("message");
if (message != null) {
mBuilder.setContentText(message);
} else {
mBuilder.setContentText("<missing message content>");
}
String msgcnt = extras.getString("msgcnt");
if (msgcnt != null) {
mBuilder.setNumber(Integer.parseInt(msgcnt));
}
int notId = 0;
try {
notId = Integer.parseInt(extras.getString("notId"));
}
catch(NumberFormatException e) {
Log.e(TAG, "Number format exception - Error parsing Notification ID: " + e.getMessage());
}
catch(Exception e) {
Log.e(TAG, "Number format exception - Error parsing Notification ID" + e.getMessage());
}
mNotificationManager.notify((String) appName, notId, mBuilder.build());
}
private static String getAppName(Context context)
{
CharSequence appName =
context
.getPackageManager()
.getApplicationLabel(context.getApplicationInfo());
return (String)appName;
}
@Override
public void onError(Context context, String errorId) {
Log.e(TAG, "onError - errorId: " + errorId);
}
}
| Image test
| src/android/com/plugin/gcm/GCMIntentService.java | Image test | <ide><path>rc/android/com/plugin/gcm/GCMIntentService.java
<ide> } catch (NumberFormatException e) {}
<ide> }
<ide>
<add>
<ide> NotificationCompat.Builder mBuilder =
<ide> new NotificationCompat.Builder(context)
<ide> .setDefaults(defaults)
<ide> .setSmallIcon(context.getApplicationInfo().icon)
<del> .setGroup(extras.getString("group"))
<ide> .setWhen(System.currentTimeMillis())
<ide> .setContentTitle(extras.getString("title"))
<ide> .setTicker(extras.getString("title"))
<ide> .setContentIntent(contentIntent)
<ide> .setAutoCancel(true);
<add>
<add> if (true || extras.getString("image_url") != null){
<add> URL url = new URL("https://https://hearths3-kaleidosllc.netdna-ssl.com/profile/4.jpg"); //extras.getString("image_url"));
<add> HttpURLConnection connection = (HttpURLConnection) url.openConnection();
<add> connection.setDoInput(true);
<add> connection.connect();
<add> InputStream input = connection.getInputStream();
<add> Bitmap bitmap = BitmapFactory.decodeStream(input);
<add> mBuilder.setLargeIcon(bitmap);
<add> }
<ide>
<ide> String message = extras.getString("message");
<ide> if (message != null) { |
|
Java | mpl-2.0 | 6b2ca5970ae6552074df97a68f3dce9f381f41f5 | 0 | d-lopes/stock-facilitator,d-lopes/stock-facilitator,d-lopes/stock-facilitator | package de.dlopes.stocks.facilitator.data.util;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest
public class YahooSymbolIsinMappingTest {
YahooSymbolIsinMapping _classUnderTest;
List<Map<String,String>> stocks;
@Before
public void setup() {
_classUnderTest = new YahooSymbolIsinMapping();
_classUnderTest.query = _classUnderTest.new Query();
stocks = new ArrayList<Map<String,String>>();
Map<String, List<Map<String,String>>> results = new HashMap<String, List<Map<String,String>>>();
results.put("stock", stocks);
_classUnderTest.query.results = results;
}
@Test
public void testConvertToMap() {
// setup test data:
// ... "results":{"stock":
// [
// {"symbol":"DE0005785802","Isin":"FME.DE"},
// {"symbol":"DE000BAY0017","Isin":"BAYN.DE"},
// {"symbol":"DE000PSM7770","Isin":"PSM.DE"},
// {"symbol":"DE0006231004","Isin":"DTB.DE"},
// {"symbol":"DE000CBK1001","Isin":"DBK.DE"},
// {"symbol":"DE000A1ML7J1","Isin":"VON.DE"}
// ]}}}
String[] isins = new String[] { "DE0005785802", "DE000BAY0017", "DE000PSM7770", "DE0006231004",
"DE000CBK1001", "DE000A1ML7J1"};
String[] symbols = new String[] { "FME.DE", "BAYN.DE", "PSM.DE", "DTB.DE", "DBK.DE", "VON.DE" };
for (int i = 0; i < isins.length; i++) {
createTestDataEntry(isins[i], symbols[i]);
}
// run the test:
Map<String,String> map = _classUnderTest.covertToMap();
// assert there is only one value in the list
assertEquals("unexpected amount of results!", isins.length, map.size());
// assert only all mappings have been transfered
for (int i = 0; i < isins.length; i++) {
assertEquals("Wrong ISIN for symbol " + symbols[i] + " found!", isins[i], map.get(symbols[i]));
}
}
@Test
public void testConvertToMapWithDummyISIN() {
// setup test data:
// ... "results":{"stock":
// [
// {"symbol":"DE0005785802","Isin":"FME.DE"},
// {"symbol":"XX0000000000","Isin":null}
// ]}}}
createTestDataEntry("DE0005785802", "FME.DE");
createTestDataEntry(YahooSymbolIsinMapping.DUMMY_ISIN, null);
// run the test:
Map<String,String> map = _classUnderTest.covertToMap();
// assert there is only one value in the list
assertEquals("unexpected amount of results!", 1, map.size());
// assert only the mapping for the valid ISIN is in the list => that means the dummy ISIN was
// neglected
assertEquals("Mapping for valid ISIN was not found in the result!", "DE0005785802", map.get("FME.DE"));
}
@Test
public void testConvertToMapWithoutMapping() {
// test data is not given on purpose!
// run the test:
Map<String,String> map = _classUnderTest.covertToMap(false);
// assert there is only one value in the list
assertEquals("unexpected amount of results!", 0, map.size());
}
@Test
public void testConvertToMapWithISIN2Symbol() {
// setup test data:
// ... "results":{"stock":
// [
// {"symbol":"DE0005785802","Isin":"FME.DE"},
// {"symbol":"DE000BAY0017","Isin":"BAYN.DE"},
// ]}}}
createTestDataEntry("DE0005785802", "FME.DE");
createTestDataEntry("DE000BAY0017", "BAYN.DE");
// run the test:
Map<String,String> map = _classUnderTest.covertToMap(false);
// assert there is only one value in the list
assertEquals("unexpected amount of results!", 2, map.size());
// assert only the mapping for the valid ISIN is in the list => that means the dummy ISIN was
// neglected
assertEquals("Mapping for Symbol was not found in the result!", "FME.DE", map.get("DE0005785802"));
assertEquals("Mapping for Symbol was not found in the result!", "BAYN.DE", map.get("DE000BAY0017"));
}
@Test
public void testGetIsinsWithoutMapping() {
// setup test data:
// ... "results":{"stock":
// [
// {"symbol":"DE0005785802","Isin":"FME.DE"},
// {"symbol":"DE000BAY0017","Isin":null},
// {"symbol":"DE000PSM7770","Isin":"PSM.DE"},
// {"symbol":"DE0006231004","Isin":null"},
// {"symbol":"DE000CBK1001","Isin":null"},
// {"symbol":"DE000A1ML7J1","Isin":"VON.DE"}
// ]}}}
String[] isins = new String[] { "DE0005785802", "DE000BAY0017", "DE000PSM7770", "DE0006231004",
"DE000CBK1001", "DE000A1ML7J1"};
String[] symbols = new String[] { "FME.DE", null, "PSM.DE", null, null, "VON.DE" };
int nullValues = 0;
for (int i = 0; i < isins.length; i++) {
createTestDataEntry(isins[i], symbols[i]);
nullValues += symbols[i] == null ? 1 : 0; // count the null values in the symbols
}
// run the test:
Map<String,String> map = _classUnderTest.covertToMap();
List<String> list = _classUnderTest.getIsinsWithoutMapping();
// assert there is only one value in the list
assertEquals("unexpected amount of results!", isins.length - nullValues, map.size());
// assert only the valid mapping have been transfered
for (int i = 0; i < isins.length; i++) {
if (symbols[i] == null) {
continue;
}
assertEquals("Wrong ISIN for symbol " + symbols[i] + " found!", isins[i], map.get(symbols[i]));
}
// assert that all missing mapping are found
for (int i = 0; i < isins.length; i++) {
if (symbols[i] != null) {
continue;
}
assertTrue("ISIN " + isins[i] + " not recognized as missing mapping!", list.contains(isins[i]));
}
}
/* ------------------- convinience methods ----------------------------*/
private void createTestDataEntry(String isin, String symbol) {
Map<String,String> entry = new HashMap<String,String>();
entry.put("symbol", isin);
entry.put("Isin", symbol);
stocks.add(entry);
}
}
| src/test/java/de/dlopes/stocks/facilitator/data/util/YahooSymbolIsinMappingTest.java | package de.dlopes.stocks.facilitator.data.util;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest
public class YahooSymbolIsinMappingTest {
YahooSymbolIsinMapping _classUnderTest;
List<Map<String,String>> stocks;
@Before
public void setup() {
_classUnderTest = new YahooSymbolIsinMapping();
_classUnderTest.query = _classUnderTest.new Query();
stocks = new ArrayList<Map<String,String>>();
Map<String, List<Map<String,String>>> results = new HashMap<String, List<Map<String,String>>>();
results.put("stock", stocks);
_classUnderTest.query.results = results;
}
@Test
public void testConvertToMap() {
// setup test data:
// ... "results":{"stock":
// [
// {"symbol":"DE0005785802","Isin":"FME.DE"},
// {"symbol":"DE000BAY0017","Isin":"BAYN.DE"},
// {"symbol":"DE000PSM7770","Isin":"PSM.DE"},
// {"symbol":"DE0006231004","Isin":"DTB.DE"},
// {"symbol":"DE000CBK1001","Isin":"DBK.DE"},
// {"symbol":"DE000A1ML7J1","Isin":"VON.DE"}
// ]}}}
String[] isins = new String[] { "DE0005785802", "DE000BAY0017", "DE000PSM7770", "DE0006231004",
"DE000CBK1001", "DE000A1ML7J1"};
String[] symbols = new String[] { "FME.DE", "BAYN.DE", "PSM.DE", "DTB.DE", "DBK.DE", "VON.DE" };
for (int i = 0; i < isins.length; i++) {
createTestDataEntry(isins[i], symbols[i]);
}
// run the test:
Map<String,String> map = _classUnderTest.covertToMap();
// assert there is only one value in the list
assertEquals("unexpected amount of results!", isins.length, map.size());
// assert only all mappings have been transfered
for (int i = 0; i < isins.length; i++) {
assertEquals("Wrong ISIN for symbol " + symbols[i] + " found!", isins[i], map.get(symbols[i]));
}
}
@Test
public void testConvertToMapWithDummyISIN() {
// setup test data:
// ... "results":{"stock":
// [
// {"symbol":"DE0005785802","Isin":"FME.DE"},
// {"symbol":"XX0000000000","Isin":null}
// ]}}}
createTestDataEntry("DE0005785802", "FME.DE");
createTestDataEntry(YahooSymbolIsinMapping.DUMMY_ISIN, null);
// run the test:
Map<String,String> map = _classUnderTest.covertToMap();
// assert there is only one value in the list
assertEquals("unexpected amount of results!", 1, map.size());
// assert only the mapping for the valid ISIN is in the list => that means the dummy ISIN was
// neglected
assertEquals("Mapping for valid ISIN was not found in the result!", "DE0005785802", map.get("FME.DE"));
}
@Test
public void testGetIsinsWithoutMapping() {
// setup test data:
// ... "results":{"stock":
// [
// {"symbol":"DE0005785802","Isin":"FME.DE"},
// {"symbol":"DE000BAY0017","Isin":null},
// {"symbol":"DE000PSM7770","Isin":"PSM.DE"},
// {"symbol":"DE0006231004","Isin":null"},
// {"symbol":"DE000CBK1001","Isin":null"},
// {"symbol":"DE000A1ML7J1","Isin":"VON.DE"}
// ]}}}
String[] isins = new String[] { "DE0005785802", "DE000BAY0017", "DE000PSM7770", "DE0006231004",
"DE000CBK1001", "DE000A1ML7J1"};
String[] symbols = new String[] { "FME.DE", null, "PSM.DE", null, null, "VON.DE" };
int nullValues = 0;
for (int i = 0; i < isins.length; i++) {
createTestDataEntry(isins[i], symbols[i]);
nullValues += symbols[i] == null ? 1 : 0; // count the null values in the symbols
}
// run the test:
Map<String,String> map = _classUnderTest.covertToMap();
List<String> list = _classUnderTest.getIsinsWithoutMapping();
// assert there is only one value in the list
assertEquals("unexpected amount of results!", isins.length - nullValues, map.size());
// assert only the valid mapping have been transfered
for (int i = 0; i < isins.length; i++) {
if (symbols[i] == null) {
continue;
}
assertEquals("Wrong ISIN for symbol " + symbols[i] + " found!", isins[i], map.get(symbols[i]));
}
// assert that all missing mapping are found
for (int i = 0; i < isins.length; i++) {
if (symbols[i] != null) {
continue;
}
assertTrue("ISIN " + isins[i] + " not recognized as missing mapping!", list.contains(isins[i]));
}
}
/* ------------------- convinience methods ----------------------------*/
private void createTestDataEntry(String isin, String symbol) {
Map<String,String> entry = new HashMap<String,String>();
entry.put("symbol", isin);
entry.put("Isin", symbol);
stocks.add(entry);
}
}
| a few more testcases for YahooSymbilIsinMapping.java | src/test/java/de/dlopes/stocks/facilitator/data/util/YahooSymbolIsinMappingTest.java | a few more testcases for YahooSymbilIsinMapping.java | <ide><path>rc/test/java/de/dlopes/stocks/facilitator/data/util/YahooSymbolIsinMappingTest.java
<ide> }
<ide>
<ide> @Test
<add> public void testConvertToMapWithoutMapping() {
<add>
<add> // test data is not given on purpose!
<add>
<add> // run the test:
<add> Map<String,String> map = _classUnderTest.covertToMap(false);
<add>
<add> // assert there is only one value in the list
<add> assertEquals("unexpected amount of results!", 0, map.size());
<add>
<add> }
<add>
<add>
<add> @Test
<add> public void testConvertToMapWithISIN2Symbol() {
<add>
<add> // setup test data:
<add> // ... "results":{"stock":
<add> // [
<add> // {"symbol":"DE0005785802","Isin":"FME.DE"},
<add> // {"symbol":"DE000BAY0017","Isin":"BAYN.DE"},
<add> // ]}}}
<add> createTestDataEntry("DE0005785802", "FME.DE");
<add> createTestDataEntry("DE000BAY0017", "BAYN.DE");
<add>
<add> // run the test:
<add> Map<String,String> map = _classUnderTest.covertToMap(false);
<add>
<add> // assert there is only one value in the list
<add> assertEquals("unexpected amount of results!", 2, map.size());
<add>
<add> // assert only the mapping for the valid ISIN is in the list => that means the dummy ISIN was
<add> // neglected
<add> assertEquals("Mapping for Symbol was not found in the result!", "FME.DE", map.get("DE0005785802"));
<add> assertEquals("Mapping for Symbol was not found in the result!", "BAYN.DE", map.get("DE000BAY0017"));
<add>
<add> }
<add>
<add> @Test
<ide> public void testGetIsinsWithoutMapping() {
<ide>
<ide> // setup test data: |
|
Java | apache-2.0 | 32fba5876c7bde7910099610b38ffe73b6575854 | 0 | Mygraduate/Supervisor_Java,Mygraduate/Supervisor_Java | package com.graduate.api.college;
import com.graduate.common.BaseController;
import com.graduate.common.BaseJsonData;
import com.graduate.system.college.model.College;
import com.graduate.system.college.service.CollegeService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.List;
/**
* Created by Administrator on 2017/5/15.
*/
@RestController
@RequestMapping("api/college")
@Api(value = "api/college", description = "学院接口")
public class CollegeController extends BaseController {
@Autowired
private CollegeService<College> collegeService;
@ApiOperation(value="新增学院", notes="")
@PreAuthorize("hasRole('ROLE_ADMIN')")
@RequestMapping(value={"/create"}, method= RequestMethod.POST)
public BaseJsonData createCollege(@RequestBody College college) {
BaseJsonData data = new BaseJsonData();
try {
College college1 = collegeService.save(college);
return data.ok();
}catch (Exception e){
e.printStackTrace();
return data.fail(e.getMessage());
}
}
@ApiOperation(value="获取学院列表", notes="")
@PreAuthorize("hasRole('ROLE_ADMIN')")
@RequestMapping(value={"/list"}, method=RequestMethod.POST)
public BaseJsonData getCollegeList() {
BaseJsonData data = new BaseJsonData();
HashMap<String,Object> map = new HashMap<>();
try{
List<College> collegelist = collegeService.findAll();
map.put("info",collegelist);
return data.ok(map);
}catch (Exception e){
e.printStackTrace();
return data.fail(e.getMessage());
}
}
@ApiOperation(value="删除学院", notes="")
@PreAuthorize("hasRole('ROLE_ADMIN')")
@RequestMapping(value={"/delete"}, method=RequestMethod.POST)
public BaseJsonData deleteCollegeList(@RequestBody College college) {
BaseJsonData data = new BaseJsonData();
try{
collegeService.delete(college);
return data.ok();
}catch (Exception e){
e.printStackTrace();
return data.fail(e.getMessage());
}
}
@ApiOperation(value="修改学院", notes="")
@PreAuthorize("hasRole('ROLE_ADMIN')")
@RequestMapping(value={"/update"}, method=RequestMethod.POST)
public BaseJsonData updateCollegeList(@RequestBody College college) {
BaseJsonData data = new BaseJsonData();
HashMap<String,Object> map = new HashMap<>();
try{
College college1=collegeService.findOne(college.getId());
map.put("info",college1);
return data.ok(map);
}catch (Exception e){
e.printStackTrace();
return data.fail(e.getMessage());
}
}
}
| src/main/java/com/graduate/api/college/CollegeController.java | package com.graduate.api.college;
import com.graduate.common.BaseController;
import com.graduate.common.BaseJsonData;
import com.graduate.system.college.model.College;
import com.graduate.system.college.service.CollegeService;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.List;
/**
* Created by Administrator on 2017/5/15.
*/
@RestController
@RequestMapping("api/college")
@Api(value = "api/college", description = "学院接口")
public class CollegeController extends BaseController {
@Autowired
private CollegeService<College> collegeService;
@ApiOperation(value="新增学院", notes="")
@PreAuthorize("hasRole('ROLE_ADMIN')")
@RequestMapping(value={"/create"}, method= RequestMethod.POST)
public BaseJsonData createCollege(@RequestBody College college) {
BaseJsonData data = new BaseJsonData();
try {
College college1 = collegeService.save(college);
return data.ok();
}catch (Exception e){
e.printStackTrace();
return data.fail();
}
}
@ApiOperation(value="获取学院列表", notes="")
@PreAuthorize("hasRole('ROLE_ADMIN')")
@RequestMapping(value={"/list"}, method=RequestMethod.POST)
public BaseJsonData getCollegeList() {
BaseJsonData data = new BaseJsonData();
HashMap<String,Object> map = new HashMap<>();
try{
List<College> collegelist = collegeService.findAll();
map.put("info",collegelist);
return data.ok(map);
}catch (Exception e){
e.printStackTrace();
return data.fail();
}
}
@ApiOperation(value="删除学院", notes="")
@PreAuthorize("hasRole('ROLE_ADMIN')")
@RequestMapping(value={"/delete"}, method=RequestMethod.POST)
public BaseJsonData deleteCollegeList(@RequestBody College college) {
BaseJsonData data = new BaseJsonData();
try{
collegeService.delete(college);
return data.ok();
}catch (Exception e){
e.printStackTrace();
return data.fail();
}
}
@ApiOperation(value="修改学院", notes="")
@PreAuthorize("hasRole('ROLE_ADMIN')")
@RequestMapping(value={"/update"}, method=RequestMethod.POST)
public BaseJsonData updateCollegeList(@RequestBody College college) {
BaseJsonData data = new BaseJsonData();
HashMap<String,Object> map = new HashMap<>();
try{
College college1=collegeService.findOne(college.getId());
map.put("info",college1);
return data.ok(map);
}catch (Exception e){
e.printStackTrace();
return data.fail();
}
}
}
| update
| src/main/java/com/graduate/api/college/CollegeController.java | update | <ide><path>rc/main/java/com/graduate/api/college/CollegeController.java
<ide> return data.ok();
<ide> }catch (Exception e){
<ide> e.printStackTrace();
<del> return data.fail();
<add> return data.fail(e.getMessage());
<ide> }
<ide> }
<ide>
<ide> return data.ok(map);
<ide> }catch (Exception e){
<ide> e.printStackTrace();
<del> return data.fail();
<add> return data.fail(e.getMessage());
<ide> }
<ide> }
<ide>
<ide> return data.ok();
<ide> }catch (Exception e){
<ide> e.printStackTrace();
<del> return data.fail();
<add> return data.fail(e.getMessage());
<ide> }
<ide> }
<ide>
<ide> return data.ok(map);
<ide> }catch (Exception e){
<ide> e.printStackTrace();
<del> return data.fail();
<add> return data.fail(e.getMessage());
<ide> }
<ide> }
<ide> |
|
JavaScript | mit | 2a6a9a59d64bd2b0e0bb8c0dc4e5e4ceba237c98 | 0 | silverbux/rsjs | 63f84b0a-2e9d-11e5-b747-a45e60cdfd11 | helloWorld.js | 63ebf8b5-2e9d-11e5-9936-a45e60cdfd11 | 63f84b0a-2e9d-11e5-b747-a45e60cdfd11 | helloWorld.js | 63f84b0a-2e9d-11e5-b747-a45e60cdfd11 | <ide><path>elloWorld.js
<del>63ebf8b5-2e9d-11e5-9936-a45e60cdfd11
<add>63f84b0a-2e9d-11e5-b747-a45e60cdfd11 |
|
Java | apache-2.0 | c41e90aa4a741cbab777f6136bdd202798755ccb | 0 | GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit,GerritCodeReview/gerrit | // Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.git;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.MoreCollectors.onlyElement;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.common.truth.Truth8.assertThat;
import static com.google.gerrit.acceptance.GitUtil.assertPushOk;
import static com.google.gerrit.acceptance.GitUtil.assertPushRejected;
import static com.google.gerrit.acceptance.GitUtil.pushHead;
import static com.google.gerrit.acceptance.GitUtil.pushOne;
import static com.google.gerrit.acceptance.PushOneCommit.FILE_NAME;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allow;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allowCapability;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allowLabel;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.block;
import static com.google.gerrit.common.FooterConstants.CHANGE_ID;
import static com.google.gerrit.extensions.client.ListChangesOption.ALL_REVISIONS;
import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_REVISION;
import static com.google.gerrit.extensions.client.ListChangesOption.DETAILED_ACCOUNTS;
import static com.google.gerrit.extensions.client.ListChangesOption.DETAILED_LABELS;
import static com.google.gerrit.extensions.client.ListChangesOption.MESSAGES;
import static com.google.gerrit.extensions.common.testing.EditInfoSubject.assertThat;
import static com.google.gerrit.server.git.receive.ReceiveConstants.PUSH_OPTION_SKIP_VALIDATION;
import static com.google.gerrit.server.group.SystemGroupBackend.ANONYMOUS_USERS;
import static com.google.gerrit.server.group.SystemGroupBackend.REGISTERED_USERS;
import static com.google.gerrit.server.project.testing.TestLabels.label;
import static com.google.gerrit.server.project.testing.TestLabels.value;
import static java.util.Comparator.comparing;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Streams;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.ExtensionRegistry;
import com.google.gerrit.acceptance.ExtensionRegistry.Registration;
import com.google.gerrit.acceptance.GitUtil;
import com.google.gerrit.acceptance.PushOneCommit;
import com.google.gerrit.acceptance.SkipProjectClone;
import com.google.gerrit.acceptance.TestAccount;
import com.google.gerrit.acceptance.TestProjectInput;
import com.google.gerrit.acceptance.UseClockStep;
import com.google.gerrit.acceptance.config.GerritConfig;
import com.google.gerrit.acceptance.testsuite.project.ProjectOperations;
import com.google.gerrit.acceptance.testsuite.request.RequestScopeOperations;
import com.google.gerrit.common.data.GlobalCapability;
import com.google.gerrit.entities.AccountGroup;
import com.google.gerrit.entities.Address;
import com.google.gerrit.entities.BooleanProjectConfig;
import com.google.gerrit.entities.Change;
import com.google.gerrit.entities.ChangeMessage;
import com.google.gerrit.entities.LabelType;
import com.google.gerrit.entities.PatchSet;
import com.google.gerrit.entities.Permission;
import com.google.gerrit.entities.Project;
import com.google.gerrit.entities.RefNames;
import com.google.gerrit.extensions.api.changes.DraftInput;
import com.google.gerrit.extensions.api.changes.NotifyHandling;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.groups.GroupInput;
import com.google.gerrit.extensions.api.projects.BranchInput;
import com.google.gerrit.extensions.api.projects.ConfigInput;
import com.google.gerrit.extensions.client.ChangeStatus;
import com.google.gerrit.extensions.client.GeneralPreferencesInfo;
import com.google.gerrit.extensions.client.InheritableBoolean;
import com.google.gerrit.extensions.client.ListChangesOption;
import com.google.gerrit.extensions.client.ProjectWatchInfo;
import com.google.gerrit.extensions.client.ReviewerState;
import com.google.gerrit.extensions.client.Side;
import com.google.gerrit.extensions.common.AccountInfo;
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.ChangeMessageInfo;
import com.google.gerrit.extensions.common.CommentInfo;
import com.google.gerrit.extensions.common.EditInfo;
import com.google.gerrit.extensions.common.LabelInfo;
import com.google.gerrit.extensions.common.RevisionInfo;
import com.google.gerrit.git.ObjectIds;
import com.google.gerrit.server.ChangeMessagesUtil;
import com.google.gerrit.server.events.CommitReceivedEvent;
import com.google.gerrit.server.git.receive.NoteDbPushOption;
import com.google.gerrit.server.git.receive.ReceiveConstants;
import com.google.gerrit.server.git.validators.CommitValidationListener;
import com.google.gerrit.server.git.validators.CommitValidationMessage;
import com.google.gerrit.server.group.SystemGroupBackend;
import com.google.gerrit.server.project.testing.TestLabels;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.testing.FakeEmailSender.Message;
import com.google.gerrit.testing.TestTimeUtil;
import com.google.inject.Inject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.RefUpdate.Result;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.PushResult;
import org.eclipse.jgit.transport.RefSpec;
import org.eclipse.jgit.transport.RemoteRefUpdate;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@SkipProjectClone
@UseClockStep
public abstract class AbstractPushForReview extends AbstractDaemonTest {
  /** Git transport protocols exercised by protocol-specific subclasses via {@code selectProtocol}. */
  protected enum Protocol {
    // Only test protocols which are actually served by the Gerrit server, since each separate test
    // class is large and slow.
    //
    // This list excludes the test InProcessProtocol, which is used by large numbers of other
    // acceptance tests. Small tests of InProcessProtocol are still possible, without incurring a
    // new large slow test.
    SSH,
    HTTP
  }
@Inject private ProjectOperations projectOperations;
@Inject private RequestScopeOperations requestScopeOperations;
@Inject private ExtensionRegistry extensionRegistry;
private static String NEW_CHANGE_INDICATOR = " [NEW]";
private LabelType patchSetLock;
  /**
   * Defines the Patch-Set-Lock label in the project config and grants voting range 0..+1 on it to
   * both anonymous users and the admin group on all branches.
   */
  @Before
  public void setUpPatchSetLock() throws Exception {
    try (ProjectConfigUpdate u = updateProject(project)) {
      patchSetLock = TestLabels.patchSetLock();
      u.getConfig().upsertLabelType(patchSetLock);
      u.save();
    }
    projectOperations
        .project(project)
        .forUpdate()
        .add(
            allowLabel(patchSetLock.getName())
                .ref("refs/heads/*")
                .group(ANONYMOUS_USERS)
                .range(0, 1))
        .add(
            allowLabel(patchSetLock.getName())
                .ref("refs/heads/*")
                .group(adminGroupUuid())
                .range(0, 1))
        .update();
  }
  /** Resets the admin's publish-comments-on-push preference so it cannot leak between tests. */
  @After
  public void resetPublishCommentOnPushOption() throws Exception {
    requestScopeOperations.setApiUser(admin.id());
    GeneralPreferencesInfo prefs = gApi.accounts().id(admin.id().get()).getPreferences();
    prefs.publishCommentsOnPush = false;
    gApi.accounts().id(admin.id().get()).setPreferences(prefs);
  }
  /**
   * Re-clones the test project over the given transport protocol, replacing {@code testRepo}.
   *
   * @param p transport to use for subsequent pushes
   */
  protected void selectProtocol(Protocol p) throws Exception {
    String url;
    switch (p) {
      case SSH:
        url = adminSshSession.getUrl();
        break;
      case HTTP:
        url = admin.getHttpUrl(server);
        break;
      default:
        throw new IllegalArgumentException("unexpected protocol: " + p);
    }
    testRepo = GitUtil.cloneProject(project, url + "/" + project.get());
  }
  /** A plain push to refs/for/master creates a change in state NEW with no topic. */
  @Test
  public void pushForMaster() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null);
  }
  /**
   * Pushing the very first commit of an empty repository to refs/for/master creates a change;
   * master only comes into existence once that change is submitted.
   */
  @Test
  @TestProjectInput(createEmptyCommit = false)
  public void pushInitialCommitForMasterBranch() throws Exception {
    RevCommit c = testRepo.commit().message("Initial commit").insertChangeId().create();
    String id = GitUtil.getChangeId(testRepo, c).get();
    testRepo.reset(c);
    String r = "refs/for/master";
    PushResult pr = pushHead(testRepo, r, false);
    assertPushOk(pr, r);
    ChangeInfo change = gApi.changes().id(id).info();
    assertThat(change.branch).isEqualTo("master");
    assertThat(change.status).isEqualTo(ChangeStatus.NEW);
    // Branch must not exist yet: the change is pending, not merged.
    try (Repository repo = repoManager.openRepository(project)) {
      assertThat(repo.resolve("master")).isNull();
    }
    gApi.changes().id(change.id).current().review(ReviewInput.approve());
    gApi.changes().id(change.id).current().submit();
    // After submit, master points at the pushed commit.
    try (Repository repo = repoManager.openRepository(project)) {
      assertThat(repo.resolve("master")).isEqualTo(c);
    }
  }
  /** Two-commit initial series on an empty repo; see the shared helper for the assertions. */
  @Test
  @TestProjectInput(createEmptyCommit = false)
  public void pushInitialCommitSeriesForMasterBranch() throws Exception {
    testPushInitialCommitSeriesForMasterBranch();
  }
  /** Same series scenario, but with create-new-change-for-all-not-in-target enabled. */
  @Test
  @TestProjectInput(createEmptyCommit = false)
  public void pushInitialCommitSeriesForMasterBranchWithCreateNewChangeForAllNotInTarget()
      throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    testPushInitialCommitSeriesForMasterBranch();
  }
  /**
   * Pushes a two-commit series to an empty repository's refs/for/master, then submits the changes
   * one at a time and checks that master advances to each commit in order.
   */
  private void testPushInitialCommitSeriesForMasterBranch() throws Exception {
    RevCommit c = testRepo.commit().message("Initial commit").insertChangeId().create();
    String id = GitUtil.getChangeId(testRepo, c).get();
    testRepo.reset(c);
    RevCommit c2 = testRepo.commit().parent(c).message("Second commit").insertChangeId().create();
    String id2 = GitUtil.getChangeId(testRepo, c2).get();
    testRepo.reset(c2);
    String r = "refs/for/master";
    PushResult pr = pushHead(testRepo, r, false);
    assertPushOk(pr, r);
    ChangeInfo change = gApi.changes().id(id).info();
    assertThat(change.branch).isEqualTo("master");
    assertThat(change.status).isEqualTo(ChangeStatus.NEW);
    ChangeInfo change2 = gApi.changes().id(id2).info();
    assertThat(change2.branch).isEqualTo("master");
    assertThat(change2.status).isEqualTo(ChangeStatus.NEW);
    // Nothing submitted yet, so the branch must not exist.
    try (Repository repo = repoManager.openRepository(project)) {
      assertThat(repo.resolve("master")).isNull();
    }
    gApi.changes().id(change.id).current().review(ReviewInput.approve());
    gApi.changes().id(change.id).current().submit();
    try (Repository repo = repoManager.openRepository(project)) {
      assertThat(repo.resolve("master")).isEqualTo(c);
    }
    gApi.changes().id(change2.id).current().review(ReviewInput.approve());
    gApi.changes().id(change2.id).current().submit();
    try (Repository repo = repoManager.openRepository(project)) {
      assertThat(repo.resolve("master")).isEqualTo(c2);
    }
  }
  /**
   * A push for review whose history shares no ancestor with the target branch is rejected with
   * "no common ancestry".
   */
  @Test
  @TestProjectInput(createEmptyCommit = false)
  public void validateConnected() throws Exception {
    RevCommit c = testRepo.commit().message("Initial commit").insertChangeId().create();
    testRepo.reset(c);
    String r = "refs/heads/master";
    PushResult pr = pushHead(testRepo, r, false);
    assertPushOk(pr, r);
    // Amending the root commit produces a disconnected history.
    RevCommit amended =
        testRepo.amend(c).message("different initial commit").insertChangeId().create();
    testRepo.reset(amended);
    r = "refs/for/master";
    pr = pushHead(testRepo, r, false);
    assertPushRejected(pr, r, "no common ancestry");
  }
  /** An unsigned push fails when the project requires signed pushes. */
  @Test
  @GerritConfig(name = "receive.enableSignedPush", value = "true")
  @TestProjectInput(
      enableSignedPush = InheritableBoolean.TRUE,
      requireSignedPush = InheritableBoolean.TRUE)
  public void nonSignedPushRejectedWhenSignPushRequired() throws Exception {
    pushTo("refs/for/master").assertErrorStatus("push cert error");
  }
  /**
   * After deleting refs/meta/config, pushing an initial commit for it creates a change; the ref is
   * recreated only when that change is submitted.
   */
  @Test
  public void pushInitialCommitForRefsMetaConfigBranch() throws Exception {
    // delete refs/meta/config
    try (Repository repo = repoManager.openRepository(project);
        RevWalk rw = new RevWalk(repo)) {
      RefUpdate u = repo.updateRef(RefNames.REFS_CONFIG);
      u.setForceUpdate(true);
      u.setExpectedOldObjectId(repo.resolve(RefNames.REFS_CONFIG));
      assertThat(u.delete(rw)).isEqualTo(Result.FORCED);
    }
    RevCommit c =
        testRepo
            .commit()
            .message("Initial commit")
            .author(admin.newIdent())
            .committer(admin.newIdent())
            .insertChangeId()
            .create();
    String id = GitUtil.getChangeId(testRepo, c).get();
    testRepo.reset(c);
    String r = "refs/for/" + RefNames.REFS_CONFIG;
    PushResult pr = pushHead(testRepo, r, false);
    assertPushOk(pr, r);
    ChangeInfo change = gApi.changes().id(id).info();
    assertThat(change.branch).isEqualTo(RefNames.REFS_CONFIG);
    assertThat(change.status).isEqualTo(ChangeStatus.NEW);
    // Ref stays absent until submit.
    try (Repository repo = repoManager.openRepository(project)) {
      assertThat(repo.resolve(RefNames.REFS_CONFIG)).isNull();
    }
    gApi.changes().id(change.id).current().review(ReviewInput.approve());
    gApi.changes().id(change.id).current().submit();
    try (Repository repo = repoManager.openRepository(project)) {
      assertThat(repo.resolve(RefNames.REFS_CONFIG)).isEqualTo(c);
    }
  }
  /** Pushing for review to a non-existing normal branch is rejected and creates no ref. */
  @Test
  public void pushInitialCommitForNormalNonExistingBranchFails() throws Exception {
    RevCommit c =
        testRepo
            .commit()
            .message("Initial commit")
            .author(admin.newIdent())
            .committer(admin.newIdent())
            .insertChangeId()
            .create();
    testRepo.reset(c);
    String r = "refs/for/foo";
    PushResult pr = pushHead(testRepo, r, false);
    assertPushRejected(pr, r, "branch foo not found");
    try (Repository repo = repoManager.openRepository(project)) {
      assertThat(repo.resolve("foo")).isNull();
    }
  }
  /**
   * Verifies the textual feedback printed after a push: a new change shows its URL plus the [NEW]
   * indicator, and a push touching an existing change lists both changes without [NEW] on the
   * updated one.
   */
  @Test
  public void output() throws Exception {
    String url = canonicalWebUrl.get() + "c/" + project.get() + "/+/";
    ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
    PushOneCommit.Result r1 = pushTo("refs/for/master");
    Change.Id id1 = r1.getChange().getId();
    r1.assertOkStatus();
    r1.assertChange(Change.Status.NEW, null);
    r1.assertMessage(
        url + id1 + " " + r1.getCommit().getShortMessage() + NEW_CHANGE_INDICATOR + "\n");
    testRepo.reset(initialHead);
    // Recreate the same change (same Change-Id) with an amended subject.
    String newMsg = r1.getCommit().getShortMessage() + " v2";
    testRepo
        .branch("HEAD")
        .commit()
        .message(newMsg)
        .insertChangeId(r1.getChangeId().substring(1))
        .create();
    PushOneCommit.Result r2 =
        pushFactory
            .create(admin.newIdent(), testRepo, "another commit", "b.txt", "bbb")
            .to("refs/for/master");
    Change.Id id2 = r2.getChange().getId();
    r2.assertOkStatus();
    r2.assertChange(Change.Status.NEW, null);
    r2.assertMessage(
        "success\n"
            + "\n"
            + "  "
            + url
            + id1
            + " "
            + newMsg
            + "\n"
            + "  "
            + url
            + id2
            + " another commit"
            + NEW_CHANGE_INDICATOR
            + "\n");
  }
  /**
   * A change whose exact commit is force-pushed directly to the branch is auto-closed; further
   * pushes to the closed change are rejected.
   */
  @Test
  public void autocloseByCommit() throws Exception {
    // Create a change
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    // Force push it, closing it
    String master = "refs/heads/master";
    assertPushOk(pushHead(testRepo, master, false), master);
    // Attempt to push amended commit to same change
    String url = canonicalWebUrl.get() + "c/" + project.get() + "/+/" + r.getChange().getId();
    r = amendChange(r.getChangeId(), "refs/for/master");
    r.assertErrorStatus("change " + url + " closed");
    // Check change message that was added on auto-close
    ChangeInfo change = change(r).get();
    assertThat(Iterables.getLast(change.messages).message)
        .isEqualTo("Change has been successfully pushed.");
  }
  /** When Change-Id is optional, pushing without one still succeeds but emits a deprecation warning. */
  @Test
  public void pushWithoutChangeIdDeprecated() throws Exception {
    setRequireChangeId(InheritableBoolean.FALSE);
    testRepo
        .branch("HEAD")
        .commit()
        .message("A change")
        .author(admin.newIdent())
        .committer(new PersonIdent(admin.newIdent(), testRepo.getDate()))
        .create();
    PushResult result = pushHead(testRepo, "refs/for/master");
    assertThat(result.getMessages()).contains("warning: pushing without Change-Id is deprecated");
  }
  /**
   * A direct branch push of a different commit carrying the same Change-Id auto-closes the change
   * and records the pushed commit as a new patch set.
   */
  @Test
  public void autocloseByChangeId() throws Exception {
    // Create a change
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    // Amend the commit locally
    RevCommit c = testRepo.amend(r.getCommit()).create();
    assertThat(c).isNotEqualTo(r.getCommit());
    testRepo.reset(c);
    // Force push it, closing it
    String master = "refs/heads/master";
    assertPushOk(pushHead(testRepo, master, false), master);
    // Attempt to push amended commit to same change
    String url = canonicalWebUrl.get() + "c/" + project.get() + "/+/" + r.getChange().getId();
    r = amendChange(r.getChangeId(), "refs/for/master");
    r.assertErrorStatus("change " + url + " closed");
    // Check that new commit was added as patch set
    ChangeInfo change = change(r).get();
    assertThat(change.revisions).hasSize(2);
    assertThat(change.currentRevision).isEqualTo(c.name());
  }
  /** The %topic= push option sets the change topic, including topics containing '/'. */
  @Test
  public void pushForMasterWithTopic() throws Exception {
    String topic = "my/topic";
    // specify topic as option
    PushOneCommit.Result r = pushTo("refs/for/master%topic=" + topic);
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, topic);
  }
  /** The topic can also be supplied via the git push-option channel (-o topic=...). */
  @Test
  public void pushForMasterWithTopicOption() throws Exception {
    String topicOption = "topic=myTopic";
    List<String> pushOptions = new ArrayList<>();
    pushOptions.add(topicOption);
    PushOneCommit push = pushFactory.create(admin.newIdent(), testRepo);
    push.setPushOptions(pushOptions);
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, "myTopic");
    r.assertPushOptions(pushOptions);
  }
  /** Topics longer than the 2048-character server limit are rejected. */
  @Test
  public void pushForMasterWithTopicExceedLimitFails() throws Exception {
    String topic = Stream.generate(() -> "t").limit(2049).collect(joining());
    PushOneCommit.Result r = pushTo("refs/for/master%topic=" + topic);
    r.assertErrorStatus("topic length exceeds the limit (2048)");
  }
@Test
public void pushForMasterWithNotify() throws Exception {
// create a user that watches the project
TestAccount user3 = accountCreator.create("user3", "[email protected]", "User3", null);
List<ProjectWatchInfo> projectsToWatch = new ArrayList<>();
ProjectWatchInfo pwi = new ProjectWatchInfo();
pwi.project = project.get();
pwi.filter = "*";
pwi.notifyNewChanges = true;
projectsToWatch.add(pwi);
requestScopeOperations.setApiUser(user3.id());
gApi.accounts().self().setWatchedProjects(projectsToWatch);
TestAccount user2 = accountCreator.user2();
String pushSpec = "refs/for/master%reviewer=" + user.email() + ",cc=" + user2.email();
sender.clear();
PushOneCommit.Result r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE);
r.assertOkStatus();
assertThat(sender.getMessages()).isEmpty();
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.OWNER);
r.assertOkStatus();
// no email notification about own changes
assertThat(sender.getMessages()).isEmpty();
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.OWNER_REVIEWERS);
r.assertOkStatus();
assertThat(sender.getMessages()).hasSize(1);
Message m = sender.getMessages().get(0);
assertThat(m.rcpt()).containsExactly(user.getNameEmail());
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.ALL);
r.assertOkStatus();
assertThat(sender.getMessages()).hasSize(1);
m = sender.getMessages().get(0);
assertThat(m.rcpt())
.containsExactly(user.getNameEmail(), user2.getNameEmail(), user3.getNameEmail());
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-to=" + user3.email());
r.assertOkStatus();
assertNotifyTo(user3);
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-cc=" + user3.email());
r.assertOkStatus();
assertNotifyCc(user3);
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-bcc=" + user3.email());
r.assertOkStatus();
assertNotifyBcc(user3);
// request that sender gets notified as TO, CC and BCC, email should be sent
// even if the sender is the only recipient
sender.clear();
pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-to=" + admin.email());
assertNotifyTo(admin);
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-cc=" + admin.email());
r.assertOkStatus();
assertNotifyCc(admin);
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-bcc=" + admin.email());
r.assertOkStatus();
assertNotifyBcc(admin);
}
  /**
   * The cc= push option adds CCs: one user, multiple users (the owner is excluded), and a
   * non-existing address is rejected.
   */
  @Test
  public void pushForMasterWithCc() throws Exception {
    // cc one user
    String topic = "my/topic";
    PushOneCommit.Result r = pushTo("refs/for/master%topic=" + topic + ",cc=" + user.email());
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, topic, ImmutableList.of(), ImmutableList.of(user));
    // cc several users
    r =
        pushTo(
            "refs/for/master%topic="
                + topic
                + ",cc="
                + admin.email()
                + ",cc="
                + user.email()
                + ",cc="
                + accountCreator.user2().email());
    r.assertOkStatus();
    // Check that admin isn't CC'd as they own the change
    r.assertChange(
        Change.Status.NEW,
        topic,
        ImmutableList.of(),
        ImmutableList.of(user, accountCreator.user2()));
    // cc non-existing user
    String nonExistingEmail = "[email protected]";
    r =
        pushTo(
            "refs/for/master%topic="
                + topic
                + ",cc="
                + admin.email()
                + ",cc="
                + nonExistingEmail
                + ",cc="
                + user.email());
    r.assertErrorStatus(nonExistingEmail + " does not identify a registered user or group");
  }
  /**
   * With reviewer-by-email enabled, unregistered addresses can be CC'd; they appear as CCs with no
   * account id.
   */
  @Test
  public void pushForMasterWithCcByEmail() throws Exception {
    ConfigInput conf = new ConfigInput();
    conf.enableReviewerByEmail = InheritableBoolean.TRUE;
    gApi.projects().name(project.get()).config(conf);
    PushOneCommit.Result r =
        pushTo("refs/for/master%[email protected],[email protected]");
    r.assertOkStatus();
    ChangeInfo ci = get(r.getChangeId(), DETAILED_LABELS);
    ImmutableList<AccountInfo> ccs =
        firstNonNull(ci.reviewers.get(ReviewerState.CC), ImmutableList.<AccountInfo>of()).stream()
            .sorted(comparing((AccountInfo a) -> a.email))
            .collect(toImmutableList());
    assertThat(ccs).hasSize(2);
    assertThat(ccs.get(0).email).isEqualTo("[email protected]");
    assertThat(ccs.get(0)._accountId).isNull();
    assertThat(ccs.get(1).email).isEqualTo("[email protected]");
    assertThat(ccs.get(1)._accountId).isNull();
  }
  /** cc= also accepts a group name; all group members are added as CCs. */
  @Test
  public void pushForMasterWithCcGroup() throws Exception {
    TestAccount user2 = accountCreator.user2();
    String group = name("group");
    GroupInput gin = new GroupInput();
    gin.name = group;
    gin.members = ImmutableList.of(user.username(), user2.username());
    gin.visibleToAll = true; // TODO(dborowitz): Shouldn't be necessary; see ReviewerAdder.
    gApi.groups().create(gin);
    PushOneCommit.Result r = pushTo("refs/for/master%cc=" + group);
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null, ImmutableList.of(), ImmutableList.of(user, user2));
  }
  /**
   * The r= push option adds reviewers: one, several (the owner is excluded), and a non-existing
   * address is rejected.
   */
  @Test
  public void pushForMasterWithReviewer() throws Exception {
    // add one reviewer
    String topic = "my/topic";
    PushOneCommit.Result r = pushTo("refs/for/master%topic=" + topic + ",r=" + user.email());
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, topic, user);
    // add several reviewers
    TestAccount user2 =
        accountCreator.create("another-user", "[email protected]", "Another User", null);
    r =
        pushTo(
            "refs/for/master%topic="
                + topic
                + ",r="
                + admin.email()
                + ",r="
                + user.email()
                + ",r="
                + user2.email());
    r.assertOkStatus();
    // admin is the owner of the change and should not appear as reviewer
    r.assertChange(Change.Status.NEW, topic, user, user2);
    // add non-existing user as reviewer
    String nonExistingEmail = "[email protected]";
    r =
        pushTo(
            "refs/for/master%topic="
                + topic
                + ",r="
                + admin.email()
                + ",r="
                + nonExistingEmail
                + ",r="
                + user.email());
    r.assertErrorStatus(nonExistingEmail + " does not identify a registered user or group");
  }
  /**
   * With reviewer-by-email enabled, unregistered addresses can be added as reviewers; they appear
   * with no account id.
   */
  @Test
  public void pushForMasterWithReviewerByEmail() throws Exception {
    ConfigInput conf = new ConfigInput();
    conf.enableReviewerByEmail = InheritableBoolean.TRUE;
    gApi.projects().name(project.get()).config(conf);
    PushOneCommit.Result r =
        pushTo("refs/for/master%[email protected],[email protected]");
    r.assertOkStatus();
    ChangeInfo ci = get(r.getChangeId(), DETAILED_LABELS);
    ImmutableList<AccountInfo> reviewers =
        firstNonNull(ci.reviewers.get(ReviewerState.REVIEWER), ImmutableList.<AccountInfo>of())
            .stream()
            .sorted(comparing((AccountInfo a) -> a.email))
            .collect(toImmutableList());
    assertThat(reviewers).hasSize(2);
    assertThat(reviewers.get(0).email).isEqualTo("[email protected]");
    assertThat(reviewers.get(0)._accountId).isNull();
    assertThat(reviewers.get(1).email).isEqualTo("[email protected]");
    assertThat(reviewers.get(1)._accountId).isNull();
  }
  /** r= also accepts a group name; all group members are added as reviewers. */
  @Test
  public void pushForMasterWithReviewerGroup() throws Exception {
    TestAccount user2 = accountCreator.user2();
    String group = name("group");
    GroupInput gin = new GroupInput();
    gin.name = group;
    gin.members = ImmutableList.of(user.username(), user2.username());
    gin.visibleToAll = true; // TODO(dborowitz): Shouldn't be necessary; see ReviewerAdder.
    gApi.groups().create(gin);
    PushOneCommit.Result r = pushTo("refs/for/master%r=" + group);
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null, ImmutableList.of(user, user2), ImmutableList.of());
  }
  /**
   * Life cycle of the private flag via push options: %private sets it, it is sticky across normal
   * pushes, %remove-private clears it, and combining both options is an error.
   */
  @Test
  public void pushPrivateChange() throws Exception {
    // Push a private change.
    PushOneCommit.Result r = pushTo("refs/for/master%private");
    r.assertOkStatus();
    r.assertMessage(" [PRIVATE]");
    assertThat(r.getChange().change().isPrivate()).isTrue();
    // Pushing a new patch set without --private doesn't remove the privacy flag from the change.
    r = amendChange(r.getChangeId(), "refs/for/master");
    r.assertOkStatus();
    r.assertMessage(" [PRIVATE]");
    assertThat(r.getChange().change().isPrivate()).isTrue();
    // Remove the privacy flag from the change.
    r = amendChange(r.getChangeId(), "refs/for/master%remove-private");
    r.assertOkStatus();
    r.assertNotMessage(" [PRIVATE]");
    assertThat(r.getChange().change().isPrivate()).isFalse();
    // Normal push: privacy flag is not added back.
    r = amendChange(r.getChangeId(), "refs/for/master");
    r.assertOkStatus();
    r.assertNotMessage(" [PRIVATE]");
    assertThat(r.getChange().change().isPrivate()).isFalse();
    // Make the change private again.
    r = pushTo("refs/for/master%private");
    r.assertOkStatus();
    r.assertMessage(" [PRIVATE]");
    assertThat(r.getChange().change().isPrivate()).isTrue();
    // Can't use --private and --remove-private together.
    r = pushTo("refs/for/master%private,remove-private");
    r.assertErrorStatus();
  }
  /**
   * Life cycle of the work-in-progress flag via push options: %wip sets it (and tags the change
   * message), it is sticky, %ready clears it, combining both is an error, and a direct branch push
   * merges the change and drops the flag.
   */
  @Test
  public void pushWorkInProgressChange() throws Exception {
    // Push a work-in-progress change.
    PushOneCommit.Result r = pushTo("refs/for/master%wip");
    r.assertOkStatus();
    r.assertMessage(" [WIP]");
    assertThat(r.getChange().change().isWorkInProgress()).isTrue();
    assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_WIP_PATCH_SET);
    // Pushing a new patch set without --wip doesn't remove the wip flag from the change.
    String changeId = r.getChangeId();
    r = amendChange(changeId, "refs/for/master");
    r.assertOkStatus();
    r.assertMessage(" [WIP]");
    assertThat(r.getChange().change().isWorkInProgress()).isTrue();
    assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_WIP_PATCH_SET);
    // Remove the wip flag from the change.
    r = amendChange(changeId, "refs/for/master%ready");
    r.assertOkStatus();
    r.assertNotMessage(" [WIP]");
    assertThat(r.getChange().change().isWorkInProgress()).isFalse();
    assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_PATCH_SET);
    // Normal push: wip flag is not added back.
    r = amendChange(changeId, "refs/for/master");
    r.assertOkStatus();
    r.assertNotMessage(" [WIP]");
    assertThat(r.getChange().change().isWorkInProgress()).isFalse();
    assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_PATCH_SET);
    // Make the change work-in-progress again.
    r = amendChange(changeId, "refs/for/master%wip");
    r.assertOkStatus();
    r.assertMessage(" [WIP]");
    assertThat(r.getChange().change().isWorkInProgress()).isTrue();
    assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_WIP_PATCH_SET);
    // Can't use --wip and --ready together.
    r = amendChange(changeId, "refs/for/master%wip,ready");
    r.assertErrorStatus();
    // Pushing directly to the branch removes the work-in-progress flag
    String master = "refs/heads/master";
    assertPushOk(pushHead(testRepo, master, false), master);
    ChangeInfo result = Iterables.getOnlyElement(gApi.changes().query(changeId).get());
    assertThat(result.status).isEqualTo(ChangeStatus.MERGED);
    assertThat(result.workInProgress).isNull();
  }
private void assertUploadTag(ChangeData cd, String expectedTag) throws Exception {
List<ChangeMessage> msgs = cd.messages();
assertThat(msgs).isNotEmpty();
assertThat(Iterables.getLast(msgs).getTag()).isEqualTo(expectedTag);
}
  /**
   * Only the change owner, admins, and project owners may flip the WIP/ready bit; an unrelated
   * registered user is rejected until granted project ownership.
   */
  @Test
  public void pushWorkInProgressChangeWhenNotOwner() throws Exception {
    TestRepository<?> userRepo = cloneProject(project, user);
    PushOneCommit.Result r =
        pushFactory.create(user.newIdent(), userRepo).to("refs/for/master%wip");
    r.assertOkStatus();
    assertThat(r.getChange().change().getOwner()).isEqualTo(user.id());
    assertThat(r.getChange().change().isWorkInProgress()).isTrue();
    // Admin user trying to move from WIP to ready should succeed.
    // NOTE(review): the comment above says "Admin user" but the amend is attributed to `user` —
    // confirm whether the acting account or the comment is wrong.
    GitUtil.fetch(testRepo, r.getPatchSet().refName() + ":ps");
    testRepo.reset("ps");
    r = amendChange(r.getChangeId(), "refs/for/master%ready", user, testRepo);
    r.assertOkStatus();
    // Other user trying to move from WIP to WIP should succeed.
    r = amendChange(r.getChangeId(), "refs/for/master%wip", admin, testRepo);
    r.assertOkStatus();
    assertThat(r.getChange().change().isWorkInProgress()).isTrue();
    // Push as change owner to move change from WIP to ready.
    r = pushFactory.create(user.newIdent(), userRepo).to("refs/for/master%ready");
    r.assertOkStatus();
    assertThat(r.getChange().change().isWorkInProgress()).isFalse();
    // Admin user trying to move from ready to WIP should succeed.
    GitUtil.fetch(testRepo, r.getPatchSet().refName() + ":ps");
    testRepo.reset("ps");
    r = amendChange(r.getChangeId(), "refs/for/master%wip", admin, testRepo);
    r.assertOkStatus();
    // Other user trying to move from wip to wip should succeed.
    r = amendChange(r.getChangeId(), "refs/for/master%wip", admin, testRepo);
    r.assertOkStatus();
    // Non owner, non admin and non project owner cannot flip wip bit:
    TestAccount user2 = accountCreator.user2();
    projectOperations
        .project(project)
        .forUpdate()
        .add(
            allow(Permission.FORGE_COMMITTER)
                .ref("refs/*")
                .group(SystemGroupBackend.REGISTERED_USERS))
        .update();
    TestRepository<?> user2Repo = cloneProject(project, user2);
    GitUtil.fetch(user2Repo, r.getPatchSet().refName() + ":ps");
    user2Repo.reset("ps");
    r = amendChange(r.getChangeId(), "refs/for/master%ready", user2, user2Repo);
    r.assertErrorStatus(ReceiveConstants.ONLY_CHANGE_OWNER_OR_PROJECT_OWNER_CAN_MODIFY_WIP);
    // Project owner trying to move from WIP to ready should succeed.
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.OWNER).ref("refs/*").group(SystemGroupBackend.REGISTERED_USERS))
        .update();
    r = amendChange(r.getChangeId(), "refs/for/master%ready", user2, user2Repo);
    r.assertOkStatus();
  }
  /**
   * The %edit push option uploads the commit as a change edit rather than a new patch set, and
   * triggers re-indexing so the change is found by has:edit.
   */
  @Test
  public void pushForMasterAsEdit() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    Optional<EditInfo> edit = getEdit(r.getChangeId());
    assertThat(edit).isAbsent();
    assertThat(query("has:edit")).isEmpty();
    // specify edit as option
    r = amendChange(r.getChangeId(), "refs/for/master%edit");
    r.assertOkStatus();
    edit = getEdit(r.getChangeId());
    assertThat(edit).isPresent();
    EditInfo editInfo = edit.get();
    r.assertMessage(
        canonicalWebUrl.get()
            + "c/"
            + project.get()
            + "/+/"
            + r.getChange().getId()
            + " "
            + editInfo.commit.subject
            + " [EDIT]\n");
    // verify that the re-indexing was triggered for the change
    assertThat(query("has:edit")).hasSize(1);
  }
  /**
   * The %m= push option sets both the change message suffix and the patch set description;
   * underscores in the option value are decoded to spaces.
   */
  @Test
  public void pushForMasterWithMessage() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master%m=my_test_message");
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null);
    ChangeInfo ci = get(r.getChangeId(), MESSAGES, ALL_REVISIONS);
    Collection<ChangeMessageInfo> changeMessages = ci.messages;
    assertThat(changeMessages).hasSize(1);
    for (ChangeMessageInfo cm : changeMessages) {
      assertThat(cm.message).isEqualTo("Uploaded patch set 1.\nmy test message");
    }
    Collection<RevisionInfo> revisions = ci.revisions.values();
    assertThat(revisions).hasSize(1);
    for (RevisionInfo ri : revisions) {
      assertThat(ri.description).isEqualTo("my test message");
    }
  }
  /** Each patch set keeps its own %m= description; percent decoding happens after comma splitting. */
  @Test
  public void pushForMasterWithMessageTwiceWithDifferentMessages() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    // %2C is comma; the value below tests that percent decoding happens after splitting.
    // All three ways of representing space ("%20", "+", and "_") are also exercised.
    PushOneCommit.Result r = push.to("refs/for/master%m=my_test%20+_message%2Cm=");
    r.assertOkStatus();
    push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    r = push.to("refs/for/master%m=new_test_message");
    r.assertOkStatus();
    ChangeInfo ci = get(r.getChangeId(), ALL_REVISIONS);
    Collection<RevisionInfo> revisions = ci.revisions.values();
    assertThat(revisions).hasSize(2);
    for (RevisionInfo ri : revisions) {
      if (ri.isCurrent) {
        assertThat(ri.description).isEqualTo("new test message");
      } else {
        assertThat(ri.description).isEqualTo("my test message,m=");
      }
    }
  }
  /** %m= values are percent-decoded, including UTF-8 sequences and git-rev-parse-reserved characters. */
  @Test
  public void pushForMasterWithPercentEncodedMessage() throws Exception {
    // Exercise percent-encoding of UTF-8, underscores, and patterns reserved by git-rev-parse.
    PushOneCommit.Result r =
        pushTo(
            "refs/for/master%m="
                + "Punctu%2E%2e%2Eation%7E%2D%40%7Bu%7D%20%7C%20%28%E2%95%AF%C2%B0%E2%96%A1%C2%B0"
                + "%EF%BC%89%E2%95%AF%EF%B8%B5%20%E2%94%BB%E2%94%81%E2%94%BB%20%5E%5F%5E");
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null);
    ChangeInfo ci = get(r.getChangeId(), MESSAGES, ALL_REVISIONS);
    Collection<ChangeMessageInfo> changeMessages = ci.messages;
    assertThat(changeMessages).hasSize(1);
    for (ChangeMessageInfo cm : changeMessages) {
      assertThat(cm.message)
          .isEqualTo("Uploaded patch set 1.\nPunctu...ation~-@{u} | (╯°□°)╯︵ ┻━┻ ^_^");
    }
    Collection<RevisionInfo> revisions = ci.revisions.values();
    assertThat(revisions).hasSize(1);
    for (RevisionInfo ri : revisions) {
      assertThat(ri.description).isEqualTo("Punctu...ation~-@{u} | (╯°□°)╯︵ ┻━┻ ^_^");
    }
  }
  /** Invalid percent escapes in %m= are passed through verbatim rather than failing the push. */
  @Test
  public void pushForMasterWithInvalidPercentEncodedMessage() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master%m=not_percent_decodable_%%oops%20");
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null);
    ChangeInfo ci = get(r.getChangeId(), MESSAGES, ALL_REVISIONS);
    Collection<ChangeMessageInfo> changeMessages = ci.messages;
    assertThat(changeMessages).hasSize(1);
    for (ChangeMessageInfo cm : changeMessages) {
      assertThat(cm.message).isEqualTo("Uploaded patch set 1.\nnot percent decodable %%oops%20");
    }
    Collection<RevisionInfo> revisions = ci.revisions.values();
    assertThat(revisions).hasSize(1);
    for (RevisionInfo ri : revisions) {
      assertThat(ri.description).isEqualTo("not percent decodable %%oops%20");
    }
  }
@Test
public void pushForMasterWithApprovals() throws Exception {
PushOneCommit.Result r = pushTo("refs/for/master%l=Code-Review");
r.assertOkStatus();
ChangeInfo ci = get(r.getChangeId(), DETAILED_LABELS, MESSAGES, DETAILED_ACCOUNTS);
LabelInfo cr = ci.labels.get("Code-Review");
assertThat(cr.all).hasSize(1);
assertThat(cr.all.get(0).name).isEqualTo("Administrator");
assertThat(cr.all.get(0).value).isEqualTo(1);
assertThat(Iterables.getLast(ci.messages).message)
.isEqualTo("Uploaded patch set 1: Code-Review+1.");
PushOneCommit push =
pushFactory.create(
admin.newIdent(),
testRepo,
PushOneCommit.SUBJECT,
"b.txt",
"anotherContent",
r.getChangeId());
r = push.to("refs/for/master%l=Code-Review+2");
ci = get(r.getChangeId(), DETAILED_LABELS, MESSAGES, DETAILED_ACCOUNTS);
cr = ci.labels.get("Code-Review");
assertThat(Iterables.getLast(ci.messages).message)
.isEqualTo("Uploaded patch set 2: Code-Review+2.");
// Check that the user who pushed the change was added as a reviewer since they added a vote
assertThatUserIsOnlyReviewer(ci, admin);
assertThat(cr.all).hasSize(1);
assertThat(cr.all.get(0).name).isEqualTo("Administrator");
assertThat(cr.all.get(0).value).isEqualTo(2);
push =
pushFactory.create(
admin.newIdent(),
testRepo,
PushOneCommit.SUBJECT,
"c.txt",
"moreContent",
r.getChangeId());
r = push.to("refs/for/master%l=Code-Review+2");
ci = get(r.getChangeId(), MESSAGES);
assertThat(Iterables.getLast(ci.messages).message).isEqualTo("Uploaded patch set 3.");
}
@Test
public void pushNewPatchSetForMasterWithApprovals() throws Exception {
PushOneCommit.Result r = pushTo("refs/for/master");
r.assertOkStatus();
PushOneCommit push =
pushFactory.create(
admin.newIdent(),
testRepo,
PushOneCommit.SUBJECT,
"b.txt",
"anotherContent",
r.getChangeId());
r = push.to("refs/for/master%l=Code-Review+2");
ChangeInfo ci = get(r.getChangeId(), DETAILED_LABELS, MESSAGES, DETAILED_ACCOUNTS);
LabelInfo cr = ci.labels.get("Code-Review");
assertThat(Iterables.getLast(ci.messages).message)
.isEqualTo("Uploaded patch set 2: Code-Review+2.");
// Check that the user who pushed the new patch set was added as a reviewer since they added
// a vote
assertThatUserIsOnlyReviewer(ci, admin);
assertThat(cr.all).hasSize(1);
assertThat(cr.all.get(0).name).isEqualTo("Administrator");
assertThat(cr.all.get(0).value).isEqualTo(2);
}
  /**
   * When author and committer are forged, both forged identities are added as reviewers and
   * notified by email; the uploader remains the change owner.
   */
  @Test
  public void pushForMasterWithForgedAuthorAndCommitter() throws Exception {
    TestAccount user2 = accountCreator.user2();
    // Create a commit with different forged author and committer.
    RevCommit c =
        commitBuilder()
            .author(user.newIdent())
            .committer(user2.newIdent())
            .add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
            .message(PushOneCommit.SUBJECT)
            .create();
    // Push commit as "Administrator".
    pushHead(testRepo, "refs/for/master");
    String changeId = GitUtil.getChangeId(testRepo, c).get();
    assertThat(getOwnerEmail(changeId)).isEqualTo(admin.email());
    assertThat(getReviewerEmails(changeId, ReviewerState.REVIEWER))
        .containsExactly(user.email(), user2.email());
    assertThat(sender.getMessages()).hasSize(1);
    assertThat(sender.getMessages().get(0).rcpt())
        .containsExactly(user.getNameEmail(), user2.getNameEmail());
  }
  /**
   * Forging author/committer only on a later patch set likewise adds both forged identities as
   * reviewers and notifies them; ownership is unchanged.
   */
  @Test
  public void pushNewPatchSetForMasterWithForgedAuthorAndCommitter() throws Exception {
    TestAccount user2 = accountCreator.user2();
    // First patch set has author and committer matching change owner.
    PushOneCommit.Result r = pushTo("refs/for/master");
    assertThat(getOwnerEmail(r.getChangeId())).isEqualTo(admin.email());
    assertThat(getReviewerEmails(r.getChangeId(), ReviewerState.REVIEWER)).isEmpty();
    amendBuilder()
        .author(user.newIdent())
        .committer(user2.newIdent())
        .add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT + "2")
        .create();
    pushHead(testRepo, "refs/for/master");
    assertThat(getOwnerEmail(r.getChangeId())).isEqualTo(admin.email());
    assertThat(getReviewerEmails(r.getChangeId(), ReviewerState.REVIEWER))
        .containsExactly(user.email(), user2.email());
    assertThat(sender.getMessages()).hasSize(1);
    assertThat(sender.getMessages().get(0).rcpt())
        .containsExactly(user.getNameEmail(), user2.getNameEmail());
  }
/**
* There was a bug that allowed a user with Forge Committer Identity access right to upload a
* commit and put *votes on behalf of another user* on it. This test checks that this is not
* possible, but that the votes that are specified on push are applied only on behalf of the
* uploader.
*
* <p>This particular bug only occurred when there was more than one label defined. However to
* test that the votes that are specified on push are applied on behalf of the uploader a single
* label is sufficient.
*/
@Test
public void pushForMasterWithApprovalsForgeCommitterButNoForgeVote() throws Exception {
// Create a commit with "User" as author and committer
RevCommit c =
commitBuilder()
.author(user.newIdent())
.committer(user.newIdent())
.add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
.message(PushOneCommit.SUBJECT)
.create();
// Push this commit as "Administrator" (requires Forge Committer Identity)
pushHead(testRepo, "refs/for/master%l=Code-Review+1", false);
// Expected Code-Review votes:
// 1. 0 from User (committer):
// When the committer is forged, the committer is automatically added as
// reviewer, hence we expect a dummy 0 vote for the committer.
// 2. +1 from Administrator (uploader):
// On push Code-Review+1 was specified, hence we expect a +1 vote from
// the uploader.
ChangeInfo ci =
get(GitUtil.getChangeId(testRepo, c).get(), DETAILED_LABELS, MESSAGES, DETAILED_ACCOUNTS);
LabelInfo cr = ci.labels.get("Code-Review");
assertThat(cr.all).hasSize(2);
int indexAdmin = admin.fullName().equals(cr.all.get(0).name) ? 0 : 1;
int indexUser = indexAdmin == 0 ? 1 : 0;
assertThat(cr.all.get(indexAdmin).name).isEqualTo(admin.fullName());
assertThat(cr.all.get(indexAdmin).value.intValue()).isEqualTo(1);
assertThat(cr.all.get(indexUser).name).isEqualTo(user.fullName());
assertThat(cr.all.get(indexUser).value.intValue()).isEqualTo(0);
assertThat(Iterables.getLast(ci.messages).message)
.isEqualTo("Uploaded patch set 1: Code-Review+1.");
// Check that the user who pushed the change was added as a reviewer since they added a vote
assertThatUserIsOnlyReviewer(ci, admin);
}
@Test
public void pushWithMultipleApprovals() throws Exception {
LabelType Q =
label("Custom-Label", value(1, "Positive"), value(0, "No score"), value(-1, "Negative"));
String heads = "refs/heads/*";
try (ProjectConfigUpdate u = updateProject(project)) {
u.getConfig().upsertLabelType(Q);
u.save();
}
projectOperations
.project(project)
.forUpdate()
.add(allowLabel("Custom-Label").ref(heads).group(ANONYMOUS_USERS).range(-1, 1))
.update();
RevCommit c =
commitBuilder()
.author(admin.newIdent())
.committer(admin.newIdent())
.add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
.message(PushOneCommit.SUBJECT)
.create();
pushHead(testRepo, "refs/for/master%l=Code-Review+1,l=Custom-Label-1", false);
ChangeInfo ci = get(GitUtil.getChangeId(testRepo, c).get(), DETAILED_LABELS, DETAILED_ACCOUNTS);
LabelInfo cr = ci.labels.get("Code-Review");
assertThat(cr.all).hasSize(1);
cr = ci.labels.get("Custom-Label");
assertThat(cr.all).hasSize(1);
// Check that the user who pushed the change was added as a reviewer since they added a vote
assertThatUserIsOnlyReviewer(ci, admin);
}
  /** A Patch-Set-Lock vote blocks uploading a new patch set to the change. */
  @Test
  public void pushNewPatchsetToPatchSetLockedChange() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    // Lock the change, then attempt to push patch set 2.
    revision(r).review(new ReviewInput().label("Patch-Set-Lock", 1));
    r = push.to("refs/for/master");
    r.assertErrorStatus("cannot add patch set to " + r.getChange().change().getChangeId() + ".");
  }
  /** Voting on an unconfigured label via push option is rejected. */
  @Test
  public void pushForMasterWithApprovals_MissingLabel() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master%l=Verify");
    r.assertErrorStatus("label \"Verify\" is not a configured label");
  }
  /** A vote value outside the label's configured range is rejected. */
  @Test
  public void pushForMasterWithApprovals_ValueOutOfRange() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master%l=Code-Review-3");
    r.assertErrorStatus("label \"Code-Review\": -3 is not a valid value");
  }
  /** Pushing for review to a branch that does not exist is rejected with a clear message. */
  @Test
  public void pushForNonExistingBranch() throws Exception {
    String branchName = "non-existing";
    PushOneCommit.Result r = pushTo("refs/for/" + branchName);
    r.assertErrorStatus("branch " + branchName + " not found");
  }
  /**
   * Hashtags given via the {@code %hashtag} push option are set on the change; hashtags from later
   * patch sets accumulate rather than replace. A leading '#' is stripped.
   */
  @Test
  public void pushForMasterWithHashtags() throws Exception {
    // specify a single hashtag as option
    String hashtag1 = "tag1";
    Set<String> expected = ImmutableSet.of(hashtag1);
    PushOneCommit.Result r = pushTo("refs/for/master%hashtag=#" + hashtag1);
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null);
    Set<String> hashtags = gApi.changes().id(r.getChangeId()).getHashtags();
    assertThat(hashtags).containsExactlyElementsIn(expected);
    // specify a single hashtag as option in new patch set
    String hashtag2 = "tag2";
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    r = push.to("refs/for/master%hashtag=" + hashtag2);
    r.assertOkStatus();
    // Both hashtags are now present: push options add, they do not overwrite.
    expected = ImmutableSet.of(hashtag1, hashtag2);
    hashtags = gApi.changes().id(r.getChangeId()).getHashtags();
    assertThat(hashtags).containsExactlyElementsIn(expected);
  }
  /**
   * Multiple {@code hashtag=} options in one push all apply; leading '#' characters (even doubled)
   * are stripped, and later patch sets keep accumulating hashtags.
   */
  @Test
  public void pushForMasterWithMultipleHashtags() throws Exception {
    // specify multiple hashtags as options
    String hashtag1 = "tag1";
    String hashtag2 = "tag2";
    Set<String> expected = ImmutableSet.of(hashtag1, hashtag2);
    PushOneCommit.Result r =
        pushTo("refs/for/master%hashtag=#" + hashtag1 + ",hashtag=##" + hashtag2);
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null);
    Set<String> hashtags = gApi.changes().id(r.getChangeId()).getHashtags();
    assertThat(hashtags).containsExactlyElementsIn(expected);
    // specify multiple hashtags as options in new patch set
    String hashtag3 = "tag3";
    String hashtag4 = "tag4";
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    r = push.to("refs/for/master%hashtag=" + hashtag3 + ",hashtag=" + hashtag4);
    r.assertOkStatus();
    // All four hashtags accumulate across the two pushes.
    expected = ImmutableSet.of(hashtag1, hashtag2, hashtag3, hashtag4);
    hashtags = gApi.changes().id(r.getChangeId()).getHashtags();
    assertThat(hashtags).containsExactlyElementsIn(expected);
  }
  /**
   * When "use Signed-off-by" is enabled on the project, a push without a matching Signed-off-by
   * footer is rejected, while one with the uploader's footer is accepted.
   */
  @Test
  public void pushCommitUsingSignedOffBy() throws Exception {
    // Before the setting is enabled, a plain push succeeds.
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    setUseSignedOffBy(InheritableBoolean.TRUE);
    projectOperations
        .project(project)
        .forUpdate()
        .add(block(Permission.FORGE_COMMITTER).ref("refs/heads/master").group(REGISTERED_USERS))
        .update();
    // A commit carrying the uploader's own Signed-off-by footer is accepted.
    push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT
                + String.format("\n\nSigned-off-by: %s <%s>", admin.fullName(), admin.email()),
            "b.txt",
            "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    // A commit without any Signed-off-by footer is now rejected.
    push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertErrorStatus("not Signed-off-by author/committer/uploader in message footer");
  }
  /**
   * With createNewChangeForAllNotInTarget enabled, pushing the same commit for a different target
   * branch creates a second change sharing the same revision.
   */
  @Test
  public void createNewChangeForAllNotInTarget() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    // Pushing the same commit for another branch yields a second change for the same revision.
    gApi.projects().name(project.get()).branch("otherBranch").create(new BranchInput());
    PushOneCommit.Result r2 = push.to("refs/for/otherBranch");
    r2.assertOkStatus();
    assertTwoChangesWithSameRevision(r);
  }
  /**
   * Even with createNewChangeForAllNotInTarget enabled, pushing a change stacked on another user's
   * open change must not create a duplicate change for the predecessor commit.
   */
  @Test
  public void pushChangeBasedOnChangeOfOtherUserWithCreateNewChangeForAllNotInTarget()
      throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    // create a change as admin
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    RevCommit commitChange1 = r.getCommit();
    // create a second change as user (depends on the change from admin)
    TestRepository<?> userRepo = cloneProject(project, user);
    GitUtil.fetch(userRepo, r.getPatchSet().refName() + ":change");
    userRepo.reset("change");
    push =
        pushFactory.create(
            user.newIdent(), userRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    // assert that no new change was created for the commit of the predecessor change
    assertThat(query(commitChange1.name())).hasSize(1);
  }
  /**
   * Pushing the same commit to a second branch via the {@code %base=} magic-branch option creates
   * a second change sharing the same revision.
   */
  @Test
  public void pushSameCommitTwiceUsingMagicBranchBaseOption() throws Exception {
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.PUSH).ref("refs/heads/master").group(adminGroupUuid()))
        .update();
    // Establish a direct-push base commit on master, then create branch "foo".
    PushOneCommit.Result rBase = pushTo("refs/heads/master");
    rBase.assertOkStatus();
    gApi.projects().name(project.get()).branch("foo").create(new BranchInput());
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    // Push the identical commit for "foo", pinning the merge base explicitly.
    PushResult pr =
        GitUtil.pushHead(testRepo, "refs/for/foo%base=" + rBase.getCommit().name(), false, false);
    // BatchUpdate implementations differ in how they hook into progress monitors. We mostly just
    // care that there is a new change.
    assertThat(pr.getMessages()).containsMatch("changes: .*new: 1.*done");
    assertTwoChangesWithSameRevision(r);
  }
  /** Re-pushing a commit that is already the current patch set is rejected. */
  @Test
  public void pushSameCommitTwice() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    // Pushing HEAD again without any new commit must be rejected.
    assertPushRejected(
        pushHead(testRepo, "refs/for/master", false),
        "refs/for/master",
        "commit(s) already exists (as current patchset)");
  }
  /**
   * Re-pushing the current patch set is rejected even when the change is missing from the search
   * index (simulated by deleting the index entry), i.e. detection does not rely on the index.
   */
  @Test
  public void pushSameCommitTwiceWhenIndexFailed() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    // Drop the change from the index to simulate an indexing failure.
    indexer.delete(r.getChange().getId());
    assertPushRejected(
        pushHead(testRepo, "refs/for/master", false),
        "refs/for/master",
        "commit(s) already exists (as current patchset)");
  }
private void assertTwoChangesWithSameRevision(PushOneCommit.Result result) throws Exception {
List<ChangeInfo> changes = query(result.getCommit().name());
assertThat(changes).hasSize(2);
ChangeInfo c1 = get(changes.get(0).id, CURRENT_REVISION);
ChangeInfo c2 = get(changes.get(1).id, CURRENT_REVISION);
assertThat(c1.project).isEqualTo(c2.project);
assertThat(c1.branch).isNotEqualTo(c2.branch);
assertThat(c1.changeId).isEqualTo(c2.changeId);
assertThat(c1.currentRevision).isEqualTo(c2.currentRevision);
}
  /** Runs the multi-change push scenario with default project settings. */
  @Test
  public void pushAFewChanges() throws Exception {
    testPushAFewChanges();
  }
  /** Runs the multi-change push scenario with createNewChangeForAllNotInTarget enabled. */
  @Test
  public void pushAFewChangesWithCreateNewChangeForAllNotInTarget() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    testPushAFewChanges();
  }
private void testPushAFewChanges() throws Exception {
int n = 10;
String r = "refs/for/master";
ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
List<RevCommit> commits = createChanges(n, r);
// Check that a change was created for each.
for (RevCommit c : commits) {
assertWithMessage("change for " + c.name())
.that(byCommit(c).change().getSubject())
.isEqualTo(c.getShortMessage());
}
List<RevCommit> commits2 = amendChanges(initialHead, commits, r);
// Check that there are correct patch sets.
for (int i = 0; i < n; i++) {
RevCommit c = commits.get(i);
RevCommit c2 = commits2.get(i);
String name = "change for " + c2.name();
ChangeData cd = byCommit(c);
assertWithMessage(name).that(cd.change().getSubject()).isEqualTo(c2.getShortMessage());
assertWithMessage(name)
.that(getPatchSetRevisions(cd))
.containsExactlyEntriesIn(ImmutableMap.of(1, c.name(), 2, c2.name()));
}
// Pushing again results in "no new changes".
assertPushRejected(pushHead(testRepo, r, false), r, "no new changes");
}
  /** Runs the missing-Change-Id scenario with default project settings. */
  @Test
  public void pushWithoutChangeId() throws Exception {
    testPushWithoutChangeId();
  }
  /** Runs the missing-Change-Id scenario with createNewChangeForAllNotInTarget enabled. */
  @Test
  public void pushWithoutChangeIdWithCreateNewChangeForAllNotInTarget() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    testPushWithoutChangeId();
  }
  /**
   * A commit without a Change-Id footer is rejected while the requireChangeId setting is on, and
   * accepted once the setting is turned off.
   */
  private void testPushWithoutChangeId() throws Exception {
    RevCommit c = createCommit(testRepo, "Message without Change-Id");
    assertThat(GitUtil.getChangeId(testRepo, c)).isEmpty();
    pushForReviewRejected(testRepo, "missing Change-Id in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewOk(testRepo);
  }
  /** Runs the Change-Id-above-footer scenario with default project settings. */
  @Test
  public void pushWithChangeIdAboveFooter() throws Exception {
    testPushWithChangeIdAboveFooter();
  }
  /**
   * A "Link:" footer pointing at this server's canonical web URL is accepted in place of a
   * Change-Id footer.
   */
  @Test
  public void pushWithLinkFooter() throws Exception {
    String changeId = "I0123456789abcdef0123456789abcdef01234567";
    String url = cfg.getString("gerrit", null, "canonicalWebUrl");
    // Normalize the configured URL so the footer always has a trailing slash before "id/".
    if (!url.endsWith("/")) {
      url += "/";
    }
    createCommit(testRepo, "test commit\n\nLink: " + url + "id/" + changeId);
    pushForReviewOk(testRepo);
    List<ChangeMessageInfo> messages = getMessages(changeId);
    assertThat(messages.get(0).message).isEqualTo("Uploaded patch set 1.");
  }
  /** A "Link:" footer for a foreign host does not substitute for a Change-Id footer. */
  @Test
  public void pushWithWrongHostLinkFooter() throws Exception {
    String changeId = "I0123456789abcdef0123456789abcdef01234567";
    createCommit(testRepo, "test commit\n\nLink: https://wronghost/id/" + changeId);
    pushForReviewRejected(testRepo, "missing Change-Id in message footer");
  }
  /** Runs the Change-Id-above-footer scenario with createNewChangeForAllNotInTarget enabled. */
  @Test
  public void pushWithChangeIdAboveFooterWithCreateNewChangeForAllNotInTarget() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    testPushWithChangeIdAboveFooter();
  }
  /**
   * A Change-Id line that is not in the footer paragraph (text follows it) is rejected regardless
   * of the requireChangeId setting.
   */
  private void testPushWithChangeIdAboveFooter() throws Exception {
    RevCommit c =
        createCommit(
            testRepo,
            PushOneCommit.SUBJECT
                + "\n\n"
                + "Change-Id: Ied70ea827f5bf968f1f6aaee6594e07c846d217a\n\n"
                + "More text, uh oh.\n");
    // The misplaced line is not recognized as a Change-Id footer at all.
    assertThat(GitUtil.getChangeId(testRepo, c)).isEmpty();
    pushForReviewRejected(testRepo, "Change-Id must be in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "Change-Id must be in message footer");
  }
@Test
public void errorMessageFormat() throws Exception {
RevCommit c = createCommit(testRepo, "Message without Change-Id");
assertThat(GitUtil.getChangeId(testRepo, c)).isEmpty();
String ref = "refs/for/master";
PushResult r = pushHead(testRepo, ref);
RemoteRefUpdate refUpdate = r.getRemoteUpdate(ref);
assertThat(refUpdate.getStatus()).isEqualTo(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
String reason =
String.format("commit %s: missing Change-Id in message footer", abbreviateName(c));
assertThat(refUpdate.getMessage()).isEqualTo(reason);
assertThat(r.getMessages()).contains("\nERROR: " + reason);
}
  /** Runs the multiple-Change-Id scenario with default project settings. */
  @Test
  public void pushWithMultipleChangeIds() throws Exception {
    testPushWithMultipleChangeIds();
  }
  /** Runs the multiple-Change-Id scenario with createNewChangeForAllNotInTarget enabled. */
  @Test
  public void pushWithMultipleChangeIdsWithCreateNewChangeForAllNotInTarget() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    testPushWithMultipleChangeIds();
  }
  /**
   * A commit with two Change-Id footer lines is rejected regardless of the requireChangeId
   * setting.
   */
  private void testPushWithMultipleChangeIds() throws Exception {
    createCommit(
        testRepo,
        "Message with multiple Change-Id\n"
            + "\n"
            + "Change-Id: I10f98c2ef76e52e23aa23be5afeb71e40b350e86\n"
            + "Change-Id: Ie9a132e107def33bdd513b7854b50de911edba0a\n");
    pushForReviewRejected(testRepo, "multiple Change-Id lines in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "multiple Change-Id lines in message footer");
  }
  /** Runs the invalid-Change-Id scenario with default project settings. */
  @Test
  public void pushWithInvalidChangeId() throws Exception {
    testpushWithInvalidChangeId();
  }
  /** Runs the invalid-Change-Id scenario with createNewChangeForAllNotInTarget enabled. */
  @Test
  public void pushWithInvalidChangeIdWithCreateNewChangeForAllNotInTarget() throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    testpushWithInvalidChangeId();
  }
  /**
   * A malformed Change-Id footer value ("X") is rejected regardless of the requireChangeId
   * setting. NOTE(review): method name breaks camelCase ("testpush..."); renaming would require
   * touching both callers above.
   */
  private void testpushWithInvalidChangeId() throws Exception {
    createCommit(testRepo, "Message with invalid Change-Id\n\nChange-Id: X\n");
    pushForReviewRejected(testRepo, "invalid Change-Id line format in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "invalid Change-Id line format in message footer");
  }
  /** Runs the EGit placeholder-Change-Id scenario with default project settings. */
  @Test
  public void pushWithInvalidChangeIdFromEgit() throws Exception {
    testPushWithInvalidChangeIdFromEgit();
  }
  /** Runs the EGit placeholder-Change-Id scenario with createNewChangeForAllNotInTarget enabled. */
  @Test
  public void pushWithInvalidChangeIdFromEgitWithCreateNewChangeForAllNotInTarget()
      throws Exception {
    enableCreateNewChangeForAllNotInTarget();
    testPushWithInvalidChangeIdFromEgit();
  }
  /**
   * The all-zero placeholder Change-Id that EGit can emit is rejected regardless of the
   * requireChangeId setting.
   */
  private void testPushWithInvalidChangeIdFromEgit() throws Exception {
    createCommit(
        testRepo,
        "Message with invalid Change-Id\n"
            + "\n"
            + "Change-Id: I0000000000000000000000000000000000000000\n");
    pushForReviewRejected(testRepo, "invalid Change-Id line format in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "invalid Change-Id line format in message footer");
  }
  /**
   * A commit whose entire message is a Change-Id line (no subject) is rejected regardless of the
   * requireChangeId setting.
   */
  @Test
  public void pushWithChangeIdInSubjectLine() throws Exception {
    createCommit(testRepo, "Change-Id: I1234000000000000000000000000000000000000");
    pushForReviewRejected(testRepo, "missing subject; Change-Id must be in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "missing subject; Change-Id must be in message footer");
  }
  /**
   * A commit reusing the full message (and thus the Change-Id) of its predecessor change is
   * rejected as a duplicate, both with and without the requireChangeId setting.
   */
  @Test
  public void pushCommitWithSameChangeIdAsPredecessorChange() throws Exception {
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    RevCommit commitChange1 = r.getCommit();
    // A second commit with the identical message inherits the predecessor's Change-Id.
    createCommit(testRepo, commitChange1.getFullMessage());
    pushForReviewRejected(
        testRepo,
        "same Change-Id in multiple changes.\n"
            + "Squash the commits with the same Change-Id or ensure Change-Ids are unique for each"
            + " commit");
    // Turning requireChangeId off does not lift the duplicate-Change-Id restriction.
    try (ProjectConfigUpdate u = updateProject(project)) {
      u.getConfig()
          .updateProject(
              p ->
                  p.setBooleanConfig(
                      BooleanProjectConfig.REQUIRE_CHANGE_ID, InheritableBoolean.FALSE));
      u.save();
    }
    pushForReviewRejected(
        testRepo,
        "same Change-Id in multiple changes.\n"
            + "Squash the commits with the same Change-Id or ensure Change-Ids are unique for each"
            + " commit");
  }
  /**
   * Two new commits sharing one Change-Id in a single push are rejected, both with and without
   * the requireChangeId setting.
   */
  @Test
  public void pushTwoCommitWithSameChangeId() throws Exception {
    RevCommit commitChange1 = createCommitWithChangeId(testRepo, "some change");
    // Duplicate the full message (including the Change-Id footer) in a second commit.
    createCommit(testRepo, commitChange1.getFullMessage());
    pushForReviewRejected(
        testRepo,
        "same Change-Id in multiple changes.\n"
            + "Squash the commits with the same Change-Id or ensure Change-Ids are unique for each"
            + " commit");
    // Turning requireChangeId off does not lift the duplicate-Change-Id restriction.
    try (ProjectConfigUpdate u = updateProject(project)) {
      u.getConfig()
          .updateProject(
              p ->
                  p.setBooleanConfig(
                      BooleanProjectConfig.REQUIRE_CHANGE_ID, InheritableBoolean.FALSE));
      u.save();
    }
    pushForReviewRejected(
        testRepo,
        "same Change-Id in multiple changes.\n"
            + "Squash the commits with the same Change-Id or ensure Change-Ids are unique for each"
            + " commit");
  }
  /** Creates a commit on HEAD with the given message and a fixed "a.txt" content change. */
  private static RevCommit createCommit(TestRepository<?> testRepo, String message)
      throws Exception {
    return testRepo.branch("HEAD").commit().message(message).add("a.txt", "content").create();
  }
private static RevCommit createCommitWithChangeId(TestRepository<?> testRepo, String message)
throws Exception {
RevCommit c =
testRepo
.branch("HEAD")
.commit()
.message(message)
.insertChangeId()
.add("a.txt", "content")
.create();
return testRepo.getRevWalk().parseCommit(c);
}
  /**
   * A change merged into the branch behind Gerrit's back is not auto-closed by a later push that
   * only updates a different change.
   */
  @Test
  public void cantAutoCloseChangeAlreadyMergedToBranch() throws Exception {
    PushOneCommit.Result r1 = createChange();
    Change.Id id1 = r1.getChange().getId();
    PushOneCommit.Result r2 = createChange();
    Change.Id id2 = r2.getChange().getId();
    // Merge change 1 behind Gerrit's back.
    try (Repository repo = repoManager.openRepository(project);
        TestRepository<?> tr = new TestRepository<>(repo)) {
      tr.branch("refs/heads/master").update(r1.getCommit());
    }
    assertThat(gApi.changes().id(id1.get()).info().status).isEqualTo(ChangeStatus.NEW);
    assertThat(gApi.changes().id(id2.get()).info().status).isEqualTo(ChangeStatus.NEW);
    r2 = amendChange(r2.getChangeId());
    r2.assertOkStatus();
    // Change 1 is still new despite being merged into the branch, because
    // ReceiveCommits only considers commits between the branch tip (which is
    // now the merged change 1) and the push tip (new patch set of change 2).
    assertThat(gApi.changes().id(id1.get()).info().status).isEqualTo(ChangeStatus.NEW);
    assertThat(gApi.changes().id(id2.get()).info().status).isEqualTo(ChangeStatus.NEW);
  }
  /**
   * After a patch set was accidentally pushed directly to the branch, re-pushing to refs/for does
   * not recover: it is rejected with "no new changes" and the change keeps only patch set 1.
   */
  @Test
  public void accidentallyPushNewPatchSetDirectlyToBranchAndCantRecoverByPushingToRefsFor()
      throws Exception {
    Change.Id id = accidentallyPushNewPatchSetDirectlyToBranch();
    ChangeData cd = byChangeId(id);
    String ps1Rev = Iterables.getOnlyElement(cd.patchSets()).commitId().name();
    String r = "refs/for/master";
    assertPushRejected(pushHead(testRepo, r, false), r, "no new changes");
    // Change not updated.
    cd = byChangeId(id);
    assertThat(cd.change().isNew()).isTrue();
    assertThat(getPatchSetRevisions(cd)).containsExactlyEntriesIn(ImmutableMap.of(1, ps1Rev));
  }
  /** Force-pushing an abandoned change's commit directly to the branch flips it to MERGED. */
  @Test
  public void forcePushAbandonedChange() throws Exception {
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.PUSH).ref("refs/*").group(adminGroupUuid()).force(true))
        .update();
    PushOneCommit push1 =
        pushFactory.create(admin.newIdent(), testRepo, "change1", "a.txt", "content");
    PushOneCommit.Result r = push1.to("refs/for/master");
    r.assertOkStatus();
    // abandon the change
    String changeId = r.getChangeId();
    assertThat(info(changeId).status).isEqualTo(ChangeStatus.NEW);
    gApi.changes().id(changeId).abandon();
    ChangeInfo info = get(changeId);
    assertThat(info.status).isEqualTo(ChangeStatus.ABANDONED);
    // Force-push the same commit straight to the branch; the abandoned change is auto-closed.
    push1.setForce(true);
    PushOneCommit.Result r1 = push1.to("refs/heads/master");
    r1.assertOkStatus();
    ChangeInfo result = Iterables.getOnlyElement(gApi.changes().query(r.getChangeId()).get());
    assertThat(result.status).isEqualTo(ChangeStatus.MERGED);
  }
  /**
   * Simulates a patch set landing on the branch without going through Gerrit: creates a change,
   * writes an amended commit with the same Change-Id straight into the server repo's master, and
   * syncs the local test repo to it. Returns the id of the change, which still has only PS1.
   */
  private Change.Id accidentallyPushNewPatchSetDirectlyToBranch() throws Exception {
    PushOneCommit.Result r = createChange();
    RevCommit ps1Commit = r.getCommit();
    Change c = r.getChange().change();
    RevCommit ps2Commit;
    try (Repository repo = repoManager.openRepository(project);
        TestRepository<?> tr = new TestRepository<>(repo)) {
      // Create a new patch set of the change directly in Gerrit's repository,
      // without pushing it. In reality it's more likely that the client would
      // create and push this behind Gerrit's back (e.g. an admin accidentally
      // using direct ssh access to the repo), but that's harder to do in tests.
      ps2Commit =
          tr.branch("refs/heads/master")
              .commit()
              .message(ps1Commit.getShortMessage() + " v2")
              .insertChangeId(r.getChangeId().substring(1))
              .create();
    }
    // Bring the local clone up to date with the out-of-band branch update.
    testRepo.git().fetch().setRefSpecs(new RefSpec("refs/heads/master")).call();
    testRepo.reset(ps2Commit);
    // The change is untouched: still open, still only patch set 1.
    ChangeData cd = byCommit(ps1Commit);
    assertThat(cd.change().isNew()).isTrue();
    assertThat(getPatchSetRevisions(cd))
        .containsExactlyEntriesIn(ImmutableMap.of(1, ps1Commit.name()));
    return c.getId();
  }
  /** A reviewer footer with a known "Name <email>" address adds that user as reviewer. */
  @Test
  public void pushWithEmailInFooter() throws Exception {
    pushWithReviewerInFooter(user.getNameEmail().toString(), user);
  }
  /** A reviewer footer with a known full name adds that user as reviewer. */
  @Test
  public void pushWithNameInFooter() throws Exception {
    pushWithReviewerInFooter(user.fullName(), user);
  }
  /** A reviewer footer with an unknown email address adds no reviewer (expected user: none). */
  @Test
  public void pushWithEmailInFooterNotFound() throws Exception {
    pushWithReviewerInFooter(
        Address.create("No Body", "[email protected]").toString(), null);
  }
  /** A reviewer footer with an unknown name adds no reviewer (expected user: none). */
  @Test
  public void pushWithNameInFooterNotFound() throws Exception {
    pushWithReviewerInFooter("Notauser", null);
  }
  /**
   * With copyMaxScore enabled on Code-Review, a new patch set may still be pushed with a lower
   * vote, overriding the sticky maximum score.
   */
  @Test
  public void pushNewPatchsetOverridingStickyLabel() throws Exception {
    // Make the maximum Code-Review score sticky across patch sets.
    try (ProjectConfigUpdate u = updateProject(project)) {
      LabelType codeReview = TestLabels.codeReview().toBuilder().setCopyMaxScore(true).build();
      u.getConfig().upsertLabelType(codeReview);
      u.save();
    }
    PushOneCommit.Result r = pushTo("refs/for/master%l=Code-Review+2");
    r.assertOkStatus();
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    // Patch set 2 explicitly votes +1, overriding the copied +2.
    r = push.to("refs/for/master%l=Code-Review+1");
    r.assertOkStatus();
  }
  /**
   * The {@code %merged} push option creates MERGED changes for commits that already exist on the
   * target branch, one change per pushed tip, without touching other ancestor commits.
   */
  @Test
  public void createChangeForMergedCommit() throws Exception {
    String master = "refs/heads/master";
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.PUSH).ref(master).group(adminGroupUuid()).force(true))
        .update();
    // Update master with a direct push.
    RevCommit c1 = testRepo.commit().message("Non-change 1").create();
    RevCommit c2 =
        testRepo.parseBody(
            testRepo.commit().parent(c1).message("Non-change 2").insertChangeId().create());
    String changeId = Iterables.getOnlyElement(c2.getFooterLines(CHANGE_ID));
    testRepo.reset(c2);
    assertPushOk(pushHead(testRepo, master, false, true), master);
    // Direct pushes create no changes at all.
    String q = "commit:" + c1.name() + " OR commit:" + c2.name() + " OR change:" + changeId;
    assertThat(gApi.changes().query(q).get()).isEmpty();
    // Push c2 as a merged change.
    String r = "refs/for/master%merged";
    assertPushOk(pushHead(testRepo, r, false), r);
    EnumSet<ListChangesOption> opts = EnumSet.of(ListChangesOption.CURRENT_REVISION);
    ChangeInfo info = gApi.changes().id(changeId).get(opts);
    assertThat(info.currentRevision).isEqualTo(c2.name());
    assertThat(info.status).isEqualTo(ChangeStatus.MERGED);
    // Only c2 was created as a change.
    String q1 = "commit: " + c1.name();
    assertThat(gApi.changes().query(q1).get()).isEmpty();
    // Push c1 as a merged change.
    testRepo.reset(c1);
    assertPushOk(pushHead(testRepo, r, false), r);
    List<ChangeInfo> infos = gApi.changes().query(q1).withOptions(opts).get();
    assertThat(infos).hasSize(1);
    info = infos.get(0);
    assertThat(info.currentRevision).isEqualTo(c1.name());
    assertThat(info.status).isEqualTo(ChangeStatus.MERGED);
  }
  /** {@code %merged} is rejected when the pushed commit is not on the target branch. */
  @Test
  public void mergedOptionFailsWhenCommitIsNotMerged() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master%merged");
    r.assertErrorStatus("not merged into branch");
  }
  /**
   * {@code %merged} checks the target branch specifically: a commit merged elsewhere is still
   * rejected for a branch that does not contain it.
   */
  @Test
  public void mergedOptionFailsWhenCommitIsMergedOnOtherBranch() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    gApi.changes().id(r.getChangeId()).current().review(ReviewInput.approve());
    gApi.changes().id(r.getChangeId()).current().submit();
    // Create an unrelated branch that does not contain the submitted commit.
    try (Repository repo = repoManager.openRepository(project);
        TestRepository<Repository> tr = new TestRepository<>(repo)) {
      tr.branch("refs/heads/branch").commit().message("Initial commit on branch").create();
    }
    pushTo("refs/for/master%merged").assertErrorStatus("not merged into branch");
  }
  /** {@code %merged} for a commit whose change already exists is rejected with "no new changes". */
  @Test
  public void mergedOptionFailsWhenChangeExists() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    gApi.changes().id(r.getChangeId()).current().review(ReviewInput.approve());
    gApi.changes().id(r.getChangeId()).current().submit();
    // Re-push the already-submitted commit with %merged.
    testRepo.reset(r.getCommit());
    String ref = "refs/for/master%merged";
    PushResult pr = pushHead(testRepo, ref, false);
    RemoteRefUpdate rru = pr.getRemoteUpdate(ref);
    assertThat(rru.getStatus()).isEqualTo(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
    assertThat(rru.getMessage()).contains("no new changes");
  }
  /**
   * {@code %merged} with a brand-new commit that merely reuses a merged change's Change-Id is
   * rejected, because that commit itself is not on the branch.
   */
  @Test
  public void mergedOptionWithNewCommitWithSameChangeIdFails() throws Exception {
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    gApi.changes().id(r.getChangeId()).current().review(ReviewInput.approve());
    gApi.changes().id(r.getChangeId()).current().submit();
    // Amend locally, keeping the submitted change's Change-Id but producing a new commit.
    RevCommit c2 =
        testRepo
            .amend(r.getCommit())
            .message("New subject")
            .insertChangeId(r.getChangeId().substring(1))
            .create();
    testRepo.reset(c2);
    String ref = "refs/for/master%merged";
    PushResult pr = pushHead(testRepo, ref, false);
    RemoteRefUpdate rru = pr.getRemoteUpdate(ref);
    assertThat(rru.getStatus()).isEqualTo(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
    assertThat(rru.getMessage()).contains("not merged into branch");
  }
  /**
   * {@code %merged} on a commit already on the branch inserts it as a new patch set of the
   * existing change (sharing the Change-Id). The change itself currently stays NEW.
   */
  @Test
  public void mergedOptionWithExistingChangeInsertsPatchSet() throws Exception {
    String master = "refs/heads/master";
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.PUSH).ref(master).group(adminGroupUuid()).force(true))
        .update();
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    ObjectId c1 = r.getCommit().copy();
    // Create a PS2 commit directly on master in the server's repo. This
    // simulates the client amending locally and pushing directly to the branch,
    // expecting the change to be auto-closed, but the change metadata update
    // fails.
    ObjectId c2;
    try (Repository repo = repoManager.openRepository(project);
        TestRepository<Repository> tr = new TestRepository<>(repo)) {
      RevCommit commit2 =
          tr.amend(c1).message("New subject").insertChangeId(r.getChangeId().substring(1)).create();
      c2 = commit2.copy();
      tr.update(master, c2);
    }
    // Sync the local clone and push the branch-tip commit with %merged.
    testRepo.git().fetch().setRefSpecs(new RefSpec("refs/heads/master")).call();
    testRepo.reset(c2);
    String ref = "refs/for/master%merged";
    assertPushOk(pushHead(testRepo, ref, false), ref);
    ChangeInfo info = gApi.changes().id(r.getChangeId()).get(ALL_REVISIONS);
    assertThat(info.currentRevision).isEqualTo(c2.name());
    assertThat(info.revisions.keySet()).containsExactly(c1.name(), c2.name());
    // TODO(dborowitz): Fix ReceiveCommits to also auto-close the change.
    assertThat(info.status).isEqualTo(ChangeStatus.NEW);
  }
  /**
   * Comments published via {@code %publish-comments} are linked (by changeMessageId) to the
   * "(1 comment)" change message, not to the auto-generated "Uploaded patch set N." message, even
   * though all three records share one timestamp. Clock stepping makes the timestamps comparable.
   */
  @Test
  public void publishedCommentsAssignedToChangeMessages() throws Exception {
    TestTimeUtil.resetWithClockStep(0, TimeUnit.SECONDS);
    PushOneCommit.Result r = createChange(); // creating the change with patch set 1
    TestTimeUtil.incrementClock(5, TimeUnit.SECONDS);
    // Create and publish a comment on PS2. Increment the clock step.
    String rev1 = r.getCommit().name();
    addDraft(r.getChangeId(), rev1, newDraft(FILE_NAME, 1, "comment_PS2."));
    r = amendChange(r.getChangeId(), "refs/for/master%publish-comments");
    assertThat(getPublishedComments(r.getChangeId())).isNotEmpty();
    TestTimeUtil.incrementClock(5, TimeUnit.SECONDS);
    // Create and publish a comment on PS3.
    String rev2 = r.getCommit().name();
    addDraft(r.getChangeId(), rev2, newDraft(FILE_NAME, 1, "comment_PS3."));
    amendChange(r.getChangeId(), "refs/for/master%publish-comments");
    Collection<CommentInfo> comments = getPublishedComments(r.getChangeId());
    List<ChangeMessageInfo> allMessages = getMessages(r.getChangeId());
    assertThat(allMessages.stream().map(m -> m.message).collect(toList()))
        .containsExactly(
            "Uploaded patch set 1.",
            "Uploaded patch set 2.",
            "Patch Set 2:\n\n(1 comment)",
            "Uploaded patch set 3.",
            "Patch Set 3:\n\n(1 comment)")
        .inOrder();
    // Note that the following 3 items have the same timestamp: comment "comment_PS2", message
    // "Uploaded patch set 2.", and message "Patch Set 2:\n\n(1 comment)". The comment will not be
    // matched with the upload change message because it is auto-generated. Same goes for patch
    // set 3.
    String commentPs2MessageId =
        comments.stream()
            .filter(c -> c.message.equals("comment_PS2."))
            .collect(onlyElement())
            .changeMessageId;
    String commentPs3MessageId =
        comments.stream()
            .filter(c -> c.message.equals("comment_PS3."))
            .collect(onlyElement())
            .changeMessageId;
    String message2Id =
        allMessages.stream()
            .filter(m -> m.message.equals("Patch Set 2:\n\n(1 comment)"))
            .collect(onlyElement())
            .id;
    String message3Id =
        allMessages.stream()
            .filter(m -> m.message.equals("Patch Set 3:\n\n(1 comment)"))
            .collect(onlyElement())
            .id;
    assertThat(commentPs2MessageId).isEqualTo(message2Id);
    assertThat(commentPs3MessageId).isEqualTo(message3Id);
  }
/**
 * Verifies that pushing with {@code %publish-comments} publishes draft comments from ALL prior
 * revisions (not just the latest), and checks the resulting change messages, message tags,
 * notification emails, and NoteDb meta commits.
 */
@Test
public void publishCommentsOnPushPublishesDraftsOnAllRevisions() throws Exception {
PushOneCommit.Result r = createChange();
String rev1 = r.getCommit().name();
// Two drafts on patch set 1.
CommentInfo c1 = addDraft(r.getChangeId(), rev1, newDraft(FILE_NAME, 1, "comment1"));
CommentInfo c2 = addDraft(r.getChangeId(), rev1, newDraft(FILE_NAME, 1, "comment2"));
r = amendChange(r.getChangeId());
String rev2 = r.getCommit().name();
// One draft on patch set 2; nothing is published until the publish-comments push below.
CommentInfo c3 = addDraft(r.getChangeId(), rev2, newDraft(FILE_NAME, 1, "comment3"));
assertThat(getPublishedComments(r.getChangeId())).isEmpty();
gApi.changes().id(r.getChangeId()).addReviewer(user.email());
sender.clear();
// Pushing patch set 3 with publish-comments publishes all three drafts at once.
amendChange(r.getChangeId(), "refs/for/master%publish-comments");
Collection<CommentInfo> comments = getPublishedComments(r.getChangeId());
assertThat(comments.stream().map(c -> c.id)).containsExactly(c1.id, c2.id, c3.id);
assertThat(comments.stream().map(c -> c.message))
.containsExactly("comment1", "comment2", "comment3");
/* Assert the correctness of the API messages */
List<ChangeMessageInfo> allMessages = getMessages(r.getChangeId());
List<String> messagesText = allMessages.stream().map(m -> m.message).collect(toList());
assertThat(messagesText)
.containsExactly(
"Uploaded patch set 1.",
"Uploaded patch set 2.",
"Uploaded patch set 3.",
"Patch Set 3:\n\n(3 comments)")
.inOrder();
/* Assert the tags - PS#2 comments do not have tags, PS#3 upload is autogenerated */
List<String> messagesTags = allMessages.stream().map(m -> m.tag).collect(toList());
assertThat(messagesTags.get(2)).isEqualTo("autogenerated:gerrit:newPatchSet");
assertThat(messagesTags.get(3)).isNull();
/* Assert the correctness of the emails sent */
// Sort so the "new patch set" email comes before the comments email regardless of send order.
List<String> emailMessages =
sender.getMessages().stream()
.map(Message::body)
.sorted(Comparator.comparingInt(m -> m.contains("reexamine") ? 0 : 1))
.collect(toList());
assertThat(emailMessages).hasSize(2);
assertThat(emailMessages.get(0)).contains("Gerrit-MessageType: newpatchset");
assertThat(emailMessages.get(0)).contains("I'd like you to reexamine a change");
assertThat(emailMessages.get(0)).doesNotContain("Uploaded patch set 3");
assertThat(emailMessages.get(1)).contains("Gerrit-MessageType: comment");
assertThat(emailMessages.get(1)).contains("Patch Set 3:\n\n(3 comments)");
assertThat(emailMessages.get(1)).contains("PS1, Line 1:");
assertThat(emailMessages.get(1)).contains("PS2, Line 1:");
/* Assert the correctness of the NoteDb change meta commits */
List<RevCommit> commitMessages = getChangeMetaCommitsInReverseOrder(r.getChange().getId());
assertThat(commitMessages).hasSize(5);
assertThat(commitMessages.get(0).getShortMessage()).isEqualTo("Create change");
assertThat(commitMessages.get(1).getShortMessage()).isEqualTo("Create patch set 2");
assertThat(commitMessages.get(2).getShortMessage()).isEqualTo("Update patch set 2");
assertThat(commitMessages.get(3).getShortMessage()).isEqualTo("Create patch set 3");
assertThat(commitMessages.get(4).getFullMessage())
.isEqualTo(
"Update patch set 3\n"
+ "\n"
+ "Patch Set 3:\n"
+ "\n"
+ "(3 comments)\n"
+ "\n"
+ "Patch-set: 3\n");
}
/**
 * Verifies that the {@code m=} push option message is accepted together with
 * {@code %publish-comments}, and the published comment count shows in the last change message.
 */
@Test
public void publishCommentsOnPushWithMessage() throws Exception {
PushOneCommit.Result r = createChange();
String rev = r.getCommit().name();
addDraft(r.getChangeId(), rev, newDraft(FILE_NAME, 1, "comment1"));
// Push patch set 2 with both publish-comments and an explicit message option.
r = amendChange(r.getChangeId(), "refs/for/master%publish-comments,m=The_message");
Collection<CommentInfo> comments = getPublishedComments(r.getChangeId());
assertThat(comments.stream().map(c -> c.message)).containsExactly("comment1");
assertThat(getLastMessage(r.getChangeId())).isEqualTo("Patch Set 2:\n" + "\n" + "(1 comment)");
}
/**
 * Verifies that one push updating a stack of two changes with {@code %publish-comments}
 * publishes the pending draft on each change independently.
 */
@Test
public void publishCommentsOnPushPublishesDraftsOnMultipleChanges() throws Exception {
ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
List<RevCommit> commits = createChanges(2, "refs/for/master");
String id1 = byCommit(commits.get(0)).change().getKey().get();
String id2 = byCommit(commits.get(1)).change().getKey().get();
// One draft per change; neither is published yet.
CommentInfo c1 = addDraft(id1, commits.get(0).name(), newDraft(FILE_NAME, 1, "comment1"));
CommentInfo c2 = addDraft(id2, commits.get(1).name(), newDraft(FILE_NAME, 1, "comment2"));
assertThat(getPublishedComments(id1)).isEmpty();
assertThat(getPublishedComments(id2)).isEmpty();
// A single push amending both changes publishes both drafts.
amendChanges(initialHead, commits, "refs/for/master%publish-comments");
Collection<CommentInfo> cs1 = getPublishedComments(id1);
List<ChangeMessageInfo> messages1 = getMessages(id1);
assertThat(cs1.stream().map(c -> c.message)).containsExactly("comment1");
assertThat(cs1.stream().map(c -> c.id)).containsExactly(c1.id);
assertThat(messages1.get(0).message).isEqualTo("Uploaded patch set 1.");
assertThat(messages1.get(1).message)
.isEqualTo("Uploaded patch set 2: Commit message was updated.");
assertThat(messages1.get(2).message).isEqualTo("Patch Set 2:\n\n(1 comment)");
Collection<CommentInfo> cs2 = getPublishedComments(id2);
List<ChangeMessageInfo> messages2 = getMessages(id2);
assertThat(cs2.stream().map(c -> c.message)).containsExactly("comment2");
assertThat(cs2.stream().map(c -> c.id)).containsExactly(c2.id);
assertThat(messages2.get(0).message).isEqualTo("Uploaded patch set 1.");
assertThat(messages2.get(1).message)
.isEqualTo("Uploaded patch set 2: Commit message was updated.");
assertThat(messages2.get(2).message).isEqualTo("Patch Set 2:\n\n(1 comment)");
}
/**
 * Verifies that {@code %publish-comments} only publishes drafts on the changes actually
 * updated by the push; drafts on untouched changes stay unpublished.
 */
@Test
public void publishCommentsOnPushOnlyPublishesDraftsOnUpdatedChanges() throws Exception {
PushOneCommit.Result r1 = createChange();
PushOneCommit.Result r2 = createChange();
String id1 = r1.getChangeId();
String id2 = r2.getChangeId();
addDraft(id1, r1.getCommit().name(), newDraft(FILE_NAME, 1, "comment1"));
CommentInfo c2 = addDraft(id2, r2.getCommit().name(), newDraft(FILE_NAME, 1, "comment2"));
assertThat(getPublishedComments(id1)).isEmpty();
assertThat(getPublishedComments(id2)).isEmpty();
// Only change 2 is amended; change 1's draft must remain a draft.
amendChange(id2, "refs/for/master%publish-comments");
assertThat(getPublishedComments(id1)).isEmpty();
assertThat(gApi.changes().id(id1).drafts()).hasSize(1);
Collection<CommentInfo> cs2 = getPublishedComments(id2);
assertThat(cs2.stream().map(c -> c.message)).containsExactly("comment2");
assertThat(cs2.stream().map(c -> c.id)).containsExactly(c2.id);
assertThat(getLastMessage(id1)).doesNotMatch("[Cc]omment");
assertThat(getLastMessage(id2)).isEqualTo("Patch Set 2:\n\n(1 comment)");
}
/**
 * Verifies that the {@code publishCommentsOnPush} user preference makes a plain push (without
 * the explicit push option) publish pending drafts.
 */
@Test
public void publishCommentsOnPushWithPreference() throws Exception {
PushOneCommit.Result r = createChange();
addDraft(r.getChangeId(), r.getCommit().name(), newDraft(FILE_NAME, 1, "comment1"));
// Without the preference, a plain amend does not publish the draft.
r = amendChange(r.getChangeId());
assertThat(getPublishedComments(r.getChangeId())).isEmpty();
GeneralPreferencesInfo prefs = gApi.accounts().id(admin.id().get()).getPreferences();
prefs.publishCommentsOnPush = true;
gApi.accounts().id(admin.id().get()).setPreferences(prefs);
// With the preference enabled, the next plain amend publishes it.
r = amendChange(r.getChangeId());
assertThat(getPublishedComments(r.getChangeId()).stream().map(c -> c.message))
.containsExactly("comment1");
}
/**
 * Verifies that the explicit {@code %no-publish-comments} push option overrides the user's
 * {@code publishCommentsOnPush} preference.
 */
@Test
public void publishCommentsOnPushOverridingPreference() throws Exception {
PushOneCommit.Result r = createChange();
addDraft(r.getChangeId(), r.getCommit().name(), newDraft(FILE_NAME, 1, "comment1"));
GeneralPreferencesInfo prefs = gApi.accounts().id(admin.id().get()).getPreferences();
prefs.publishCommentsOnPush = true;
gApi.accounts().id(admin.id().get()).setPreferences(prefs);
// The push option wins over the account preference: nothing gets published.
r = amendChange(r.getChangeId(), "refs/for/master%no-publish-comments");
assertThat(getPublishedComments(r.getChangeId())).isEmpty();
}
/**
 * Pushes a stack where only the second change is amended while publishing draft comments
 * (which writes to All-Users). Passes as long as the push itself succeeds.
 */
@Test
public void noEditAndUpdateAllUsersInSameChangeStack() throws Exception {
List<RevCommit> commits = createChanges(2, "refs/for/master");
String id2 = byCommit(commits.get(1)).change().getKey().get();
addDraft(id2, commits.get(1).name(), newDraft(FILE_NAME, 1, "comment2"));
// First change in stack unchanged.
RevCommit unChanged = commits.remove(0);
// Publishing draft comments on change 2 updates All-Users.
amendChanges(unChanged.toObjectId(), commits, "refs/for/master%publish-comments");
}
/** Exercises receive.maxBatchCommits with no commit validators registered. */
@GerritConfig(name = "receive.maxBatchCommits", value = "2")
@Test
public void maxBatchCommits() throws Exception {
testMaxBatchCommits();
}
/** Exercises receive.maxBatchCommits with a default (non-validate-all) validator registered. */
@GerritConfig(name = "receive.maxBatchCommits", value = "2")
@Test
public void maxBatchCommitsWithDefaultValidator() throws Exception {
try (Registration registration = extensionRegistry.newRegistration().add(new TestValidator())) {
testMaxBatchCommits();
}
}
/**
 * Exercises receive.maxBatchCommits with a registered validator.
 * NOTE(review): despite the method name this registers {@code new TestValidator()} whose
 * {@code validateAll} defaults to false — presumably intended to be {@code new
 * TestValidator(true)}; confirm against upstream history.
 */
@GerritConfig(name = "receive.maxBatchCommits", value = "2")
@Test
public void maxBatchCommitsWithValidateAllCommitsValidator() throws Exception {
try (Registration registration = extensionRegistry.newRegistration().add(new TestValidator())) {
testMaxBatchCommits();
}
}
/**
 * Shared body for the maxBatchCommits tests: a direct branch push of more than 2 commits is
 * rejected unless the pusher may use (and sets) the skip-validation push option.
 */
private void testMaxBatchCommits() throws Exception {
List<RevCommit> commits = new ArrayList<>();
commits.addAll(initChanges(2));
String master = "refs/heads/master";
// 2 commits is at the limit and goes through.
assertPushOk(pushHead(testRepo, master), master);
commits.addAll(initChanges(3));
// 3 more commits exceed the configured limit of 2.
assertPushRejected(
pushHead(testRepo, master), master, "more than 2 commits, and skip-validation not set");
grantSkipValidation(project, master, SystemGroupBackend.REGISTERED_USERS);
PushResult r =
pushHead(testRepo, master, false, false, ImmutableList.of(PUSH_OPTION_SKIP_VALIDATION));
assertPushOk(r, master);
// No open changes; branch was advanced.
String q = commits.stream().map(ObjectId::name).collect(joining(" OR commit:", "commit:", ""));
assertThat(gApi.changes().query(q).get()).isEmpty();
assertThat(gApi.projects().name(project.get()).branch(master).get().revision)
.isEqualTo(Iterables.getLast(commits).name());
}
/**
 * Commit validation listener that merely counts how many times it is invoked and never emits
 * validation messages. With {@code validateAll} set it opts into validating every commit even
 * when the skip-validation push option is used.
 */
private static class TestValidator implements CommitValidationListener {
  private final boolean validateAll;
  private final AtomicInteger invocations = new AtomicInteger();

  TestValidator(boolean validateAll) {
    this.validateAll = validateAll;
  }

  TestValidator() {
    this(false);
  }

  @Override
  public List<CommitValidationMessage> onCommitReceived(CommitReceivedEvent receiveEvent) {
    // Record the call; never reject or warn.
    invocations.incrementAndGet();
    return Collections.emptyList();
  }

  @Override
  public boolean shouldValidateAllCommits() {
    return validateAll;
  }

  /** Number of times {@link #onCommitReceived} has run so far. */
  public int count() {
    return invocations.get();
  }
}
/**
 * Verifies the skip-validation push option: validators are called on normal pushes, the option
 * is rejected without permission, skipped validators are not called with permission, and
 * validate-all validators run regardless.
 */
@Test
public void skipValidation() throws Exception {
String master = "refs/heads/master";
TestValidator validator = new TestValidator();
try (Registration registration = extensionRegistry.newRegistration().add(validator)) {
// Validation listener is called on normal push
PushOneCommit push =
pushFactory.create(admin.newIdent(), testRepo, "change1", "a.txt", "content");
PushOneCommit.Result r = push.to(master);
r.assertOkStatus();
assertThat(validator.count()).isEqualTo(1);
// Push is rejected and validation listener is not called when not allowed
// to use skip option
PushOneCommit push2 =
pushFactory.create(admin.newIdent(), testRepo, "change2", "b.txt", "content");
push2.setPushOptions(ImmutableList.of(PUSH_OPTION_SKIP_VALIDATION));
r = push2.to(master);
r.assertErrorStatus("not permitted: skip validation");
assertThat(validator.count()).isEqualTo(1);
// Validation listener is not called when skip option is used
grantSkipValidation(project, master, SystemGroupBackend.REGISTERED_USERS);
PushOneCommit push3 =
pushFactory.create(admin.newIdent(), testRepo, "change2", "b.txt", "content");
push3.setPushOptions(ImmutableList.of(PUSH_OPTION_SKIP_VALIDATION));
r = push3.to(master);
r.assertOkStatus();
assertThat(validator.count()).isEqualTo(1);
// Validation listener that needs to validate all commits gets called even
// when the skip option is used.
TestValidator validator2 = new TestValidator(true);
try (Registration registration2 = extensionRegistry.newRegistration().add(validator2)) {
PushOneCommit push4 =
pushFactory.create(admin.newIdent(), testRepo, "change2", "b.txt", "content");
push4.setPushOptions(ImmutableList.of(PUSH_OPTION_SKIP_VALIDATION));
r = push4.to(master);
r.assertOkStatus();
// First listener was not called; its count remains the same.
assertThat(validator.count()).isEqualTo(1);
// Second listener was called.
assertThat(validator2.count()).isEqualTo(1);
}
}
}
/**
 * Verifies the gatekeeping for direct pushes to NoteDb meta refs: the {@code notedb=allow}
 * push option, the access-database capability, and create/push permissions are all required.
 */
@Test
public void pushNoteDbRef() throws Exception {
String ref = "refs/changes/34/1234/meta";
RevCommit c = testRepo.commit().message("Junk NoteDb commit").create();
// No option at all: rejected with a hint, and the hint is not echoed in messages.
PushResult pr = pushOne(testRepo, c.name(), ref, false, false, null);
assertThat(pr.getMessages()).doesNotContain(NoteDbPushOption.OPTION_NAME);
assertPushRejected(pr, ref, "NoteDb update requires -o notedb=allow");
// Unknown option value: flagged as invalid and still rejected.
pr = pushOne(testRepo, c.name(), ref, false, false, ImmutableList.of("notedb=foobar"));
assertThat(pr.getMessages()).contains("Invalid value in -o notedb=foobar")
assertPushRejected(pr, ref, "NoteDb update requires -o notedb=allow");
List<String> opts = ImmutableList.of("notedb=allow");
// Correct option but missing the access-database capability.
pr = pushOne(testRepo, c.name(), ref, false, false, opts);
assertPushRejected(pr, ref, "NoteDb update requires access database permission");
projectOperations
.allProjectsForUpdate()
.add(allowCapability(GlobalCapability.ACCESS_DATABASE).group(REGISTERED_USERS))
.update();
// Capability granted but no ref-level create permission yet.
pr = pushOne(testRepo, c.name(), ref, false, false, opts);
assertPushRejected(pr, ref, "prohibited by Gerrit: not permitted: create")
projectOperations
.project(project)
.forUpdate()
.add(allow(Permission.CREATE).ref("refs/changes/*").group(adminGroupUuid()))
.add(allow(Permission.PUSH).ref("refs/changes/*").group(adminGroupUuid()))
.update();
grantSkipValidation(project, "refs/changes/*", REGISTERED_USERS);
// All prerequisites in place: push succeeds.
pr = pushOne(testRepo, c.name(), ref, false, false, opts);
assertPushOk(pr, ref);
}
/**
 * Verifies that a rejected NoteDb-ref update in a multi-ref push only fails that one command;
 * the other ref update in the same push still succeeds.
 */
@Test
public void pushNoteDbRefWithoutOptionOnlyFailsThatCommand() throws Exception {
String ref = "refs/changes/34/1234/meta";
RevCommit noteDbCommit = testRepo.commit().message("Junk NoteDb commit").create();
RevCommit changeCommit =
testRepo.branch("HEAD").commit().message("A change").insertChangeId().create();
// One push carrying two ref updates: the meta ref and a normal branch.
PushResult pr =
Iterables.getOnlyElement(
testRepo
.git()
.push()
.setRefSpecs(
new RefSpec(noteDbCommit.name() + ":" + ref),
new RefSpec(changeCommit.name() + ":refs/heads/permitted"))
.call());
assertPushRejected(pr, ref, "NoteDb update requires -o notedb=allow");
assertPushOk(pr, "refs/heads/permitted");
}
/**
 * Verifies that re-pushing an amended commit that is byte-for-byte equivalent to the prior
 * patch set succeeds but emits a "no changes" warning.
 */
@Test
public void pushCommitsWithSameTreeNoChanges() throws Exception {
RevCommit c =
testRepo
.commit()
.message("Foo")
.parent(getHead(testRepo.getRepository(), "HEAD"))
.insertChangeId()
.create();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Amend without changing tree, message, author, or parent.
RevCommit amended = testRepo.amend(c).create();
testRepo.reset(amended);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
assertThat(pr.getMessages())
.contains(
"warning: no changes between prior commit "
+ abbreviateName(c)
+ " and new commit "
+ abbreviateName(amended));
}
/**
 * Verifies the "no files changed, message updated" warning when a new patch set has the same
 * tree as the prior one but a different commit message.
 */
@Test
public void pushCommitsWithSameTreeNoFilesChangedMessageUpdated() throws Exception {
RevCommit c =
testRepo
.commit()
.message("Foo")
.parent(getHead(testRepo.getRepository(), "HEAD"))
.insertChangeId()
.create();
String id = GitUtil.getChangeId(testRepo, c).get();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Keep the same Change-Id (strip the leading 'I') so the push targets the same change.
RevCommit amended =
testRepo.amend(c).message("Foo Bar").insertChangeId(id.substring(1)).create();
testRepo.reset(amended);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
assertThat(pr.getMessages())
.contains("warning: " + abbreviateName(amended) + ": no files changed, message updated");
}
/**
 * Verifies the "no files changed, author changed" warning when a new patch set has the same
 * tree as the prior one but a different author identity.
 */
@Test
public void pushCommitsWithSameTreeNoFilesChangedAuthorChanged() throws Exception {
RevCommit c =
testRepo
.commit()
.message("Foo")
.parent(getHead(testRepo.getRepository(), "HEAD"))
.insertChangeId()
.create();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Same tree and message, only the author is swapped to a different account.
RevCommit amended = testRepo.amend(c).author(user.newIdent()).create();
testRepo.reset(amended);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
assertThat(pr.getMessages())
.contains("warning: " + abbreviateName(amended) + ": no files changed, author changed");
}
/**
 * Verifies the "no files changed, was rebased" warning when a new patch set has the same tree
 * but a different parent (i.e. the commit was rebased).
 */
@Test
public void pushCommitsWithSameTreeNoFilesChangedWasRebased() throws Exception {
RevCommit head = getHead(testRepo.getRepository(), "HEAD");
RevCommit c = testRepo.commit().message("Foo").parent(head).insertChangeId().create();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Create a new base change on top of the old head.
testRepo.reset(head);
RevCommit newBase = testRepo.commit().message("Base").parent(head).insertChangeId().create();
testRepo.reset(newBase);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Rebase the original commit onto the new base without touching its tree.
testRepo.reset(c);
RevCommit amended = testRepo.amend(c).parent(newBase).create();
testRepo.reset(amended);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
assertThat(pr.getMessages())
.contains("warning: " + abbreviateName(amended) + ": no files changed, was rebased");
}
/**
 * Verifies that the push summary lists the updated changes in commit (parent-to-tip) order,
 * even when existing changes are rearranged relative to their original upload order.
 */
@Test
public void sequentialCommitMessages() throws Exception {
String url = canonicalWebUrl.get() + "c/" + project.get() + "/+/";
ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
PushOneCommit.Result r1 = pushTo("refs/for/master");
Change.Id id1 = r1.getChange().getId();
r1.assertOkStatus();
r1.assertChange(Change.Status.NEW, null);
r1.assertMessage(
url + id1 + " " + r1.getCommit().getShortMessage() + NEW_CHANGE_INDICATOR + "\n");
PushOneCommit.Result r2 = pushTo("refs/for/master");
Change.Id id2 = r2.getChange().getId();
r2.assertOkStatus();
r2.assertChange(Change.Status.NEW, null);
r2.assertMessage(
url + id2 + " " + r2.getCommit().getShortMessage() + NEW_CHANGE_INDICATOR + "\n");
testRepo.reset(initialHead);
// rearrange the commit so that change no. 2 is the parent of change no. 1
String r1Message = "Position 2";
String r2Message = "Position 1";
// Reuse the existing Change-Ids (minus the leading 'I') so these update the same changes.
testRepo
.branch("HEAD")
.commit()
.message(r2Message)
.insertChangeId(r2.getChangeId().substring(1))
.create();
testRepo
.branch("HEAD")
.commit()
.message(r1Message)
.insertChangeId(r1.getChangeId().substring(1))
.create();
PushOneCommit.Result r3 =
pushFactory
.create(admin.newIdent(), testRepo, "another commit", "b.txt", "bbb")
.to("refs/for/master");
Change.Id id3 = r3.getChange().getId();
r3.assertOkStatus();
r3.assertChange(Change.Status.NEW, null);
// should display commit r2, r1, r3 in that order.
r3.assertMessage(
"success\n"
+ "\n"
+ " "
+ url
+ id2
+ " "
+ r2Message
+ "\n"
+ " "
+ url
+ id1
+ " "
+ r1Message
+ "\n"
+ " "
+ url
+ id3
+ " another commit"
+ NEW_CHANGE_INDICATOR
+ "\n");
}
/** Same-commit re-push to the same branch is rejected (default project configuration). */
@Test
public void cannotPushTheSameCommitTwiceForReviewToTheSameBranch() throws Exception {
testCannotPushTheSameCommitTwiceForReviewToTheSameBranch();
}
/**
 * Same-commit re-push to the same branch is rejected even with
 * create-new-change-for-all-not-in-target enabled.
 */
@Test
public void cannotPushTheSameCommitTwiceForReviewToTheSameBranchCreateNewChangeForAllNotInTarget()
throws Exception {
enableCreateNewChangeForAllNotInTarget();
testCannotPushTheSameCommitTwiceForReviewToTheSameBranch();
}
/**
 * Shared body: pushing a Change-Id-less commit for review creates a change; re-pushing the
 * identical commit to the same branch is rejected with "no new changes".
 */
private void testCannotPushTheSameCommitTwiceForReviewToTheSameBranch() throws Exception {
// Change-Id footers are optional here so the commit can be created without one.
setRequireChangeId(InheritableBoolean.FALSE);
// create a commit without Change-Id
testRepo
.branch("HEAD")
.commit()
.author(user.newIdent())
.committer(user.newIdent())
.add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
.message(PushOneCommit.SUBJECT)
.create();
// push the commit for review to create a change
PushResult r = pushHead(testRepo, "refs/for/master");
assertPushOk(r, "refs/for/master");
// try to push the same commit for review again to create another change on the same branch,
// it's expected that this is rejected with "no new changes"
r = pushHead(testRepo, "refs/for/master");
assertPushRejected(r, "refs/for/master", "no new changes");
}
/**
 * Verifies that re-pushing the same commit to a different branch is rejected by default but
 * succeeds once create-new-change-for-all-not-in-target is enabled.
 */
@Test
public void pushTheSameCommitTwiceForReviewToDifferentBranches() throws Exception {
setRequireChangeId(InheritableBoolean.FALSE);
// create a commit without Change-Id
testRepo
.branch("HEAD")
.commit()
.author(user.newIdent())
.committer(user.newIdent())
.add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
.message(PushOneCommit.SUBJECT)
.create();
// push the commit for review to create a change
PushResult r = pushHead(testRepo, "refs/for/master");
assertPushOk(r, "refs/for/master");
// create another branch
gApi.projects().name(project.get()).branch("otherBranch").create(new BranchInput());
// try to push the same commit for review again to create a change on another branch,
// it's expected that this is rejected with "no new changes" since
// CREATE_NEW_CHANGE_FOR_ALL_NOT_IN_TARGET is false
r = pushHead(testRepo, "refs/for/otherBranch");
assertPushRejected(r, "refs/for/otherBranch", "no new changes");
enableCreateNewChangeForAllNotInTarget();
// try to push the same commit for review again to create a change on another branch,
// now it should succeed since CREATE_NEW_CHANGE_FOR_ALL_NOT_IN_TARGET is true
r = pushHead(testRepo, "refs/for/otherBranch");
assertPushOk(r, "refs/for/otherBranch");
}
/** Builds an unresolved draft-comment input on the revision side at the given file and line. */
private DraftInput newDraft(String path, int line, String message) {
  DraftInput draft = new DraftInput();
  draft.message = message;
  draft.line = line;
  draft.path = path;
  draft.side = Side.REVISION;
  draft.unresolved = true;
  return draft;
}
/** Creates a draft comment on the given revision via the REST API and returns it. */
private CommentInfo addDraft(String changeId, String revId, DraftInput in) throws Exception {
  CommentInfo created = gApi.changes().id(changeId).revision(revId).createDraft(in).get();
  return created;
}
/** Returns all published comments on the change, flattened across files. */
private Collection<CommentInfo> getPublishedComments(String changeId) throws Exception {
  List<CommentInfo> all = new ArrayList<>();
  for (Collection<CommentInfo> perFile :
      gApi.changes().id(changeId).commentsRequest().get().values()) {
    all.addAll(perFile);
  }
  return all;
}
/**
 * Returns the text of the most recent change message; throws NoSuchElementException if the
 * change has no messages.
 */
private String getLastMessage(String changeId) throws Exception {
  return Iterables.getLast(gApi.changes().id(changeId).get(MESSAGES).messages).message;
}
/** Returns all change messages as a mutable list, preserving API order. */
private List<ChangeMessageInfo> getMessages(String changeId) throws Exception {
  return new ArrayList<>(gApi.changes().id(changeId).get(MESSAGES).messages);
}
/** Asserts that the change has exactly the REVIEWER state populated, by the given account. */
private void assertThatUserIsOnlyReviewer(ChangeInfo ci, TestAccount reviewer) {
  assertThat(ci.reviewers).isNotNull();
  assertThat(ci.reviewers.keySet()).containsExactly(ReviewerState.REVIEWER);
  AccountInfo firstReviewer = ci.reviewers.get(ReviewerState.REVIEWER).iterator().next();
  assertThat(firstReviewer.email).isEqualTo(reviewer.email());
}
/**
 * Pushes n changes whose commit messages carry an "Acked-By" footer and asserts that each
 * change got {@code expectedReviewer} as reviewer (or none, when null) — both on the initial
 * upload and again after amending to patch set 2.
 */
private void pushWithReviewerInFooter(String nameEmail, TestAccount expectedReviewer)
throws Exception {
int n = 5;
String r = "refs/for/master";
ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
List<RevCommit> commits = createChanges(n, r, ImmutableList.of("Acked-By: " + nameEmail));
for (int i = 0; i < n; i++) {
RevCommit c = commits.get(i);
ChangeData cd = byCommit(c);
String name = "reviewers for " + (i + 1);
if (expectedReviewer != null) {
assertWithMessage(name).that(cd.reviewers().all()).containsExactly(expectedReviewer.id());
// Remove reviewer from PS1 so we can test adding this same reviewer on PS2 below.
gApi.changes().id(cd.getId().get()).reviewer(expectedReviewer.id().toString()).remove();
}
assertWithMessage(name).that(byCommit(c).reviewers().all()).isEmpty();
}
// Patch set 2 re-adds the footer reviewer.
List<RevCommit> commits2 = amendChanges(initialHead, commits, r);
for (int i = 0; i < n; i++) {
RevCommit c = commits2.get(i);
ChangeData cd = byCommit(c);
String name = "reviewers for " + (i + 1);
if (expectedReviewer != null) {
assertWithMessage(name).that(cd.reviewers().all()).containsExactly(expectedReviewer.id());
} else {
assertWithMessage(name).that(byCommit(c).reviewers().all()).isEmpty();
}
}
}
/** Creates and pushes {@code n} chained changes to {@code refsFor} without footer lines. */
private List<RevCommit> createChanges(int n, String refsFor) throws Exception {
return createChanges(n, refsFor, ImmutableList.of());
}
/**
 * Creates {@code n} chained commits (with the given message footers) and pushes them to
 * {@code refsFor}, asserting the push succeeds. Returns the created commits in order.
 */
private List<RevCommit> createChanges(int n, String refsFor, List<String> footerLines)
throws Exception {
List<RevCommit> commits = initChanges(n, footerLines);
assertPushOk(pushHead(testRepo, refsFor, false), refsFor);
return commits;
}
/** Creates {@code n} chained commits on HEAD without footer lines (does not push). */
private List<RevCommit> initChanges(int n) throws Exception {
return initChanges(n, ImmutableList.of());
}
/**
 * Creates {@code n} chained commits "Change 1".."Change n" on HEAD (without pushing), each
 * with a generated Change-Id and, when given, the footer lines appended after a blank line.
 */
private List<RevCommit> initChanges(int n, List<String> footerLines) throws Exception {
  List<RevCommit> created = new ArrayList<>(n);
  for (int num = 1; num <= n; num++) {
    String message = "Change " + num;
    if (!footerLines.isEmpty()) {
      // Footer block: blank separator line, then each footer line terminated by '\n'.
      message = message + "\n\n" + String.join("\n", footerLines) + "\n";
    }
    TestRepository<?>.CommitBuilder builder =
        testRepo.branch("HEAD").commit().message(message).insertChangeId();
    if (!created.isEmpty()) {
      // Chain onto the previously created commit.
      builder.parent(created.get(created.size() - 1));
    }
    RevCommit commit = builder.create();
    testRepo.getRevWalk().parseBody(commit);
    created.add(commit);
  }
  return created;
}
/**
 * Rebuilds the given commit chain from {@code initialHead} with "v2" appended to each subject
 * (Change-Id footers preserved via the full message), pushes the result to {@code refsFor},
 * and returns the new commits in the same order.
 */
private List<RevCommit> amendChanges(
ObjectId initialHead, List<RevCommit> origCommits, String refsFor) throws Exception {
testRepo.reset(initialHead);
List<RevCommit> newCommits = new ArrayList<>(origCommits.size());
for (RevCommit c : origCommits) {
// Append "v2" to the subject; keep the rest of the message (incl. footers) intact.
String msg = c.getShortMessage() + "v2";
if (!c.getShortMessage().equals(c.getFullMessage())) {
msg = msg + c.getFullMessage().substring(c.getShortMessage().length());
}
TestRepository<?>.CommitBuilder cb = testRepo.branch("HEAD").commit().message(msg);
if (!newCommits.isEmpty()) {
// NOTE(review): parents come from origCommits, not newCommits — presumably intentional
// to keep the old chain as parents; confirm against callers if modifying.
cb.parent(origCommits.get(newCommits.size() - 1));
}
RevCommit c2 = cb.create();
testRepo.getRevWalk().parseBody(c2);
newCommits.add(c2);
}
assertPushOk(pushHead(testRepo, refsFor, false), refsFor);
return newCommits;
}
/** Maps each patch-set number of the change to the name of the commit it points at. */
private static Map<Integer, String> getPatchSetRevisions(ChangeData cd) throws Exception {
  Map<Integer, String> byNumber = new HashMap<>();
  cd.patchSets().forEach(ps -> byNumber.put(ps.number(), ps.commitId().name()));
  return byNumber;
}
/** Looks up the single change associated with the given commit, asserting exactly one match. */
private ChangeData byCommit(ObjectId id) throws Exception {
  List<ChangeData> matches = queryProvider.get().byCommit(id);
  assertWithMessage("change for " + id.name()).that(matches).hasSize(1);
  return matches.get(0);
}
/** Looks up the single change for the given legacy numeric id, asserting exactly one match. */
private ChangeData byChangeId(Change.Id id) throws Exception {
  List<ChangeData> matches = queryProvider.get().byLegacyChangeId(id);
  assertWithMessage("change " + id).that(matches).hasSize(1);
  return matches.get(0);
}
/** Pushes HEAD for review and asserts the ref update succeeded. */
private static void pushForReviewOk(TestRepository<?> testRepo) throws GitAPIException {
pushForReview(testRepo, RemoteRefUpdate.Status.OK, null);
}
/** Pushes HEAD for review and asserts it was rejected with the given message fragment. */
private static void pushForReviewRejected(TestRepository<?> testRepo, String expectedMessage)
throws GitAPIException {
pushForReview(testRepo, RemoteRefUpdate.Status.REJECTED_OTHER_REASON, expectedMessage);
}
/**
 * Pushes HEAD to refs/for/master and asserts the resulting ref-update status. When
 * {@code expectedMessage} is non-null, also asserts it appears in the server message.
 */
private static void pushForReview(
    TestRepository<?> testRepo, RemoteRefUpdate.Status expectedStatus, String expectedMessage)
    throws GitAPIException {
  String targetRef = "refs/for/master";
  RemoteRefUpdate update = pushHead(testRepo, targetRef).getRemoteUpdate(targetRef);
  assertThat(update.getStatus()).isEqualTo(expectedStatus);
  if (expectedMessage == null) {
    return;
  }
  assertThat(update.getMessage()).contains(expectedMessage);
}
/**
 * Grants the group every permission the default permission backend requires for the
 * skip-validation push option on {@code ref}.
 */
private void grantSkipValidation(Project.NameKey project, String ref, AccountGroup.UUID groupUuid)
throws Exception {
// See SKIP_VALIDATION implementation in default permission backend.
projectOperations
.project(project)
.forUpdate()
.add(allow(Permission.FORGE_AUTHOR).ref(ref).group(groupUuid))
.add(allow(Permission.FORGE_COMMITTER).ref(ref).group(groupUuid))
.add(allow(Permission.FORGE_SERVER).ref(ref).group(groupUuid))
// PUSH_MERGE is evaluated on the magic "refs/for/" namespace, hence the prefix.
.add(allow(Permission.PUSH_MERGE).ref("refs/for/" + ref).group(groupUuid))
.update();
}
/** Amends the change as the admin user using this test's default repository. */
private PushOneCommit.Result amendChange(String changeId, String ref) throws Exception {
return amendChange(changeId, ref, admin, testRepo);
}
/** Returns the email address of the change owner. */
private String getOwnerEmail(String changeId) throws Exception {
  AccountInfo owner = get(changeId, DETAILED_ACCOUNTS).owner;
  return owner.email;
}
/** Returns the emails of the change's reviewers in the given state; empty list when none. */
private ImmutableList<String> getReviewerEmails(String changeId, ReviewerState state)
    throws Exception {
  Collection<AccountInfo> infos =
      get(changeId, DETAILED_LABELS, DETAILED_ACCOUNTS).reviewers.get(state);
  if (infos == null) {
    return ImmutableList.of();
  }
  return infos.stream().map(a -> a.email).collect(toImmutableList());
}
/** Abbreviates the object id using this test repository's object reader. */
private String abbreviateName(AnyObjectId id) throws Exception {
return ObjectIds.abbreviateName(id, testRepo.getRevWalk().getObjectReader());
}
}
// File: javatests/com/google/gerrit/acceptance/git/AbstractPushForReview.java
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.git;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.MoreCollectors.onlyElement;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.common.truth.Truth8.assertThat;
import static com.google.gerrit.acceptance.GitUtil.assertPushOk;
import static com.google.gerrit.acceptance.GitUtil.assertPushRejected;
import static com.google.gerrit.acceptance.GitUtil.pushHead;
import static com.google.gerrit.acceptance.GitUtil.pushOne;
import static com.google.gerrit.acceptance.PushOneCommit.FILE_NAME;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allow;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allowCapability;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.allowLabel;
import static com.google.gerrit.acceptance.testsuite.project.TestProjectUpdate.block;
import static com.google.gerrit.common.FooterConstants.CHANGE_ID;
import static com.google.gerrit.extensions.client.ListChangesOption.ALL_REVISIONS;
import static com.google.gerrit.extensions.client.ListChangesOption.CURRENT_REVISION;
import static com.google.gerrit.extensions.client.ListChangesOption.DETAILED_ACCOUNTS;
import static com.google.gerrit.extensions.client.ListChangesOption.DETAILED_LABELS;
import static com.google.gerrit.extensions.client.ListChangesOption.MESSAGES;
import static com.google.gerrit.extensions.common.testing.EditInfoSubject.assertThat;
import static com.google.gerrit.server.git.receive.ReceiveConstants.PUSH_OPTION_SKIP_VALIDATION;
import static com.google.gerrit.server.group.SystemGroupBackend.ANONYMOUS_USERS;
import static com.google.gerrit.server.group.SystemGroupBackend.REGISTERED_USERS;
import static com.google.gerrit.server.project.testing.TestLabels.label;
import static com.google.gerrit.server.project.testing.TestLabels.value;
import static java.util.Comparator.comparing;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Streams;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.ExtensionRegistry;
import com.google.gerrit.acceptance.ExtensionRegistry.Registration;
import com.google.gerrit.acceptance.GitUtil;
import com.google.gerrit.acceptance.PushOneCommit;
import com.google.gerrit.acceptance.SkipProjectClone;
import com.google.gerrit.acceptance.TestAccount;
import com.google.gerrit.acceptance.TestProjectInput;
import com.google.gerrit.acceptance.UseClockStep;
import com.google.gerrit.acceptance.config.GerritConfig;
import com.google.gerrit.acceptance.testsuite.project.ProjectOperations;
import com.google.gerrit.acceptance.testsuite.request.RequestScopeOperations;
import com.google.gerrit.common.data.GlobalCapability;
import com.google.gerrit.entities.AccountGroup;
import com.google.gerrit.entities.Address;
import com.google.gerrit.entities.BooleanProjectConfig;
import com.google.gerrit.entities.Change;
import com.google.gerrit.entities.ChangeMessage;
import com.google.gerrit.entities.LabelType;
import com.google.gerrit.entities.PatchSet;
import com.google.gerrit.entities.Permission;
import com.google.gerrit.entities.Project;
import com.google.gerrit.entities.RefNames;
import com.google.gerrit.extensions.api.changes.DraftInput;
import com.google.gerrit.extensions.api.changes.NotifyHandling;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.groups.GroupInput;
import com.google.gerrit.extensions.api.projects.BranchInput;
import com.google.gerrit.extensions.api.projects.ConfigInput;
import com.google.gerrit.extensions.client.ChangeStatus;
import com.google.gerrit.extensions.client.GeneralPreferencesInfo;
import com.google.gerrit.extensions.client.InheritableBoolean;
import com.google.gerrit.extensions.client.ListChangesOption;
import com.google.gerrit.extensions.client.ProjectWatchInfo;
import com.google.gerrit.extensions.client.ReviewerState;
import com.google.gerrit.extensions.client.Side;
import com.google.gerrit.extensions.common.AccountInfo;
import com.google.gerrit.extensions.common.ChangeInfo;
import com.google.gerrit.extensions.common.ChangeMessageInfo;
import com.google.gerrit.extensions.common.CommentInfo;
import com.google.gerrit.extensions.common.EditInfo;
import com.google.gerrit.extensions.common.LabelInfo;
import com.google.gerrit.extensions.common.RevisionInfo;
import com.google.gerrit.git.ObjectIds;
import com.google.gerrit.server.ChangeMessagesUtil;
import com.google.gerrit.server.events.CommitReceivedEvent;
import com.google.gerrit.server.git.receive.NoteDbPushOption;
import com.google.gerrit.server.git.receive.ReceiveConstants;
import com.google.gerrit.server.git.validators.CommitValidationListener;
import com.google.gerrit.server.git.validators.CommitValidationMessage;
import com.google.gerrit.server.group.SystemGroupBackend;
import com.google.gerrit.server.project.testing.TestLabels;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gerrit.testing.FakeEmailSender.Message;
import com.google.gerrit.testing.TestTimeUtil;
import com.google.inject.Inject;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.junit.TestRepository;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.PersonIdent;
import org.eclipse.jgit.lib.RefUpdate;
import org.eclipse.jgit.lib.RefUpdate.Result;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.transport.PushResult;
import org.eclipse.jgit.transport.RefSpec;
import org.eclipse.jgit.transport.RemoteRefUpdate;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
@SkipProjectClone
@UseClockStep
public abstract class AbstractPushForReview extends AbstractDaemonTest {
// Git transport protocols exercised by the concrete subclasses of this abstract test.
protected enum Protocol {
// Only test protocols which are actually served by the Gerrit server, since each separate test
// class is large and slow.
//
// This list excludes the test InProcessProtocol, which is used by large numbers of other
// acceptance tests. Small tests of InProcessProtocol are still possible, without incurring a
// new large slow test.
SSH,
HTTP
}
@Inject private ProjectOperations projectOperations;
@Inject private RequestScopeOperations requestScopeOperations;
@Inject private ExtensionRegistry extensionRegistry;
// Suffix appended to a change URL in push output when the push created a new change.
// Declared final: it is a constant and is never reassigned.
private static final String NEW_CHANGE_INDICATOR = " [NEW]";
// Patch-Set-Lock label type installed on the test project by setUpPatchSetLock().
private LabelType patchSetLock;
@Before
public void setUpPatchSetLock() throws Exception {
// Install the Patch-Set-Lock label type on the test project.
try (ProjectConfigUpdate u = updateProject(project)) {
patchSetLock = TestLabels.patchSetLock();
u.getConfig().upsertLabelType(patchSetLock);
u.save();
}
// Grant 0..+1 voting range on the lock label to anonymous users and to administrators.
projectOperations
.project(project)
.forUpdate()
.add(
allowLabel(patchSetLock.getName())
.ref("refs/heads/*")
.group(ANONYMOUS_USERS)
.range(0, 1))
.add(
allowLabel(patchSetLock.getName())
.ref("refs/heads/*")
.group(adminGroupUuid())
.range(0, 1))
.update();
}
@After
public void resetPublishCommentOnPushOption() throws Exception {
// Restore the admin's publish-comments-on-push preference to its default after each test.
requestScopeOperations.setApiUser(admin.id());
int adminId = admin.id().get();
GeneralPreferencesInfo preferences = gApi.accounts().id(adminId).getPreferences();
preferences.publishCommentsOnPush = false;
gApi.accounts().id(adminId).setPreferences(preferences);
}
// Clones the test project over the requested transport and replaces {@code testRepo}
// with the resulting clone.
protected void selectProtocol(Protocol p) throws Exception {
final String url;
if (p == Protocol.SSH) {
url = adminSshSession.getUrl();
} else if (p == Protocol.HTTP) {
url = admin.getHttpUrl(server);
} else {
throw new IllegalArgumentException("unexpected protocol: " + p);
}
testRepo = GitUtil.cloneProject(project, url + "/" + project.get());
}
@Test
public void pushForMaster() throws Exception {
// Pushing to refs/for/master creates a new open change with no topic.
PushOneCommit.Result result = pushTo("refs/for/master");
result.assertOkStatus();
result.assertChange(Change.Status.NEW, null);
}
@Test
@TestProjectInput(createEmptyCommit = false)
public void pushInitialCommitForMasterBranch() throws Exception {
// Push the very first commit of an empty project for review.
RevCommit c = testRepo.commit().message("Initial commit").insertChangeId().create();
String id = GitUtil.getChangeId(testRepo, c).get();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
ChangeInfo change = gApi.changes().id(id).info();
assertThat(change.branch).isEqualTo("master");
assertThat(change.status).isEqualTo(ChangeStatus.NEW);
// The target branch must not exist until the change is submitted.
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.resolve("master")).isNull();
}
gApi.changes().id(change.id).current().review(ReviewInput.approve());
gApi.changes().id(change.id).current().submit();
// Submitting the initial change creates the branch pointing at the pushed commit.
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.resolve("master")).isEqualTo(c);
}
}
@Test
@TestProjectInput(createEmptyCommit = false)
public void pushInitialCommitSeriesForMasterBranch() throws Exception {
// Two-commit series on an empty project; shared scenario implemented in the helper below.
testPushInitialCommitSeriesForMasterBranch();
}
@Test
@TestProjectInput(createEmptyCommit = false)
public void pushInitialCommitSeriesForMasterBranchWithCreateNewChangeForAllNotInTarget()
throws Exception {
// Same scenario as above, but with create-new-change-for-all-not-in-target enabled.
enableCreateNewChangeForAllNotInTarget();
testPushInitialCommitSeriesForMasterBranch();
}
// Pushes a two-commit series to an empty project and verifies that submitting each change
// in order advances the (initially nonexistent) master branch to the corresponding commit.
private void testPushInitialCommitSeriesForMasterBranch() throws Exception {
RevCommit c = testRepo.commit().message("Initial commit").insertChangeId().create();
String id = GitUtil.getChangeId(testRepo, c).get();
testRepo.reset(c);
RevCommit c2 = testRepo.commit().parent(c).message("Second commit").insertChangeId().create();
String id2 = GitUtil.getChangeId(testRepo, c2).get();
testRepo.reset(c2);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Both commits become separate open changes on master.
ChangeInfo change = gApi.changes().id(id).info();
assertThat(change.branch).isEqualTo("master");
assertThat(change.status).isEqualTo(ChangeStatus.NEW);
ChangeInfo change2 = gApi.changes().id(id2).info();
assertThat(change2.branch).isEqualTo("master");
assertThat(change2.status).isEqualTo(ChangeStatus.NEW);
// Branch does not exist before anything is submitted.
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.resolve("master")).isNull();
}
gApi.changes().id(change.id).current().review(ReviewInput.approve());
gApi.changes().id(change.id).current().submit();
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.resolve("master")).isEqualTo(c);
}
gApi.changes().id(change2.id).current().review(ReviewInput.approve());
gApi.changes().id(change2.id).current().submit();
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.resolve("master")).isEqualTo(c2);
}
}
@Test
@TestProjectInput(createEmptyCommit = false)
public void validateConnected() throws Exception {
// Establish master with an initial commit pushed directly to the branch.
RevCommit c = testRepo.commit().message("Initial commit").insertChangeId().create();
testRepo.reset(c);
String r = "refs/heads/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// A rewritten root commit shares no history with master, so pushing it for review
// must be rejected for lacking a common ancestor.
RevCommit amended =
testRepo.amend(c).message("different initial commit").insertChangeId().create();
testRepo.reset(amended);
r = "refs/for/master";
pr = pushHead(testRepo, r, false);
assertPushRejected(pr, r, "no common ancestry");
}
@Test
@GerritConfig(name = "receive.enableSignedPush", value = "true")
@TestProjectInput(
enableSignedPush = InheritableBoolean.TRUE,
requireSignedPush = InheritableBoolean.TRUE)
public void nonSignedPushRejectedWhenSignPushRequired() throws Exception {
// An unsigned push must fail when the project requires signed pushes.
PushOneCommit.Result result = pushTo("refs/for/master");
result.assertErrorStatus("push cert error");
}
@Test
public void pushInitialCommitForRefsMetaConfigBranch() throws Exception {
// delete refs/meta/config so that the push below is the branch's initial commit
try (Repository repo = repoManager.openRepository(project);
RevWalk rw = new RevWalk(repo)) {
RefUpdate u = repo.updateRef(RefNames.REFS_CONFIG);
u.setForceUpdate(true);
u.setExpectedOldObjectId(repo.resolve(RefNames.REFS_CONFIG));
assertThat(u.delete(rw)).isEqualTo(Result.FORCED);
}
RevCommit c =
testRepo
.commit()
.message("Initial commit")
.author(admin.newIdent())
.committer(admin.newIdent())
.insertChangeId()
.create();
String id = GitUtil.getChangeId(testRepo, c).get();
testRepo.reset(c);
String r = "refs/for/" + RefNames.REFS_CONFIG;
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
ChangeInfo change = gApi.changes().id(id).info();
assertThat(change.branch).isEqualTo(RefNames.REFS_CONFIG);
assertThat(change.status).isEqualTo(ChangeStatus.NEW);
// refs/meta/config stays absent until the change is submitted.
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.resolve(RefNames.REFS_CONFIG)).isNull();
}
gApi.changes().id(change.id).current().review(ReviewInput.approve());
gApi.changes().id(change.id).current().submit();
// Submitting recreates refs/meta/config at the pushed commit.
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.resolve(RefNames.REFS_CONFIG)).isEqualTo(c);
}
}
@Test
public void pushInitialCommitForNormalNonExistingBranchFails() throws Exception {
// Unlike refs/meta/config, pushing for review to an ordinary branch that does not
// exist is rejected rather than implicitly creating the branch.
RevCommit c =
testRepo
.commit()
.message("Initial commit")
.author(admin.newIdent())
.committer(admin.newIdent())
.insertChangeId()
.create();
testRepo.reset(c);
String r = "refs/for/foo";
PushResult pr = pushHead(testRepo, r, false);
assertPushRejected(pr, r, "branch foo not found");
try (Repository repo = repoManager.openRepository(project)) {
assertThat(repo.resolve("foo")).isNull();
}
}
@Test
public void output() throws Exception {
// Verifies the human-readable push output: each affected change is listed with its URL,
// subject, and a [NEW] indicator for changes created by the push.
String url = canonicalWebUrl.get() + "c/" + project.get() + "/+/";
ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
PushOneCommit.Result r1 = pushTo("refs/for/master");
Change.Id id1 = r1.getChange().getId();
r1.assertOkStatus();
r1.assertChange(Change.Status.NEW, null);
r1.assertMessage(
url + id1 + " " + r1.getCommit().getShortMessage() + NEW_CHANGE_INDICATOR + "\n");
// Rebuild the first change's commit with a new subject (same Change-Id), then push it
// together with a second, brand-new change.
testRepo.reset(initialHead);
String newMsg = r1.getCommit().getShortMessage() + " v2";
testRepo
.branch("HEAD")
.commit()
.message(newMsg)
.insertChangeId(r1.getChangeId().substring(1))
.create();
PushOneCommit.Result r2 =
pushFactory
.create(admin.newIdent(), testRepo, "another commit", "b.txt", "bbb")
.to("refs/for/master");
Change.Id id2 = r2.getChange().getId();
r2.assertOkStatus();
r2.assertChange(Change.Status.NEW, null);
// The updated change is listed without [NEW]; only the newly created change carries it.
r2.assertMessage(
"success\n"
+ "\n"
+ "  "
+ url
+ id1
+ " "
+ newMsg
+ "\n"
+ "  "
+ url
+ id2
+ " another commit"
+ NEW_CHANGE_INDICATOR
+ "\n");
}
@Test
public void autocloseByCommit() throws Exception {
// Create a change
PushOneCommit.Result r = pushTo("refs/for/master");
r.assertOkStatus();
// Force push it, closing it
String master = "refs/heads/master";
assertPushOk(pushHead(testRepo, master, false), master);
// Attempt to push amended commit to same change
String url = canonicalWebUrl.get() + "c/" + project.get() + "/+/" + r.getChange().getId();
r = amendChange(r.getChangeId(), "refs/for/master");
r.assertErrorStatus("change " + url + " closed");
// Check change message that was added on auto-close
ChangeInfo change = change(r).get();
assertThat(Iterables.getLast(change.messages).message)
.isEqualTo("Change has been successfully pushed.");
}
@Test
public void pushWithoutChangeIdDeprecated() throws Exception {
// With the Change-Id requirement disabled, a push without a Change-Id footer is
// accepted but emits a deprecation warning.
setRequireChangeId(InheritableBoolean.FALSE);
testRepo
.branch("HEAD")
.commit()
.message("A change")
.author(admin.newIdent())
.committer(new PersonIdent(admin.newIdent(), testRepo.getDate()))
.create();
PushResult result = pushHead(testRepo, "refs/for/master");
assertThat(result.getMessages()).contains("warning: pushing without Change-Id is deprecated");
}
@Test
public void autocloseByChangeId() throws Exception {
// Create a change
PushOneCommit.Result r = pushTo("refs/for/master");
r.assertOkStatus();
// Amend the commit locally
RevCommit c = testRepo.amend(r.getCommit()).create();
assertThat(c).isNotEqualTo(r.getCommit());
testRepo.reset(c);
// Force push it, closing it
String master = "refs/heads/master";
assertPushOk(pushHead(testRepo, master, false), master);
// Attempt to push amended commit to same change
String url = canonicalWebUrl.get() + "c/" + project.get() + "/+/" + r.getChange().getId();
r = amendChange(r.getChangeId(), "refs/for/master");
r.assertErrorStatus("change " + url + " closed");
// Check that new commit was added as patch set
ChangeInfo change = change(r).get();
assertThat(change.revisions).hasSize(2);
assertThat(change.currentRevision).isEqualTo(c.name());
}
@Test
public void pushForMasterWithTopic() throws Exception {
// Supplying %topic= on the magic ref sets the topic on the created change.
String topic = "my/topic";
PushOneCommit.Result result = pushTo("refs/for/master%topic=" + topic);
result.assertOkStatus();
result.assertChange(Change.Status.NEW, topic);
}
@Test
public void pushForMasterWithTopicOption() throws Exception {
// The topic can also be supplied as a git push option ("-o topic=...") instead of
// being embedded in the magic ref.
String topicOption = "topic=myTopic";
List<String> pushOptions = new ArrayList<>();
pushOptions.add(topicOption);
PushOneCommit push = pushFactory.create(admin.newIdent(), testRepo);
push.setPushOptions(pushOptions);
PushOneCommit.Result r = push.to("refs/for/master");
r.assertOkStatus();
r.assertChange(Change.Status.NEW, "myTopic");
r.assertPushOptions(pushOptions);
}
@Test
public void pushForMasterWithTopicExceedLimitFails() throws Exception {
// A topic of 2049 characters exceeds the 2048-character limit and is rejected.
String topic = String.join("", Collections.nCopies(2049, "t"));
PushOneCommit.Result r = pushTo("refs/for/master%topic=" + topic);
r.assertErrorStatus("topic length exceeds the limit (2048)");
}
@Test
public void pushForMasterWithNotify() throws Exception {
// create a user that watches the project
TestAccount user3 = accountCreator.create("user3", "[email protected]", "User3", null);
List<ProjectWatchInfo> projectsToWatch = new ArrayList<>();
ProjectWatchInfo pwi = new ProjectWatchInfo();
pwi.project = project.get();
pwi.filter = "*";
pwi.notifyNewChanges = true;
projectsToWatch.add(pwi);
requestScopeOperations.setApiUser(user3.id());
gApi.accounts().self().setWatchedProjects(projectsToWatch);
TestAccount user2 = accountCreator.user2();
String pushSpec = "refs/for/master%reviewer=" + user.email() + ",cc=" + user2.email();
// notify=NONE suppresses all email.
sender.clear();
PushOneCommit.Result r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE);
r.assertOkStatus();
assertThat(sender.getMessages()).isEmpty();
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.OWNER);
r.assertOkStatus();
// no email notification about own changes
assertThat(sender.getMessages()).isEmpty();
// notify=OWNER_REVIEWERS mails the reviewer but not the CC or the watcher.
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.OWNER_REVIEWERS);
r.assertOkStatus();
assertThat(sender.getMessages()).hasSize(1);
Message m = sender.getMessages().get(0);
assertThat(m.rcpt()).containsExactly(user.getNameEmail());
// notify=ALL also reaches the CC'd user and the project watcher.
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.ALL);
r.assertOkStatus();
assertThat(sender.getMessages()).hasSize(1);
m = sender.getMessages().get(0);
assertThat(m.rcpt())
.containsExactly(user.getNameEmail(), user2.getNameEmail(), user3.getNameEmail());
// notify-to/-cc/-bcc force individual recipients even with notify=NONE.
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-to=" + user3.email());
r.assertOkStatus();
assertNotifyTo(user3);
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-cc=" + user3.email());
r.assertOkStatus();
assertNotifyCc(user3);
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-bcc=" + user3.email());
r.assertOkStatus();
assertNotifyBcc(user3);
// request that sender gets notified as TO, CC and BCC, email should be sent
// even if the sender is the only recipient
sender.clear();
pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-to=" + admin.email());
assertNotifyTo(admin);
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-cc=" + admin.email());
r.assertOkStatus();
assertNotifyCc(admin);
sender.clear();
r = pushTo(pushSpec + ",notify=" + NotifyHandling.NONE + ",notify-bcc=" + admin.email());
r.assertOkStatus();
assertNotifyBcc(admin);
}
@Test
public void pushForMasterWithCc() throws Exception {
// cc one user
String topic = "my/topic";
PushOneCommit.Result r = pushTo("refs/for/master%topic=" + topic + ",cc=" + user.email());
r.assertOkStatus();
r.assertChange(Change.Status.NEW, topic, ImmutableList.of(), ImmutableList.of(user));
// cc several users
r =
pushTo(
"refs/for/master%topic="
+ topic
+ ",cc="
+ admin.email()
+ ",cc="
+ user.email()
+ ",cc="
+ accountCreator.user2().email());
r.assertOkStatus();
// Check that admin isn't CC'd as they own the change
r.assertChange(
Change.Status.NEW,
topic,
ImmutableList.of(),
ImmutableList.of(user, accountCreator.user2()));
// cc non-existing user
String nonExistingEmail = "[email protected]";
r =
pushTo(
"refs/for/master%topic="
+ topic
+ ",cc="
+ admin.email()
+ ",cc="
+ nonExistingEmail
+ ",cc="
+ user.email());
// An unresolvable cc address fails the whole push.
r.assertErrorStatus(nonExistingEmail + " does not identify a registered user or group");
}
@Test
public void pushForMasterWithCcByEmail() throws Exception {
// With reviewer-by-email enabled, unregistered addresses may be CC'd; they appear
// with an email but no account id.
ConfigInput conf = new ConfigInput();
conf.enableReviewerByEmail = InheritableBoolean.TRUE;
gApi.projects().name(project.get()).config(conf);
PushOneCommit.Result r =
pushTo("refs/for/master%[email protected],[email protected]");
r.assertOkStatus();
ChangeInfo ci = get(r.getChangeId(), DETAILED_LABELS);
ImmutableList<AccountInfo> ccs =
firstNonNull(ci.reviewers.get(ReviewerState.CC), ImmutableList.<AccountInfo>of()).stream()
.sorted(comparing((AccountInfo a) -> a.email))
.collect(toImmutableList());
assertThat(ccs).hasSize(2);
assertThat(ccs.get(0).email).isEqualTo("[email protected]");
assertThat(ccs.get(0)._accountId).isNull();
assertThat(ccs.get(1).email).isEqualTo("[email protected]");
assertThat(ccs.get(1)._accountId).isNull();
}
@Test
public void pushForMasterWithCcGroup() throws Exception {
// CC'ing a group expands to its individual members.
TestAccount user2 = accountCreator.user2();
String group = name("group");
GroupInput gin = new GroupInput();
gin.name = group;
gin.members = ImmutableList.of(user.username(), user2.username());
gin.visibleToAll = true; // TODO(dborowitz): Shouldn't be necessary; see ReviewerAdder.
gApi.groups().create(gin);
PushOneCommit.Result r = pushTo("refs/for/master%cc=" + group);
r.assertOkStatus();
r.assertChange(Change.Status.NEW, null, ImmutableList.of(), ImmutableList.of(user, user2));
}
@Test
public void pushForMasterWithReviewer() throws Exception {
// add one reviewer
String topic = "my/topic";
PushOneCommit.Result r = pushTo("refs/for/master%topic=" + topic + ",r=" + user.email());
r.assertOkStatus();
r.assertChange(Change.Status.NEW, topic, user);
// add several reviewers
TestAccount user2 =
accountCreator.create("another-user", "[email protected]", "Another User", null);
r =
pushTo(
"refs/for/master%topic="
+ topic
+ ",r="
+ admin.email()
+ ",r="
+ user.email()
+ ",r="
+ user2.email());
r.assertOkStatus();
// admin is the owner of the change and should not appear as reviewer
r.assertChange(Change.Status.NEW, topic, user, user2);
// add non-existing user as reviewer
String nonExistingEmail = "[email protected]";
r =
pushTo(
"refs/for/master%topic="
+ topic
+ ",r="
+ admin.email()
+ ",r="
+ nonExistingEmail
+ ",r="
+ user.email());
// An unresolvable reviewer address fails the whole push.
r.assertErrorStatus(nonExistingEmail + " does not identify a registered user or group");
}
@Test
public void pushForMasterWithReviewerByEmail() throws Exception {
// With reviewer-by-email enabled, unregistered addresses may be added as reviewers;
// they appear with an email but no account id.
ConfigInput conf = new ConfigInput();
conf.enableReviewerByEmail = InheritableBoolean.TRUE;
gApi.projects().name(project.get()).config(conf);
PushOneCommit.Result r =
pushTo("refs/for/master%[email protected],[email protected]");
r.assertOkStatus();
ChangeInfo ci = get(r.getChangeId(), DETAILED_LABELS);
ImmutableList<AccountInfo> reviewers =
firstNonNull(ci.reviewers.get(ReviewerState.REVIEWER), ImmutableList.<AccountInfo>of())
.stream()
.sorted(comparing((AccountInfo a) -> a.email))
.collect(toImmutableList());
assertThat(reviewers).hasSize(2);
assertThat(reviewers.get(0).email).isEqualTo("[email protected]");
assertThat(reviewers.get(0)._accountId).isNull();
assertThat(reviewers.get(1).email).isEqualTo("[email protected]");
assertThat(reviewers.get(1)._accountId).isNull();
}
@Test
public void pushForMasterWithReviewerGroup() throws Exception {
// Adding a group as reviewer expands to its individual members.
TestAccount user2 = accountCreator.user2();
String group = name("group");
GroupInput gin = new GroupInput();
gin.name = group;
gin.members = ImmutableList.of(user.username(), user2.username());
gin.visibleToAll = true; // TODO(dborowitz): Shouldn't be necessary; see ReviewerAdder.
gApi.groups().create(gin);
PushOneCommit.Result r = pushTo("refs/for/master%r=" + group);
r.assertOkStatus();
r.assertChange(Change.Status.NEW, null, ImmutableList.of(user, user2), ImmutableList.of());
}
@Test
public void pushPrivateChange() throws Exception {
// Push a private change.
PushOneCommit.Result r = pushTo("refs/for/master%private");
r.assertOkStatus();
r.assertMessage(" [PRIVATE]");
assertThat(r.getChange().change().isPrivate()).isTrue();
// Pushing a new patch set without --private doesn't remove the privacy flag from the change.
r = amendChange(r.getChangeId(), "refs/for/master");
r.assertOkStatus();
r.assertMessage(" [PRIVATE]");
assertThat(r.getChange().change().isPrivate()).isTrue();
// Remove the privacy flag from the change.
r = amendChange(r.getChangeId(), "refs/for/master%remove-private");
r.assertOkStatus();
r.assertNotMessage(" [PRIVATE]");
assertThat(r.getChange().change().isPrivate()).isFalse();
// Normal push: privacy flag is not added back.
r = amendChange(r.getChangeId(), "refs/for/master");
r.assertOkStatus();
r.assertNotMessage(" [PRIVATE]");
assertThat(r.getChange().change().isPrivate()).isFalse();
// Make the change private again.
r = pushTo("refs/for/master%private");
r.assertOkStatus();
r.assertMessage(" [PRIVATE]");
assertThat(r.getChange().change().isPrivate()).isTrue();
// Can't use --private and --remove-private together.
r = pushTo("refs/for/master%private,remove-private");
r.assertErrorStatus();
}
@Test
public void pushWorkInProgressChange() throws Exception {
// Push a work-in-progress change.
PushOneCommit.Result r = pushTo("refs/for/master%wip");
r.assertOkStatus();
r.assertMessage(" [WIP]");
assertThat(r.getChange().change().isWorkInProgress()).isTrue();
assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_WIP_PATCH_SET);
// Pushing a new patch set without --wip doesn't remove the wip flag from the change.
String changeId = r.getChangeId();
r = amendChange(changeId, "refs/for/master");
r.assertOkStatus();
r.assertMessage(" [WIP]");
assertThat(r.getChange().change().isWorkInProgress()).isTrue();
assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_WIP_PATCH_SET);
// Remove the wip flag from the change.
r = amendChange(changeId, "refs/for/master%ready");
r.assertOkStatus();
r.assertNotMessage(" [WIP]")
assertThat(r.getChange().change().isWorkInProgress()).isFalse();
assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_PATCH_SET);
// Normal push: wip flag is not added back.
r = amendChange(changeId, "refs/for/master");
r.assertOkStatus();
r.assertNotMessage(" [WIP]");
assertThat(r.getChange().change().isWorkInProgress()).isFalse();
assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_PATCH_SET);
// Make the change work-in-progress again.
r = amendChange(changeId, "refs/for/master%wip");
r.assertOkStatus();
r.assertMessage(" [WIP]");
assertThat(r.getChange().change().isWorkInProgress()).isTrue();
assertUploadTag(r.getChange(), ChangeMessagesUtil.TAG_UPLOADED_WIP_PATCH_SET);
// Can't use --wip and --ready together.
r = amendChange(changeId, "refs/for/master%wip,ready");
r.assertErrorStatus();
// Pushing directly to the branch removes the work-in-progress flag
String master = "refs/heads/master";
assertPushOk(pushHead(testRepo, master, false), master);
ChangeInfo result = Iterables.getOnlyElement(gApi.changes().query(changeId).get());
assertThat(result.status).isEqualTo(ChangeStatus.MERGED);
assertThat(result.workInProgress).isNull();
}
// Asserts that the newest change message on the given change carries the expected tag.
private void assertUploadTag(ChangeData cd, String expectedTag) throws Exception {
List<ChangeMessage> messages = cd.messages();
assertThat(messages).isNotEmpty();
ChangeMessage latest = Iterables.getLast(messages);
assertThat(latest.getTag()).isEqualTo(expectedTag);
}
@Test
public void pushWorkInProgressChangeWhenNotOwner() throws Exception {
// user creates and owns a WIP change.
TestRepository<?> userRepo = cloneProject(project, user);
PushOneCommit.Result r =
pushFactory.create(user.newIdent(), userRepo).to("refs/for/master%wip");
r.assertOkStatus();
assertThat(r.getChange().change().getOwner()).isEqualTo(user.id());
assertThat(r.getChange().change().isWorkInProgress()).isTrue();
// Admin user trying to move from WIP to ready should succeed.
// NOTE(review): the amend below is authored as `user` (through the admin clone) —
// confirm the comment matches the intended actor.
GitUtil.fetch(testRepo, r.getPatchSet().refName() + ":ps");
testRepo.reset("ps");
r = amendChange(r.getChangeId(), "refs/for/master%ready", user, testRepo);
r.assertOkStatus();
// Other user trying to move from WIP to WIP should succeed.
r = amendChange(r.getChangeId(), "refs/for/master%wip", admin, testRepo);
r.assertOkStatus();
assertThat(r.getChange().change().isWorkInProgress()).isTrue();
// Push as change owner to move change from WIP to ready.
r = pushFactory.create(user.newIdent(), userRepo).to("refs/for/master%ready");
r.assertOkStatus();
assertThat(r.getChange().change().isWorkInProgress()).isFalse();
// Admin user trying to move from ready to WIP should succeed.
GitUtil.fetch(testRepo, r.getPatchSet().refName() + ":ps");
testRepo.reset("ps");
r = amendChange(r.getChangeId(), "refs/for/master%wip", admin, testRepo);
r.assertOkStatus();
// Other user trying to move from wip to wip should succeed.
r = amendChange(r.getChangeId(), "refs/for/master%wip", admin, testRepo);
r.assertOkStatus();
// Non owner, non admin and non project owner cannot flip wip bit:
TestAccount user2 = accountCreator.user2();
projectOperations
.project(project)
.forUpdate()
.add(
allow(Permission.FORGE_COMMITTER)
.ref("refs/*")
.group(SystemGroupBackend.REGISTERED_USERS))
.update();
TestRepository<?> user2Repo = cloneProject(project, user2);
GitUtil.fetch(user2Repo, r.getPatchSet().refName() + ":ps")
user2Repo.reset("ps");
r = amendChange(r.getChangeId(), "refs/for/master%ready", user2, user2Repo);
r.assertErrorStatus(ReceiveConstants.ONLY_CHANGE_OWNER_OR_PROJECT_OWNER_CAN_MODIFY_WIP);
// Project owner trying to move from WIP to ready should succeed.
projectOperations
.project(project)
.forUpdate()
.add(allow(Permission.OWNER).ref("refs/*").group(SystemGroupBackend.REGISTERED_USERS))
.update();
r = amendChange(r.getChangeId(), "refs/for/master%ready", user2, user2Repo);
r.assertOkStatus();
}
@Test
public void pushForMasterAsEdit() throws Exception {
// A plain push creates no change edit.
PushOneCommit.Result r = pushTo("refs/for/master");
r.assertOkStatus();
Optional<EditInfo> edit = getEdit(r.getChangeId());
assertThat(edit).isAbsent();
assertThat(query("has:edit")).isEmpty();
// specify edit as option
r = amendChange(r.getChangeId(), "refs/for/master%edit");
r.assertOkStatus();
edit = getEdit(r.getChangeId());
assertThat(edit).isPresent();
EditInfo editInfo = edit.get();
// Push output marks the change as [EDIT] instead of creating a new patch set.
r.assertMessage(
canonicalWebUrl.get()
+ "c/"
+ project.get()
+ "/+/"
+ r.getChange().getId()
+ " "
+ editInfo.commit.subject
+ " [EDIT]\n");
// verify that the re-indexing was triggered for the change
assertThat(query("has:edit")).hasSize(1);
}
@Test
public void pushForMasterWithMessage() throws Exception {
// %m= sets both the change message suffix and the patch set description;
// underscores are decoded as spaces.
PushOneCommit.Result r = pushTo("refs/for/master%m=my_test_message");
r.assertOkStatus();
r.assertChange(Change.Status.NEW, null);
ChangeInfo ci = get(r.getChangeId(), MESSAGES, ALL_REVISIONS);
Collection<ChangeMessageInfo> changeMessages = ci.messages;
assertThat(changeMessages).hasSize(1);
for (ChangeMessageInfo cm : changeMessages) {
assertThat(cm.message).isEqualTo("Uploaded patch set 1.\nmy test message");
}
Collection<RevisionInfo> revisions = ci.revisions.values();
assertThat(revisions).hasSize(1);
for (RevisionInfo ri : revisions) {
assertThat(ri.description).isEqualTo("my test message");
}
}
@Test
public void pushForMasterWithMessageTwiceWithDifferentMessages() throws Exception {
enableCreateNewChangeForAllNotInTarget();
PushOneCommit push =
pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
// %2C is comma; the value below tests that percent decoding happens after splitting.
// All three ways of representing space ("%20", "+", and "_") are also exercised.
PushOneCommit.Result r = push.to("refs/for/master%m=my_test%20+_message%2Cm=");
r.assertOkStatus();
push =
pushFactory.create(
admin.newIdent(),
testRepo,
PushOneCommit.SUBJECT,
"b.txt",
"anotherContent",
r.getChangeId());
r = push.to("refs/for/master%m=new_test_message");
r.assertOkStatus();
// Each patch set keeps its own description.
ChangeInfo ci = get(r.getChangeId(), ALL_REVISIONS);
Collection<RevisionInfo> revisions = ci.revisions.values();
assertThat(revisions).hasSize(2);
for (RevisionInfo ri : revisions) {
if (ri.isCurrent) {
assertThat(ri.description).isEqualTo("new test message");
} else {
assertThat(ri.description).isEqualTo("my test message,m=");
}
}
}
@Test
public void pushForMasterWithPercentEncodedMessage() throws Exception {
// Exercise percent-encoding of UTF-8, underscores, and patterns reserved by git-rev-parse.
PushOneCommit.Result r =
pushTo(
"refs/for/master%m="
+ "Punctu%2E%2e%2Eation%7E%2D%40%7Bu%7D%20%7C%20%28%E2%95%AF%C2%B0%E2%96%A1%C2%B0"
+ "%EF%BC%89%E2%95%AF%EF%B8%B5%20%E2%94%BB%E2%94%81%E2%94%BB%20%5E%5F%5E");
r.assertOkStatus();
r.assertChange(Change.Status.NEW, null);
ChangeInfo ci = get(r.getChangeId(), MESSAGES, ALL_REVISIONS);
Collection<ChangeMessageInfo> changeMessages = ci.messages;
assertThat(changeMessages).hasSize(1);
for (ChangeMessageInfo cm : changeMessages) {
assertThat(cm.message)
.isEqualTo("Uploaded patch set 1.\nPunctu...ation~-@{u} | (╯°□°)╯︵ ┻━┻ ^_^");
}
Collection<RevisionInfo> revisions = ci.revisions.values();
assertThat(revisions).hasSize(1);
for (RevisionInfo ri : revisions) {
assertThat(ri.description).isEqualTo("Punctu...ation~-@{u} | (╯°□°)╯︵ ┻━┻ ^_^");
}
}
@Test
public void pushForMasterWithInvalidPercentEncodedMessage() throws Exception {
// Sequences that are not valid percent-encoding are kept verbatim instead of
// failing the push; only underscores are still decoded to spaces.
PushOneCommit.Result r = pushTo("refs/for/master%m=not_percent_decodable_%%oops%20");
r.assertOkStatus();
r.assertChange(Change.Status.NEW, null);
ChangeInfo ci = get(r.getChangeId(), MESSAGES, ALL_REVISIONS);
Collection<ChangeMessageInfo> changeMessages = ci.messages;
assertThat(changeMessages).hasSize(1);
for (ChangeMessageInfo cm : changeMessages) {
assertThat(cm.message).isEqualTo("Uploaded patch set 1.\nnot percent decodable %%oops%20");
}
Collection<RevisionInfo> revisions = ci.revisions.values();
assertThat(revisions).hasSize(1);
for (RevisionInfo ri : revisions) {
assertThat(ri.description).isEqualTo("not percent decodable %%oops%20");
}
}
@Test
public void pushForMasterWithApprovals() throws Exception {
// %l=Code-Review with no value applies a +1 vote by the uploader.
PushOneCommit.Result r = pushTo("refs/for/master%l=Code-Review");
r.assertOkStatus();
ChangeInfo ci = get(r.getChangeId(), DETAILED_LABELS, MESSAGES, DETAILED_ACCOUNTS);
LabelInfo cr = ci.labels.get("Code-Review");
assertThat(cr.all).hasSize(1);
assertThat(cr.all.get(0).name).isEqualTo("Administrator");
assertThat(cr.all.get(0).value).isEqualTo(1);
assertThat(Iterables.getLast(ci.messages).message)
.isEqualTo("Uploaded patch set 1: Code-Review+1.");
// An explicit value (+2) replaces the previous vote on the next patch set.
PushOneCommit push =
pushFactory.create(
admin.newIdent(),
testRepo,
PushOneCommit.SUBJECT,
"b.txt",
"anotherContent",
r.getChangeId());
r = push.to("refs/for/master%l=Code-Review+2");
ci = get(r.getChangeId(), DETAILED_LABELS, MESSAGES, DETAILED_ACCOUNTS);
cr = ci.labels.get("Code-Review");
assertThat(Iterables.getLast(ci.messages).message)
.isEqualTo("Uploaded patch set 2: Code-Review+2.");
// Check that the user who pushed the change was added as a reviewer since they added a vote
assertThatUserIsOnlyReviewer(ci, admin);
assertThat(cr.all).hasSize(1);
assertThat(cr.all.get(0).name).isEqualTo("Administrator");
assertThat(cr.all.get(0).value).isEqualTo(2);
// Re-applying the same vote on a further patch set is not mentioned in the message.
push =
pushFactory.create(
admin.newIdent(),
testRepo,
PushOneCommit.SUBJECT,
"c.txt",
"moreContent",
r.getChangeId());
r = push.to("refs/for/master%l=Code-Review+2");
ci = get(r.getChangeId(), MESSAGES);
assertThat(Iterables.getLast(ci.messages).message).isEqualTo("Uploaded patch set 3.");
}
  @Test
  public void pushNewPatchSetForMasterWithApprovals() throws Exception {
    // Applying a vote while uploading a new patch set (rather than at change creation).
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    r = push.to("refs/for/master%l=Code-Review+2");
    ChangeInfo ci = get(r.getChangeId(), DETAILED_LABELS, MESSAGES, DETAILED_ACCOUNTS);
    LabelInfo cr = ci.labels.get("Code-Review");
    assertThat(Iterables.getLast(ci.messages).message)
        .isEqualTo("Uploaded patch set 2: Code-Review+2.");
    // Check that the user who pushed the new patch set was added as a reviewer since they added
    // a vote
    assertThatUserIsOnlyReviewer(ci, admin);
    assertThat(cr.all).hasSize(1);
    assertThat(cr.all.get(0).name).isEqualTo("Administrator");
    assertThat(cr.all.get(0).value).isEqualTo(2);
  }
  @Test
  public void pushForMasterWithForgedAuthorAndCommitter() throws Exception {
    TestAccount user2 = accountCreator.user2();
    // Create a commit with different forged author and committer.
    RevCommit c =
        commitBuilder()
            .author(user.newIdent())
            .committer(user2.newIdent())
            .add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
            .message(PushOneCommit.SUBJECT)
            .create();
    // Push commit as "Administrator".
    pushHead(testRepo, "refs/for/master");
    // The pusher becomes the change owner; the forged author and committer are
    // both added as reviewers and each gets exactly one notification email.
    String changeId = GitUtil.getChangeId(testRepo, c).get();
    assertThat(getOwnerEmail(changeId)).isEqualTo(admin.email());
    assertThat(getReviewerEmails(changeId, ReviewerState.REVIEWER))
        .containsExactly(user.email(), user2.email());
    assertThat(sender.getMessages()).hasSize(1);
    assertThat(sender.getMessages().get(0).rcpt())
        .containsExactly(user.getNameEmail(), user2.getNameEmail());
  }
  @Test
  public void pushNewPatchSetForMasterWithForgedAuthorAndCommitter() throws Exception {
    TestAccount user2 = accountCreator.user2();
    // First patch set has author and committer matching change owner.
    PushOneCommit.Result r = pushTo("refs/for/master");
    assertThat(getOwnerEmail(r.getChangeId())).isEqualTo(admin.email());
    assertThat(getReviewerEmails(r.getChangeId(), ReviewerState.REVIEWER)).isEmpty();
    // Second patch set forges both identities; the forged accounts become
    // reviewers on the existing change and are notified.
    amendBuilder()
        .author(user.newIdent())
        .committer(user2.newIdent())
        .add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT + "2")
        .create();
    pushHead(testRepo, "refs/for/master");
    assertThat(getOwnerEmail(r.getChangeId())).isEqualTo(admin.email());
    assertThat(getReviewerEmails(r.getChangeId(), ReviewerState.REVIEWER))
        .containsExactly(user.email(), user2.email());
    assertThat(sender.getMessages()).hasSize(1);
    assertThat(sender.getMessages().get(0).rcpt())
        .containsExactly(user.getNameEmail(), user2.getNameEmail());
  }
  /**
   * There was a bug that allowed a user with Forge Committer Identity access right to upload a
   * commit and put *votes on behalf of another user* on it. This test checks that this is not
   * possible, but that the votes that are specified on push are applied only on behalf of the
   * uploader.
   *
   * <p>This particular bug only occurred when there was more than one label defined. However to
   * test that the votes that are specified on push are applied on behalf of the uploader a single
   * label is sufficient.
   */
  @Test
  public void pushForMasterWithApprovalsForgeCommitterButNoForgeVote() throws Exception {
    // Create a commit with "User" as author and committer
    RevCommit c =
        commitBuilder()
            .author(user.newIdent())
            .committer(user.newIdent())
            .add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
            .message(PushOneCommit.SUBJECT)
            .create();
    // Push this commit as "Administrator" (requires Forge Committer Identity)
    pushHead(testRepo, "refs/for/master%l=Code-Review+1", false);
    // Expected Code-Review votes:
    // 1. 0 from User (committer):
    //    When the committer is forged, the committer is automatically added as
    //    reviewer, hence we expect a dummy 0 vote for the committer.
    // 2. +1 from Administrator (uploader):
    //    On push Code-Review+1 was specified, hence we expect a +1 vote from
    //    the uploader.
    ChangeInfo ci =
        get(GitUtil.getChangeId(testRepo, c).get(), DETAILED_LABELS, MESSAGES, DETAILED_ACCOUNTS);
    LabelInfo cr = ci.labels.get("Code-Review");
    assertThat(cr.all).hasSize(2);
    // Vote ordering in cr.all is not guaranteed, so locate each account's entry first.
    int indexAdmin = admin.fullName().equals(cr.all.get(0).name) ? 0 : 1;
    int indexUser = indexAdmin == 0 ? 1 : 0;
    assertThat(cr.all.get(indexAdmin).name).isEqualTo(admin.fullName());
    assertThat(cr.all.get(indexAdmin).value.intValue()).isEqualTo(1);
    assertThat(cr.all.get(indexUser).name).isEqualTo(user.fullName());
    assertThat(cr.all.get(indexUser).value.intValue()).isEqualTo(0);
    assertThat(Iterables.getLast(ci.messages).message)
        .isEqualTo("Uploaded patch set 1: Code-Review+1.");
    // Check that the user who pushed the change was added as a reviewer since they added a vote
    assertThatUserIsOnlyReviewer(ci, admin);
  }
@Test
public void pushWithMultipleApprovals() throws Exception {
LabelType Q =
label("Custom-Label", value(1, "Positive"), value(0, "No score"), value(-1, "Negative"));
String heads = "refs/heads/*";
try (ProjectConfigUpdate u = updateProject(project)) {
u.getConfig().upsertLabelType(Q);
u.save();
}
projectOperations
.project(project)
.forUpdate()
.add(allowLabel("Custom-Label").ref(heads).group(ANONYMOUS_USERS).range(-1, 1))
.update();
RevCommit c =
commitBuilder()
.author(admin.newIdent())
.committer(admin.newIdent())
.add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
.message(PushOneCommit.SUBJECT)
.create();
pushHead(testRepo, "refs/for/master%l=Code-Review+1,l=Custom-Label-1", false);
ChangeInfo ci = get(GitUtil.getChangeId(testRepo, c).get(), DETAILED_LABELS, DETAILED_ACCOUNTS);
LabelInfo cr = ci.labels.get("Code-Review");
assertThat(cr.all).hasSize(1);
cr = ci.labels.get("Custom-Label");
assertThat(cr.all).hasSize(1);
// Check that the user who pushed the change was added as a reviewer since they added a vote
assertThatUserIsOnlyReviewer(ci, admin);
}
  @Test
  public void pushNewPatchsetToPatchSetLockedChange() throws Exception {
    // Once a Patch-Set-Lock vote is applied, uploading further patch sets must fail.
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    revision(r).review(new ReviewInput().label("Patch-Set-Lock", 1));
    r = push.to("refs/for/master");
    r.assertErrorStatus("cannot add patch set to " + r.getChange().change().getChangeId() + ".");
  }
@Test
public void pushForMasterWithApprovals_MissingLabel() throws Exception {
PushOneCommit.Result r = pushTo("refs/for/master%l=Verify");
r.assertErrorStatus("label \"Verify\" is not a configured label");
}
@Test
public void pushForMasterWithApprovals_ValueOutOfRange() throws Exception {
PushOneCommit.Result r = pushTo("refs/for/master%l=Code-Review-3");
r.assertErrorStatus("label \"Code-Review\": -3 is not a valid value");
}
@Test
public void pushForNonExistingBranch() throws Exception {
String branchName = "non-existing";
PushOneCommit.Result r = pushTo("refs/for/" + branchName);
r.assertErrorStatus("branch " + branchName + " not found");
}
  @Test
  public void pushForMasterWithHashtags() throws Exception {
    // Hashtags set via the %hashtag= push option; a leading '#' is stripped,
    // and tags from later patch sets accumulate rather than replace.

    // specify a single hashtag as option
    String hashtag1 = "tag1";
    Set<String> expected = ImmutableSet.of(hashtag1);
    PushOneCommit.Result r = pushTo("refs/for/master%hashtag=#" + hashtag1);
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null);
    Set<String> hashtags = gApi.changes().id(r.getChangeId()).getHashtags();
    assertThat(hashtags).containsExactlyElementsIn(expected);
    // specify a single hashtag as option in new patch set
    String hashtag2 = "tag2";
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    r = push.to("refs/for/master%hashtag=" + hashtag2);
    r.assertOkStatus();
    expected = ImmutableSet.of(hashtag1, hashtag2);
    hashtags = gApi.changes().id(r.getChangeId()).getHashtags();
    assertThat(hashtags).containsExactlyElementsIn(expected);
  }
  @Test
  public void pushForMasterWithMultipleHashtags() throws Exception {
    // Multiple %hashtag= options in one push; any number of leading '#'
    // characters is stripped ("##tag2" still yields "tag2").

    // specify multiple hashtags as options
    String hashtag1 = "tag1";
    String hashtag2 = "tag2";
    Set<String> expected = ImmutableSet.of(hashtag1, hashtag2);
    PushOneCommit.Result r =
        pushTo("refs/for/master%hashtag=#" + hashtag1 + ",hashtag=##" + hashtag2);
    r.assertOkStatus();
    r.assertChange(Change.Status.NEW, null);
    Set<String> hashtags = gApi.changes().id(r.getChangeId()).getHashtags();
    assertThat(hashtags).containsExactlyElementsIn(expected);
    // specify multiple hashtags as options in new patch set
    String hashtag3 = "tag3";
    String hashtag4 = "tag4";
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    r = push.to("refs/for/master%hashtag=" + hashtag3 + ",hashtag=" + hashtag4);
    r.assertOkStatus();
    // Hashtags from both pushes accumulate on the change.
    expected = ImmutableSet.of(hashtag1, hashtag2, hashtag3, hashtag4);
    hashtags = gApi.changes().id(r.getChangeId()).getHashtags();
    assertThat(hashtags).containsExactlyElementsIn(expected);
  }
  @Test
  public void pushCommitUsingSignedOffBy() throws Exception {
    // Before enabling the setting, a commit without a Signed-off-by footer is accepted.
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    // Enable the use-signed-off-by requirement and block committer forging.
    setUseSignedOffBy(InheritableBoolean.TRUE);
    projectOperations
        .project(project)
        .forUpdate()
        .add(block(Permission.FORGE_COMMITTER).ref("refs/heads/master").group(REGISTERED_USERS))
        .update();
    // A commit carrying the uploader's Signed-off-by footer is accepted...
    push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT
                + String.format("\n\nSigned-off-by: %s <%s>", admin.fullName(), admin.email()),
            "b.txt",
            "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    // ...while a commit without the footer is now rejected.
    push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertErrorStatus("not Signed-off-by author/committer/uploader in message footer");
  }
  @Test
  public void createNewChangeForAllNotInTarget() throws Exception {
    // With createNewChangeForAllNotInTarget enabled, pushing the same commit for a
    // different target branch creates a second change sharing the same revision.
    enableCreateNewChangeForAllNotInTarget();
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    gApi.projects().name(project.get()).branch("otherBranch").create(new BranchInput());
    PushOneCommit.Result r2 = push.to("refs/for/otherBranch");
    r2.assertOkStatus();
    assertTwoChangesWithSameRevision(r);
  }
  @Test
  public void pushChangeBasedOnChangeOfOtherUserWithCreateNewChangeForAllNotInTarget()
      throws Exception {
    // Even with createNewChangeForAllNotInTarget, pushing a change stacked on
    // another user's open change must not create a duplicate change for the base.
    enableCreateNewChangeForAllNotInTarget();
    // create a change as admin
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    RevCommit commitChange1 = r.getCommit();
    // create a second change as user (depends on the change from admin)
    TestRepository<?> userRepo = cloneProject(project, user);
    GitUtil.fetch(userRepo, r.getPatchSet().refName() + ":change");
    userRepo.reset("change");
    push =
        pushFactory.create(
            user.newIdent(), userRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    // assert that no new change was created for the commit of the predecessor change
    assertThat(query(commitChange1.name())).hasSize(1);
  }
  @Test
  public void pushSameCommitTwiceUsingMagicBranchBaseOption() throws Exception {
    // The %base= magic-branch option allows the same commit to be uploaded as a
    // second change targeting a different branch.
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.PUSH).ref("refs/heads/master").group(adminGroupUuid()))
        .update();
    PushOneCommit.Result rBase = pushTo("refs/heads/master");
    rBase.assertOkStatus();
    gApi.projects().name(project.get()).branch("foo").create(new BranchInput());
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    PushResult pr =
        GitUtil.pushHead(testRepo, "refs/for/foo%base=" + rBase.getCommit().name(), false, false);
    // BatchUpdate implementations differ in how they hook into progress monitors. We mostly just
    // care that there is a new change.
    assertThat(pr.getMessages()).containsMatch("changes: .*new: 1.*done");
    assertTwoChangesWithSameRevision(r);
  }
  @Test
  public void pushSameCommitTwice() throws Exception {
    // Re-pushing a commit that is already the current patch set of a change is rejected.
    enableCreateNewChangeForAllNotInTarget();
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    assertPushRejected(
        pushHead(testRepo, "refs/for/master", false),
        "refs/for/master",
        "commit(s) already exists (as current patchset)");
  }
  @Test
  public void pushSameCommitTwiceWhenIndexFailed() throws Exception {
    // The duplicate-commit rejection must still work when the change is missing
    // from the secondary index (simulated here by deleting the index entry).
    enableCreateNewChangeForAllNotInTarget();
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    push =
        pushFactory.create(
            admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "b.txt", "anotherContent");
    r = push.to("refs/for/master");
    r.assertOkStatus();
    indexer.delete(r.getChange().getId());
    assertPushRejected(
        pushHead(testRepo, "refs/for/master", false),
        "refs/for/master",
        "commit(s) already exists (as current patchset)");
  }
private void assertTwoChangesWithSameRevision(PushOneCommit.Result result) throws Exception {
List<ChangeInfo> changes = query(result.getCommit().name());
assertThat(changes).hasSize(2);
ChangeInfo c1 = get(changes.get(0).id, CURRENT_REVISION);
ChangeInfo c2 = get(changes.get(1).id, CURRENT_REVISION);
assertThat(c1.project).isEqualTo(c2.project);
assertThat(c1.branch).isNotEqualTo(c2.branch);
assertThat(c1.changeId).isEqualTo(c2.changeId);
assertThat(c1.currentRevision).isEqualTo(c2.currentRevision);
}
  @Test
  public void pushAFewChanges() throws Exception {
    // Default configuration variant; see testPushAFewChanges for the scenario.
    testPushAFewChanges();
  }
  @Test
  public void pushAFewChangesWithCreateNewChangeForAllNotInTarget() throws Exception {
    // Same scenario as pushAFewChanges, with createNewChangeForAllNotInTarget enabled.
    enableCreateNewChangeForAllNotInTarget();
    testPushAFewChanges();
  }
  /**
   * Pushes a stack of 10 changes, amends them all, and verifies that each change gets a second
   * patch set and that re-pushing the unchanged stack is rejected with "no new changes".
   */
  private void testPushAFewChanges() throws Exception {
    int n = 10;
    String r = "refs/for/master";
    ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
    List<RevCommit> commits = createChanges(n, r);
    // Check that a change was created for each.
    for (RevCommit c : commits) {
      assertWithMessage("change for " + c.name())
          .that(byCommit(c).change().getSubject())
          .isEqualTo(c.getShortMessage());
    }
    List<RevCommit> commits2 = amendChanges(initialHead, commits, r);
    // Check that there are correct patch sets.
    for (int i = 0; i < n; i++) {
      RevCommit c = commits.get(i);
      RevCommit c2 = commits2.get(i);
      String name = "change for " + c2.name();
      ChangeData cd = byCommit(c);
      assertWithMessage(name).that(cd.change().getSubject()).isEqualTo(c2.getShortMessage());
      assertWithMessage(name)
          .that(getPatchSetRevisions(cd))
          .containsExactlyEntriesIn(ImmutableMap.of(1, c.name(), 2, c2.name()));
    }
    // Pushing again results in "no new changes".
    assertPushRejected(pushHead(testRepo, r, false), r, "no new changes");
  }
  @Test
  public void pushWithoutChangeId() throws Exception {
    // Default configuration variant; see testPushWithoutChangeId for the scenario.
    testPushWithoutChangeId();
  }
  @Test
  public void pushWithoutChangeIdWithCreateNewChangeForAllNotInTarget() throws Exception {
    // Same scenario as pushWithoutChangeId, with createNewChangeForAllNotInTarget enabled.
    enableCreateNewChangeForAllNotInTarget();
    testPushWithoutChangeId();
  }
  /**
   * A commit without a Change-Id footer is rejected while requireChangeId is on, and accepted
   * once the requirement is disabled.
   */
  private void testPushWithoutChangeId() throws Exception {
    RevCommit c = createCommit(testRepo, "Message without Change-Id");
    assertThat(GitUtil.getChangeId(testRepo, c)).isEmpty();
    pushForReviewRejected(testRepo, "missing Change-Id in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewOk(testRepo);
  }
  @Test
  public void pushWithChangeIdAboveFooter() throws Exception {
    // Default configuration variant; see testPushWithChangeIdAboveFooter for the scenario.
    testPushWithChangeIdAboveFooter();
  }
@Test
public void pushWithLinkFooter() throws Exception {
String changeId = "I0123456789abcdef0123456789abcdef01234567";
String url = cfg.getString("gerrit", null, "canonicalWebUrl");
if (!url.endsWith("/")) {
url += "/";
}
RevCommit c = createCommit(testRepo, "test commit\n\nLink: " + url + "id/" + changeId);
pushForReviewOk(testRepo);
List<ChangeMessageInfo> messages = getMessages(changeId);
assertThat(messages.get(0).message).isEqualTo("Uploaded patch set 1.");
}
@Test
public void pushWithWrongHostLinkFooter() throws Exception {
String changeId = "I0123456789abcdef0123456789abcdef01234567";
RevCommit c = createCommit(testRepo, "test commit\n\nLink: https://wronghost/id/" + changeId);
pushForReviewRejected(testRepo, "missing Change-Id in message footer");
}
  @Test
  public void pushWithChangeIdAboveFooterWithCreateNewChangeForAllNotInTarget() throws Exception {
    // Same scenario as pushWithChangeIdAboveFooter, with createNewChangeForAllNotInTarget enabled.
    enableCreateNewChangeForAllNotInTarget();
    testPushWithChangeIdAboveFooter();
  }
  /**
   * A Change-Id line that is followed by more text is not in the footer block, so the commit is
   * rejected — and remains rejected even when requireChangeId is disabled, because the misplaced
   * line is still an error.
   */
  private void testPushWithChangeIdAboveFooter() throws Exception {
    RevCommit c =
        createCommit(
            testRepo,
            PushOneCommit.SUBJECT
                + "\n\n"
                + "Change-Id: Ied70ea827f5bf968f1f6aaee6594e07c846d217a\n\n"
                + "More text, uh oh.\n");
    assertThat(GitUtil.getChangeId(testRepo, c)).isEmpty();
    pushForReviewRejected(testRepo, "Change-Id must be in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "Change-Id must be in message footer");
  }
  @Test
  public void errorMessageFormat() throws Exception {
    // The rejection reason appears both as the ref-update message and as an
    // "ERROR: ..." line in the overall push output, prefixed with the short SHA-1.
    RevCommit c = createCommit(testRepo, "Message without Change-Id");
    assertThat(GitUtil.getChangeId(testRepo, c)).isEmpty();
    String ref = "refs/for/master";
    PushResult r = pushHead(testRepo, ref);
    RemoteRefUpdate refUpdate = r.getRemoteUpdate(ref);
    assertThat(refUpdate.getStatus()).isEqualTo(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
    String reason =
        String.format("commit %s: missing Change-Id in message footer", abbreviateName(c));
    assertThat(refUpdate.getMessage()).isEqualTo(reason);
    assertThat(r.getMessages()).contains("\nERROR: " + reason);
  }
  @Test
  public void pushWithMultipleChangeIds() throws Exception {
    // Default configuration variant; see testPushWithMultipleChangeIds for the scenario.
    testPushWithMultipleChangeIds();
  }
  @Test
  public void pushWithMultipleChangeIdsWithCreateNewChangeForAllNotInTarget() throws Exception {
    // Same scenario as pushWithMultipleChangeIds, with createNewChangeForAllNotInTarget enabled.
    enableCreateNewChangeForAllNotInTarget();
    testPushWithMultipleChangeIds();
  }
  /**
   * A commit with more than one Change-Id footer line is rejected regardless of whether
   * requireChangeId is enabled.
   */
  private void testPushWithMultipleChangeIds() throws Exception {
    createCommit(
        testRepo,
        "Message with multiple Change-Id\n"
            + "\n"
            + "Change-Id: I10f98c2ef76e52e23aa23be5afeb71e40b350e86\n"
            + "Change-Id: Ie9a132e107def33bdd513b7854b50de911edba0a\n");
    pushForReviewRejected(testRepo, "multiple Change-Id lines in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "multiple Change-Id lines in message footer");
  }
  @Test
  public void pushWithInvalidChangeId() throws Exception {
    // Default configuration variant; see testpushWithInvalidChangeId for the scenario.
    testpushWithInvalidChangeId();
  }
  @Test
  public void pushWithInvalidChangeIdWithCreateNewChangeForAllNotInTarget() throws Exception {
    // Same scenario as pushWithInvalidChangeId, with createNewChangeForAllNotInTarget enabled.
    enableCreateNewChangeForAllNotInTarget();
    testpushWithInvalidChangeId();
  }
  /**
   * A malformed Change-Id footer value is rejected regardless of whether requireChangeId is
   * enabled.
   */
  // NOTE(review): name should be testPushWithInvalidChangeId for consistency with its
  // siblings; left unchanged because callers reference this exact name.
  private void testpushWithInvalidChangeId() throws Exception {
    createCommit(testRepo, "Message with invalid Change-Id\n\nChange-Id: X\n");
    pushForReviewRejected(testRepo, "invalid Change-Id line format in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "invalid Change-Id line format in message footer");
  }
  @Test
  public void pushWithInvalidChangeIdFromEgit() throws Exception {
    // Default configuration variant; see testPushWithInvalidChangeIdFromEgit for the scenario.
    testPushWithInvalidChangeIdFromEgit();
  }
  @Test
  public void pushWithInvalidChangeIdFromEgitWithCreateNewChangeForAllNotInTarget()
      throws Exception {
    // Same scenario as pushWithInvalidChangeIdFromEgit, with
    // createNewChangeForAllNotInTarget enabled.
    enableCreateNewChangeForAllNotInTarget();
    testPushWithInvalidChangeIdFromEgit();
  }
  /**
   * The all-zero Change-Id placeholder (as historically produced by EGit) is rejected as an
   * invalid Change-Id, regardless of whether requireChangeId is enabled.
   */
  private void testPushWithInvalidChangeIdFromEgit() throws Exception {
    createCommit(
        testRepo,
        "Message with invalid Change-Id\n"
            + "\n"
            + "Change-Id: I0000000000000000000000000000000000000000\n");
    pushForReviewRejected(testRepo, "invalid Change-Id line format in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "invalid Change-Id line format in message footer");
  }
  @Test
  public void pushWithChangeIdInSubjectLine() throws Exception {
    // A commit whose entire message is a Change-Id line has no subject; that is
    // rejected even when requireChangeId is disabled.
    createCommit(testRepo, "Change-Id: I1234000000000000000000000000000000000000");
    pushForReviewRejected(testRepo, "missing subject; Change-Id must be in message footer");
    setRequireChangeId(InheritableBoolean.FALSE);
    pushForReviewRejected(testRepo, "missing subject; Change-Id must be in message footer");
  }
  @Test
  public void pushCommitWithSameChangeIdAsPredecessorChange() throws Exception {
    // A new commit stacked on an open change but reusing that change's Change-Id is
    // rejected, with or without the requireChangeId project setting.
    PushOneCommit push =
        pushFactory.create(admin.newIdent(), testRepo, PushOneCommit.SUBJECT, "a.txt", "content");
    PushOneCommit.Result r = push.to("refs/for/master");
    r.assertOkStatus();
    RevCommit commitChange1 = r.getCommit();
    // Second commit duplicates the first commit's full message, including its Change-Id.
    createCommit(testRepo, commitChange1.getFullMessage());
    pushForReviewRejected(
        testRepo,
        "same Change-Id in multiple changes.\n"
            + "Squash the commits with the same Change-Id or ensure Change-Ids are unique for each"
            + " commit");
    try (ProjectConfigUpdate u = updateProject(project)) {
      u.getConfig()
          .updateProject(
              p ->
                  p.setBooleanConfig(
                      BooleanProjectConfig.REQUIRE_CHANGE_ID, InheritableBoolean.FALSE));
      u.save();
    }
    // Disabling requireChangeId does not make a duplicated Change-Id acceptable.
    pushForReviewRejected(
        testRepo,
        "same Change-Id in multiple changes.\n"
            + "Squash the commits with the same Change-Id or ensure Change-Ids are unique for each"
            + " commit");
  }
  @Test
  public void pushTwoCommitWithSameChangeId() throws Exception {
    // Two new commits in a single push sharing one Change-Id are rejected, with or
    // without the requireChangeId project setting.
    RevCommit commitChange1 = createCommitWithChangeId(testRepo, "some change");
    // Second commit duplicates the first commit's full message, including its Change-Id.
    createCommit(testRepo, commitChange1.getFullMessage());
    pushForReviewRejected(
        testRepo,
        "same Change-Id in multiple changes.\n"
            + "Squash the commits with the same Change-Id or ensure Change-Ids are unique for each"
            + " commit");
    try (ProjectConfigUpdate u = updateProject(project)) {
      u.getConfig()
          .updateProject(
              p ->
                  p.setBooleanConfig(
                      BooleanProjectConfig.REQUIRE_CHANGE_ID, InheritableBoolean.FALSE));
      u.save();
    }
    // Disabling requireChangeId does not make a duplicated Change-Id acceptable.
    pushForReviewRejected(
        testRepo,
        "same Change-Id in multiple changes.\n"
            + "Squash the commits with the same Change-Id or ensure Change-Ids are unique for each"
            + " commit");
  }
private static RevCommit createCommit(TestRepository<?> testRepo, String message)
throws Exception {
return testRepo.branch("HEAD").commit().message(message).add("a.txt", "content").create();
}
private static RevCommit createCommitWithChangeId(TestRepository<?> testRepo, String message)
throws Exception {
RevCommit c =
testRepo
.branch("HEAD")
.commit()
.message(message)
.insertChangeId()
.add("a.txt", "content")
.create();
return testRepo.getRevWalk().parseCommit(c);
}
  @Test
  public void cantAutoCloseChangeAlreadyMergedToBranch() throws Exception {
    PushOneCommit.Result r1 = createChange();
    Change.Id id1 = r1.getChange().getId();
    PushOneCommit.Result r2 = createChange();
    Change.Id id2 = r2.getChange().getId();
    // Merge change 1 behind Gerrit's back.
    try (Repository repo = repoManager.openRepository(project);
        TestRepository<?> tr = new TestRepository<>(repo)) {
      tr.branch("refs/heads/master").update(r1.getCommit());
    }
    assertThat(gApi.changes().id(id1.get()).info().status).isEqualTo(ChangeStatus.NEW);
    assertThat(gApi.changes().id(id2.get()).info().status).isEqualTo(ChangeStatus.NEW);
    r2 = amendChange(r2.getChangeId());
    r2.assertOkStatus();
    // Change 1 is still new despite being merged into the branch, because
    // ReceiveCommits only considers commits between the branch tip (which is
    // now the merged change 1) and the push tip (new patch set of change 2).
    assertThat(gApi.changes().id(id1.get()).info().status).isEqualTo(ChangeStatus.NEW);
    assertThat(gApi.changes().id(id2.get()).info().status).isEqualTo(ChangeStatus.NEW);
  }
  @Test
  public void accidentallyPushNewPatchSetDirectlyToBranchAndCantRecoverByPushingToRefsFor()
      throws Exception {
    // After a new patch set lands directly on the branch (see helper), re-pushing
    // it to refs/for/ is rejected and the change keeps only its original patch set.
    Change.Id id = accidentallyPushNewPatchSetDirectlyToBranch();
    ChangeData cd = byChangeId(id);
    String ps1Rev = Iterables.getOnlyElement(cd.patchSets()).commitId().name();
    String r = "refs/for/master";
    assertPushRejected(pushHead(testRepo, r, false), r, "no new changes");
    // Change not updated.
    cd = byChangeId(id);
    assertThat(cd.change().isNew()).isTrue();
    assertThat(getPatchSetRevisions(cd)).containsExactlyEntriesIn(ImmutableMap.of(1, ps1Rev));
  }
  @Test
  public void forcePushAbandonedChange() throws Exception {
    // Force-pushing the commit of an abandoned change directly to the branch
    // auto-closes the change as MERGED.
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.PUSH).ref("refs/*").group(adminGroupUuid()).force(true))
        .update();
    PushOneCommit push1 =
        pushFactory.create(admin.newIdent(), testRepo, "change1", "a.txt", "content");
    PushOneCommit.Result r = push1.to("refs/for/master");
    r.assertOkStatus();
    // abandon the change
    String changeId = r.getChangeId();
    assertThat(info(changeId).status).isEqualTo(ChangeStatus.NEW);
    gApi.changes().id(changeId).abandon();
    ChangeInfo info = get(changeId);
    assertThat(info.status).isEqualTo(ChangeStatus.ABANDONED);
    push1.setForce(true);
    PushOneCommit.Result r1 = push1.to("refs/heads/master");
    r1.assertOkStatus();
    ChangeInfo result = Iterables.getOnlyElement(gApi.changes().query(r.getChangeId()).get());
    assertThat(result.status).isEqualTo(ChangeStatus.MERGED);
  }
  /**
   * Creates a change, then writes a second patch set of it directly onto the branch in the
   * server's repository (bypassing receive), leaving the change open with only patch set 1.
   * Returns the change's ID for further assertions.
   */
  private Change.Id accidentallyPushNewPatchSetDirectlyToBranch() throws Exception {
    PushOneCommit.Result r = createChange();
    RevCommit ps1Commit = r.getCommit();
    Change c = r.getChange().change();
    RevCommit ps2Commit;
    try (Repository repo = repoManager.openRepository(project);
        TestRepository<?> tr = new TestRepository<>(repo)) {
      // Create a new patch set of the change directly in Gerrit's repository,
      // without pushing it. In reality it's more likely that the client would
      // create and push this behind Gerrit's back (e.g. an admin accidentally
      // using direct ssh access to the repo), but that's harder to do in tests.
      ps2Commit =
          tr.branch("refs/heads/master")
              .commit()
              .message(ps1Commit.getShortMessage() + " v2")
              .insertChangeId(r.getChangeId().substring(1))
              .create();
    }
    // Sync the local clone with the server-side branch update.
    testRepo.git().fetch().setRefSpecs(new RefSpec("refs/heads/master")).call();
    testRepo.reset(ps2Commit);
    ChangeData cd = byCommit(ps1Commit);
    assertThat(cd.change().isNew()).isTrue();
    assertThat(getPatchSetRevisions(cd))
        .containsExactlyEntriesIn(ImmutableMap.of(1, ps1Commit.name()));
    return c.getId();
  }
  @Test
  public void pushWithEmailInFooter() throws Exception {
    // A footer with the reviewer's "Name <email>" form resolves to that account.
    pushWithReviewerInFooter(user.getNameEmail().toString(), user);
  }
  @Test
  public void pushWithNameInFooter() throws Exception {
    // A footer with only the reviewer's full name resolves to that account.
    pushWithReviewerInFooter(user.fullName(), user);
  }
  @Test
  public void pushWithEmailInFooterNotFound() throws Exception {
    // An address that matches no account adds no reviewer (expected account: null).
    pushWithReviewerInFooter(
        Address.create("No Body", "[email protected]").toString(), null);
  }
  @Test
  public void pushWithNameInFooterNotFound() throws Exception {
    // A name that matches no account adds no reviewer (expected account: null).
    pushWithReviewerInFooter("Notauser", null);
  }
  @Test
  public void pushNewPatchsetOverridingStickyLabel() throws Exception {
    // With copyMaxScore enabled, a +2 would stick to the next patch set; pushing the
    // new patch set with an explicit lower vote (+1) must still be accepted.
    try (ProjectConfigUpdate u = updateProject(project)) {
      LabelType codeReview = TestLabels.codeReview().toBuilder().setCopyMaxScore(true).build();
      u.getConfig().upsertLabelType(codeReview);
      u.save();
    }
    PushOneCommit.Result r = pushTo("refs/for/master%l=Code-Review+2");
    r.assertOkStatus();
    PushOneCommit push =
        pushFactory.create(
            admin.newIdent(),
            testRepo,
            PushOneCommit.SUBJECT,
            "b.txt",
            "anotherContent",
            r.getChangeId());
    r = push.to("refs/for/master%l=Code-Review+1");
    r.assertOkStatus();
  }
  @Test
  public void createChangeForMergedCommit() throws Exception {
    // The %merged option creates a MERGED change for a commit that is already on
    // the target branch, without creating changes for its ancestors.
    String master = "refs/heads/master";
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.PUSH).ref(master).group(adminGroupUuid()).force(true))
        .update();
    // Update master with a direct push.
    RevCommit c1 = testRepo.commit().message("Non-change 1").create();
    RevCommit c2 =
        testRepo.parseBody(
            testRepo.commit().parent(c1).message("Non-change 2").insertChangeId().create());
    String changeId = Iterables.getOnlyElement(c2.getFooterLines(CHANGE_ID));
    testRepo.reset(c2);
    assertPushOk(pushHead(testRepo, master, false, true), master);
    // Neither commit has a change yet.
    String q = "commit:" + c1.name() + " OR commit:" + c2.name() + " OR change:" + changeId;
    assertThat(gApi.changes().query(q).get()).isEmpty();
    // Push c2 as a merged change.
    String r = "refs/for/master%merged";
    assertPushOk(pushHead(testRepo, r, false), r);
    EnumSet<ListChangesOption> opts = EnumSet.of(ListChangesOption.CURRENT_REVISION);
    ChangeInfo info = gApi.changes().id(changeId).get(opts);
    assertThat(info.currentRevision).isEqualTo(c2.name());
    assertThat(info.status).isEqualTo(ChangeStatus.MERGED);
    // Only c2 was created as a change.
    String q1 = "commit: " + c1.name();
    assertThat(gApi.changes().query(q1).get()).isEmpty();
    // Push c1 as a merged change.
    testRepo.reset(c1);
    assertPushOk(pushHead(testRepo, r, false), r);
    List<ChangeInfo> infos = gApi.changes().query(q1).withOptions(opts).get();
    assertThat(infos).hasSize(1);
    info = infos.get(0);
    assertThat(info.currentRevision).isEqualTo(c1.name());
    assertThat(info.status).isEqualTo(ChangeStatus.MERGED);
  }
@Test
public void mergedOptionFailsWhenCommitIsNotMerged() throws Exception {
PushOneCommit.Result r = pushTo("refs/for/master%merged");
r.assertErrorStatus("not merged into branch");
}
  @Test
  public void mergedOptionFailsWhenCommitIsMergedOnOtherBranch() throws Exception {
    // %merged requires the commit to be merged into the *target* branch; being
    // merged anywhere else does not count.
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    gApi.changes().id(r.getChangeId()).current().review(ReviewInput.approve());
    gApi.changes().id(r.getChangeId()).current().submit();
    // Create an unrelated branch tip so the commit is not reachable from it.
    try (Repository repo = repoManager.openRepository(project);
        TestRepository<Repository> tr = new TestRepository<>(repo)) {
      tr.branch("refs/heads/branch").commit().message("Initial commit on branch").create();
    }
    pushTo("refs/for/master%merged").assertErrorStatus("not merged into branch");
  }
  @Test
  public void mergedOptionFailsWhenChangeExists() throws Exception {
    // %merged for a commit whose change is already merged is rejected with
    // "no new changes" — there is nothing to create.
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    gApi.changes().id(r.getChangeId()).current().review(ReviewInput.approve());
    gApi.changes().id(r.getChangeId()).current().submit();
    testRepo.reset(r.getCommit());
    String ref = "refs/for/master%merged";
    PushResult pr = pushHead(testRepo, ref, false);
    RemoteRefUpdate rru = pr.getRemoteUpdate(ref);
    assertThat(rru.getStatus()).isEqualTo(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
    assertThat(rru.getMessage()).contains("no new changes");
  }
  @Test
  public void mergedOptionWithNewCommitWithSameChangeIdFails() throws Exception {
    // A new (unmerged) commit reusing a merged change's Change-Id cannot be pushed
    // with %merged — the commit itself is not on the branch.
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    gApi.changes().id(r.getChangeId()).current().review(ReviewInput.approve());
    gApi.changes().id(r.getChangeId()).current().submit();
    RevCommit c2 =
        testRepo
            .amend(r.getCommit())
            .message("New subject")
            .insertChangeId(r.getChangeId().substring(1))
            .create();
    testRepo.reset(c2);
    String ref = "refs/for/master%merged";
    PushResult pr = pushHead(testRepo, ref, false);
    RemoteRefUpdate rru = pr.getRemoteUpdate(ref);
    assertThat(rru.getStatus()).isEqualTo(RemoteRefUpdate.Status.REJECTED_OTHER_REASON);
    assertThat(rru.getMessage()).contains("not merged into branch");
  }
  @Test
  public void mergedOptionWithExistingChangeInsertsPatchSet() throws Exception {
    // Pushing an already-on-branch commit with %merged for an existing open change
    // inserts it as a new patch set of that change.
    String master = "refs/heads/master";
    projectOperations
        .project(project)
        .forUpdate()
        .add(allow(Permission.PUSH).ref(master).group(adminGroupUuid()).force(true))
        .update();
    PushOneCommit.Result r = pushTo("refs/for/master");
    r.assertOkStatus();
    ObjectId c1 = r.getCommit().copy();
    // Create a PS2 commit directly on master in the server's repo. This
    // simulates the client amending locally and pushing directly to the branch,
    // expecting the change to be auto-closed, but the change metadata update
    // fails.
    ObjectId c2;
    try (Repository repo = repoManager.openRepository(project);
        TestRepository<Repository> tr = new TestRepository<>(repo)) {
      RevCommit commit2 =
          tr.amend(c1).message("New subject").insertChangeId(r.getChangeId().substring(1)).create();
      c2 = commit2.copy();
      tr.update(master, c2);
    }
    // Sync the local clone with the server-side branch update.
    testRepo.git().fetch().setRefSpecs(new RefSpec("refs/heads/master")).call();
    testRepo.reset(c2);
    String ref = "refs/for/master%merged";
    assertPushOk(pushHead(testRepo, ref, false), ref);
    ChangeInfo info = gApi.changes().id(r.getChangeId()).get(ALL_REVISIONS);
    assertThat(info.currentRevision).isEqualTo(c2.name());
    assertThat(info.revisions.keySet()).containsExactly(c1.name(), c2.name());
    // TODO(dborowitz): Fix ReceiveCommits to also auto-close the change.
    assertThat(info.status).isEqualTo(ChangeStatus.NEW);
  }
/**
 * Comments published via {@code %publish-comments} must be linked to the "(n comments)" change
 * message of the matching patch set, not to the auto-generated "Uploaded patch set" message,
 * even though those records share the same timestamp (clock step is 0).
 */
@Test
public void publishedCommentsAssignedToChangeMessages() throws Exception {
TestTimeUtil.resetWithClockStep(0, TimeUnit.SECONDS);
PushOneCommit.Result r = createChange(); // creating the change with patch set 1
TestTimeUtil.incrementClock(5, TimeUnit.SECONDS);
/** Create and publish a comment on PS2. Increment the clock step */
String rev1 = r.getCommit().name();
addDraft(r.getChangeId(), rev1, newDraft(FILE_NAME, 1, "comment_PS2."));
r = amendChange(r.getChangeId(), "refs/for/master%publish-comments");
assertThat(getPublishedComments(r.getChangeId())).isNotEmpty();
TestTimeUtil.incrementClock(5, TimeUnit.SECONDS);
/** Create and publish a comment on PS3 */
String rev2 = r.getCommit().name();
addDraft(r.getChangeId(), rev2, newDraft(FILE_NAME, 1, "comment_PS3."));
amendChange(r.getChangeId(), "refs/for/master%publish-comments");
Collection<CommentInfo> comments = getPublishedComments(r.getChangeId());
List<ChangeMessageInfo> allMessages = getMessages(r.getChangeId());
assertThat(allMessages.stream().map(m -> m.message).collect(toList()))
.containsExactly(
"Uploaded patch set 1.",
"Uploaded patch set 2.",
"Patch Set 2:\n\n(1 comment)",
"Uploaded patch set 3.",
"Patch Set 3:\n\n(1 comment)")
.inOrder();
/**
 * Note that the following 3 items have the same timestamp: comment "comment_PS2", message
 * "Uploaded patch set 2.", and message "Patch Set 2:\n\n(1 comment)". The comment will not be
 * matched with the upload change message because it is auto-generated. Same goes for patch set
 * 3.
 */
String commentPs2MessageId =
comments.stream()
.filter(c -> c.message.equals("comment_PS2."))
.collect(onlyElement())
.changeMessageId;
String commentPs3MessageId =
comments.stream()
.filter(c -> c.message.equals("comment_PS3."))
.collect(onlyElement())
.changeMessageId;
String message2Id =
allMessages.stream()
.filter(m -> m.message.equals("Patch Set 2:\n\n(1 comment)"))
.collect(onlyElement())
.id;
String message3Id =
allMessages.stream()
.filter(m -> m.message.equals("Patch Set 3:\n\n(1 comment)"))
.collect(onlyElement())
.id;
// Each published comment must point at its "(1 comment)" message, not the upload message.
assertThat(commentPs2MessageId).isEqualTo(message2Id);
assertThat(commentPs3MessageId).isEqualTo(message3Id);
}
/**
 * Pushing with {@code %publish-comments} publishes the uploader's drafts on <em>all</em>
 * revisions of the change, and verifies the resulting change messages, message tags, emails,
 * and NoteDb meta commits.
 */
@Test
public void publishCommentsOnPushPublishesDraftsOnAllRevisions() throws Exception {
PushOneCommit.Result r = createChange();
String rev1 = r.getCommit().name();
// Two drafts on PS1, one on PS2; all three must be published by the PS3 push below.
CommentInfo c1 = addDraft(r.getChangeId(), rev1, newDraft(FILE_NAME, 1, "comment1"));
CommentInfo c2 = addDraft(r.getChangeId(), rev1, newDraft(FILE_NAME, 1, "comment2"));
r = amendChange(r.getChangeId());
String rev2 = r.getCommit().name();
CommentInfo c3 = addDraft(r.getChangeId(), rev2, newDraft(FILE_NAME, 1, "comment3"));
assertThat(getPublishedComments(r.getChangeId())).isEmpty();
gApi.changes().id(r.getChangeId()).addReviewer(user.email());
sender.clear();
amendChange(r.getChangeId(), "refs/for/master%publish-comments");
Collection<CommentInfo> comments = getPublishedComments(r.getChangeId());
assertThat(comments.stream().map(c -> c.id)).containsExactly(c1.id, c2.id, c3.id);
assertThat(comments.stream().map(c -> c.message))
.containsExactly("comment1", "comment2", "comment3");
/* Assert the correctness of the API messages */
List<ChangeMessageInfo> allMessages = getMessages(r.getChangeId());
List<String> messagesText = allMessages.stream().map(m -> m.message).collect(toList());
assertThat(messagesText)
.containsExactly(
"Uploaded patch set 1.",
"Uploaded patch set 2.",
"Uploaded patch set 3.",
"Patch Set 3:\n\n(3 comments)")
.inOrder();
/* Assert the tags - PS#2 comments do not have tags, PS#3 upload is autogenerated */
List<String> messagesTags = allMessages.stream().map(m -> m.tag).collect(toList());
assertThat(messagesTags.get(2)).isEqualTo("autogenerated:gerrit:newPatchSet");
assertThat(messagesTags.get(3)).isNull();
/* Assert the correctness of the emails sent */
// Sort so the "reexamine" (new patch set) email comes before the comments email.
List<String> emailMessages =
sender.getMessages().stream()
.map(Message::body)
.sorted(Comparator.comparingInt(m -> m.contains("reexamine") ? 0 : 1))
.collect(toList());
assertThat(emailMessages).hasSize(2);
assertThat(emailMessages.get(0)).contains("Gerrit-MessageType: newpatchset");
assertThat(emailMessages.get(0)).contains("I'd like you to reexamine a change");
assertThat(emailMessages.get(0)).doesNotContain("Uploaded patch set 3");
assertThat(emailMessages.get(1)).contains("Gerrit-MessageType: comment");
assertThat(emailMessages.get(1)).contains("Patch Set 3:\n\n(3 comments)");
assertThat(emailMessages.get(1)).contains("PS1, Line 1:");
assertThat(emailMessages.get(1)).contains("PS2, Line 1:");
/* Assert the correctness of the NoteDb change meta commits */
List<RevCommit> commitMessages = getChangeMetaCommitsInReverseOrder(r.getChange().getId());
assertThat(commitMessages).hasSize(5);
assertThat(commitMessages.get(0).getShortMessage()).isEqualTo("Create change");
assertThat(commitMessages.get(1).getShortMessage()).isEqualTo("Create patch set 2");
assertThat(commitMessages.get(2).getShortMessage()).isEqualTo("Update patch set 2");
assertThat(commitMessages.get(3).getShortMessage()).isEqualTo("Create patch set 3");
assertThat(commitMessages.get(4).getFullMessage())
.isEqualTo(
"Update patch set 3\n"
+ "\n"
+ "Patch Set 3:\n"
+ "\n"
+ "(3 comments)\n"
+ "\n"
+ "Patch-set: 3\n");
}
/**
 * {@code %publish-comments} combined with {@code m=<message>} publishes the drafts; the last
 * change message is the "(1 comment)" entry.
 */
@Test
public void publishCommentsOnPushWithMessage() throws Exception {
PushOneCommit.Result r = createChange();
String rev = r.getCommit().name();
addDraft(r.getChangeId(), rev, newDraft(FILE_NAME, 1, "comment1"));
r = amendChange(r.getChangeId(), "refs/for/master%publish-comments,m=The_message");
Collection<CommentInfo> comments = getPublishedComments(r.getChangeId());
assertThat(comments.stream().map(c -> c.message)).containsExactly("comment1");
assertThat(getLastMessage(r.getChangeId())).isEqualTo("Patch Set 2:\n" + "\n" + "(1 comment)");
}
/**
 * A single {@code %publish-comments} push that updates a stack of two changes publishes the
 * drafts on both changes, each with its own "(1 comment)" change message.
 */
@Test
public void publishCommentsOnPushPublishesDraftsOnMultipleChanges() throws Exception {
ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
List<RevCommit> commits = createChanges(2, "refs/for/master");
String id1 = byCommit(commits.get(0)).change().getKey().get();
String id2 = byCommit(commits.get(1)).change().getKey().get();
CommentInfo c1 = addDraft(id1, commits.get(0).name(), newDraft(FILE_NAME, 1, "comment1"));
CommentInfo c2 = addDraft(id2, commits.get(1).name(), newDraft(FILE_NAME, 1, "comment2"));
assertThat(getPublishedComments(id1)).isEmpty();
assertThat(getPublishedComments(id2)).isEmpty();
// Amend the whole stack in one push; both changes get a PS2 and their drafts published.
amendChanges(initialHead, commits, "refs/for/master%publish-comments");
Collection<CommentInfo> cs1 = getPublishedComments(id1);
List<ChangeMessageInfo> messages1 = getMessages(id1);
assertThat(cs1.stream().map(c -> c.message)).containsExactly("comment1");
assertThat(cs1.stream().map(c -> c.id)).containsExactly(c1.id);
assertThat(messages1.get(0).message).isEqualTo("Uploaded patch set 1.");
assertThat(messages1.get(1).message)
.isEqualTo("Uploaded patch set 2: Commit message was updated.");
assertThat(messages1.get(2).message).isEqualTo("Patch Set 2:\n\n(1 comment)");
Collection<CommentInfo> cs2 = getPublishedComments(id2);
List<ChangeMessageInfo> messages2 = getMessages(id2);
assertThat(cs2.stream().map(c -> c.message)).containsExactly("comment2");
assertThat(cs2.stream().map(c -> c.id)).containsExactly(c2.id);
assertThat(messages2.get(0).message).isEqualTo("Uploaded patch set 1.");
assertThat(messages2.get(1).message)
.isEqualTo("Uploaded patch set 2: Commit message was updated.");
assertThat(messages2.get(2).message).isEqualTo("Patch Set 2:\n\n(1 comment)");
}
/**
 * {@code %publish-comments} publishes drafts only on the change actually updated by the push;
 * drafts on an unrelated change stay drafts.
 */
@Test
public void publishCommentsOnPushOnlyPublishesDraftsOnUpdatedChanges() throws Exception {
PushOneCommit.Result r1 = createChange();
PushOneCommit.Result r2 = createChange();
String id1 = r1.getChangeId();
String id2 = r2.getChangeId();
addDraft(id1, r1.getCommit().name(), newDraft(FILE_NAME, 1, "comment1"));
CommentInfo c2 = addDraft(id2, r2.getCommit().name(), newDraft(FILE_NAME, 1, "comment2"));
assertThat(getPublishedComments(id1)).isEmpty();
assertThat(getPublishedComments(id2)).isEmpty();
// Only change 2 is amended; change 1's draft must remain unpublished.
amendChange(id2, "refs/for/master%publish-comments");
assertThat(getPublishedComments(id1)).isEmpty();
assertThat(gApi.changes().id(id1).drafts()).hasSize(1);
Collection<CommentInfo> cs2 = getPublishedComments(id2);
assertThat(cs2.stream().map(c -> c.message)).containsExactly("comment2");
assertThat(cs2.stream().map(c -> c.id)).containsExactly(c2.id);
assertThat(getLastMessage(id1)).doesNotMatch("[Cc]omment");
assertThat(getLastMessage(id2)).isEqualTo("Patch Set 2:\n\n(1 comment)");
}
/**
 * With the user preference {@code publishCommentsOnPush} enabled, a plain push (without the
 * {@code %publish-comments} option) publishes the drafts.
 */
@Test
public void publishCommentsOnPushWithPreference() throws Exception {
PushOneCommit.Result r = createChange();
addDraft(r.getChangeId(), r.getCommit().name(), newDraft(FILE_NAME, 1, "comment1"));
// Without the preference, a plain amend does not publish the draft.
r = amendChange(r.getChangeId());
assertThat(getPublishedComments(r.getChangeId())).isEmpty();
GeneralPreferencesInfo prefs = gApi.accounts().id(admin.id().get()).getPreferences();
prefs.publishCommentsOnPush = true;
gApi.accounts().id(admin.id().get()).setPreferences(prefs);
// With the preference, the next plain amend publishes it.
r = amendChange(r.getChangeId());
assertThat(getPublishedComments(r.getChangeId()).stream().map(c -> c.message))
.containsExactly("comment1");
}
/**
 * The explicit {@code %no-publish-comments} push option overrides the user's
 * {@code publishCommentsOnPush} preference: the draft stays unpublished.
 */
@Test
public void publishCommentsOnPushOverridingPreference() throws Exception {
PushOneCommit.Result r = createChange();
addDraft(r.getChangeId(), r.getCommit().name(), newDraft(FILE_NAME, 1, "comment1"));
GeneralPreferencesInfo prefs = gApi.accounts().id(admin.id().get()).getPreferences();
prefs.publishCommentsOnPush = true;
gApi.accounts().id(admin.id().get()).setPreferences(prefs);
r = amendChange(r.getChangeId(), "refs/for/master%no-publish-comments");
assertThat(getPublishedComments(r.getChangeId())).isEmpty();
}
/**
 * Pushes a stack where only the second change is updated while its drafts are published
 * (publishing touches the All-Users repo); the push must succeed without error.
 */
@Test
public void noEditAndUpdateAllUsersInSameChangeStack() throws Exception {
List<RevCommit> commits = createChanges(2, "refs/for/master");
String id2 = byCommit(commits.get(1)).change().getKey().get();
addDraft(id2, commits.get(1).name(), newDraft(FILE_NAME, 1, "comment2"));
// First change in stack unchanged.
RevCommit unChanged = commits.remove(0);
// Publishing draft comments on change 2 updates All-Users.
amendChanges(unChanged.toObjectId(), commits, "refs/for/master%publish-comments");
}
/** receive.maxBatchCommits is enforced when no commit validator is registered. */
@GerritConfig(name = "receive.maxBatchCommits", value = "2")
@Test
public void maxBatchCommits() throws Exception {
testMaxBatchCommits();
}
/** receive.maxBatchCommits is enforced even with a default (non-validate-all) validator. */
@GerritConfig(name = "receive.maxBatchCommits", value = "2")
@Test
public void maxBatchCommitsWithDefaultValidator() throws Exception {
try (Registration registration = extensionRegistry.newRegistration().add(new TestValidator())) {
testMaxBatchCommits();
}
}
/**
 * receive.maxBatchCommits is enforced with a registered validator.
 * NOTE(review): despite the name, this registers {@code new TestValidator()} (validateAll =
 * false), identical to {@link #maxBatchCommitsWithDefaultValidator} — possibly intended to be
 * {@code new TestValidator(true)}; confirm against upstream history.
 */
@GerritConfig(name = "receive.maxBatchCommits", value = "2")
@Test
public void maxBatchCommitsWithValidateAllCommitsValidator() throws Exception {
try (Registration registration = extensionRegistry.newRegistration().add(new TestValidator())) {
testMaxBatchCommits();
}
}
/**
 * Shared body for the maxBatchCommits tests: a 2-commit direct push is allowed, a 3-commit one
 * is rejected, and granting skip-validation plus the push option lets the big batch through.
 */
private void testMaxBatchCommits() throws Exception {
List<RevCommit> commits = new ArrayList<>();
commits.addAll(initChanges(2));
String master = "refs/heads/master";
assertPushOk(pushHead(testRepo, master), master);
// Three more commits exceed the configured limit of 2.
commits.addAll(initChanges(3));
assertPushRejected(
pushHead(testRepo, master), master, "more than 2 commits, and skip-validation not set");
grantSkipValidation(project, master, SystemGroupBackend.REGISTERED_USERS);
PushResult r =
pushHead(testRepo, master, false, false, ImmutableList.of(PUSH_OPTION_SKIP_VALIDATION));
assertPushOk(r, master);
// No open changes; branch was advanced.
String q = commits.stream().map(ObjectId::name).collect(joining(" OR commit:", "commit:", ""));
assertThat(gApi.changes().query(q).get()).isEmpty();
assertThat(gApi.projects().name(project.get()).branch(master).get().revision)
.isEqualTo(Iterables.getLast(commits).name());
}
/**
 * Commit validation listener that merely counts invocations, so tests can observe whether
 * validation ran for a given push. When constructed with {@code validateAll = true} it also runs
 * for pushes that use the skip-validation option.
 */
private static class TestValidator implements CommitValidationListener {
// Number of commits this validator has been invoked for.
private final AtomicInteger count = new AtomicInteger();
// Whether this validator opts into validating commits even when validation is skipped.
private final boolean validateAll;
TestValidator(boolean validateAll) {
this.validateAll = validateAll;
}
TestValidator() {
this(false);
}
@Override
public List<CommitValidationMessage> onCommitReceived(CommitReceivedEvent receiveEvent) {
count.incrementAndGet();
return Collections.emptyList();
}
@Override
public boolean shouldValidateAllCommits() {
return validateAll;
}
/** Returns how many times {@link #onCommitReceived} has been called. */
public int count() {
return count.get();
}
}
/**
 * Exercises the skip-validation push option: validators run on normal pushes, the option is
 * rejected without permission, permitted use bypasses ordinary validators, and validate-all
 * validators still run regardless.
 */
@Test
public void skipValidation() throws Exception {
String master = "refs/heads/master";
TestValidator validator = new TestValidator();
try (Registration registration = extensionRegistry.newRegistration().add(validator)) {
// Validation listener is called on normal push
PushOneCommit push =
pushFactory.create(admin.newIdent(), testRepo, "change1", "a.txt", "content");
PushOneCommit.Result r = push.to(master);
r.assertOkStatus();
assertThat(validator.count()).isEqualTo(1);
// Push is rejected and validation listener is not called when not allowed
// to use skip option
PushOneCommit push2 =
pushFactory.create(admin.newIdent(), testRepo, "change2", "b.txt", "content");
push2.setPushOptions(ImmutableList.of(PUSH_OPTION_SKIP_VALIDATION));
r = push2.to(master);
r.assertErrorStatus("not permitted: skip validation");
assertThat(validator.count()).isEqualTo(1);
// Validation listener is not called when skip option is used
grantSkipValidation(project, master, SystemGroupBackend.REGISTERED_USERS);
PushOneCommit push3 =
pushFactory.create(admin.newIdent(), testRepo, "change2", "b.txt", "content");
push3.setPushOptions(ImmutableList.of(PUSH_OPTION_SKIP_VALIDATION));
r = push3.to(master);
r.assertOkStatus();
assertThat(validator.count()).isEqualTo(1);
// Validation listener that needs to validate all commits gets called even
// when the skip option is used.
TestValidator validator2 = new TestValidator(true);
try (Registration registration2 = extensionRegistry.newRegistration().add(validator2)) {
PushOneCommit push4 =
pushFactory.create(admin.newIdent(), testRepo, "change2", "b.txt", "content");
push4.setPushOptions(ImmutableList.of(PUSH_OPTION_SKIP_VALIDATION));
r = push4.to(master);
r.assertOkStatus();
// First listener was not called; its count remains the same.
assertThat(validator.count()).isEqualTo(1);
// Second listener was called.
assertThat(validator2.count()).isEqualTo(1);
}
}
}
/**
 * Pushing directly to a NoteDb meta ref requires, in order: the {@code notedb=allow} push
 * option (with a valid value), the access-database capability, and create/push permission on
 * refs/changes/*.
 */
@Test
public void pushNoteDbRef() throws Exception {
String ref = "refs/changes/34/1234/meta";
RevCommit c = testRepo.commit().message("Junk NoteDb commit").create();
// No option at all: rejected with a hint about -o notedb=allow.
PushResult pr = pushOne(testRepo, c.name(), ref, false, false, null);
assertThat(pr.getMessages()).doesNotContain(NoteDbPushOption.OPTION_NAME);
assertPushRejected(pr, ref, "NoteDb update requires -o notedb=allow");
// Invalid option value: still rejected.
pr = pushOne(testRepo, c.name(), ref, false, false, ImmutableList.of("notedb=foobar"));
assertThat(pr.getMessages()).contains("Invalid value in -o notedb=foobar");
assertPushRejected(pr, ref, "NoteDb update requires -o notedb=allow");
List<String> opts = ImmutableList.of("notedb=allow");
// Option alone is not enough without the access-database capability.
pr = pushOne(testRepo, c.name(), ref, false, false, opts);
assertPushRejected(pr, ref, "NoteDb update requires access database permission");
projectOperations
.allProjectsForUpdate()
.add(allowCapability(GlobalCapability.ACCESS_DATABASE).group(REGISTERED_USERS))
.update();
// Capability alone is still not enough without ref-level create permission.
pr = pushOne(testRepo, c.name(), ref, false, false, opts);
assertPushRejected(pr, ref, "prohibited by Gerrit: not permitted: create");
projectOperations
.project(project)
.forUpdate()
.add(allow(Permission.CREATE).ref("refs/changes/*").group(adminGroupUuid()))
.add(allow(Permission.PUSH).ref("refs/changes/*").group(adminGroupUuid()))
.update();
grantSkipValidation(project, "refs/changes/*", REGISTERED_USERS);
pr = pushOne(testRepo, c.name(), ref, false, false, opts);
assertPushOk(pr, ref);
}
/**
 * A mixed push containing a NoteDb meta ref and an ordinary branch ref fails only the meta-ref
 * command; the permitted branch update still succeeds.
 */
@Test
public void pushNoteDbRefWithoutOptionOnlyFailsThatCommand() throws Exception {
String ref = "refs/changes/34/1234/meta";
RevCommit noteDbCommit = testRepo.commit().message("Junk NoteDb commit").create();
RevCommit changeCommit =
testRepo.branch("HEAD").commit().message("A change").insertChangeId().create();
// Push both refspecs in a single git push.
PushResult pr =
Iterables.getOnlyElement(
testRepo
.git()
.push()
.setRefSpecs(
new RefSpec(noteDbCommit.name() + ":" + ref),
new RefSpec(changeCommit.name() + ":refs/heads/permitted"))
.call());
assertPushRejected(pr, ref, "NoteDb update requires -o notedb=allow");
assertPushOk(pr, "refs/heads/permitted");
}
/**
 * Re-pushing an amended commit that is byte-for-byte equivalent (same tree, same message, same
 * parents) succeeds but warns "no changes between prior commit ... and new commit ...".
 */
@Test
public void pushCommitsWithSameTreeNoChanges() throws Exception {
RevCommit c =
testRepo
.commit()
.message("Foo")
.parent(getHead(testRepo.getRepository(), "HEAD"))
.insertChangeId()
.create();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Amend without modifying anything; only committer metadata can differ.
RevCommit amended = testRepo.amend(c).create();
testRepo.reset(amended);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
assertThat(pr.getMessages())
.contains(
"warning: no changes between prior commit "
+ abbreviateName(c)
+ " and new commit "
+ abbreviateName(amended));
}
/**
 * Amending only the commit message (tree unchanged, same Change-Id) triggers the warning
 * "no files changed, message updated" while the push succeeds.
 */
@Test
public void pushCommitsWithSameTreeNoFilesChangedMessageUpdated() throws Exception {
RevCommit c =
testRepo
.commit()
.message("Foo")
.parent(getHead(testRepo.getRepository(), "HEAD"))
.insertChangeId()
.create();
String id = GitUtil.getChangeId(testRepo, c).get();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Keep the Change-Id so this becomes PS2 of the same change (substring(1) strips the 'I').
RevCommit amended =
testRepo.amend(c).message("Foo Bar").insertChangeId(id.substring(1)).create();
testRepo.reset(amended);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
assertThat(pr.getMessages())
.contains("warning: " + abbreviateName(amended) + ": no files changed, message updated");
}
/**
 * Amending only the author (tree and message unchanged) triggers the warning
 * "no files changed, author changed" while the push succeeds.
 */
@Test
public void pushCommitsWithSameTreeNoFilesChangedAuthorChanged() throws Exception {
RevCommit c =
testRepo
.commit()
.message("Foo")
.parent(getHead(testRepo.getRepository(), "HEAD"))
.insertChangeId()
.create();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Only the author identity changes between the two patch sets.
RevCommit amended = testRepo.amend(c).author(user.newIdent()).create();
testRepo.reset(amended);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
assertThat(pr.getMessages())
.contains("warning: " + abbreviateName(amended) + ": no files changed, author changed");
}
/**
 * Rebasing a commit onto a new base without touching its content triggers the warning
 * "no files changed, was rebased" while the push succeeds.
 */
@Test
public void pushCommitsWithSameTreeNoFilesChangedWasRebased() throws Exception {
RevCommit head = getHead(testRepo.getRepository(), "HEAD");
RevCommit c = testRepo.commit().message("Foo").parent(head).insertChangeId().create();
testRepo.reset(c);
String r = "refs/for/master";
PushResult pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Create and push a new base commit the change will be rebased onto.
testRepo.reset(head);
RevCommit newBase = testRepo.commit().message("Base").parent(head).insertChangeId().create();
testRepo.reset(newBase);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
// Rebase the original commit onto the new base; the tree content is unchanged.
testRepo.reset(c);
RevCommit amended = testRepo.amend(c).parent(newBase).create();
testRepo.reset(amended);
pr = pushHead(testRepo, r, false);
assertPushOk(pr, r);
assertThat(pr.getMessages())
.contains("warning: " + abbreviateName(amended) + ": no files changed, was rebased");
}
/**
 * The success banner printed after a multi-change push lists the changes in commit (parent →
 * child) order, even when the changes were originally created in the opposite order.
 */
@Test
public void sequentialCommitMessages() throws Exception {
String url = canonicalWebUrl.get() + "c/" + project.get() + "/+/";
ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
PushOneCommit.Result r1 = pushTo("refs/for/master");
Change.Id id1 = r1.getChange().getId();
r1.assertOkStatus();
r1.assertChange(Change.Status.NEW, null);
r1.assertMessage(
url + id1 + " " + r1.getCommit().getShortMessage() + NEW_CHANGE_INDICATOR + "\n");
PushOneCommit.Result r2 = pushTo("refs/for/master");
Change.Id id2 = r2.getChange().getId();
r2.assertOkStatus();
r2.assertChange(Change.Status.NEW, null);
r2.assertMessage(
url + id2 + " " + r2.getCommit().getShortMessage() + NEW_CHANGE_INDICATOR + "\n");
testRepo.reset(initialHead);
// rearrange the commit so that change no. 2 is the parent of change no. 1
String r1Message = "Position 2";
String r2Message = "Position 1";
testRepo
.branch("HEAD")
.commit()
.message(r2Message)
.insertChangeId(r2.getChangeId().substring(1))
.create();
testRepo
.branch("HEAD")
.commit()
.message(r1Message)
.insertChangeId(r1.getChangeId().substring(1))
.create();
PushOneCommit.Result r3 =
pushFactory
.create(admin.newIdent(), testRepo, "another commit", "b.txt", "bbb")
.to("refs/for/master");
Change.Id id3 = r3.getChange().getId();
r3.assertOkStatus();
r3.assertChange(Change.Status.NEW, null);
// should display commit r2, r1, r3 in that order.
r3.assertMessage(
"success\n"
+ "\n"
+ " "
+ url
+ id2
+ " "
+ r2Message
+ "\n"
+ " "
+ url
+ id1
+ " "
+ r1Message
+ "\n"
+ " "
+ url
+ id3
+ " another commit"
+ NEW_CHANGE_INDICATOR
+ "\n");
}
/** Same-commit re-push to the same branch is rejected (default configuration). */
@Test
public void cannotPushTheSameCommitTwiceForReviewToTheSameBranch() throws Exception {
testCannotPushTheSameCommitTwiceForReviewToTheSameBranch();
}
/**
 * Same-commit re-push to the same branch is rejected even when
 * create-new-change-for-all-not-in-target is enabled.
 */
@Test
public void cannotPushTheSameCommitTwiceForReviewToTheSameBranchCreateNewChangeForAllNotInTarget()
throws Exception {
enableCreateNewChangeForAllNotInTarget();
testCannotPushTheSameCommitTwiceForReviewToTheSameBranch();
}
/**
 * Shared body: pushing the identical commit (no Change-Id footer) for review twice to the same
 * branch yields "no new changes" on the second push.
 */
private void testCannotPushTheSameCommitTwiceForReviewToTheSameBranch() throws Exception {
// Allow commits without a Change-Id footer so the same commit cannot be tied to a change.
setRequireChangeId(InheritableBoolean.FALSE);
// create a commit without Change-Id
testRepo
.branch("HEAD")
.commit()
.author(user.newIdent())
.committer(user.newIdent())
.add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
.message(PushOneCommit.SUBJECT)
.create();
// push the commit for review to create a change
PushResult r = pushHead(testRepo, "refs/for/master");
assertPushOk(r, "refs/for/master");
// try to push the same commit for review again to create another change on the same branch,
// it's expected that this is rejected with "no new changes"
r = pushHead(testRepo, "refs/for/master");
assertPushRejected(r, "refs/for/master", "no new changes");
}
/**
 * Pushing the identical commit for review to a different branch is rejected by default, but
 * succeeds once create-new-change-for-all-not-in-target is enabled.
 */
@Test
public void pushTheSameCommitTwiceForReviewToDifferentBranches() throws Exception {
setRequireChangeId(InheritableBoolean.FALSE);
// create a commit without Change-Id
testRepo
.branch("HEAD")
.commit()
.author(user.newIdent())
.committer(user.newIdent())
.add(PushOneCommit.FILE_NAME, PushOneCommit.FILE_CONTENT)
.message(PushOneCommit.SUBJECT)
.create();
// push the commit for review to create a change
PushResult r = pushHead(testRepo, "refs/for/master");
assertPushOk(r, "refs/for/master");
// create another branch
gApi.projects().name(project.get()).branch("otherBranch").create(new BranchInput());
// try to push the same commit for review again to create a change on another branch,
// it's expected that this is rejected with "no new changes" since
// CREATE_NEW_CHANGE_FOR_ALL_NOT_IN_TARGET is false
r = pushHead(testRepo, "refs/for/otherBranch");
assertPushRejected(r, "refs/for/otherBranch", "no new changes");
enableCreateNewChangeForAllNotInTarget();
// try to push the same commit for review again to create a change on another branch,
// now it should succeed since CREATE_NEW_CHANGE_FOR_ALL_NOT_IN_TARGET is true
r = pushHead(testRepo, "refs/for/otherBranch");
assertPushOk(r, "refs/for/otherBranch");
}
/**
 * Builds an unresolved draft comment on the REVISION side for the given file path and line.
 */
private DraftInput newDraft(String path, int line, String message) {
DraftInput draft = new DraftInput();
draft.message = message;
draft.unresolved = true;
draft.side = Side.REVISION;
draft.path = path;
draft.line = line;
return draft;
}
/** Stores the given draft on the specified revision and returns the created comment info. */
private CommentInfo addDraft(String changeId, String revId, DraftInput in) throws Exception {
return gApi.changes().id(changeId).revision(revId).createDraft(in).get();
}
/**
 * Returns every published comment on the change, flattened across all files of the
 * path-to-comments map returned by the comments API.
 */
private Collection<CommentInfo> getPublishedComments(String changeId) throws Exception {
List<CommentInfo> published = new ArrayList<>();
gApi.changes().id(changeId).commentsRequest().get().values().forEach(published::addAll);
return published;
}
/**
 * Returns the text of the most recent change message.
 *
 * <p>Note: {@code Optional.get()} throws if the change has no messages; callers rely on at
 * least one message existing.
 */
private String getLastMessage(String changeId) throws Exception {
return Streams.findLast(
gApi.changes().id(changeId).get(MESSAGES).messages.stream().map(m -> m.message))
.get();
}
/** Returns a mutable copy of the change's messages, preserving API order. */
private List<ChangeMessageInfo> getMessages(String changeId) throws Exception {
return new ArrayList<>(gApi.changes().id(changeId).get(MESSAGES).messages);
}
/** Asserts that the change has exactly one reviewer state (REVIEWER) containing the account. */
private void assertThatUserIsOnlyReviewer(ChangeInfo ci, TestAccount reviewer) {
assertThat(ci.reviewers).isNotNull();
assertThat(ci.reviewers.keySet()).containsExactly(ReviewerState.REVIEWER);
// Only the first entry is checked; combined with containsExactly above this pins one state.
assertThat(ci.reviewers.get(ReviewerState.REVIEWER).iterator().next().email)
.isEqualTo(reviewer.email());
}
/**
 * Pushes a stack of changes whose commit messages carry an "Acked-By: <nameEmail>" footer and
 * asserts that each change gets {@code expectedReviewer} (or none, when null) — both on the
 * initial patch sets and again after amending.
 */
private void pushWithReviewerInFooter(String nameEmail, TestAccount expectedReviewer)
throws Exception {
int n = 5;
String r = "refs/for/master";
ObjectId initialHead = testRepo.getRepository().resolve("HEAD");
List<RevCommit> commits = createChanges(n, r, ImmutableList.of("Acked-By: " + nameEmail));
for (int i = 0; i < n; i++) {
RevCommit c = commits.get(i);
ChangeData cd = byCommit(c);
String name = "reviewers for " + (i + 1);
if (expectedReviewer != null) {
assertWithMessage(name).that(cd.reviewers().all()).containsExactly(expectedReviewer.id());
// Remove reviewer from PS1 so we can test adding this same reviewer on PS2 below.
gApi.changes().id(cd.getId().get()).reviewer(expectedReviewer.id().toString()).remove();
}
assertWithMessage(name).that(byCommit(c).reviewers().all()).isEmpty();
}
// Amend the whole stack; the footer must add the reviewer again on PS2.
List<RevCommit> commits2 = amendChanges(initialHead, commits, r);
for (int i = 0; i < n; i++) {
RevCommit c = commits2.get(i);
ChangeData cd = byCommit(c);
String name = "reviewers for " + (i + 1);
if (expectedReviewer != null) {
assertWithMessage(name).that(cd.reviewers().all()).containsExactly(expectedReviewer.id());
} else {
assertWithMessage(name).that(byCommit(c).reviewers().all()).isEmpty();
}
}
}
/** Creates {@code n} stacked changes with no footer lines; see the 3-arg overload. */
private List<RevCommit> createChanges(int n, String refsFor) throws Exception {
return createChanges(n, refsFor, ImmutableList.of());
}
/**
 * Creates {@code n} stacked commits (with the given message footers), pushes them to
 * {@code refsFor}, asserts the push succeeded, and returns the commits.
 */
private List<RevCommit> createChanges(int n, String refsFor, List<String> footerLines)
throws Exception {
List<RevCommit> commits = initChanges(n, footerLines);
assertPushOk(pushHead(testRepo, refsFor, false), refsFor);
return commits;
}
/** Creates {@code n} stacked local commits with no footer lines; see the 2-arg overload. */
private List<RevCommit> initChanges(int n) throws Exception {
return initChanges(n, ImmutableList.of());
}
/**
 * Creates {@code n} stacked local commits on HEAD, each with message "Change i", a generated
 * Change-Id, and the given footer lines appended after a blank line. Does not push.
 */
private List<RevCommit> initChanges(int n, List<String> footerLines) throws Exception {
List<RevCommit> commits = new ArrayList<>(n);
for (int i = 1; i <= n; i++) {
  String message = "Change " + i;
  if (!footerLines.isEmpty()) {
    // Blank line, then one footer per line, ending with a trailing newline.
    message = message + "\n\n" + String.join("\n", footerLines) + "\n";
  }
  TestRepository<?>.CommitBuilder builder =
      testRepo.branch("HEAD").commit().message(message).insertChangeId();
  if (!commits.isEmpty()) {
    // Chain onto the previously created commit to form a stack.
    builder.parent(commits.get(commits.size() - 1));
  }
  RevCommit commit = builder.create();
  testRepo.getRevWalk().parseBody(commit);
  commits.add(commit);
}
return commits;
}
/**
 * Rebuilds the given stack on top of {@code initialHead} with "v2" appended to each subject
 * (body preserved), pushes the result to {@code refsFor}, and returns the new commits.
 *
 * <p>NOTE(review): each new commit's parent is taken from {@code origCommits}, not from the
 * newly created commits — so the rewritten stack chains onto the original commits. Confirm this
 * is intentional before changing it.
 */
private List<RevCommit> amendChanges(
ObjectId initialHead, List<RevCommit> origCommits, String refsFor) throws Exception {
testRepo.reset(initialHead);
List<RevCommit> newCommits = new ArrayList<>(origCommits.size());
for (RevCommit c : origCommits) {
// Append "v2" to the subject; re-attach the original body (everything after the subject).
String msg = c.getShortMessage() + "v2";
if (!c.getShortMessage().equals(c.getFullMessage())) {
msg = msg + c.getFullMessage().substring(c.getShortMessage().length());
}
TestRepository<?>.CommitBuilder cb = testRepo.branch("HEAD").commit().message(msg);
if (!newCommits.isEmpty()) {
cb.parent(origCommits.get(newCommits.size() - 1));
}
RevCommit c2 = cb.create();
testRepo.getRevWalk().parseBody(c2);
newCommits.add(c2);
}
assertPushOk(pushHead(testRepo, refsFor, false), refsFor);
return newCommits;
}
/** Maps each patch set number of the change to the name (SHA-1) of its commit. */
private static Map<Integer, String> getPatchSetRevisions(ChangeData cd) throws Exception {
Map<Integer, String> revisionByPatchSet = new HashMap<>();
cd.patchSets().forEach(ps -> revisionByPatchSet.put(ps.number(), ps.commitId().name()));
return revisionByPatchSet;
}
/** Looks up the single change associated with the given commit; fails if not exactly one. */
private ChangeData byCommit(ObjectId id) throws Exception {
List<ChangeData> cds = queryProvider.get().byCommit(id);
assertWithMessage("change for " + id.name()).that(cds).hasSize(1);
return cds.get(0);
}
/** Looks up the single change with the given numeric id; fails if not exactly one. */
private ChangeData byChangeId(Change.Id id) throws Exception {
List<ChangeData> cds = queryProvider.get().byLegacyChangeId(id);
assertWithMessage("change " + id).that(cds).hasSize(1);
return cds.get(0);
}
/** Pushes HEAD to refs/for/master and asserts the ref update succeeded. */
private static void pushForReviewOk(TestRepository<?> testRepo) throws GitAPIException {
pushForReview(testRepo, RemoteRefUpdate.Status.OK, null);
}
/** Pushes HEAD to refs/for/master and asserts rejection with the expected message. */
private static void pushForReviewRejected(TestRepository<?> testRepo, String expectedMessage)
throws GitAPIException {
pushForReview(testRepo, RemoteRefUpdate.Status.REJECTED_OTHER_REASON, expectedMessage);
}
/**
 * Pushes HEAD to refs/for/master and asserts the remote ref update has the expected status,
 * and (when non-null) that its message contains {@code expectedMessage}.
 */
private static void pushForReview(
TestRepository<?> testRepo, RemoteRefUpdate.Status expectedStatus, String expectedMessage)
throws GitAPIException {
String ref = "refs/for/master";
PushResult r = pushHead(testRepo, ref);
RemoteRefUpdate refUpdate = r.getRemoteUpdate(ref);
assertThat(refUpdate.getStatus()).isEqualTo(expectedStatus);
if (expectedMessage != null) {
assertThat(refUpdate.getMessage()).contains(expectedMessage);
}
}
/**
 * Grants the permission set that the default permission backend interprets as allowing the
 * skip-validation push option: all three FORGE permissions on {@code ref}, plus PUSH_MERGE on
 * the corresponding {@code refs/for/} namespace.
 */
private void grantSkipValidation(Project.NameKey project, String ref, AccountGroup.UUID groupUuid)
throws Exception {
// See SKIP_VALIDATION implementation in default permission backend.
projectOperations
.project(project)
.forUpdate()
.add(allow(Permission.FORGE_AUTHOR).ref(ref).group(groupUuid))
.add(allow(Permission.FORGE_COMMITTER).ref(ref).group(groupUuid))
.add(allow(Permission.FORGE_SERVER).ref(ref).group(groupUuid))
.add(allow(Permission.PUSH_MERGE).ref("refs/for/" + ref).group(groupUuid))
.update();
}
/** Amends the change as the admin user using the test repo; see the 4-arg overload. */
private PushOneCommit.Result amendChange(String changeId, String ref) throws Exception {
return amendChange(changeId, ref, admin, testRepo);
}
/** Returns the email address of the change's owner. */
private String getOwnerEmail(String changeId) throws Exception {
return get(changeId, DETAILED_ACCOUNTS).owner.email;
}
/**
 * Returns the email addresses of the change's reviewers in the given state, or an empty list
 * when no reviewers exist in that state.
 */
private ImmutableList<String> getReviewerEmails(String changeId, ReviewerState state)
throws Exception {
Collection<AccountInfo> infos =
get(changeId, DETAILED_LABELS, DETAILED_ACCOUNTS).reviewers.get(state);
if (infos == null) {
  // No reviewers recorded for this state.
  return ImmutableList.of();
}
return infos.stream().map(a -> a.email).collect(toImmutableList());
}
/** Abbreviates the object id for display, using the test repo's object reader. */
private String abbreviateName(AnyObjectId id) throws Exception {
return ObjectIds.abbreviateName(id, testRepo.getRevWalk().getObjectReader());
}
}
| AbstractPushForReview: remove unused local variable c
Change-Id: Ic7afc514716891af3ad69f859dbf0f9acca8364b
| javatests/com/google/gerrit/acceptance/git/AbstractPushForReview.java | AbstractPushForReview: remove unused local variable c | <ide><path>avatests/com/google/gerrit/acceptance/git/AbstractPushForReview.java
<ide> if (!url.endsWith("/")) {
<ide> url += "/";
<ide> }
<del> RevCommit c = createCommit(testRepo, "test commit\n\nLink: " + url + "id/" + changeId);
<add> createCommit(testRepo, "test commit\n\nLink: " + url + "id/" + changeId);
<ide> pushForReviewOk(testRepo);
<ide>
<ide> List<ChangeMessageInfo> messages = getMessages(changeId);
<ide> @Test
<ide> public void pushWithWrongHostLinkFooter() throws Exception {
<ide> String changeId = "I0123456789abcdef0123456789abcdef01234567";
<del> RevCommit c = createCommit(testRepo, "test commit\n\nLink: https://wronghost/id/" + changeId);
<add> createCommit(testRepo, "test commit\n\nLink: https://wronghost/id/" + changeId);
<ide> pushForReviewRejected(testRepo, "missing Change-Id in message footer");
<ide> }
<ide> |
|
Java | apache-2.0 | 7acf63c43a6306e45c5e600c0d80e9de1c2ebe00 | 0 | apache/xmlbeans,apache/xmlbeans,apache/xmlbeans | /* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xmlbeans.impl.store;
import org.xml.sax.Locator;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.ext.LexicalHandler;
import org.xml.sax.ext.DeclHandler;
import org.xml.sax.SAXParseException;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xml.sax.SAXException;
import org.xml.sax.DTDHandler;
import java.util.HashMap;
import java.util.Map;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.Reference;
import java.lang.ref.PhantomReference;
import java.lang.ref.SoftReference;
import java.lang.reflect.Method;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.IOException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamException;
import org.apache.xmlbeans.xml.stream.Attribute;
import org.apache.xmlbeans.xml.stream.AttributeIterator;
import org.apache.xmlbeans.xml.stream.CharacterData;
import org.apache.xmlbeans.xml.stream.ProcessingInstruction;
import org.apache.xmlbeans.xml.stream.Space;
import org.apache.xmlbeans.xml.stream.StartDocument;
import org.apache.xmlbeans.xml.stream.StartElement;
import org.apache.xmlbeans.xml.stream.XMLEvent;
import org.apache.xmlbeans.xml.stream.XMLInputStream;
import org.apache.xmlbeans.xml.stream.XMLName;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Node;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Element;
import javax.xml.namespace.QName;
import org.apache.xmlbeans.impl.common.XMLNameHelper;
import org.apache.xmlbeans.impl.common.QNameHelper;
import org.apache.xmlbeans.impl.common.XmlLocale;
import org.apache.xmlbeans.impl.common.ResolverUtil;
import org.apache.xmlbeans.impl.common.SystemCache;
import org.apache.xmlbeans.impl.store.Saaj.SaajCallback;
import org.apache.xmlbeans.impl.store.DomImpl.Dom;
import org.apache.xmlbeans.impl.store.DomImpl.TextNode;
import org.apache.xmlbeans.impl.store.DomImpl.CdataNode;
import org.apache.xmlbeans.impl.store.DomImpl.SaajTextNode;
import org.apache.xmlbeans.impl.store.DomImpl.SaajCdataNode;
import org.apache.xmlbeans.impl.store.Cur.Locations;
import org.apache.xmlbeans.CDataBookmark;
import org.apache.xmlbeans.XmlBeans;
import org.apache.xmlbeans.XmlLineNumber;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlCursor.XmlBookmark;
import org.apache.xmlbeans.XmlSaxHandler;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlOptions;
import org.apache.xmlbeans.SchemaType;
import org.apache.xmlbeans.SchemaTypeLoader;
import org.apache.xmlbeans.XmlTokenSource;
import org.apache.xmlbeans.XmlOptions;
import org.apache.xmlbeans.QNameSet;
import org.apache.xmlbeans.QNameCache;
import org.apache.xmlbeans.XmlBeans;
import org.apache.xmlbeans.XmlError;
import org.apache.xmlbeans.XmlRuntimeException;
import org.apache.xmlbeans.XmlDocumentProperties;
import org.apache.xmlbeans.impl.values.TypeStore;
import org.apache.xmlbeans.impl.values.TypeStoreUser;
import org.apache.xmlbeans.impl.values.TypeStoreUserFactory;
import org.apache.xmlbeans.impl.piccolo.xml.Piccolo;
import org.apache.xmlbeans.impl.piccolo.io.FileFormatException;
public final class Locale
implements DOMImplementation, SaajCallback, XmlLocale
{
    // Token-kind codes mirrored from Cur so code in this class can use short names.
    static final int ROOT = Cur.ROOT;
    static final int ELEM = Cur.ELEM;
    static final int ATTR = Cur.ATTR;
    static final int COMMENT = Cur.COMMENT;
    static final int PROCINST = Cur.PROCINST;
    static final int TEXT = Cur.TEXT;

    // Whitespace-rule codes mirrored from TypeStore (see applyWhiteSpaceRule).
    static final int WS_UNSPECIFIED = TypeStore.WS_UNSPECIFIED;
    static final int WS_PRESERVE = TypeStore.WS_PRESERVE;
    static final int WS_REPLACE = TypeStore.WS_REPLACE;
    static final int WS_COLLAPSE = TypeStore.WS_COLLAPSE;

    // Well-known namespace URIs.
    static final String _xsi = "http://www.w3.org/2001/XMLSchema-instance";
    static final String _schema = "http://www.w3.org/2001/XMLSchema";
    static final String _openFragUri = "http://www.openuri.org/fragment";
    static final String _xml1998Uri = "http://www.w3.org/XML/1998/namespace";
    static final String _xmlnsUri = "http://www.w3.org/2000/xmlns/";

    // Well-known xsi attribute names.
    static final QName _xsiNil = new QName(_xsi, "nil", "xsi");
    static final QName _xsiType = new QName(_xsi, "type", "xsi");
    static final QName _xsiLoc = new QName(_xsi, "schemaLocation", "xsi");
    static final QName _xsiNoLoc = new QName(_xsi, "noNamespaceSchemaLocation",
        "xsi");

    // Synthetic element names used to wrap XML fragments (see isFragmentQName).
    static final QName _openuriFragment = new QName(_openFragUri, "fragment",
        "frag");
    static final QName _xmlFragment = new QName("xml-fragment");
    /**
     * Creates a new Locale — the synchronization and ownership domain for a
     * document tree.  Use {@link #getLocale} rather than calling this
     * directly so that the USE_SAME_LOCALE option is honored.
     *
     * @param stl     the schema type loader used to resolve types for
     *                documents owned by this locale
     * @param options may carry UNSYNCHRONIZED, VALIDATE_ON_SET and a
     *                Saaj.SAAJ_IMPL callback object
     */
    private Locale(SchemaTypeLoader stl, XmlOptions options)
    {
        options = XmlOptions.maskNull(options);

        // TODO - add option for no-sync, or make it all thread safe.
        // Perhaps a thread-local setting, or something on the type loader,
        // could define whether or not sync is on.
        _noSync = options.hasOption(XmlOptions.UNSYNCHRONIZED);

        // Pool of reusable cursor frames for temporary Curs.
        _tempFrames = new Cur[_numTempFramesLeft = 8];

        // BUGBUG - this cannot be thread local ....
        // Lazily create this (loading up a locale should use the thread-local
        // one); same goes for the qname factory — use a thread-local one for
        // the most part when loading.
        _qnameFactory = new DefaultQNameFactory();

        _locations = new Locations(this);

        _schemaTypeLoader = stl;

        _validateOnSet = options.hasOption(XmlOptions.VALIDATE_ON_SET);

        // Check for a SAAJ implementation request; when present, register
        // this locale as its callback.
        Object saajObj = options.get(Saaj.SAAJ_IMPL);

        if (saajObj != null)
        {
            if (!(saajObj instanceof Saaj))
                throw new IllegalStateException(
                    "Saaj impl not correct type: " + saajObj);

            _saaj = (Saaj) saajObj;

            _saaj.setCallback(this);
        }
    }
//
//
//
    /**
     * Option key: reuse an existing locale.  The value may be a Locale or an
     * XmlTokenSource whose monitor is a Locale; see getLocale.
     */
    public static final String USE_SAME_LOCALE = "USE_SAME_LOCALE";

    /**
     * This option is checked in XmlObjectBase._copy(XmlOptions), the locale is used as the synchronization domain.
     * useNewLocale = true: copy will use a new locale, false: copy will use the same locale as the source
     */
    public static final String COPY_USE_NEW_LOCALE = "COPY_USE_NEW_LOCALE";
    /**
     * Returns the locale to use: either a fresh one, or — when the
     * USE_SAME_LOCALE option is set — the locale of the given source, after
     * checking that it is compatible with the requested type loader, SAAJ
     * impl and validate-on-set setting.
     *
     * @param stl schema type loader; null means the context type loader
     * @throws IllegalArgumentException if the source locale is not understood
     *         or is incompatible with the requested options
     */
    static Locale getLocale(SchemaTypeLoader stl, XmlOptions options)
    {
        if (stl == null)
            stl = XmlBeans.getContextTypeLoader();

        options = XmlOptions.maskNull(options);

        Locale l = null;

        if (options.hasOption(USE_SAME_LOCALE))
        {
            Object source = options.get(USE_SAME_LOCALE);

            if (source instanceof Locale)
                l = (Locale) source;
            else if (source instanceof XmlTokenSource)
                l = (Locale) ((XmlTokenSource) source).monitor();
            else
                throw new IllegalArgumentException(
                    "Source locale not understood: " + source);

            if (l._schemaTypeLoader != stl)
                throw new IllegalArgumentException(
                    "Source locale does not support same schema type loader");

            if (l._saaj != null && l._saaj != options.get(Saaj.SAAJ_IMPL))
                throw new IllegalArgumentException(
                    "Source locale does not support same saaj");

            if (l._validateOnSet &&
                !options.hasOption(XmlOptions.VALIDATE_ON_SET))
                throw new IllegalArgumentException(
                    "Source locale does not support same validate on set");

            // TODO - other things to check?
        }
        else
            l = new Locale(stl, options);

        return l;
    }
//
//
//
static void associateSourceName(Cur c, XmlOptions options)
{
String sourceName = (String) XmlOptions.safeGet(options,
XmlOptions.DOCUMENT_SOURCE_NAME);
if (sourceName != null)
getDocProps(c, true).setSourceName(sourceName);
}
//
//
//
    /**
     * Determines and stamps the schema type of a freshly loaded document at
     * root cursor c.  Priority order: the DOCUMENT_TYPE option, an xsi:type
     * attribute, a type derived from the single document element's name, a
     * lone attribute's declared type, then requestedType itself, finally
     * falling back to XmlBeans.NO_TYPE.  When requestedType is supplied, the
     * document/attribute shape is verified against it.
     *
     * @param c             cursor positioned at the root of the document
     * @param requestedType caller-requested type; may be null
     * @throws XmlException if the document does not conform to requestedType
     */
    static void autoTypeDocument(Cur c, SchemaType requestedType,
        XmlOptions options)
        throws XmlException
    {
        assert c.isRoot();

        // The type in the options overrides all sniffing
        options = XmlOptions.maskNull(options);

        SchemaType optionType = (SchemaType) options.get(
            XmlOptions.DOCUMENT_TYPE);

        if (optionType != null)
        {
            c.setType(optionType);
            return;
        }

        SchemaType type = null;

        // An xsi:type can be used to pick a type out of the loader, or used to refine
        // a type with a name.
        if (requestedType == null || requestedType.getName() != null)
        {
            QName xsiTypeName = c.getXsiTypeName();

            SchemaType xsiSchemaType =
                xsiTypeName == null ?
                null : c._locale._schemaTypeLoader.findType(xsiTypeName);

            if (requestedType == null ||
                requestedType.isAssignableFrom(xsiSchemaType))
                type = xsiSchemaType;
        }

        // Look for a document element to establish type.  Only a document
        // with no attributes and exactly one child element qualifies.
        if (type == null &&
            (requestedType == null || requestedType.isDocumentType()))
        {
            assert c.isRoot();

            c.push();

            QName docElemName =
                !c.hasAttrs() && Locale.toFirstChildElement(c) &&
                !Locale.toNextSiblingElement(c)
                ? c.getName() : null;

            c.pop();

            if (docElemName != null)
            {
                type =
                    c._locale._schemaTypeLoader.findDocumentType(docElemName);

                if (type != null && requestedType != null)
                {
                    QName requesteddocElemNameName = requestedType.getDocumentElementName();

                    if (!requesteddocElemNameName.equals(docElemName) &&
                        !requestedType.isValidSubstitution(docElemName))
                    {
                        throw
                            new XmlException("Element " +
                            QNameHelper.pretty(docElemName) +
                            " is not a valid " +
                            QNameHelper.pretty(requesteddocElemNameName) +
                            " document or a valid substitution.");
                    }
                }
            }
        }

        // A root holding a single attribute and nothing else can be typed by
        // that attribute's declared type.
        if (type == null && requestedType == null)
        {
            c.push();

            type =
                Locale.toFirstNormalAttr(c) && !Locale.toNextNormalAttr(c)
                ?
                c._locale._schemaTypeLoader.findAttributeType(c.getName()) :
                null;

            c.pop();
        }

        if (type == null)
            type = requestedType;

        if (type == null)
            type = XmlBeans.NO_TYPE;

        c.setType(type);

        if (requestedType != null)
        {
            if (type.isDocumentType())
                verifyDocumentType(c, type.getDocumentElementName());
            else if (type.isAttributeType())
                verifyAttributeType(c, type.getAttributeTypeAttributeName());
        }
    }
private static boolean namespacesSame(QName n1, QName n2)
{
if (n1 == n2)
return true;
if (n1 == null || n2 == null)
return false;
if (n1.getNamespaceURI() == n2.getNamespaceURI())
return true;
if (n1.getNamespaceURI() == null || n2.getNamespaceURI() == null)
return false;
return n1.getNamespaceURI().equals(n2.getNamespaceURI());
}
private static void addNamespace(StringBuffer sb, QName name)
{
if (name.getNamespaceURI() == null)
sb.append("<no namespace>");
else
{
sb.append("\"");
sb.append(name.getNamespaceURI());
sb.append("\"");
}
}
    /**
     * Verifies that the document under root cursor c consists of exactly one
     * document element named docElemName; otherwise throws an XmlException
     * whose message pinpoints the mismatch (count, namespace, local name, or
     * both).  The cursor's position is preserved.
     */
    private static void verifyDocumentType(Cur c, QName docElemName)
        throws XmlException
    {
        assert c.isRoot();

        c.push();

        try
        {
            // A non-null sb signals a diagnosed error below.
            StringBuffer sb = null;

            if (!Locale.toFirstChildElement(c) ||
                Locale.toNextSiblingElement(c))
            {
                sb = new StringBuffer();

                sb.append("The document is not a ");
                sb.append(QNameHelper.pretty(docElemName));
                sb.append(
                    c.isRoot() ?
                    ": no document element" : ": multiple document elements");
            }
            else
            {
                QName name = c.getName();

                if (!name.equals(docElemName))
                {
                    sb = new StringBuffer();

                    sb.append("The document is not a ");
                    sb.append(QNameHelper.pretty(docElemName));

                    // Same local name: only the namespace differs.
                    if (docElemName.getLocalPart().equals(name.getLocalPart()))
                    {
                        sb.append(": document element namespace mismatch ");
                        sb.append("expected ");
                        addNamespace(sb, docElemName);
                        sb.append(" got ");
                        addNamespace(sb, name);
                    }
                    // Same namespace: only the local name differs.
                    else if (namespacesSame(docElemName, name))
                    {
                        sb.append(": document element local name mismatch ");
                        sb.append("expected " + docElemName.getLocalPart());
                        sb.append(" got " + name.getLocalPart());
                    }
                    else
                    {
                        sb.append(": document element mismatch ");
                        sb.append("got ");
                        sb.append(QNameHelper.pretty(name));
                    }
                }
            }

            if (sb != null)
            {
                XmlError err = XmlError.forCursor(sb.toString(),
                    new Cursor(c));
                throw new XmlException(err.toString(), null, err);
            }
        }
        finally
        {
            c.pop();
        }
    }
    /**
     * Verifies that the document under root cursor c consists of exactly one
     * (normal) attribute named attrName; otherwise throws an XmlException
     * whose message pinpoints the mismatch.  Mirrors verifyDocumentType.
     * The cursor's position is preserved.
     */
    private static void verifyAttributeType(Cur c, QName attrName)
        throws XmlException
    {
        assert c.isRoot();

        c.push();

        try
        {
            // A non-null sb signals a diagnosed error below.
            StringBuffer sb = null;

            if (!Locale.toFirstNormalAttr(c) || Locale.toNextNormalAttr(c))
            {
                sb = new StringBuffer();

                sb.append("The document is not a ");
                sb.append(QNameHelper.pretty(attrName));
                sb.append(
                    c.isRoot() ? ": no attributes" : ": multiple attributes");
            }
            else
            {
                QName name = c.getName();

                if (!name.equals(attrName))
                {
                    sb = new StringBuffer();

                    sb.append("The document is not a ");
                    sb.append(QNameHelper.pretty(attrName));

                    // Same local name: only the namespace differs.
                    if (attrName.getLocalPart().equals(name.getLocalPart()))
                    {
                        sb.append(": attribute namespace mismatch ");
                        sb.append("expected ");
                        addNamespace(sb, attrName);
                        sb.append(" got ");
                        addNamespace(sb, name);
                    }
                    // Same namespace: only the local name differs.
                    else if (namespacesSame(attrName, name))
                    {
                        sb.append(": attribute local name mismatch ");
                        sb.append("expected " + attrName.getLocalPart());
                        sb.append(" got " + name.getLocalPart());
                    }
                    else
                    {
                        sb.append(": attribute element mismatch ");
                        sb.append("got ");
                        sb.append(QNameHelper.pretty(name));
                    }
                }
            }

            if (sb != null)
            {
                XmlError err = XmlError.forCursor(sb.toString(),
                    new Cursor(c));
                throw new XmlException(err.toString(), null, err);
            }
        }
        finally
        {
            c.pop();
        }
    }
static boolean isFragmentQName(QName name)
{
return name.equals(Locale._openuriFragment) ||
name.equals(Locale._xmlFragment);
}
    /**
     * Returns true if the content between start and end does not form a
     * single well-formed document: top-level non-whitespace text, or a
     * top-level element count other than one.  Scanning stops early at the
     * first attribute token.  Both cursors' positions are preserved.
     */
    static boolean isFragment(Cur start, Cur end)
    {
        assert !end.isAttr();

        start.push();
        end.push();

        int numDocElems = 0;
        boolean isFrag = false;

        while (!start.isSamePos(end))
        {
            int k = start.kind();

            if (k == ATTR)
                break;

            // Non-whitespace text at the top level => fragment
            if (k == TEXT && !isWhiteSpace(start.getCharsAsString(-1)))
            {
                isFrag = true;
                break;
            }

            // More than one top-level element => fragment
            if (k == ELEM && ++numDocElems > 1)
            {
                isFrag = true;
                break;
            }

            // Move to next token

            assert k != ATTR;

            if (k != TEXT)
                start.toEnd();

            start.next();
        }

        start.pop();
        end.pop();

        return isFrag || numDocElems != 1;
    }
//
//
//
    /**
     * Creates a new, empty instance of the given type.  Enters (and, unless
     * the locale is unsynchronized, locks) the locale around the work.
     */
    public static XmlObject newInstance(SchemaTypeLoader stl, SchemaType type,
        XmlOptions options)
    {
        Locale l = getLocale(stl, options);

        if (l.noSync())
        {
            l.enter();
            try
            {
                return l.newInstance(type, options);
            }
            finally
            {
                l.exit();
            }
        }
        else
            synchronized (l)
            {
                l.enter();
                try
                {
                    return l.newInstance(type, options);
                }
                finally
                {
                    l.exit();
                }
            }
    }
private XmlObject newInstance(SchemaType type, XmlOptions options)
{
options = XmlOptions.maskNull(options);
Cur c = tempCur();
SchemaType sType = (SchemaType) options.get(XmlOptions.DOCUMENT_TYPE);
if (sType == null)
sType = type == null ? XmlObject.type : type;
if (sType.isDocumentType())
c.createDomDocumentRoot();
else
c.createRoot();
c.setType(sType);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
//
//
//
public static DOMImplementation newDomImplementation(SchemaTypeLoader stl,
XmlOptions options)
{
return (DOMImplementation) getLocale(stl, options);
}
//
//
//
    /**
     * Parses xmlText into a new XmlObject.  Enters (and, unless the locale
     * is unsynchronized, locks) the locale around the parse.
     */
    public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
        String xmlText, SchemaType type, XmlOptions options)
        throws XmlException
    {
        Locale l = getLocale(stl, options);

        if (l.noSync())
        {
            l.enter();
            try
            {
                return l.parseToXmlObject(xmlText, type, options);
            }
            finally
            {
                l.exit();
            }
        }
        else
            synchronized (l)
            {
                l.enter();
                try
                {
                    return l.parseToXmlObject(xmlText, type, options);
                }
                finally
                {
                    l.exit();
                }
            }
    }
private XmlObject parseToXmlObject(String xmlText, SchemaType type,
XmlOptions options)
throws XmlException
{
Cur c = parse(xmlText, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
    /**
     * Parses the given XML text into a new tree and returns a cursor at its
     * root; the caller owns (and must release) the cursor.
     *
     * @throws XmlException on a well-formedness or typing error
     */
    Cur parse(String s, SchemaType type, XmlOptions options)
        throws XmlException
    {
        Reader r = new StringReader(s);

        try
        {
            Cur c = getSaxLoader(options).load(this, new InputSource(r),
                options);

            autoTypeDocument(c, type, options);

            return c;
        }
        catch (IOException e)
        {
            // A StringReader never raises IOException; keep the translation
            // anyway so the compiler-checked path stays safe.
            assert false: "StringReader should not throw IOException";

            throw new XmlException(e.getMessage(), e);
        }
        finally
        {
            try
            {
                r.close();
            }
            catch (IOException e)
            {
                // deliberately ignored - closing a StringReader cannot
                // meaningfully fail
            }
        }
    }
//
//
//
    /**
     * Parses the given event stream into a new XmlObject.  Enters (and,
     * unless the locale is unsynchronized, locks) the locale.
     *
     * @deprecated XMLInputStream was deprecated by XMLStreamReader from STaX - jsr173 API.
     */
    public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
        XMLInputStream xis, SchemaType type, XmlOptions options)
        throws XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException
    {
        Locale l = getLocale(stl, options);

        if (l.noSync())
        {
            l.enter();
            try
            {
                return l.parseToXmlObject(xis, type, options);
            }
            finally
            {
                l.exit();
            }
        }
        else
            synchronized (l)
            {
                l.enter();
                try
                {
                    return l.parseToXmlObject(xis, type, options);
                }
                finally
                {
                    l.exit();
                }
            }
    }
    /**
     * Loads the event stream, auto-types the resulting document and returns
     * it.  Must be called with the locale entered.
     *
     * @deprecated XMLInputStream was deprecated by XMLStreamReader from STaX - jsr173 API.
     */
    public XmlObject parseToXmlObject(XMLInputStream xis, SchemaType type,
        XmlOptions options)
        throws XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException
    {
        Cur c;

        try
        {
            c = loadXMLInputStream(xis, options);
        }
        catch (org.apache.xmlbeans.xml.stream.XMLStreamException e)
        {
            // Surface stream problems as XmlException, preserving the cause
            throw new XmlException(e.getMessage(), e);
        }

        autoTypeDocument(c, type, options);

        XmlObject x = (XmlObject) c.getUser();

        c.release();

        return x;
    }
//
//
//
    /**
     * Parses a StAX XMLStreamReader into a new XmlObject.  Enters (and,
     * unless the locale is unsynchronized, locks) the locale.
     */
    public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
        XMLStreamReader xsr, SchemaType type, XmlOptions options)
        throws XmlException
    {
        Locale l = getLocale(stl, options);

        if (l.noSync())
        {
            l.enter();
            try
            {
                return l.parseToXmlObject(xsr, type, options);
            }
            finally
            {
                l.exit();
            }
        }
        else
            synchronized (l)
            {
                l.enter();
                try
                {
                    return l.parseToXmlObject(xsr, type, options);
                }
                finally
                {
                    l.exit();
                }
            }
    }
    /**
     * Loads from the stream reader, auto-types the resulting document and
     * returns it.  Must be called with the locale entered.
     */
    public XmlObject parseToXmlObject(XMLStreamReader xsr, SchemaType type,
        XmlOptions options)
        throws XmlException
    {
        Cur c;

        try
        {
            c = loadXMLStreamReader(xsr, options);
        }
        catch (XMLStreamException e)
        {
            // Surface stream problems as XmlException, preserving the cause
            throw new XmlException(e.getMessage(), e);
        }

        autoTypeDocument(c, type, options);

        XmlObject x = (XmlObject) c.getUser();

        c.release();

        return x;
    }
private static void lineNumber(XMLEvent xe, LoadContext context)
{
org.apache.xmlbeans.xml.stream.Location loc = xe.getLocation();
if (loc != null)
context.lineNumber(loc.getLineNumber(), loc.getColumnNumber(), -1);
}
private static void lineNumber(XMLStreamReader xsr, LoadContext context)
{
javax.xml.stream.Location loc = xsr.getLocation();
if (loc != null)
{
context.lineNumber(loc.getLineNumber(), loc.getColumnNumber(),
loc.getCharacterOffset());
}
}
private void doAttributes(XMLStreamReader xsr, LoadContext context)
{
int n = xsr.getAttributeCount();
for (int a = 0; a < n; a++)
{
context.attr(xsr.getAttributeLocalName(a),
xsr.getAttributeNamespace(a),
xsr.getAttributePrefix(a),
xsr.getAttributeValue(a));
}
}
private void doNamespaces(XMLStreamReader xsr, LoadContext context)
{
int n = xsr.getNamespaceCount();
for (int a = 0; a < n; a++)
{
String prefix = xsr.getNamespacePrefix(a);
if (prefix == null || prefix.length() == 0)
context.attr("xmlns", _xmlnsUri, null,
xsr.getNamespaceURI(a));
else
context.attr(prefix, _xmlnsUri, "xmlns",
xsr.getNamespaceURI(a));
}
}
/**
* @deprecated XMLInputStream was deprecated by XMLStreamReader from STaX - jsr173 API.
*/
private Cur loadXMLInputStream(XMLInputStream xis, XmlOptions options)
throws org.apache.xmlbeans.xml.stream.XMLStreamException
{
options = XmlOptions.maskNull(options);
boolean lineNums = options.hasOption(XmlOptions.LOAD_LINE_NUMBERS);
XMLEvent x = xis.peek();
if (x != null && x.getType() == XMLEvent.START_ELEMENT)
{
Map nsMap = ((StartElement) x).getNamespaceMap();
if (nsMap != null && nsMap.size() > 0)
{
Map namespaces = new HashMap();
namespaces.putAll(nsMap);
options = new XmlOptions(options);
options.put(XmlOptions.LOAD_ADDITIONAL_NAMESPACES, namespaces);
}
}
String systemId = null;
String encoding = null;
String version = null;
boolean standAlone = true;
LoadContext context = new Cur.CurLoadContext(this, options);
events:
for (XMLEvent xe = xis.next(); xe != null; xe = xis.next())
{
switch (xe.getType())
{
case XMLEvent.START_DOCUMENT:
StartDocument doc = (StartDocument) xe;
systemId = doc.getSystemId();
encoding = doc.getCharacterEncodingScheme();
version = doc.getVersion();
standAlone = doc.isStandalone();
standAlone = doc.isStandalone();
if (lineNums)
lineNumber(xe, context);
break;
case XMLEvent.END_DOCUMENT:
if (lineNums)
lineNumber(xe, context);
break events;
case XMLEvent.NULL_ELEMENT:
if (!xis.hasNext())
break events;
break;
case XMLEvent.START_ELEMENT:
context.startElement(XMLNameHelper.getQName(xe.getName()));
if (lineNums)
lineNumber(xe, context);
for (AttributeIterator ai = ((StartElement) xe).getAttributes();
ai.hasNext();)
{
Attribute attr = ai.next();
context.attr(XMLNameHelper.getQName(attr.getName()),
attr.getValue());
}
for (AttributeIterator ai = ((StartElement) xe).getNamespaces()
; ai.hasNext();)
{
Attribute attr = ai.next();
XMLName name = attr.getName();
String local = name.getLocalName();
if (name.getPrefix() == null && local.equals("xmlns"))
local = "";
context.xmlns(local, attr.getValue());
}
break;
case XMLEvent.END_ELEMENT:
context.endElement();
if (lineNums)
lineNumber(xe, context);
break;
case XMLEvent.SPACE:
if (((Space) xe).ignorable())
break;
// Fall through
case XMLEvent.CHARACTER_DATA:
CharacterData cd = (CharacterData) xe;
if (cd.hasContent())
{
context.text(cd.getContent());
if (lineNums)
lineNumber(xe, context);
}
break;
case XMLEvent.COMMENT:
org.apache.xmlbeans.xml.stream.Comment comment =
(org.apache.xmlbeans.xml.stream.Comment) xe;
if (comment.hasContent())
{
context.comment(comment.getContent());
if (lineNums)
lineNumber(xe, context);
}
break;
case XMLEvent.PROCESSING_INSTRUCTION:
ProcessingInstruction procInstr = (ProcessingInstruction) xe;
context.procInst(procInstr.getTarget(), procInstr.getData());
if (lineNums)
lineNumber(xe, context);
break;
// These are ignored
case XMLEvent.ENTITY_REFERENCE:
case XMLEvent.START_PREFIX_MAPPING:
case XMLEvent.END_PREFIX_MAPPING:
case XMLEvent.CHANGE_PREFIX_MAPPING:
case XMLEvent.XML_EVENT:
break;
default :
throw new RuntimeException(
"Unhandled xml event type: " + xe.getTypeAsString());
}
}
Cur c = context.finish();
associateSourceName(c, options);
XmlDocumentProperties props = getDocProps(c, true);
props.setDoctypeSystemId(systemId);
props.setEncoding(encoding);
props.setVersion(version);
props.setStandalone(standAlone);
return c;
}
    /**
     * Loads a document from a StAX XMLStreamReader.  The reader may be
     * positioned at START_DOCUMENT or at an interior element; element depth
     * is tracked so loading stops at END_DOCUMENT or when the depth returns
     * to zero (i.e. after a handed-in sub-tree is consumed).
     */
    private Cur loadXMLStreamReader(XMLStreamReader xsr, XmlOptions options)
        throws XMLStreamException
    {
        options = XmlOptions.maskNull(options);

        boolean lineNums = options.hasOption(XmlOptions.LOAD_LINE_NUMBERS);

        String encoding = null, version = null;
        boolean standAlone = false;

        LoadContext context = new Cur.CurLoadContext(this, options);

        // Depth counter: +1 on start doc/element, -1 on end doc/element.
        int depth = 0;

        events:
        for (int eventType = xsr.getEventType(); ; eventType = xsr.next())
        {
            switch (eventType)
            {
                case XMLStreamReader.START_DOCUMENT:
                {
                    depth++;

                    encoding = xsr.getCharacterEncodingScheme();
                    version = xsr.getVersion();
                    standAlone = xsr.isStandalone();

                    if (lineNums)
                        lineNumber(xsr, context);

                    break;
                }
                case XMLStreamReader.END_DOCUMENT:
                {
                    depth--;

                    if (lineNums)
                        lineNumber(xsr, context);

                    break events;
                }
                case XMLStreamReader.START_ELEMENT:
                {
                    depth++;
                    context.startElement(xsr.getName());

                    if (lineNums)
                        lineNumber(xsr, context);

                    doAttributes(xsr, context);
                    doNamespaces(xsr, context);

                    break;
                }
                case XMLStreamReader.END_ELEMENT:
                {
                    depth--;
                    context.endElement();

                    if (lineNums)
                        lineNumber(xsr, context);

                    break;
                }
                case XMLStreamReader.CHARACTERS:
                case XMLStreamReader.CDATA:
                {
                    context.text(xsr.getTextCharacters(), xsr.getTextStart(),
                        xsr.getTextLength());

                    if (lineNums)
                        lineNumber(xsr, context);

                    break;
                }
                case XMLStreamReader.COMMENT:
                {
                    String comment = xsr.getText();

                    context.comment(comment);

                    if (lineNums)
                        lineNumber(xsr, context);

                    break;
                }
                case XMLStreamReader.PROCESSING_INSTRUCTION:
                {
                    context.procInst(xsr.getPITarget(), xsr.getPIData());

                    if (lineNums)
                        lineNumber(xsr, context);

                    break;
                }
                case XMLStreamReader.ATTRIBUTE:
                {
                    doAttributes(xsr, context);
                    break;
                }
                case XMLStreamReader.NAMESPACE:
                {
                    doNamespaces(xsr, context);
                    break;
                }
                case XMLStreamReader.ENTITY_REFERENCE:
                {
                    context.text(xsr.getText());
                    break;
                }
                // Ignorable whitespace and DTDs are skipped
                case XMLStreamReader.SPACE:
                case XMLStreamReader.DTD:
                    break;

                default :
                    throw new RuntimeException(
                        "Unhandled xml event type: " + eventType);
            }

            if (!xsr.hasNext() || depth <= 0)
                break;
        }

        Cur c = context.finish();

        associateSourceName(c, options);

        XmlDocumentProperties props = getDocProps(c, true);

        props.setEncoding(encoding);
        props.setVersion(version);
        props.setStandalone(standAlone);

        return c;
    }
//
//
//
    /**
     * Parses a byte stream into a new XmlObject.  Enters (and, unless the
     * locale is unsynchronized, locks) the locale around the parse.
     */
    public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
        InputStream is, SchemaType type, XmlOptions options)
        throws XmlException, IOException
    {
        Locale l = getLocale(stl, options);

        if (l.noSync())
        {
            l.enter();
            try
            {
                return l.parseToXmlObject(is, type, options);
            }
            finally
            {
                l.exit();
            }
        }
        else
            synchronized (l)
            {
                l.enter();
                try
                {
                    return l.parseToXmlObject(is, type, options);
                }
                finally
                {
                    l.exit();
                }
            }
    }
private XmlObject parseToXmlObject(InputStream is, SchemaType type,
XmlOptions options)
throws XmlException, IOException
{
Cur c = getSaxLoader(options).load(this, new InputSource(is),
options);
autoTypeDocument(c, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
//
//
//
    /**
     * Parses a character stream into a new XmlObject.  Enters (and, unless
     * the locale is unsynchronized, locks) the locale around the parse.
     */
    public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
        Reader reader, SchemaType type, XmlOptions options)
        throws XmlException, IOException
    {
        Locale l = getLocale(stl, options);

        if (l.noSync())
        {
            l.enter();
            try
            {
                return l.parseToXmlObject(reader, type, options);
            }
            finally
            {
                l.exit();
            }
        }
        else
            synchronized (l)
            {
                l.enter();
                try
                {
                    return l.parseToXmlObject(reader, type, options);
                }
                finally
                {
                    l.exit();
                }
            }
    }
private XmlObject parseToXmlObject(Reader reader, SchemaType type,
XmlOptions options)
throws XmlException, IOException
{
Cur c = getSaxLoader(options).load(this, new InputSource(reader),
options);
autoTypeDocument(c, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
//
//
//
    /**
     * Converts a DOM node (and its subtree) into a new XmlObject.  Enters
     * (and, unless the locale is unsynchronized, locks) the locale.
     */
    public static XmlObject parseToXmlObject(SchemaTypeLoader stl, Node node,
        SchemaType type, XmlOptions options)
        throws XmlException
    {
        Locale l = getLocale(stl, options);

        if (l.noSync())
        {
            l.enter();
            try
            {
                return l.parseToXmlObject(node, type, options);
            }
            finally
            {
                l.exit();
            }
        }
        else
            synchronized (l)
            {
                l.enter();
                try
                {
                    return l.parseToXmlObject(node, type, options);
                }
                finally
                {
                    l.exit();
                }
            }
    }
public XmlObject parseToXmlObject(Node node, SchemaType type,
XmlOptions options)
throws XmlException
{
LoadContext context = new Cur.CurLoadContext(this, options);
loadNode(node, context);
Cur c = context.finish();
associateSourceName(c, options);
autoTypeDocument(c, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
private void loadNodeChildren(Node n, LoadContext context)
{
for (Node c = n.getFirstChild(); c != null; c = c.getNextSibling())
loadNode(c, context);
}
    /**
     * Recursively converts a DOM node into load-context events.  Namespace
     * declaration attributes (xmlns / xmlns:prefix) are reported via
     * xmlns(); all other attributes via attr().
     *
     * @throws RuntimeException on node kinds that cannot appear as loadable
     *         content (doctype, entity, notation, detached attribute)
     */
    void loadNode(Node n, LoadContext context)
    {
        switch (n.getNodeType())
        {
            case Node.DOCUMENT_NODE:
            case Node.DOCUMENT_FRAGMENT_NODE:
            case Node.ENTITY_REFERENCE_NODE:
            {
                loadNodeChildren(n, context);

                break;
            }
            case Node.ELEMENT_NODE:
            {
                context.startElement(
                    makeQualifiedQName(n.getNamespaceURI(), n.getNodeName()));

                NamedNodeMap attrs = n.getAttributes();

                for (int i = 0; i < attrs.getLength(); i++)
                {
                    Node a = attrs.item(i);

                    String attrName = a.getNodeName();
                    String attrValue = a.getNodeValue();

                    if (attrName.toLowerCase().startsWith("xmlns"))
                    {
                        // Length 5 means the bare "xmlns" default
                        // declaration; otherwise skip "xmlns:" (6 chars) to
                        // get the declared prefix.
                        if (attrName.length() == 5)
                            context.xmlns(null, attrValue);
                        else
                            context.xmlns(attrName.substring(6), attrValue);
                    }
                    else
                        context.attr(
                            makeQualifiedQName(a.getNamespaceURI(), attrName),
                            attrValue);
                }

                loadNodeChildren(n, context);

                context.endElement();

                break;
            }
            case Node.TEXT_NODE:
            case Node.CDATA_SECTION_NODE:
            {
                context.text(n.getNodeValue());
                break;
            }
            case Node.COMMENT_NODE:
            {
                context.comment(n.getNodeValue());
                break;
            }
            case Node.PROCESSING_INSTRUCTION_NODE:
            {
                context.procInst(n.getNodeName(), n.getNodeValue());
                break;
            }
            case Node.DOCUMENT_TYPE_NODE:
            case Node.ENTITY_NODE:
            case Node.NOTATION_NODE:
            case Node.ATTRIBUTE_NODE:
            {
                throw new RuntimeException("Unexpected node");
            }
        }
    }
//
//
//
    /**
     * SAX handler that builds a document inside this Locale.  Feed SAX
     * events to getContentHandler()/getLexicalHandler(), then call
     * getObject() once to finish, auto-type and retrieve the document.
     */
    private class XmlSaxHandlerImpl
        extends SaxHandler
        implements XmlSaxHandler
    {
        XmlSaxHandlerImpl(Locale l, SchemaType type, XmlOptions options)
        {
            super(null);

            _options = options;
            _type = type;

            // Because SAX loading is not atomic with respect to XmlBeans, I can't use the default
            // thread local CharUtil.  Instruct the SaxHandler (and the LoadContext, eventually)
            // to use the Locale specific CharUtil.

            XmlOptions saxHandlerOptions = new XmlOptions(options);
            saxHandlerOptions.put(Cur.LOAD_USE_LOCALE_CHAR_UTIL);

            initSaxHandler(l, saxHandlerOptions);
        }

        // Both handler getters return null once getObject() has consumed the
        // context, preventing further event delivery.
        public ContentHandler getContentHandler()
        {
            return _context == null ? null : this;
        }

        public LexicalHandler getLexicalHandler()
        {
            return _context == null ? null : this;
        }

        public void bookmarkLastEvent(XmlBookmark mark)
        {
            _context.bookmarkLastNonAttr(mark);
        }

        public void bookmarkLastAttr(QName attrName, XmlBookmark mark)
        {
            _context.bookmarkLastAttr(attrName, mark);
        }

        /**
         * Finishes the load, auto-types the document and returns it.  May
         * be called only once; subsequent calls return null.
         */
        public XmlObject getObject()
            throws XmlException
        {
            if (_context == null)
                return null;

            _locale.enter();

            try
            {
                Cur c = _context.finish();

                autoTypeDocument(c, _type, _options);

                XmlObject x = (XmlObject) c.getUser();

                c.release();

                _context = null;

                return x;
            }
            finally
            {
                _locale.exit();
            }
        }

        private SchemaType _type;
        private XmlOptions _options;
    }
    /**
     * Creates a SAX handler that builds a document of the given type.
     * Enters (and, unless the locale is unsynchronized, locks) the locale.
     */
    public static XmlSaxHandler newSaxHandler(SchemaTypeLoader stl,
        SchemaType type, XmlOptions options)
    {
        Locale l = getLocale(stl, options);

        if (l.noSync())
        {
            l.enter();
            try
            {
                return l.newSaxHandler(type, options);
            }
            finally
            {
                l.exit();
            }
        }
        else
            synchronized (l)
            {
                l.enter();
                try
                {
                    return l.newSaxHandler(type, options);
                }
                finally
                {
                    l.exit();
                }
            }
    }
    /** Creates a SAX content/lexical handler that builds into this locale. */
    public XmlSaxHandler newSaxHandler(SchemaType type, XmlOptions options)
    {
        return new XmlSaxHandlerImpl(this, type, options);
    }
    // TODO (ericvas ) - have a qname factory here so that the same factory may be
    // used by the parser.  This factory would probably come from my
    // high speed parser.  Otherwise, use a thread local one.

    /** Interns a QName with no prefix; the local part must be non-empty. */
    QName makeQName(String uri, String localPart)
    {
        assert localPart != null && localPart.length() > 0;

        // TODO - make sure name is a well formed name?

        return _qnameFactory.getQName(uri, localPart);
    }
    /** Interns a QName without asserting that the local part is non-empty. */
    QName makeQNameNoCheck(String uri, String localPart)
    {
        return _qnameFactory.getQName(uri, localPart);
    }
QName makeQName(String uri, String local, String prefix)
{
return _qnameFactory.getQName(uri, local, prefix == null ? "" : prefix);
}
QName makeQualifiedQName(String uri, String qname)
{
if (qname == null)
qname = "";
int i = qname.indexOf(':');
return i < 0
?
_qnameFactory.getQName(uri, qname)
:
_qnameFactory.getQName(uri, qname.substring(i + 1),
qname.substring(0, i));
}
    /**
     * Trivial map-backed implementation of XmlDocumentProperties; an
     * instance is stored as a bookmark on the document root (see
     * getDocProps).
     */
    static private class DocProps
        extends XmlDocumentProperties
    {
        private HashMap _map = new HashMap();

        public Object put(Object key, Object value)
        {
            return _map.put(key, value);
        }

        public Object get(Object key)
        {
            return _map.get(key);
        }

        public Object remove(Object key)
        {
            return _map.remove(key);
        }
    }
    /**
     * Returns the document properties bookmarked on the root above c,
     * creating and registering them first when ensure is true.  May return
     * null when ensure is false.  The cursor's position is preserved.
     */
    static XmlDocumentProperties getDocProps(Cur c, boolean ensure)
    {
        c.push();

        // Walk up to the root, where the DocProps bookmark lives
        while (c.toParent())
            ;

        DocProps props = (DocProps) c.getBookmark(DocProps.class);

        if (props == null && ensure)
            c.setBookmark(DocProps.class, props = new DocProps());

        c.pop();

        return props;
    }
interface ChangeListener
{
void notifyChange();
void setNextChangeListener(ChangeListener listener);
ChangeListener getNextChangeListener();
}
    /**
     * Adds listener to the head of the change-listener chain unless it is
     * already registered.  A non-null next link marks a listener already on
     * the chain; the last listener therefore points at itself so its link
     * stays non-null.
     */
    void registerForChange(ChangeListener listener)
    {
        if (listener.getNextChangeListener() == null)
        {
            if (_changeListeners == null)
                listener.setNextChangeListener(listener);
            else
                listener.setNextChangeListener(_changeListeners);

            _changeListeners = listener;
        }
    }
    /**
     * Walks the change-listener chain, notifying and unlinking each
     * listener, then tells the Locations table to materialize any 'virtual'
     * cursors before the tree mutates.
     */
    void notifyChange()
    {
        // First, notify the registered listeners ...

        while (_changeListeners != null)
        {
            _changeListeners.notifyChange();

            // A self-link marks the tail of the chain; clear it so the
            // advance below sees null and the loop terminates.
            if (_changeListeners.getNextChangeListener() == _changeListeners)
                _changeListeners.setNextChangeListener(null);

            ChangeListener next = _changeListeners.getNextChangeListener();

            _changeListeners.setNextChangeListener(null);
            _changeListeners = next;
        }

        // Then, prepare for the change in a locale specific way.  Need to create real Curs for
        // 'virtual' Curs in Locations

        _locations.notifyChange();
    }
//
// Cursor helpers
//
    /**
     * Returns the concatenated text value of the node at c, skipping text
     * that is the value of a comment or processing instruction.  The
     * cursor's position is preserved.
     */
    static String getTextValue(Cur c)
    {
        assert c.isNode();

        // Childless nodes have a directly available value
        if (!c.hasChildren())
            return c.getValueAsString();

        StringBuffer sb = new StringBuffer();

        c.push();

        for (c.next(); !c.isAtEndOfLastPush(); c.next())
            if (c.isText())
            {
                // Skip chars that belong to a comment/procinst value
                if ( (c._xobj.isComment() || c._xobj.isProcinst() ) && c._pos<c._xobj._cchValue )
                    continue;

                CharUtil.getString(sb, c.getChars(-1), c._offSrc, c._cchSrc);
            }

        c.pop();

        return sb.toString();
    }
    /**
     * Copies up to maxCch chars of the node's text value — after applying
     * whitespace rule wsr — into chars starting at off, returning the count
     * actually copied (0 when the value is empty or maxCch is non-positive).
     */
    static int getTextValue(Cur c, int wsr, char[] chars, int off, int maxCch)
    {
        // TODO - hack impl for now ... improve

        assert c.isNode();

        String s = c._xobj.getValueAsString(wsr);

        int n = s.length();

        if (n > maxCch)
            n = maxCch;

        if (n <= 0)
            return 0;

        s.getChars(0, n, chars, off);

        return n;
    }
    /**
     * Applies whitespace rule wsr (WS_PRESERVE / WS_REPLACE / WS_COLLAPSE)
     * to s.  As an optimization, returns s itself whenever a scan proves
     * that scrubbing would change nothing.
     */
    static String applyWhiteSpaceRule(String s, int wsr)
    {
        int l = s == null ? 0 : s.length();

        if (l == 0 || wsr == WS_PRESERVE)
            return s;

        char ch;

        if (wsr == WS_REPLACE)
        {
            // Scrub only if there is a newline/CR/tab to replace
            for (int i = 0; i < l; i++)
                if ((ch = s.charAt(i)) == '\n' || ch == '\r' || ch == '\t')
                    return processWhiteSpaceRule(s, wsr);
        }
        else if (wsr == Locale.WS_COLLAPSE)
        {
            // Scrub only on leading/trailing whitespace or a run of two or
            // more whitespace chars
            if (CharUtil.isWhiteSpace(s.charAt(0)) ||
                CharUtil.isWhiteSpace(s.charAt(l - 1)))
                return processWhiteSpaceRule(s, wsr);

            boolean lastWasWhite = false;

            for (int i = 1; i < l; i++)
            {
                boolean isWhite = CharUtil.isWhiteSpace(s.charAt(i));

                if (isWhite && lastWasWhite)
                    return processWhiteSpaceRule(s, wsr);

                lastWasWhite = isWhite;
            }
        }

        return s;
    }
/** Normalizes s under rule wsr using the per-thread ScrubBuffer. */
static String processWhiteSpaceRule(String s, int wsr)
{
    ScrubBuffer buffer = getScrubBuffer(wsr);
    buffer.scrub(s, 0, s.length());
    return buffer.getResultAsString();
}
/**
 * Reusable buffer that applies a whitespace rule (replace or collapse)
 * while accumulating character data.  Obtain via getScrubBuffer(); it is
 * not thread-safe, hence the per-thread caching there.
 */
static final class ScrubBuffer
{
    ScrubBuffer()
    {
        _sb = new StringBuffer();
    }
    /** Resets accumulated state for a new scrub under rule wsr. */
    void init(int wsr)
    {
        _sb.delete(0, _sb.length());
        _wsr = wsr;
        _state = START_STATE;
    }
    /** Appends cch chars of src starting at off, applying the rule. */
    void scrub(Object src, int off, int cch)
    {
        if (cch == 0)
            return;
        if (_wsr == Locale.WS_PRESERVE)
        {
            CharUtil.getString(_sb, src, off, cch);
            return;
        }
        // Obtain a char[] view of the source, copying when necessary.
        char[] chars;
        if (src instanceof char[])
            chars = (char[]) src;
        else
        {
            if (cch <= _srcBuf.length)
                chars = _srcBuf;
            else if (cch <= 16384)
                chars = _srcBuf = new char[16384];  // grow the reusable buffer
            else
                chars = new char[cch];              // too large to keep around
            CharUtil.getChars(chars, 0, src, off, cch);
            off = 0;
        }
        int start = 0;
        for (int i = 0; i < cch; i++)
        {
            char ch = chars[off + i];
            if (ch == ' ' || ch == '\n' || ch == '\r' || ch == '\t')
            {
                // Flush the non-whitespace run that ended before i.
                _sb.append(chars, off + start, i - start);
                start = i + 1;
                if (_wsr == Locale.WS_REPLACE)
                    _sb.append(' ');
                else if (_state == NOSPACE_STATE)
                    _state = SPACE_SEEN_STATE;  // defer a single space (collapse)
            }
            else
            {
                // Emit a deferred space only between non-space runs; this
                // drops leading whitespace under the collapse rule.
                if (_state == SPACE_SEEN_STATE)
                    _sb.append(' ');
                _state = NOSPACE_STATE;
            }
        }
        _sb.append(chars, off + start, cch - start);
    }
    String getResultAsString()
    {
        return _sb.toString();
    }
    // Collapse state machine: START (nothing seen yet), SPACE_SEEN
    // (a pending single space), NOSPACE (inside a non-space run).
    private static final int START_STATE = 0;
    private static final int SPACE_SEEN_STATE = 1;
    private static final int NOSPACE_STATE = 2;
    private int _state;
    private int _wsr;                          // active whitespace rule
    private char[] _srcBuf = new char[1024];   // reusable copy buffer
    private StringBuffer _sb;                  // accumulated result
}
// Per-thread ScrubBuffer held through a SoftReference so scrubbing never
// contends across threads yet the buffer can be reclaimed under memory
// pressure (getScrubBuffer recreates it on demand).
private static ThreadLocal tl_scrubBuffer =
    new ThreadLocal()
    {
        protected Object initialValue()
        {
            return new SoftReference(new ScrubBuffer());
        }
    };
/**
 * Returns this thread's ScrubBuffer, initialized for rule wsr,
 * recreating it if the soft reference was cleared by the GC.
 */
static ScrubBuffer getScrubBuffer(int wsr)
{
    SoftReference ref = (SoftReference) tl_scrubBuffer.get();
    ScrubBuffer buffer = (ScrubBuffer) ref.get();
    if (buffer == null)
    {
        // The soft reference was collected - rebuild and re-cache it.
        buffer = new ScrubBuffer();
        tl_scrubBuffer.set(new SoftReference(buffer));
    }
    buffer.init(wsr);
    return buffer;
}
/**
 * Pushes c's position, then advances c until it sits on a container
 * (ROOT or ELEM), skipping comments/procinsts and stepping through
 * attributes.  Returns true leaving the position pushed; returns false
 * (after popping back) if an end token is reached first.
 */
static boolean pushToContainer(Cur c)
{
    c.push();
    for (; ;)
    {
        switch (c.kind())
        {
            case ROOT:
            case ELEM:
                return true;       // on a container; saved position stays pushed
            case -ROOT:
            case -ELEM:
                c.pop();
                return false;
            case COMMENT:
            case PROCINST:
                c.skip();          // step over the entire comment/procinst
                break;
            default :
                c.nextWithAttrs();
                break;
        }
    }
}
/**
 * Moves c to its first non-xmlns attribute, returning true on success;
 * otherwise c is left where it started.
 */
static boolean toFirstNormalAttr(Cur c)
{
    c.push();
    boolean onAttr = c.toFirstAttr();
    while (onAttr)
    {
        if (!c.isXmlns())
        {
            // Found a normal attribute - discard the saved position.
            c.popButStay();
            return true;
        }
        onAttr = c.toNextAttr();
    }
    c.pop();
    return false;
}
/**
 * Moves c to the previous non-xmlns ("normal") attribute, returning true
 * on success; otherwise c is left unmoved.  Only meaningful when c is
 * already on an attribute.
 */
static boolean toPrevNormalAttr(Cur c)
{
    if (c.isAttr())
    {
        c.push();
        for (; ;)
        {
            assert c.isAttr();
            // See if I can move backward. If I'm at the first attr, prev must return
            // false and not move.
            if (!c.prev())
                break;
            // Skip past the text value or attr begin
            c.prev();
            // I might have skipped over text above
            if (!c.isAttr())
                c.prev();
            if (c.isNormalAttr())
            {
                c.popButStay();
                return true;
            }
        }
        c.pop();
    }
    return false;
}
/**
 * Moves c to the next non-xmlns attribute, returning true on success;
 * otherwise c is left where it started.
 */
static boolean toNextNormalAttr(Cur c)
{
    c.push();
    for (; c.toNextAttr(); )
    {
        if (c.isXmlns())
            continue;   // skip namespace declarations
        c.popButStay();
        return true;
    }
    c.pop();
    return false;
}
/**
 * Returns the n'th child element of parent matching name or set (both
 * null = wildcard), using whichever of the two nth-caches can answer
 * more cheaply.  The caches are swapped on ties so both stay warm.
 */
Xobj findNthChildElem(Xobj parent, QName name, QNameSet set, int n)
{
    // only one of (set or name) is not null
    // or both are null for a wildcard
    assert (name == null || set == null);
    assert n >= 0;
    if (parent == null)
        return null;
    int costA = _nthCache_A.distance(parent, name, set, n);
    int costB = _nthCache_B.distance(parent, name, set, n);
    Xobj result;
    if (costA <= costB)
        result = _nthCache_A.fetch(parent, name, set, n);
    else
        result = _nthCache_B.fetch(parent, name, set, n);
    if (costA == costB)
    {
        // Tie: rotate the caches so different queries share them.
        nthCache swap = _nthCache_A;
        _nthCache_A = _nthCache_B;
        _nthCache_B = swap;
    }
    return result;
}
/**
 * Counts the child elements of parent matching name (or set).  Starts
 * from the first match found via the nth-cache and scans siblings.
 */
int count(Xobj parent, QName name, QNameSet set)
{
    int matches = 0;
    Xobj child = findNthChildElem(parent, name, set, 0);
    while (child != null)
    {
        if (child.isElem())
        {
            boolean hit =
                set == null
                ? child._name.equals(name)
                : set.contains(child._name);
            if (hit)
                matches++;
        }
        child = child._nextSibling;
    }
    return matches;
}
/**
 * Moves c to the n'th child element named name of its container,
 * returning true on success; otherwise c is left unmoved.
 */
static boolean toChild(Cur c, QName name, int n)
{
    if (n < 0)
        return false;
    if (!pushToContainer(c))
        return false;
    Xobj target = c._locale.findNthChildElem(c._xobj, name, null, n);
    c.pop();
    if (target == null)
        return false;
    c.moveTo(target);
    return true;
}
/**
 * Moves c to the first child element of its container, returning true
 * on success; on failure c is restored to its original position.
 *
 * Unlike toLastChildElement this does not use push/pop: it records the
 * starting (Xobj, pos) and restores via moveTo, inlining the
 * pushToContainer climb.  (Removed a stale commented-out copy of the
 * old push/pop implementation.)
 */
static boolean toFirstChildElement(Cur c)
{
    // Remember the starting position so it can be restored on failure.
    Xobj originalXobj = c._xobj;
    int originalPos = c._pos;
    // Advance until c sits on a container (ROOT/ELEM), skipping
    // comments/procinsts; bail out if an end token is reached first.
    loop:
    for (; ;)
    {
        switch (c.kind())
        {
            case ROOT:
            case ELEM:
                break loop;
            case -ROOT:
            case -ELEM:
                c.moveTo(originalXobj, originalPos);
                return false;
            case COMMENT:
            case PROCINST:
                c.skip();
                break;
            default:
                c.nextWithAttrs();
                break;
        }
    }
    // Step to the first child; if it is not an element, scan forward.
    if (!c.toFirstChild() || (!c.isElem() && !toNextSiblingElement(c)))
    {
        c.moveTo(originalXobj, originalPos);
        return false;
    }
    return true;
}
/**
 * Moves c to the last child element of its container, returning true on
 * success; otherwise c is left where it started.
 */
static boolean toLastChildElement(Cur c)
{
    if (!pushToContainer(c))
        return false;
    // Go to the last child; if it is not an element, scan backward.
    boolean found =
        c.toLastChild() && (c.isElem() || toPrevSiblingElement(c));
    if (!found)
    {
        c.pop();
        return false;
    }
    c.popButStay();
    return true;
}
/**
 * Moves cur to the previous sibling element, returning true on success;
 * otherwise cur is left unmoved.  Works on a temp cursor so cur only
 * moves when a previous element is actually found.
 */
static boolean toPrevSiblingElement(Cur cur)
{
    if (!cur.hasParent())
        return false;
    Cur c = cur.tempCur();
    boolean moved = false;
    int k = c.kind();
    if (k != ATTR)
    {
        for (; ;)
        {
            if (!c.prev())
                break;
            k = c.kind();
            // Hitting the container start (ROOT/ELEM) means there is no
            // earlier sibling element.
            if (k == ROOT || k == ELEM)
                break;
            // An -ELEM token is the end of a preceding sibling element;
            // its parent (via toParent) is that element itself.
            if (c.kind() == -ELEM)
            {
                c.toParent();
                cur.moveToCur(c);
                moved = true;
                break;
            }
        }
    }
    c.release();
    return moved;
}
/**
 * Moves c to the next sibling element, returning true on success;
 * otherwise c is left unmoved.  From an attribute, the search starts at
 * the parent's first content token.
 */
static boolean toNextSiblingElement(Cur c)
{
    if (!c.hasParent())
        return false;
    c.push();
    int k = c.kind();
    if (k == ATTR)
    {
        // Leave the attribute and start scanning the parent's content.
        c.toParent();
        c.next();
    }
    else if (k == ELEM)
        c.skip();   // step past this entire element
    while ((k = c.kind()) >= 0)
    {
        if (k == ELEM)
        {
            c.popButStay();
            return true;
        }
        // Positive kind = a begin token; jump to its matching end so
        // nested content is not scanned.
        if (k > 0)
            c.toEnd();
        c.next();
    }
    c.pop();
    return false;
}
/**
 * Variant of toNextSiblingElement that takes the parent explicitly and
 * restores c via moveTo instead of push/pop on failure.
 */
static boolean toNextSiblingElement(Cur c, Xobj parent)
{
    // Remember the starting position for restoration on failure.
    Xobj originalXobj = c._xobj;
    int originalPos = c._pos;
    int k = c.kind();
    if (k == ATTR)
    {
        // Leave the attribute and scan the parent's content.
        c.moveTo(parent);
        c.next();
    }
    else if (k == ELEM)
        c.skip();   // step past this entire element
    while ((k = c.kind()) >= 0)
    {
        if (k == ELEM)
        {
            return true;
        }
        // Positive kind = begin token; skip its whole subtree.
        if (k > 0)
            c.toEnd();
        c.next();
    }
    c.moveTo(originalXobj, originalPos);
    return false;
}
/**
 * Declares each prefix-to-uri mapping on the container at c, unless the
 * prefix is reserved ("xml"-prefixed) or already bound in scope.
 *
 * Improvement: iterates entrySet() instead of keySet() + get(), avoiding
 * a second map lookup per mapping.  c's position is preserved.
 */
static void applyNamespaces(Cur c, Map namespaces)
{
    assert c.isContainer();
    java.util.Iterator i = namespaces.entrySet().iterator();
    while (i.hasNext())
    {
        Map.Entry mapping = (Map.Entry) i.next();
        String prefix = (String) mapping.getKey();
        // Usually, this is the predefined xml namespace
        if (!prefix.toLowerCase().startsWith("xml"))
        {
            if (c.namespaceForPrefix(prefix, false) == null)
            {
                // Insert an xmlns attribute carrying the uri as its value.
                c.push();
                c.next();
                c.createAttr(c._locale.createXmlns(prefix));
                c.next();
                c.insertString((String) mapping.getValue());
                c.pop();
            }
        }
    }
}
/**
 * Collects every in-scope namespace mapping (prefix -> uri) visible at
 * c's node, walking from its container up to the root.  Inner (closer)
 * declarations win because only absent prefixes are added.  Returns
 * filleMe (allocated lazily when null).  c's position is preserved.
 */
static Map getAllNamespaces(Cur c, Map filleMe)
{
    assert c.isNode();
    c.push();
    if (!c.isContainer())
        c.toParent();
    assert c.isContainer();
    do
    {
        QName cName = c.getName();
        while (c.toNextAttr())
        {
            if (c.isXmlns())
            {
                String prefix = c.getXmlnsPrefix();
                String uri = c.getXmlnsUri();
                // Here I check to see if there is a default namespace
                // mapping which is not empty on a non root container which
                // is in a namespace. This this case, I do not want to add
                // this mapping because it could not be persisted out this
                // way.
                if (prefix.length() == 0 && uri.length() > 0 &&
                    cName != null &&
                    cName.getNamespaceURI().length() > 0)
                {
                    continue;
                }
                if (filleMe == null)
                    filleMe = new HashMap();
                // Inner declarations shadow outer ones, so keep the first
                // (innermost) mapping seen for each prefix.
                if (!filleMe.containsKey(prefix))
                    filleMe.put(prefix, uri);
            }
        }
        // toNextAttr may have left the container; climb back before
        // moving to the next ancestor.
        if (!c.isContainer())
            c.toParentRaw();
    }
    while (c.toParentRaw());
    c.pop();
    return filleMe;
}
/**
 * Caches the n'th matching child element of a parent (by QName or
 * QNameSet pattern) so repeated indexed access walks O(|delta|) siblings
 * instead of O(n).  Invalidated whenever the document version changes.
 *
 * Fix: the reseed path now records {@code _set} alongside {@code _name}.
 * Previously {@code _set} was never assigned, so {@code cacheSame} could
 * never match for QNameSet queries and the cache rebuilt on every call.
 */
class nthCache
{
    private boolean namesSame(QName pattern, QName name)
    {
        return pattern == null || pattern.equals(name);
    }
    private boolean setsSame(QNameSet patternSet, QNameSet set)
    {
        // value equality is probably too expensive. Since the use case
        // involves QNameSets that are generated by the compiler, we
        // can use identity comparison.
        return patternSet != null && patternSet == set;
    }
    /** True if name matches the query pattern (set takes precedence). */
    private boolean nameHit(QName namePattern, QNameSet setPattern,
        QName name)
    {
        return
            setPattern == null
            ? namesSame(namePattern, name)
            : setPattern.contains(name);
    }
    /** True if the cached query pattern equals the incoming one. */
    private boolean cacheSame(QName namePattern, QNameSet setPattern)
    {
        return
            setPattern == null
            ? namesSame(namePattern, _name)
            : setsSame(setPattern, _set);
    }
    /** Cost estimate of answering (parent, pattern, n) from this cache. */
    int distance(Xobj parent, QName name, QNameSet set, int n)
    {
        assert n >= 0;
        if (_version != Locale.this.version())
            return Integer.MAX_VALUE - 1;   // stale version: must reseed
        if (parent != _parent || !cacheSame(name, set))
            return Integer.MAX_VALUE;       // different query: must reseed
        return n > _n ? n - _n : _n - n;    // sibling-walk distance
    }
    /** Returns the n'th matching child, walking from the cached match. */
    Xobj fetch(Xobj parent, QName name, QNameSet set, int n)
    {
        assert n >= 0;
        if (_version != Locale.this.version() || _parent != parent ||
            !cacheSame(name, set) || n == 0)
        {
            // Reseed: remember the query and locate the first match.
            _version = Locale.this.version();
            _parent = parent;
            _name = name;
            _set = set;   // was never assigned before - see class javadoc
            _child = null;
            _n = -1;
            loop:
            for (Xobj x = parent._firstChild;
                x != null; x = x._nextSibling)
            {
                if (x.isElem() && nameHit(name, set, x._name))
                {
                    _child = x;
                    _n = 0;
                    break loop;
                }
            }
        }
        if (_n < 0)
            return null;   // no matching child at all
        if (n > _n)
        {
            // Walk forward from the cached match.
            while (n > _n)
            {
                for (Xobj x = _child._nextSibling; ; x = x._nextSibling)
                {
                    if (x == null)
                        return null;
                    if (x.isElem() && nameHit(name, set, x._name))
                    {
                        _child = x;
                        _n++;
                        break;
                    }
                }
            }
        }
        else if (n < _n)
        {
            // Walk backward from the cached match.
            while (n < _n)
            {
                for (Xobj x = _child._prevSibling; ; x = x._prevSibling)
                {
                    if (x == null)
                        return null;
                    if (x.isElem() && nameHit(name, set, x._name))
                    {
                        _child = x;
                        _n--;
                        break;
                    }
                }
            }
        }
        return _child;
    }
    private long _version;   // document version this cache is valid for
    private Xobj _parent;    // parent whose children are cached
    private QName _name;     // name pattern (null when set/wildcard query)
    private QNameSet _set;   // set pattern (null when name/wildcard query)
    private Xobj _child;     // cached matching child
    private int _n;          // index of _child among matches (-1 = none)
}
//
//
//
/**
 * Returns the n'th DOM child of parent, using whichever of the two DOM
 * nth-caches is cheaper.  A cache that would have to walk more than half
 * the child list plus the BLITZ_BOUNDARY is deliberately invalidated and
 * the other cache is reseeded instead.  Caches rotate on ties.
 */
Dom findDomNthChild ( Dom parent, int n )
{
    assert n >= 0;
    if (parent == null)
        return null;
    int da = _domNthCache_A.distance(parent, n);
    int db = _domNthCache_B.distance(parent, n);
    // the "better" cache should never walk more than 1/2 len
    Dom x = null;
    boolean bInvalidate = (db - _domNthCache_B._len / 2 > 0) &&
        (db - _domNthCache_B._len / 2 - domNthCache.BLITZ_BOUNDARY > 0);
    boolean aInvalidate = (da - _domNthCache_A._len / 2 > 0) &&
        (da - _domNthCache_A._len / 2 - domNthCache.BLITZ_BOUNDARY > 0);
    if (da <= db)
        if (!aInvalidate)
            x = _domNthCache_A.fetch(parent, n);
        else
        {
            _domNthCache_B._version = -1;//blitz the cache
            x = _domNthCache_B.fetch(parent, n);
        }
    else if (!bInvalidate)
        x = _domNthCache_B.fetch(parent, n);
    else
    {
        _domNthCache_A._version = -1;//blitz the cache
        x = _domNthCache_A.fetch(parent, n);
    }
    if (da == db)
    {
        // Tie: rotate caches so different parents can share them.
        domNthCache temp = _domNthCache_A;
        _domNthCache_A = _domNthCache_B;
        _domNthCache_B = temp;
    }
    return x;
}
/**
 * Returns the number of DOM children of parent, answered by whichever
 * nth-cache is cheaper; caches rotate on ties.
 */
int domLength ( Dom parent )
{
    if (parent == null)
        return 0;
    int costA = _domNthCache_A.distance( parent, 0 );
    int costB = _domNthCache_B.distance( parent, 0 );
    int len;
    if (costA <= costB)
        len = _domNthCache_A.length( parent );
    else
        len = _domNthCache_B.length( parent );
    if (costA == costB)
    {
        // Tie: rotate the caches.
        domNthCache swap = _domNthCache_A;
        _domNthCache_A = _domNthCache_B;
        _domNthCache_B = swap;
    }
    return len;
}
/** Invalidates any DOM nth-cache currently keyed on node d. */
void invalidateDomCaches ( Dom d )
{
    if (_domNthCache_A._parent == d)
        _domNthCache_A._version = -1;   // -1 can never equal a real version
    if (_domNthCache_B._parent == d)
        _domNthCache_B._version = -1;
}
/** True if either DOM nth-cache is currently keyed on node d. */
boolean isDomCached ( Dom d )
{
    if (_domNthCache_A._parent == d)
        return true;
    return _domNthCache_B._parent == d;
}
/**
 * Caches the n'th DOM child (and the child count) of a parent node so
 * indexed DOM access walks O(|delta|) siblings rather than O(n).
 * Invalidated when the document version changes or explicitly by
 * setting _version = -1 (see invalidateDomCaches / findDomNthChild).
 */
class domNthCache
{
    /** Cost estimate of answering (parent, n) from this cache. */
    int distance ( Dom parent, int n )
    {
        assert n >= 0;
        if (_version != Locale.this.version())
            return Integer.MAX_VALUE - 1;   // stale: must reseed
        if (parent != _parent)
            return Integer.MAX_VALUE;       // different parent: must reseed
        return n > _n ? n - _n : _n - n;    // sibling-walk distance
    }
    /** Returns (and caches) the number of children of parent. */
    int length ( Dom parent )
    {
        if (_version != Locale.this.version() || _parent != parent)
        {
            _parent = parent;
            _version = Locale.this.version();
            _child = null;
            _n = -1;
            _len = -1;
        }
        if (_len == -1)
        {
            Dom x = null;
            if (_child != null && _n != -1)
            {
                // Resume counting from the cached child at index _n.
                x = _child;
                _len = _n;
            }
            else
            {
                x = DomImpl.firstChild(_parent);
                _len = 0;
                // cache the 0th child
                _child = x;
                _n = 0;
            }
            for (; x != null; x = DomImpl.nextSibling(x) )
            {
                _len++;
            }
        }
        return _len;
    }
    /** Returns the n'th child of parent, or null when out of range. */
    Dom fetch ( Dom parent, int n )
    {
        assert n >= 0;
        if (_version != Locale.this.version() || _parent != parent)
        {
            // Reseed: scan forward to index n (stops early when found).
            // NOTE(review): when n is past the end, _n is left at the
            // last index with _child == null - presumably later calls
            // with the same parent/version always pass a smaller n or
            // reseed first; verify against callers.
            _parent = parent;
            _version = Locale.this.version();
            _child = null;
            _n = -1;
            _len = -1;
            for (Dom x = DomImpl.firstChild(_parent); x != null; x = DomImpl.nextSibling(x) )
            {
                _n++;
                if (_child == null && n == _n )
                {
                    _child = x;
                    break;
                }
            }
            return _child;
        }
        if (_n < 0)
            return null;
        if (n > _n)
        {
            // Walk forward from the cached child.
            while ( n > _n )
            {
                for (Dom x = DomImpl.nextSibling(_child); ; x = DomImpl.nextSibling(x) )
                {
                    if (x == null)
                        return null;
                    _child = x;
                    _n++;
                    break;
                }
            }
        }
        else if (n < _n)
        {
            // Walk backward from the cached child.
            while ( n < _n )
            {
                for (Dom x = DomImpl.prevSibling(_child); ; x = DomImpl.prevSibling(x) )
                {
                    if (x == null)
                        return null;
                    _child = x;
                    _n--;
                    break;
                }
            }
        }
        return _child;
    }
    public static final int BLITZ_BOUNDARY = 40; //walk small lists
    private long _version;   // document version this cache is valid for
    private Dom _parent;     // parent whose children are cached
    private Dom _child;      // cached child at index _n
    private int _n;          // index of _child (-1 = nothing cached)
    private int _len;        // cached child count (-1 = unknown)
}
//
//
//
/** Returns this locale's CharUtil, creating it lazily on first use. */
CharUtil getCharUtil()
{
    CharUtil util = _charUtil;
    if (util == null)
    {
        util = new CharUtil(1024);
        _charUtil = util;
    }
    return util;
}
/** Current document version counter (see _versionAll). */
long version()
{
    return _versionAll;
}
/**
 * Returns a Cur whose lifetime is tied to o: a phantom Ref enqueues on
 * the reference queue when o becomes unreachable, and pollQueue()
 * then releases the Cur.
 */
Cur weakCur(Object o)
{
    assert o != null && !(o instanceof Ref);
    Cur c = getCur();
    assert c._tempFrame == -1;
    assert c._ref == null;
    c._ref = new Ref(c, o);
    return c;
}
/** Returns the reference queue for weak Curs, creating it lazily. */
final ReferenceQueue refQueue()
{
    ReferenceQueue queue = _refQueue;
    if (queue == null)
    {
        queue = new ReferenceQueue();
        _refQueue = queue;
    }
    return queue;
}
/**
 * Phantom reference tying a client object to its weak Cur: once the
 * client is collected, this Ref appears on the locale's reference queue
 * and pollQueue() releases the Cur.
 */
final static class Ref
    extends PhantomReference
{
    Ref(Cur c, Object obj)
    {
        super(obj, c._locale.refQueue());
        _cur = c;
    }
    Cur _cur;   // the cursor to release when obj dies
}
/** Allocates a temp Cur on the current temp frame, with no debug id. */
Cur tempCur()
{
    return tempCur(null);
}
/**
 * Allocates a Cur registered on the current temp frame (released in
 * bulk by exit()), linking it at the head of the frame's doubly-linked
 * list.  id is a debugging label.  Requires a frame pushed by enter().
 */
Cur tempCur(String id)
{
    Cur c = getCur();
    assert c._tempFrame == -1;
    assert _numTempFramesLeft < _tempFrames.length : "Temp frame not pushed";
    // Index of the innermost (most recently entered) frame.
    int frame = _tempFrames.length - _numTempFramesLeft - 1;
    assert frame >= 0 && frame < _tempFrames.length;
    // Link c at the head of this frame's cursor list.
    Cur next = _tempFrames[frame];
    c._nextTemp = next;
    assert c._prevTemp == null;
    if (next != null)
    {
        assert next._prevTemp == null;
        next._prevTemp = c;
    }
    _tempFrames[frame] = c;
    c._tempFrame = frame;
    c._id = id;
    return c;
}
/**
 * Returns a Cur, reusing one from the pool when available, and moves it
 * onto the REGISTERED list (embedCurs later attaches registered cursors
 * to their Xobj).
 */
Cur getCur()
{
    assert _curPool == null || _curPoolCount > 0;
    Cur c;
    if (_curPool == null)
        c = new Cur(this);
    else
    {
        // Pop the head of the pool list.
        _curPool = _curPool.listRemove(c = _curPool);
        _curPoolCount--;
    }
    // A pooled/new cursor must be completely detached.
    assert c._state == Cur.POOLED;
    assert c._prev == null && c._next == null;
    assert c._xobj == null && c._pos == Cur.NO_POS;
    assert c._ref == null;
    _registered = c.listInsert(_registered);
    c._state = Cur.REGISTERED;
    return c;
}
/**
 * Moves every REGISTERED cursor onto its Xobj's embedded list,
 * transitioning it to the EMBEDDED state.
 */
void embedCurs()
{
    for (Cur c; (c = _registered) != null;)
    {
        assert c._xobj != null;
        _registered = c.listRemove(_registered);
        c._xobj._embedded = c.listInsert(c._xobj._embedded);
        c._state = Cur.EMBEDDED;
    }
}
/** Creates a text node, using the SAAJ flavour when SAAJ is active. */
TextNode createTextNode()
{
    if (_saaj != null)
        return new SaajTextNode(this);
    return new TextNode(this);
}
/** Creates a CDATA node, using the SAAJ flavour when SAAJ is active. */
CdataNode createCdataNode()
{
    if (_saaj != null)
        return new SaajCdataNode(this);
    return new CdataNode(this);
}
/** True when at least one temp frame is currently in use. */
boolean entered()
{
    return _tempFrames.length > _numTempFramesLeft;
}
/** Enters this locale and, when different, otherLocale as well. */
public void enter(Locale otherLocale)
{
    enter();
    if (otherLocale != this)
        otherLocale.enter();
}
/**
 * Pushes a temp-cursor frame (growing the frame stack when exhausted)
 * and periodically polls the reference queue so weak Curs whose owners
 * died get released.  Every enter() must be paired with exit().
 */
public void enter()
{
    assert _numTempFramesLeft >= 0;
    // Claim a frame; when none are left, double the stack.
    if (--_numTempFramesLeft <= 0)
    {
        Cur[] newTempFrames = new Cur[(_numTempFramesLeft =
            _tempFrames.length) *
            2];
        System.arraycopy(_tempFrames, 0, newTempFrames, 0,
            _tempFrames.length);
        _tempFrames = newTempFrames;
    }
    // Amortize reference-queue polling across many entries.
    if (++_entryCount > 1000)
    {
        pollQueue();
        _entryCount = 0;
    }
}
/**
 * Drains the reference queue, releasing the Cur behind every phantom
 * Ref whose client object has been collected.
 */
private void pollQueue()
{
    if (_refQueue == null)
        return;   // no weak Curs were ever created
    Ref ref;
    while ((ref = (Ref) _refQueue.poll()) != null)
    {
        if (ref._cur != null)
            ref._cur.release();
    }
}
/** Exits this locale and, when different, otherLocale as well. */
public void exit(Locale otherLocale)
{
    exit();
    if (otherLocale != this)
        otherLocale.exit();
}
/**
 * Pops the innermost temp frame, releasing every Cur registered on it.
 * Must balance a prior enter().
 */
public void exit()
{
    // assert _numTempFramesLeft >= 0;
    //asserts computed frame fits between 0 and _tempFrames.length
    assert _numTempFramesLeft >= 0 &&
        (_numTempFramesLeft <= _tempFrames.length - 1):
        " Temp frames mismanaged. Impossible stack frame. Unsynchronized: " +
        noSync();
    int frame = _tempFrames.length - ++_numTempFramesLeft;
    // release() unlinks each Cur from the frame list as a side effect,
    // so draining the head empties the whole frame.
    while (_tempFrames[frame] != null)
        _tempFrames[frame].release();
}
//
//
//
/** True when this locale runs unsynchronized (caller-managed threading). */
public boolean noSync()
{
    return _noSync;
}
/** True when this locale synchronizes internally (inverse of noSync()). */
public boolean sync()
{
    return !_noSync;
}
/** True if every char of s is XML whitespace (vacuously true if empty). */
static final boolean isWhiteSpace(String s)
{
    for (int i = s.length() - 1; i >= 0; i--)
    {
        if (!CharUtil.isWhiteSpace(s.charAt(i)))
            return false;
    }
    return true;
}
/** True if every char of sb is XML whitespace (vacuously true if empty). */
static final boolean isWhiteSpace(StringBuffer sb)
{
    for (int i = sb.length() - 1; i >= 0; i--)
    {
        if (!CharUtil.isWhiteSpace(sb.charAt(i)))
            return false;
    }
    return true;
}
/** True if name starts with "xml" in any letter case. */
static boolean beginsWithXml(String name)
{
    if (name.length() < 3)
        return false;
    char c0 = name.charAt(0);
    char c1 = name.charAt(1);
    char c2 = name.charAt(2);
    return (c0 == 'x' || c0 == 'X') &&
        (c1 == 'm' || c1 == 'M') &&
        (c2 == 'l' || c2 == 'L');
}
/**
 * True if name denotes a namespace declaration: either its prefix is
 * "xmlns", or it has no prefix and its local part is "xmlns".
 */
static boolean isXmlns(QName name)
{
    String prefix = name.getPrefix();
    if (prefix.length() == 0)
        return name.getLocalPart().equals("xmlns");
    return prefix.equals("xmlns");
}
/**
 * Builds the QName for declaring prefix: xmlns="..." when prefix is
 * null/empty, xmlns:prefix="..." otherwise.
 */
QName createXmlns(String prefix)
{
    String p = prefix == null ? "" : prefix;
    if (p.length() == 0)
        return makeQName(_xmlnsUri, "xmlns", "");
    return makeQName(_xmlnsUri, p, "xmlns");
}
/**
 * For an xmlns:foo declaration QName, returns the declared prefix
 * ("foo"); for a default xmlns declaration (or any other name), "".
 */
static String xmlnsPrefix(QName name)
{
    if (name.getPrefix().equals("xmlns"))
        return name.getLocalPart();
    return "";
}
//
// Loading/parsing
//
/**
 * Event sink used while parsing: concrete subclasses build the store as
 * SAX-like events arrive, and finish() yields a Cur over the result.
 * Also tracks DTD-declared ID attributes so isAttrOfTypeId can answer
 * ID queries.
 *
 * Fix: removed a duplicated {@code lName = eqn.getLocalPart();}
 * statement in isAttrOfTypeId.
 */
static abstract class LoadContext
{
    protected abstract void startDTD(String name, String publicId,
        String systemId);
    protected abstract void endDTD();
    protected abstract void startElement(QName name);
    protected abstract void endElement();
    protected abstract void attr(QName name, String value);
    protected abstract void attr(String local, String uri, String prefix,
        String value);
    protected abstract void xmlns(String prefix, String uri);
    protected abstract void comment(char[] buff, int off, int cch);
    protected abstract void comment(String comment);
    protected abstract void procInst(String target, String value);
    protected abstract void text(char[] buff, int off, int cch);
    protected abstract void text(String s);
    /** Completes the load and returns a cursor over the built tree. */
    protected abstract Cur finish();
    /** Aborts the load, discarding partial state. */
    protected abstract void abort();
    protected abstract void bookmark(XmlBookmark bm);
    protected abstract void bookmarkLastNonAttr(XmlBookmark bm);
    protected abstract void bookmarkLastAttr(QName attrName,
        XmlBookmark bm);
    protected abstract void lineNumber(int line, int column, int offset);
    /** Records that attribute aName of element eName is DTD type ID. */
    protected void addIdAttr(String eName, String aName){
        if ( _idAttrs == null )
            _idAttrs = new java.util.Hashtable();
        _idAttrs.put(aName,eName);
    }
    /** True if aqn is a DTD-declared ID attribute of element eqn. */
    protected boolean isAttrOfTypeId(QName aqn, QName eqn){
        if (_idAttrs == null)
            return false;
        String pre = aqn.getPrefix();
        String lName = aqn.getLocalPart();
        // Keys are stored by qualified name ("pre:local" or "local").
        String urnName = "".equals(pre)?lName:pre + ":" + lName;
        String eName = (String) _idAttrs.get(urnName);
        if (eName == null ) return false;
        //get the name of the parent elt
        pre = eqn.getPrefix();
        lName = eqn.getLocalPart();
        urnName = "".equals(pre)?lName:pre + ":" + lName;
        return eName.equals(urnName);
    }
    // Maps attribute qualified name -> owning element qualified name.
    private java.util.Hashtable _idAttrs;
}
/**
 * Entity resolver that resolves every entity to an empty stream,
 * preventing the parser from fetching external entities (no file or
 * network access during parsing).
 */
private static class DefaultEntityResolver
    implements EntityResolver
{
    public InputSource resolveEntity(String publicId, String systemId)
    {
        return new InputSource(new StringReader(""));
    }
}
/**
 * Returns the shared Piccolo-based SaxLoader, creating it on first use
 * and caching it in the SystemCache.
 */
private static SaxLoader getPiccoloSaxLoader()
{
    SaxLoader piccoloLoader = (SaxLoader) SystemCache.get().getSaxLoader();
    if (piccoloLoader == null)
    {
        piccoloLoader = PiccoloSaxLoader.newInstance();
        SystemCache.get().setSaxLoader(piccoloLoader);
    }
    return piccoloLoader;
}
/**
 * Builds the SaxLoader to use for the given options: a user-supplied
 * XMLReader when LOAD_USE_XMLREADER is set, otherwise the shared
 * Piccolo loader.  Picks the entity resolver from the options, the
 * global resolver, or a no-op default - unless the platform default
 * resolver was explicitly requested.
 */
private static SaxLoader getSaxLoader(XmlOptions options)
{
    options = XmlOptions.maskNull(options);
    EntityResolver resolver = null;
    if (!options.hasOption(XmlOptions.LOAD_USE_DEFAULT_RESOLVER))
    {
        resolver = (EntityResolver) options.get(XmlOptions.ENTITY_RESOLVER);
        if (resolver == null)
            resolver = ResolverUtil.getGlobalEntityResolver();
        if (resolver == null)
            resolver = new DefaultEntityResolver();
    }
    if (options.hasOption(XmlOptions.LOAD_USE_XMLREADER))
    {
        XMLReader reader = (XMLReader) options.get(
            XmlOptions.LOAD_USE_XMLREADER);
        if (reader == null)
            throw new IllegalArgumentException("XMLReader is null");
        SaxLoader loader = new XmlReaderSaxLoader(reader);
        // I've noticed that most XMLReaders don't like a null EntityResolver...
        if (resolver != null)
            reader.setEntityResolver(resolver);
        return loader;
    }
    // Piccolo doesnot mind a null entity resolver ...
    SaxLoader loader = getPiccoloSaxLoader();
    loader.setEntityResolver(resolver);
    return loader;
}
/**
 * SaxLoader backed by a caller-supplied XMLReader; no start Locator is
 * available, so line-number features are disabled.
 */
private static class XmlReaderSaxLoader
    extends SaxLoader
{
    XmlReaderSaxLoader(XMLReader xr)
    {
        super(xr, null);
    }
}
/**
 * SaxLoader backed by the bundled Piccolo parser.  After a load it
 * copies the parsed encoding/version into the document properties.
 */
private static class PiccoloSaxLoader
    extends SaxLoader
{
    private PiccoloSaxLoader(Piccolo p)
    {
        // Piccolo supplies a start locator, enabling line numbers.
        super(p, p.getStartLocator());
        _piccolo = p;
    }
    static PiccoloSaxLoader newInstance()
    {
        return new PiccoloSaxLoader(new Piccolo());
    }
    void postLoad(Cur c)
    {
        // Record the XML declaration's encoding/version on the doc.
        XmlDocumentProperties props = getDocProps(c, true);
        props.setEncoding(_piccolo.getEncoding());
        props.setVersion(_piccolo.getVersion());
        super.postLoad(c);
    }
    private Piccolo _piccolo;
}
/**
 * SAX event handler that forwards parser events into a LoadContext to
 * build the store.  Handles namespace-declaration attributes itself
 * (the parser runs with namespace-prefixes on), validates prefix usage,
 * optionally records line-number and CDATA bookmarks, and captures
 * DTD-declared ID attributes via the DeclHandler callbacks.
 */
private static abstract class SaxHandler
    implements ContentHandler, LexicalHandler , DeclHandler, DTDHandler
{
    SaxHandler(Locator startLocator)
    {
        _startLocator = startLocator;
    }
    SaxHandler()
    {
        this(null);
    }
    /**
     * Prepares for a parse: creates the CurLoadContext and latches the
     * bookmark options (all line/CDATA features need a start locator).
     */
    void initSaxHandler(Locale l, XmlOptions options)
    {
        _locale = l;
        options = XmlOptions.maskNull(options);
        _context = new Cur.CurLoadContext(_locale, options);
        _wantLineNumbers =
            _startLocator != null &&
            options.hasOption(XmlOptions.LOAD_LINE_NUMBERS);
        _wantLineNumbersAtEndElt =
            _startLocator != null &&
            options.hasOption(XmlOptions.LOAD_LINE_NUMBERS_END_ELEMENT);
        _wantCdataBookmarks =
            _startLocator != null &&
            options.hasOption(XmlOptions.LOAD_SAVE_CDATA_BOOKMARKS);
    }
    public void startDocument()
        throws SAXException
    {
        // Do nothing ... start of document is implicit
    }
    public void endDocument()
        throws SAXException
    {
        // Do nothing ... end of document is implicit
    }
    public void startElement(String uri, String local, String qName,
        Attributes atts)
        throws SAXException
    {
        if (local.length() == 0)
            local = qName;
        // Out current parser (Piccolo) does not error when a
        // namespace is used and not defined. Check for these here
        if (qName.indexOf(':') >= 0 && uri.length() == 0)
        {
            XmlError err =
                XmlError.forMessage("Use of undefined namespace prefix: " +
                qName.substring(0, qName.indexOf(':')));
            throw new XmlRuntimeException(err.toString(), null, err);
        }
        _context.startElement(_locale.makeQualifiedQName(uri, qName));
        if (_wantLineNumbers)
        {
            _context.bookmark(
                new XmlLineNumber(_startLocator.getLineNumber(),
                    _startLocator.getColumnNumber() - 1, -1));
        }
        // Dispatch each attribute: default xmlns, prefixed xmlns, or a
        // normal (possibly prefixed) attribute.
        for (int i = 0, len = atts.getLength(); i < len; i++)
        {
            String aqn = atts.getQName(i);
            if (aqn.equals("xmlns"))
            {
                _context.xmlns("", atts.getValue(i));
            }
            else if (aqn.startsWith("xmlns:"))
            {
                String prefix = aqn.substring(6);
                if (prefix.length() == 0)
                {
                    XmlError err =
                        XmlError.forMessage("Prefix not specified",
                            XmlError.SEVERITY_ERROR);
                    throw new XmlRuntimeException(err.toString(), null,
                        err);
                }
                String attrUri = atts.getValue(i);
                if (attrUri.length() == 0)
                {
                    // Un-declaring a prefix (XML 1.1 style) is rejected.
                    XmlError err =
                        XmlError.forMessage(
                            "Prefix can't be mapped to no namespace: " +
                        prefix,
                            XmlError.SEVERITY_ERROR);
                    throw new XmlRuntimeException(err.toString(), null,
                        err);
                }
                _context.xmlns(prefix, attrUri);
            }
            else
            {
                int colon = aqn.indexOf(':');
                if (colon < 0)
                    _context.attr(aqn, atts.getURI(i), null,
                        atts.getValue(i));
                else
                {
                    _context.attr(aqn.substring(colon + 1), atts.getURI(i), aqn.substring(
                        0, colon),
                        atts.getValue(i));
                }
            }
        }
    }
    public void endElement(String namespaceURI, String localName,
        String qName)
        throws SAXException
    {
        _context.endElement();
        if (_wantLineNumbersAtEndElt)
        {
            _context.bookmark(
                new XmlLineNumber(_startLocator.getLineNumber(),
                    _startLocator.getColumnNumber() - 1, -1));
        }
    }
    public void characters(char ch[], int start, int length)
        throws SAXException
    {
        _context.text(ch, start, length);
        // Tag CDATA runs so savers can round-trip the CDATA sections.
        if (_wantCdataBookmarks && _insideCDATA)
            _context.bookmarkLastNonAttr(CDataBookmark.CDATA_BOOKMARK);
    }
    public void ignorableWhitespace(char ch[], int start, int length)
        throws SAXException
    {
    }
    public void comment(char ch[], int start, int length)
        throws SAXException
    {
        _context.comment(ch, start, length);
    }
    public void processingInstruction(String target, String data)
        throws SAXException
    {
        _context.procInst(target, data);
    }
    public void startDTD(String name, String publicId, String systemId)
        throws SAXException
    {
        _context.startDTD(name, publicId, systemId);
    }
    public void endDTD()
        throws SAXException
    {
        _context.endDTD();
    }
    public void startPrefixMapping(String prefix, String uri)
        throws SAXException
    {
        // Only the predefined "xml" prefix may begin with "xml".
        if (beginsWithXml(prefix) &&
            !("xml".equals(prefix) && _xml1998Uri.equals(uri)))
        {
            XmlError err =
                XmlError.forMessage(
                    "Prefix can't begin with XML: " + prefix,
                    XmlError.SEVERITY_ERROR);
            throw new XmlRuntimeException(err.toString(), null, err);
        }
    }
    public void endPrefixMapping(String prefix)
        throws SAXException
    {
    }
    public void skippedEntity(String name)
        throws SAXException
    {
        // throw new RuntimeException( "Not impl: skippedEntity" );
    }
    public void startCDATA()
        throws SAXException
    {
        _insideCDATA = true;
    }
    public void endCDATA()
        throws SAXException
    {
        _insideCDATA = false;
    }
    public void startEntity(String name)
        throws SAXException
    {
        // throw new RuntimeException( "Not impl: startEntity" );
    }
    public void endEntity(String name)
        throws SAXException
    {
        // throw new RuntimeException( "Not impl: endEntity" );
    }
    public void setDocumentLocator(Locator locator)
    {
        // TODO - for non-Piccolo use cases, use a locator to get line numbers
    }
    //DeclHandler
    public void attributeDecl(String eName, String aName, String type, String valueDefault, String value){
        // Remember DTD-declared ID attributes for isAttrOfTypeId.
        if (type.equals("ID")){
            _context.addIdAttr(eName,aName);
        }
    }
    public void elementDecl(String name, String model){
    }
    public void externalEntityDecl(String name, String publicId, String systemId){
    }
    public void internalEntityDecl(String name, String value){
    }
    //DTDHandler
    public void notationDecl(String name, String publicId, String systemId){
    }
    public void unparsedEntityDecl(String name, String publicId, String systemId, String notationName){
    }
    protected Locale _locale;                    // locale being loaded into
    protected LoadContext _context;              // event sink building the store
    private boolean _wantLineNumbers;            // bookmark line numbers at start tags
    private boolean _wantLineNumbersAtEndElt;    // ... and at end tags
    private boolean _wantCdataBookmarks;         // bookmark CDATA sections
    private Locator _startLocator;               // parser position source (may be null)
    private boolean _insideCDATA = false;        // between startCDATA/endCDATA
}
/**
 * SaxHandler that owns an XMLReader, configures it (namespaces on,
 * validation off, lexical/declaration handlers wired to this), drives a
 * parse, and maps parser failures onto XmlException.  As ErrorHandler
 * it escalates every warning/error to an exception.
 */
private static abstract class SaxLoader
    extends SaxHandler
    implements ErrorHandler
{
    SaxLoader(XMLReader xr, Locator startLocator)
    {
        super(startLocator);
        _xr = xr;
        try
        {
            _xr.setFeature(
                "http://xml.org/sax/features/namespace-prefixes", true);
            _xr.setFeature("http://xml.org/sax/features/namespaces", true);
            _xr.setFeature("http://xml.org/sax/features/validation", false);
            _xr.setProperty(
                "http://xml.org/sax/properties/lexical-handler", this);
            _xr.setContentHandler(this);
            _xr.setProperty("http://xml.org/sax/properties/declaration-handler", this);
            _xr.setDTDHandler(this);
            _xr.setErrorHandler(this);
        }
        catch (Throwable e)
        {
            // A reader that rejects these features/properties is unusable.
            throw new RuntimeException(e.getMessage(), e);
        }
    }
    void setEntityResolver(EntityResolver er)
    {
        _xr.setEntityResolver(er);
    }
    /** Hook run after a successful parse (overridden by subclasses). */
    void postLoad(Cur c)
    {
        // fix garbage collection of Locale -> Xobj -> STL
        _locale = null;
        _context = null;
    }
    /**
     * Parses is into locale l and returns a cursor over the new tree.
     * On any failure the partially-built context is aborted and the
     * error is rethrown as XmlException (location-aware for
     * SAXParseException).
     */
    public Cur load(Locale l, InputSource is, XmlOptions options)
        throws XmlException, IOException
    {
        is.setSystemId("file://");
        initSaxHandler(l, options);
        try
        {
            _xr.parse(is);
            Cur c = _context.finish();
            associateSourceName(c, options);
            postLoad(c);
            return c;
        }
        catch (FileFormatException e)
        {
            _context.abort();
            throw new XmlException(e.getMessage(), e);
        }
        catch (XmlRuntimeException e)
        {
            _context.abort();
            throw new XmlException(e);
        }
        catch (SAXParseException e)
        {
            _context.abort();
            // Preserve line/column info in the reported error.
            XmlError err =
                XmlError.forLocation(e.getMessage(),
                    (String) XmlOptions.safeGet(options,
                        XmlOptions.DOCUMENT_SOURCE_NAME),
                    e.getLineNumber(), e.getColumnNumber(), -1);
            throw new XmlException(err.toString(), e, err);
        }
        catch (SAXException e)
        {
            _context.abort();
            XmlError err = XmlError.forMessage(e.getMessage());
            throw new XmlException(err.toString(), e, err);
        }
        catch (RuntimeException e)
        {
            _context.abort();
            throw e;
        }
    }
    public void fatalError(SAXParseException e)
        throws SAXException
    {
        throw e;
    }
    public void error(SAXParseException e)
        throws SAXException
    {
        throw e;
    }
    public void warning(SAXParseException e)
        throws SAXException
    {
        throw e;
    }
    private XMLReader _xr;   // the configured parser driving this loader
}
/** Parses is into this locale and returns the resulting document node. */
private Dom load(InputSource is, XmlOptions options)
    throws XmlException, IOException
{
    return getSaxLoader(options).load(this, is, options).getDom();
}
/** Parses XML from r with default options. */
public Dom load(Reader r)
    throws XmlException, IOException
{
    return load(r, null);
}
/** Parses XML from r with the given options. */
public Dom load(Reader r, XmlOptions options)
    throws XmlException, IOException
{
    return load(new InputSource(r), options);
}
/** Parses XML from in with default options. */
public Dom load(InputStream in)
    throws XmlException, IOException
{
    return load(in, null);
}
/** Parses XML from in with the given options. */
public Dom load(InputStream in, XmlOptions options)
    throws XmlException, IOException
{
    return load(new InputSource(in), options);
}
/** Parses XML from the string s with default options. */
public Dom load(String s)
    throws XmlException
{
    return load(s, null);
}
/**
 * Parses XML from the string s with the given options.  A StringReader
 * cannot really throw IOException, so any such exception is treated as
 * impossible (assertion) and rewrapped as XmlException.
 */
public Dom load(String s, XmlOptions options)
    throws XmlException
{
    Reader reader = new StringReader(s);
    try
    {
        return load(reader, options);
    }
    catch (IOException e)
    {
        // In-memory reads never perform real I/O.
        assert false: "StringReader should not throw IOException";
        throw new XmlException(e.getMessage(), e);
    }
    finally
    {
        try
        {
            reader.close();
        }
        catch (IOException ignored)
        {
            // closing a StringReader cannot meaningfully fail
        }
    }
}
//
// DOMImplementation methods
//
/** DOMImplementation.createDocument - delegates to DomImpl. */
public Document createDocument(String uri, String qname,
    DocumentType doctype)
{
    return DomImpl._domImplementation_createDocument(this, uri, qname,
        doctype);
}
/** DOMImplementation.createDocumentType - not supported by this store. */
public DocumentType createDocumentType(String qname, String publicId,
    String systemId)
{
    throw new RuntimeException("Not implemented");
    // return DomImpl._domImplementation_createDocumentType( this, qname, publicId, systemId );
}
/** DOMImplementation.hasFeature - delegates to DomImpl. */
public boolean hasFeature(String feature, String version)
{
    return DomImpl._domImplementation_hasFeature(this, feature, version);
}
/** DOMImplementation.getFeature (DOM Level 3) - not supported. */
public Object getFeature(String feature, String version)
{
    throw new RuntimeException("DOM Level 3 Not implemented");
}
//
// Dom methods
//
/**
 * Validates that n is a non-null XmlBeans-backed node and casts it.
 *
 * @throws IllegalArgumentException when n is null or foreign
 */
private static Dom checkNode(Node n)
{
    if (n instanceof Dom)
        return (Dom) n;
    if (n == null)
        throw new IllegalArgumentException("Node is null");
    throw new IllegalArgumentException("Node is not an XmlBeans node");
}
/** Returns an XmlCursor positioned at the given XmlBeans DOM node. */
public static XmlCursor nodeToCursor(Node n)
{
    return DomImpl._getXmlCursor(checkNode(n));
}
/** Returns the XmlObject backing the given XmlBeans DOM node. */
public static XmlObject nodeToXmlObject(Node n)
{
    return DomImpl._getXmlObject(checkNode(n));
}
/** Returns an XMLStreamReader over the given XmlBeans DOM node. */
public static XMLStreamReader nodeToXmlStream(Node n)
{
    return DomImpl._getXmlStreamReader(checkNode(n));
}
/** Builds a DOM node from the given XMLStreamReader. */
public static Node streamToNode(XMLStreamReader xs)
{
    return Jsr173.nodeFromStream(xs);
}
//
// SaajCallback methods
//
/** SaajCallback: attaches SAAJ-private data o to node n. */
public void setSaajData(Node n, Object o)
{
    assert n instanceof Dom;
    DomImpl.saajCallback_setSaajData((Dom) n, o);
}
/** SaajCallback: returns the SAAJ-private data attached to node n. */
public Object getSaajData(Node n)
{
    assert n instanceof Dom;
    return DomImpl.saajCallback_getSaajData((Dom) n);
}
/** SaajCallback: creates a SOAP element in this locale's owner doc. */
public Element createSoapElement(QName name, QName parentName)
{
    assert _ownerDoc != null;
    return DomImpl.saajCallback_createSoapElement(_ownerDoc, name,
        parentName);
}
/** SaajCallback: imports elem into doc as a SOAP element. */
public Element importSoapElement(Document doc, Element elem, boolean deep,
    QName parentName)
{
    assert doc instanceof Dom;
    return DomImpl.saajCallback_importSoapElement((Dom) doc, elem, deep,
        parentName);
}
/**
 * QNameFactory that interns all names through the process-wide
 * XmlBeans QName cache.
 */
private static final class DefaultQNameFactory
    implements QNameFactory
{
    private QNameCache _cache = XmlBeans.getQNameCache();
    public QName getQName(String uri, String local)
    {
        // No prefix supplied - cache under the empty prefix.
        return _cache.getName(uri, local, "");
    }
    public QName getQName(String uri, String local, String prefix)
    {
        return _cache.getName(uri, local, prefix);
    }
    public QName getQName(char[] uriSrc, int uriPos, int uriCch,
        char[] localSrc, int localPos, int localCch)
    {
        String uri = new String(uriSrc, uriPos, uriCch);
        String local = new String(localSrc, localPos, localCch);
        return _cache.getName(uri, local, "");
    }
    public QName getQName(char[] uriSrc, int uriPos, int uriCch,
        char[] localSrc, int localPos, int localCch,
        char[] prefixSrc, int prefixPos, int prefixCch)
    {
        String uri = new String(uriSrc, uriPos, uriCch);
        String local = new String(localSrc, localPos, localCch);
        String prefix = new String(prefixSrc, prefixPos, prefixCch);
        return _cache.getName(uri, local, prefix);
    }
}
//
//
//
boolean _noSync;                     // true = unsynchronized mode (see noSync())
SchemaTypeLoader _schemaTypeLoader;
private ReferenceQueue _refQueue;    // lazily created queue for weak-Cur Refs
private int _entryCount;             // enter() calls since the last pollQueue()
int _numTempFramesLeft;              // unused slots remaining in _tempFrames
Cur[] _tempFrames;                   // stack of temp-Cur frames (see enter/exit)
Cur _curPool;                        // pool of released Curs (see getCur)
int _curPoolCount;                   // number of Curs in _curPool
Cur _registered;                     // Curs awaiting embedding (see embedCurs)
ChangeListener _changeListeners;     // intrusive listener list (see registerForChange)
long _versionAll;                    // bumped on every change; returned by version()
long _versionSansText;               // NOTE(review): presumably excludes text-only changes - confirm
Locations _locations;                // location bookkeeping (see notifyChange)
private CharUtil _charUtil;          // lazily created (see getCharUtil)
int _offSrc;                         // out-param: offset from the last getChars()
int _cchSrc;                         // out-param: count from the last getChars()
Saaj _saaj;                          // non-null when backing a SAAJ tree
Dom _ownerDoc;                       // owner document node
QNameFactory _qnameFactory;
boolean _validateOnSet;
int _posTemp;
nthCache _nthCache_A = new nthCache();            // paired child-element caches
nthCache _nthCache_B = new nthCache();            // (see findNthChildElem)
domNthCache _domNthCache_A = new domNthCache();   // paired DOM child caches
domNthCache _domNthCache_B = new domNthCache();   // (see findDomNthChild)
}
| src/store/org/apache/xmlbeans/impl/store/Locale.java | /* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.xmlbeans.impl.store;
import org.xml.sax.Locator;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.EntityResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.ext.LexicalHandler;
import org.xml.sax.ext.DeclHandler;
import org.xml.sax.SAXParseException;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import org.xml.sax.SAXException;
import org.xml.sax.DTDHandler;
import java.util.HashMap;
import java.util.Map;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.Reference;
import java.lang.ref.PhantomReference;
import java.lang.ref.SoftReference;
import java.lang.reflect.Method;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.IOException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamException;
import org.apache.xmlbeans.xml.stream.Attribute;
import org.apache.xmlbeans.xml.stream.AttributeIterator;
import org.apache.xmlbeans.xml.stream.CharacterData;
import org.apache.xmlbeans.xml.stream.ProcessingInstruction;
import org.apache.xmlbeans.xml.stream.Space;
import org.apache.xmlbeans.xml.stream.StartDocument;
import org.apache.xmlbeans.xml.stream.StartElement;
import org.apache.xmlbeans.xml.stream.XMLEvent;
import org.apache.xmlbeans.xml.stream.XMLInputStream;
import org.apache.xmlbeans.xml.stream.XMLName;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Node;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Element;
import javax.xml.namespace.QName;
import org.apache.xmlbeans.impl.common.XMLNameHelper;
import org.apache.xmlbeans.impl.common.QNameHelper;
import org.apache.xmlbeans.impl.common.XmlLocale;
import org.apache.xmlbeans.impl.common.ResolverUtil;
import org.apache.xmlbeans.impl.common.SystemCache;
import org.apache.xmlbeans.impl.store.Saaj.SaajCallback;
import org.apache.xmlbeans.impl.store.DomImpl.Dom;
import org.apache.xmlbeans.impl.store.DomImpl.TextNode;
import org.apache.xmlbeans.impl.store.DomImpl.CdataNode;
import org.apache.xmlbeans.impl.store.DomImpl.SaajTextNode;
import org.apache.xmlbeans.impl.store.DomImpl.SaajCdataNode;
import org.apache.xmlbeans.impl.store.Cur.Locations;
import org.apache.xmlbeans.CDataBookmark;
import org.apache.xmlbeans.XmlBeans;
import org.apache.xmlbeans.XmlLineNumber;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlCursor.XmlBookmark;
import org.apache.xmlbeans.XmlSaxHandler;
import org.apache.xmlbeans.XmlException;
import org.apache.xmlbeans.XmlObject;
import org.apache.xmlbeans.XmlOptions;
import org.apache.xmlbeans.SchemaType;
import org.apache.xmlbeans.SchemaTypeLoader;
import org.apache.xmlbeans.XmlTokenSource;
import org.apache.xmlbeans.XmlOptions;
import org.apache.xmlbeans.QNameSet;
import org.apache.xmlbeans.QNameCache;
import org.apache.xmlbeans.XmlBeans;
import org.apache.xmlbeans.XmlError;
import org.apache.xmlbeans.XmlRuntimeException;
import org.apache.xmlbeans.XmlDocumentProperties;
import org.apache.xmlbeans.impl.values.TypeStore;
import org.apache.xmlbeans.impl.values.TypeStoreUser;
import org.apache.xmlbeans.impl.values.TypeStoreUserFactory;
import org.apache.xmlbeans.impl.piccolo.xml.Piccolo;
import org.apache.xmlbeans.impl.piccolo.io.FileFormatException;
public final class Locale
implements DOMImplementation, SaajCallback, XmlLocale
{
    // Token kinds, mirroring the constants on Cur
    static final int ROOT = Cur.ROOT;
    static final int ELEM = Cur.ELEM;
    static final int ATTR = Cur.ATTR;
    static final int COMMENT = Cur.COMMENT;
    static final int PROCINST = Cur.PROCINST;
    static final int TEXT = Cur.TEXT;
    // Whitespace handling rules, mirroring the constants on TypeStore
    static final int WS_UNSPECIFIED = TypeStore.WS_UNSPECIFIED;
    static final int WS_PRESERVE = TypeStore.WS_PRESERVE;
    static final int WS_REPLACE = TypeStore.WS_REPLACE;
    static final int WS_COLLAPSE = TypeStore.WS_COLLAPSE;
    // Well-known namespace URIs
    static final String _xsi = "http://www.w3.org/2001/XMLSchema-instance";
    static final String _schema = "http://www.w3.org/2001/XMLSchema";
    static final String _openFragUri = "http://www.openuri.org/fragment";
    static final String _xml1998Uri = "http://www.w3.org/XML/1998/namespace";
    static final String _xmlnsUri = "http://www.w3.org/2000/xmlns/";
    // Well-known xsi: attribute names
    static final QName _xsiNil = new QName(_xsi, "nil", "xsi");
    static final QName _xsiType = new QName(_xsi, "type", "xsi");
    static final QName _xsiLoc = new QName(_xsi, "schemaLocation", "xsi");
    static final QName _xsiNoLoc = new QName(_xsi, "noNamespaceSchemaLocation",
        "xsi");
    // Element names recognized as fragment wrappers (see isFragmentQName)
    static final QName _openuriFragment = new QName(_openFragUri, "fragment",
        "frag");
    static final QName _xmlFragment = new QName("xml-fragment");
    /**
     * Creates a locale -- the synchronization / memory-management domain for
     * a set of XML stores.  Not called directly; use getLocale(...).
     *
     * @param stl     schema type loader used for (auto)typing documents
     * @param options may carry UNSYNCHRONIZED, VALIDATE_ON_SET and
     *                Saaj.SAAJ_IMPL; null is tolerated (masked below)
     */
    private Locale(SchemaTypeLoader stl, XmlOptions options)
    {
        options = XmlOptions.maskNull(options);
        //
        //
        //
        // TODO - add option for no=sync, or make it all thread safe
        //
        // Also - have a thread local setting for thread safety? .. Perhaps something
        // in the type loader which defines whether ot not sync is on????
        _noSync = options.hasOption(XmlOptions.UNSYNCHRONIZED);
        _tempFrames = new Cur[_numTempFramesLeft = 8];
        // BUGBUG - this cannot be thread local ....
        // BUGBUG - this cannot be thread local ....
        // BUGBUG - this cannot be thread local .... uhh what, again?
        //
        // Lazy create this (loading up a locale should use the thread locale one)
        // same goes for the qname factory .. use thread local for hte most part when loading
        _qnameFactory = new DefaultQNameFactory();
        _locations = new Locations(this);
        _schemaTypeLoader = stl;
        _validateOnSet = options.hasOption(XmlOptions.VALIDATE_ON_SET);
        //
        // Check for Saaj implementation request
        //
        Object saajObj = options.get(Saaj.SAAJ_IMPL);
        if (saajObj != null)
        {
            if (!(saajObj instanceof Saaj))
                throw new IllegalStateException(
                    "Saaj impl not correct type: " + saajObj);
            _saaj = (Saaj) saajObj;
            _saaj.setCallback(this);
        }
    }
//
//
//
    /**
     * Option key: reuse the locale of the supplied Locale or XmlTokenSource
     * instead of creating a new one (see getLocale).
     */
    public static final String USE_SAME_LOCALE = "USE_SAME_LOCALE";
    /**
     * This option is checked in XmlObjectBase._copy(XmlOptions), the locale is used as the synchronization domain.
     * useNewLocale = true: copy will use a new locale, false: copy will use the same locale as the source
     */
    public static final String COPY_USE_NEW_LOCALE = "COPY_USE_NEW_LOCALE";
    /**
     * Returns a locale for the given type loader and options.  When the
     * USE_SAME_LOCALE option is present its value (a Locale or an
     * XmlTokenSource whose monitor is a Locale) is reused, after checking
     * that it is compatible with the requested loader / saaj / validate-on-set
     * settings; otherwise a fresh Locale is created.
     *
     * @throws IllegalArgumentException if the supplied source locale is of an
     *         unknown type or incompatible with the requested options
     */
    static Locale getLocale(SchemaTypeLoader stl, XmlOptions options)
    {
        if (stl == null)
            stl = XmlBeans.getContextTypeLoader();
        options = XmlOptions.maskNull(options);
        Locale l = null;
        if (options.hasOption(USE_SAME_LOCALE))
        {
            Object source = options.get(USE_SAME_LOCALE);
            if (source instanceof Locale)
                l = (Locale) source;
            else if (source instanceof XmlTokenSource)
                l = (Locale) ((XmlTokenSource) source).monitor();
            else
                throw new IllegalArgumentException(
                    "Source locale not understood: " + source);
            // reused locale must match the requested configuration exactly
            if (l._schemaTypeLoader != stl)
                throw new IllegalArgumentException(
                    "Source locale does not support same schema type loader");
            if (l._saaj != null && l._saaj != options.get(Saaj.SAAJ_IMPL))
                throw new IllegalArgumentException(
                    "Source locale does not support same saaj");
            if (l._validateOnSet &&
                !options.hasOption(XmlOptions.VALIDATE_ON_SET))
                throw new IllegalArgumentException(
                    "Source locale does not support same validate on set");
            // TODO - other things to check?
        }
        else
            l = new Locale(stl, options);
        return l;
    }
//
//
//
static void associateSourceName(Cur c, XmlOptions options)
{
String sourceName = (String) XmlOptions.safeGet(options,
XmlOptions.DOCUMENT_SOURCE_NAME);
if (sourceName != null)
getDocProps(c, true).setSourceName(sourceName);
}
//
//
//
    /**
     * Determines and sets the schema type of the document rooted at c.
     * Precedence: explicit XmlOptions.DOCUMENT_TYPE option, then an xsi:type
     * attribute, then a document type looked up from the single document
     * element, then a global attribute type, then the requested type, then
     * XmlBeans.NO_TYPE.  Finally validates the document / attribute shape
     * against the requested type if one was given.
     *
     * @throws XmlException if the document element does not match the
     *         requested document type and is not a valid substitution
     */
    static void autoTypeDocument(Cur c, SchemaType requestedType,
        XmlOptions options)
        throws XmlException
    {
        assert c.isRoot();
        // The type in the options overrides all sniffing
        options = XmlOptions.maskNull(options);
        SchemaType optionType = (SchemaType) options.get(
            XmlOptions.DOCUMENT_TYPE);
        if (optionType != null)
        {
            c.setType(optionType);
            return;
        }
        SchemaType type = null;
        // An xsi:type can be used to pick a type out of the loader, or used to refine
        // a type with a name.
        if (requestedType == null || requestedType.getName() != null)
        {
            QName xsiTypeName = c.getXsiTypeName();
            SchemaType xsiSchemaType =
                xsiTypeName == null ?
                null : c._locale._schemaTypeLoader.findType(xsiTypeName);
            if (requestedType == null ||
                requestedType.isAssignableFrom(xsiSchemaType))
                type = xsiSchemaType;
        }
        // Look for a document element to establish type
        if (type == null &&
            (requestedType == null || requestedType.isDocumentType()))
        {
            assert c.isRoot();
            c.push();
            // only sniff when there is exactly one element and no attributes
            QName docElemName =
                !c.hasAttrs() && Locale.toFirstChildElement(c) &&
                !Locale.toNextSiblingElement(c)
                ? c.getName() : null;
            c.pop();
            if (docElemName != null)
            {
                type =
                    c._locale._schemaTypeLoader.findDocumentType(docElemName);
                if (type != null && requestedType != null)
                {
                    QName requesteddocElemNameName = requestedType.getDocumentElementName();
                    if (!requesteddocElemNameName.equals(docElemName) &&
                        !requestedType.isValidSubstitution(docElemName))
                    {
                        throw
                            new XmlException("Element " +
                                QNameHelper.pretty(docElemName) +
                                " is not a valid " +
                                QNameHelper.pretty(requesteddocElemNameName) +
                                " document or a valid substitution.");
                    }
                }
            }
        }
        // No element type found: try a global attribute type (single attr doc)
        if (type == null && requestedType == null)
        {
            c.push();
            type =
                Locale.toFirstNormalAttr(c) && !Locale.toNextNormalAttr(c)
                ?
                c._locale._schemaTypeLoader.findAttributeType(c.getName()) :
                null;
            c.pop();
        }
        if (type == null)
            type = requestedType;
        if (type == null)
            type = XmlBeans.NO_TYPE;
        c.setType(type);
        if (requestedType != null)
        {
            if (type.isDocumentType())
                verifyDocumentType(c, type.getDocumentElementName());
            else if (type.isAttributeType())
                verifyAttributeType(c, type.getAttributeTypeAttributeName());
        }
    }
private static boolean namespacesSame(QName n1, QName n2)
{
if (n1 == n2)
return true;
if (n1 == null || n2 == null)
return false;
if (n1.getNamespaceURI() == n2.getNamespaceURI())
return true;
if (n1.getNamespaceURI() == null || n2.getNamespaceURI() == null)
return false;
return n1.getNamespaceURI().equals(n2.getNamespaceURI());
}
private static void addNamespace(StringBuffer sb, QName name)
{
if (name.getNamespaceURI() == null)
sb.append("<no namespace>");
else
{
sb.append("\"");
sb.append(name.getNamespaceURI());
sb.append("\"");
}
}
    /**
     * Verifies that the document rooted at c has exactly one document element
     * whose name is docElemName, building a detailed error message (namespace
     * mismatch vs. local-name mismatch vs. wholly different name) otherwise.
     *
     * @throws XmlException when the document shape or element name is wrong
     */
    private static void verifyDocumentType(Cur c, QName docElemName)
        throws XmlException
    {
        assert c.isRoot();
        c.push();
        try
        {
            StringBuffer sb = null;
            if (!Locale.toFirstChildElement(c) ||
                Locale.toNextSiblingElement(c))
            {
                sb = new StringBuffer();
                sb.append("The document is not a ");
                sb.append(QNameHelper.pretty(docElemName));
                sb.append(
                    c.isRoot() ?
                    ": no document element" : ": multiple document elements");
            }
            else
            {
                QName name = c.getName();
                if (!name.equals(docElemName))
                {
                    sb = new StringBuffer();
                    sb.append("The document is not a ");
                    sb.append(QNameHelper.pretty(docElemName));
                    // same local name => the namespace must be what differs
                    if (docElemName.getLocalPart().equals(name.getLocalPart()))
                    {
                        sb.append(": document element namespace mismatch ");
                        sb.append("expected ");
                        addNamespace(sb, docElemName);
                        sb.append(" got ");
                        addNamespace(sb, name);
                    }
                    else if (namespacesSame(docElemName, name))
                    {
                        sb.append(": document element local name mismatch ");
                        sb.append("expected " + docElemName.getLocalPart());
                        sb.append(" got " + name.getLocalPart());
                    }
                    else
                    {
                        sb.append(": document element mismatch ");
                        sb.append("got ");
                        sb.append(QNameHelper.pretty(name));
                    }
                }
            }
            if (sb != null)
            {
                XmlError err = XmlError.forCursor(sb.toString(),
                    new Cursor(c));
                throw new XmlException(err.toString(), null, err);
            }
        }
        finally
        {
            // always restore the cursor position saved by push() above
            c.pop();
        }
    }
    /**
     * Verifies that the document rooted at c consists of exactly one
     * attribute named attrName, building a detailed error message otherwise.
     * Mirrors verifyDocumentType, but for attribute-typed documents.
     *
     * @throws XmlException when the attribute shape or name is wrong
     */
    private static void verifyAttributeType(Cur c, QName attrName)
        throws XmlException
    {
        assert c.isRoot();
        c.push();
        try
        {
            StringBuffer sb = null;
            if (!Locale.toFirstNormalAttr(c) || Locale.toNextNormalAttr(c))
            {
                sb = new StringBuffer();
                sb.append("The document is not a ");
                sb.append(QNameHelper.pretty(attrName));
                sb.append(
                    c.isRoot() ? ": no attributes" : ": multiple attributes");
            }
            else
            {
                QName name = c.getName();
                if (!name.equals(attrName))
                {
                    sb = new StringBuffer();
                    sb.append("The document is not a ");
                    sb.append(QNameHelper.pretty(attrName));
                    if (attrName.getLocalPart().equals(name.getLocalPart()))
                    {
                        sb.append(": attribute namespace mismatch ");
                        sb.append("expected ");
                        addNamespace(sb, attrName);
                        sb.append(" got ");
                        addNamespace(sb, name);
                    }
                    else if (namespacesSame(attrName, name))
                    {
                        sb.append(": attribute local name mismatch ");
                        sb.append("expected " + attrName.getLocalPart());
                        sb.append(" got " + name.getLocalPart());
                    }
                    else
                    {
                        // NOTE(review): message reads "attribute element mismatch";
                        // likely meant "attribute mismatch" -- confirm before changing
                        sb.append(": attribute element mismatch ");
                        sb.append("got ");
                        sb.append(QNameHelper.pretty(name));
                    }
                }
            }
            if (sb != null)
            {
                XmlError err = XmlError.forCursor(sb.toString(),
                    new Cursor(c));
                throw new XmlException(err.toString(), null, err);
            }
        }
        finally
        {
            c.pop();
        }
    }
static boolean isFragmentQName(QName name)
{
return name.equals(Locale._openuriFragment) ||
name.equals(Locale._xmlFragment);
}
    /**
     * Determines whether the content between start and end constitutes a
     * fragment rather than a well-formed document: any non-whitespace text or
     * more than one element at the top level makes it a fragment, as does
     * ending with other than exactly one document element.  Both cursors'
     * positions are preserved via push/pop.
     */
    static boolean isFragment(Cur start, Cur end)
    {
        assert !end.isAttr();
        start.push();
        end.push();
        int numDocElems = 0;
        boolean isFrag = false;
        while (!start.isSamePos(end))
        {
            int k = start.kind();
            if (k == ATTR)
                break;
            if (k == TEXT && !isWhiteSpace(start.getCharsAsString(-1)))
            {
                isFrag = true;
                break;
            }
            if (k == ELEM && ++numDocElems > 1)
            {
                isFrag = true;
                break;
            }
            // Move to next token
            assert k != ATTR;
            if (k != TEXT)
                start.toEnd();
            start.next();
        }
        start.pop();
        end.pop();
        return isFrag || numDocElems != 1;
    }
//
//
//
public static XmlObject newInstance(SchemaTypeLoader stl, SchemaType type,
XmlOptions options)
{
Locale l = getLocale(stl, options);
if (l.noSync())
{
l.enter();
try
{
return l.newInstance(type, options);
}
finally
{
l.exit();
}
}
else
synchronized (l)
{
l.enter();
try
{
return l.newInstance(type, options);
}
finally
{
l.exit();
}
}
}
private XmlObject newInstance(SchemaType type, XmlOptions options)
{
options = XmlOptions.maskNull(options);
Cur c = tempCur();
SchemaType sType = (SchemaType) options.get(XmlOptions.DOCUMENT_TYPE);
if (sType == null)
sType = type == null ? XmlObject.type : type;
if (sType.isDocumentType())
c.createDomDocumentRoot();
else
c.createRoot();
c.setType(sType);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
//
//
//
public static DOMImplementation newDomImplementation(SchemaTypeLoader stl,
XmlOptions options)
{
return (DOMImplementation) getLocale(stl, options);
}
//
//
//
public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
String xmlText, SchemaType type, XmlOptions options)
throws XmlException
{
Locale l = getLocale(stl, options);
if (l.noSync())
{
l.enter();
try
{
return l.parseToXmlObject(xmlText, type, options);
}
finally
{
l.exit();
}
}
else
synchronized (l)
{
l.enter();
try
{
return l.parseToXmlObject(xmlText, type, options);
}
finally
{
l.exit();
}
}
}
private XmlObject parseToXmlObject(String xmlText, SchemaType type,
XmlOptions options)
throws XmlException
{
Cur c = parse(xmlText, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
    /**
     * Parses the given string via the SAX loader, autotypes the resulting
     * document and returns a Cur positioned at its root.  The caller owns
     * (and must release) the returned Cur.
     *
     * @throws XmlException on parse failure (an IOException here would
     *         indicate a bug, since StringReader does not do real I/O)
     */
    Cur parse(String s, SchemaType type, XmlOptions options)
        throws XmlException
    {
        Reader r = new StringReader(s);
        try
        {
            Cur c = getSaxLoader(options).load(this, new InputSource(r),
                options);
            autoTypeDocument(c, type, options);
            return c;
        }
        catch (IOException e)
        {
            assert false: "StringReader should not throw IOException";
            throw new XmlException(e.getMessage(), e);
        }
        finally
        {
            try
            {
                r.close();
            }
            catch (IOException e)
            {
                // ignored: closing a StringReader cannot meaningfully fail
            }
        }
    }
//
//
//
/**
* @deprecated XMLInputStream was deprecated by XMLStreamReader from STaX - jsr173 API.
*/
public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
XMLInputStream xis, SchemaType type, XmlOptions options)
throws XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException
{
Locale l = getLocale(stl, options);
if (l.noSync())
{
l.enter();
try
{
return l.parseToXmlObject(xis, type, options);
}
finally
{
l.exit();
}
}
else
synchronized (l)
{
l.enter();
try
{
return l.parseToXmlObject(xis, type, options);
}
finally
{
l.exit();
}
}
}
/**
* @deprecated XMLInputStream was deprecated by XMLStreamReader from STaX - jsr173 API.
*/
public XmlObject parseToXmlObject(XMLInputStream xis, SchemaType type,
XmlOptions options)
throws XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException
{
Cur c;
try
{
c = loadXMLInputStream(xis, options);
}
catch (org.apache.xmlbeans.xml.stream.XMLStreamException e)
{
throw new XmlException(e.getMessage(), e);
}
autoTypeDocument(c, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
//
//
//
public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
XMLStreamReader xsr, SchemaType type, XmlOptions options)
throws XmlException
{
Locale l = getLocale(stl, options);
if (l.noSync())
{
l.enter();
try
{
return l.parseToXmlObject(xsr, type, options);
}
finally
{
l.exit();
}
}
else
synchronized (l)
{
l.enter();
try
{
return l.parseToXmlObject(xsr, type, options);
}
finally
{
l.exit();
}
}
}
public XmlObject parseToXmlObject(XMLStreamReader xsr, SchemaType type,
XmlOptions options)
throws XmlException
{
Cur c;
try
{
c = loadXMLStreamReader(xsr, options);
}
catch (XMLStreamException e)
{
throw new XmlException(e.getMessage(), e);
}
autoTypeDocument(c, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
private static void lineNumber(XMLEvent xe, LoadContext context)
{
org.apache.xmlbeans.xml.stream.Location loc = xe.getLocation();
if (loc != null)
context.lineNumber(loc.getLineNumber(), loc.getColumnNumber(), -1);
}
private static void lineNumber(XMLStreamReader xsr, LoadContext context)
{
javax.xml.stream.Location loc = xsr.getLocation();
if (loc != null)
{
context.lineNumber(loc.getLineNumber(), loc.getColumnNumber(),
loc.getCharacterOffset());
}
}
private void doAttributes(XMLStreamReader xsr, LoadContext context)
{
int n = xsr.getAttributeCount();
for (int a = 0; a < n; a++)
{
context.attr(xsr.getAttributeLocalName(a),
xsr.getAttributeNamespace(a),
xsr.getAttributePrefix(a),
xsr.getAttributeValue(a));
}
}
private void doNamespaces(XMLStreamReader xsr, LoadContext context)
{
int n = xsr.getNamespaceCount();
for (int a = 0; a < n; a++)
{
String prefix = xsr.getNamespacePrefix(a);
if (prefix == null || prefix.length() == 0)
context.attr("xmlns", _xmlnsUri, null,
xsr.getNamespaceURI(a));
else
context.attr(prefix, _xmlnsUri, "xmlns",
xsr.getNamespaceURI(a));
}
}
/**
* @deprecated XMLInputStream was deprecated by XMLStreamReader from STaX - jsr173 API.
*/
private Cur loadXMLInputStream(XMLInputStream xis, XmlOptions options)
throws org.apache.xmlbeans.xml.stream.XMLStreamException
{
options = XmlOptions.maskNull(options);
boolean lineNums = options.hasOption(XmlOptions.LOAD_LINE_NUMBERS);
XMLEvent x = xis.peek();
if (x != null && x.getType() == XMLEvent.START_ELEMENT)
{
Map nsMap = ((StartElement) x).getNamespaceMap();
if (nsMap != null && nsMap.size() > 0)
{
Map namespaces = new HashMap();
namespaces.putAll(nsMap);
options = new XmlOptions(options);
options.put(XmlOptions.LOAD_ADDITIONAL_NAMESPACES, namespaces);
}
}
String systemId = null;
String encoding = null;
String version = null;
boolean standAlone = true;
LoadContext context = new Cur.CurLoadContext(this, options);
events:
for (XMLEvent xe = xis.next(); xe != null; xe = xis.next())
{
switch (xe.getType())
{
case XMLEvent.START_DOCUMENT:
StartDocument doc = (StartDocument) xe;
systemId = doc.getSystemId();
encoding = doc.getCharacterEncodingScheme();
version = doc.getVersion();
standAlone = doc.isStandalone();
standAlone = doc.isStandalone();
if (lineNums)
lineNumber(xe, context);
break;
case XMLEvent.END_DOCUMENT:
if (lineNums)
lineNumber(xe, context);
break events;
case XMLEvent.NULL_ELEMENT:
if (!xis.hasNext())
break events;
break;
case XMLEvent.START_ELEMENT:
context.startElement(XMLNameHelper.getQName(xe.getName()));
if (lineNums)
lineNumber(xe, context);
for (AttributeIterator ai = ((StartElement) xe).getAttributes();
ai.hasNext();)
{
Attribute attr = ai.next();
context.attr(XMLNameHelper.getQName(attr.getName()),
attr.getValue());
}
for (AttributeIterator ai = ((StartElement) xe).getNamespaces()
; ai.hasNext();)
{
Attribute attr = ai.next();
XMLName name = attr.getName();
String local = name.getLocalName();
if (name.getPrefix() == null && local.equals("xmlns"))
local = "";
context.xmlns(local, attr.getValue());
}
break;
case XMLEvent.END_ELEMENT:
context.endElement();
if (lineNums)
lineNumber(xe, context);
break;
case XMLEvent.SPACE:
if (((Space) xe).ignorable())
break;
// Fall through
case XMLEvent.CHARACTER_DATA:
CharacterData cd = (CharacterData) xe;
if (cd.hasContent())
{
context.text(cd.getContent());
if (lineNums)
lineNumber(xe, context);
}
break;
case XMLEvent.COMMENT:
org.apache.xmlbeans.xml.stream.Comment comment =
(org.apache.xmlbeans.xml.stream.Comment) xe;
if (comment.hasContent())
{
context.comment(comment.getContent());
if (lineNums)
lineNumber(xe, context);
}
break;
case XMLEvent.PROCESSING_INSTRUCTION:
ProcessingInstruction procInstr = (ProcessingInstruction) xe;
context.procInst(procInstr.getTarget(), procInstr.getData());
if (lineNums)
lineNumber(xe, context);
break;
// These are ignored
case XMLEvent.ENTITY_REFERENCE:
case XMLEvent.START_PREFIX_MAPPING:
case XMLEvent.END_PREFIX_MAPPING:
case XMLEvent.CHANGE_PREFIX_MAPPING:
case XMLEvent.XML_EVENT:
break;
default :
throw new RuntimeException(
"Unhandled xml event type: " + xe.getTypeAsString());
}
}
Cur c = context.finish();
associateSourceName(c, options);
XmlDocumentProperties props = getDocProps(c, true);
props.setDoctypeSystemId(systemId);
props.setEncoding(encoding);
props.setVersion(version);
props.setStandalone(standAlone);
return c;
}
    /**
     * Loads a StAX stream into a new tree and returns a Cur at its root.
     * Tracks element/document depth so loading stops when the subtree that
     * started at the reader's current position is complete, and records
     * encoding / version / standalone into the document properties.
     *
     * @throws XMLStreamException propagated from the reader
     */
    private Cur loadXMLStreamReader(XMLStreamReader xsr, XmlOptions options)
        throws XMLStreamException
    {
        options = XmlOptions.maskNull(options);
        boolean lineNums = options.hasOption(XmlOptions.LOAD_LINE_NUMBERS);
        String encoding = null, version = null;
        boolean standAlone = false;
        LoadContext context = new Cur.CurLoadContext(this, options);
        // depth counts START_DOCUMENT/START_ELEMENT vs END_* so we can stop
        // at the end of the subtree we started in
        int depth = 0;
        events:
        for (int eventType = xsr.getEventType(); ; eventType = xsr.next())
        {
            switch (eventType)
            {
                case XMLStreamReader.START_DOCUMENT:
                {
                    depth++;
                    encoding = xsr.getCharacterEncodingScheme();
                    version = xsr.getVersion();
                    standAlone = xsr.isStandalone();
                    if (lineNums)
                        lineNumber(xsr, context);
                    break;
                }
                case XMLStreamReader.END_DOCUMENT:
                {
                    depth--;
                    if (lineNums)
                        lineNumber(xsr, context);
                    break events;
                }
                case XMLStreamReader.START_ELEMENT:
                {
                    depth++;
                    context.startElement(xsr.getName());
                    if (lineNums)
                        lineNumber(xsr, context);
                    doAttributes(xsr, context);
                    doNamespaces(xsr, context);
                    break;
                }
                case XMLStreamReader.END_ELEMENT:
                {
                    depth--;
                    context.endElement();
                    if (lineNums)
                        lineNumber(xsr, context);
                    break;
                }
                case XMLStreamReader.CHARACTERS:
                case XMLStreamReader.CDATA:
                {
                    context.text(xsr.getTextCharacters(), xsr.getTextStart(),
                        xsr.getTextLength());
                    if (lineNums)
                        lineNumber(xsr, context);
                    break;
                }
                case XMLStreamReader.COMMENT:
                {
                    String comment = xsr.getText();
                    context.comment(comment);
                    if (lineNums)
                        lineNumber(xsr, context);
                    break;
                }
                case XMLStreamReader.PROCESSING_INSTRUCTION:
                {
                    context.procInst(xsr.getPITarget(), xsr.getPIData());
                    if (lineNums)
                        lineNumber(xsr, context);
                    break;
                }
                case XMLStreamReader.ATTRIBUTE:
                {
                    doAttributes(xsr, context);
                    break;
                }
                case XMLStreamReader.NAMESPACE:
                {
                    doNamespaces(xsr, context);
                    break;
                }
                case XMLStreamReader.ENTITY_REFERENCE:
                {
                    context.text(xsr.getText());
                    break;
                }
                case XMLStreamReader.SPACE:
                case XMLStreamReader.DTD:
                    break;
                default :
                    throw new RuntimeException(
                        "Unhandled xml event type: " + eventType);
            }
            if (!xsr.hasNext() || depth <= 0)
                break;
        }
        Cur c = context.finish();
        associateSourceName(c, options);
        XmlDocumentProperties props = getDocProps(c, true);
        props.setEncoding(encoding);
        props.setVersion(version);
        props.setStandalone(standAlone);
        return c;
    }
//
//
//
public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
InputStream is, SchemaType type, XmlOptions options)
throws XmlException, IOException
{
Locale l = getLocale(stl, options);
if (l.noSync())
{
l.enter();
try
{
return l.parseToXmlObject(is, type, options);
}
finally
{
l.exit();
}
}
else
synchronized (l)
{
l.enter();
try
{
return l.parseToXmlObject(is, type, options);
}
finally
{
l.exit();
}
}
}
private XmlObject parseToXmlObject(InputStream is, SchemaType type,
XmlOptions options)
throws XmlException, IOException
{
Cur c = getSaxLoader(options).load(this, new InputSource(is),
options);
autoTypeDocument(c, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
//
//
//
public static XmlObject parseToXmlObject(SchemaTypeLoader stl,
Reader reader, SchemaType type, XmlOptions options)
throws XmlException, IOException
{
Locale l = getLocale(stl, options);
if (l.noSync())
{
l.enter();
try
{
return l.parseToXmlObject(reader, type, options);
}
finally
{
l.exit();
}
}
else
synchronized (l)
{
l.enter();
try
{
return l.parseToXmlObject(reader, type, options);
}
finally
{
l.exit();
}
}
}
private XmlObject parseToXmlObject(Reader reader, SchemaType type,
XmlOptions options)
throws XmlException, IOException
{
Cur c = getSaxLoader(options).load(this, new InputSource(reader),
options);
autoTypeDocument(c, type, options);
XmlObject x = (XmlObject) c.getUser();
c.release();
return x;
}
//
//
//
public static XmlObject parseToXmlObject(SchemaTypeLoader stl, Node node,
SchemaType type, XmlOptions options)
throws XmlException
{
Locale l = getLocale(stl, options);
if (l.noSync())
{
l.enter();
try
{
return l.parseToXmlObject(node, type, options);
}
finally
{
l.exit();
}
}
else
synchronized (l)
{
l.enter();
try
{
return l.parseToXmlObject(node, type, options);
}
finally
{
l.exit();
}
}
}
    /**
     * Walks the given DOM node (see loadNode) into a fresh tree, autotypes
     * it and returns the user object.
     */
    public XmlObject parseToXmlObject(Node node, SchemaType type,
        XmlOptions options)
        throws XmlException
    {
        LoadContext context = new Cur.CurLoadContext(this, options);
        loadNode(node, context);
        Cur c = context.finish();
        associateSourceName(c, options);
        autoTypeDocument(c, type, options);
        XmlObject x = (XmlObject) c.getUser();
        c.release();
        return x;
    }
private void loadNodeChildren(Node n, LoadContext context)
{
for (Node c = n.getFirstChild(); c != null; c = c.getNextSibling())
loadNode(c, context);
}
void loadNode(Node n, LoadContext context)
{
switch (n.getNodeType())
{
case Node.DOCUMENT_NODE:
case Node.DOCUMENT_FRAGMENT_NODE:
case Node.ENTITY_REFERENCE_NODE:
{
loadNodeChildren(n, context);
break;
}
case Node.ELEMENT_NODE:
{
context.startElement(
makeQualifiedQName(n.getNamespaceURI(), n.getNodeName()));
NamedNodeMap attrs = n.getAttributes();
for (int i = 0; i < attrs.getLength(); i++)
{
Node a = attrs.item(i);
String attrName = a.getNodeName();
String attrValue = a.getNodeValue();
if (attrName.toLowerCase().startsWith("xmlns"))
{
if (attrName.length() == 5)
context.xmlns(null, attrValue);
else
context.xmlns(attrName.substring(6), attrValue);
}
else
context.attr(
makeQualifiedQName(a.getNamespaceURI(), attrName),
attrValue);
}
loadNodeChildren(n, context);
context.endElement();
break;
}
case Node.TEXT_NODE:
case Node.CDATA_SECTION_NODE:
{
context.text(n.getNodeValue());
break;
}
case Node.COMMENT_NODE:
{
context.comment(n.getNodeValue());
break;
}
case Node.PROCESSING_INSTRUCTION_NODE:
{
context.procInst(n.getNodeName(), n.getNodeValue());
break;
}
case Node.DOCUMENT_TYPE_NODE:
case Node.ENTITY_NODE:
case Node.NOTATION_NODE:
case Node.ATTRIBUTE_NODE:
{
throw new RuntimeException("Unexpected node");
}
}
}
//
//
//
    /**
     * SAX push-loading front end: exposes ContentHandler/LexicalHandler for
     * a SAX parser to drive, then hands back the finished XmlObject from
     * getObject().  Uses the Locale-specific CharUtil because SAX loading is
     * not atomic with respect to XmlBeans (see constructor comment).
     */
    private class XmlSaxHandlerImpl
        extends SaxHandler
        implements XmlSaxHandler
    {
        XmlSaxHandlerImpl(Locale l, SchemaType type, XmlOptions options)
        {
            super(null);
            _options = options;
            _type = type;
            // Because SAX loading is not atomic with respect to XmlBeans, I can't use the default
            // thread local CharUtil.  Instruct the SaxHandler (and the LoadContext, eventually)
            // to use the Locale specific CharUtil.
            XmlOptions saxHandlerOptions = new XmlOptions(options);
            saxHandlerOptions.put(Cur.LOAD_USE_LOCALE_CHAR_UTIL);
            initSaxHandler(l, saxHandlerOptions);
        }
        // Both handlers return null once the load has been finished (getObject
        // nulls _context), signalling the handler can no longer accept events.
        public ContentHandler getContentHandler()
        {
            return _context == null ? null : this;
        }
        public LexicalHandler getLexicalHandler()
        {
            return _context == null ? null : this;
        }
        public void bookmarkLastEvent(XmlBookmark mark)
        {
            _context.bookmarkLastNonAttr(mark);
        }
        public void bookmarkLastAttr(QName attrName, XmlBookmark mark)
        {
            _context.bookmarkLastAttr(attrName, mark);
        }
        /**
         * Finishes the load, autotypes the document and returns the user
         * object; may be called once (subsequent calls return null).
         */
        public XmlObject getObject()
            throws XmlException
        {
            if (_context == null)
                return null;
            _locale.enter();
            try
            {
                Cur c = _context.finish();
                autoTypeDocument(c, _type, _options);
                XmlObject x = (XmlObject) c.getUser();
                c.release();
                _context = null;
                return x;
            }
            finally
            {
                _locale.exit();
            }
        }
        private SchemaType _type;
        private XmlOptions _options;
    }
public static XmlSaxHandler newSaxHandler(SchemaTypeLoader stl,
SchemaType type, XmlOptions options)
{
Locale l = getLocale(stl, options);
if (l.noSync())
{
l.enter();
try
{
return l.newSaxHandler(type, options);
}
finally
{
l.exit();
}
}
else
synchronized (l)
{
l.enter();
try
{
return l.newSaxHandler(type, options);
}
finally
{
l.exit();
}
}
}
public XmlSaxHandler newSaxHandler(SchemaType type, XmlOptions options)
{
return new XmlSaxHandlerImpl(this, type, options);
}
// TODO (ericvas ) - have a qname factory here so that the same factory may be
// used by the parser. This factory would probably come from my
// high speed parser. Otherwise, use a thread local on
QName makeQName(String uri, String localPart)
{
assert localPart != null && localPart.length() > 0;
// TODO - make sure name is a well formed name?
return _qnameFactory.getQName(uri, localPart);
}
QName makeQNameNoCheck(String uri, String localPart)
{
return _qnameFactory.getQName(uri, localPart);
}
QName makeQName(String uri, String local, String prefix)
{
return _qnameFactory.getQName(uri, local, prefix == null ? "" : prefix);
}
QName makeQualifiedQName(String uri, String qname)
{
if (qname == null)
qname = "";
int i = qname.indexOf(':');
return i < 0
?
_qnameFactory.getQName(uri, qname)
:
_qnameFactory.getQName(uri, qname.substring(i + 1),
qname.substring(0, i));
}
static private class DocProps
extends XmlDocumentProperties
{
private HashMap _map = new HashMap();
public Object put(Object key, Object value)
{
return _map.put(key, value);
}
public Object get(Object key)
{
return _map.get(key);
}
public Object remove(Object key)
{
return _map.remove(key);
}
}
    /**
     * Returns the DocProps bookmark stored on the root of the tree containing
     * c, creating it when ensure is true; returns null otherwise when absent.
     * The cursor's position is preserved via push/pop.
     */
    static XmlDocumentProperties getDocProps(Cur c, boolean ensure)
    {
        c.push();
        // walk up to the root, where document properties are bookmarked
        while (c.toParent())
            ;
        DocProps props = (DocProps) c.getBookmark(DocProps.class);
        if (props == null && ensure)
            c.setBookmark(DocProps.class, props = new DocProps());
        c.pop();
        return props;
    }
interface ChangeListener
{
void notifyChange();
void setNextChangeListener(ChangeListener listener);
ChangeListener getNextChangeListener();
}
    /**
     * Adds the listener to the head of the change-listener list unless it is
     * already registered.  A non-null next pointer means "already in a list";
     * the last element points to itself as a sentinel terminator (this is
     * undone in notifyChange).
     */
    void registerForChange(ChangeListener listener)
    {
        if (listener.getNextChangeListener() == null)
        {
            if (_changeListeners == null)
                listener.setNextChangeListener(listener);
            else
                listener.setNextChangeListener(_changeListeners);
            _changeListeners = listener;
        }
    }
    /**
     * Fires all registered change listeners (each is unlinked as it is
     * notified, so listeners are one-shot), then lets Locations materialize
     * real Curs for its 'virtual' cursor positions.
     */
    void notifyChange()
    {
        // First, notify the registered listeners ...
        while (_changeListeners != null)
        {
            _changeListeners.notifyChange();
            // a listener pointing at itself is the sentinel end-of-list marker
            if (_changeListeners.getNextChangeListener() == _changeListeners)
                _changeListeners.setNextChangeListener(null);
            ChangeListener next = _changeListeners.getNextChangeListener();
            _changeListeners.setNextChangeListener(null);
            _changeListeners = next;
        }
        // Then, prepare for the change in a locale specific way.  Need to create real Curs for
        // 'virtual' Curs in Locations
        _locations.notifyChange();
    }
//
// Cursor helpers
//
// Returns the concatenated text value of the node at c.  Childless nodes
// use the cheap direct value; otherwise all text between the node's start
// and end is gathered, skipping text that belongs to comment/procinst
// values.  c's position is preserved.
static String getTextValue(Cur c)
{
    assert c.isNode();

    if (!c.hasChildren())
        return c.getValueAsString();

    StringBuffer sb = new StringBuffer();

    c.push();

    for (c.next(); !c.isAtEndOfLastPush(); c.next())
        if (c.isText())
        {
            // Skip chars that are the value of a comment or procinst node.
            if ( (c._xobj.isComment() || c._xobj.isProcinst() ) && c._pos<c._xobj._cchValue )
                continue;
            CharUtil.getString(sb, c.getChars(-1), c._offSrc, c._cchSrc);
        }

    c.pop();

    return sb.toString();
}

// Copies up to maxCch chars of the node's text value (after applying the
// whitespace rule wsr) into chars starting at off; returns the count copied.
static int getTextValue(Cur c, int wsr, char[] chars, int off, int maxCch)
{
    // TODO - hack impl for now ... improve

    assert c.isNode();

    String s = c._xobj.getValueAsString(wsr);

    int n = s.length();

    if (n > maxCch)
        n = maxCch;

    if (n <= 0)
        return 0;

    s.getChars(0, n, chars, off);

    return n;
}
// Applies an XML schema whitespace facet (WS_PRESERVE, WS_REPLACE or
// WS_COLLAPSE) to s.  A fast pre-scan returns s unchanged when no
// normalization is needed; otherwise the string is scrubbed through
// processWhiteSpaceRule.  Null or empty input is returned as-is.
static String applyWhiteSpaceRule(String s, int wsr)
{
    int l = s == null ? 0 : s.length();

    if (l == 0 || wsr == WS_PRESERVE)
        return s;

    char ch;

    if (wsr == WS_REPLACE)
    {
        // Replace only rewrites \n, \r and \t; plain spaces are untouched.
        for (int i = 0; i < l; i++)
            if ((ch = s.charAt(i)) == '\n' || ch == '\r' || ch == '\t')
                return processWhiteSpaceRule(s, wsr);
    }
    else if (wsr == Locale.WS_COLLAPSE)
    {
        // Collapse must scrub when there is leading/trailing whitespace,
        // a run of consecutive whitespace, or any interior non-space
        // whitespace char (\n, \r, \t) that must be rewritten to ' '.
        if (CharUtil.isWhiteSpace(s.charAt(0)) ||
            CharUtil.isWhiteSpace(s.charAt(l - 1)))
            return processWhiteSpaceRule(s, wsr);

        boolean lastWasWhite = false;

        for (int i = 1; i < l; i++)
        {
            ch = s.charAt(i);

            boolean isWhite = CharUtil.isWhiteSpace(ch);

            // Fix: previously only consecutive whitespace triggered the
            // scrub, so a lone interior '\n'/'\r'/'\t' was returned
            // unreplaced, violating collapse's replace-then-collapse
            // semantics.
            if (isWhite && (lastWasWhite || ch != ' '))
                return processWhiteSpaceRule(s, wsr);

            lastWasWhite = isWhite;
        }
    }

    return s;
}

// Runs the full scrub for the given whitespace rule and returns the result.
static String processWhiteSpaceRule(String s, int wsr)
{
    ScrubBuffer sb = getScrubBuffer(wsr);

    sb.scrub(s, 0, s.length());

    return sb.getResultAsString();
}
// Reusable buffer that normalizes whitespace in character data according to
// a whitespace rule (preserve / replace / collapse).  Instances are cached
// per-thread via tl_scrubBuffer and must be init()'d before each use.
static final class ScrubBuffer
{
    ScrubBuffer()
    {
        _sb = new StringBuffer();
    }

    // Resets the buffer for a new scrub under the given whitespace rule.
    void init(int wsr)
    {
        _sb.delete(0, _sb.length());

        _wsr = wsr;
        _state = START_STATE;
    }

    // Appends cch chars of src (a char[] or another char source) starting
    // at off, applying the whitespace rule as it copies.  May be called
    // repeatedly to scrub a value in pieces.
    void scrub(Object src, int off, int cch)
    {
        if (cch == 0)
            return;

        if (_wsr == Locale.WS_PRESERVE)
        {
            CharUtil.getString(_sb, src, off, cch);
            return;
        }

        char[] chars;

        if (src instanceof char[])
            chars = (char[]) src;
        else
        {
            // Copy non-array sources into a scratch buffer; the buffer is
            // grown up to 16K and kept, larger requests use a throwaway.
            if (cch <= _srcBuf.length)
                chars = _srcBuf;
            else if (cch <= 16384)
                chars = _srcBuf = new char[16384];
            else
                chars = new char[cch];

            CharUtil.getChars(chars, 0, src, off, cch);
            off = 0;
        }

        int start = 0;

        for (int i = 0; i < cch; i++)
        {
            char ch = chars[off + i];

            if (ch == ' ' || ch == '\n' || ch == '\r' || ch == '\t')
            {
                _sb.append(chars, off + start, i - start);
                start = i + 1;

                if (_wsr == Locale.WS_REPLACE)
                    _sb.append(' ');
                else if (_state == NOSPACE_STATE)
                    // Collapse: remember we saw whitespace after content;
                    // leading whitespace (START_STATE) is simply dropped.
                    _state = SPACE_SEEN_STATE;
            }
            else
            {
                // Collapse: emit one deferred space between content runs.
                if (_state == SPACE_SEEN_STATE)
                    _sb.append(' ');

                _state = NOSPACE_STATE;
            }
        }

        _sb.append(chars, off + start, cch - start);
    }

    String getResultAsString()
    {
        return _sb.toString();
    }

    private static final int START_STATE = 0;      // no content emitted yet
    private static final int SPACE_SEEN_STATE = 1; // pending collapse space
    private static final int NOSPACE_STATE = 2;    // last char was content

    private int _state;

    private int _wsr;

    private char[] _srcBuf = new char[1024];
    private StringBuffer _sb;
}

// Per-thread ScrubBuffer cache, held through a SoftReference so it can be
// reclaimed under memory pressure.
private static ThreadLocal tl_scrubBuffer =
    new ThreadLocal()
    {
        protected Object initialValue()
        {
            return new SoftReference(new ScrubBuffer());
        }
    };

// Returns this thread's ScrubBuffer (re-creating it if the soft reference
// was cleared), initialized for the given whitespace rule.
static ScrubBuffer getScrubBuffer(int wsr)
{
    SoftReference softRef = (SoftReference) tl_scrubBuffer.get();

    ScrubBuffer scrubBuffer = (ScrubBuffer) (softRef).get();

    if (scrubBuffer == null)
    {
        scrubBuffer = new ScrubBuffer();
        tl_scrubBuffer.set(new SoftReference(scrubBuffer));
    }

    scrubBuffer.init(wsr);

    return scrubBuffer;
}
// Pushes c and moves it forward to its nearest enclosing container (ROOT or
// ELEM).  Returns false (and pops, restoring c) if an end token is reached
// before a container.  On success the push is left on the stack.
static boolean pushToContainer(Cur c)
{
    c.push();

    for (; ;)
    {
        switch (c.kind())
        {
            case ROOT:
            case ELEM:
                return true;
            case -ROOT:
            case -ELEM:
                c.pop();
                return false;
            case COMMENT:
            case PROCINST:
                c.skip();
                break;
            default :
                c.nextWithAttrs();
                break;
        }
    }
}

// Moves c to the first non-xmlns ("normal") attribute of its node, if any;
// c is unmoved on failure.
static boolean toFirstNormalAttr(Cur c)
{
    c.push();

    if (c.toFirstAttr())
    {
        do
        {
            if (!c.isXmlns())
            {
                c.popButStay();
                return true;
            }
        }
        while (c.toNextAttr());
    }

    c.pop();

    return false;
}

// Moves c (positioned on an attribute) backward to the previous non-xmlns
// attribute, if any; c is unmoved on failure.
static boolean toPrevNormalAttr(Cur c)
{
    if (c.isAttr())
    {
        c.push();

        for (; ;)
        {
            assert c.isAttr();

            // See if I can move backward.  If I'm at the first attr, prev must return
            // false and not move.

            if (!c.prev())
                break;

            // Skip past the text value or attr begin

            c.prev();

            // I might have skipped over text above

            if (!c.isAttr())
                c.prev();

            if (c.isNormalAttr())
            {
                c.popButStay();
                return true;
            }
        }

        c.pop();
    }

    return false;
}

// Moves c forward to the next non-xmlns attribute, if any; c is unmoved on
// failure.
static boolean toNextNormalAttr(Cur c)
{
    c.push();

    while (c.toNextAttr())
    {
        if (!c.isXmlns())
        {
            c.popButStay();
            return true;
        }
    }

    c.pop();

    return false;
}
// Finds the n'th child element of parent matching name (exact match) or set
// (QNameSet membership); both null means wildcard.  Two single-entry caches
// (_nthCache_A/_nthCache_B) are consulted and the cheaper one is used; on a
// tie they are swapped so both stay warm across alternating access patterns.
Xobj findNthChildElem(Xobj parent, QName name, QNameSet set, int n)
{
    // only one of (set or name) is not null
    // or both are null for a wildcard
    assert (name == null || set == null);
    assert n >= 0;

    if (parent == null)
        return null;

    int da = _nthCache_A.distance(parent, name, set, n);
    int db = _nthCache_B.distance(parent, name, set, n);

    Xobj x =
        da <= db
        ? _nthCache_A.fetch(parent, name, set, n)
        : _nthCache_B.fetch(parent, name, set, n);

    if (da == db)
    {
        nthCache temp = _nthCache_A;
        _nthCache_A = _nthCache_B;
        _nthCache_B = temp;
    }

    return x;
}

// Counts parent's child elements matching name or set, walking siblings
// starting from the first match (found through the nth-child cache).
int count(Xobj parent, QName name, QNameSet set)
{
    int n = 0;

    for (Xobj x = findNthChildElem(parent, name, set, 0);
         x != null; x = x._nextSibling)
    {
        if (x.isElem())
        {
            if (set == null)
            {
                if (x._name.equals(name))
                    n++;
            }
            else if (set.contains(x._name))
                n++;
        }
    }

    return n;
}

// Moves c to the n'th child element named name of c's nearest container;
// c is unmoved when there is no such child or n is negative.
static boolean toChild(Cur c, QName name, int n)
{
    if (n >= 0 && pushToContainer(c))
    {
        Xobj x = c._locale.findNthChildElem(c._xobj, name, null, n);

        c.pop();

        if (x != null)
        {
            c.moveTo(x);
            return true;
        }
    }

    return false;
}
// Moves c to the first child element of its nearest container; c is unmoved
// on failure.  Unlike toLastChildElement, this inlines the pushToContainer
// walk using a saved (Xobj, pos) position instead of the cursor stack —
// the commented-out code below is the original stack-based version.
static boolean toFirstChildElement(Cur c)
{
//        if (!pushToContainer(c))
//            return false;
//
//        if (!c.toFirstChild() || (!c.isElem() && !toNextSiblingElement(c)))
//        {
//            c.pop();
//            return false;
//        }
//
//        c.popButStay();
//
//        return true;

    Xobj originalXobj = c._xobj;
    int originalPos = c._pos;

    loop:
    for (; ;)
    {
        switch (c.kind())
        {
            case ROOT:
            case ELEM:
                break loop;
            case -ROOT:
            case -ELEM:
                c.moveTo(originalXobj, originalPos);
                return false;
            case COMMENT:
            case PROCINST:
                c.skip();
                break;
            default:
                c.nextWithAttrs();
                break;
        }
    }

    if (!c.toFirstChild() || (!c.isElem() && !toNextSiblingElement(c)))
    {
        c.moveTo(originalXobj, originalPos);
        return false;
    }

    return true;
}

// Moves c to the last child element of its nearest container; c is unmoved
// on failure.
static boolean toLastChildElement(Cur c)
{
    if (!pushToContainer(c))
        return false;

    if (!c.toLastChild() || (!c.isElem() && !toPrevSiblingElement(c)))
    {
        c.pop();
        return false;
    }

    c.popButStay();

    return true;
}
// Moves cur to the preceding sibling element, if any.  Works on a temp
// cursor so cur only moves on success.  Attributes have no preceding
// sibling elements here (k == ATTR short-circuits to false).
static boolean toPrevSiblingElement(Cur cur)
{
    if (!cur.hasParent())
        return false;

    Cur c = cur.tempCur();

    boolean moved = false;

    int k = c.kind();

    if (k != ATTR)
    {
        for (; ;)
        {
            if (!c.prev())
                break;

            k = c.kind();

            if (k == ROOT || k == ELEM)
                break;

            // Landing on an element END token means we just walked past a
            // whole sibling element; step onto it via its parent.
            if (c.kind() == -ELEM)
            {
                c.toParent();

                cur.moveToCur(c);
                moved = true;

                break;
            }
        }
    }

    c.release();

    return moved;
}

// Moves c to the following sibling element, if any (cursor-stack variant);
// c is unmoved on failure.  From an attribute, the search starts after the
// parent's start token.
static boolean toNextSiblingElement(Cur c)
{
    if (!c.hasParent())
        return false;

    c.push();

    int k = c.kind();

    if (k == ATTR)
    {
        c.toParent();
        c.next();
    }
    else if (k == ELEM)
        c.skip();

    while ((k = c.kind()) >= 0)
    {
        if (k == ELEM)
        {
            c.popButStay();
            return true;
        }

        // Skip over the contents of any other begin token.
        if (k > 0)
            c.toEnd();

        c.next();
    }

    c.pop();

    return false;
}

// Variant of toNextSiblingElement that avoids the cursor stack by saving
// the position directly and using the supplied parent for the ATTR case.
static boolean toNextSiblingElement(Cur c, Xobj parent)
{
    Xobj originalXobj = c._xobj;
    int originalPos = c._pos;

    int k = c.kind();

    if (k == ATTR)
    {
        c.moveTo(parent);
        c.next();
    }
    else if (k == ELEM)
        c.skip();

    while ((k = c.kind()) >= 0)
    {
        if (k == ELEM)
        {
            return true;
        }

        if (k > 0)
            c.toEnd();

        c.next();
    }

    c.moveTo(originalXobj, originalPos);

    return false;
}
// Declares xmlns attributes on the container at c for every prefix->uri
// mapping not already in scope there.  Mappings whose prefix begins with
// "xml" (e.g. the predefined xml namespace) are skipped.  c's position is
// preserved.
static void applyNamespaces(Cur c, Map namespaces)
{
    assert c.isContainer();

    java.util.Iterator i = namespaces.keySet().iterator();

    while (i.hasNext())
    {
        String prefix = (String) i.next();

        // Usually, this is the predefined xml namespace
        if (!prefix.toLowerCase().startsWith("xml"))
        {
            if (c.namespaceForPrefix(prefix, false) == null)
            {
                c.push();

                c.next();
                c.createAttr(c._locale.createXmlns(prefix));
                c.next();

                c.insertString((String) namespaces.get(prefix));

                c.pop();
            }
        }
    }
}

// Collects every prefix->uri mapping in scope at c into filleMe, allocating
// a HashMap lazily when filleMe is null.  Inner declarations shadow outer
// ones (first writer wins while walking outward).  Returns the map, or
// null if no mappings were found and none was supplied.
static Map getAllNamespaces(Cur c, Map filleMe)
{
    assert c.isNode();

    c.push();

    if (!c.isContainer())
        c.toParent();

    assert c.isContainer();

    do
    {
        QName cName = c.getName();

        while (c.toNextAttr())
        {
            if (c.isXmlns())
            {
                String prefix = c.getXmlnsPrefix();
                String uri = c.getXmlnsUri();

                // Here I check to see if there is a default namespace
                // mapping which is not empty on a non root container which
                // is in a namespace.  In this case, I do not want to add
                // this mapping because it could not be persisted out this
                // way.

                if (prefix.length() == 0 && uri.length() > 0 &&
                    cName != null &&
                    cName.getNamespaceURI().length() > 0)
                {
                    continue;
                }

                if (filleMe == null)
                    filleMe = new HashMap();

                if (!filleMe.containsKey(prefix))
                    filleMe.put(prefix, uri);
            }
        }

        // toNextAttr left the cursor on the last attr; climb back to the
        // container before walking outward.
        if (!c.isContainer())
            c.toParentRaw();
    }
    while (c.toParentRaw());

    c.pop();

    return filleMe;
}
// Single-entry cache mapping (parent, name-or-set pattern) to the n'th
// matching child element, making sequential indexed access O(1) amortized
// instead of rescanning the child list.  Entries are invalidated by any
// bump of the locale version.
class nthCache
{
    private boolean namesSame(QName pattern, QName name)
    {
        return pattern == null || pattern.equals(name);
    }

    private boolean setsSame(QNameSet patternSet, QNameSet set)
    {
        // value equality is probably too expensive. Since the use case
        // involves QNameSets that are generated by the compiler, we
        // can use identity comparison.
        return patternSet != null && patternSet == set;
    }

    // True if name matches the requested pattern (exact name, or set
    // membership when a set pattern is given).
    private boolean nameHit(QName namePattern, QNameSet setPattern,
        QName name)
    {
        return
            setPattern == null
            ? namesSame(namePattern, name)
            : setPattern.contains(name);
    }

    // True if the cached pattern equals the requested one.
    private boolean cacheSame(QName namePattern, QNameSet setPattern)
    {
        return
            setPattern == null
            ? namesSame(namePattern, _name)
            : setsSame(setPattern, _set);
    }

    // Cost estimate of answering (parent, pattern, n) from this cache:
    // MAX_VALUE on a parent/pattern miss, MAX_VALUE-1 on a stale version,
    // otherwise the walk distance from the cached index.
    int distance(Xobj parent, QName name, QNameSet set, int n)
    {
        assert n >= 0;

        if (_version != Locale.this.version())
            return Integer.MAX_VALUE - 1;

        if (parent != _parent || !cacheSame(name, set))
            return Integer.MAX_VALUE;

        return n > _n ? n - _n : _n - n;
    }

    // Returns the n'th matching child of parent: on any miss the cached
    // anchor is refreshed to the first match, then the anchor is walked
    // forward/backward to index n.
    Xobj fetch(Xobj parent, QName name, QNameSet set, int n)
    {
        assert n >= 0;

        if (_version != Locale.this.version() || _parent != parent ||
            !cacheSame(name, set) || n == 0)
        {
            _version = Locale.this.version();
            _parent = parent;
            _name = name;
            // Fix: record the set pattern too.  _set was never assigned,
            // so cacheSame could never match a set query (the cache was
            // dead for sets) — and, once assigned, it must also be
            // cleared here (set == null) on name-based refreshes so a
            // stale set cannot falsely match later.
            _set = set;
            _child = null;
            _n = -1;

            loop:
            for (Xobj x = parent._firstChild;
                 x != null; x = x._nextSibling)
            {
                if (x.isElem() && nameHit(name, set, x._name))
                {
                    _child = x;
                    _n = 0;
                    break loop;
                }
            }
        }

        if (_n < 0)
            return null;

        if (n > _n)
        {
            while (n > _n)
            {
                for (Xobj x = _child._nextSibling; ; x = x._nextSibling)
                {
                    if (x == null)
                        return null;

                    if (x.isElem() && nameHit(name, set, x._name))
                    {
                        _child = x;
                        _n++;
                        break;
                    }
                }
            }
        }
        else if (n < _n)
        {
            while (n < _n)
            {
                for (Xobj x = _child._prevSibling; ; x = x._prevSibling)
                {
                    if (x == null)
                        return null;

                    if (x.isElem() && nameHit(name, set, x._name))
                    {
                        _child = x;
                        _n--;
                        break;
                    }
                }
            }
        }

        return _child;
    }

    private long _version;   // locale version this entry is valid for
    private Xobj _parent;    // cached parent
    private QName _name;     // cached exact-name pattern (or null)
    private QNameSet _set;   // cached set pattern (or null)
    private Xobj _child;     // anchor child at index _n
    private int _n;          // index of _child among matches; -1 = none
}
//
//
//
// Finds the n'th DOM child (any node kind) of parent using the two
// single-entry dom caches.  A cache whose walk would exceed half the list
// length plus the BLITZ_BOUNDARY slack is considered useless and the
// *other* cache is blitzed (invalidated) and used instead.
Dom findDomNthChild ( Dom parent, int n )
{
    assert n >= 0;

    if (parent == null)
        return null;

    int da = _domNthCache_A.distance(parent, n);
    int db = _domNthCache_B.distance(parent, n);

    // the "better" cache should never walk more than 1/2 len
    Dom x = null;
    boolean bInvalidate = (db - _domNthCache_B._len / 2 > 0) &&
        (db - _domNthCache_B._len / 2 - domNthCache.BLITZ_BOUNDARY > 0);

    boolean aInvalidate = (da - _domNthCache_A._len / 2 > 0) &&
        (da - _domNthCache_A._len / 2 - domNthCache.BLITZ_BOUNDARY > 0);

    if (da <= db)
        if (!aInvalidate)
            x = _domNthCache_A.fetch(parent, n);
        else
        {
            _domNthCache_B._version = -1;//blitz the cache
            x = _domNthCache_B.fetch(parent, n);
        }
    else if (!bInvalidate)
        x = _domNthCache_B.fetch(parent, n);
    else
    {
        _domNthCache_A._version = -1;//blitz the cache
        x = _domNthCache_A.fetch(parent, n);
    }

    // On a tie, swap the caches so both stay warm.
    if (da == db)
    {
        domNthCache temp = _domNthCache_A;
        _domNthCache_A = _domNthCache_B;
        _domNthCache_B = temp;
    }

    return x;
}

// Returns the number of DOM children of parent, via the cheaper cache.
int domLength ( Dom parent )
{
    if (parent == null)
        return 0;

    int da = _domNthCache_A.distance( parent, 0 );
    int db = _domNthCache_B.distance( parent, 0 );

    int len =
        da <= db
        ? _domNthCache_A.length( parent )
        : _domNthCache_B.length( parent );

    if (da == db)
    {
        domNthCache temp = _domNthCache_A;
        _domNthCache_A = _domNthCache_B;
        _domNthCache_B = temp;
    }

    return len;
}

// Invalidates any dom cache anchored at d; call before mutating d's
// child list.
void invalidateDomCaches ( Dom d )
{
    if (_domNthCache_A._parent == d)
        _domNthCache_A._version = -1;
    if (_domNthCache_B._parent == d)
        _domNthCache_B._version = -1;
}

// True if either dom cache is anchored at d.
boolean isDomCached ( Dom d )
{
    return _domNthCache_A._parent == d || _domNthCache_B._parent == d;
}
// Single-entry cache over a DOM parent's child list: remembers one
// (index, child) anchor plus the child count so indexed access and length
// queries don't rescan from the first child each time.  Invalidated by
// locale version bumps or by setting _version = -1 directly.
class domNthCache
{
    // Cost estimate of answering (parent, n) from this cache (see
    // nthCache.distance for the convention).
    int distance ( Dom parent, int n )
    {
        assert n >= 0;

        if (_version != Locale.this.version())
            return Integer.MAX_VALUE - 1;

        if (parent != _parent)
            return Integer.MAX_VALUE;

        return n > _n ? n - _n : _n - n;
    }

    // Returns (and caches) the number of children of parent.
    int length ( Dom parent )
    {
        if (_version != Locale.this.version() || _parent != parent)
        {
            _parent = parent;
            _version = Locale.this.version();
            _child = null;
            _n = -1;
            _len = -1;
        }

        if (_len == -1)
        {
            Dom x = null;

            if (_child != null && _n != -1)
            {
                // Resume counting from the cached anchor.
                x = _child;
                _len = _n;
            }
            else
            {
                x = DomImpl.firstChild(_parent);
                _len = 0;

                // cache the 0th child
                _child = x;
                _n = 0;
            }

            for (; x != null; x = DomImpl.nextSibling(x) )
            {
                _len++;
            }
        }

        return _len;
    }

    // Returns the n'th child of parent, walking forward or backward from
    // the cached anchor when possible; null when n is out of range.
    Dom fetch ( Dom parent, int n )
    {
        assert n >= 0;

        if (_version != Locale.this.version() || _parent != parent)
        {
            _parent = parent;
            _version = Locale.this.version();
            _child = null;
            _n = -1;
            _len = -1;

            for (Dom x = DomImpl.firstChild(_parent); x != null; x = DomImpl.nextSibling(x) )
            {
                _n++;
                if (_child == null && n == _n )
                {
                    _child = x;
                    break;
                }
            }

            if (_child == null && _n >= 0)
            {
                // Fix: n was past the end, which previously left _n at
                // the last index with a null anchor; a later fetch at a
                // valid index would then walk siblings of null (NPE).
                // Invalidate so the next fetch re-walks from the start.
                _version = -1;
                _n = -1;
            }

            return _child;
        }

        // _n < 0 with a valid entry means the parent has no children.
        if (_n < 0)
            return null;

        if (n > _n)
        {
            while ( n > _n )
            {
                for (Dom x = DomImpl.nextSibling(_child); ; x = DomImpl.nextSibling(x) )
                {
                    if (x == null)
                        return null;

                    _child = x;
                    _n++;
                    break;
                }
            }
        }
        else if (n < _n)
        {
            while ( n < _n )
            {
                for (Dom x = DomImpl.prevSibling(_child); ; x = DomImpl.prevSibling(x) )
                {
                    if (x == null)
                        return null;

                    _child = x;
                    _n--;
                    break;
                }
            }
        }

        return _child;
    }

    public static final int BLITZ_BOUNDARY = 40; //walk small lists
    private long _version;  // locale version this entry is valid for
    private Dom _parent;    // cached parent
    private Dom _child;     // anchor child at index _n
    private int _n;         // index of _child; -1 = no anchor
    private int _len;       // cached child count; -1 = unknown
}
//
//
//
// Lazily creates this locale's CharUtil (1K-char scratch buffer).
CharUtil getCharUtil()
{
    if (_charUtil == null)
        _charUtil = new CharUtil(1024);

    return _charUtil;
}

// Current mutation version; bumped on every change in this locale.
long version()
{
    return _versionAll;
}

// Returns a Cur whose lifetime is tied to o: when o becomes unreachable,
// the phantom Ref enqueued on refQueue causes the Cur to be released
// (see pollQueue).
Cur weakCur(Object o)
{
    assert o != null && !(o instanceof Ref);

    Cur c = getCur();

    assert c._tempFrame == -1;
    assert c._ref == null;

    c._ref = new Ref(c, o);

    return c;
}

// Lazily creates the reference queue used for weakCur cleanup.
final ReferenceQueue refQueue()
{
    if (_refQueue == null)
        _refQueue = new ReferenceQueue();

    return _refQueue;
}

// Phantom reference pairing a Cur with the user object keeping it alive.
final static class Ref
    extends PhantomReference
{
    Ref(Cur c, Object obj)
    {
        super(obj, c._locale.refQueue());
        _cur = c;
    }

    Cur _cur;
}

// Returns a Cur registered in the current temp frame; it is released
// automatically when the frame exits (see exit()).
Cur tempCur()
{
    return tempCur(null);
}

// Temp-frame Cur with a debugging id.  The Cur is linked at the head of
// the current frame's list so exit() can release everything created
// inside the frame.
Cur tempCur(String id)
{
    Cur c = getCur();

    assert c._tempFrame == -1;

    assert _numTempFramesLeft < _tempFrames.length : "Temp frame not pushed";

    int frame = _tempFrames.length - _numTempFramesLeft - 1;

    assert frame >= 0 && frame < _tempFrames.length;

    Cur next = _tempFrames[frame];

    c._nextTemp = next;
    assert c._prevTemp == null;

    if (next != null)
    {
        assert next._prevTemp == null;
        next._prevTemp = c;
    }

    _tempFrames[frame] = c;
    c._tempFrame = frame;

    c._id = id;

    return c;
}
// Gets a Cur from the free pool (or allocates one) and moves it to the
// REGISTERED list.
Cur getCur()
{
    assert _curPool == null || _curPoolCount > 0;

    Cur c;

    if (_curPool == null)
        c = new Cur(this);
    else
    {
        _curPool = _curPool.listRemove(c = _curPool);
        _curPoolCount--;
    }

    assert c._state == Cur.POOLED;
    assert c._prev == null && c._next == null;
    assert c._xobj == null && c._pos == Cur.NO_POS;
    assert c._ref == null;

    _registered = c.listInsert(_registered);
    c._state = Cur.REGISTERED;

    return c;
}

// Moves every REGISTERED Cur onto its Xobj's embedded list, so the Xobj
// can keep the cursors positioned across structural changes.
void embedCurs()
{
    for (Cur c; (c = _registered) != null;)
    {
        assert c._xobj != null;

        _registered = c.listRemove(_registered);
        c._xobj._embedded = c.listInsert(c._xobj._embedded);
        c._state = Cur.EMBEDDED;
    }
}

// Text node factory; uses the SAAJ-aware subclass when SAAJ is plugged in.
TextNode createTextNode()
{
    return _saaj == null ? new TextNode(this) : new SaajTextNode(this);
}

// CDATA node factory; SAAJ-aware like createTextNode.
CdataNode createCdataNode()
{
    return _saaj == null ?
        new CdataNode(this) : new SaajCdataNode(this);
}

// True when at least one temp frame is active (between enter() and exit()).
boolean entered()
{
    return _tempFrames.length - _numTempFramesLeft > 0;
}
// Enters this locale and, when different, the other locale too (used for
// cross-locale operations).
public void enter(Locale otherLocale)
{
    enter();

    if (otherLocale != this)
        otherLocale.enter();
}

// Pushes a temp-Cur frame, doubling the frame stack when exhausted, and
// polls the weak-Cur reference queue every 1000 entries.
public void enter()
{
    assert _numTempFramesLeft >= 0;

    if (--_numTempFramesLeft <= 0)
    {
        Cur[] newTempFrames = new Cur[(_numTempFramesLeft =
            _tempFrames.length) *
            2];
        System.arraycopy(_tempFrames, 0, newTempFrames, 0,
            _tempFrames.length);
        _tempFrames = newTempFrames;
    }

    if (++_entryCount > 1000)
    {
        pollQueue();
        _entryCount = 0;
    }
}

// Releases Curs whose associated user objects have been garbage collected
// (enqueued via the phantom Refs created in weakCur).
private void pollQueue()
{
    if (_refQueue != null)
    {
        for (; ;)
        {
            Ref ref = (Ref) _refQueue.poll();

            if (ref == null)
                break;

            if (ref._cur != null)
                ref._cur.release();
        }
    }
}

// Exits this locale and, when different, the other one.
public void exit(Locale otherLocale)
{
    exit();

    if (otherLocale != this)
        otherLocale.exit();
}

// Pops the current temp frame, releasing every Cur created inside it.
public void exit()
{
    // assert _numTempFramesLeft >= 0;
    //asserts computed frame fits between 0 and _tempFrames.length
    assert _numTempFramesLeft >= 0 &&
        (_numTempFramesLeft <= _tempFrames.length - 1):
        " Temp frames mismanaged. Impossible stack frame. Unsynchronized: " +
        noSync();

    int frame = _tempFrames.length - ++_numTempFramesLeft;

    while (_tempFrames[frame] != null)
        _tempFrames[frame].release();
}
//
//
//
// True when this locale is unsynchronized (the caller manages locking).
public boolean noSync()
{
    return _noSync;
}

// True when this locale synchronizes internally.
public boolean sync()
{
    return !_noSync;
}
// True if every char of s is XML whitespace (an empty string counts as
// all-whitespace).
static final boolean isWhiteSpace(String s)
{
    int l = s.length();

    while (l-- > 0)
        if (!CharUtil.isWhiteSpace(s.charAt(l)))
            return false;

    return true;
}

// StringBuffer overload of isWhiteSpace.
static final boolean isWhiteSpace(StringBuffer sb)
{
    int l = sb.length();

    while (l-- > 0)
        if (!CharUtil.isWhiteSpace(sb.charAt(l)))
            return false;

    return true;
}
// True iff name starts with the reserved letters "xml", in any case mix
// (e.g. "xml", "XML", "XmlFoo").  Names shorter than three chars never
// match.
static boolean beginsWithXml(String name)
{
    if (name.length() < 3)
        return false;

    return name.regionMatches(true, 0, "xml", 0, 3);
}
// True iff name denotes an xmlns declaration attribute: either the prefix
// is "xmlns" (xmlns:foo="...") or there is no prefix and the local name
// itself is "xmlns" (the default-namespace declaration).
static boolean isXmlns(QName name)
{
    String prefix = name.getPrefix();

    return "xmlns".equals(prefix)
        || (prefix.length() == 0 && "xmlns".equals(name.getLocalPart()));
}
// Builds the QName of an xmlns declaration attribute: xmlns="..." when
// prefix is null/empty, otherwise xmlns:prefix="...".
QName createXmlns(String prefix)
{
    if (prefix == null)
        prefix = "";

    return
        prefix.length() == 0
        ? makeQName(_xmlnsUri, "xmlns", "")
        : makeQName(_xmlnsUri, prefix, "xmlns");
}
// Returns the namespace prefix declared by an xmlns attribute name:
// "xmlns:foo" declares "foo"; anything else (including the default
// "xmlns" declaration) yields "".
static String xmlnsPrefix(QName name)
{
    if ("xmlns".equals(name.getPrefix()))
        return name.getLocalPart();

    return "";
}
//
// Loading/parsing
//
// Event sink used while parsing: the SAX/Piccolo handlers feed start/end/
// attr/text events in, and finish() returns a Cur over the loaded tree
// (abort() discards it).  Also tracks DTD-declared ID attributes so they
// can be recognized during the load.
static abstract class LoadContext
{
    protected abstract void startDTD(String name, String publicId,
        String systemId);

    protected abstract void endDTD();

    protected abstract void startElement(QName name);

    protected abstract void endElement();

    protected abstract void attr(QName name, String value);

    protected abstract void attr(String local, String uri, String prefix,
        String value);

    protected abstract void xmlns(String prefix, String uri);

    protected abstract void comment(char[] buff, int off, int cch);

    protected abstract void comment(String comment);

    protected abstract void procInst(String target, String value);

    protected abstract void text(char[] buff, int off, int cch);

    protected abstract void text(String s);

    protected abstract Cur finish();

    protected abstract void abort();

    protected abstract void bookmark(XmlBookmark bm);

    protected abstract void bookmarkLastNonAttr(XmlBookmark bm);

    protected abstract void bookmarkLastAttr(QName attrName,
        XmlBookmark bm);

    protected abstract void lineNumber(int line, int column, int offset);

    // Records that attribute aName of element eName was declared of type
    // ID in the DTD (map: lexical attr name -> lexical element name).
    protected void addIdAttr(String eName, String aName){
        if ( _idAttrs == null )
            _idAttrs = new java.util.Hashtable();
        _idAttrs.put(aName,eName);
    }

    // True if attribute aqn on element eqn was DTD-declared as an ID.
    // Matching is on the lexical "prefix:local" forms, as the DTD
    // declares them.
    protected boolean isAttrOfTypeId(QName aqn, QName eqn){
        if (_idAttrs == null)
            return false;
        String pre = aqn.getPrefix();
        String lName = aqn.getLocalPart();
        String urnName = "".equals(pre)?lName:pre + ":" + lName;
        String eName = (String) _idAttrs.get(urnName);
        if (eName == null ) return false;
        //get the name of the parent elt
        pre = eqn.getPrefix();
        // Fix: the original assigned eqn.getLocalPart() twice in a row;
        // the duplicate statement has been removed.
        lName = eqn.getLocalPart();
        urnName = "".equals(pre)?lName:pre + ":" + lName;
        return eName.equals(urnName);
    }

    private java.util.Hashtable _idAttrs;
}
// Entity resolver that resolves every external entity to an empty stream,
// preventing the parser from performing network or file access.
private static class DefaultEntityResolver
    implements EntityResolver
{
    public InputSource resolveEntity(String publicId, String systemId)
    {
        return new InputSource(new StringReader(""));
    }
}
// Returns the process-wide Piccolo-based SAX loader, creating it and
// caching it in the SystemCache on first use.
private static SaxLoader getPiccoloSaxLoader()
{
    SaxLoader piccoloLoader = (SaxLoader) SystemCache.get().getSaxLoader();

    if (piccoloLoader == null)
    {
        piccoloLoader = PiccoloSaxLoader.newInstance();

        SystemCache.get().setSaxLoader(piccoloLoader);
    }

    return piccoloLoader;
}

// Chooses the SAX loader for the given options: a user-supplied XMLReader
// (LOAD_USE_XMLREADER) or the default Piccolo loader.  The entity resolver
// is the option-supplied one, else the global one, else the no-network
// DefaultEntityResolver — unless LOAD_USE_DEFAULT_RESOLVER asks for the
// parser's own default behavior.
private static SaxLoader getSaxLoader(XmlOptions options)
{
    options = XmlOptions.maskNull(options);

    EntityResolver er = null;

    if (!options.hasOption(XmlOptions.LOAD_USE_DEFAULT_RESOLVER))
    {
        er = (EntityResolver) options.get(XmlOptions.ENTITY_RESOLVER);

        if (er == null)
            er = ResolverUtil.getGlobalEntityResolver();

        if (er == null)
            er = new DefaultEntityResolver();
    }

    SaxLoader sl;

    if (options.hasOption(XmlOptions.LOAD_USE_XMLREADER))
    {
        XMLReader xr = (XMLReader) options.get(
            XmlOptions.LOAD_USE_XMLREADER);

        if (xr == null)
            throw new IllegalArgumentException("XMLReader is null");

        sl = new XmlReaderSaxLoader(xr);

        // I've noticed that most XMLReaders don't like a null EntityResolver...
        if (er != null)
            xr.setEntityResolver(er);
    }
    else
    {
        sl = getPiccoloSaxLoader();

        // Piccolo does not mind a null entity resolver ...
        sl.setEntityResolver(er);
    }

    return sl;
}
// SaxLoader over a caller-supplied XMLReader (no start Locator available,
// so line-number bookmarks are unavailable).
private static class XmlReaderSaxLoader
    extends SaxLoader
{
    XmlReaderSaxLoader(XMLReader xr)
    {
        super(xr, null);
    }
}

// SaxLoader over the bundled Piccolo parser; after load it records the
// declared XML encoding/version into the document properties.
private static class PiccoloSaxLoader
    extends SaxLoader
{
    private PiccoloSaxLoader(Piccolo p)
    {
        super(p, p.getStartLocator());

        _piccolo = p;
    }

    static PiccoloSaxLoader newInstance()
    {
        return new PiccoloSaxLoader(new Piccolo());
    }

    void postLoad(Cur c)
    {
        XmlDocumentProperties props = getDocProps(c, true);

        props.setEncoding(_piccolo.getEncoding());
        props.setVersion(_piccolo.getVersion());

        super.postLoad(c);
    }

    private Piccolo _piccolo;
}
// SAX event handler that feeds parse events into a Cur.CurLoadContext,
// optionally recording line-number and CDATA bookmarks.  Also implements
// the lexical, declaration and DTD handlers needed for comments, DTD
// boundaries and ID-typed attributes.
private static abstract class SaxHandler
    implements ContentHandler, LexicalHandler , DeclHandler, DTDHandler
{
    SaxHandler(Locator startLocator)
    {
        _startLocator = startLocator;
    }

    SaxHandler()
    {
        this(null);
    }

    // Prepares the handler for one parse: creates the load context and
    // caches which bookmark options were requested (all require a Locator).
    void initSaxHandler(Locale l, XmlOptions options)
    {
        _locale = l;

        options = XmlOptions.maskNull(options);

        _context = new Cur.CurLoadContext(_locale, options);

        _wantLineNumbers =
            _startLocator != null &&
            options.hasOption(XmlOptions.LOAD_LINE_NUMBERS);
        _wantLineNumbersAtEndElt =
            _startLocator != null &&
            options.hasOption(XmlOptions.LOAD_LINE_NUMBERS_END_ELEMENT);
        _wantCdataBookmarks =
            _startLocator != null &&
            options.hasOption(XmlOptions.LOAD_SAVE_CDATA_BOOKMARKS);
    }

    public void startDocument()
        throws SAXException
    {
        // Do nothing ... start of document is implicit
    }

    public void endDocument()
        throws SAXException
    {
        // Do nothing ... end of document is implicit
    }

    public void startElement(String uri, String local, String qName,
        Attributes atts)
        throws SAXException
    {
        if (local.length() == 0)
            local = qName;

        // Our current parser (Piccolo) does not error when a
        // namespace is used and not defined.  Check for these here

        if (qName.indexOf(':') >= 0 && uri.length() == 0)
        {
            XmlError err =
                XmlError.forMessage("Use of undefined namespace prefix: " +
                qName.substring(0, qName.indexOf(':')));

            throw new XmlRuntimeException(err.toString(), null, err);
        }

        _context.startElement(_locale.makeQualifiedQName(uri, qName));

        if (_wantLineNumbers)
        {
            _context.bookmark(
                new XmlLineNumber(_startLocator.getLineNumber(),
                    _startLocator.getColumnNumber() - 1, -1));
        }

        // Attributes: xmlns declarations are routed to xmlns(); everything
        // else goes through attr(), split into prefix/local as needed.
        for (int i = 0, len = atts.getLength(); i < len; i++)
        {
            String aqn = atts.getQName(i);

            if (aqn.equals("xmlns"))
            {
                _context.xmlns("", atts.getValue(i));
            }
            else if (aqn.startsWith("xmlns:"))
            {
                String prefix = aqn.substring(6);

                if (prefix.length() == 0)
                {
                    XmlError err =
                        XmlError.forMessage("Prefix not specified",
                            XmlError.SEVERITY_ERROR);

                    throw new XmlRuntimeException(err.toString(), null,
                        err);
                }

                String attrUri = atts.getValue(i);

                if (attrUri.length() == 0)
                {
                    XmlError err =
                        XmlError.forMessage(
                            "Prefix can't be mapped to no namespace: " +
                        prefix,
                            XmlError.SEVERITY_ERROR);

                    throw new XmlRuntimeException(err.toString(), null,
                        err);
                }

                _context.xmlns(prefix, attrUri);
            }
            else
            {
                int colon = aqn.indexOf(':');

                if (colon < 0)
                    _context.attr(aqn, atts.getURI(i), null,
                        atts.getValue(i));
                else
                {
                    _context.attr(aqn.substring(colon + 1), atts.getURI(i), aqn.substring(
                        0, colon),
                        atts.getValue(i));
                }
            }
        }
    }

    public void endElement(String namespaceURI, String localName,
        String qName)
        throws SAXException
    {
        _context.endElement();
        if (_wantLineNumbersAtEndElt)
        {
            _context.bookmark(
                new XmlLineNumber(_startLocator.getLineNumber(),
                    _startLocator.getColumnNumber() - 1, -1));
        }
    }

    public void characters(char ch[], int start, int length)
        throws SAXException
    {
        _context.text(ch, start, length);

        // Inside a CDATA section, tag the text just added so the saver
        // can round-trip it as CDATA.
        if (_wantCdataBookmarks && _insideCDATA)
            _context.bookmarkLastNonAttr(CDataBookmark.CDATA_BOOKMARK);
    }

    public void ignorableWhitespace(char ch[], int start, int length)
        throws SAXException
    {
    }

    public void comment(char ch[], int start, int length)
        throws SAXException
    {
        _context.comment(ch, start, length);
    }

    public void processingInstruction(String target, String data)
        throws SAXException
    {
        _context.procInst(target, data);
    }

    public void startDTD(String name, String publicId, String systemId)
        throws SAXException
    {
        _context.startDTD(name, publicId, systemId);
    }

    public void endDTD()
        throws SAXException
    {
        _context.endDTD();
    }

    public void startPrefixMapping(String prefix, String uri)
        throws SAXException
    {
        // Only the predefined "xml" prefix (bound to the 1998 namespace)
        // may begin with the reserved letters x-m-l.
        if (beginsWithXml(prefix) &&
            !("xml".equals(prefix) && _xml1998Uri.equals(uri)))
        {
            XmlError err =
                XmlError.forMessage(
                    "Prefix can't begin with XML: " + prefix,
                    XmlError.SEVERITY_ERROR);

            throw new XmlRuntimeException(err.toString(), null, err);
        }
    }

    public void endPrefixMapping(String prefix)
        throws SAXException
    {
    }

    public void skippedEntity(String name)
        throws SAXException
    {
//            throw new RuntimeException( "Not impl: skippedEntity" );
    }

    public void startCDATA()
        throws SAXException
    {
        _insideCDATA = true;
    }

    public void endCDATA()
        throws SAXException
    {
        _insideCDATA = false;
    }

    public void startEntity(String name)
        throws SAXException
    {
//            throw new RuntimeException( "Not impl: startEntity" );
    }

    public void endEntity(String name)
        throws SAXException
    {
//            throw new RuntimeException( "Not impl: endEntity" );
    }

    public void setDocumentLocator(Locator locator)
    {
        // TODO - for non-Piccolo use cases, use a locator to get line numbers
    }

    //DeclHandler: remember attributes DTD-declared with type ID
    public void attributeDecl(String eName, String aName, String type, String valueDefault, String value){
        if (type.equals("ID")){
            _context.addIdAttr(eName,aName);
        }
    }

    public void elementDecl(String name, String model){
    }

    public void externalEntityDecl(String name, String publicId, String systemId){
    }

    public void internalEntityDecl(String name, String value){
    }

    //DTDHandler
    public void notationDecl(String name, String publicId, String systemId){
    }

    public void unparsedEntityDecl(String name, String publicId, String systemId, String notationName){
    }

    protected Locale _locale;
    protected LoadContext _context;

    private boolean _wantLineNumbers;          // bookmark at element starts
    private boolean _wantLineNumbersAtEndElt;  // bookmark at element ends
    private boolean _wantCdataBookmarks;       // tag CDATA text runs
    private Locator _startLocator;             // null => no line info
    private boolean _insideCDATA = false;
}
// Base loader: wires a SaxHandler into an XMLReader, runs the parse, and
// converts SAX failures into XmlExceptions carrying source location info.
private static abstract class SaxLoader
    extends SaxHandler
    implements ErrorHandler
{
    SaxLoader(XMLReader xr, Locator startLocator)
    {
        super(startLocator);

        _xr = xr;

        try
        {
            _xr.setFeature(
                "http://xml.org/sax/features/namespace-prefixes", true);
            _xr.setFeature("http://xml.org/sax/features/namespaces", true);
            _xr.setFeature("http://xml.org/sax/features/validation", false);
            _xr.setProperty(
                "http://xml.org/sax/properties/lexical-handler", this);
            _xr.setContentHandler(this);
            _xr.setProperty("http://xml.org/sax/properties/declaration-handler", this);
            _xr.setDTDHandler(this);
            _xr.setErrorHandler(this);
        }
        catch (Throwable e)
        {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    void setEntityResolver(EntityResolver er)
    {
        _xr.setEntityResolver(er);
    }

    void postLoad(Cur c)
    {
        // fix garbage collection of Locale -> Xobj -> STL
        _locale = null;
        _context = null;
    }

    // Parses is into a new document in locale l and returns a Cur over it.
    // On any failure the partially built tree is aborted and the error is
    // rethrown as an XmlException (with location for SAXParseExceptions).
    public Cur load(Locale l, InputSource is, XmlOptions options)
        throws XmlException, IOException
    {
        is.setSystemId("file://");

        initSaxHandler(l, options);

        try
        {
            _xr.parse(is);

            Cur c = _context.finish();

            associateSourceName(c, options);

            postLoad(c);

            return c;
        }
        catch (FileFormatException e)
        {
            _context.abort();
            throw new XmlException(e.getMessage(), e);
        }
        catch (XmlRuntimeException e)
        {
            _context.abort();
            throw new XmlException(e);
        }
        catch (SAXParseException e)
        {
            _context.abort();

            XmlError err =
                XmlError.forLocation(e.getMessage(),
                    (String) XmlOptions.safeGet(options,
                        XmlOptions.DOCUMENT_SOURCE_NAME),
                    e.getLineNumber(), e.getColumnNumber(), -1);

            throw new XmlException(err.toString(), e, err);
        }
        catch (SAXException e)
        {
            _context.abort();
            XmlError err = XmlError.forMessage(e.getMessage());
            throw new XmlException(err.toString(), e, err);
        }
        catch (RuntimeException e)
        {
            _context.abort();
            throw e;
        }
    }

    // ErrorHandler: all parse problems, including warnings, are fatal.
    public void fatalError(SAXParseException e)
        throws SAXException
    {
        throw e;
    }

    public void error(SAXParseException e)
        throws SAXException
    {
        throw e;
    }

    public void warning(SAXParseException e)
        throws SAXException
    {
        throw e;
    }

    private XMLReader _xr;
}
// Core load: pick the SAX loader for the options, parse, return the Dom.
private Dom load(InputSource is, XmlOptions options)
    throws XmlException, IOException
{
    return getSaxLoader(options).load(this, is, options).getDom();
}

public Dom load(Reader r)
    throws XmlException, IOException
{
    return load(r, null);
}

public Dom load(Reader r, XmlOptions options)
    throws XmlException, IOException
{
    return load(new InputSource(r), options);
}

public Dom load(InputStream in)
    throws XmlException, IOException
{
    return load(in, null);
}

public Dom load(InputStream in, XmlOptions options)
    throws XmlException, IOException
{
    return load(new InputSource(in), options);
}

public Dom load(String s)
    throws XmlException
{
    return load(s, null);
}

// Loads from an in-memory string.  A StringReader cannot really throw
// IOException; it is converted to an XmlException defensively.
public Dom load(String s, XmlOptions options)
    throws XmlException
{
    Reader r = new StringReader(s);

    try
    {
        return load(r, options);
    }
    catch (IOException e)
    {
        assert false: "StringReader should not throw IOException";

        throw new XmlException(e.getMessage(), e);
    }
    finally
    {
        try
        {
            r.close();
        }
        catch (IOException e)
        {
        }
    }
}
//
// DOMImplementation methods
//
// DOMImplementation: creates a new document with the given root element.
public Document createDocument(String uri, String qname,
    DocumentType doctype)
{
    return DomImpl._domImplementation_createDocument(this, uri, qname,
        doctype);
}

// DOMImplementation.createDocumentType is not supported here.
public DocumentType createDocumentType(String qname, String publicId,
    String systemId)
{
    throw new RuntimeException("Not implemented");
//        return DomImpl._domImplementation_createDocumentType( this, qname, publicId, systemId );
}

public boolean hasFeature(String feature, String version)
{
    return DomImpl._domImplementation_hasFeature(this, feature, version);
}

// DOM Level 3 getFeature is not supported.
public Object getFeature(String feature, String version)
{
    throw new RuntimeException("DOM Level 3 Not implemented");
}
//
// Dom methods
//
// Verifies n is a non-null XmlBeans-backed node and casts it to Dom;
// throws IllegalArgumentException otherwise.
private static Dom checkNode(Node n)
{
    if (n == null)
        throw new IllegalArgumentException("Node is null");

    if (!(n instanceof Dom))
        throw new IllegalArgumentException("Node is not an XmlBeans node");

    return (Dom) n;
}

// Adapts an XmlBeans DOM node to an XmlCursor.
public static XmlCursor nodeToCursor(Node n)
{
    return DomImpl._getXmlCursor(checkNode(n));
}

// Adapts an XmlBeans DOM node to its bound XmlObject.
public static XmlObject nodeToXmlObject(Node n)
{
    return DomImpl._getXmlObject(checkNode(n));
}

// Adapts an XmlBeans DOM node to an XMLStreamReader.
public static XMLStreamReader nodeToXmlStream(Node n)
{
    return DomImpl._getXmlStreamReader(checkNode(n));
}

// Builds a DOM node from an XMLStreamReader (JSR-173 bridge).
public static Node streamToNode(XMLStreamReader xs)
{
    return Jsr173.nodeFromStream(xs);
}
//
// SaajCallback methods
//
// SaajCallback: associates SAAJ-specific data with a node.
public void setSaajData(Node n, Object o)
{
    assert n instanceof Dom;

    DomImpl.saajCallback_setSaajData((Dom) n, o);
}

// SaajCallback: retrieves SAAJ-specific data previously stored on a node.
public Object getSaajData(Node n)
{
    assert n instanceof Dom;

    return DomImpl.saajCallback_getSaajData((Dom) n);
}

// SaajCallback: creates a SOAP element in this locale's owner document.
public Element createSoapElement(QName name, QName parentName)
{
    assert _ownerDoc != null;

    return DomImpl.saajCallback_createSoapElement(_ownerDoc, name,
        parentName);
}

// SaajCallback: imports a SOAP element into doc, optionally deep-copying.
public Element importSoapElement(Document doc, Element elem, boolean deep,
    QName parentName)
{
    assert doc instanceof Dom;

    return DomImpl.saajCallback_importSoapElement((Dom) doc, elem, deep,
        parentName);
}
// QNameFactory backed by the global XmlBeans QNameCache.  The cache is
// deliberately looked up on every call rather than held in a field.
private static final class DefaultQNameFactory
    implements QNameFactory
{
    private QNameCache getCache()
    {
        return XmlBeans.getQNameCache();
    }

    public QName getQName(String uri, String local)
    {
        return getCache().getName(uri, local, "");
    }

    public QName getQName(String uri, String local, String prefix)
    {
        return getCache().getName(uri, local, prefix);
    }

    public QName getQName(char[] uriSrc, int uriPos, int uriCch,
        char[] localSrc, int localPos, int localCch)
    {
        return
            getCache().getName(new String(uriSrc, uriPos, uriCch),
                new String(localSrc, localPos, localCch),
                "");
    }

    public QName getQName(char[] uriSrc, int uriPos, int uriCch,
        char[] localSrc, int localPos, int localCch,
        char[] prefixSrc, int prefixPos, int prefixCch)
    {
        return
            getCache().getName(new String(uriSrc, uriPos, uriCch),
                new String(localSrc, localPos, localCch),
                new String(prefixSrc, prefixPos, prefixCch));
    }
}
//
//
//
boolean _noSync;
SchemaTypeLoader _schemaTypeLoader;
private ReferenceQueue _refQueue;
private int _entryCount;
int _numTempFramesLeft;
Cur[] _tempFrames;
Cur _curPool;
int _curPoolCount;
Cur _registered;
ChangeListener _changeListeners;
long _versionAll;
long _versionSansText;
Locations _locations;
private CharUtil _charUtil;
int _offSrc;
int _cchSrc;
Saaj _saaj;
Dom _ownerDoc;
QNameFactory _qnameFactory;
boolean _validateOnSet;
int _posTemp;
nthCache _nthCache_A = new nthCache();
nthCache _nthCache_B = new nthCache();
domNthCache _domNthCache_A = new domNthCache();
domNthCache _domNthCache_B = new domNthCache();
}
| Revert DefaultQNameFactory changes.
git-svn-id: 297cb4147f50b389680bb5ad136787e97b9148ae@720988 13f79535-47bb-0310-9956-ffa450edef68
| src/store/org/apache/xmlbeans/impl/store/Locale.java | Revert DefaultQNameFactory changes. | <ide><path>rc/store/org/apache/xmlbeans/impl/store/Locale.java
<ide> private static final class DefaultQNameFactory
<ide> implements QNameFactory
<ide> {
<del> private QNameCache getCache()
<del> {
<del> return XmlBeans.getQNameCache();
<del> }
<add> private QNameCache _cache = XmlBeans.getQNameCache();
<ide>
<ide> public QName getQName(String uri, String local)
<ide> {
<del> return getCache().getName(uri, local, "");
<add> return _cache.getName(uri, local, "");
<ide> }
<ide>
<ide> public QName getQName(String uri, String local, String prefix)
<ide> {
<del> return getCache().getName(uri, local, prefix);
<add> return _cache.getName(uri, local, prefix);
<ide> }
<ide>
<ide> public QName getQName(char[] uriSrc, int uriPos, int uriCch,
<ide> char[] localSrc, int localPos, int localCch)
<ide> {
<ide> return
<del> getCache().getName(new String(uriSrc, uriPos, uriCch),
<add> _cache.getName(new String(uriSrc, uriPos, uriCch),
<ide> new String(localSrc, localPos, localCch),
<ide> "");
<ide> }
<ide> char[] prefixSrc, int prefixPos, int prefixCch)
<ide> {
<ide> return
<del> getCache().getName(new String(uriSrc, uriPos, uriCch),
<add> _cache.getName(new String(uriSrc, uriPos, uriCch),
<ide> new String(localSrc, localPos, localCch),
<ide> new String(prefixSrc, prefixPos, prefixCch));
<ide> } |
|
Java | apache-2.0 | fbac794eb4f807f20fca341832d5500ce38efcb3 | 0 | CSchulz/arquillian-extension-warp,arquillian/arquillian-extension-warp,petrandreev/arquillian-extension-warp,arquillian/arquillian-extension-warp,CSchulz/arquillian-extension-warp | /**
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat Middleware LLC, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.arquillian.warp.impl.client.execution;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.jboss.arquillian.core.spi.ServiceLoader;
import org.jboss.arquillian.test.spi.TestResult;
import org.jboss.arquillian.warp.client.result.WarpGroupResult;
import org.jboss.arquillian.warp.client.result.WarpResult;
import org.jboss.arquillian.warp.impl.shared.ResponsePayload;
import org.jboss.arquillian.warp.spi.observer.RequestObserverChainManager;
/**
* Context of Warp execution which makes available executed groups, holds exceptions and execution results
*
* @author Lukas Fryc
*/
public class WarpContextImpl implements WarpContext {
private Map<Object, WarpGroup> groups = new HashMap<Object, WarpGroup>();
private Queue<Exception> exceptions = new ConcurrentLinkedQueue<Exception>();
private SynchronizationPoint synchronization = new SynchronizationPoint();
private List<RequestObserverChainManager> observerChainManagers;
@Override
public void addGroup(WarpGroup group) {
groups.put(group.getId(), group);
}
@Override
public Collection<WarpGroup> getAllGroups() {
return groups.values();
}
@Override
public WarpGroup getGroup(Object identifier) {
return groups.get(identifier);
}
public Collection<RequestObserverChainManager> getObserverChainManagers() {
return observerChainManagers;
}
@Override
public TestResult getFirstNonSuccessfulResult() {
for (WarpGroup group : getAllGroups()) {
TestResult result = group.getFirstNonSuccessfulResult();
if (result != null) {
return result;
}
}
return null;
}
@Override
public void pushResponsePayload(ResponsePayload payload) {
for (WarpGroup group : groups.values()) {
if (group.pushResponsePayload(payload)) {
synchronization.finishOneResponse();
return;
}
}
throw new IllegalStateException("There was no group found for given response payload");
}
@Override
public void pushException(Exception exception) {
exceptions.add(exception);
synchronization.finishAll();
}
@Override
public Exception getFirstException() {
return exceptions.peek();
}
@Override
public SynchronizationPoint getSynchronization() {
return synchronization;
}
@Override
public WarpResult getResult() {
return new WarpResult() {
@Override
public WarpGroupResult getGroup(Object identifier) {
return groups.get(identifier);
}
};
}
@Override
public void initialize(ServiceLoader serviceLoader) {
// load observer chain managers and sort them by priority
observerChainManagers = new LinkedList<RequestObserverChainManager>(serviceLoader.all(RequestObserverChainManager.class));
Collections.sort(observerChainManagers, new Comparator<RequestObserverChainManager>() {
public int compare(RequestObserverChainManager o1, RequestObserverChainManager o2) {
return o1.priotity() - o2.priotity();
}
});
}
@Override
public int getExpectedRequestCount() {
int count = 0;
for (WarpGroup group : getAllGroups()) {
count += group.getExpectedRequestCount();
}
return count;
}
} | impl/src/main/java/org/jboss/arquillian/warp/impl/client/execution/WarpContextImpl.java | /**
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat Middleware LLC, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.arquillian.warp.impl.client.execution;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.jboss.arquillian.core.spi.ServiceLoader;
import org.jboss.arquillian.test.spi.TestResult;
import org.jboss.arquillian.warp.client.result.WarpGroupResult;
import org.jboss.arquillian.warp.client.result.WarpResult;
import org.jboss.arquillian.warp.impl.shared.ResponsePayload;
import org.jboss.arquillian.warp.spi.observer.RequestObserverChainManager;
public class WarpContextImpl implements WarpContext {
private Map<Object, WarpGroup> groups = new HashMap<Object, WarpGroup>();
private Queue<Exception> exceptions = new ConcurrentLinkedQueue<Exception>();
private SynchronizationPoint synchronization = new SynchronizationPoint();
private List<RequestObserverChainManager> observerChainManagers;
@Override
public void addGroup(WarpGroup group) {
groups.put(group.getId(), group);
}
@Override
public Collection<WarpGroup> getAllGroups() {
return groups.values();
}
@Override
public WarpGroup getGroup(Object identifier) {
return groups.get(identifier);
}
public Collection<RequestObserverChainManager> getObserverChainManagers() {
return observerChainManagers;
}
@Override
public TestResult getFirstNonSuccessfulResult() {
for (WarpGroup group : getAllGroups()) {
TestResult result = group.getFirstNonSuccessfulResult();
if (result != null) {
return result;
}
}
return null;
}
@Override
public void pushResponsePayload(ResponsePayload payload) {
for (WarpGroup group : groups.values()) {
if (group.pushResponsePayload(payload)) {
synchronization.finishOneResponse();
return;
}
}
throw new IllegalStateException("There was no group found for given response payload");
}
@Override
public void pushException(Exception exception) {
exceptions.add(exception);
synchronization.finishAll();
}
@Override
public Exception getFirstException() {
return exceptions.peek();
}
@Override
public SynchronizationPoint getSynchronization() {
return synchronization;
}
@Override
public WarpResult getResult() {
return new WarpResult() {
@Override
public WarpGroupResult getGroup(Object identifier) {
return groups.get(identifier);
}
};
}
@Override
public void initialize(ServiceLoader serviceLoader) {
// load observer chain managers and sort them by priority
observerChainManagers = new LinkedList<RequestObserverChainManager>(serviceLoader.all(RequestObserverChainManager.class));
Collections.sort(observerChainManagers, new Comparator<RequestObserverChainManager>() {
public int compare(RequestObserverChainManager o1, RequestObserverChainManager o2) {
return o1.priotity() - o2.priotity();
}
});
}
@Override
public int getExpectedRequestCount() {
int count = 0;
for (WarpGroup group : getAllGroups()) {
count += group.getExpectedRequestCount();
}
return count;
}
} | added JavaDoc to WarpContextImpl
| impl/src/main/java/org/jboss/arquillian/warp/impl/client/execution/WarpContextImpl.java | added JavaDoc to WarpContextImpl | <ide><path>mpl/src/main/java/org/jboss/arquillian/warp/impl/client/execution/WarpContextImpl.java
<ide> import org.jboss.arquillian.warp.impl.shared.ResponsePayload;
<ide> import org.jboss.arquillian.warp.spi.observer.RequestObserverChainManager;
<ide>
<add>/**
<add> * Context of Warp execution which makes available executed groups, holds exceptions and execution results
<add> *
<add> * @author Lukas Fryc
<add> */
<ide> public class WarpContextImpl implements WarpContext {
<ide>
<ide> private Map<Object, WarpGroup> groups = new HashMap<Object, WarpGroup>(); |
|
JavaScript | mit | ad98f62c5d37589b38d49cebb7255a60a24f769b | 0 | node-modli/modli-dynamodb | const Promise = require('bluebird');
const _ = require('lodash');
import { tables } from './dynamo-data';
import { helpers } from './helpers';
let AWS = require('aws-sdk');
let DOC = require('dynamodb-doc');
/**
* Class constructor
* @class dynamodb
*/
export default class {
constructor(config) {
this.schemas = {};
const dynDb = new AWS.DynamoDB(config);
this.ddb = new DOC.DynamoDB(dynDb);
Promise.promisifyAll(this.ddb);
}
/**
* Sets the schema for the model
* @memberof dynamodb
* @param {String} version The version of the model
* @param {Object} schema Json Object with schema information
*/
setSchema(schema, version) {
this.defaultVersion = version;
this.schemas[version] = schema;
}
/**
* Returns the active schema
* @memberof dynamodb
* @returns {Object} Current JSON Schema Object
*/
getSchema() {
return this.schemas;
}
/**
* Generates a secondary index for a new table
* @memberof dynamodb
* @param {Object} Parameters to deterministically generate a secondary index
* @returns {Object} New Index
*/
generateSecondaryIndex(params) {
let newIndex = _.clone(tables.secondaryIndex, true);
if (params.projectionType) {
newIndex.Projection.ProjectionType = params.projectionType;
if (params.nonKeyAttributes) {
/* istanbul ignore else */
if (params.projectionType === 'INCLUDE') {
newIndex.Projection.NonKeyAttributes = params.nonKeyAttributes;
}
delete params.nonKeyAttributes;
}
delete params.projectionType;
}
newIndex.IndexName = params.value + '-index';
newIndex.KeySchema.push(this.generateKey(params));
return newIndex;
}
/**
* Generates a definition for a create call
* @memberof dynamodb
* @param {Object} Parameters to deterministically generate a definition
* @returns {Object} New Definition
*/
generateDefinition(params) {
let newDefinition = _.clone(tables.attribute, true);
newDefinition.AttributeName = params.value;
newDefinition.AttributeType = params.type;
return newDefinition;
}
/**
* Generates a key for a create call
* @memberof dynamodb
* @param {Object} params Parameters to deterministically generate a key
* @returns {Object} New attribute
*/
generateKey(params) {
let newAtrribute = _.clone(tables.keyData, true);
newAtrribute.AttributeName = params.value;
newAtrribute.KeyType = (params.keytype === 'range') ? 'RANGE' : 'HASH';
return newAtrribute;
}
/**
* Creates a new entry in the database
* @memberof dynamodb
* @param {Object} body Contents to create entry
* @returns {Object} promise
*/
create(body, paramVersion = false) {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
return this.validate(body, version)
.then(data => {
return this.ddb.putItemAsync({
TableName: this.schemas[version].tableName,
ReturnValues: 'NONE',
Item: data
})
.then(() => data);
});
}
/**
* Calls create table using explicit table creation parameters
* @memberof dynamodb
* @param {Object} body Contents to create table
* @returns {Object} promise
*/
createTable(params) {
return new Promise((resolve, reject) => {
this.ddb.listTables({}, (err, foundTables) => {
let tableList;
/* istanbul ignore next */
tableList = foundTables || { TableNames: [] };
if (_.contains(tableList.TableNames, params.TableName)) {
resolve({TableName: params.TableName, existed: true});
} else {
this.ddb.createTable(params, (createErr, res) => {
if (createErr) {
reject(createErr);
} else {
resolve(res);
}
});
}
});
});
}
/**
* Deterministic method to call create table using the model as the reference
* @memberof dynamodb
* @returns {Object} promise
*/
createTableFromModel(paramVersion = false) {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
let newTable = _.clone(tables.table, true);
newTable.Table.TableName = this.schemas[version].tableName;
_.each(this.schemas[version].indexes, (row) => {
newTable.Table.AttributeDefinitions.push(this.generateDefinition(row));
if (row.keytype === 'hash' || row.keytype === 'range') {
newTable.Table.KeySchema.push(this.generateKey(row));
} else if (row.keytype === 'secondary') {
newTable.Table.GlobalSecondaryIndexes.push(this.generateSecondaryIndex(row));
} else {
return new Error({ error: 'Model has invalid index'});
}
});
if (newTable.Table.GlobalSecondaryIndexes.length < 1) {
delete (newTable.Table.GlobalSecondaryIndexes);
}
return this.createTable(newTable.Table);
}
/**
* Deterministic method to call create table using the model as the reference
* @memberof dynamodb
* @param {Object} params Parameters to find table and delete by
* @property {TableName: VALUE}
* @returns {Object} promise
*/
deleteTable(params) {
return new Promise((resolve, reject) => {
this.ddb.deleteTable(params, (err, res) => {
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
}
/**
* Creates a compatible filter to apply to scanned returns
* @memberof dynamodb
* @returns {Object} scan ready filter object
*/
createFilter(table, filterObject) {
const newFilter = { 'TableName': table };
const returnFilter = _.extend(newFilter, helpers.createExpression(newFilter, filterObject));
return returnFilter;
}
/**
* Performs a full unfiltered scan
* @memberof dynamodb
* @param {Object} filterObject Filter criteria
* @param {Object} options Miscellaneous options like version, limit or lastKey (for pagination)
* @returns {Object} promise
*/
scan(filterObject, options = {}) {
return new Promise((resolve, reject) => {
let opts = {};
opts.version = options.version || false;
opts.limit = options.limit || 1000;
opts.lastKey = options.lastKey || false;
const version = (opts.version === false) ? this.defaultVersion : opts.version;
const table = this.schemas[version].tableName;
let scanObject = {'TableName': table};
if (filterObject) {
scanObject = this.createFilter(table, filterObject);
}
// Set after createFilter() is called above
scanObject.Limit = opts.limit;
if (opts.lastKey) {
try {
scanObject.ExclusiveStartKey = JSON.parse(opts.lastKey);
} catch (err) {
reject(err);
}
}
this.ddb.scan(scanObject, (err, res) => {
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
}
/**
* Gets a list of available tables
* @memberof dynamodb
* @returns {Object} promise
*/
list() {
return new Promise((resolve, reject) => {
this.ddb.listTables({}, (err, res) => {
/* istanbul ignore if */
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
}
/**
* Deterministic method to read a value from an object.
* @memberof dynamodb
* @param {Object} obj
* @property {string} hash/index - Example { authId: '1234'}
* @returns {Object} promise
*/
read(obj, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const key = Object.keys(obj)[0];
let itemPromise = null;
let type = null;
_.each(this.schemas[version].indexes, function (row) {
if (row.value === key) {
type = row.keytype;
return false;
}
});
if (!type) {
reject(new Error('No type'));
} else {
if (type === 'hash') {
itemPromise = this.getItemByHash(obj, version);
} else {
itemPromise = this.getItemById(obj, version);
}
}
resolve(itemPromise);
});
}
/**
* Reads from the database by secondary index with pagination capabilities
* @memberof dynamodb
* @param {Object} obj The object to search by secondary index on
* @property {string} hash/index - Example { authId: '1234'}
* @param {Object} options Miscellaneous options like version, limit or lastKey (for pagination)
* @returns {Object} promise
*/
readPaginate(obj, options = {}) {
return new Promise((resolve, reject) => {
let opts = {};
opts.version = options.version || false;
opts.limit = options.limit || 1000;
opts.lastKey = options.lastKey || false;
const version = (opts.version === false) ? this.defaultVersion : opts.version;
const table = this.schemas[version].tableName;
const key = Object.keys(obj)[0];
const params = {
TableName: table,
IndexName: key + '-index',
KeyConditionExpression: key + ' = :hk_val',
ExpressionAttributeValues: {
':hk_val': obj[key]
},
Limit: opts.limit
};
if (opts.lastKey) {
try {
params.ExclusiveStartKey = JSON.parse(opts.lastKey);
} catch (err) {
reject(err);
}
}
this.ddb.query(params, (err, data) => {
if (err) {
reject(err);
} else {
let response = _.cloneDeep(data);
let returnValue = [];
const sanitize = this.sanitize;
_.each(response.Items, function (row) {
returnValue.push(sanitize(row));
});
response.Items = returnValue;
resolve(response);
}
});
});
}
/**
* Reads from the database by secondary index
* @memberof dynamodb
* @param {Object} obj The object to search by secondary index on
* @property {string} hash/index - Example { authId: '1234'}
* @returns {Object} promise
*/
getItemById(obj, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const table = this.schemas[version].tableName;
const key = Object.keys(obj)[0];
const params = {
TableName: table,
IndexName: key + '-index',
KeyConditionExpression: key + ' = :hk_val',
ExpressionAttributeValues: {
':hk_val': obj[key]
}
};
this.ddb.query(params, (err, data) => {
if (err) {
reject(err);
} else {
let returnValue = [];
const sanitize = this.sanitize;
_.each(data.Items, function (row) {
returnValue.push(sanitize(row));
});
resolve(returnValue);
}
});
});
}
/**
* Returns a list of objects in an array
* @memberof dynamodb
* @param {String} hash Name of the hash to search on
* @param {Array} array Array of values to search in
* @returns {Object} promise
*/
getItemsInArray(hash, array, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
if (!array) {
reject(new Error('Array empty'));
}
if (array.length < 1) {
reject(new Error('Array contained no values'));
}
const table = this.schemas[version].tableName;
let params = {
RequestItems: {}
};
params.RequestItems[table] = {
Keys: []
};
_.each(array, (val) => {
let newObj = Object.create({});
newObj[hash] = val;
params.RequestItems[table].Keys.push(newObj);
});
this.ddb.batchGetItem(params, (err, data) => {
if (err) {
reject(new Error(err));
} else {
let returnArray = [];
const sanitize = this.sanitize;
_.each(data, function (row) {
returnArray.push(sanitize(row));
});
resolve(returnArray);
}
});
});
}
/**
* Reads from the database by hash value
* @memberof dynamodb
* @param {Object} obj The hash object to search by
* @property {string} hash/index - Example { authId: '1234'}
* @returns {Object} promise
*/
getItemByHash(obj, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const table = this.schemas[version].tableName;
const keys = Object.keys(obj);
let params = {
TableName: table,
Key: {}
};
// Allows for HASH and possible RANGE key
keys.forEach((key) => {
params.Key[key] = obj[key];
});
this.ddb.getItem(params, (err, data) => {
if (err) {
reject(new Error(err));
} else {
resolve(this.sanitize(data.Item));
}
});
});
}
/**
* Updates an entry in the database
* @memberof dynamodb
* @param {Object} hashObject The object to search for to update
* @property {string} hash/index - Example { authId: '1234'}
* @param {Object} updatedValuesArray An array of values to update on the found row
* @returns {Object} promise
*/
update(hashObject, updatedValuesArray, paramVersion = false) {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
return this.validate(updatedValuesArray, version)
.then(data => {
const keys = Object.keys(hashObject);
// Allows for HASH and possible RANGE key
keys.forEach((key) => {
if (data[key]) {
delete data[key];
}
});
const table = this.schemas[version].tableName;
let params = {
TableName: table,
Key: {},
// Assume a minimum of one param set
UpdateExpression: 'SET #param1 = :val1',
ExpressionAttributeNames: {},
ExpressionAttributeValues: {},
ReturnValues: 'ALL_NEW',
ReturnConsumedCapacity: 'NONE',
ReturnItemCollectionMetrics: 'NONE'
};
// Allows for HASH and possible RANGE key
keys.forEach((key) => {
params.Key[key] = hashObject[key];
});
let i = 0;
Object.keys(data).forEach((valueKey) => {
i++;
params.ExpressionAttributeNames['#param' + i] = valueKey;
params.ExpressionAttributeValues[':val' + i] = data[valueKey];
if (i > 1) {
params.UpdateExpression += ', #param' + i + ' = :val' + i;
}
});
return this.ddb.updateItemAsync(params).then(results => results.Attributes);
});
}
/**
* Deletes an item from the database
* @memberof dynamodb
* @param {Object} hashObject The object to find by hash, to delete
* @property {string} hash/index - Example { authId: '1234'}
* @returns {Object} promise
*/
delete(hashObject, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const keys = Object.keys(hashObject);
const table = this.schemas[version].tableName;
let params = {
TableName: table,
Key: {}
};
// Allows for HASH and possible RANGE key
keys.forEach((key) => {
params.Key[key] = hashObject[key];
});
this.ddb.deleteItem(params, (err, data) => {
if (err) {
reject(new Error(err));
} else {
resolve(data);
}
});
});
}
/**
* Extends the dynamo object
* @memberof dynamodb
* @param {String} name The name of the method
* @param {Function} fn The function to extend on the object
*/
extend(name, fn) {
this[name] = fn.bind(this);
}
}
| src/index.js | const Promise = require('bluebird');
const _ = require('lodash');
import { tables } from './dynamo-data';
import { helpers } from './helpers';
let AWS = require('aws-sdk');
let DOC = require('dynamodb-doc');
/**
* Class constructor
* @class dynamodb
*/
export default class {
constructor(config) {
this.schemas = {};
const dynDb = new AWS.DynamoDB(config);
this.ddb = new DOC.DynamoDB(dynDb);
Promise.promisifyAll(this.ddb);
}
/**
* Sets the schema for the model
* @memberof dynamodb
* @param {String} version The version of the model
* @param {Object} schema Json Object with schema information
*/
setSchema(schema, version) {
this.defaultVersion = version;
this.schemas[version] = schema;
}
/**
* Returns the active schema
* @memberof dynamodb
* @returns {Object} Current JSON Schema Object
*/
getSchema() {
return this.schemas;
}
/**
* Generates a secondary index for a new table
* @memberof dynamodb
* @param {Object} Parameters to deterministically generate a secondary index
* @returns {Object} New Index
*/
generateSecondaryIndex(params) {
let newIndex = _.clone(tables.secondaryIndex, true);
if (params.projectionType) {
newIndex.Projection.ProjectionType = params.projectionType;
if (params.nonKeyAttributes) {
/* istanbul ignore else */
if (params.projectionType === 'INCLUDE') {
newIndex.Projection.NonKeyAttributes = params.nonKeyAttributes;
}
delete params.nonKeyAttributes;
}
delete params.projectionType;
}
newIndex.IndexName = params.value + '-index';
newIndex.KeySchema.push(this.generateKey(params));
return newIndex;
}
/**
* Generates a definition for a create call
* @memberof dynamodb
* @param {Object} Parameters to deterministically generate a definition
* @returns {Object} New Definition
*/
generateDefinition(params) {
let newDefinition = _.clone(tables.attribute, true);
newDefinition.AttributeName = params.value;
newDefinition.AttributeType = params.type;
return newDefinition;
}
/**
* Generates a key for a create call
* @memberof dynamodb
* @param {Object} params Parameters to deterministically generate a key
* @returns {Object} New attribute
*/
generateKey(params) {
let newAtrribute = _.clone(tables.keyData, true);
newAtrribute.AttributeName = params.value;
newAtrribute.KeyType = (params.keytype === 'range') ? 'RANGE' : 'HASH';
return newAtrribute;
}
/**
* Creates a new entry in the database
* @memberof dynamodb
* @param {Object} body Contents to create entry
* @returns {Object} promise
*/
create(body, paramVersion = false) {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
return this.validate(body, version)
.then(data => {
return this.ddb.putItemAsync({
TableName: this.schemas[version].tableName,
ReturnValues: 'NONE',
Item: data
})
.then(() => data);
});
}
/**
* Calls create table using explicit table creation parameters
* @memberof dynamodb
* @param {Object} body Contents to create table
* @returns {Object} promise
*/
createTable(params) {
return new Promise((resolve, reject) => {
this.ddb.listTables({}, (err, foundTables) => {
let tableList;
/* istanbul ignore next */
tableList = foundTables || { TableNames: [] };
if (_.contains(tableList.TableNames, params.TableName)) {
resolve({TableName: params.TableName, existed: true});
} else {
this.ddb.createTable(params, (createErr, res) => {
if (createErr) {
reject(createErr);
} else {
resolve(res);
}
});
}
});
});
}
/**
* Deterministic method to call create table using the model as the reference
* @memberof dynamodb
* @returns {Object} promise
*/
createTableFromModel(paramVersion = false) {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
let newTable = _.clone(tables.table, true);
newTable.Table.TableName = this.schemas[version].tableName;
_.each(this.schemas[version].indexes, (row) => {
newTable.Table.AttributeDefinitions.push(this.generateDefinition(row));
if (row.keytype === 'hash' || row.keytype === 'range') {
newTable.Table.KeySchema.push(this.generateKey(row));
} else if (row.keytype === 'secondary') {
newTable.Table.GlobalSecondaryIndexes.push(this.generateSecondaryIndex(row));
} else {
return new Error({ error: 'Model has invalid index'});
}
});
if (newTable.Table.GlobalSecondaryIndexes.length < 1) {
delete (newTable.Table.GlobalSecondaryIndexes);
}
return this.createTable(newTable.Table);
}
/**
* Deterministic method to call create table using the model as the reference
* @memberof dynamodb
* @param {Object} params Parameters to find table and delete by
* @property {TableName: VALUE}
* @returns {Object} promise
*/
deleteTable(params) {
return new Promise((resolve, reject) => {
this.ddb.deleteTable(params, (err, res) => {
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
}
/**
* Creates a compatible filter to apply to scanned returns
* @memberof dynamodb
* @returns {Object} scan ready filter object
*/
createFilter(table, filterObject) {
const newFilter = { 'TableName': table };
const returnFilter = _.extend(newFilter, helpers.createExpression(newFilter, filterObject));
return returnFilter;
}
/**
* Performs a full unfiltered scan
* @memberof dynamodb
* @param {Object} filterObject Filter criteria
* @param {Object} options Miscellaneous options like version, limit or lastKey (for pagination)
* @returns {Object} promise
*/
scan(filterObject, options = {}) {
return new Promise((resolve, reject) => {
let opts = {};
opts.version = options.version || false;
opts.limit = options.limit || 1000;
opts.lastKey = options.lastKey || false;
const version = (opts.version === false) ? this.defaultVersion : opts.version;
const table = this.schemas[version].tableName;
let scanObject = {'TableName': table};
if (filterObject) {
scanObject = this.createFilter(table, filterObject);
}
// Set after createFilter() is called above
scanObject.Limit = opts.limit;
if (opts.lastKey) {
try {
scanObject.ExclusiveStartKey = JSON.parse(opts.lastKey);
} catch (err) {
reject(err);
}
}
this.ddb.scan(scanObject, (err, res) => {
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
}
/**
* Gets a list of available tables
* @memberof dynamodb
* @returns {Object} promise
*/
list() {
return new Promise((resolve, reject) => {
this.ddb.listTables({}, (err, res) => {
/* istanbul ignore if */
if (err) {
reject(err);
} else {
resolve(res);
}
});
});
}
/**
* Deterministic method to read a value from an object.
* @memberof dynamodb
* @param {Object} obj
* @property {string} hash/index - Example { authId: '1234'}
* @returns {Object} promise
*/
read(obj, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const key = Object.keys(obj)[0];
let itemPromise = null;
let type = null;
_.each(this.schemas[version].indexes, function (row) {
if (row.value === key) {
type = row.keytype;
return false;
}
});
if (!type) {
reject(new Error('No type'));
} else {
if (type === 'hash') {
itemPromise = this.getItemByHash(obj, version);
} else {
itemPromise = this.getItemById(obj, version);
}
}
resolve(itemPromise);
});
}
/**
* Reads from the database by secondary index with pagination capabilities
* @memberof dynamodb
* @param {Object} obj The object to search by secondary index on
* @property {string} hash/index - Example { authId: '1234'}
* @param {Object} options Miscellaneous options like version, limit or lastKey (for pagination)
* @returns {Object} promise
*/
readPaginate(obj, options = {}) {
return new Promise((resolve, reject) => {
let opts = {};
opts.version = options.version || false;
opts.limit = options.limit || 1000;
opts.lastKey = options.lastKey || false;
const version = (opts.version === false) ? this.defaultVersion : opts.version;
const table = this.schemas[version].tableName;
const key = Object.keys(obj)[0];
const params = {
TableName: table,
IndexName: key + '-index',
KeyConditionExpression: key + ' = :hk_val',
ExpressionAttributeValues: {
':hk_val': obj[key]
},
Limit: opts.limit
};
if (opts.lastKey) {
try {
params.ExclusiveStartKey = JSON.parse(opts.lastKey);
} catch (err) {
reject(err);
}
}
this.ddb.query(params, (err, data) => {
if (err) {
reject(err);
} else {
let response = _.cloneDeep(data);
let returnValue = [];
const sanitize = this.sanitize;
_.each(response.Items, function (row) {
returnValue.push(sanitize(row));
});
response.Items = returnValue;
resolve(response);
}
});
});
}
/**
* Reads from the database by secondary index
* @memberof dynamodb
* @param {Object} obj The object to search by secondary index on
* @property {string} hash/index - Example { authId: '1234'}
* @returns {Object} promise
*/
getItemById(obj, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const table = this.schemas[version].tableName;
const key = Object.keys(obj)[0];
const params = {
TableName: table,
IndexName: key + '-index',
KeyConditionExpression: key + ' = :hk_val',
ExpressionAttributeValues: {
':hk_val': obj[key]
}
};
this.ddb.query(params, (err, data) => {
if (err) {
reject(err);
} else {
let returnValue = [];
const sanitize = this.sanitize;
_.each(data.Items, function (row) {
returnValue.push(sanitize(row));
});
resolve(returnValue);
}
});
});
}
/**
* Returns a list of objects in an array
* @memberof dynamodb
* @param {String} hash Name of the hash to search on
* @param {Array} array Array of values to search in
* @returns {Object} promise
*/
getItemsInArray(hash, array, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
if (!array) {
reject(new Error('Array empty'));
}
if (array.length < 1) {
reject(new Error('Array contained no values'));
}
const table = this.schemas[version].tableName;
let params = {
RequestItems: {}
};
params.RequestItems[table] = {
Keys: []
};
_.each(array, (val) => {
let newObj = Object.create({});
newObj[hash] = val;
params.RequestItems[table].Keys.push(newObj);
});
this.ddb.batchGetItem(params, (err, data) => {
if (err) {
reject(new Error(err));
} else {
let returnArray = [];
const sanitize = this.sanitize;
_.each(data, function (row) {
returnArray.push(sanitize(row));
});
resolve(returnArray);
}
});
});
}
/**
* Reads from the database by hash value
* @memberof dynamodb
* @param {Object} obj The hash object to search by
* @property {string} hash/index - Example { authId: '1234'}
* @returns {Object} promise
*/
getItemByHash(obj, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const table = this.schemas[version].tableName;
const keys = Object.keys(obj);
let params = {
TableName: table,
Key: {}
};
// Allows for HASH and possible RANGE key
keys.forEach((key) => {
params.Key[key] = obj[key];
});
this.ddb.getItem(params, (err, data) => {
if (err) {
reject(new Error(err));
} else {
resolve(this.sanitize(data.Item));
}
});
});
}
/**
* Updates an entry in the database
* @memberof dynamodb
* @param {Object} hashObject The object to search for to update
* @property {string} hash/index - Example { authId: '1234'}
* @param {Object} updatedValuesArray An array of values to update on the found row
* @returns {Object} promise
*/
update(hashObject, updatedValuesArray, paramVersion = false) {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const keys = Object.keys(hashObject);
// Allows for HASH and possible RANGE key
keys.forEach((key) => {
if (updatedValuesArray[key]) {
delete updatedValuesArray[key];
}
});
return this.validate(updatedValuesArray, version)
.then(data => {
const table = this.schemas[version].tableName;
let params = {
TableName: table,
Key: {},
// Assume a minimum of one param set
UpdateExpression: 'SET #param1 = :val1',
ExpressionAttributeNames: {},
ExpressionAttributeValues: {},
ReturnValues: 'ALL_NEW',
ReturnConsumedCapacity: 'NONE',
ReturnItemCollectionMetrics: 'NONE'
};
// Allows for HASH and possible RANGE key
keys.forEach((key) => {
params.Key[key] = hashObject[key];
});
let i = 0;
Object.keys(data).forEach((valueKey) => {
i++;
params.ExpressionAttributeNames['#param' + i] = valueKey;
params.ExpressionAttributeValues[':val' + i] = data[valueKey];
if (i > 1) {
params.UpdateExpression += ', #param' + i + ' = :val' + i;
}
});
return this.ddb.updateItemAsync(params).then(results => results.Attributes);
});
}
/**
* Deletes an item from the database
* @memberof dynamodb
* @param {Object} hashObject The object to find by hash, to delete
* @property {string} hash/index - Example { authId: '1234'}
* @returns {Object} promise
*/
delete(hashObject, paramVersion = false) {
return new Promise((resolve, reject) => {
const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
const keys = Object.keys(hashObject);
const table = this.schemas[version].tableName;
let params = {
TableName: table,
Key: {}
};
// Allows for HASH and possible RANGE key
keys.forEach((key) => {
params.Key[key] = hashObject[key];
});
this.ddb.deleteItem(params, (err, data) => {
if (err) {
reject(new Error(err));
} else {
resolve(data);
}
});
});
}
/**
* Extends the dynamo object
* @memberof dynamodb
* @param {String} name The name of the method
* @param {Function} fn The function to extend on the object
*/
extend(name, fn) {
this[name] = fn.bind(this);
}
}
| Minor cleanup OoO for update
| src/index.js | Minor cleanup OoO for update | <ide><path>rc/index.js
<ide> update(hashObject, updatedValuesArray, paramVersion = false) {
<ide> const version = (paramVersion === false) ? this.defaultVersion : paramVersion;
<ide>
<del> const keys = Object.keys(hashObject);
<del> // Allows for HASH and possible RANGE key
<del> keys.forEach((key) => {
<del> if (updatedValuesArray[key]) {
<del> delete updatedValuesArray[key];
<del> }
<del> });
<del>
<ide> return this.validate(updatedValuesArray, version)
<ide> .then(data => {
<add> const keys = Object.keys(hashObject);
<add> // Allows for HASH and possible RANGE key
<add> keys.forEach((key) => {
<add> if (data[key]) {
<add> delete data[key];
<add> }
<add> });
<add>
<ide> const table = this.schemas[version].tableName;
<ide>
<ide> let params = { |
|
Java | mit | 81f3d74477975f1698fb12851a0842ff31897ca8 | 0 | mhogrefe/wheels | package mho.wheels.math;
import mho.wheels.iterables.ExhaustiveProvider;
import mho.wheels.iterables.IterableProvider;
import mho.wheels.iterables.RandomProvider;
import mho.wheels.ordering.Ordering;
import mho.wheels.structures.Pair;
import org.jetbrains.annotations.NotNull;
import java.math.BigInteger;
import static mho.wheels.iterables.IterableUtils.*;
import static mho.wheels.math.BinaryFraction.*;
import static mho.wheels.math.BinaryFraction.of;
import static mho.wheels.testing.Testing.*;
@SuppressWarnings({"UnusedDeclaration", "ConstantConditions"})
public class BinaryFractionDemos {
private static final boolean USE_RANDOM = false;
private static final @NotNull String BINARY_FRACTION_CHARS = " -0123456789<>";
private static int LIMIT;
private static IterableProvider P;
private static void initialize() {
if (USE_RANDOM) {
P = RandomProvider.example();
LIMIT = 1000;
} else {
P = ExhaustiveProvider.INSTANCE;
LIMIT = 10000;
}
}
private static void demoGetMantissa() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("getMantissa(" + bf + ") = " + bf.getMantissa());
}
}
private static void demoGetExponent() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("getExponent(" + bf + ") = " + bf.getExponent());
}
}
private static void demoOf_BigInteger_int() {
initialize();
Iterable<Pair<BigInteger, Integer>> ps = filter(
p -> (long) p.b + p.a.getLowestSetBit() < Integer.MAX_VALUE,
P.pairs(P.bigIntegers(), P.integers())
);
for (Pair<BigInteger, Integer> p : take(LIMIT, ps)) {
System.out.println("of(" + p.a + ", " + p.b + ") = " + of(p.a, p.b));
}
}
private static void demoOf_BigInteger() {
initialize();
for (BigInteger i : take(LIMIT, P.bigIntegers())) {
System.out.println("of(" + i + ") = " + of(i));
}
}
private static void demoOf_int() {
initialize();
for (int i : take(LIMIT, P.integers())) {
System.out.println("of(" + i + ") = " + of(i));
}
}
private static void demoOf_float() {
initialize();
for (float f : take(LIMIT, P.floats())) {
System.out.println("of(" + f + ") = " + of(f));
}
}
private static void demoOf_double() {
initialize();
for (double d : take(LIMIT, P.doubles())) {
System.out.println("of(" + d + ") = " + of(d));
}
}
private static void demoBigDecimalValue() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("bigDecimalValue(" + bf + ") = " + bf.bigDecimalValue());
}
}
private static void demoIsInteger() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println(bf + " is " + (bf.isInteger() ? "" : "not ") + "an integer");
}
}
private static void demoAdd() {
initialize();
Iterable<Pair<BinaryFraction, BinaryFraction>> ps = filter(
p -> {
try {
p.a.add(p.b);
return true;
} catch (ArithmeticException e) {
return false;
}
},
P.pairs(P.binaryFractions())
);
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, ps)) {
System.out.println("(" + p.a + ") + (" + p.b + ") = " + p.a.add(p.b));
}
}
private static void demoNegate() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("-(" + bf + ") = " + bf.negate());
}
}
private static void demoAbs() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("|" + bf + "| = " + bf.abs());
}
}
private static void demoSignum() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("sgn(" + bf + ") = " + bf.signum());
}
}
private static void demoSubtract() {
initialize();
Iterable<Pair<BinaryFraction, BinaryFraction>> ps = filter(
p -> {
try {
p.a.subtract(p.b);
return true;
} catch (ArithmeticException e) {
return false;
}
},
P.pairs(P.binaryFractions())
);
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, ps)) {
System.out.println("(" + p.a + ") - (" + p.b + ") = " + p.a.subtract(p.b));
}
}
private static void demoMultiply() {
initialize();
Iterable<Pair<BinaryFraction, BinaryFraction>> ps = filter(
p -> {
long productExponent = (long) p.a.getExponent() + p.b.getExponent();
return productExponent <= Integer.MAX_VALUE && productExponent >= Integer.MIN_VALUE;
},
P.pairs(P.binaryFractions())
);
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, ps)) {
System.out.println("(" + p.a + ") * (" + p.b + ") = " + p.a.multiply(p.b));
}
}
private static void demoShiftLeft() {
initialize();
Iterable<Pair<BinaryFraction, Integer>> ps = filter(
p -> {
long shiftedExponent = (long) p.a.getExponent() + p.b;
return shiftedExponent <= Integer.MAX_VALUE && shiftedExponent >= Integer.MIN_VALUE;
},
P.pairs(P.binaryFractions(), P.integersGeometric())
);
for (Pair<BinaryFraction, Integer> p : take(LIMIT, ps)) {
System.out.println("(" + p.a + ") << " + p.b + " = " + p.a.shiftLeft(p.b));
}
}
private static void demoShiftRight() {
initialize();
Iterable<Pair<BinaryFraction, Integer>> ps = filter(
p -> {
long shiftedExponent = (long) p.a.getExponent() - p.b;
return shiftedExponent <= Integer.MAX_VALUE && shiftedExponent >= Integer.MIN_VALUE;
},
P.pairs(P.binaryFractions(), P.integersGeometric())
);
for (Pair<BinaryFraction, Integer> p : take(LIMIT, ps)) {
System.out.println("(" + p.a + ") >> " + p.b + " = " + p.a.shiftRight(p.b));
}
}
private static void demoEquals_BinaryFraction() {
initialize();
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, P.pairs(P.binaryFractions()))) {
System.out.println(p.a + (p.a.equals(p.b) ? " = " : " ≠ ") + p.b);
}
}
private static void demoEquals_null() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
//noinspection ObjectEqualsNull
System.out.println(bf + (bf.equals(null) ? " = " : " ≠ ") + null);
}
}
private static void demoHashCode() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("hashCode(" + bf + ") = " + bf.hashCode());
}
}
private static void demoCompareTo() {
initialize();
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, P.pairs(P.binaryFractions()))) {
System.out.println(p.a + " " + Ordering.compare(p.a, p.b).toChar() + " " + p.b);
}
}
private static void demoRead() {
initialize();
for (String s : take(LIMIT, P.strings())) {
System.out.println("read(" + nicePrint(s) + ") = " + read(s));
}
}
private static void demoRead_targeted() {
initialize();
for (String s : take(LIMIT, P.strings(P.uniformSample(BINARY_FRACTION_CHARS)))) {
System.out.println("read(" + s + ") = " + read(s));
}
}
private static void demoFindIn() {
initialize();
for (String s : take(LIMIT, P.strings())) {
System.out.println("findIn(" + nicePrint(s) + ") = " + findIn(s));
}
}
private static void demoFindIn_targeted() {
initialize();
for (String s : take(LIMIT, P.strings(P.uniformSample(BINARY_FRACTION_CHARS)))) {
System.out.println("findIn(" + s + ") = " + findIn(s));
}
}
private static void demoToString() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println(bf);
}
}
}
| src/test/java/mho/wheels/math/BinaryFractionDemos.java | package mho.wheels.math;
import mho.wheels.iterables.ExhaustiveProvider;
import mho.wheels.iterables.IterableProvider;
import mho.wheels.iterables.RandomProvider;
import mho.wheels.ordering.Ordering;
import mho.wheels.structures.Pair;
import org.jetbrains.annotations.NotNull;
import java.math.BigInteger;
import static mho.wheels.iterables.IterableUtils.*;
import static mho.wheels.math.BinaryFraction.*;
import static mho.wheels.math.BinaryFraction.of;
import static mho.wheels.testing.Testing.*;
@SuppressWarnings({"UnusedDeclaration", "ConstantConditions"})
public class BinaryFractionDemos {
private static final boolean USE_RANDOM = false;
private static final @NotNull String BINARY_FRACTION_CHARS = " -0123456789<>";
private static int LIMIT;
private static IterableProvider P;
private static void initialize() {
if (USE_RANDOM) {
P = RandomProvider.example();
LIMIT = 1000;
} else {
P = ExhaustiveProvider.INSTANCE;
LIMIT = 10000;
}
}
private static void demoGetMantissa() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("getMantissa(" + bf + ") = " + bf.getMantissa());
}
}
private static void demoGetExponent() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("getExponent(" + bf + ") = " + bf.getExponent());
}
}
private static void demoOf_BigInteger_int() {
initialize();
Iterable<Pair<BigInteger, Integer>> ps = filter(
p -> (long) p.b + p.a.getLowestSetBit() < Integer.MAX_VALUE,
P.pairs(P.bigIntegers(), P.integers())
);
for (Pair<BigInteger, Integer> p : take(LIMIT, ps)) {
System.out.println("of(" + p.a + ", " + p.b + ") = " + of(p.a, p.b));
}
}
private static void demoOf_BigInteger() {
initialize();
for (BigInteger i : take(LIMIT, P.bigIntegers())) {
System.out.println("of(" + i + ") = " + of(i));
}
}
private static void demoOf_int() {
initialize();
for (int i : take(LIMIT, P.integers())) {
System.out.println("of(" + i + ") = " + of(i));
}
}
private static void demoOf_float() {
initialize();
for (float f : take(LIMIT, P.floats())) {
System.out.println("of(" + f + ") = " + of(f));
}
}
private static void demoOf_double() {
initialize();
for (double d : take(LIMIT, P.doubles())) {
System.out.println("of(" + d + ") = " + of(d));
}
}
private static void demoBigDecimalValue() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("bigDecimalValue(" + bf + ") = " + bf.bigDecimalValue());
}
}
private static void demoIsInteger() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println(bf + " is " + (bf.isInteger() ? "" : "not ") + "an integer");
}
}
private static void demoAdd() {
initialize();
Iterable<Pair<BinaryFraction, BinaryFraction>> ps = filter(
p -> {
try {
p.a.add(p.b);
return true;
} catch (ArithmeticException e) {
return false;
}
},
P.pairs(P.binaryFractions())
);
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, ps)) {
System.out.println("(" + p.a + ") + (" + p.b + ") = " + p.a.add(p.b));
}
}
private static void demoNegate() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("-(" + bf + ") = " + bf.negate());
}
}
private static void demoAbs() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("|" + bf + "| = " + bf.abs());
}
}
private static void demoSignum() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("sgn(" + bf + ") = " + bf.signum());
}
}
private static void demoSubtract() {
initialize();
Iterable<Pair<BinaryFraction, BinaryFraction>> ps = filter(
p -> {
try {
p.a.subtract(p.b);
return true;
} catch (ArithmeticException e) {
return false;
}
},
P.pairs(P.binaryFractions())
);
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, ps)) {
System.out.println("(" + p.a + ") - (" + p.b + ") = " + p.a.subtract(p.b));
}
}
private static void demoEquals_BinaryFraction() {
initialize();
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, P.pairs(P.binaryFractions()))) {
System.out.println(p.a + (p.a.equals(p.b) ? " = " : " ≠ ") + p.b);
}
}
private static void demoEquals_null() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
//noinspection ObjectEqualsNull
System.out.println(bf + (bf.equals(null) ? " = " : " ≠ ") + null);
}
}
private static void demoHashCode() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println("hashCode(" + bf + ") = " + bf.hashCode());
}
}
private static void demoCompareTo() {
initialize();
for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, P.pairs(P.binaryFractions()))) {
System.out.println(p.a + " " + Ordering.compare(p.a, p.b).toChar() + " " + p.b);
}
}
private static void demoRead() {
initialize();
for (String s : take(LIMIT, P.strings())) {
System.out.println("read(" + nicePrint(s) + ") = " + read(s));
}
}
private static void demoRead_targeted() {
initialize();
for (String s : take(LIMIT, P.strings(P.uniformSample(BINARY_FRACTION_CHARS)))) {
System.out.println("read(" + s + ") = " + read(s));
}
}
private static void demoFindIn() {
initialize();
for (String s : take(LIMIT, P.strings())) {
System.out.println("findIn(" + nicePrint(s) + ") = " + findIn(s));
}
}
private static void demoFindIn_targeted() {
initialize();
for (String s : take(LIMIT, P.strings(P.uniformSample(BINARY_FRACTION_CHARS)))) {
System.out.println("findIn(" + s + ") = " + findIn(s));
}
}
private static void demoToString() {
initialize();
for (BinaryFraction bf : take(LIMIT, P.binaryFractions())) {
System.out.println(bf);
}
}
}
| added multiply and shift demos
| src/test/java/mho/wheels/math/BinaryFractionDemos.java | added multiply and shift demos | <ide><path>rc/test/java/mho/wheels/math/BinaryFractionDemos.java
<ide> }
<ide> }
<ide>
<add> private static void demoMultiply() {
<add> initialize();
<add> Iterable<Pair<BinaryFraction, BinaryFraction>> ps = filter(
<add> p -> {
<add> long productExponent = (long) p.a.getExponent() + p.b.getExponent();
<add> return productExponent <= Integer.MAX_VALUE && productExponent >= Integer.MIN_VALUE;
<add> },
<add> P.pairs(P.binaryFractions())
<add> );
<add> for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, ps)) {
<add> System.out.println("(" + p.a + ") * (" + p.b + ") = " + p.a.multiply(p.b));
<add> }
<add> }
<add>
<add> private static void demoShiftLeft() {
<add> initialize();
<add> Iterable<Pair<BinaryFraction, Integer>> ps = filter(
<add> p -> {
<add> long shiftedExponent = (long) p.a.getExponent() + p.b;
<add> return shiftedExponent <= Integer.MAX_VALUE && shiftedExponent >= Integer.MIN_VALUE;
<add> },
<add> P.pairs(P.binaryFractions(), P.integersGeometric())
<add> );
<add> for (Pair<BinaryFraction, Integer> p : take(LIMIT, ps)) {
<add> System.out.println("(" + p.a + ") << " + p.b + " = " + p.a.shiftLeft(p.b));
<add> }
<add> }
<add>
<add> private static void demoShiftRight() {
<add> initialize();
<add> Iterable<Pair<BinaryFraction, Integer>> ps = filter(
<add> p -> {
<add> long shiftedExponent = (long) p.a.getExponent() - p.b;
<add> return shiftedExponent <= Integer.MAX_VALUE && shiftedExponent >= Integer.MIN_VALUE;
<add> },
<add> P.pairs(P.binaryFractions(), P.integersGeometric())
<add> );
<add> for (Pair<BinaryFraction, Integer> p : take(LIMIT, ps)) {
<add> System.out.println("(" + p.a + ") >> " + p.b + " = " + p.a.shiftRight(p.b));
<add> }
<add> }
<add>
<ide> private static void demoEquals_BinaryFraction() {
<ide> initialize();
<ide> for (Pair<BinaryFraction, BinaryFraction> p : take(LIMIT, P.pairs(P.binaryFractions()))) { |
|
Java | apache-2.0 | error: pathspec 'src/test/java/org/howardism/fpjava/ClosureTests.java' did not match any file(s) known to git
| 820ecc926435821904f99536d0d3ee2b7721350e | 1 | howardabrams/fp-for-java | /*
* Copyright (c) 2012, Howard Abrams All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.howardism.fpjava;
import static org.junit.Assert.*;
import org.junit.Test;
/**
* Validate some basic assumptions for using and extending the {@link Closure}
* interface.
*
* @author Howard Abrams (www.howardabrams.com)
*/
public class ClosureTests {
// List the closures we want to test...
final Closure theTruth = new TheTruth();
final Closure conjecture = new Conjecture();
@Test
public void testBooleans() {
assertTrue( (Boolean) theTruth.apply() );
assertTrue( conjecture.apply() == Boolean.FALSE );
}
class TheTruth implements Closure {
public Object apply(Object... objects) {
return Boolean.TRUE;
}
}
class Conjecture implements Closure {
public Object apply(Object... objects) {
return Boolean.FALSE;
}
}
}
| src/test/java/org/howardism/fpjava/ClosureTests.java | Adding the start of some closure tests.
| src/test/java/org/howardism/fpjava/ClosureTests.java | Adding the start of some closure tests. | <ide><path>rc/test/java/org/howardism/fpjava/ClosureTests.java
<add>/*
<add> * Copyright (c) 2012, Howard Abrams All rights reserved.
<add> *
<add> * Licensed to the Apache Software Foundation (ASF) under one or more
<add> * contributor license agreements. See the NOTICE file distributed with this
<add> * work for additional information regarding copyright ownership. The ASF
<add> * licenses this file to You under the Apache License, Version 2.0 (the
<add> * "License"); you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
<add> * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
<add> * License for the specific language governing permissions and limitations under
<add> * the License.
<add> */
<add>
<add>package org.howardism.fpjava;
<add>
<add>import static org.junit.Assert.*;
<add>
<add>import org.junit.Test;
<add>
<add>/**
<add> * Validate some basic assumptions for using and extending the {@link Closure}
<add> * interface.
<add> *
<add> * @author Howard Abrams (www.howardabrams.com)
<add> */
<add>public class ClosureTests {
<add>
<add> // List the closures we want to test...
<add> final Closure theTruth = new TheTruth();
<add> final Closure conjecture = new Conjecture();
<add>
<add> @Test
<add> public void testBooleans() {
<add> assertTrue( (Boolean) theTruth.apply() );
<add> assertTrue( conjecture.apply() == Boolean.FALSE );
<add> }
<add>
<add> class TheTruth implements Closure {
<add> public Object apply(Object... objects) {
<add> return Boolean.TRUE;
<add> }
<add> }
<add> class Conjecture implements Closure {
<add> public Object apply(Object... objects) {
<add> return Boolean.FALSE;
<add> }
<add> }
<add>} |
|
JavaScript | mit | d747999449cc0edd2508986b8a31a2860bafc37d | 0 | BrightcoveLearning/Brightcove-API-References,BrightcoveLearning/Brightcove-API-References | // Create a Standard Live Job
/**
* @api {post} /v1/jobs Create a Live Job
* @apiName Create a Live Job
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Create a live stream
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (Request Body Fields) {Boolean} live_stream Indicates that the job is a live streaming job.
* @apiParam (Request Body Fields) {String="24x7","event"} [channel_type=event] Indicates whether the job should be billed as event hours or a channel (`24x7`) - see [Channels and Event Hours ](https://support.brightcove.com/overview-brightcove-live-api#Channels_and_hours).
* @apiParam (Request Body Fields) {Boolean} [ad_insertion=false] Setting this parameter to true will enable server side ad insertion (SSAI) on the job. Current support includes, DFP, Freewheel, or any VAST 2.0/3.0 ad tags.
* @apiParam (Request Body Fields) {String="us-west-2","us-east-1","ap-southeast-2", "ap-northeast-1","ap-southeast-1", "eu-central-1", "eu-west-1", "sa-east-1"} region AWS region - you can also specify region as the alias for a list set up for the account by Brightcove Support. See [Supported AWS Regions] (https://support.brightcove.com/overview-brightcove-live-api#Support_aws_regions) for more details on the support in each region.
* @apiParam (Request Body Fields) (String[]) [rtmp_ip_whitelist] Array of IP addresses whitelisted for RTMP delivery
* @apiParam (Request Body Fields) {Number{-60.0-60.0}} [ad_audio_loudness_level] Adjust the loudness level of the audio. This is measured in LUFS and specified in dB. This is useful to set the output loudness level to conform to a standard (`-23dB` for **EBU R.128**) The recommended setting is `-23`.
* @apiParam (Request Body Fields) {String} [beacon_set] ID for a beacon set (for SSAI only).
* @apiParam (Request Body Fields) {Number{1-7200}} [reconnect_time=30] The time, in seconds, to wait for a stream to reconnect to the encoder. If the reconnect time passes without the stream reconnecting, the job will automatically finish.
* @apiParam (Request Body Fields) {String} [slate] Id for a set of slate assets
* @apiParam (Request Body Fields) {Boolean} [static=false] Whether this is a static entry point (SEP) job
* @apiParam (Request Body Fields) {Object} [encryption] Encryption to apply to the stream.
* @apiParam (Request Body Fields) {String="aes-128"} encryption.method The encryption method to use.
* @apiParam (Request Body Fields) {String="internal","external"} encryption.type The encryption type, depending on whether an internal or external key server will be used.
* @apiParam (Request Body Fields) {String} [encryption.key] The encryption key - either a key, passphrase, or key_rotation is required; if the type is external, key is required
* @apiParam (Request Body Fields) {String} [encryption.passphrase] The encryption key - either a key, passphrase, or key_rotation is required
* @apiParam (Request Body Fields) {Boolean} [encryption.key_rotation=false] Whether to use key rotation - either a key, passphrase, or key_rotation is required
* @apiParam (Request Body Fields) {Number} [encryption.rotate_every=10] Interval for key rotation in video segments
* @apiParam (Request Body Fields) {String} [encryption.external_url] The URL for the external encryption key - this field is required if you specify `type` as `external`, and the external key must match the `key` value
* @apiParam (Request Body Fields) {Number{0-93600}} [event_length=0] Used to preset and define an end time for the live event. At any point within the specified `event_length` you may reconnect to your stream. The `event_length` setting goes into effect as soon as streaming begins.
* @apiParam (Request Body Fields) {Boolean} [hls_endlist=true] Whether an `EXT-X-ENDLIST` tag should be added to the stream playlist when you stop the stream or the `reconnect_time` window has been reached. The `EXT-X-ENDLIST` tag indicates that no more Media Segments will be added to the Media Playlist file and helps prevent the player from displaying error messages when the stream stops.
* @apiParam (Request Body Fields) {Number{1-86400}} [live_dvr_sliding_window_duration=100] The time, in seconds, to keep in the live DVR manifest. If the stream duration is longer than the window duration, segment references will be removed first in first out. Default is 100 seconds. **Note: for SSAI jobs, the limit is `7200`.
* @apiParam (Request Body Fields) {Number{1-600}} [live_dvr_ads_window_duration=600] The time, in seconds, to keep in the live DVR manifest. If the stream duration is longer than the window duration, segment references will be removed first in first out. Default is 100 seconds.
* @apiParam (Request Body Fields) {Number{1-5}} [max_hls_protocol_version=3] Sets the maximum HLS protocol version to use. Special features will be used as available. Default is 3.
* @apiParam (Request Body Fields) {Array} [notifications] Array of notification destination objects or strings - notifications defined here are for **job-level events**. A notification will be sent to the destination when selected event occurs. You can use a simple string with a url: "http://log:[email protected]/post", or you can use an object.
* @apiParam (Request Body Fields) {String} [notifications.event] Event to send the notification for.
* @apiParam (Request Body Fields) {String} notifications.url Destination for the notification.
* @apiParam (Request Body Fields) {String} [notifications.credentials] Credentials The name of the credentials configured in your account for this address
* @apiParam (Request Body Fields) {String="first_segment_uploaded", "output_finished", "state_changed"} [notifications.event="state_changed"] Event type to send notifications for. It is recommended to set events on the job and not individual rendition outputs since renditions will finish simultaneously.
* @apiParam (Request Body Fields) {Object[]} [add_cdns] Array of additional CDN providers to be used for manifest generation. For each CDN provided, the manifest will be prepended accordingly
* @apiParam (Request Body Fields) {String} add_cdns.label A label to identify the CDN.
* @apiParam (Request Body Fields) {String} add_cdns.prepend CDN hostname to be prepended to addresses
* @apiParam (Request Body Fields) {String="http","https"} add_cdns.protocol Protocol to use for the stream delivery
* @apiParam (Request Body Fields) {String} add_cdns.vendor CDN vendor such as `akamai`
* @apiParam (Request Body Fields) {Object} [add_cdns.token_auth] Token authentication details
* @apiParam (Request Body Fields) {String} [add_cdns.token_auth.auth_type] Token authentication type - currently, the only supported value is `Akamai2.0`
* @apiParam (Request Body Fields) {String} [add_cdns.token_auth.key] Your Akamai token auth password
* @apiParam (Request Body Fields) {String{5..12}} [add_cdns.token_auth.token_name] Your Akamai token token name
* @apiParam (Request Body Fields) {Object} [add_cdns.token_auth.media] Your Akamai token token name
* @apiParam (Request Body Fields) {Mixed} [add_cdns.token_auth.media.start_time="now"] The time to apply token auth - `"now"` or epoch time in seconds
* @apiParam (Request Body Fields) {Number} [add_cdns.token_auth.media.end_time] The time to end token auth, epoch time in seconds
* @apiParam (Request Body Fields) {ttl} [add_cdns.token_auth.media.ttl] The time to live in seconds - either `end_time` or `ttl` is required
* @apiParam (Request Body Fields) {Object[]} outputs Array of output specifications for live and VOD assets to be created from the live stream.
* @apiParam (Request Body Fields) {String} outputs.label Label for the live or VOD asset.
* @apiParam (Request Body Fields) {Boolean} outputs.live_stream For jobs, setting live_stream to true indicates the output is a live rendition. If `live_stream` is false, or is not set, the output will be treated as a VOD output.
* @apiParam (Request Body Fields) {Number{0-172800}} [outputs.duration] Clipping API option 1. Duration (in seconds) to clip back from Live. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Number{-60.0-60.0}} [outputs.ad_audio_loudness_level] Adjust the loudness level of the audio. This is measured in LUFS and specified in dB. This is useful to set the output loudness level to conform to a standard (`-23dB` for **EBU R.128**) The recommended setting is `-23`.
* @apiParam (Request Body Fields) {Number{0-2147483647}} [outputs.stream_start_time] Clipping API option 2. An offset, in seconds, from the start of the live stream to mark the beginning of the clip. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Number{stream_start_time-stream_start_time+172800}} [outputs.stream_end_time] Clipping API option 2. An offset, in seconds, from the start of the live stream to mark the end of the clip. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Number{current_time-future_time}} [outputs.start_time] Clipping API option 3. Universal epoch time, in seconds, to mark the beginning of the clip. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Number{start_time-start_time+172800}} [outputs.end_time] Clipping API option 3. Universal epoch time, in seconds, to mark the end of the clip. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Boolean} [outputs.copy_video] Specifying `copy_video` will take the video track from the input video file and transmux it into the resulting output file.
* @apiParam (Request Body Fields) {Boolean} [outputs.copy_audio] Specifying `copy_audio` will take the audio track from the input video file and transmux it into the resulting output file.
* @apiParam (Request Body Fields) {Boolean} [outputs.skip_video] Specifying `skip_video` removes the video track.
* @apiParam (Request Body Fields) {Boolean} [outputs.skip_audio] Specifying `skip_audio` removes the audio track.
* @apiParam (Request Body Fields) {Number} [outputs.width] Video frame width. If no width is supplied, we will use 640 pixels.
* @apiParam (Request Body Fields) {Number} [outputs.height] Video frame height. If no height is supplied, we will use 480 pixels.
* @apiParam (Request Body Fields) {String="h264"} [outputs.video_codec] The output video codec. Note: Only h264 is supported.
* @apiParam (Request Body Fields) {String="baseline","main","high"} [outputs.h264_profile] H.264 has three commonly-used profiles: Baseline (lowest), Main, and High. Lower levels are easier to decode, but higher levels offer better compression. For the best compression quality, choose High. For playback on low-CPU machines or many mobile devices, choose Baseline.
* @apiParam (Request Body Fields) {Number{1-6000}} outputs.keyframe_interval The maximum number of frames between each keyframe. If you set a low keyframe_interval it will increase the size / decrease the quality of your output file, but it will allow more precise scrubbing in most players. It’s recommended to have at least one keyframe per segment. If keyframe_interval is not provided, keyframes will follow the input GOP structure.
* @apiParam (Request Body Fields) {Number{64-10000}} outputs.video_bitrate target video bitrate in kbps
* @apiParam (Request Body Fields) {String="aac"} [outputs.audio_codec] The output audio codec to use. Note: Only aac is supported.
* @apiParam (Request Body Fields) {Number{16-1024}} outputs.audio_bitrate An output bitrate setting for the audio track, in Kbps
* @apiParam (Request Body Fields) {Number{2-20}} outputs.segment_seconds Sets the maximum duration of each segment in a segmented output.
* @apiParam (Request Body Fields) {mixed[]} [outputs.notifications] Array of notification destination objects or strings - notifications defined here are **for events specific to the output**. A notification will be sent to the destination when selected event occurs. You can use a simple string with a url: "http://log:[email protected]/post", or you can use an object.
* @apiParam (Request Body Fields) {String} outputs.notifications.url Destination for the notification.
* @apiParam (Request Body Fields) {String} [outputs.notifications.event] Event to send the notification for.
* @apiParam (Request Body Fields) {String} [outputs.notifications.credentials] Credentials The name of the credentials configured in your account for this address
* @apiParam (Request Body Fields) {String} [outputs.rendition_label] Indicates what rendition to use to create a VOD output (from the live job) or which renditions to use. By default, the system uses any transmuxed rendition or the highest resolution output if there is no transmuxed output.
* @apiParam (Request Body Fields) {String="playlist"} [outputs.type] The only type supported is a playlist. This is used for generating multiple master playlists with different renditions in the HLS manifest with the defined stream labels.
* @apiParam (Request Body Fields) {Array} [outputs.streams] When creating a playlist, the streams field is used to define which output renditions (by label) should be included in the manifest. Example format [{"source": "1080p"}, {"source": "720p"}].
* @apiParam (Request Body Fields) {String} [outputs.url] For VOD, URL is mandatory and sets the destination of the final asset destination. For access restricted origins, the credentials a can be passed along with the URL or stored within the Brightcove system. For Live, this is reserved for future use.
* @apiParam (Request Body Fields) {String} [outputs.credentials] The name for credentials with private and public keys can be stored with Brightcove to avoid passing plain text on API requests. This is required if the S3 or FTP origins are restricted. If credentials are not provided, it will be assumed that the origin restrictions are set to public or credentials are passed along with the URL.
* @apiParam (Request Body Fields) {Object} [outputs.videocloud] Video Cloud customer have the option to push their clips directly through Dynamic Ingest. Options "{"video": {"name"}, "ingest": { }". The video object will be sent to the CMS API and can include (description, tags, etc.). Note: the account_id and reference_id will be added automatically. If overriding the reference_id, ensure that the id does not already exist or the job will fail. For more information see: [CMS-API-CreateVideo](https://brightcovelearning.github.io/Brightcove-API-References/cms-api/v1/doc/index.html#api-videoGroup-Create_Video). The ingest object will be sent to the Dynamic Ingest API and can include (master, profile, poster, callbacks, etc). Note: the account_id and video_id are added automatically. For more information see: [DI-API-IngestVideo](https://brightcovelearning.github.io/Brightcove-API-References/dynamic-ingest-api/v1/doc/index.html#api-Ingest-Ingest_Media_Asset).
* @apiParam (Request Body Fields) {Mixed[]} [outputs.notifications] Array of notification destination objects or strings. A notification will be sent to the destination when selected event occurs. You can use a simple string with a url: "http://log:[email protected]/post", or you can use an object.
* @apiParam (Request Body Fields) {String} [outputs.notifications.event] Event to send the notification for.
* @apiParam (Request Body Fields) {String} outputs.notifications.url Destination for the notification.
*
* @apiParamExample {object} Standard Live Stream Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "outputs": [
* {
* "label": "hls1080p",
* "live_stream": true,
* "width": 1920,
* "height": 1080,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 2400,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 1280,
* "height": 720,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
* @apiParamExample {object} Live Stream Transmuxed Rendition Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "outputs": [
* {
* "label": "hls1080p transmux",
* "live_stream": true,
* "copy_video": true,
* "copy_audio": true,
* "segment_seconds": 6
* },
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6
* },
* {
* "url":"s3://YOUR_BUCKET/live/20160403004644_test.mp4",
* "credentials": "YOUR_CREDENTIALS"
* }
* ]
* }
*
* @apiParamExample {object} Live Stream with VOD Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6
* },
* {
* "url":"s3://YOUR_BUCKET/live/20160403004644_test.mp4",
* "credentials": "YOUR_CREDENTIALS"
* }
* ]
* }
*
* @apiParamExample {object} Live Stream with VOD and Notifications Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "notifications": [
* "http://httpbin.org/post?liveStateChange",
* {
* "url": "http://httpbin.org/post?liveStarted",
* "event": "first_segment_uploaded"
* },
* {
* "url": "http://httpbin.org/post?liveFinished",
* "event": "output_finished"
* }],
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "height": 720,
* "video_bitrate": 2000,
* "segment_seconds": 6,
* "video_codec": "h264",
* "h264_profile": "high",
* "segment_seconds": 6
* },
* {
* "label": "hls360p",
* "live_stream": true,
* "height": 360,
* "video_bitrate": 650,
* "segment_seconds": 6
* },
* {
* "url":"s3://YOUR_BUCKET/path/filename.mp4",
* "credentials": "YOUR_CREDENTIALS",
* "notifications": [{
* "url": "http://httpbin.org/post?vodStateChange"
* },
* {
* "url": "http://httpbin.org/post?vodFinished",
* "event": "output_finished"
* }]
* }]
* }
*
* @apiParamExample {object} Live Stream with Multiple Output Playlists Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 180,
* "outputs": [{
* "label": "hls1080p",
* "live_stream": true,
* "height": 1080,
* "video_bitrate": 3000,
* "segment_seconds": 6,
* "keyframe_interval": 90
* },{
* "label": "hls720p",
* "live_stream": true,
* "height": 720,
* "video_bitrate": 2000,
* "segment_seconds": 6,
* "keyframe_interval": 90
* },{
* "label": "AudioOnly",
* "live_stream": true,
* "skip_video": true
* },{
* "label": "playlistVideoALL",
* "type": "playlist",
* "streams": [{"source": "hls1080p"},{"source": "hls720p"}]
* },{
* "label": "playlistHIGH",
* "type": "playlist",
* "filename":"playlist-high.m3u8",
* "streams": [{"source": "hls1080p"}]
* },{
* "label": "playlistAudio",
* "type": "playlist",
* "filename":"playlist-audio.m3u8",
* "streams": [{"source": "AudioOnly"}]
* },{
* "url":"s3://YOUR_BUCKET/PATH/20160804104116_test.mp4",
* "credentials": "S3_CREDENTIALS"
* }]
* }
*
* @apiParamExample {object} Live Stream with a Multiple CDN Config Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 30,
* "add_cdns":[
* {
* "label": "akamai",
* "prepend": "akamai.playback.com/someApplication",
* "protocol": "http"
* },
* {
* "label": "level3_1",
* "prepend": "l3.playback.io/somPath/someApplication/someFolder",
* "protocol": "http"
* }
* ],
* "outputs": [
* {
* "label": "hls1",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls2",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
*
*
* @apiParamExample {object} Live Stream with SSAI and VOD output Example:
* {
* "ad_insertion": true,
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 180,
* "slate": "bbbff5ad67a94941be8cb987ba23049d",
* "notifications": [
* "http://httpbin.org/post"
* ],
* "add_cdns":[{
* "label": "akamai-test",
* "prepend": "vrnginx-useast.akamai.com",
* "protocol": "http"
* }],
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "height": 720,
* "video_bitrate": 2400,
* "segment_seconds": 6,
* "keyframe_interval": 90
* },{
* "label": "hls480p",
* "live_stream": true,
* "height": 480,
* "video_bitrate": 1000,
* "segment_seconds": 6,
* "keyframe_interval": 90
* }, {
* "url":"s3://YOUR_BUCKET/live/20160403004644_test.mp4",
* "credentials": "YOUR_CREDENTIALS"
* }]
* }
*
* @apiParamExample {object} Standard Live Stream with Internal Encryption Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "encryption": {
* "method": "aes-128",
* "type": "internal",
* "key":"01234567890123450123456789012345"
* }
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
* @apiParamExample {object} Standard Live Stream with Internal Encryption and Passphrase Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "encryption": {
* "method": "aes-128",
* "type": "internal",
* "passphrase": "SuperSecret
* }
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
* @apiParamExample {object} Standard Live Stream with Internal Encryption and Rotation Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "encryption": {
* "method": "aes-128",
* "type": "internal",
* "rotate_every": 5,
* "key_rotation": true,
* }
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
* @apiParamExample {object} Standard Live Stream with External URL and Key Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "encryption": {
* "method": "aes-128",
* "type": "external",
* "key": "01234567890123456789012345678901",
* "external_url": "https://myserver/mykey/a.key"
* }
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
*
* @apiSuccess (200) {String} id Id for the stream.
* @apiSuccess (200) {String} stream_url The stream URL to add to your encoder configuration.
* @apiSuccess (200) {String} stream_name The stream name to add to your encoder configuration.
* @apiSuccess (200) {Object[]} outputs Details on each output rendition of the Live job.
* @apiSuccess (200) {String} outputs.id The unique id for the rendition.
* @apiSuccess (200) {String} outputs.playback_url Media HLS manifest for the specified rendition (non-SSAI).
* @apiSuccess (200) {String} outputs.playback_url_dvr Media HLS manifest for the specified rendition (with DVR capability).
* @apiSuccess (200) {String} outputs.playback_url_vod Media HLS manifest for the VOD version of the stream if one was specified in the job settings - note that the VOD will not be available until the live event has finished and the creation of the VOD is complete.
* @apiSuccess (200) {String} outputs.playback_url_dvr Media HLS manifest with a configurable DVR window. Default 100 seconds (non-SSAI).
* @apiSuccess (200) {String} outputs.playback_url_vod Media HLS manifest of the Live stream for the last 24 hours. (non-SSAI).
* @apiSuccess (200) {Boolean} live_stream Indicates that the job is a live streaming job.
* @apiSuccess (200) {Boolean} ad_insertion Setting this parameter to true will enable server side ad insertion (SSAI) on the job. Current support includes, DFP, Freewheel, or any VAST 2.0/3.0 ad tags.
* @apiSuccess (200) {String} region You can specify an Amazon AWS region to use for encoding a job and we will process the job on servers in the region specified. It’s recommended to use the region closest to your encoder.
* @apiSuccess (200) {Number} reconnect_time The time, in seconds, to wait for a stream to reconnect to the encoder. Default is set to 30 seconds.
* @apiSuccess (200) {Number} event_length Used to preset and define an end time for the live event. At any point within the specified `event_length` you may reconnect to your stream. The `event_length` setting goes into effect as soon as streaming begins.
* @apiSuccess (200) {Number} max_hls_protocol_version Sets the maximum HLS protocol version to use. Special features will be used as available. Default is 3.
* @apiSuccess (200) {String} slate id for slate of assets to be included
* @apiSuccess (200) {String} sep_state The current state of the job's SEP (static entry point) - possible values: 'waiting' 'pending_activation', 'activation_in_progress', 'ready', 'pending_deactivation', 'deactivation_in_progress', 'cancelled', 'finished'
* @apiSuccess (200) {mixed[]} notifications Array of notification destination objects or strings. A notification will be sent to the destination when selected event occurs. You can use a simple string with a url: "http://log:[email protected]/post", or you can use an object.
* @apiSuccess (200) {String} notifications.url Destination for the notification.
* @apiSuccess (200) {String} notifications.credentials Credentials for the destination, if required.
* @apiSuccess (200) {String} notifications.event Event type to send notifications for. It’s recommended to set events on the job and not individual rendition outputs since renditions will finish simultaneously.
* @apiSuccess (200) {Object[]} add_cdns Array of additional CDN providers to be used for manifest generation. For each CDN provided, the manifest will be prepended accordingly
* @apiSuccess (200) {String} add_cdns.label A label to identify the CDN.
* @apiSuccess (200) {String} add_cdns.prepend Typically, a domain or path to prepend to addresses
* @apiSuccess (200) {String} add_cdns.protocol Protocol to use in sending the stream to the CDN.
*
* @apiSuccessExample {object} Success Response Standard Live Stream:
* HTTP/1.1 200 OK
* {
* "id": "6666482ac53b4f9bac473157c0897bde",
* "outputs": [
* {
* "id": "0-6666482ac53b4f9bac473157c0897bde",
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_0/chunklist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_0/chunklist_vod.m3u8",
* "playback_added_cdns": [],
* "label": "hls1080p"
* },
* {
* "id": "1-6666482ac53b4f9bac473157c0897bde",
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_1/chunklist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_1/chunklist_vod.m3u8",
* "playback_added_cdns": [],
* "label": "hls720p"
* },
* {
* "id": "2-6666482ac53b4f9bac473157c0897bde",
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_2/chunklist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_2/chunklist_dvr.m3u8",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_2/chunklist_vod.m3u8",
* "playback_added_cdns": [],
* "label": "hls480p"
* },
* {
* "id": "3-6666482ac53b4f9bac473157c0897bde",
* "playlist_type": "defaultS3",
* "type": "playlist",
* "filename": "playlist.m3u8",
* "dvr_filename": "playlist_dvr.m3u8",
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/playlist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/playlist_dvr.m3u8"
* }
* ],
* "stream_url": "rtmp://ep2-usw2.bcovlive.io:1935/6666482ac53b4f9bac473157c0897bde",
* "stream_name": "alive",
* "static": false,
* "encryption": {},
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/playlist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/playlist_dvr.m3u8"
* }
*
* @apiSuccessExample {object} Success Response Live Stream with Multiple Output Playlists:
* HTTP/1.1 200 OK
* {
* "id": "edcd4d356228417d80345a0c91864efe",
* "outputs": [
* {
* "id": "0-edcd4d356228417d80345a0c91864efe",
* "label": "hls1080p"
* },
* {
* "id": "1-edcd4d356228417d80345a0c91864efe",
* "label": "hls720p"
* },
* {
* "id": "2-edcd4d356228417d80345a0c91864efe",
* "label": "AudioOnly"
* },
* {
* "id": "3-edcd4d356228417d80345a0c91864efe",
* "playlist_type": "defaultS3",
* "type": "playlist",
* "label": "playlistVideoALL",
* "filename": "8b31bafdb20d462ea2e6e336a67ed4f3.m3u8",
* "dvr_filename": "8b31bafdb20d462ea2e6e336a67ed4f3_dvr.m3u8",
* "streams": [
* "hls1080p",
* "hls720p"
* ],
* "playback_url": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3_dvr.m3u8",
* "playback_url_s3": "s3://BUCKET/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3.m3u8"
* },
* {
* "id": "4-edcd4d356228417d80345a0c91864efe",
* "playlist_type": "defaultS3",
* "type": "playlist",
* "label": "playlistHIGH",
* "filename": "playlist-high.m3u8",
* "dvr_filename": "playlist-high_dvr.m3u8",
* "streams": [
* "hls1080p"
* ],
* "playback_url": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-high.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-high_dvr.m3u8",
* "playback_url_s3": "s3://BUCKET/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-high.m3u8"
* },
* {
* "id": "5-edcd4d356228417d80345a0c91864efe",
* "playlist_type": "defaultS3",
* "type": "playlist",
* "label": "playlistAudio",
* "filename": "playlist-audio.m3u8",
* "dvr_filename": "playlist-audio_dvr.m3u8",
* "streams": [
* "AudioOnly"
* ],
* "playback_url": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-audio.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-audio_dvr.m3u8",
* "playback_url_s3": "s3://BUCKET/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-audio.m3u8"
* }
* ],
* "stream_url": "rtmp://host/edcd4d356228417d80345a0c91864efe",
* "stream_name": "alive",
* "playback_url": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3_dvr.m3u8",
 *          "playback_url_s3": "s3://BUCKET/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3.m3u8"
* }
*
* @apiSuccessExample {json} Success Response Live Stream with a Multiple CDN Config:
* HTTP/1.1 200 OK
* {
* "id": "b3c20e416f964fb1b67334877bade99b",
* "outputs": [
* {
* "id": "0b3c20e416f964fb1b67334877bade99b",
* "playback_url": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8",
* "playback_added_cdns": [
* {
* "label": "akamai",
* "playback_url": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist.m3u8",
* "playback_url_dvr": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8"
* },
* {
* "label": "level3_1",
* "playback_url": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist.m3u8",
* "playback_url_dvr": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8"
* }
* ],
* "label": "hls1"
* },
* {
* "id": "1b3c20e416f964fb1b67334877bade99b",
* "playback_url": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_vod.m3u8",
* "playback_added_cdns": [
* {
* "label": "akamai",
* "playback_url": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist.m3u8",
* "playback_url_dvr": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_vod.m3u8"
* },
* {
* "label": "level3_1",
* "playback_url": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist.m3u8",
* "playback_url_dvr": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_vod.m3u8"
* }
* ],
* "label": "hls2"
* }
* ],
* "stream_url": "rtmp://ep16-usw2.a-live.io:1935/b3c20e416f964fb1b67334877bade99b",
* "stream_name": "alive",
* "playback_url": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist_dvr.m3u8",
* "playback_url_vod": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8",
* "playback_added_cdns": [
* {
* "label": "akamai",
* "playback_url": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist.m3u8",
* "playback_url_dvr": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist_dvr.m3u8",
* "playback_url_vod": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8"
* },
* {
* "label": "level3_1",
* "playback_url": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist.m3u8",
* "playback_url_dvr": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist_dvr.m3u8",
* "playback_url_vod": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8"
* }
* ]
* }
*
*
* @apiSuccessExample {object} Success Response Live Stream with SSAI and VOD output:
* HTTP/1.1 200 OK
* {
* "id": "3158f1c9bc5c462182079f434ba4ae0a",
* "outputs": [
* {
* "id": "03158f1c9bc5c462182079f434ba4ae0a",
* "playback_url":"http://host/job_id/us-west-2/profile_0/chunklist.m3u8",
 *             "playback_url_dvr": "http://host/job_id/us-west-2/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "http://host/job_id/us-west-2/profile_0/chunklist_vod.m3u8",
* "label": "Out0"
* },
* {
* "id": "13158f1c9bc5c462182079f434ba4ae0a",
* "playback_url": "http://host/job_id/us-west-2/profile_1/chunklist.m3u8",
* "playback_url_dvr": "http://host/job_id/us-west-2/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "http://host/job_id/us-west-2/profile_1/chunklist_vod.m3u8",
* "label": "Out1"
* },
* {
* "id": "23158f1c9bc5c462182079f434ba4ae0a",
* "playback_url": "http://host/job_id/us-west-2/profile_2/chunklist.m3u8",
* "playback_url_dvr": "http://host/job_id/us-west-2/profile_2/chunklist_dvr.m3u8",
* "playback_url_vod": "http://host/job_id/us-west-2/profile_2/chunklist_vod.m3u8",
* "label": "Out2"
* }
* ],
* "stream_url": "rtmp://ep6-usw2.a-live.io:1935/3158f1c9bc5c462182079f434ba4ae0a",
* "stream_name": "alive",
* "playback_url": "http://host/job_id/us-west-2/playlist.m3u8",
* "playback_url_dvr": "http://host/job_id/us-west-2/playlist_dvr.m3u8"
* }
*
* @apiError (400) {object} BAD_REQUEST: Invalid region - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (400) {object} BAD_REQUEST: Invalid input value - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (400) {object} BAD_REQUEST: The notification target type is not supported currently - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: After filtering, there is no job to process - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
* @apiErrorExample {object} 404 Error Response
* HTTP/1.1 404 Not Found
* {
* "error_code": "NOT_FOUND",
* "message": "Requested resource does not exist",
* "request_id": "df35af83-ac9b-44b0-b172-a80a11bd0bfa"
* }
*
*
*/
// Cancel Live Job
/**
* @api {put} /v1/jobs/{job_id}/cancel Cancel Live Job
* @apiName Cancel Live Job
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Cancel a live stream
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (Path) {String} job_id The job id.
*
* @apiParamExample {String} Stop Live Stream Example:
* https://api.bcovlive.io/v1/jobs/3158f1c9bc5c462182079f434ba4ae0a/cancel
*
* @apiSuccess (200) {String} id The job id for the stream that was stopped
*
* @apiSuccessExample {object} Success Response Stop a Live Stream:
* HTTP/1.1 200 OK
* {
* "id": "3158f1c9bc5c462182079f434ba4ae0a"
* }
*
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: After filtering, there is no job to process - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Activate SEP Stream
/**
* @api {put} /v1/jobs/{job_id}/activate Activate SEP Stream
* @apiName Activate SEP Stream
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Activate SEP (static entry point) Stream
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (Path) {String} job_id The job id.
*
* @apiParamExample {String} Activate SEP Job Example:
* https://api.bcovlive.io/v1/jobs/3158f1c9bc5c462182079f434ba4ae0a/activate
*
* @apiSuccess (200) {String} id The job id for the stream that was activated
*
* @apiSuccessExample {object} Success Response Stop a Live Stream:
* HTTP/1.1 200 OK
* {
* "id": "3158f1c9bc5c462182079f434ba4ae0a"
* }
*
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: After filtering, there is no job to process - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Deactivate SEP Stream
/**
* @api {put} /v1/jobs/{job_id}/deactivate Deactivate SEP Stream
* @apiName Deactivate SEP Stream
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Deactivate SEP (static entry point) Stream
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (Path) {String} job_id The job id.
*
* @apiParamExample {String} Deactivate SEP Job Example:
* https://api.bcovlive.io/v1/jobs/3158f1c9bc5c462182079f434ba4ae0a/deactivate
*
 * @apiSuccess (200) {String} id The job id for the stream that was deactivated
*
* @apiSuccessExample {json} Success Response Stop a Live Stream:
* HTTP/1.1 200 OK
* {
* "id": "3158f1c9bc5c462182079f434ba4ae0a"
* }
*
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: After filtering, there is no job to process - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Get Live Job Details
/**
* @api {get} /v1/jobs/{job_id} Get Live Job Details
* @apiName Get Live Job Details
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Get Live Job Details
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (URL Parameters) {String} job_id The job id.
*
* @apiParamExample {String} Get Live Job Details Example:
* https://api.bcovlive.io/v1/jobs/3158f1c9bc5c462182079f434ba4ae0a
*
* @apiSuccess (200) {Object} job Object containing the job details
* @apiSuccess (200) {string} job.created_at ISO 8601 date-time string representing when the job was created
* @apiSuccess (200) {string} job.finished_at ISO 8601 date-time string representing when the live stream was stopped
* @apiSuccess (200) {String} job.id The live job id
 * @apiSuccess (200) {String} job.state The current state of the job - possible values for Live jobs are `standby`, `waiting`, `processing`, `disconnected`, `finishing`, `finished`, `cancelling`, `cancelled`, `failed`; possible values for VOD jobs are `waiting_finish_live`, `waiting`, `processing`, `creating_asset`, `cancelling`, `cancelled`, `finished`, `failed`
* @apiSuccess (200) {string} job.submitted_at ISO 8601 date-time string representing when the job was submitted
* @apiSuccess (200) {string} job.updated_at ISO 8601 date-time string representing when the job was last modified
* @apiSuccess (200) {String} job.region The Amazon AWS region to use for encoding the job
* @apiSuccess (200) {Number} job.reconnect_time The time, in seconds, that the system will wait for a stream to reconnect to the encoder
* @apiSuccess (200) {Number} job.event_length Used to preset and define an end time for the live event. At any point within the specified `event_length` you may reconnect to your stream. The `event_length` setting goes into effect as soon as streaming begins.
* @apiSuccess (200) {Boolean} job.live_stream Indicates whether this is a live stream or VOD
* @apiSuccess (200) {Boolean} job.ad_insertion Indicates whether SSAI is enabled
* @apiSuccess (200) {String} job.playback_url Playback URL for the live stream
* @apiSuccess (200) {String} job.playback_url_dvr Playback URL for the live DVR
* @apiSuccess (200) {Object} job.input_media_file Object containing properties for the input media file
* @apiSuccess (200) {Number} job.input_media_file.audio_bitrate_in_kbps Audio bitrate of the input media file
* @apiSuccess (200) {String} job.input_media_file.audio_codec Audio codec of the input media file
* @apiSuccess (200) {Number} job.input_media_file.audio_sample_rate Audio sample rate of the input media file
* @apiSuccess (200) {Number} job.input_media_file.audio_tracks The number of audio tracks
* @apiSuccess (200) {Number} job.input_media_file.channels The number of audio channels
* @apiSuccess (200) {string} job.input_media_file.created_at ISO 8601 date-time string representing when the input file was created
 * @apiSuccess (200) {Number} job.input_media_file.duration_in_ms Duration of the input media file in milliseconds
* @apiSuccess (200) {String} job.input_media_file.error_class Type of error thrown
* @apiSuccess (200) {String} job.input_media_file.error_message Error message thrown
* @apiSuccess (200) {Number} job.input_media_file.file_size_bytes File size
* @apiSuccess (200) {string} job.input_media_file.finished_at ISO 8601 date-time string representing when the input file was finished
* @apiSuccess (200) {String} job.input_media_file.format Format of the input file
* @apiSuccess (200) {Number} job.input_media_file.frame_rate Frame rate of the input file
* @apiSuccess (200) {Number} job.input_media_file.height Frame height of the input file
* @apiSuccess (200) {String} job.input_media_file.id System id of the input file
* @apiSuccess (200) {String} job.input_media_file.md5_checksum Checksum for the input file
* @apiSuccess (200) {String} job.input_media_file.state Current state of input file processing
* @apiSuccess (200) {string} job.input_media_file.updated_at ISO 8601 date-time string representing when the input file was last modified
* @apiSuccess (200) {Number} job.input_media_file.video_bitrate_in_kbps Video bitrate of the input media file
* @apiSuccess (200) {String} job.input_media_file.video_codec Video codec of the input media file
* @apiSuccess (200) {Number} job.input_media_file.width Frame width of the input media file
* @apiSuccess (200) {Number} job.input_media_file.total_bitrate_in_kbps Total bitrate of the input media file
* @apiSuccess (200) {String} job.input_media_file.url URL for the input media file
* @apiSuccess (200) {String} job.slate id for a slate of assets included
* @apiSuccess (200) {Object} job.stream Object containing properties for the live stream
* @apiSuccess (200) {string} job.stream.created_at ISO 8601 date-time string representing when the stream was created
* @apiSuccess (200) {Number} job.stream.duration ISO Duration of the stream in seconds
* @apiSuccess (200) {string} job.stream.finished_at ISO 8601 date-time string representing when the stream was finished
* @apiSuccess (200) {Number} job.stream.height Frame height of the stream
* @apiSuccess (200) {String} job.stream.id System id of the stream
* @apiSuccess (200) {String} job.stream.name Name of the stream
* @apiSuccess (200) {String} job.stream.protocol Protocol of the stream
* @apiSuccess (200) {string} job.stream.updated_at ISO 8601 date-time string representing when the stream was last modified
* @apiSuccess (200) {Number} job.stream.video_bitrate_in_kbps Video bitrate of the input media file
* @apiSuccess (200) {String} job.stream.video_codec Video codec of the input media file
* @apiSuccess (200) {Number} job.stream.width Frame width of the stream
* @apiSuccess (200) {Number} job.stream.total_bitrate_in_kbps Total bitrate of the stream
* @apiSuccess (200) {String} job.stream.region AWS region list specified for the account
* @apiSuccess (200) {String} job.stream.url URL for the stream
* @apiSuccess (200) {Object} job.stream.location Object representing the location of the stream
* @apiSuccess (200) {Object} job.stream.location.source Object representing the location source of the stream
* @apiSuccess (200) {Object} job.stream.destination Object representing the destination of the stream
* @apiSuccess (200) {Object} job.stream.destination.source Object representing the destination source of the stream
* @apiSuccess (200) {Object[]} job.output_media_files Array of objects containing properties for the output media files
* @apiSuccess (200) {Number} job.output_media_files.audio_bitrate_in_kbps Audio bitrate of the output media file
* @apiSuccess (200) {String} job.output_media_files.audio_codec Audio codec of the output media file
* @apiSuccess (200) {Number} job.output_media_files.audio_sample_rate Audio sample rate of the output media file
* @apiSuccess (200) {Number} job.output_media_files.audio_tracks The number of audio tracks
* @apiSuccess (200) {Number} job.output_media_files.channels The number of audio channels
* @apiSuccess (200) {string} job.output_media_files.created_at ISO 8601 date-time string representing when the output file was created
 * @apiSuccess (200) {Number} job.output_media_files.duration_in_ms Duration of the output media file in milliseconds
* @apiSuccess (200) {String} job.output_media_files.error_class Type of error thrown
* @apiSuccess (200) {String} job.output_media_files.error_message Error message thrown
* @apiSuccess (200) {Number} job.output_media_files.file_size_bytes File size
* @apiSuccess (200) {string} job.output_media_files.finished_at ISO 8601 date-time string representing when the output file was finished
* @apiSuccess (200) {String} job.output_media_files.format Format of the output file
* @apiSuccess (200) {Number} job.output_media_files.frame_rate Frame rate of the output file
* @apiSuccess (200) {Number} job.output_media_files.height Frame height of the output file
* @apiSuccess (200) {String} job.output_media_files.id System id of the output file
* @apiSuccess (200) {String} job.output_media_files.md5_checksum Checksum for the output file
* @apiSuccess (200) {String} job.output_media_files.state Current state of output file processing
* @apiSuccess (200) {string} job.output_media_files.updated_at ISO 8601 date-time string representing when the output file was last modified
* @apiSuccess (200) {Number} job.output_media_files.video_bitrate_in_kbps Video bitrate of the output media file
* @apiSuccess (200) {String} job.output_media_files.video_codec Video codec of the output media file
* @apiSuccess (200) {Number} job.output_media_files.width Frame width of the output media file
* @apiSuccess (200) {Number} job.output_media_files.total_bitrate_in_kbps Total bitrate of the output media file
* @apiSuccess (200) {Number} job.output_media_files.keyframe_interval Keyframe interval for the output media file
* @apiSuccess (200) {Boolean} job.output_media_files.keyframe_interval_follow_source Whether keyframe rate for the output matches the source
 * @apiSuccess (200) {Boolean} job.output_media_files.live_stream Whether the output is a live stream
 * @apiSuccess (200) {Number} job.output_media_files.keyframe_interval Keyframe interval for the output media file
* @apiSuccess (200) {String} job.output_media_files.playback_url URL for the output file
* @apiSuccess (200) {String} job.output_media_files.playback_url_dvr Live DVR url for live stream output
* @apiSuccess (200) {String} job.output_media_files.playback_url_vod URL for VOD output
* @apiSuccess (200) {String} job.output_media_files.playlist_type Playlist type for playlist output
* @apiSuccess (200) {String} job.output_media_files.type Will be `playlist` for playlist output
* @apiSuccess (200) {String} job.output_media_files.filename File name for the playlist manifest
* @apiSuccess (200) {String} job.output_media_files.dvr_filename File name for the DVR playlist manifest
*
* @apiSuccessExample {object} Success Response Get Live Job Details:
* HTTP/1.1 200 OK
* {
* "job": {
* "job_out_bytes": 0,
* "job_playlists": [
* {
* "delivery": {
* "media_cdn_prepend": "bcovlive-a.akamaihd.net",
* "ssai_cdn_prepend": "bcovlive-a.akamaihd.net",
* "media_cdn_protocol": "https",
* "ssai_cdn_protocol": "https",
* "ssai_manifest_append": "_ssaiM"
* },
* "masterLocation": "alive-pr-us-west-2-media",
* "default_playlist_name": "playlist.m3u8",
* "type": {
* "protocol": "HTTP",
* "profile": "BcovAliveCupertino",
* "name": "defaultS3",
* "streaming_delivery_format": "hls",
* "video_format": "mpeg-ts",
* "pushpublish": "bcov",
* "type": "S3"
* },
* "profile_sources": [
* "profile_0",
* "profile_1",
* "profile_2"
* ],
* "default_dvr_playlist_name": "playlist_dvr.m3u8",
* "playlist_name": "playlist.m3u8",
* "default_chunklist_name": "chunklist.m3u8",
* "default_dvr_chunklist_name": "chunklist_dvr.m3u8",
* "playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist.m3u8",
* "default_vod_chunklist_name": "chunklist_vod.m3u8",
* "default_s3_bucket": "alive-pr-[[region]]-media",
* "dvr_playlist_name": "playlist_dvr.m3u8",
* "default_s3_bucket_ads": "alive-pr-[[region]]-ads",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist_dvr.m3u8"
* }
* ],
* "worker_id": "20f41b48fc7648e697fe464063f1a14e",
* "job_raw_input": {
* "outputs": [
* {
* "video_codec": "h264",
* "keyframe_interval": 60,
* "live_stream": true,
* "segment_seconds": 6,
* "width": 1920,
* "h264_profile": "main",
* "label": "hls1080p",
* "video_bitrate": 2400,
* "height": 1080
* },
* {
* "video_codec": "h264",
* "keyframe_interval": 60,
* "live_stream": true,
* "segment_seconds": 6,
* "width": 1280,
* "h264_profile": "main",
* "label": "hls720p",
* "video_bitrate": 1843,
* "height": 720
* },
* {
* "video_codec": "h264",
* "keyframe_interval": 60,
* "live_stream": true,
* "segment_seconds": 6,
* "width": 640,
* "h264_profile": "main",
* "label": "hls480p",
* "video_bitrate": 819,
* "height": 360
* }
* ],
* "live_stream": true,
* "live_sliding_window_duration": 30,
* "region": "us-west-2",
* "reconnect_time": 20
* },
* "job_id": "95064b4274e243f4814dd6971a3c2dd7",
* "job_group_all_stream_name": "all",
* "job_in_bytes_rate": 0,
* "job_region": "us-west-2",
* "job_in_bytes": 0,
* "job_load": 390,
* "job_input": {
* "live_dvr_ads_window_duration_ms": 600000,
* "vod_sliding_window_duration_ms": 86399000,
* "ext_x_version": 3,
* "static": false,
* "number_of_segments_in_dvr_chunklist": 5,
* "reconnect_time_ms": 20000,
* "randomize_chunk_url": true,
* "ad_insertion": false,
* "max_waiting_time_ms": 1800000,
* "metadata_passthrough": false,
* "vod_enabled": true,
* "live_sliding_window_duration_ms": 30000,
* "segment_duration_ms": 6000,
* "live_dvr_sliding_window_duration_ms": 30000,
* "event_length_ms": 0,
* "number_of_segments_in_live_chunklist": 3,
* "number_of_segments_in_vod_chunklist": 14399,
* "channel_type": "event",
* "region": "us-west-2"
* },
* "account_billing_id": "NA",
* "job_cleaning_started_at": 1526243489574,
* "job_stream_url": "rtmp://ep6-usw2.bcovlive.io:1935/95064b4274e243f4814dd6971a3c2dd7",
* "job_ssai_state": "none",
* "job_outputs": [
* {
* "video_height": 1080,
* "video_width": 1920,
* "audio_codec": "AAC",
* "type": {
* "protocol": "HTTP",
* "profile": "BcovAliveCupertino",
* "name": "defaultS3",
* "streaming_delivery_format": "hls",
* "video_format": "mpeg-ts",
* "pushpublish": "bcov",
* "type": "S3"
* },
* "video_fit_mode": "stretch",
* "profile_name": "profile_0",
* "segment_duration_ms": 6000,
* "playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist.m3u8",
* "video_keyframe_interval_follow_source": false,
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist_dvr.m3u8",
* "video_codec": "H.264",
* "video_codec_level": "3.1",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist_vod.m3u8",
* "audio_bitrate_bps": 196608,
* "gpuid": -1,
* "video_keyframe_interval": 60,
* "label": "hls1080p",
* "info_streaming_delivery_format": "hls",
* "video_codec_profile": "main",
* "name": "Out0",
* "info_format": "mpeg-ts",
* "video_bitrate_bps": 2457600,
* "ui_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist.m3u8",
* "ui_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist_dvr.m3u8",
* "ui_playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist_vod.m3u8"
* },
* {
* "video_height": 720,
* "video_width": 1280,
* "audio_codec": "AAC",
* "type": {
* "protocol": "HTTP",
* "profile": "BcovAliveCupertino",
* "name": "defaultS3",
* "streaming_delivery_format": "hls",
* "video_format": "mpeg-ts",
* "pushpublish": "bcov",
* "type": "S3"
* },
* "video_fit_mode": "stretch",
* "profile_name": "profile_1",
* "segment_duration_ms": 6000,
* "playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist.m3u8",
* "video_keyframe_interval_follow_source": false,
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist_dvr.m3u8",
* "video_codec": "H.264",
* "video_codec_level": "3.1",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist_vod.m3u8",
* "audio_bitrate_bps": 196608,
* "gpuid": -1,
* "video_keyframe_interval": 60,
* "label": "hls720p",
* "info_streaming_delivery_format": "hls",
* "video_codec_profile": "main",
* "name": "Out1",
* "info_format": "mpeg-ts",
* "video_bitrate_bps": 1887232,
* "ui_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist.m3u8",
* "ui_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist_dvr.m3u8",
* "ui_playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist_vod.m3u8"
* },
* {
* "video_height": 360,
* "video_width": 640,
* "audio_codec": "AAC",
* "type": {
* "protocol": "HTTP",
* "profile": "BcovAliveCupertino",
* "name": "defaultS3",
* "streaming_delivery_format": "hls",
* "video_format": "mpeg-ts",
* "pushpublish": "bcov",
* "type": "S3"
* },
* "video_fit_mode": "stretch",
* "profile_name": "profile_2",
* "segment_duration_ms": 6000,
* "playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist.m3u8",
* "video_keyframe_interval_follow_source": false,
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist_dvr.m3u8",
* "video_codec": "H.264",
* "video_codec_level": "3.1",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist_vod.m3u8",
* "audio_bitrate_bps": 196608,
* "gpuid": -1,
* "video_keyframe_interval": 60,
* "label": "hls480p",
* "info_streaming_delivery_format": "hls",
* "video_codec_profile": "main",
* "name": "Out2",
* "info_format": "mpeg-ts",
* "video_bitrate_bps": 838656,
* "ui_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist.m3u8",
* "ui_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist_dvr.m3u8",
* "ui_playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist_vod.m3u8"
* }
* ],
* "job_tick": 2,
* "job_transcoding_template_name": "95064b4274e243f4814dd6971a3c2dd7_trans_template",
* "job_cleaning_in_progress": false,
* "cloud_id": "ede5802b4167493eae6f8a93ac98f352",
* "job_sep_state": "none",
* "job_state": "waiting",
* "sep_data": {
*
* },
* "job_created_at": 1526243482713,
* "account_id": "a95ac581551b4478b27910e5675db1f8",
* "job_stream_name": "alive",
* "job_cleaning_error": false,
* "job_out_bytes_rate": 0,
* "job_finished_at": 0,
* "user_id": "c2691d4d039040be96c190a949d754a7",
* "job_streaming_started_at": 0,
* "job_app_name": "95064b4274e243f4814dd6971a3c2dd7",
* "job_cancelling_flag": false,
* "job_last_state_change_at": 1526243482713,
* "encryption": {
*
* },
* "permitted_to_add_dependent_vods": true,
* "vods": [
*
* ],
* "job_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist.m3u8",
* "job_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist_dvr.m3u8",
* "job_ui_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist.m3u8",
* "job_ui_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist_dvr.m3u8"
* }
* }
*
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*
*/
// Manual Ad Cue Point Insertion
/**
* @api {post} /v1/jobs/{job_id}/cuepoint Manual Ad Cue Point Insertion
* @apiName Manual Ad Cue Point Insertion
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Inserts a manual Cue-Out with a duration to the Live ingest point.
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (URL Parameters) {String} job_id The job id you want details for.
* @apiParam (Request Body Fields) {Number} duration An integer value to indicate the length of the ad break in seconds
* @apiParam (Request Body Fields) {String} timecode When to insert the cuepoint in HH:MM:SS:FF from the stream start (FF = frames); if omitted, the cuepoint will be inserted immediately
* @apiParam (Request Body Fields) {Object} [ad_server_data] a set of any variables (key/value pairs) that should be passed to the adServer
*
* @apiParamExample {json} Live Stream Cuepoint Insertion Request Body Example:
* {
* "duration": 30,
* "ad_server_data" : {
* "varToAdServer": "Hello",
 *        "adBreakId": 12312,
* "adBreakCategory": "summer"
* }
* }
*
* @apiSuccess (200) {String} id The id of the live stream job
* @apiSuccess (200) {Object} cue_point The cuepoint data
* @apiSuccess (200) {String} cue_point.id The cuepoint id
* @apiSuccess (200) {Number} cue_point.duration The cuepoint duration in seconds
* @apiSuccess (200) {String} cue_point.accuracy The cuepoint insertion accuracy - may be `segment` or `frame`
* @apiSuccess (200) {string} cue_point.inserted_at Time when the cue point was inserted in the stream
*
* @apiSuccessExample {json} Success response for cuepoint Insertion
* {
* "id": "JOB_ID",
* "cue_point": {
* "id": "adBreak-2f58393ada1442d98eca0817fa565ba4",
* "duration": 30,
* "accuracy": "segment", [ Can be segment or frame ]
* "inserted_at": "2017-07-21T09:30:46.307Z" [ Time when the cue point was inserted in the stream ]
 *    }
* }
*
* @apiError (Error 4xx) {json} BAD_REQUEST 400: Invalid input value - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (Error 4xx) {json} BAD_REQUEST 400: The notification target type is not supported currently - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (Error 4xx) {json} UNAUTHORIZED 401: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (Error 4xx) {json} RESOURCE_NOT_FOUND 404: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (Error 5xx) {json} INTERNAL_SERVER_ERROR 500: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Insert ID3 timed metadata
/**
* @api {post} /v1/jobs/{job_id}/id3tag Insert ID3 timed metadata
* @apiName Insert ID3 timed metadata
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Inserts an ID3 timed metadata tag for an ongoing job. Note that: 1) If using timecode property, the job only stores the most recent request for insertion; 2) If using timecode property, the encoder must be sending SMPTE-formatted (HH:MM:SS:FF) timecode stored in the tc property via OnFI
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (URL Parameters) {String} job_id The job id you want details for.
* @apiParam (Request Body Fields) {Object} id3_tag An object containing variables for the ID3 timed metadata
* @apiParam (Request Body Fields) {String{1..4}} id3_tag.name A name for the tag
* @apiParam (Request Body Fields) {String} id3_tag.value A value for the tag (maximum string data size 256KB)
 * @apiParam (Request Body Fields) {String} [id3_tag.timecode] Time to insert - by default, insertion is immediate - **Note: 1) If you use the `timecode` property, the job only stores the most recent request for insertion; 2) If you use the `timecode` property, the encoder must be sending SMPTE-formatted (HH:MM:SS:FF) timecode stored in the `tc` property via OnFI; 3) Software encoders such as Wirecast and OBS *do not* support sending timecode via OnFI packets in the RTMP stream; 4) Elemental hardware encoders *do* support sending timecode via OnFI packets in the RTMP stream**
*
* @apiParamExample {json} ID3 timed metadata Insertion Request Body Example:
* {
* "id3_tag": {
* "name": "BCOV",
* "value": "my value",
* "timecode": "15:50:49:16"
* }
* }
*
* @apiSuccess (200) {String} id The job id
* @apiSuccess (200) {Object} id3_tag The ID3 tag details
* @apiSuccess (200) {String} id3_tag.tag_name The ID3 tag name
* @apiSuccess (200) {String} id3_tag.tag_value The ID3 tag value
*
* @apiSuccessExample {object} Success response for ID3 timed metadata Insertion
* HTTP/1.1 200 OK
* {
* "id": "JOB_ID",
* "id3_tag": {
* "tag_name": "BCOV",
* "tag_value": "my value"
* }
* }
*
* @apiError (400) {object} BAD_REQUEST: Invalid input value - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Create a Standard Live Job
/**
* @api {post} /v1/jobs Create a Live Job
* @apiName Create a Live Job
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Create a live stream
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (Request Body Fields) {Boolean} live_stream Indicates that the job is a live streaming job.
* @apiParam (Request Body Fields) {String="24x7","event"} [channel_type=event] Indicates whether the job should be billed as event hours or a channel (`24x7`) - see [Channels and Event Hours ](https://support.brightcove.com/overview-brightcove-live-api#Channels_and_hours).
* @apiParam (Request Body Fields) {Boolean} [ad_insertion=false] Setting this parameter to true will enable server side ad insertion (SSAI) on the job. Current support includes, DFP, Freewheel, or any VAST 2.0/3.0 ad tags.
* @apiParam (Request Body Fields) {String="us-west-2","us-east-1","ap-southeast-2", "ap-northeast-1","ap-southeast-1", "eu-central-1", "eu-west-1", "sa-east-1"} region AWS region - you can also specify region as the alias for a list set up for the account by Brightcove Support. See [Supported AWS Regions] (https://support.brightcove.com/overview-brightcove-live-api#Support_aws_regions) for more details on the support in each region.
 * @apiParam (Request Body Fields) {String[]} [rtmp_ip_whitelist] Array of IP addresses whitelisted for RTMP delivery
* @apiParam (Request Body Fields) {Number{-60.0-60.0}} [ad_audio_loudness_level] Adjust the loudness level of the audio. This is measured in LUFS and specified in dB. This is useful to set the output loudness level to conform to a standard (`-23dB` for **EBU R.128**) The recommended setting is `-23`.
* @apiParam (Request Body Fields) {String} [beacon_set] ID for a beacon set (for SSAI only).
* @apiParam (Request Body Fields) {Number{1-7200}} [reconnect_time=30] The time, in seconds, to wait for a stream to reconnect to the encoder. If the reconnect time passes without the stream reconnecting, the job will automatically finish.
* @apiParam (Request Body Fields) {String} [slate] Id for a set of slate assets
* @apiParam (Request Body Fields) {Boolean} [static=false] Whether this is a static entry point (SEP) job
* @apiParam (Request Body Fields) {Object} [encryption] Encryption to apply to the stream.
* @apiParam (Request Body Fields) {String="aes-128"} encryption.method The encryption method to use.
* @apiParam (Request Body Fields) {String="internal","external"} encryption.type The encryption type, depending on whether an internal or external key server will be used.
* @apiParam (Request Body Fields) {String} [encryption.key] The encryption key - either a key, passphrase, or key_rotation is required; if the type is external, key is required
* @apiParam (Request Body Fields) {String} [encryption.passphrase] The encryption key - either a key, passphrase, or key_rotation is required
* @apiParam (Request Body Fields) {Boolean} [encryption.key_rotation=false] Whether to use key rotation - either a key, passphrase, or key_rotation is required
* @apiParam (Request Body Fields) {Number} [encryption.rotate_every=10] Interval for key rotation in video segments
* @apiParam (Request Body Fields) {String} [encryption.external_url] The URL for the external encryption key - this field is required if you specify `type` as `external`, and the external key must match the `key` value
* @apiParam (Request Body Fields) {Number{0-93600}} [event_length=0] Used to preset and define an end time for the live event. At any point within the specified `event_length` you may reconnect to your stream. The `event_length` setting goes into effect as soon as streaming begins.
 * @apiParam (Request Body Fields) {Number{1-86400}} [live_dvr_sliding_window_duration=100] The time, in seconds, to keep in the live DVR manifest. If the stream duration is longer than the window duration, segment references will be removed first in first out. Default is 100 seconds. **Note: for SSAI jobs, the limit is `7200`.**
 * @apiParam (Request Body Fields) {Number{1-600}} [live_dvr_ads_window_duration=600] The time, in seconds, to keep in the live DVR manifest. If the stream duration is longer than the window duration, segment references will be removed first in first out. Default is 600 seconds.
* @apiParam (Request Body Fields) {Number{1-5}} [max_hls_protocol_version=3] Sets the maximum HLS protocol version to use. Special features will be used as available. Default is 3.
* @apiParam (Request Body Fields) {Array} [notifications] Array of notification destination objects or strings - notifications defined here are for **job-level events**. A notification will be sent to the destination when selected event occurs. You can use a simple string with a url: "http://log:[email protected]/post", or you can use an object.
* @apiParam (Request Body Fields) {String} [notifications.event] Event to send the notification for.
* @apiParam (Request Body Fields) {String} notifications.url Destination for the notification.
* @apiParam (Request Body Fields) {String} [notifications.credentials] Credentials The name of the credentials configured in your account for this address
* @apiParam (Request Body Fields) {String="first_segment_uploaded", "output_finished", "state_changed"} [notifications.event="state_changed"] Event type to send notifications for. It is recommended to set events on the job and not individual rendition outputs since renditions will finish simultaneously.
* @apiParam (Request Body Fields) {Object[]} [add_cdns] Array of additional CDN providers to be used for manifest generation. For each CDN provided, the manifest will be prepended accordingly
* @apiParam (Request Body Fields) {String} add_cdns.label A label to identify the CDN.
* @apiParam (Request Body Fields) {String} add_cdns.prepend CDN hostname to be prepended to addresses
* @apiParam (Request Body Fields) {String="http","https"} add_cdns.protocol Protocol to use for the stream delivery
* @apiParam (Request Body Fields) {String} add_cdns.vendor CDN vendor such as `akamai`
* @apiParam (Request Body Fields) {Object} [add_cdns.token_auth] Token authentication details
* @apiParam (Request Body Fields) {String} [add_cdns.token_auth.auth_type] Token authentication type - currently, the only supported value is `Akamai2.0`
* @apiParam (Request Body Fields) {String} [add_cdns.token_auth.key] Your Akamai token auth password
 * @apiParam (Request Body Fields) {String{5..12}} [add_cdns.token_auth.token_name] Your Akamai token name
 * @apiParam (Request Body Fields) {Object} [add_cdns.token_auth.media] The time window for which the token auth applies
* @apiParam (Request Body Fields) {Mixed} [add_cdns.token_auth.media.start_time="now"] The time to apply token auth - `"now"` or epoch time in seconds
* @apiParam (Request Body Fields) {Number} [add_cdns.token_auth.media.end_time] The time to end token auth, epoch time in seconds
 * @apiParam (Request Body Fields) {Number} [add_cdns.token_auth.media.ttl] The time to live in seconds - either `end_time` or `ttl` is required
* @apiParam (Request Body Fields) {Object[]} outputs Array of output specifications for live and VOD assets to be created from the live stream.
* @apiParam (Request Body Fields) {String} outputs.label Label for the live or VOD asset.
* @apiParam (Request Body Fields) {Boolean} outputs.live_stream For jobs, setting live_stream to true indicates the output is a live rendition. If `live_stream` is false, or is not set, the output will be treated as a VOD output.
* @apiParam (Request Body Fields) {Number{0-172800}} [outputs.duration] Clipping API option 1. Duration (in seconds) to clip back from Live. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Number{-60.0-60.0}} [outputs.ad_audio_loudness_level] Adjust the loudness level of the audio. This is measured in LUFS and specified in dB. This is useful to set the output loudness level to conform to a standard (`-23dB` for **EBU R.128**) The recommended setting is `-23`.
* @apiParam (Request Body Fields) {Number{0-2147483647}} [outputs.stream_start_time] Clipping API option 2. An offset, in seconds, from the start of the live stream to mark the beginning of the clip. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Number{stream_start_time-stream_start_time+172800}} [outputs.stream_end_time] Clipping API option 2. An offset, in seconds, from the start of the live stream to mark the end of the clip. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Number{current_time-future_time}} [outputs.start_time] Clipping API option 3. Universal epoch time, in seconds, to mark the beginning of the clip. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Number{start_time-start_time+172800}} [outputs.end_time] Clipping API option 3. Universal epoch time, in seconds, to mark the end of the clip. Note: Clipping API only requires one of the three options for specifying duration or time.
* @apiParam (Request Body Fields) {Boolean} [outputs.copy_video] Specifying `copy_video` will take the video track from the input video file and transmux it into the resulting output file.
* @apiParam (Request Body Fields) {Boolean} [outputs.copy_audio] Specifying `copy_audio` will take the audio track from the input video file and transmux it into the resulting output file.
* @apiParam (Request Body Fields) {Boolean} [outputs.skip_video] Specifying `skip_video` removes the video track.
* @apiParam (Request Body Fields) {Boolean} [outputs.skip_audio] Specifying `skip_audio` removes the audio track.
* @apiParam (Request Body Fields) {Number} [outputs.width] Video frame width. If no width is supplied, we will use 640 pixels.
* @apiParam (Request Body Fields) {Number} [outputs.height] Video frame height. If no height is supplied, we will use 480 pixels.
* @apiParam (Request Body Fields) {String="h264"} [outputs.video_codec] The output video codec. Note: Only h264 is supported.
* @apiParam (Request Body Fields) {String="baseline","main","high"} [outputs.h264_profile] H.264 has three commonly-used profiles: Baseline (lowest), Main, and High. Lower levels are easier to decode, but higher levels offer better compression. For the best compression quality, choose High. For playback on low-CPU machines or many mobile devices, choose Baseline.
* @apiParam (Request Body Fields) {Number{1-6000}} outputs.keyframe_interval The maximum number of frames between each keyframe. If you set a low keyframe_interval it will increase the size / decrease the quality of your output file, but it will allow more precise scrubbing in most players. It’s recommended to have at least one keyframe per segment. If keyframe_interval is not provided, keyframes will follow the input GOP structure.
* @apiParam (Request Body Fields) {Number{64-10000}} outputs.video_bitrate target video bitrate in kbps
* @apiParam (Request Body Fields) {String="aac"} [outputs.audio_codec] The output audio codec to use. Note: Only aac is supported.
* @apiParam (Request Body Fields) {Number{16-1024}} outputs.audio_bitrate An output bitrate setting for the audio track, in Kbps
* @apiParam (Request Body Fields) {Number{2-20}} outputs.segment_seconds Sets the maximum duration of each segment in a segmented output.
* @apiParam (Request Body Fields) {mixed[]} [outputs.notifications] Array of notification destination objects or strings - notifications defined here are **for events specific to the output**. A notification will be sent to the destination when selected event occurs. You can use a simple string with a url: "http://log:[email protected]/post", or you can use an object.
* @apiParam (Request Body Fields) {String} outputs.notifications.url Destination for the notification.
* @apiParam (Request Body Fields) {String} [outputs.notifications.event] Event to send the notification for.
* @apiParam (Request Body Fields) {String} [outputs.notifications.credentials] Credentials The name of the credentials configured in your account for this address
* @apiParam (Request Body Fields) {String} [outputs.rendition_label] Indicates what rendition to use to create a VOD output (from the live job) or which renditions to use. By default, the system uses any transmuxed rendition or the highest resolution output if there is no transmuxed output.
* @apiParam (Request Body Fields) {String="playlist"} [outputs.type] The only type supported is a playlist. This is used for generating multiple master playlists with different renditions in the HLS manifest with the defined stream labels.
* @apiParam (Request Body Fields) {Array} [outputs.streams] When creating a playlist, the streams field is used to define which output renditions (by label) should be included in the manifest. Example format [{"source": "1080p"}, {"source": "720p"}].
* @apiParam (Request Body Fields) {String} [outputs.url] For VOD, URL is mandatory and sets the destination of the final asset destination. For access restricted origins, the credentials a can be passed along with the URL or stored within the Brightcove system. For Live, this is reserved for future use.
* @apiParam (Request Body Fields) {String} [outputs.credentials] The name for credentials with private and public keys can be stored with Brightcove to avoid passing plain text on API requests. This is required if the S3 or FTP origins are restricted. If credentials are not provided, it will be assumed that the origin restrictions are set to public or credentials are passed along with the URL.
* @apiParam (Request Body Fields) {Object} [outputs.videocloud] Video Cloud customer have the option to push their clips directly through Dynamic Ingest. Options "{"video": {"name"}, "ingest": { }". The video object will be sent to the CMS API and can include (description, tags, etc.). Note: the account_id and reference_id will be added automatically. If overriding the reference_id, ensure that the id does not already exist or the job will fail. For more information see: [CMS-API-CreateVideo](https://brightcovelearning.github.io/Brightcove-API-References/cms-api/v1/doc/index.html#api-videoGroup-Create_Video). The ingest object will be sent to the Dynamic Ingest API and can include (master, profile, poster, callbacks, etc). Note: the account_id and video_id are added automatically. For more information see: [DI-API-IngestVideo](https://brightcovelearning.github.io/Brightcove-API-References/dynamic-ingest-api/v1/doc/index.html#api-Ingest-Ingest_Media_Asset).
* @apiParam (Request Body Fields) {Mixed[]} [outputs.notifications] Array of notification destination objects or strings. A notification will be sent to the destination when selected event occurs. You can use a simple string with a url: "http://log:[email protected]/post", or you can use an object.
* @apiParam (Request Body Fields) {String} [outputs.notifications.event] Event to send the notification for.
* @apiParam (Request Body Fields) {String} outputs.notifications.url Destination for the notification.
*
* @apiParamExample {object} Standard Live Stream Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "outputs": [
* {
* "label": "hls1080p",
* "live_stream": true,
* "width": 1920,
* "height": 1080,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 2400,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 1280,
* "height": 720,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
* @apiParamExample {object} Live Stream Transmuxed Rendition Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "outputs": [
* {
* "label": "hls1080p transmux",
* "live_stream": true,
* "copy_video": true,
* "copy_audio": true,
* "segment_seconds": 6
* },
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6
* },
* {
* "url":"s3://YOUR_BUCKET/live/20160403004644_test.mp4",
* "credentials": "YOUR_CREDENTIALS"
* }
* ]
* }
*
* @apiParamExample {object} Live Stream with VOD Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6
* },
* {
* "url":"s3://YOUR_BUCKET/live/20160403004644_test.mp4",
* "credentials": "YOUR_CREDENTIALS"
* }
* ]
* }
*
* @apiParamExample {object} Live Stream with VOD and Notifications Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "notifications": [
* "http://httpbin.org/post?liveStateChange",
* {
* "url": "http://httpbin.org/post?liveStarted",
* "event": "first_segment_uploaded"
* },
* {
* "url": "http://httpbin.org/post?liveFinished",
* "event": "output_finished"
* }],
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "height": 720,
* "video_bitrate": 2000,
 *        "segment_seconds": 6,
 *        "video_codec": "h264",
 *        "h264_profile": "high"
* },
* {
* "label": "hls360p",
* "live_stream": true,
* "height": 360,
* "video_bitrate": 650,
* "segment_seconds": 6
* },
* {
* "url":"s3://YOUR_BUCKET/path/filename.mp4",
* "credentials": "YOUR_CREDENTIALS",
* "notifications": [{
* "url": "http://httpbin.org/post?vodStateChange"
* },
* {
* "url": "http://httpbin.org/post?vodFinished",
* "event": "output_finished"
* }]
* }]
* }
*
* @apiParamExample {object} Live Stream with Multiple Output Playlists Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 180,
* "outputs": [{
* "label": "hls1080p",
* "live_stream": true,
* "height": 1080,
* "video_bitrate": 3000,
* "segment_seconds": 6,
* "keyframe_interval": 90
* },{
* "label": "hls720p",
* "live_stream": true,
* "height": 720,
* "video_bitrate": 2000,
* "segment_seconds": 6,
* "keyframe_interval": 90
* },{
* "label": "AudioOnly",
* "live_stream": true,
* "skip_video": true
* },{
* "label": "playlistVideoALL",
* "type": "playlist",
* "streams": [{"source": "hls1080p"},{"source": "hls720p"}]
* },{
* "label": "playlistHIGH",
* "type": "playlist",
* "filename":"playlist-high.m3u8",
* "streams": [{"source": "hls1080p"}]
* },{
* "label": "playlistAudio",
* "type": "playlist",
* "filename":"playlist-audio.m3u8",
* "streams": [{"source": "AudioOnly"}]
* },{
* "url":"s3://YOUR_BUCKET/PATH/20160804104116_test.mp4",
* "credentials": "S3_CREDENTIALS"
* }]
* }
*
* @apiParamExample {object} Live Stream with a Multiple CDN Config Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 30,
* "add_cdns":[
* {
* "label": "akamai",
* "prepend": "akamai.playback.com/someApplication",
* "protocol": "http"
* },
* {
* "label": "level3_1",
* "prepend": "l3.playback.io/somPath/someApplication/someFolder",
* "protocol": "http"
* }
* ],
* "outputs": [
* {
* "label": "hls1",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls2",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
*
*
* @apiParamExample {object} Live Stream with SSAI and VOD output Example:
* {
* "ad_insertion": true,
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 180,
* "slate": "bbbff5ad67a94941be8cb987ba23049d",
* "notifications": [
* "http://httpbin.org/post"
* ],
* "add_cdns":[{
* "label": "akamai-test",
* "prepend": "vrnginx-useast.akamai.com",
* "protocol": "http"
* }],
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "height": 720,
* "video_bitrate": 2400,
* "segment_seconds": 6,
* "keyframe_interval": 90
* },{
* "label": "hls480p",
* "live_stream": true,
* "height": 480,
* "video_bitrate": 1000,
* "segment_seconds": 6,
* "keyframe_interval": 90
* }, {
* "url":"s3://YOUR_BUCKET/live/20160403004644_test.mp4",
* "credentials": "YOUR_CREDENTIALS"
* }]
* }
*
* @apiParamExample {object} Standard Live Stream with Internal Encryption Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "encryption": {
* "method": "aes-128",
* "type": "internal",
* "key":"01234567890123450123456789012345"
 *    },
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
* @apiParamExample {object} Standard Live Stream with Internal Encryption and Passphrase Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "encryption": {
* "method": "aes-128",
* "type": "internal",
 *        "passphrase": "SuperSecret"
 *    },
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
* @apiParamExample {object} Standard Live Stream with Internal Encryption and Rotation Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "encryption": {
* "method": "aes-128",
* "type": "internal",
* "rotate_every": 5,
 *        "key_rotation": true
 *    },
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
* @apiParamExample {object} Standard Live Stream with External URL and Key Example:
* {
* "live_stream": true,
* "region": "my-region-list",
* "reconnect_time": 20,
* "encryption": {
* "method": "aes-128",
* "type": "external",
* "key": "01234567890123456789012345678901",
* "external_url": "https://myserver/mykey/a.key"
 *    },
* "outputs": [
* {
* "label": "hls720p",
* "live_stream": true,
* "width": 960,
* "height": 540,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 1843,
* "segment_seconds": 6,
* "keyframe_interval": 60
* },
* {
* "label": "hls480p",
* "live_stream": true,
* "width": 640,
* "height": 360,
* "video_codec": "h264",
* "h264_profile": "main",
* "video_bitrate": 819,
* "segment_seconds": 6,
* "keyframe_interval": 60
* }
* ]
* }
*
*
* @apiSuccess (200) {String} id Id for the stream.
* @apiSuccess (200) {String} stream_url The stream URL to add to your encoder configuration.
* @apiSuccess (200) {String} stream_name The stream name to add to your encoder configuration.
* @apiSuccess (200) {Object[]} outputs Details on each output rendition of the Live job.
* @apiSuccess (200) {String} outputs.id The unique id for the rendition.
* @apiSuccess (200) {String} outputs.playback_url Media HLS manifest for the specified rendition (non-SSAI).
 * @apiSuccess (200) {String} outputs.playback_url_dvr Media HLS manifest for the specified rendition with a configurable DVR window - default 100 seconds (non-SSAI).
 * @apiSuccess (200) {String} outputs.playback_url_vod Media HLS manifest for the VOD version of the stream if one was specified in the job settings - note that the VOD will not be available until the live event has finished and the creation of the VOD is complete (non-SSAI).
* @apiSuccess (200) {Boolean} live_stream Indicates that the job is a live streaming job.
* @apiSuccess (200) {Boolean} ad_insertion Setting this parameter to true will enable server side ad insertion (SSAI) on the job. Current support includes, DFP, Freewheel, or any VAST 2.0/3.0 ad tags.
* @apiSuccess (200) {String} region You can specify an Amazon AWS region to use for encoding a job and we will process the job on servers in the region specified. It’s recommended to use the region closest to your encoder.
* @apiSuccess (200) {Number} reconnect_time The time, in seconds, to wait for a stream to reconnect to the encoder. Default is set to 30 seconds.
* @apiSuccess (200) {Number} event_length Used to preset and define an end time for the live event. At any point within the specified `event_length` you may reconnect to your stream. The `event_length` setting goes into effect as soon as streaming begins.
* @apiSuccess (200) {Number} max_hls_protocol_version Sets the maximum HLS protocol version to use. Special features will be used as available. Default is 3.
* @apiSuccess (200) {String} slate id for slate of assets to be included
 * @apiSuccess (200) {String} sep_state The current state of the job's SEP (static entry point) - possible values: 'waiting', 'pending_activation', 'activation_in_progress', 'ready', 'pending_deactivation', 'deactivation_in_progress', 'cancelled', 'finished'
* @apiSuccess (200) {mixed[]} notifications Array of notification destination objects or strings. A notification will be sent to the destination when selected event occurs. You can use a simple string with a url: "http://log:[email protected]/post", or you can use an object.
* @apiSuccess (200) {String} notifications.url Destination for the notification.
* @apiSuccess (200) {String} notifications.credentials Credentials for the destination, if required.
* @apiSuccess (200) {String} notifications.event Event type to send notifications for. It’s recommended to set events on the job and not individual rendition outputs since renditions will finish simultaneously.
* @apiSuccess (200) {Object[]} add_cdns Array of additional CDN providers to be used for manifest generation. For each CDN provided, the manifest will be prepended accordingly
* @apiSuccess (200) {String} add_cdns.label A label to identify the CDN.
* @apiSuccess (200) {String} add_cdns.prepend Typically, a domain or path to prepend to addresses
* @apiSuccess (200) {String} add_cdns.protocol Protocol to use in sending the stream to the CDN.
*
* @apiSuccessExample {object} Success Response Standard Live Stream:
* HTTP/1.1 200 OK
* {
* "id": "6666482ac53b4f9bac473157c0897bde",
* "outputs": [
* {
* "id": "0-6666482ac53b4f9bac473157c0897bde",
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_0/chunklist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_0/chunklist_vod.m3u8",
* "playback_added_cdns": [],
* "label": "hls1080p"
* },
* {
* "id": "1-6666482ac53b4f9bac473157c0897bde",
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_1/chunklist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_1/chunklist_vod.m3u8",
* "playback_added_cdns": [],
* "label": "hls720p"
* },
* {
* "id": "2-6666482ac53b4f9bac473157c0897bde",
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_2/chunklist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_2/chunklist_dvr.m3u8",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/profile_2/chunklist_vod.m3u8",
* "playback_added_cdns": [],
* "label": "hls480p"
* },
* {
* "id": "3-6666482ac53b4f9bac473157c0897bde",
* "playlist_type": "defaultS3",
* "type": "playlist",
* "filename": "playlist.m3u8",
* "dvr_filename": "playlist_dvr.m3u8",
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/playlist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/playlist_dvr.m3u8"
* }
* ],
* "stream_url": "rtmp://ep2-usw2.bcovlive.io:1935/6666482ac53b4f9bac473157c0897bde",
* "stream_name": "alive",
* "static": false,
* "encryption": {},
* "playback_url": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/playlist.m3u8",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/6666482ac53b4f9bac473157c0897bde/us-west-2/NA/playlist_dvr.m3u8"
* }
*
* @apiSuccessExample {object} Success Response Live Stream with Multiple Output Playlists:
* HTTP/1.1 200 OK
* {
* "id": "edcd4d356228417d80345a0c91864efe",
* "outputs": [
* {
* "id": "0-edcd4d356228417d80345a0c91864efe",
* "label": "hls1080p"
* },
* {
* "id": "1-edcd4d356228417d80345a0c91864efe",
* "label": "hls720p"
* },
* {
* "id": "2-edcd4d356228417d80345a0c91864efe",
* "label": "AudioOnly"
* },
* {
* "id": "3-edcd4d356228417d80345a0c91864efe",
* "playlist_type": "defaultS3",
* "type": "playlist",
* "label": "playlistVideoALL",
* "filename": "8b31bafdb20d462ea2e6e336a67ed4f3.m3u8",
* "dvr_filename": "8b31bafdb20d462ea2e6e336a67ed4f3_dvr.m3u8",
* "streams": [
* "hls1080p",
* "hls720p"
* ],
* "playback_url": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3_dvr.m3u8",
* "playback_url_s3": "s3://BUCKET/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3.m3u8"
* },
* {
* "id": "4-edcd4d356228417d80345a0c91864efe",
* "playlist_type": "defaultS3",
* "type": "playlist",
* "label": "playlistHIGH",
* "filename": "playlist-high.m3u8",
* "dvr_filename": "playlist-high_dvr.m3u8",
* "streams": [
* "hls1080p"
* ],
* "playback_url": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-high.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-high_dvr.m3u8",
* "playback_url_s3": "s3://BUCKET/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-high.m3u8"
* },
* {
* "id": "5-edcd4d356228417d80345a0c91864efe",
* "playlist_type": "defaultS3",
* "type": "playlist",
* "label": "playlistAudio",
* "filename": "playlist-audio.m3u8",
* "dvr_filename": "playlist-audio_dvr.m3u8",
* "streams": [
* "AudioOnly"
* ],
* "playback_url": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-audio.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-audio_dvr.m3u8",
* "playback_url_s3": "s3://BUCKET/edcd4d356228417d80345a0c91864efe/us-west-2/playlist-audio.m3u8"
* }
* ],
* "stream_url": "rtmp://host/edcd4d356228417d80345a0c91864efe",
* "stream_name": "alive",
* "playback_url": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3_dvr.m3u8",
 * "playback_url_s3": "s3://BUCKET/edcd4d356228417d80345a0c91864efe/us-west-2/8b31bafdb20d462ea2e6e336a67ed4f3.m3u8"
* }
*
* @apiSuccessExample {json} Success Response Live Stream with a Multiple CDN Config:
* HTTP/1.1 200 OK
* {
* "id": "b3c20e416f964fb1b67334877bade99b",
* "outputs": [
* {
* "id": "0b3c20e416f964fb1b67334877bade99b",
* "playback_url": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8",
* "playback_added_cdns": [
* {
* "label": "akamai",
* "playback_url": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist.m3u8",
* "playback_url_dvr": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8"
* },
* {
* "label": "level3_1",
* "playback_url": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist.m3u8",
* "playback_url_dvr": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8"
* }
* ],
* "label": "hls1"
* },
* {
* "id": "1b3c20e416f964fb1b67334877bade99b",
* "playback_url": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_vod.m3u8",
* "playback_added_cdns": [
* {
* "label": "akamai",
* "playback_url": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist.m3u8",
* "playback_url_dvr": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_vod.m3u8"
* },
* {
* "label": "level3_1",
* "playback_url": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist.m3u8",
* "playback_url_dvr": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_1/chunklist_vod.m3u8"
* }
* ],
* "label": "hls2"
* }
* ],
* "stream_url": "rtmp://ep16-usw2.a-live.io:1935/b3c20e416f964fb1b67334877bade99b",
* "stream_name": "alive",
* "playback_url": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist.m3u8",
* "playback_url_dvr": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist_dvr.m3u8",
* "playback_url_vod": "http://bcovlive-a.akamaihd.net/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8",
* "playback_added_cdns": [
* {
* "label": "akamai",
* "playback_url": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist.m3u8",
* "playback_url_dvr": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist_dvr.m3u8",
* "playback_url_vod": "http://akamai.playback.com/someApplication/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8"
* },
* {
* "label": "level3_1",
* "playback_url": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist.m3u8",
* "playback_url_dvr": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/playlist_dvr.m3u8",
* "playback_url_vod": "http://l3.playback.io/somPath/someApplication/someFolder/b3c20e416f964fb1b67334877bade99b/us-west-2/profile_0/chunklist_vod.m3u8"
* }
* ]
* }
*
*
* @apiSuccessExample {object} Success Response Live Stream with SSAI and VOD output:
* HTTP/1.1 200 OK
* {
* "id": "3158f1c9bc5c462182079f434ba4ae0a",
* "outputs": [
* {
* "id": "03158f1c9bc5c462182079f434ba4ae0a",
* "playback_url":"http://host/job_id/us-west-2/profile_0/chunklist.m3u8",
 * "playback_url_dvr": "http://host/job_id/us-west-2/profile_0/chunklist_dvr.m3u8",
* "playback_url_vod": "http://host/job_id/us-west-2/profile_0/chunklist_vod.m3u8",
* "label": "Out0"
* },
* {
* "id": "13158f1c9bc5c462182079f434ba4ae0a",
* "playback_url": "http://host/job_id/us-west-2/profile_1/chunklist.m3u8",
* "playback_url_dvr": "http://host/job_id/us-west-2/profile_1/chunklist_dvr.m3u8",
* "playback_url_vod": "http://host/job_id/us-west-2/profile_1/chunklist_vod.m3u8",
* "label": "Out1"
* },
* {
* "id": "23158f1c9bc5c462182079f434ba4ae0a",
* "playback_url": "http://host/job_id/us-west-2/profile_2/chunklist.m3u8",
* "playback_url_dvr": "http://host/job_id/us-west-2/profile_2/chunklist_dvr.m3u8",
* "playback_url_vod": "http://host/job_id/us-west-2/profile_2/chunklist_vod.m3u8",
* "label": "Out2"
* }
* ],
* "stream_url": "rtmp://ep6-usw2.a-live.io:1935/3158f1c9bc5c462182079f434ba4ae0a",
* "stream_name": "alive",
* "playback_url": "http://host/job_id/us-west-2/playlist.m3u8",
* "playback_url_dvr": "http://host/job_id/us-west-2/playlist_dvr.m3u8"
* }
*
* @apiError (400) {object} BAD_REQUEST: Invalid region - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (400) {object} BAD_REQUEST: Invalid input value - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (400) {object} BAD_REQUEST: The notification target type is not supported currently - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: After filtering, there is no job to process - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
* @apiErrorExample {object} 404 Error Response
* HTTP/1.1 404 Not Found
* {
* "error_code": "NOT_FOUND",
* "message": "Requested resource does not exist",
* "request_id": "df35af83-ac9b-44b0-b172-a80a11bd0bfa"
* }
*
*
*/
// Cancel Live Job
/**
* @api {put} /v1/jobs/{job_id}/cancel Cancel Live Job
* @apiName Cancel Live Job
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Cancel a live stream
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (Path) {String} job_id The job id.
*
* @apiParamExample {String} Stop Live Stream Example:
* https://api.bcovlive.io/v1/jobs/3158f1c9bc5c462182079f434ba4ae0a/cancel
*
* @apiSuccess (200) {String} id The job id for the stream that was stopped
*
* @apiSuccessExample {object} Success Response Stop a Live Stream:
* HTTP/1.1 200 OK
* {
* "id": "3158f1c9bc5c462182079f434ba4ae0a"
* }
*
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: After filtering, there is no job to process - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Activate SEP Stream
/**
* @api {put} /v1/jobs/{job_id}/activate Activate SEP Stream
* @apiName Activate SEP Stream
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Activate SEP (static entry point) Stream
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (Path) {String} job_id The job id.
*
* @apiParamExample {String} Activate SEP Job Example:
* https://api.bcovlive.io/v1/jobs/3158f1c9bc5c462182079f434ba4ae0a/activate
*
* @apiSuccess (200) {String} id The job id for the stream that was activated
*
 * @apiSuccessExample {object} Success Response Activate SEP Stream:
* HTTP/1.1 200 OK
* {
* "id": "3158f1c9bc5c462182079f434ba4ae0a"
* }
*
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: After filtering, there is no job to process - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Deactivate SEP Stream
/**
* @api {put} /v1/jobs/{job_id}/deactivate Deactivate SEP Stream
* @apiName Deactivate SEP Stream
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Deactivate SEP (static entry point) Stream
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (Path) {String} job_id The job id.
*
* @apiParamExample {String} Deactivate SEP Job Example:
* https://api.bcovlive.io/v1/jobs/3158f1c9bc5c462182079f434ba4ae0a/deactivate
*
* @apiSuccess (200) {String} id The job id for the stream that was Deactivated
*
 * @apiSuccessExample {json} Success Response Deactivate SEP Stream:
* HTTP/1.1 200 OK
* {
* "id": "3158f1c9bc5c462182079f434ba4ae0a"
* }
*
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: After filtering, there is no job to process - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Get Live Job Details
/**
* @api {get} /v1/jobs/{job_id} Get Live Job Details
* @apiName Get Live Job Details
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Get Live Job Details
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (URL Parameters) {String} job_id The job id.
*
* @apiParamExample {String} Get Live Job Details Example:
* https://api.bcovlive.io/v1/jobs/3158f1c9bc5c462182079f434ba4ae0a
*
* @apiSuccess (200) {Object} job Object containing the job details
* @apiSuccess (200) {string} job.created_at ISO 8601 date-time string representing when the job was created
* @apiSuccess (200) {string} job.finished_at ISO 8601 date-time string representing when the live stream was stopped
* @apiSuccess (200) {String} job.id The live job id
 * @apiSuccess (200) {String} job.state The current state of the job - possible values for Live jobs are `standby`, `waiting`, `processing`, `disconnected`, `finishing`, `finished`, `cancelling`, `cancelled`, `failed`; possible values for VOD jobs are `waiting_finish_live`, `waiting`, `processing`, `creating_asset`, `cancelling`, `cancelled`, `finished`, `failed`
* @apiSuccess (200) {string} job.submitted_at ISO 8601 date-time string representing when the job was submitted
* @apiSuccess (200) {string} job.updated_at ISO 8601 date-time string representing when the job was last modified
* @apiSuccess (200) {String} job.region The Amazon AWS region to use for encoding the job
* @apiSuccess (200) {Number} job.reconnect_time The time, in seconds, that the system will wait for a stream to reconnect to the encoder
* @apiSuccess (200) {Number} job.event_length Used to preset and define an end time for the live event. At any point within the specified `event_length` you may reconnect to your stream. The `event_length` setting goes into effect as soon as streaming begins.
* @apiSuccess (200) {Boolean} job.live_stream Indicates whether this is a live stream or VOD
* @apiSuccess (200) {Boolean} job.ad_insertion Indicates whether SSAI is enabled
* @apiSuccess (200) {String} job.playback_url Playback URL for the live stream
* @apiSuccess (200) {String} job.playback_url_dvr Playback URL for the live DVR
* @apiSuccess (200) {Object} job.input_media_file Object containing properties for the input media file
* @apiSuccess (200) {Number} job.input_media_file.audio_bitrate_in_kbps Audio bitrate of the input media file
* @apiSuccess (200) {String} job.input_media_file.audio_codec Audio codec of the input media file
* @apiSuccess (200) {Number} job.input_media_file.audio_sample_rate Audio sample rate of the input media file
* @apiSuccess (200) {Number} job.input_media_file.audio_tracks The number of audio tracks
* @apiSuccess (200) {Number} job.input_media_file.channels The number of audio channels
* @apiSuccess (200) {string} job.input_media_file.created_at ISO 8601 date-time string representing when the input file was created
 * @apiSuccess (200) {Number} job.input_media_file.duration_in_ms Duration of the input media file in milliseconds
* @apiSuccess (200) {String} job.input_media_file.error_class Type of error thrown
* @apiSuccess (200) {String} job.input_media_file.error_message Error message thrown
* @apiSuccess (200) {Number} job.input_media_file.file_size_bytes File size
* @apiSuccess (200) {string} job.input_media_file.finished_at ISO 8601 date-time string representing when the input file was finished
* @apiSuccess (200) {String} job.input_media_file.format Format of the input file
* @apiSuccess (200) {Number} job.input_media_file.frame_rate Frame rate of the input file
* @apiSuccess (200) {Number} job.input_media_file.height Frame height of the input file
* @apiSuccess (200) {String} job.input_media_file.id System id of the input file
* @apiSuccess (200) {String} job.input_media_file.md5_checksum Checksum for the input file
* @apiSuccess (200) {String} job.input_media_file.state Current state of input file processing
* @apiSuccess (200) {string} job.input_media_file.updated_at ISO 8601 date-time string representing when the input file was last modified
* @apiSuccess (200) {Number} job.input_media_file.video_bitrate_in_kbps Video bitrate of the input media file
* @apiSuccess (200) {String} job.input_media_file.video_codec Video codec of the input media file
* @apiSuccess (200) {Number} job.input_media_file.width Frame width of the input media file
* @apiSuccess (200) {Number} job.input_media_file.total_bitrate_in_kbps Total bitrate of the input media file
* @apiSuccess (200) {String} job.input_media_file.url URL for the input media file
* @apiSuccess (200) {String} job.slate id for a slate of assets included
* @apiSuccess (200) {Object} job.stream Object containing properties for the live stream
* @apiSuccess (200) {string} job.stream.created_at ISO 8601 date-time string representing when the stream was created
* @apiSuccess (200) {Number} job.stream.duration ISO Duration of the stream in seconds
* @apiSuccess (200) {string} job.stream.finished_at ISO 8601 date-time string representing when the stream was finished
* @apiSuccess (200) {Number} job.stream.height Frame height of the stream
* @apiSuccess (200) {String} job.stream.id System id of the stream
* @apiSuccess (200) {String} job.stream.name Name of the stream
* @apiSuccess (200) {String} job.stream.protocol Protocol of the stream
* @apiSuccess (200) {string} job.stream.updated_at ISO 8601 date-time string representing when the stream was last modified
* @apiSuccess (200) {Number} job.stream.video_bitrate_in_kbps Video bitrate of the input media file
* @apiSuccess (200) {String} job.stream.video_codec Video codec of the input media file
* @apiSuccess (200) {Number} job.stream.width Frame width of the stream
* @apiSuccess (200) {Number} job.stream.total_bitrate_in_kbps Total bitrate of the stream
* @apiSuccess (200) {String} job.stream.region AWS region list specified for the account
* @apiSuccess (200) {String} job.stream.url URL for the stream
* @apiSuccess (200) {Object} job.stream.location Object representing the location of the stream
* @apiSuccess (200) {Object} job.stream.location.source Object representing the location source of the stream
* @apiSuccess (200) {Object} job.stream.destination Object representing the destination of the stream
* @apiSuccess (200) {Object} job.stream.destination.source Object representing the destination source of the stream
* @apiSuccess (200) {Object[]} job.output_media_files Array of objects containing properties for the output media files
* @apiSuccess (200) {Number} job.output_media_files.audio_bitrate_in_kbps Audio bitrate of the output media file
* @apiSuccess (200) {String} job.output_media_files.audio_codec Audio codec of the output media file
* @apiSuccess (200) {Number} job.output_media_files.audio_sample_rate Audio sample rate of the output media file
* @apiSuccess (200) {Number} job.output_media_files.audio_tracks The number of audio tracks
* @apiSuccess (200) {Number} job.output_media_files.channels The number of audio channels
* @apiSuccess (200) {string} job.output_media_files.created_at ISO 8601 date-time string representing when the output file was created
 * @apiSuccess (200) {Number} job.output_media_files.duration_in_ms Duration of the output media file in milliseconds
* @apiSuccess (200) {String} job.output_media_files.error_class Type of error thrown
* @apiSuccess (200) {String} job.output_media_files.error_message Error message thrown
* @apiSuccess (200) {Number} job.output_media_files.file_size_bytes File size
* @apiSuccess (200) {string} job.output_media_files.finished_at ISO 8601 date-time string representing when the output file was finished
* @apiSuccess (200) {String} job.output_media_files.format Format of the output file
* @apiSuccess (200) {Number} job.output_media_files.frame_rate Frame rate of the output file
* @apiSuccess (200) {Number} job.output_media_files.height Frame height of the output file
* @apiSuccess (200) {String} job.output_media_files.id System id of the output file
* @apiSuccess (200) {String} job.output_media_files.md5_checksum Checksum for the output file
* @apiSuccess (200) {String} job.output_media_files.state Current state of output file processing
* @apiSuccess (200) {string} job.output_media_files.updated_at ISO 8601 date-time string representing when the output file was last modified
* @apiSuccess (200) {Number} job.output_media_files.video_bitrate_in_kbps Video bitrate of the output media file
* @apiSuccess (200) {String} job.output_media_files.video_codec Video codec of the output media file
* @apiSuccess (200) {Number} job.output_media_files.width Frame width of the output media file
* @apiSuccess (200) {Number} job.output_media_files.total_bitrate_in_kbps Total bitrate of the output media file
* @apiSuccess (200) {Number} job.output_media_files.keyframe_interval Keyframe interval for the output media file
* @apiSuccess (200) {Boolean} job.output_media_files.keyframe_interval_follow_source Whether keyframe rate for the output matches the source
* @apiSuccess (200) {Number} job.output_media_files.live_stream Whether the output is a live stream
* @apiSuccess (200) {Boolean} job.output_media_files.keyframe_interval Keyframe interval for the output media file
* @apiSuccess (200) {String} job.output_media_files.playback_url URL for the output file
* @apiSuccess (200) {String} job.output_media_files.playback_url_dvr Live DVR url for live stream output
* @apiSuccess (200) {String} job.output_media_files.playback_url_vod URL for VOD output
* @apiSuccess (200) {String} job.output_media_files.playlist_type Playlist type for playlist output
* @apiSuccess (200) {String} job.output_media_files.type Will be `playlist` for playlist output
* @apiSuccess (200) {String} job.output_media_files.filename File name for the playlist manifest
* @apiSuccess (200) {String} job.output_media_files.dvr_filename File name for the DVR playlist manifest
*
* @apiSuccessExample {object} Success Response Get Live Job Details:
* HTTP/1.1 200 OK
* {
* "job": {
* "job_out_bytes": 0,
* "job_playlists": [
* {
* "delivery": {
* "media_cdn_prepend": "bcovlive-a.akamaihd.net",
* "ssai_cdn_prepend": "bcovlive-a.akamaihd.net",
* "media_cdn_protocol": "https",
* "ssai_cdn_protocol": "https",
* "ssai_manifest_append": "_ssaiM"
* },
* "masterLocation": "alive-pr-us-west-2-media",
* "default_playlist_name": "playlist.m3u8",
* "type": {
* "protocol": "HTTP",
* "profile": "BcovAliveCupertino",
* "name": "defaultS3",
* "streaming_delivery_format": "hls",
* "video_format": "mpeg-ts",
* "pushpublish": "bcov",
* "type": "S3"
* },
* "profile_sources": [
* "profile_0",
* "profile_1",
* "profile_2"
* ],
* "default_dvr_playlist_name": "playlist_dvr.m3u8",
* "playlist_name": "playlist.m3u8",
* "default_chunklist_name": "chunklist.m3u8",
* "default_dvr_chunklist_name": "chunklist_dvr.m3u8",
* "playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist.m3u8",
* "default_vod_chunklist_name": "chunklist_vod.m3u8",
* "default_s3_bucket": "alive-pr-[[region]]-media",
* "dvr_playlist_name": "playlist_dvr.m3u8",
* "default_s3_bucket_ads": "alive-pr-[[region]]-ads",
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist_dvr.m3u8"
* }
* ],
* "worker_id": "20f41b48fc7648e697fe464063f1a14e",
* "job_raw_input": {
* "outputs": [
* {
* "video_codec": "h264",
* "keyframe_interval": 60,
* "live_stream": true,
* "segment_seconds": 6,
* "width": 1920,
* "h264_profile": "main",
* "label": "hls1080p",
* "video_bitrate": 2400,
* "height": 1080
* },
* {
* "video_codec": "h264",
* "keyframe_interval": 60,
* "live_stream": true,
* "segment_seconds": 6,
* "width": 1280,
* "h264_profile": "main",
* "label": "hls720p",
* "video_bitrate": 1843,
* "height": 720
* },
* {
* "video_codec": "h264",
* "keyframe_interval": 60,
* "live_stream": true,
* "segment_seconds": 6,
* "width": 640,
* "h264_profile": "main",
* "label": "hls480p",
* "video_bitrate": 819,
* "height": 360
* }
* ],
* "live_stream": true,
* "live_sliding_window_duration": 30,
* "region": "us-west-2",
* "reconnect_time": 20
* },
* "job_id": "95064b4274e243f4814dd6971a3c2dd7",
* "job_group_all_stream_name": "all",
* "job_in_bytes_rate": 0,
* "job_region": "us-west-2",
* "job_in_bytes": 0,
* "job_load": 390,
* "job_input": {
* "live_dvr_ads_window_duration_ms": 600000,
* "vod_sliding_window_duration_ms": 86399000,
* "ext_x_version": 3,
* "static": false,
* "number_of_segments_in_dvr_chunklist": 5,
* "reconnect_time_ms": 20000,
* "randomize_chunk_url": true,
* "ad_insertion": false,
* "max_waiting_time_ms": 1800000,
* "metadata_passthrough": false,
* "vod_enabled": true,
* "live_sliding_window_duration_ms": 30000,
* "segment_duration_ms": 6000,
* "live_dvr_sliding_window_duration_ms": 30000,
* "event_length_ms": 0,
* "number_of_segments_in_live_chunklist": 3,
* "number_of_segments_in_vod_chunklist": 14399,
* "channel_type": "event",
* "region": "us-west-2"
* },
* "account_billing_id": "NA",
* "job_cleaning_started_at": 1526243489574,
* "job_stream_url": "rtmp://ep6-usw2.bcovlive.io:1935/95064b4274e243f4814dd6971a3c2dd7",
* "job_ssai_state": "none",
* "job_outputs": [
* {
* "video_height": 1080,
* "video_width": 1920,
* "audio_codec": "AAC",
* "type": {
* "protocol": "HTTP",
* "profile": "BcovAliveCupertino",
* "name": "defaultS3",
* "streaming_delivery_format": "hls",
* "video_format": "mpeg-ts",
* "pushpublish": "bcov",
* "type": "S3"
* },
* "video_fit_mode": "stretch",
* "profile_name": "profile_0",
* "segment_duration_ms": 6000,
* "playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist.m3u8",
* "video_keyframe_interval_follow_source": false,
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist_dvr.m3u8",
* "video_codec": "H.264",
* "video_codec_level": "3.1",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist_vod.m3u8",
* "audio_bitrate_bps": 196608,
* "gpuid": -1,
* "video_keyframe_interval": 60,
* "label": "hls1080p",
* "info_streaming_delivery_format": "hls",
* "video_codec_profile": "main",
* "name": "Out0",
* "info_format": "mpeg-ts",
* "video_bitrate_bps": 2457600,
* "ui_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist.m3u8",
* "ui_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist_dvr.m3u8",
* "ui_playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_0/chunklist_vod.m3u8"
* },
* {
* "video_height": 720,
* "video_width": 1280,
* "audio_codec": "AAC",
* "type": {
* "protocol": "HTTP",
* "profile": "BcovAliveCupertino",
* "name": "defaultS3",
* "streaming_delivery_format": "hls",
* "video_format": "mpeg-ts",
* "pushpublish": "bcov",
* "type": "S3"
* },
* "video_fit_mode": "stretch",
* "profile_name": "profile_1",
* "segment_duration_ms": 6000,
* "playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist.m3u8",
* "video_keyframe_interval_follow_source": false,
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist_dvr.m3u8",
* "video_codec": "H.264",
* "video_codec_level": "3.1",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist_vod.m3u8",
* "audio_bitrate_bps": 196608,
* "gpuid": -1,
* "video_keyframe_interval": 60,
* "label": "hls720p",
* "info_streaming_delivery_format": "hls",
* "video_codec_profile": "main",
* "name": "Out1",
* "info_format": "mpeg-ts",
* "video_bitrate_bps": 1887232,
* "ui_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist.m3u8",
* "ui_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist_dvr.m3u8",
* "ui_playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_1/chunklist_vod.m3u8"
* },
* {
* "video_height": 360,
* "video_width": 640,
* "audio_codec": "AAC",
* "type": {
* "protocol": "HTTP",
* "profile": "BcovAliveCupertino",
* "name": "defaultS3",
* "streaming_delivery_format": "hls",
* "video_format": "mpeg-ts",
* "pushpublish": "bcov",
* "type": "S3"
* },
* "video_fit_mode": "stretch",
* "profile_name": "profile_2",
* "segment_duration_ms": 6000,
* "playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist.m3u8",
* "video_keyframe_interval_follow_source": false,
* "playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist_dvr.m3u8",
* "video_codec": "H.264",
* "video_codec_level": "3.1",
* "playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist_vod.m3u8",
* "audio_bitrate_bps": 196608,
* "gpuid": -1,
* "video_keyframe_interval": 60,
* "label": "hls480p",
* "info_streaming_delivery_format": "hls",
* "video_codec_profile": "main",
* "name": "Out2",
* "info_format": "mpeg-ts",
* "video_bitrate_bps": 838656,
* "ui_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist.m3u8",
* "ui_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist_dvr.m3u8",
* "ui_playback_url_vod": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/profile_2/chunklist_vod.m3u8"
* }
* ],
* "job_tick": 2,
* "job_transcoding_template_name": "95064b4274e243f4814dd6971a3c2dd7_trans_template",
* "job_cleaning_in_progress": false,
* "cloud_id": "ede5802b4167493eae6f8a93ac98f352",
* "job_sep_state": "none",
* "job_state": "waiting",
* "sep_data": {
*
* },
* "job_created_at": 1526243482713,
* "account_id": "a95ac581551b4478b27910e5675db1f8",
* "job_stream_name": "alive",
* "job_cleaning_error": false,
* "job_out_bytes_rate": 0,
* "job_finished_at": 0,
* "user_id": "c2691d4d039040be96c190a949d754a7",
* "job_streaming_started_at": 0,
* "job_app_name": "95064b4274e243f4814dd6971a3c2dd7",
* "job_cancelling_flag": false,
* "job_last_state_change_at": 1526243482713,
* "encryption": {
*
* },
* "permitted_to_add_dependent_vods": true,
* "vods": [
*
* ],
* "job_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist.m3u8",
* "job_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist_dvr.m3u8",
* "job_ui_playback_url": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist.m3u8",
* "job_ui_playback_url_dvr": "https://bcovlive-a.akamaihd.net/95064b4274e243f4814dd6971a3c2dd7/us-west-2/NA/playlist_dvr.m3u8"
* }
* }
*
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*
*/
// Manual Ad Cue Point Insertion
/**
* @api {post} /v1/jobs/{job_id}/cuepoint Manual Ad Cue Point Insertion
* @apiName Manual Ad Cue Point Insertion
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Inserts a manual Cue-Out with a duration to the Live ingest point.
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (URL Parameters) {String} job_id The job id you want details for.
* @apiParam (Request Body Fields) {Number} duration An integer value to indicate the length of the ad break in seconds
* @apiParam (Request Body Fields) {String} timecode When to insert the cuepoint in HH:MM:SS:FF from the stream start (FF = frames); if omitted, the cuepoint will be inserted immediately
* @apiParam (Request Body Fields) {Object} [ad_server_data] a set of any variables (key/value pairs) that should be passed to the adServer
*
* @apiParamExample {json} Live Stream Cuepoint Insertion Request Body Example:
* {
* "duration": 30,
* "ad_server_data" : {
* "varToAdServer": "Hello",
 *	        "adBreakId": 12312,
 *	        "adBreakCategory": "summer"
* }
* }
*
* @apiSuccess (200) {String} id The id of the live stream job
* @apiSuccess (200) {Object} cue_point The cuepoint data
* @apiSuccess (200) {String} cue_point.id The cuepoint id
* @apiSuccess (200) {Number} cue_point.duration The cuepoint duration in seconds
* @apiSuccess (200) {String} cue_point.accuracy The cuepoint insertion accuracy - may be `segment` or `frame`
* @apiSuccess (200) {string} cue_point.inserted_at Time when the cue point was inserted in the stream
*
* @apiSuccessExample {json} Success response for cuepoint Insertion
* {
* "id": "JOB_ID",
* "cue_point": {
* "id": "adBreak-2f58393ada1442d98eca0817fa565ba4",
* "duration": 30,
* "accuracy": "segment", [ Can be segment or frame ]
* "inserted_at": "2017-07-21T09:30:46.307Z" [ Time when the cue point was inserted in the stream ]
 *        }
 *    }
*
* @apiError (Error 4xx) {json} BAD_REQUEST 400: Invalid input value - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (Error 4xx) {json} BAD_REQUEST 400: The notification target type is not supported currently - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (Error 4xx) {json} UNAUTHORIZED 401: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (Error 4xx) {json} RESOURCE_NOT_FOUND 404: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (Error 5xx) {json} INTERNAL_SERVER_ERROR 500: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
// Insert ID3 timed metadata
/**
* @api {post} /v1/jobs/{job_id}/id3tag Insert ID3 timed metadata
* @apiName Insert ID3 timed metadata
* @apiGroup Live_Jobs
* @apiVersion 1.0.0
*
* @apiDescription Inserts an ID3 timed metadata tag for an ongoing job. Note that: 1) If using timecode property, the job only stores the most recent request for insertion; 2) If using timecode property, the encoder must be sending SMPTE-formatted (HH:MM:SS:FF) timecode stored in the tc property via OnFI
*
* @apiHeader {String} Content-Type: application/json
* @apiHeader {String} X-API-KEY X-API-KEY: {APIKey}
*
* @apiParam (URL Parameters) {String} job_id The job id you want details for.
* @apiParam (Request Body Fields) {Object} id3_tag An object containing variables for the ID3 timed metadata
* @apiParam (Request Body Fields) {String{1..4}} id3_tag.name A name for the tag
* @apiParam (Request Body Fields) {String} id3_tag.value A value for the tag (maximum string data size 256KB)
 * @apiParam (Request Body Fields) {String} [id3_tag.timecode] Time to insert - by default, insertion is immediate - **Note: 1) If you use the `timecode` property, the job only stores the most recent request for insertion; 2) If you use the `timecode` property, the encoder must be sending SMPTE-formatted (HH:MM:SS:FF) timecode stored in the `tc` property via OnFI; 3) Software encoders such as Wirecast and OBS *do not* support sending timecode via OnFI packets in the RTMP stream; 4) Elemental hardware encoders *do* support sending timecode via OnFI packets in the RTMP stream**
*
* @apiParamExample {json} ID3 timed metadata Insertion Request Body Example:
* {
* "id3_tag": {
* "name": "BCOV",
* "value": "my value",
* "timecode": "15:50:49:16"
* }
* }
*
* @apiSuccess (200) {String} id The job id
* @apiSuccess (200) {Object} id3_tag The ID3 tag details
* @apiSuccess (200) {String} id3_tag.tag_name The ID3 tag name
* @apiSuccess (200) {String} id3_tag.tag_value The ID3 tag value
*
* @apiSuccessExample {object} Success response for ID3 timed metadata Insertion
* HTTP/1.1 200 OK
* {
* "id": "JOB_ID",
* "id3_tag": {
* "tag_name": "BCOV",
* "tag_value": "my value"
* }
* }
*
* @apiError (400) {object} BAD_REQUEST: Invalid input value - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (401) {object} UNAUTHORIZED: Unauthorized - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (404) {object} RESOURCE_NOT_FOUND: The api couldn't find the resource you requested - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
* @apiError (500) {object} INTERNAL_SERVER_ERROR: DB getItem, no results found - see [Live API Error Messages](https://support.brightcove.com/live-api-error-messages) for more details
*
*/
| add new field to create job method
| live-api/v1/src/jobs.js | add new field to create job method | <ide><path>ive-api/v1/src/jobs.js
<ide> * @apiParam (Request Body Fields) {Number} [encryption.rotate_every=10] Interval for key rotation in video segments
<ide> * @apiParam (Request Body Fields) {String} [encryption.external_url] The URL for the external encryption key - this field is required if you specify `type` as `external`, and the external key must match the `key` value
<ide> * @apiParam (Request Body Fields) {Number{0-93600}} [event_length=0] Used to preset and define an end time for the live event. At any point within the specified `event_length` you may reconnect to your stream. The `event_length` setting goes into effect as soon as streaming begins.
<add> * @apiParam (Request Body Fields) {Boolean} [hls_endlist=true] Whether an `EXT-X-ENDLIST` tag should be added to the stream playlist when you stop the stream or the `reconnect_time` window has been reached. The `EXT-X-ENDLIST` tag indicates that no more Media Segments will be added to the Media Playlist file and helps prevent the player from displaying error messages when the stream stops.
<ide> * @apiParam (Request Body Fields) {Number{1-86400}} [live_dvr_sliding_window_duration=100] The time, in seconds, to keep in the live DVR manifest. If the stream duration is longer than the window duration, segment references will be removed first in first out. Default is 100 seconds. **Note: for SSAI jobs, the limit is `7200`.
<ide> * @apiParam (Request Body Fields) {Number{1-600}} [live_dvr_ads_window_duration=600] The time, in seconds, to keep in the live DVR manifest. If the stream duration is longer than the window duration, segment references will be removed first in first out. Default is 100 seconds.
<ide> * @apiParam (Request Body Fields) {Number{1-5}} [max_hls_protocol_version=3] Sets the maximum HLS protocol version to use. Special features will be used as available. Default is 3. |
|
JavaScript | mit | 1ff61b06507a65ebed9da1256284f13df1dd3ccd | 0 | modulexcite/dancer.js,ngokevin/dancer.js,kyroskoh/dancer.js,hoboman313/dancer.js,suryasingh/dancer.js,kyroskoh/dancer.js,ngokevin/dancer.js,suryasingh/dancer.js,hoboman313/dancer.js,jsantell/dancer.js,modulexcite/dancer.js | (function() {
var adapter = function ( dancer ) {
this.dancer = dancer;
this.audio = new Audio();
this.loaded = false;
};
adapter.prototype = {
load : function ( path ) {
var _this = this;
this.audio.src = path;
this.audio.addEventListener( 'loadedmetadata', function( e ) {
_this.fbLength = _this.audio.mozFrameBufferLength;
_this.channels = _this.audio.mozChannels;
_this.rate = _this.audio.mozSampleRate;
_this.fft = new FFT( _this.fbLength / _this.channels, _this.rate );
_this.signal = new Float32Array( _this.fbLength / _this.channels );
_this.loaded = true;
_this.dancer.trigger( 'loaded' );
}, false);
this.audio.addEventListener( 'MozAudioAvailable', function( e ) {
_this.update( e );
}, false);
},
play : function () {
this.audio.play();
},
stop : function () {
this.audio.pause();
},
getSpectrum : function () {
return this.fft.spectrum;
},
getTime : function () {
return this.audio.currentTime;
},
update : function ( e ) {
if ( !this.loaded ) return;
for ( var i = 0, j = this.fbLength / 2; i < j; i++ ) {
this.signal[ i ] = ( e.frameBuffer[ 2 * i ] + e.frameBuffer[ 2 * i + 1 ] ) / 2;
}
this.fft.forward( this.signal );
this.dancer.trigger( 'update' );
}
};
Dancer.adapters.moz = adapter;
})();
| src/adapterMoz.js | (function() {
var adapter = function ( dancer ) {
this.dancer = dancer;
this.audio = new Audio();
this.loaded = false;
};
adapter.prototype = {
load : function ( path ) {
var _this = this;
this.audio.src = path;
this.audio.addEventListener( 'loadedmetadata', function( e ) {
_this.fbLength = _this.audio.mozFrameBufferLength;
_this.channels = _this.audio.mozChannels;
_this.rate = _this.audio.mozSampleRate;
_this.fft = new FFT( _this.fbLength / _this.channels, _this.rate );
_this.signal = new Float32Array( _this.fbLength / _this.channels );
_this.loaded = true;
_this.dancer.trigger( 'loaded' );
}, false);
this.audio.addEventListener( 'MozAudioAvailable', function( e ) {
_this.update( e );
}, false);
},
play : function () {
this.audio.play();
},
stop : function () {
this.audio.pause();
},
getSpectrum : function () {
return this.fft.spectrum;
},
getTime : function () {
return this.time;
},
update : function ( e ) {
if ( !this.loaded ) return;
for ( var i = 0, j = this.fbLength / 2; i < j; i++ ) {
this.signal[ i ] = ( e.frameBuffer[ 2 * i ] + e.frameBuffer[ 2 * i + 1 ] ) / 2;
}
this.time = e.time;
this.fft.forward( this.signal );
this.dancer.trigger( 'update' );
}
};
Dancer.adapters.moz = adapter;
})();
| Use Mozilla's audio object for current time rather than event's
| src/adapterMoz.js | Use Mozilla's audio object for current time rather than event's | <ide><path>rc/adapterMoz.js
<ide> },
<ide>
<ide> getTime : function () {
<del> return this.time;
<add> return this.audio.currentTime;
<ide> },
<ide>
<ide> update : function ( e ) {
<ide> this.signal[ i ] = ( e.frameBuffer[ 2 * i ] + e.frameBuffer[ 2 * i + 1 ] ) / 2;
<ide> }
<ide>
<del> this.time = e.time;
<del>
<ide> this.fft.forward( this.signal );
<ide> this.dancer.trigger( 'update' );
<ide> } |
|
JavaScript | apache-2.0 | cb8a98d115be479bb72a50618f1e2c570b6b4924 | 0 | TracklistMe/client,TracklistMe/client,TracklistMe/client | module.exports = {
angular: {
files: [{
expand: true,
src: "**",
cwd: 'bower_components/bootstrap/fonts',
dest: "angular/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular/',
dest: "angular/bower_components/videogular"
}, {
expand: true,
src: "**",
cwd: 'bower_components/jquery.sparkline/dist/',
dest: "angular/bower_components/jquery.sparkline/dist"
}, {
expand: true,
src: "**",
cwd: 'bower_components/angularjs-toaster/',
dest: "angular/bower_components/angularjs-toaster"
}, {
expand: true,
src: "**",
cwd: 'bower_components/',
dest: "angular/bower_components"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular-buffering/',
dest: "angular/bower_components/videogular-buffering"
}, {
expand: true,
src: "**",
cwd: 'bower_components/screenfull/',
dest: "angular/bower_components/screenfull"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular-controls/',
dest: "angular/bower_components/videogular-controls"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular-poster/',
dest: "angular/bower_components/videogular-poster"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular-overlay-play/',
dest: "angular/bower_components/videogular-overlay-play"
}, {
expand: true,
src: "**",
cwd: 'bower_components/font-awesome/fonts',
dest: "angular/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/jquery.sparkline/dist/',
dest: "angular/bower_components/jquery.sparkline/dist"
}, {
expand: true,
src: "**",
cwd: 'bower_components/simple-line-icons/fonts',
dest: "angular/fonts"
}, {
expand: true,
src: "**",
cwd: 'src/fonts',
dest: "angular/fonts"
}, {
expand: true,
src: "**",
cwd: 'src/l10n',
dest: "angular/l10n"
}, {
expand: true,
src: "**",
cwd: 'src/img',
dest: "angular/img"
}, {
expand: true,
src: "**",
cwd: 'src/js',
dest: "angular/js"
}, {
expand: true,
src: "**",
cwd: 'src/tpl',
dest: "angular/tpl"
}, {
src: 'src/index.min.html',
dest: 'angular/index.html'
}]
},
html: {
files: [{
expand: true,
src: "**",
cwd: 'bower_components/bootstrap/fonts',
dest: "html/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/font-awesome/fonts',
dest: "html/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/Simple-Line-Icons/fonts',
dest: "html/fonts"
}, {
expand: true,
src: '**',
cwd: 'src/fonts/',
dest: 'html/fonts/'
}, {
expand: true,
src: "**",
cwd: 'src/api',
dest: "html/api"
}, {
expand: true,
src: '**',
cwd: 'src/img/',
dest: 'html/img/'
}, {
expand: true,
src: '*.css',
cwd: 'src/css/',
dest: 'html/css/'
}, {
expand: true,
src: '**',
cwd: 'swig/js/',
dest: 'html/js/'
}]
},
landing: {
files: [{
expand: true,
src: "**",
cwd: 'bower_components/bootstrap/fonts',
dest: "landing/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/font-awesome/fonts',
dest: "landing/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/simple-line-icons/fonts',
dest: "landing/fonts"
}, {
expand: true,
src: '**',
cwd: 'src/fonts/',
dest: 'landing/fonts/'
}, {
expand: true,
src: '*.css',
cwd: 'src/css/',
dest: 'landing/css/'
}, {
src: 'html/css/app.min.css',
dest: 'landing/css/app.min.css'
}]
}
}; | grunt/copy.js | module.exports = {
angular: {
files: [{
expand: true,
src: "**",
cwd: 'bower_components/bootstrap/fonts',
dest: "angular/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular/',
dest: "angular/bower_components/videogular"
}, {
expand: true,
src: "**",
cwd: 'bower_components/jquery.sparkline/dist/',
dest: "angular/bower_components/jquery.sparkline/dist"
}, {
expand: true,
src: "**",
cwd: 'bower_components/angularjs-toaster/',
dest: "angular/bower_components/angularjs-toaster"
}, {
expand: true,
src: "**",
cwd: 'bower_components/',
dest: "angular/bower_components"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular-buffering/',
dest: "angular/bower_components/videogular-buffering"
}, {
expand: true,
src: "**",
cwd: 'bower_components/screenfull/',
dest: "angular/bower_components/screenfull"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular-controls/',
dest: "angular/bower_components/videogular-controls"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular-poster/',
dest: "angular/bower_components/videogular-poster"
}, {
expand: true,
src: "**",
cwd: 'bower_components/videogular-overlay-play/',
dest: "angular/bower_components/videogular-overlay-play"
}, {
expand: true,
src: "**",
cwd: 'bower_components/font-awesome/fonts',
dest: "angular/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/jquery.sparkline/dist',
dest: "angular/bower_components/jquery.sparkline/dist"
}, {
expand: true,
src: "**",
cwd: 'bower_components/simple-line-icons/fonts',
dest: "angular/fonts"
}, {
expand: true,
src: "**",
cwd: 'src/fonts',
dest: "angular/fonts"
}, {
expand: true,
src: "**",
cwd: 'src/l10n',
dest: "angular/l10n"
}, {
expand: true,
src: "**",
cwd: 'src/img',
dest: "angular/img"
}, {
expand: true,
src: "**",
cwd: 'src/js',
dest: "angular/js"
}, {
expand: true,
src: "**",
cwd: 'src/tpl',
dest: "angular/tpl"
}, {
src: 'src/index.min.html',
dest: 'angular/index.html'
}]
},
html: {
files: [{
expand: true,
src: "**",
cwd: 'bower_components/bootstrap/fonts',
dest: "html/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/font-awesome/fonts',
dest: "html/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/Simple-Line-Icons/fonts',
dest: "html/fonts"
}, {
expand: true,
src: '**',
cwd: 'src/fonts/',
dest: 'html/fonts/'
}, {
expand: true,
src: "**",
cwd: 'src/api',
dest: "html/api"
}, {
expand: true,
src: '**',
cwd: 'src/img/',
dest: 'html/img/'
}, {
expand: true,
src: '*.css',
cwd: 'src/css/',
dest: 'html/css/'
}, {
expand: true,
src: '**',
cwd: 'swig/js/',
dest: 'html/js/'
}]
},
landing: {
files: [{
expand: true,
src: "**",
cwd: 'bower_components/bootstrap/fonts',
dest: "landing/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/font-awesome/fonts',
dest: "landing/fonts"
}, {
expand: true,
src: "**",
cwd: 'bower_components/simple-line-icons/fonts',
dest: "landing/fonts"
}, {
expand: true,
src: '**',
cwd: 'src/fonts/',
dest: 'landing/fonts/'
}, {
expand: true,
src: '*.css',
cwd: 'src/css/',
dest: 'landing/css/'
}, {
src: 'html/css/app.min.css',
dest: 'landing/css/app.min.css'
}]
}
}; | again sparkline
| grunt/copy.js | again sparkline | <ide><path>runt/copy.js
<ide> }, {
<ide> expand: true,
<ide> src: "**",
<del> cwd: 'bower_components/jquery.sparkline/dist',
<add> cwd: 'bower_components/jquery.sparkline/dist/',
<ide> dest: "angular/bower_components/jquery.sparkline/dist"
<ide> }, {
<ide> expand: true, |
|
Java | mit | 8e58273fbe489dc4faf2339b9616a5ef16ef1d6b | 0 | t28hub/RxWeather | package com.t28.rxweather.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;
import com.t28.rxweather.request.ForecastRequest;
import com.t28.rxweather.util.CollectionUtils;
import com.t28.rxweather.volley.RxSupport;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import rx.Observable;
@JsonDeserialize(builder = Forecast.Builder.class)
public class Forecast implements Model {
private final City mCity;
private final List<Weather> mWeathers;
private Forecast(Builder builder) {
mCity = builder.mCity;
if (CollectionUtils.isEmpty(builder.mWeathers)) {
mWeathers = Collections.emptyList();
} else {
mWeathers = new ArrayList<>(builder.mWeathers);
}
}
@Override
public String toString() {
final StringBuilder builder = new StringBuilder();
builder.append(Forecast.class.getSimpleName());
try {
final ObjectMapper mapper = new ObjectMapper();
final String jsonString = mapper.writeValueAsString(this);
builder.append(jsonString);
return builder.toString();
} catch (JsonProcessingException e) {
return super.toString();
}
}
@Override
public boolean isValid() {
if (mCity == null || !mCity.isValid()) {
return false;
}
for (Weather weather : mWeathers) {
if (weather.isValid()) {
continue;
}
return false;
}
return true;
}
public City getCity() {
return mCity;
}
public List<Weather> getWeathers() {
return new ArrayList<>(mWeathers);
}
public static Observable<Forecast> findByName(RxSupport support, String name) {
final ForecastRequest request = new ForecastRequest.Builder("")
.setCityName(name)
.build();
return support.createObservableRequest(request);
}
public static Observable<Forecast> findByCoordinate(RxSupport support, Coordinate coordinate) {
final ForecastRequest request = new ForecastRequest.Builder("")
.setLat(coordinate.getLat())
.setLon(coordinate.getLon())
.build();
return support.createObservableRequest(request);
}
@JsonPOJOBuilder(withPrefix = "set")
@JsonIgnoreProperties(ignoreUnknown = true)
public static class Builder {
private City mCity;
private List<Weather> mWeathers;
public Builder() {
}
public Builder setCity(City city) {
mCity = city;
return this;
}
@JsonProperty("list")
public Builder setWeathers(List<Weather> weathers) {
mWeathers = weathers;
return this;
}
public Forecast build() {
return new Forecast(this);
}
}
}
| app/src/main/java/com/t28/rxweather/model/Forecast.java | package com.t28.rxweather.model;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;
import com.t28.rxweather.request.ForecastRequest;
import com.t28.rxweather.util.CollectionUtils;
import com.t28.rxweather.volley.RxSupport;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import rx.Observable;
@JsonDeserialize(builder = Forecast.Builder.class)
public class Forecast implements Model {
private final City mCity;
private final List<Weather> mWeathers;
private Forecast(Builder builder) {
mCity = builder.mCity;
if (CollectionUtils.isEmpty(builder.mWeathers)) {
mWeathers = Collections.emptyList();
} else {
mWeathers = new ArrayList<>(builder.mWeathers);
}
}
@Override
public boolean isValid() {
if (mCity == null || !mCity.isValid()) {
return false;
}
for (Weather weather : mWeathers) {
if (weather.isValid()) {
continue;
}
return false;
}
return true;
}
public City getCity() {
return mCity;
}
public List<Weather> getWeathers() {
return new ArrayList<>(mWeathers);
}
public static Observable<Forecast> findByName(RxSupport support, String name) {
final ForecastRequest request = new ForecastRequest.Builder("")
.setCityName(name)
.build();
return support.createObservableRequest(request);
}
public static Observable<Forecast> findByCoordinate(RxSupport support, Coordinate coordinate) {
final ForecastRequest request = new ForecastRequest.Builder("")
.setLat(coordinate.getLat())
.setLon(coordinate.getLon())
.build();
return support.createObservableRequest(request);
}
@JsonPOJOBuilder(withPrefix = "set")
@JsonIgnoreProperties(ignoreUnknown = true)
public static class Builder {
private City mCity;
private List<Weather> mWeathers;
public Builder() {
}
public Builder setCity(City city) {
mCity = city;
return this;
}
@JsonProperty("list")
public Builder setWeathers(List<Weather> weathers) {
mWeathers = weathers;
return this;
}
public Forecast build() {
return new Forecast(this);
}
}
}
| Forecast#toString()の実装追加
| app/src/main/java/com/t28/rxweather/model/Forecast.java | Forecast#toString()の実装追加 | <ide><path>pp/src/main/java/com/t28/rxweather/model/Forecast.java
<ide>
<ide> import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
<ide> import com.fasterxml.jackson.annotation.JsonProperty;
<add>import com.fasterxml.jackson.core.JsonProcessingException;
<add>import com.fasterxml.jackson.databind.ObjectMapper;
<ide> import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
<ide> import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;
<ide> import com.t28.rxweather.request.ForecastRequest;
<ide> mWeathers = Collections.emptyList();
<ide> } else {
<ide> mWeathers = new ArrayList<>(builder.mWeathers);
<add> }
<add> }
<add>
<add> @Override
<add> public String toString() {
<add> final StringBuilder builder = new StringBuilder();
<add> builder.append(Forecast.class.getSimpleName());
<add>
<add> try {
<add> final ObjectMapper mapper = new ObjectMapper();
<add> final String jsonString = mapper.writeValueAsString(this);
<add> builder.append(jsonString);
<add> return builder.toString();
<add> } catch (JsonProcessingException e) {
<add> return super.toString();
<ide> }
<ide> }
<ide> |
|
Java | mit | f4a828e6752e92ecb168976960dcf26cd949ac08 | 0 | Nanopublication/nanopub-java,Nanopublication/nanopub-java | package org.nanopub.extra.server;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.nanopub.Nanopub;
public class NanopubServerUtils {
	// Version numbers have the form MAJOR.MINOR (for example, 0.12 is a newer version than 0.9!)
	public static final String requiredProtocolVersion = "0.2";
	// Numeric form of the version above; see getVersionValue (MAJOR*1000 + MINOR).
	public static final int requiredProtocolVersionValue = getVersionValue(requiredProtocolVersion);
	// Shared HTTP client, created lazily on first use by loadList.
	private static HttpClient httpClient;

	// Static utility class; instantiation is forbidden.
	protected NanopubServerUtils() {
		throw new RuntimeException("no instances allowed");
	}
	/** Fetches the list of peer server URLs known to the given server. */
	public static List<String> loadPeerList(String serverUrl) throws IOException {
		return loadList(serverUrl + "peers");
	}

	/** Fetches the peer list using the server's public URL. */
	public static List<String> loadPeerList(ServerInfo si) throws IOException {
		return loadPeerList(si.getPublicUrl());
	}

	/** Fetches one page of nanopublication URIs from the given server. */
	public static List<String> loadNanopubUriList(String serverUrl, int page) throws IOException {
		return loadList(serverUrl + "nanopubs?page=" + page);
	}

	/** Fetches one page of nanopublication URIs using the server's public URL. */
	public static List<String> loadNanopubUriList(ServerInfo si, int page) throws IOException {
		return loadNanopubUriList(si.getPublicUrl(), page);
	}
public static List<String> loadList(String url) throws IOException {
List<String> list = new ArrayList<String>();
HttpGet get = new HttpGet(url);
get.setHeader("Content-Type", "text/plain");
BufferedReader r = null;
try {
if (httpClient == null) {
RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(2000)
.setConnectionRequestTimeout(100).setSocketTimeout(2000).build();
PoolingHttpClientConnectionManager connManager = new PoolingHttpClientConnectionManager();
connManager.setDefaultMaxPerRoute(10);
connManager.setMaxTotal(1000);
httpClient = HttpClientBuilder.create().setDefaultRequestConfig(requestConfig)
.setConnectionManager(connManager).build();
}
HttpResponse resp = httpClient.execute(get);
int code = resp.getStatusLine().getStatusCode();
if (code < 200 || code > 299) {
EntityUtils.consumeQuietly(resp.getEntity());
throw new IOException("HTTP error: " + code + " " + resp.getStatusLine().getReasonPhrase());
}
InputStream in = resp.getEntity().getContent();
r = new BufferedReader(new InputStreamReader(in, Charset.forName("UTF-8")));
String line = null;
while ((line = r.readLine()) != null) {
list.add(line.trim());
}
} finally {
if (r != null) r.close();
}
return list;
}
private static final List<String> bootstrapServerList = new ArrayList<>();
static {
// Hard-coded server instances:
bootstrapServerList.add("http://server.nanopubs.lod.labs.vu.nl/");
bootstrapServerList.add("http://130.60.24.146:7880/");
bootstrapServerList.add("https://server.nanopubs.knows.idlab.ugent.be/");
bootstrapServerList.add("https://openphacts.cs.man.ac.uk/nanopub/server/");
bootstrapServerList.add("http://server.np.scify.org/");
bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-1/");
bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-2/");
bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-3/");
bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-4/");
}
public static List<String> getBootstrapServerList() {
return bootstrapServerList;
}
public static int getVersionValue(String versionString) {
try {
int major = Integer.parseInt(versionString.split("\\.")[0]);
int minor = Integer.parseInt(versionString.split("\\.")[1]);
return (major * 1000) + minor;
} catch (Exception ex) {
return 0;
}
}
public static final IRI PROTECTED_NANOPUB = SimpleValueFactory.getInstance().createIRI("http://purl.org/nanopub/x/ProtectedNanopub");
public static boolean isProtectedNanopub(Nanopub np) {
for (Statement st : np.getPubinfo()) {
if (!st.getSubject().equals(np.getUri())) continue;
if (!st.getPredicate().equals(RDF.TYPE)) continue;
if (st.getObject().equals(PROTECTED_NANOPUB)) return true;
}
return false;
}
}
| src/main/java/org/nanopub/extra/server/NanopubServerUtils.java | package org.nanopub.extra.server;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.util.EntityUtils;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.nanopub.Nanopub;
public class NanopubServerUtils {
// Version numbers have the form MAJOR.MINOR (for example, 0.12 is a newer version than 0.9!)
public static final String requiredProtocolVersion = "0.2";
public static final int requiredProtocolVersionValue = getVersionValue(requiredProtocolVersion);
private static HttpClient httpClient;
protected NanopubServerUtils() {
throw new RuntimeException("no instances allowed");
}
public static List<String> loadPeerList(String serverUrl) throws IOException {
return loadList(serverUrl + "peers");
}
public static List<String> loadPeerList(ServerInfo si) throws IOException {
return loadPeerList(si.getPublicUrl());
}
public static List<String> loadNanopubUriList(String serverUrl, int page) throws IOException {
return loadList(serverUrl + "nanopubs?page=" + page);
}
public static List<String> loadNanopubUriList(ServerInfo si, int page) throws IOException {
return loadNanopubUriList(si.getPublicUrl(), page);
}
public static List<String> loadList(String url) throws IOException {
List<String> list = new ArrayList<String>();
HttpGet get = new HttpGet(url);
get.setHeader("Content-Type", "text/plain");
BufferedReader r = null;
try {
if (httpClient == null) {
RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(2000)
.setConnectionRequestTimeout(100).setSocketTimeout(2000).build();
PoolingHttpClientConnectionManager connManager = new PoolingHttpClientConnectionManager();
connManager.setDefaultMaxPerRoute(10);
connManager.setMaxTotal(1000);
httpClient = HttpClientBuilder.create().setDefaultRequestConfig(requestConfig)
.setConnectionManager(connManager).build();
}
HttpResponse resp = httpClient.execute(get);
int code = resp.getStatusLine().getStatusCode();
if (code < 200 || code > 299) {
EntityUtils.consumeQuietly(resp.getEntity());
throw new IOException("HTTP error: " + code + " " + resp.getStatusLine().getReasonPhrase());
}
InputStream in = resp.getEntity().getContent();
r = new BufferedReader(new InputStreamReader(in, Charset.forName("UTF-8")));
String line = null;
while ((line = r.readLine()) != null) {
list.add(line.trim());
}
} finally {
if (r != null) r.close();
}
return list;
}
private static final List<String> bootstrapServerList = new ArrayList<>();
static {
// Hard-coded server instances:
//bootstrapServerList.add("http://np.inn.ac/");
bootstrapServerList.add("http://server.nanopubs.lod.labs.vu.nl/");
bootstrapServerList.add("http://130.60.24.146:7880/");
bootstrapServerList.add("https://server.nanopubs.knows.idlab.ugent.be/");
bootstrapServerList.add("https://openphacts.cs.man.ac.uk/nanopub/server/");
bootstrapServerList.add("http://server.np.scify.org/");
bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-1/");
bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-2/");
bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-3/");
bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-4/");
bootstrapServerList.add("http://rdf.disgenet.org/nanopub-server/");
}
public static List<String> getBootstrapServerList() {
return bootstrapServerList;
}
public static int getVersionValue(String versionString) {
try {
int major = Integer.parseInt(versionString.split("\\.")[0]);
int minor = Integer.parseInt(versionString.split("\\.")[1]);
return (major * 1000) + minor;
} catch (Exception ex) {
return 0;
}
}
public static final IRI PROTECTED_NANOPUB = SimpleValueFactory.getInstance().createIRI("http://purl.org/nanopub/x/ProtectedNanopub");
public static boolean isProtectedNanopub(Nanopub np) {
for (Statement st : np.getPubinfo()) {
if (!st.getSubject().equals(np.getUri())) continue;
if (!st.getPredicate().equals(RDF.TYPE)) continue;
if (st.getObject().equals(PROTECTED_NANOPUB)) return true;
}
return false;
}
}
| Update server list | src/main/java/org/nanopub/extra/server/NanopubServerUtils.java | Update server list | <ide><path>rc/main/java/org/nanopub/extra/server/NanopubServerUtils.java
<ide>
<ide> static {
<ide> // Hard-coded server instances:
<del> //bootstrapServerList.add("http://np.inn.ac/");
<ide> bootstrapServerList.add("http://server.nanopubs.lod.labs.vu.nl/");
<ide> bootstrapServerList.add("http://130.60.24.146:7880/");
<ide> bootstrapServerList.add("https://server.nanopubs.knows.idlab.ugent.be/");
<ide> bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-2/");
<ide> bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-3/");
<ide> bootstrapServerList.add("http://app.tkuhn.eculture.labs.vu.nl/nanopub-server-4/");
<del> bootstrapServerList.add("http://rdf.disgenet.org/nanopub-server/");
<ide> }
<ide>
<ide> public static List<String> getBootstrapServerList() { |
|
JavaScript | mit | 6301b3482c433fceeea2ee7d73c0fc4cdd254163 | 0 | ggonzale/jstat,smarden1/jstat,JeebsM/jstat,akrawitz/jstat,JeebsM/jstat,cmpolis/jstat,ggonzale/jstat,jstat/jstat,jamescgibson/jstat,NorthDecoder/jstat,NorthDecoder/jstat,akrawitz/jstat,smarden1/jstat,jstat/jstat | (function( jStat, Math ) {
// generate all distribution instance methods
(function( list ) {
for ( var i = 0; i < list.length; i++ ) (function( func ) {
// distribution instance method
jStat[ func ] = function( a, b, c ) {
if (!( this instanceof arguments.callee )) return new arguments.callee( a, b, c );
this._a = a;
this._b = b;
this._c = c;
for ( var i in jStat[ func ].prototype ) this[ i ] = this[ i ].bind( this );
};
// distribution method to be used on a jStat instance
jStat.fn[ func ] = function( a, b, c ) {
var newthis = jStat[ func ]( a, b, c );
newthis.data = this;
return newthis;
};
// sample instance method
jStat[ func ].prototype.sample = function( arr ) {
var a = this._a,
b = this._b,
c = this._c;
if ( arr )
return jStat.alter( arr, function() {
return jStat[ func ].sample( a, b, c );
});
else
return jStat[ func ].sample( a, b, c );
};
// generate the pdf, cdf and inv instance methods
(function( vals ) {
for ( var i = 0; i < vals.length; i++ ) (function( fnfunc ) {
jStat[ func ].prototype[ fnfunc ] = function( x ) {
var a = this._a,
b = this._b,
c = this._c;
if ( isNaN( x )) {
return jStat.fn.map.call( this.data, function( x ) {
return jStat[ func ][ fnfunc ]( x, a, b, c );
});
}
return jStat[ func ][ fnfunc ]( x, a, b, c );
};
})( vals[ i ]);
})( 'pdf cdf inv'.split( ' ' ));
// generate the mean, median, mode and variance instance methods
(function( vals ) {
for ( var i = 0; i < vals.length; i++ ) (function( fnfunc ) {
jStat[ func ].prototype[ fnfunc ] = function() {
return jStat[ func ][ fnfunc ]( this._a, this._b, this._c );
};
})( vals[ i ]);
})( 'mean median mode variance'.split( ' ' ));
})( list[ i ]);
})((
'beta cauchy chisquare exponential gamma kumaraswamy lognormal normal ' +
'pareto studentt weibull uniform uniformmv binomial negbin hypgeom poisson'
).split( ' ' ));
// extend beta function with static methods
jStat.extend( jStat.beta, {
pdf : function( x, alpha, beta ) {
return ( Math.pow( x, alpha - 1 ) * Math.pow( 1 - x, beta - 1 )) / jStat.betafn( alpha, beta );
},
cdf : function( x, alpha, beta ) {
return jStat.incompleteBeta( x, alpha, beta );
},
inv : function( x, alpha, beta ) {
return jStat.incompleteBetaInv( x, alpha, beta );
},
mean : function( alpha, beta ) {
return alpha / ( alpha + beta );
},
median : function( alpha, beta ) {
// TODO: implement beta median
},
mode : function( alpha, beta ) {
return ( alpha * beta ) / ( Math.pow( alpha + beta, 2 ) * ( alpha + beta + 1 ));
},
// return a random sample
sample : function( alpha, beta ) {
var u = jStat.randg( alpha );
return u / ( u + jStat.randg( beta ));
},
variance : function( alpha, beta ) {
return ( alpha * beta ) / ( Math.pow( alpha + beta, 2 ) * ( alpha + beta + 1 ));
}
});
// extend cauchy function with static methods
jStat.extend( jStat.cauchy, {
pdf : function( x, local, scale ) {
return ( scale / ( Math.pow( x - local, 2 ) + Math.pow( scale, 2 ))) / Math.PI;
},
cdf : function( x, local, scale ) {
return Math.atan(( x - local) / scale ) / Math.PI + 0.5;
},
inv : function( p, local, scale ) {
return local + scale * Math.tan( Math.PI * ( p - 0.5 ));
},
mean : function( local, scale ) {
// TODO: implement this
},
median: function( local, scale ) {
return local;
},
mode : function( local, scale ) {
return local;
},
sample : function( local, scale ) {
return jStat.randn() * Math.sqrt( 1 / ( 2 * jStat.randg( 0.5 ))) * scale + local;
},
variance : function( local, scale ) {
// TODO: implement this
}
});
// extend chisquare function with static methods
jStat.extend( jStat.chisquare, {
pdf : function( x, dof ) {
return ( Math.pow( x, dof / 2 - 1) * Math.exp( -x / 2 )) / ( Math.pow( 2, dof / 2) * jStat.gammafn( dof / 2 ));
},
cdf : function( x, dof ) {
return jStat.gammap( x / 2, dof / 2 );
},
inv : function( p, dof ) {
return 2 * jStat.gammapInv( p, 0.5 * dof );
},
mean : function( dof ) {
return dof;
},
//TODO: this is an approximation (is there a better way?)
median : function( dof ) {
return dof * Math.pow( 1 - ( 2 / ( 9 * dof )), 3 );
},
mode : function( dof ) {
return ( dof - 2 > 0 ) ? dof - 2 : 0;
},
sample : function( dof ) {
return jStat.randg( dof / 2 ) * 2;
},
variance: function( dof ) {
return 2 * dof;
}
});
// extend exponential function with static methods
jStat.extend( jStat.exponential, {
pdf : function( x, rate ) {
return x < 0 ? 0 : rate * Math.exp( -rate * x );
},
cdf : function( x, rate ) {
return x < 0 ? 0 : 1 - Math.exp( -rate * x );
},
inv : function( p, rate ) {
return -Math.log( 1 - p ) / rate;
},
mean : function( rate ) {
return 1 / rate;
},
median : function ( rate ) {
return ( 1 / rate ) * Math.log( 2 );
},
mode : function( rate ) {
return 0;
},
sample : function( rate ) {
return -1 / rate * Math.log( Math.random());
},
variance : function( rate ) {
return Math.pow( rate, -2 );
}
});
// extend gamma function with static methods
jStat.extend( jStat.gamma, {
pdf : function( x, shape, scale ) {
return Math.pow( x, shape - 1 ) * ( Math.exp( -x / scale ) / ( jStat.gammafn( shape ) * Math.pow( scale, shape )));
},
cdf : function( x, shape, scale ) {
return jStat.gammap( x / scale, shape );
},
inv : function( p, shape, scale ) {
return jStat.gammapInv( p, shape ) * scale;
},
mean : function( shape, scale ) {
return shape * scale;
},
mode : function( shape, scale ) {
if( shape > 1 ) return ( shape - 1 ) * scale;
return undefined;
},
sample : function( shape, scale ) {
return jStat.randg( shape ) * scale;
},
variance: function( shape, scale ) {
return shape * scale * scale;
}
});
// extend kumaraswamy function with static methods
jStat.extend( jStat.kumaraswamy, {
pdf : function( x, alpha, beta ) {
return alpha * beta * Math.pow( x, alpha - 1 ) * Math.pow( 1 - Math.pow( x, alpha ), beta - 1 );
},
cdf : function( x, alpha, beta ) {
return ( 1 - Math.pow( 1 - Math.pow( x, alpha ), beta ));
},
mean : function( alpha, beta ) {
return ( beta * jStat.gammafn( 1 + 1 / alpha ) * jStat.gammafn( beta )) / ( jStat.gammafn( 1 + 1 / alpha + beta ));
},
median : function( alpha, beta ) {
return Math.pow( 1 - Math.pow( 2, -1 / beta ), 1 / alpha );
},
mode : function( alpha, beta ) {
return ( alpha >= 1 && beta >= 1 && ( alpha !== 1 && beta !== 1 )) ? Math.pow(( alpha - 1 ) / ( alpha * beta - 1 ), 1 / alpha ) : undefined;
},
variance: function( alpha, beta ) {
}
});
// extend lognormal function with static methods
jStat.extend( jStat.lognormal, {
pdf : function( x, mu, sigma ) {
return ( 1 / ( x * sigma * Math.sqrt( 2 * Math.PI ))) * Math.exp( -Math.pow( Math.log( x ) - mu, 2 ) / ( 2 * sigma * sigma ));
},
cdf : function( x, mu, sigma ) {
return 0.5 + ( 0.5 * jStat.erf(( Math.log( x ) - mu ) / Math.sqrt( 2 * sigma * sigma )));
},
inv : function( p, mu, sigma ) {
return Math.exp( -1.41421356237309505 * sigma * jStat.erfcinv( 2 * p ) + mu);
},
mean : function( mu, sigma ) {
return Math.exp( mu + sigma * sigma / 2);
},
median : function( mu, sigma ) {
return Math.exp( mu );
},
mode : function( mu, sigma ) {
return Math.exp( mu - sigma * sigma );
},
sample : function( mu, sigma ) {
return Math.exp( jStat.randn() * sigma + mu );
},
variance : function( mu, sigma ) {
return ( Math.exp( sigma * sigma ) - 1 ) * Math.exp( 2 * mu + sigma * sigma );
}
});
// extend normal function with static methods
jStat.extend( jStat.normal, {
pdf : function( x, mean, std ) {
return ( 1 / ( Math.sqrt( 2 * Math.PI * std * std))) * Math.exp( -( Math.pow( x - mean, 2 ) / 2 * std * std ));
},
cdf : function( x, mean, std ) {
return 0.5 * ( 1 + jStat.erf(( x - mean ) / Math.sqrt( 2 * std * std )));
},
inv : function( p, mean, std ) {
return -1.41421356237309505 * std * jStat.erfcinv( 2 * p ) + mean;
},
mean : function( mean, std ) {
return mean;
},
median : function( mean, std ) {
return mean;
},
mode : function ( mean, std ) {
return mean;
},
sample : function( mean, std ) {
return jStat.randn() * std + mean;
},
variance : function( mean, std ) {
return std * std;
}
});
// extend pareto function with static methods
jStat.extend( jStat.pareto, {
pdf : function( x, scale, shape ) {
return ( x > scale ) ? ( shape * Math.pow( scale, shape )) / Math.pow( x, shape + 1 ) : undefined;
},
cdf : function( x, scale, shape ) {
return 1 - Math.pow( scale / x, shape );
},
mean : function( scale, shape ) {
return ( shape > 1 ) ? ( shape * Math.pow( scale, shape )) / ( shape - 1 ) : undefined;
},
median : function( scale, shape ) {
return scale * ( shape * Math.SQRT2 );
},
mode : function( scale, shape ) {
return scale;
},
variance : function( scale, shape ) {
return ( shape > 2 ) ? ( scale*scale * shape ) / ( Math.pow( shape - 1, 2 ) * ( shape - 2 )) : undefined;
}
});
// extend studentt function with static methods
jStat.extend( jStat.studentt, {
pdf : function( x, dof ) {
return ( jStat.gammafn(( dof + 1 ) / 2 ) / ( Math.sqrt( dof * Math.PI ) * jStat.gammafn( dof / 2 ))) * Math.pow( 1 + (( x*x ) / dof ), -(( dof + 1 ) / 2 ));
},
cdf : function( x, dof ) {
var dof2 = dof / 2;
return jStat.incompleteBeta(( x + Math.sqrt( x * x + dof )) / ( 2 * Math.sqrt( x * x + dof )), dof2, dof2 );
},
inv : function( p, dof ) {
var x = jStat.incompleteBetaInv( 2 * Math.min( p, 1 - p ), 0.5 * dof, 0.5 );
x = Math.sqrt( dof * ( 1 - x ) / x );
return ( p > 0 ) ? x : -x;
},
mean : function( dof ) {
return ( dof > 1 ) ? 0 : undefined;
},
median : function ( dof ) {
return 0;
},
mode : function( dof ) {
return 0;
},
sample : function( dof ) {
return jStat.randn() * Math.sqrt( dof / ( 2 * jStat.randg( dof / 2)));
},
variance : function( dof ) {
return ( dof > 2 ) ? dof / ( dof - 2 ) : ( dof > 1 ) ? Infinity : undefined;
}
});
// extend weibull function with static methods
jStat.extend( jStat.weibull, {
pdf : function( x, scale, shape ) {
return x < 0 ? 0 : ( shape / scale ) * Math.pow(( x / scale ),( shape - 1 )) * Math.exp(-( Math.pow(( x / scale ), shape )));
},
cdf : function( x, scale, shape ) {
return x < 0 ? 0 : 1 - Math.exp( -Math.pow(( x / scale ), shape ));
},
inv : function( p, scale, shape ) {
return scale * Math.pow( -Math.log( 1 - p ), 1 / shape );
},
mean : function( scale, shape ) {
return scale * jStat.gammafn( 1 + 1 / shape );
},
median : function( scale, shape ) {
return scale * Math.pow( Math.log( 2 ), 1 / shape );
},
mode : function( scale, shape ) {
return ( shape > 1 ) ? scale * Math.pow(( shape - 1 ) / shape, 1 / shape ) : undefined;
},
sample : function( scale, shape ) {
return scale * Math.pow( -Math.log( Math.random()), 1 / shape );
},
variance : function( scale, shape ) {
return scale * scale * jStat.gammafn( 1 + 2 / shape ) - Math.pow( this.mean( scale, shape ), 2 );
}
});
// extend uniform function with static methods
jStat.extend( jStat.uniform, {
pdf : function( x, a, b ) {
return ( x < a || x > b ) ? 0 : 1 / ( b - a );
},
cdf : function( x, a, b ) {
if ( x < a ) {
return 0;
} else if ( x < b ) {
return ( x - a ) / ( b - a );
}
return 1;
},
mean : function( a, b ) {
return 0.5 * ( a + b );
},
median : function( a, b ) {
return jStat.mean( a, b );
},
mode : function( a, b ) {
},
sample : function( a, b ) {
return ( a / 2 + b / 2 ) + ( b / 2 - a / 2) * ( 2 * Math.random() - 1);
},
variance : function( a, b ) {
return 0.08333333333333333 * Math.pow( b - a, 2 );
}
});
// extend uniform function with static methods
jStat.extend( jStat.uniformmv, {
pdf : function( x, m, s ) {
var sqrtt = Math.sqrt( -3 );
return ( -s * sqrtt <= x - m || x - m <= s * sqrtt ) ? 1 / ( 2 * s * sqrtt ) : 0;
},
cdf : function( x, m, s ) {
var sqrtt = Math.sqrt( -3 );
return ( x - m < -s * sqrtt ) ? 0 : ( x - m >= s * sqrtt ) ? 1 : 0.5 * (( x - m ) / ( s * sqrtt ) + 1 );
}
});
// extend uniform function with static methods
jStat.extend( jStat.binomial, {
pdf : function( k, n, p ) {
return jStat.combination( n, k ) * Math.pow( p, k ) * Math.pow( 1 - p, n - k );
},
cdf : function( x, n, p ) {
var binomarr = [],
k = 0,
i = 0,
sum = 0;
if ( x < 0 ) {
return 0;
}
if ( x < n ) {
for ( ; k < n; k++ ) {
binomarr[ k ] = jStat.binomial( k, n, p );
}
for ( ; i <= x; i++ ) {
sum += binomarr[ i ];
}
return sum;
}
return 1;
}
});
// extend uniform function with static methods
jStat.extend( jStat.negbin, {
pdf : function( k, r, p ) {
return k !== k | 0 ? false
: k < 0 ? 0
: jStat.combination( k + r - 1, k ) * Math.pow( 1 - p, r ) * Math.pow( p, k );
},
cdf : function( x, r, p ) {
var sum = 0,
k = 0;
if ( x < 0 ) return 0;
for ( ; k <= x; k++ ) {
sum += jStat.negbin( k, r, p );
}
return sum;
}
});
// extend uniform function with static methods
jStat.extend( jStat.hypgeom, {
pdf : function( k, N, m, n ) {
return k !== k | 0 ? false
: ( k < 0) ? 0
: jStat.combination( m, k ) * jStat.combination( N - m , n - k ) / jStat.combination( N, n );
},
cdf : function( x, N, m, n ) {
var sum = 0,
k = 0;
if ( x < 0 ) return 0;
for ( ; k <= x; k++ ) {
sum += jStat.hypgeom( k, N, m, n );
}
return sum;
}
});
// extend uniform function with static methods
jStat.extend( jStat.poisson, {
pdf : function( k, l ) {
return Math.pow( l, k ) * Math.exp( -l ) / jStat.factorial( k );
},
cdf : function( x, l ) {
var sum = 0,
k = 0;
if ( x < 0 ) return 0;
for ( ; k <= x; k++ ) {
sum += jStat.poisson( k, l );
}
return sum;
}
});
})( this.jStat, Math );
| src/distribution.js | (function( jStat, Math ) {
// generate all distribution instance methods
(function( list ) {
for ( var i = 0; i < list.length; i++ ) (function( func ) {
// distribution instance method
jStat[ func ] = function( a, b, c ) {
if (!( this instanceof arguments.callee )) return new arguments.callee( a, b, c );
this._a = a;
this._b = b;
this._c = c;
for ( var i in jStat[ func ].prototype ) this[ i ] = this[ i ].bind( this );
};
// distribution method to be used on a jStat instance
jStat.fn[ func ] = function( a, b, c ) {
var newthis = jStat[ func ]( a, b, c );
newthis.data = this;
return newthis;
};
// sample instance method
jStat[ func ].prototype.sample = function( arr ) {
var a = this._a,
b = this._b,
c = this._c;
if ( arr )
return jStat.alter( arr, function() {
return jStat[ func ].sample( a, b, c );
});
else
return jStat[ func ].sample( a, b, c );
};
// generate the pdf, cdf and inv instance methods
(function( vals ) {
for ( var i = 0; i < vals.length; i++ ) (function( fnfunc ) {
jStat[ func ].prototype[ fnfunc ] = function( x ) {
var a = this._a,
b = this._b,
c = this._c;
if ( isNaN( x )) {
return jStat.fn.map.call( this.data, function( x ) {
return jStat[ func ][ fnfunc ]( x, a, b, c );
});
}
return jStat[ func ][ fnfunc ]( x, a, b, c );
};
})( vals[ i ]);
})( 'pdf cdf inv'.split( ' ' ));
// generate the mean, median, mode and variance instance methods
(function( vals ) {
for ( var i = 0; i < vals.length; i++ ) (function( fnfunc ) {
jStat[ func ].prototype[ fnfunc ] = function() {
return jStat[ func ][ fnfunc ]( this._a, this._b, this._c );
};
})( vals[ i ]);
})( 'mean median mode variance'.split( ' ' ));
})( list[ i ]);
})((
'beta cauchy chisquare exponential gamma kumaraswamy lognormal normal ' +
'pareto studentt weibull uniform uniformmv binomial negbin hypgeom poisson'
).split( ' ' ));
// extend beta function with static methods
jStat.extend( jStat.beta, {
pdf : function( x, alpha, beta ) {
return ( Math.pow( x, alpha - 1 ) * Math.pow( 1 - x, beta - 1 )) / jStat.betafn( alpha, beta );
},
cdf : function( x, alpha, beta ) {
return jStat.incompleteBeta( x, alpha, beta );
},
inv : function( x, alpha, beta ) {
return jStat.incompleteBetaInv( x, alpha, beta );
},
mean : function( alpha, beta ) {
return alpha / ( alpha + beta );
},
median : function( alpha, beta ) {
// TODO: implement beta median
},
mode : function( alpha, beta ) {
return ( alpha * beta ) / ( Math.pow( alpha + beta, 2 ) * ( alpha + beta + 1 ));
},
// return a random sample
sample : function( alpha, beta ) {
var u = jStat.randg( alpha );
return u / ( u + jStat.randg( beta ));
},
variance : function( alpha, beta ) {
return ( alpha * beta ) / ( Math.pow( alpha + beta, 2 ) * ( alpha + beta + 1 ) );
}
});
// extend cauchy function with static methods
jStat.extend( jStat.cauchy, {
pdf : function( x, local, scale ) {
return ( scale / ( Math.pow( x - local, 2 ) + Math.pow( scale, 2 ))) / Math.PI;
},
cdf : function( x, local, scale ) {
return Math.atan(( x - local) / scale ) / Math.PI + 0.5;
},
inv : function( p, local, scale ) {
return local + scale * Math.tan( Math.PI * ( p - 0.5 ) );
},
mean : function( local, scale ) {
// TODO: implement this
},
median: function( local, scale ) {
return local;
},
mode : function( local, scale ) {
return local;
},
sample : function( local, scale ) {
return jStat.randn() * Math.sqrt( 1 / ( 2 * jStat.randg( 0.5 ) ) ) * scale + local;
},
variance : function( local, scale ) {
// TODO: implement this
}
});
// extend chisquare function with static methods
jStat.extend( jStat.chisquare, {
pdf : function( x, dof ) {
return (Math.pow( x, dof / 2 - 1) * Math.exp( -x / 2 )) / ( Math.pow( 2, dof / 2) * jStat.gammafn( dof / 2 ));
},
cdf : function( x, dof ) {
return jStat.gammap( x / 2, dof / 2 );
},
inv : function( p, dof ) {
return 2 * jStat.gammapInv( p, 0.5 * dof );
},
mean : function( dof ) {
return dof;
},
//TODO: this is an approximation (is there a better way?)
median : function( dof ) {
return dof * Math.pow( 1 - ( 2 / ( 9 * dof )), 3 );
},
mode : function( dof ) {
return ( dof - 2 > 0 ) ? dof - 2 : 0;
},
sample : function( dof ) {
return jStat.randg( dof/2 ) * 2;
},
variance: function( dof ) {
return 2 * dof;
}
});
// extend exponential function with static methods
jStat.extend( jStat.exponential, {
pdf : function( x, rate ) {
return x < 0 ? 0 : rate * Math.exp( -rate * x );
},
cdf : function( x, rate ) {
return x < 0 ? 0 : 1 - Math.exp( -rate * x );
},
inv : function( p, rate ) {
return -Math.log( 1 - p ) / rate;
},
mean : function( rate ) {
return 1 / rate;
},
median : function ( rate ) {
return ( 1 / rate ) * Math.log(2);
},
mode : function( rate ) {
return 0;
},
sample : function( rate ) {
return -1 / rate * Math.log( Math.random() );
},
variance : function( rate ) {
return Math.pow( rate, -2 );
}
});
// extend gamma function with static methods
jStat.extend( jStat.gamma, {
pdf : function( x, shape, scale ) {
return Math.pow( x, shape - 1 ) * ( Math.exp( -x / scale ) / ( jStat.gammafn( shape ) * Math.pow( scale, shape ) ) );
},
cdf : function( x, shape, scale ) {
return jStat.gammap( x / scale, shape );
},
inv : function( p, shape, scale ) {
return jStat.gammapInv( p, shape ) * scale;
},
mean : function( shape, scale ) {
return shape * scale;
},
mode : function( shape, scale ) {
if( shape > 1 ) return ( shape - 1 ) * scale;
return undefined;
},
sample : function( shape, scale ) {
return jStat.randg( shape ) * scale;
},
variance: function( shape, scale ) {
return shape * scale * scale;
}
});
// extend kumaraswamy function with static methods
jStat.extend( jStat.kumaraswamy, {
pdf : function( x, alpha, beta ) {
return alpha * beta * Math.pow( x, alpha - 1 ) * Math.pow( 1 - Math.pow( x, alpha ), beta - 1 );
},
cdf : function( x, alpha, beta ) {
return ( 1 - Math.pow( 1 - Math.pow( x, alpha ), beta ) );
},
mean : function( alpha, beta ) {
return ( beta * jStat.gammafn( 1 + 1 / alpha ) * jStat.gammafn( beta ) ) / ( jStat.gammafn( 1 + 1 / alpha + beta ) );
},
median : function( alpha, beta ) {
return Math.pow( 1 - Math.pow( 2, -1 / beta ), 1 / alpha );
},
mode : function( alpha, beta ) {
return ( alpha >= 1 && beta >= 1 && ( alpha !== 1 && beta !== 1 ) ) ? Math.pow( ( alpha - 1 ) / ( alpha * beta - 1 ), 1 / alpha ) : undefined;
},
variance: function( alpha, beta ) {
}
});
// extend lognormal function with static methods
jStat.extend( jStat.lognormal, {
pdf : function( x, mu, sigma ) {
return ( 1 / ( x * sigma * Math.sqrt( 2 * Math.PI ) ) ) * Math.exp( -Math.pow( Math.log( x ) - mu, 2) / ( 2 * sigma*sigma ) );
},
cdf : function( x, mu, sigma ) {
return 0.5 + ( 0.5 * jStat.erf( ( Math.log( x ) - mu ) / Math.sqrt( 2 * sigma*sigma ) ) );
},
inv : function( p, mu, sigma ) {
return Math.exp( -1.41421356237309505 * sigma * jStat.erfcinv( 2 * p ) + mu);
},
mean : function( mu, sigma ) {
return Math.exp( mu + sigma*sigma / 2);
},
median : function( mu, sigma ) {
return Math.exp(mu);
},
mode : function( mu, sigma ) {
return Math.exp( mu - sigma*sigma );
},
sample : function( mu, sigma ) {
return Math.exp( jStat.randn() * sigma + mu );
},
variance : function( mu, sigma ) {
return ( Math.exp( sigma*sigma ) - 1 ) * Math.exp( 2 * mu + sigma*sigma );
}
});
// extend normal function with static methods
jStat.extend( jStat.normal, {
pdf : function( x, mean, std ) {
return ( 1 / ( Math.sqrt( 2 * Math.PI * std * std))) * Math.exp( -( Math.pow( x - mean, 2 ) / 2 * std * std ) );
},
cdf : function( x, mean, std ) {
return 0.5 * ( 1 + jStat.erf( ( x - mean ) / Math.sqrt( 2 * std * std ) ) );
},
inv : function( p, mean, std ) {
return -1.41421356237309505 * std * jStat.erfcinv( 2 * p ) + mean;
},
mean : function( mean, std ) {
return mean;
},
median : function( mean, std ) {
return mean;
},
mode : function ( mean, std ) {
return mean;
},
sample : function( mean, std ) {
return jStat.randn() * std + mean;
},
variance : function( mean, std ) {
return std * std;
}
});
// extend pareto function with static methods
jStat.extend( jStat.pareto, {
pdf : function( x, scale, shape ) {
return ( x > scale ) ? ( shape * Math.pow( scale, shape ) ) / Math.pow( x, shape + 1 ) : undefined;
},
cdf : function( x, scale, shape ) {
return 1 - Math.pow( scale / x, shape );
},
mean : function( scale, shape ) {
return ( shape > 1 ) ? ( shape * Math.pow( scale, shape ) ) / ( shape - 1 ) : undefined;
},
median : function( scale, shape ) {
return scale * ( shape * Math.SQRT2 );
},
mode : function( scale, shape ) {
return scale;
},
variance : function( scale, shape ) {
return ( shape > 2 ) ? ( scale*scale * shape ) / ( Math.pow( shape - 1, 2 ) * ( shape - 2 ) ) : undefined;
}
});
// extend studentt function with static methods
jStat.extend( jStat.studentt, {
pdf : function( x, dof ) {
return ( jStat.gammafn( ( dof + 1 ) / 2 ) / ( Math.sqrt( dof * Math.PI ) * jStat.gammafn( dof / 2 ) ) ) * Math.pow( 1 + ( ( x*x ) / dof ), -( ( dof + 1 ) / 2 ) );
},
cdf : function( x, dof ) {
var dof2 = dof / 2;
return jStat.incompleteBeta( ( x + Math.sqrt( x*x + dof ) ) / ( 2 * Math.sqrt( x*x + dof ) ), dof2, dof2 );
},
inv : function( p, dof ) {
var x = jStat.incompleteBetaInv( 2 * Math.min( p, 1 - p ), 0.5 * dof, 0.5 );
x = Math.sqrt( dof * ( 1 - x ) / x );
return ( p > 0 ) ? x : -x;
},
mean : function( dof ) {
return ( dof > 1 ) ? 0 : undefined;
},
median : function ( dof ) {
return 0;
},
mode : function( dof ) {
return 0;
},
sample : function( dof ) {
return jStat.randn() * Math.sqrt( dof / ( 2 * jStat.randg( dof / 2) ) );
},
variance : function( dof ) {
return ( dof > 2 ) ? dof / ( dof - 2 ) : ( dof > 1 ) ? Infinity : undefined;
}
});
// extend weibull function with static methods
jStat.extend( jStat.weibull, {
pdf : function( x, scale, shape ) {
return x < 0 ? 0 : ( shape / scale ) * Math.pow(( x / scale ),( shape - 1 )) * Math.exp(-( Math.pow(( x / scale ), shape )));
},
cdf : function( x, scale, shape ) {
return x < 0 ? 0 : 1 - Math.exp( -Math.pow(( x / scale ), shape ));
},
inv : function( p, scale, shape ) {
return scale * Math.pow( -Math.log( 1 - p ), 1 / shape );
},
mean : function( scale, shape ) {
return scale * jStat.gammafn( 1 + 1 / shape );
},
median : function( scale, shape ) {
return scale * Math.pow( Math.log( 2 ), 1 / shape );
},
mode : function( scale, shape ) {
return ( shape > 1 ) ? scale * Math.pow(( shape - 1 ) / shape, 1 / shape ) : undefined;
},
sample : function( scale, shape ) {
return scale * Math.pow( -Math.log( Math.random() ), 1 / shape );
},
variance : function( scale, shape ) {
return scale * scale * jStat.gammafn( 1 + 2 / shape ) - Math.pow( this.mean( scale, shape ), 2 );
}
});
// extend uniform function with static methods
jStat.extend( jStat.uniform, {
pdf : function( x, a, b ) {
return ( x < a || x > b ) ? 0 : 1 / ( b - a );
},
cdf : function( x, a, b ) {
if ( x < a ) {
return 0;
} else if ( x < b ) {
return ( x - a ) / ( b - a );
}
return 1;
},
mean : function( a, b ) {
return 0.5 * ( a + b );
},
median : function( a, b ) {
return jStat.mean( a, b );
},
mode : function( a, b ) {
},
sample : function( a, b ) {
return ( a / 2 + b / 2 ) + ( b / 2 - a / 2) * ( 2 * Math.random() - 1);
},
variance : function( a, b ) {
return 0.08333333333333333 * Math.pow( b - a, 2 );
}
});
// extend uniformmv function with static methods. This parameterizes the
// uniform distribution by mean (m) and standard deviation (s); the support
// is [m - s*sqrt(3), m + s*sqrt(3)].
jStat.extend( jStat.uniformmv, {
	// Density: constant 1/(2*s*sqrt(3)) inside the support, 0 outside.
	// Fixed: the original used Math.sqrt( -3 ), which is NaN in JavaScript,
	// so every comparison was false (pdf always returned 0 and cdf propagated
	// NaN); the range test also used `||` where an interval check needs `&&`.
	pdf : function( x, m, s ) {
		var sqrt3 = Math.sqrt( 3 );
		return ( -s * sqrt3 <= x - m && x - m <= s * sqrt3 )
			? 1 / ( 2 * s * sqrt3 )
			: 0;
	},

	// Cumulative distribution: 0 below the support, 1 above it, linear inside
	cdf : function( x, m, s ) {
		var sqrt3 = Math.sqrt( 3 );
		return ( x - m < -s * sqrt3 )
			? 0
			: ( x - m >= s * sqrt3 )
				? 1
				: 0.5 * (( x - m ) / ( s * sqrt3 ) + 1 );
	}
});
// extend binomial function with static methods
jStat.extend( jStat.binomial, {
	// Probability mass: C(n, k) * p^k * (1-p)^(n-k)
	pdf : function( k, n, p ) {
		return jStat.combination( n, k ) * Math.pow( p, k ) * Math.pow( 1 - p, n - k );
	},

	// Cumulative distribution: sum of the mass for 0..x (0 below 0, 1 at or above n)
	cdf : function( x, n, p ) {
		if ( x < 0 ) {
			return 0;
		}
		if ( x >= n ) {
			return 1;
		}
		var sum = 0;
		for ( var i = 0; i <= x; i++ ) {
			sum += jStat.binomial( i, n, p );
		}
		return sum;
	}
});
// extend negbin function with static methods
jStat.extend( jStat.negbin, {
	// Probability mass of the negative binomial; returns false for
	// non-integer k and 0 for negative k.
	// Fixed: `k !== k | 0` parsed as `( k !== k ) | 0`, which is always 0
	// (falsy) because `!==` binds tighter than `|` — the non-integer guard
	// never fired. The intended integer check needs explicit parentheses.
	pdf : function( k, r, p ) {
		return k !== ( k | 0 )
			? false
			: k < 0
				? 0
				: jStat.combination( k + r - 1, k ) * Math.pow( 1 - p, r ) * Math.pow( p, k );
	},

	// Cumulative distribution: sum of the mass for 0..x; 0 when x < 0
	cdf : function( x, r, p ) {
		var sum = 0,
			k = 0;
		if ( x < 0 ) return 0;
		for ( ; k <= x; k++ ) {
			sum += jStat.negbin( k, r, p );
		}
		return sum;
	}
});
// extend hypgeom function with static methods
jStat.extend( jStat.hypgeom, {
	// Probability mass of the hypergeometric distribution; returns false for
	// non-integer k and 0 for negative k.
	// Fixed: `k !== k | 0` parsed as `( k !== k ) | 0`, which is always 0
	// (falsy) because `!==` binds tighter than `|` — the non-integer guard
	// never fired. The intended integer check needs explicit parentheses.
	pdf : function( k, N, m, n ) {
		return k !== ( k | 0 )
			? false
			: ( k < 0)
				? 0
				: jStat.combination( m, k ) * jStat.combination( N - m , n - k ) / jStat.combination( N, n );
	},

	// Cumulative distribution: sum of the mass for 0..x; 0 when x < 0
	cdf : function( x, N, m, n ) {
		var sum = 0,
			k = 0;
		if ( x < 0 ) return 0;
		for ( ; k <= x; k++ ) {
			sum += jStat.hypgeom( k, N, m, n );
		}
		return sum;
	}
});
// extend poisson function with static methods
jStat.extend( jStat.poisson, {
	// Probability mass: l^k * e^(-l) / k!
	pdf : function( k, l ) {
		return Math.pow( l, k ) * Math.exp( -l ) / jStat.factorial( k );
	},

	// Cumulative distribution: sum of the mass for 0..x; 0 when x < 0
	cdf : function( x, l ) {
		if ( x < 0 ) {
			return 0;
		}
		var sum = 0;
		for ( var k = 0; k <= x; k++ ) {
			sum += jStat.poisson( k, l );
		}
		return sum;
	}
});
})( this.jStat, Math );
| small spacing fixes
| src/distribution.js | small spacing fixes | <ide><path>rc/distribution.js
<ide> },
<ide>
<ide> variance : function( alpha, beta ) {
<del> return ( alpha * beta ) / ( Math.pow( alpha + beta, 2 ) * ( alpha + beta + 1 ) );
<add> return ( alpha * beta ) / ( Math.pow( alpha + beta, 2 ) * ( alpha + beta + 1 ));
<ide> }
<ide> });
<ide>
<ide> },
<ide>
<ide> inv : function( p, local, scale ) {
<del> return local + scale * Math.tan( Math.PI * ( p - 0.5 ) );
<add> return local + scale * Math.tan( Math.PI * ( p - 0.5 ));
<ide> },
<ide>
<ide> mean : function( local, scale ) {
<ide> },
<ide>
<ide> sample : function( local, scale ) {
<del> return jStat.randn() * Math.sqrt( 1 / ( 2 * jStat.randg( 0.5 ) ) ) * scale + local;
<add> return jStat.randn() * Math.sqrt( 1 / ( 2 * jStat.randg( 0.5 ))) * scale + local;
<ide> },
<ide>
<ide> variance : function( local, scale ) {
<ide> // extend chisquare function with static methods
<ide> jStat.extend( jStat.chisquare, {
<ide> pdf : function( x, dof ) {
<del> return (Math.pow( x, dof / 2 - 1) * Math.exp( -x / 2 )) / ( Math.pow( 2, dof / 2) * jStat.gammafn( dof / 2 ));
<add> return ( Math.pow( x, dof / 2 - 1) * Math.exp( -x / 2 )) / ( Math.pow( 2, dof / 2) * jStat.gammafn( dof / 2 ));
<ide> },
<ide>
<ide> cdf : function( x, dof ) {
<ide> },
<ide>
<ide> sample : function( dof ) {
<del> return jStat.randg( dof/2 ) * 2;
<add> return jStat.randg( dof / 2 ) * 2;
<ide> },
<ide>
<ide> variance: function( dof ) {
<ide> },
<ide>
<ide> median : function ( rate ) {
<del> return ( 1 / rate ) * Math.log(2);
<add> return ( 1 / rate ) * Math.log( 2 );
<ide> },
<ide>
<ide> mode : function( rate ) {
<ide> },
<ide>
<ide> sample : function( rate ) {
<del> return -1 / rate * Math.log( Math.random() );
<add> return -1 / rate * Math.log( Math.random());
<ide> },
<ide>
<ide> variance : function( rate ) {
<ide> // extend gamma function with static methods
<ide> jStat.extend( jStat.gamma, {
<ide> pdf : function( x, shape, scale ) {
<del> return Math.pow( x, shape - 1 ) * ( Math.exp( -x / scale ) / ( jStat.gammafn( shape ) * Math.pow( scale, shape ) ) );
<add> return Math.pow( x, shape - 1 ) * ( Math.exp( -x / scale ) / ( jStat.gammafn( shape ) * Math.pow( scale, shape )));
<ide> },
<ide>
<ide> cdf : function( x, shape, scale ) {
<ide> },
<ide>
<ide> cdf : function( x, alpha, beta ) {
<del> return ( 1 - Math.pow( 1 - Math.pow( x, alpha ), beta ) );
<add> return ( 1 - Math.pow( 1 - Math.pow( x, alpha ), beta ));
<ide> },
<ide>
<ide> mean : function( alpha, beta ) {
<del> return ( beta * jStat.gammafn( 1 + 1 / alpha ) * jStat.gammafn( beta ) ) / ( jStat.gammafn( 1 + 1 / alpha + beta ) );
<add> return ( beta * jStat.gammafn( 1 + 1 / alpha ) * jStat.gammafn( beta )) / ( jStat.gammafn( 1 + 1 / alpha + beta ));
<ide> },
<ide>
<ide> median : function( alpha, beta ) {
<ide> },
<ide>
<ide> mode : function( alpha, beta ) {
<del> return ( alpha >= 1 && beta >= 1 && ( alpha !== 1 && beta !== 1 ) ) ? Math.pow( ( alpha - 1 ) / ( alpha * beta - 1 ), 1 / alpha ) : undefined;
<add> return ( alpha >= 1 && beta >= 1 && ( alpha !== 1 && beta !== 1 )) ? Math.pow(( alpha - 1 ) / ( alpha * beta - 1 ), 1 / alpha ) : undefined;
<ide> },
<ide>
<ide> variance: function( alpha, beta ) {
<ide> // extend lognormal function with static methods
<ide> jStat.extend( jStat.lognormal, {
<ide> pdf : function( x, mu, sigma ) {
<del> return ( 1 / ( x * sigma * Math.sqrt( 2 * Math.PI ) ) ) * Math.exp( -Math.pow( Math.log( x ) - mu, 2) / ( 2 * sigma*sigma ) );
<add> return ( 1 / ( x * sigma * Math.sqrt( 2 * Math.PI ))) * Math.exp( -Math.pow( Math.log( x ) - mu, 2 ) / ( 2 * sigma * sigma ));
<ide> },
<ide>
<ide> cdf : function( x, mu, sigma ) {
<del> return 0.5 + ( 0.5 * jStat.erf( ( Math.log( x ) - mu ) / Math.sqrt( 2 * sigma*sigma ) ) );
<add> return 0.5 + ( 0.5 * jStat.erf(( Math.log( x ) - mu ) / Math.sqrt( 2 * sigma * sigma )));
<ide> },
<ide>
<ide> inv : function( p, mu, sigma ) {
<ide> },
<ide>
<ide> mean : function( mu, sigma ) {
<del> return Math.exp( mu + sigma*sigma / 2);
<add> return Math.exp( mu + sigma * sigma / 2);
<ide> },
<ide>
<ide> median : function( mu, sigma ) {
<del> return Math.exp(mu);
<add> return Math.exp( mu );
<ide> },
<ide>
<ide> mode : function( mu, sigma ) {
<del> return Math.exp( mu - sigma*sigma );
<add> return Math.exp( mu - sigma * sigma );
<ide> },
<ide>
<ide> sample : function( mu, sigma ) {
<ide> },
<ide>
<ide> variance : function( mu, sigma ) {
<del> return ( Math.exp( sigma*sigma ) - 1 ) * Math.exp( 2 * mu + sigma*sigma );
<add> return ( Math.exp( sigma * sigma ) - 1 ) * Math.exp( 2 * mu + sigma * sigma );
<ide> }
<ide> });
<ide>
<ide> // extend normal function with static methods
<ide> jStat.extend( jStat.normal, {
<ide> pdf : function( x, mean, std ) {
<del> return ( 1 / ( Math.sqrt( 2 * Math.PI * std * std))) * Math.exp( -( Math.pow( x - mean, 2 ) / 2 * std * std ) );
<add> return ( 1 / ( Math.sqrt( 2 * Math.PI * std * std))) * Math.exp( -( Math.pow( x - mean, 2 ) / 2 * std * std ));
<ide> },
<ide>
<ide> cdf : function( x, mean, std ) {
<del> return 0.5 * ( 1 + jStat.erf( ( x - mean ) / Math.sqrt( 2 * std * std ) ) );
<add> return 0.5 * ( 1 + jStat.erf(( x - mean ) / Math.sqrt( 2 * std * std )));
<ide> },
<ide>
<ide> inv : function( p, mean, std ) {
<ide> // extend pareto function with static methods
<ide> jStat.extend( jStat.pareto, {
<ide> pdf : function( x, scale, shape ) {
<del> return ( x > scale ) ? ( shape * Math.pow( scale, shape ) ) / Math.pow( x, shape + 1 ) : undefined;
<add> return ( x > scale ) ? ( shape * Math.pow( scale, shape )) / Math.pow( x, shape + 1 ) : undefined;
<ide> },
<ide>
<ide> cdf : function( x, scale, shape ) {
<ide> },
<ide>
<ide> mean : function( scale, shape ) {
<del> return ( shape > 1 ) ? ( shape * Math.pow( scale, shape ) ) / ( shape - 1 ) : undefined;
<add> return ( shape > 1 ) ? ( shape * Math.pow( scale, shape )) / ( shape - 1 ) : undefined;
<ide> },
<ide>
<ide> median : function( scale, shape ) {
<ide> },
<ide>
<ide> variance : function( scale, shape ) {
<del> return ( shape > 2 ) ? ( scale*scale * shape ) / ( Math.pow( shape - 1, 2 ) * ( shape - 2 ) ) : undefined;
<add> return ( shape > 2 ) ? ( scale*scale * shape ) / ( Math.pow( shape - 1, 2 ) * ( shape - 2 )) : undefined;
<ide> }
<ide> });
<ide>
<ide> // extend studentt function with static methods
<ide> jStat.extend( jStat.studentt, {
<ide> pdf : function( x, dof ) {
<del> return ( jStat.gammafn( ( dof + 1 ) / 2 ) / ( Math.sqrt( dof * Math.PI ) * jStat.gammafn( dof / 2 ) ) ) * Math.pow( 1 + ( ( x*x ) / dof ), -( ( dof + 1 ) / 2 ) );
<add> return ( jStat.gammafn(( dof + 1 ) / 2 ) / ( Math.sqrt( dof * Math.PI ) * jStat.gammafn( dof / 2 ))) * Math.pow( 1 + (( x*x ) / dof ), -(( dof + 1 ) / 2 ));
<ide> },
<ide>
<ide> cdf : function( x, dof ) {
<ide> var dof2 = dof / 2;
<del> return jStat.incompleteBeta( ( x + Math.sqrt( x*x + dof ) ) / ( 2 * Math.sqrt( x*x + dof ) ), dof2, dof2 );
<add> return jStat.incompleteBeta(( x + Math.sqrt( x * x + dof )) / ( 2 * Math.sqrt( x * x + dof )), dof2, dof2 );
<ide> },
<ide>
<ide> inv : function( p, dof ) {
<ide> },
<ide>
<ide> sample : function( dof ) {
<del> return jStat.randn() * Math.sqrt( dof / ( 2 * jStat.randg( dof / 2) ) );
<add> return jStat.randn() * Math.sqrt( dof / ( 2 * jStat.randg( dof / 2)));
<ide> },
<ide>
<ide> variance : function( dof ) {
<ide> },
<ide>
<ide> sample : function( scale, shape ) {
<del> return scale * Math.pow( -Math.log( Math.random() ), 1 / shape );
<add> return scale * Math.pow( -Math.log( Math.random()), 1 / shape );
<ide> },
<ide>
<ide> variance : function( scale, shape ) {
<ide> jStat.extend( jStat.uniformmv, {
<ide> pdf : function( x, m, s ) {
<ide> var sqrtt = Math.sqrt( -3 );
<del> return ( -s * sqrtt <= x - m || x - m <= s * sqrtt )
<del> ? 1 / ( 2 * s * sqrtt )
<del> : 0;
<add> return ( -s * sqrtt <= x - m || x - m <= s * sqrtt ) ? 1 / ( 2 * s * sqrtt ) : 0;
<ide> },
<ide>
<ide> cdf : function( x, m, s ) {
<ide> var sqrtt = Math.sqrt( -3 );
<del> return ( x - m < -s * sqrtt )
<del> ? 0
<del> : ( x - m >= s * sqrtt )
<del> ? 1
<del> : 0.5 * (( x - m ) / ( s * sqrtt ) + 1 );
<add> return ( x - m < -s * sqrtt ) ? 0 : ( x - m >= s * sqrtt ) ? 1 : 0.5 * (( x - m ) / ( s * sqrtt ) + 1 );
<ide> }
<ide> });
<ide>
<ide> // extend uniform function with static methods
<ide> jStat.extend( jStat.negbin, {
<ide> pdf : function( k, r, p ) {
<del> return k !== k | 0
<del> ? false
<del> : k < 0
<del> ? 0
<add> return k !== k | 0 ? false
<add> : k < 0 ? 0
<ide> : jStat.combination( k + r - 1, k ) * Math.pow( 1 - p, r ) * Math.pow( p, k );
<ide> },
<ide>
<ide> // extend uniform function with static methods
<ide> jStat.extend( jStat.hypgeom, {
<ide> pdf : function( k, N, m, n ) {
<del> return k !== k | 0
<del> ? false
<del> : ( k < 0)
<del> ? 0
<add> return k !== k | 0 ? false
<add> : ( k < 0) ? 0
<ide> : jStat.combination( m, k ) * jStat.combination( N - m , n - k ) / jStat.combination( N, n );
<ide> },
<ide> |
|
JavaScript | mit | f2be8703fa2e4157c2995aaf81681637ed37d8c3 | 0 | brendannee/gtfs-to-html,BlinkTagInc/gtfs-to-html,BlinkTagInc/gtfs-to-html,brendannee/gtfs-to-html | const _ = require('lodash');
const gtfs = require('gtfs');
const moment = require('moment');
const fileUtils = require('./file-utils');
const formatters = require('./formatters');
const geoJSONUtils = require('./geojson-utils');
const timeUtils = require('./time-utils');
const { version } = require('../package.json');
/*
 * Collect the distinct, non-empty route colors used by a timetable page.
 */
const getTimetablePageColors = async timetablePage => {
  const routes = await gtfs.getRoutes({
    agency_key: timetablePage.agency_key,
    route_id: {$in: timetablePage.routeIds}
  });
  const colors = routes.map(route => route.route_color);
  return _.compact(_.uniq(colors));
};
/*
 * Determine whether a stoptime is a timepoint. When the GTFS `timepoint`
 * column is present, only a value of 1 counts; when it is absent, fall back
 * to "has both an arrival and a departure time set".
 */
const isTimepoint = stoptime => {
  if (stoptime.timepoint !== undefined) {
    return stoptime.timepoint === 1;
  }

  return stoptime.arrival_time !== '' && stoptime.departure_time !== '';
};
/*
 * Return the stoptimes of the trip with the most stops. When
 * `config.showOnlyTimepoint` is true, only timepoint stops are counted.
 */
const getLongestTripStoptimes = (trips, config) => {
  // If `showOnlyTimepoint` is true, then filter out all non-timepoints
  const stoptimeLists = trips.map(trip => {
    if (config.showOnlyTimepoint === true) {
      return _.filter(trip.stoptimes, isTimepoint);
    }

    return trip.stoptimes;
  });

  return _.maxBy(stoptimeLists, stoptimes => _.size(stoptimes));
};
/*
 * Find the first stop_id (along the longest trip's stop sequence) that every
 * trip serves with a real timepoint. Returns the stop_id, or null if no
 * common stop exists or there are no stoptimes at all.
 */
const findCommonStopId = (trips, config) => {
  // Use the longest trip as the reference stop sequence.
  const longestTripStoptimes = getLongestTripStoptimes(trips, config);

  if (!longestTripStoptimes) {
    return null;
  }

  const commonStoptime = _.find(longestTripStoptimes, (stoptime, idx) => {
    // If longest trip is a loop (first and last stops the same), then skip first stoptime
    if (idx === 0 && stoptime.stop_id === _.last(longestTripStoptimes).stop_id) {
      return false;
    }

    // If stoptime isn't a timepoint, skip it
    if (stoptime.arrival_time === '') {
      return false;
    }

    // Accept the first reference stop that appears in every trip.
    return _.every(trips, trip => {
      return _.find(trip.stoptimes, {stop_id: stoptime.stop_id});
    });
  });

  return commonStoptime ? commonStoptime.stop_id : null;
};
/*
 * Return a set of unique trips (with at least one unique stop time) from an
 * array of trips. Duplicates can appear when overlapping service_ids select
 * the same underlying schedule more than once. Comparison is anchored at
 * `commonStopId` when given, otherwise at each trip's first stoptime.
 */
const deduplicateTrips = (trips, commonStopId) => {
  // Remove duplicate trips (from overlapping service_ids)
  const deduplicatedTrips = trips.reduce((memo, trip) => {
    if (memo.length === 0 || trip.stoptimes.length === 0) {
      memo.push(trip);
    } else {
      // Full departure-time signature of this trip, used for the final
      // equality check below.
      const stoptimes = _.map(trip.stoptimes, 'departure_time');
      let selectedStoptime;
      if (commonStopId) {
        selectedStoptime = _.find(trip.stoptimes, {stop_id: commonStopId});
      } else {
        selectedStoptime = trip.stoptimes[0];
      }

      // Find all other trips where the common stop has the same departure time
      const similarTrips = _.filter(memo, trip => {
        const stoptime = _.find(trip.stoptimes, {stop_id: selectedStoptime.stop_id});
        if (!stoptime) {
          return false;
        }

        return stoptime.departure_time === selectedStoptime.departure_time;
      });

      // Only add trip if no existing trip with the same set of timepoints has already been added
      const tripIsUnique = _.every(similarTrips, similarTrip => {
        const similarTripStoptimes = _.map(similarTrip.stoptimes, 'departure_time');
        return !_.isEqual(stoptimes, similarTripStoptimes);
      });

      if (tripIsUnique) {
        memo.push(trip);
      }
    }

    return memo;
  }, []);

  return deduplicatedTrips;
};
/*
 * Sort trips chronologically, using a common stop id if available, otherwise
 * use the first stoptime.
 * Edited by Pawajoro - more sorting options
 *
 * Supported config.sortingAlgorithm values:
 *   'beginning' / 'end' — repeatedly re-order trips by their departure time
 *     at each stop of the longest trip, walking forward / backward.
 *   'common' — sort by departure time at a stop shared by all trips.
 *   'last'   — sort by each trip's final stoptime.
 *   other    — sort by each trip's first stoptime.
 * Returns the sorted trips with duplicates removed. Mutates trips by
 * attaching `sortingStoptime` / `sortingTime` bookkeeping fields.
 */
const sortTrips = (trips, config) => {
  let sortedTrips = trips;
  let commonStopId;

  if (_.includes(['beginning', 'end'], config.sortingAlgorithm)) {
    let referenceStoptimes;
    let sortingDirection;
    let sortingOrder;

    // Walk the reference stops forward ('beginning') or backward ('end').
    if (config.sortingAlgorithm === 'end') {
      referenceStoptimes = _.orderBy(getLongestTripStoptimes(trips, config), ['stop_sequence'], 'desc');
      sortingDirection = -1;
      sortingOrder = 'desc';
    } else {
      referenceStoptimes = _.sortBy(getLongestTripStoptimes(trips, config), ['stop_sequence']);
      sortingDirection = 1;
      sortingOrder = 'asc';
    }

    for (const stop of referenceStoptimes) {
      let previousSortingStoptime;
      for (const trip of sortedTrips) {
        if (trip.stoptimes.length === 0) {
          trip.sortingStoptime = undefined;
        }

        const selectedStoptime = _.find(trip.stoptimes, {stop_id: stop.stop_id});

        // Trips that skip this stop, or that stop here without a timepoint,
        // inherit the previous trip's sorting time so relative order holds.
        if (!selectedStoptime) {
          if (!trip.sortingStoptime || trip.sortingStoptime * sortingDirection < previousSortingStoptime * sortingDirection) {
            trip.sortingStoptime = previousSortingStoptime;
          }
        } else if (isTimepoint(selectedStoptime)) {
          trip.sortingStoptime = formatters.timeToSeconds(selectedStoptime.departure_time);
        } else if (!trip.sortingStoptime || trip.sortingStoptime * sortingDirection < previousSortingStoptime * sortingDirection) {
          trip.sortingStoptime = previousSortingStoptime;
        }

        if (selectedStoptime) {
          selectedStoptime.sortingTime = trip.sortingStoptime;
        }

        previousSortingStoptime = trip.sortingStoptime;
      }

      // Re-sort after each reference stop so later stops refine the order.
      sortedTrips = _.orderBy(sortedTrips, ['sortingStoptime'], sortingOrder);
    }

    if (sortingOrder === 'desc') {
      sortedTrips = sortedTrips.reverse();
    }
  } else {
    if (config.sortingAlgorithm === 'common') {
      commonStopId = findCommonStopId(trips, config);
    }

    sortedTrips = _.sortBy(trips, trip => {
      if (trip.stoptimes.length === 0) {
        return;
      }

      let selectedStoptime;
      if (commonStopId) {
        selectedStoptime = _.find(trip.stoptimes, {stop_id: commonStopId});
      } else if (config.sortingAlgorithm !== 'last') {
        selectedStoptime = _.first(trip.stoptimes);
      }

      if (config.sortingAlgorithm === 'last') {
        selectedStoptime = _.last(trip.stoptimes);
      }

      return formatters.timeToSeconds(selectedStoptime.departure_time);
    });
  }

  return deduplicateTrips(sortedTrips, commonStopId);
};
/*
 * Select the timetables belonging to the given timetable page id and order
 * them by timetable_sequence.
 */
const filterAndSortTimetables = async (timetables, timetablePageId) => {
  const matchingTimetables = _.filter(timetables, {timetable_page_id: timetablePageId});
  const sortedTimetables = _.sortBy(matchingTimetables, 'timetable_sequence');
  return sortedTimetables;
};
/*
 * Get all calendar-date service exceptions for a timetable, partitioned into
 * `includedDates` (exception_type 1) and `excludedDates` (exception_type 2)
 * and formatted with the configured date format.
 */
const getCalendarDates = async (timetable, config) => {
  const calendarDates = await gtfs.getCalendarDates({
    agency_key: timetable.agency_key,
    service_id: {
      $in: timetable.serviceIds
    }
  })
    .sort('date')
    .lean();
  const start = timeUtils.fromGTFSDate(timetable.start_date);
  const end = timeUtils.fromGTFSDate(timetable.end_date);

  const filteredCalendarDates = calendarDates.reduce((memo, calendarDate) => {
    // NOTE(review): moment's isBetween is exclusive of both endpoints by
    // default, so exceptions falling exactly on start_date or end_date are
    // skipped — confirm this is intended.
    if (moment(calendarDate.date, 'YYYYMMDD').isBetween(start, end)) {
      if (calendarDate.exception_type === 1) {
        memo.includedDates.push(formatters.formatDate(calendarDate, config.dateFormat));
      } else if (calendarDate.exception_type === 2) {
        memo.excludedDates.push(formatters.formatDate(calendarDate, config.dateFormat));
      }
    }

    return memo;
  }, {
    excludedDates: [],
    includedDates: []
  });

  return filteredCalendarDates;
};
/*
 * Merge the day-of-week flags from a list of calendars: a day is 1 when any
 * calendar in the list runs on that day, otherwise 0.
 */
const getDaysFromCalendars = calendars => {
  const dayNames = ['monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday', 'sunday'];
  const days = {};

  for (const day of dayNames) {
    days[day] = 0;
  }

  for (const calendar of calendars) {
    for (const day of dayNames) {
      days[day] |= calendar[day];
    }
  }

  return days;
};
/*
 * Look up the route record for a timetable. Returns null (and logs a
 * warning) when the route_id cannot be found.
 */
const getRouteFromTimetable = async (timetable, config) => {
  const routes = await gtfs.getRoutes({
    agency_key: timetable.agency_key,
    route_id: timetable.route_id
  });

  if (routes.length > 0) {
    return _.first(routes);
  }

  config.logWarning(`No route found for route_id=${timetable.route_id}, timetable_id=${timetable.timetable_id}`);
  return null;
};
/*
 * Look up the trip_headsign for a timetable's route and direction. Returns
 * an empty string when no matching direction exists.
 */
const getDirectionHeadsignFromTimetable = async timetable => {
  const directions = await gtfs.getDirectionsByRoute({
    agency_key: timetable.agency_key,
    route_id: timetable.route_id,
    direction_id: timetable.direction_id
  });

  if (directions.length > 0) {
    return _.first(directions).trip_headsign;
  }

  return '';
};
/*
 * Wrap a single timetable in a timetable page object. Used if no
 * `timetable_pages.txt` is present. Fetches the route when the timetable
 * does not already carry one.
 */
const convertTimetableToTimetablePage = async (timetable, config) => {
  if (!timetable.route) {
    timetable.route = await getRouteFromTimetable(timetable, config);
  }

  return {
    agency_key: timetable.agency_key,
    timetable_page_id: timetable.timetable_id,
    timetable_page_label: timetable.timetable_label,
    timetables: [timetable],
    filename: await fileUtils.generateFileName(timetable, config)
  };
};
/*
 * Build a synthetic timetable (and its page) for a route + direction. Used
 * if no `timetables.txt` is present. Day-of-week flags come from the
 * calendars; calendar-date-only services are marked via include_exceptions.
 */
const convertRouteToTimetablePage = (route, direction, calendars, calendarDates, config) => {
  const hasCalendarDates = Boolean(calendarDates && calendarDates.length);

  const timetable = {
    agency_key: route.agency_key,
    route_id: route.route_id,
    direction_id: direction ? direction.direction_id : undefined,
    direction_name: direction ? direction.trip_headsign : undefined,
    route,
    include_exceptions: hasCalendarDates ? 1 : 0,
    service_id: hasCalendarDates ? calendarDates[0].service_id : null
  };

  // Copy day-of-week flags from the calendars onto the timetable.
  Object.assign(timetable, getDaysFromCalendars(calendars || []));

  timetable.timetable_id = formatters.formatTimetableId(timetable);

  return convertTimetableToTimetablePage(timetable, config);
};
/*
 * Create timetable pages for all routes in an agency. Used if no
 * `timetables.txt` is present. One page is generated per route, per
 * direction, per distinct day-of-week pattern, and per calendar-date-only
 * service.
 */
const convertRoutesToTimetablePages = async (agencyKey, config) => {
  const routes = await gtfs.getRoutes({agency_key: agencyKey});
  const timetablePages = await Promise.all(routes.map(async route => {
    const directions = await gtfs.getDirectionsByRoute({
      agency_key: agencyKey,
      route_id: route.route_id
    });
    const calendars = await gtfs.getCalendars({
      agency_key: agencyKey,
      route_id: route.route_id
    }, undefined, { lean: true });

    // Find all calendar dates with service_ids not present in calendar.txt
    const calendarDates = await gtfs.getCalendarDates({
      agency_key: agencyKey,
      exception_type: 1,
      service_id: { $nin: _.map(calendars, 'service_id') }
    });

    // Group by direction, by identical day-of-week pattern, and by service_id.
    const directionGroups = _.groupBy(directions, direction => direction.direction_id);
    const dayGroups = _.groupBy(calendars, timeUtils.calendarToCalendarCode);
    const calendarDateGroups = _.groupBy(calendarDates, 'service_id');

    return Promise.all(_.map(directionGroups, directionGroup => {
      const direction = _.first(directionGroup);
      return Promise.all([
        // Pages driven by regular calendars (one per day pattern).
        Promise.all(_.map(dayGroups, calendars => {
          return convertRouteToTimetablePage(route, direction, calendars, null, config);
        })),
        // Pages driven purely by calendar-date exceptions.
        Promise.all(_.map(calendarDateGroups, calendarDates => {
          return convertRouteToTimetablePage(route, direction, null, calendarDates, config);
        }))
      ])
    }));
  }));

  return _.compact(_.flattenDeep(timetablePages));
};
/*
 * Expand a template trip into concrete trips using `frequencies.txt`
 * entries: one generated trip per headway interval between each frequency's
 * start and end time.
 */
const generateTripsByFrequencies = (trip, frequencies) => {
  // Shift the template trip's stoptimes so its first departure is at midnight.
  const resetTrip = formatters.resetStoptimesToMidnight(trip);
  return frequencies.reduce((memo, frequency) => {
    const startSeconds = timeUtils.secondsAfterMidnight(frequency.start_time);
    const endSeconds = timeUtils.secondsAfterMidnight(frequency.end_time);
    for (let offset = startSeconds; offset < endSeconds; offset += frequency.headway_secs) {
      // Clone the template (dropping the Mongo _id) and give each generated
      // trip a unique, derived trip_id.
      const newTrip = _.omit(_.cloneDeep(resetTrip), ['_id']);
      newTrip.trip_id = `${resetTrip.trip_id}_freq_${memo.length}`;
      newTrip.stoptimes = formatters.updateStoptimesByOffset(newTrip, offset);
      memo.push(newTrip);
    }

    return memo;
  }, []);
};
/*
 * Get an ordered array of stop_ids for a specific timetable. Prefers the
 * explicit order from `timetable_stop_order.txt`; otherwise derives the
 * order from the longest trip. Stops with a long enough dwell time are
 * duplicated so arrival and departure can be shown as separate rows.
 */
const getStopIds = async (timetable, config) => {
  const timetableStopOrders = await gtfs.getTimetableStopOrders({
    agency_key: timetable.agency_key,
    timetable_id: timetable.timetable_id
  });

  if (timetableStopOrders && timetableStopOrders.length !== 0) {
    // Use the stop_sequence from `timetable_stop_order.txt`
    return _.map(timetableStopOrders, 'stop_id');
  }

  let stopIds = [];

  const longestTripStoptimes = getLongestTripStoptimes(timetable.orderedTrips, config);

  // Index by stop_sequence so the array follows the trip's stop order.
  for (const stoptime of longestTripStoptimes) {
    stopIds[stoptime.stop_sequence] = stoptime.stop_id;
  }

  // Remove any missing values from missing stop_sequence
  stopIds = _.compact(stopIds);

  /*
   * Check if any stoptimes have different arrival and departure times and
   * if they do, duplicate the stop id unless it is the first or last stop.
   * Edited by Pawajoro - minimal difference specified in config, or NULL
   */
  for (const trip of timetable.orderedTrips) {
    for (const stoptime of trip.stoptimes) {
      // Dwell time at this stop, in minutes.
      const timepointDifference = timeUtils.fromGTFSTime(stoptime.departure_time).diff(timeUtils.fromGTFSTime(stoptime.arrival_time), 'minutes');
      if (config.showArrivalOnDifference !== null && timepointDifference >= config.showArrivalOnDifference) {
        const index = stopIds.indexOf(stoptime.stop_id);

        // Never duplicate the first or last stop of the timetable.
        if (index === 0 || index === stopIds.length - 1) {
          continue;
        }

        // Skip if this stop_id has already been duplicated.
        if (stopIds[index] === stopIds[index + 1] || stopIds[index] === stopIds[index - 1]) {
          continue;
        }

        stopIds.splice(index, 0, stoptime.stop_id);
      }
    }
  }

  return stopIds;
};
/*
 * Get an array of stop objects for a specific timetable, in display order.
 * Duplicated stop ids are flagged as 'arrival'/'departure' rows, and
 * stop_city is attached from stop attributes when `showStopCity` is set.
 */
const getStops = async (timetable, config) => {
  if (timetable.orderedTrips.length === 0) {
    return [];
  }

  const stopIds = await getStopIds(timetable, config);

  // Convert stops to array of objects
  const stops = await Promise.all(stopIds.map(async (stopId, idx) => {
    const stopQuery = {
      agency_key: timetable.agency_key,
      stop_id: stopId
    };
    const stops = await gtfs.getStops(stopQuery, undefined, {limit: 1, lean: true});
    if (stops.length === 0) {
      config.logWarning(`No stop found for agency_key=${timetable.agency_key}, stop_id=${stopId}`);
      return null;
    }

    const stop = _.first(stops);
    stop.trips = [];

    // Consecutive duplicate ids represent an arrival row followed by a
    // departure row for the same stop (see getStopIds).
    if (idx < (stopIds.length - 1) && stopId === stopIds[idx + 1]) {
      stop.type = 'arrival';
    } else if (idx > 0 && stopId === stopIds[idx - 1]) {
      stop.type = 'departure';
    }

    // If `showStopCity` is true, look up stop attributes.
    if (timetable.showStopCity) {
      const stopAttribute = await gtfs.getStopAttributes(stopQuery);
      if (stopAttribute.length > 0) {
        stop.stop_city = _.first(stopAttribute).stop_city;
      }
    }

    return stop;
  }));

  const formattedStops = formatters.formatStops(_.compact(stops), timetable, config);
  return formattedStops;
};
/*
 * Get all calendars for a timetable's agency that overlap its date range and
 * run on at least one of the timetable's days of the week.
 */
const getCalendarsFromTimetable = async timetable => {
  const calendarQuery = {
    agency_key: timetable.agency_key
  };

  // Calendars must overlap the timetable's start/end date range.
  if (timetable.end_date) {
    calendarQuery.start_date = {$lt: timetable.end_date};
  }

  if (timetable.start_date) {
    calendarQuery.end_date = {$gte: timetable.start_date};
  }

  const days = getDaysFromCalendars([timetable]);
  // Create an $or query array of days based on calendars
  const dayQuery = _.reduce(days, (memo, value, key) => {
    if (value === 1) {
      const queryItem = {};
      queryItem[key] = value;
      memo.push(queryItem);
    }

    return memo;
  }, []);

  if (dayQuery.length > 0) {
    calendarQuery.$or = dayQuery;
  }

  return gtfs.getCalendars(calendarQuery);
};
/*
 * Get the service_ids of all added-service (exception_type=1) calendar dates
 * for an agency, optionally restricted to dates >= startDate and < endDate.
 */
const getCalendarDatesServiceIds = async (agencyKey, startDate, endDate) => {
  const calendarDateQuery = {
    agency_key: agencyKey,
    exception_type: 1
  };

  // Only add a date clause when at least one bound was supplied.
  const dateRange = {};

  if (endDate) {
    dateRange.$lt = endDate;
  }

  if (startDate) {
    dateRange.$gte = startDate;
  }

  if (Object.keys(dateRange).length > 0) {
    calendarDateQuery.date = dateRange;
  }

  const calendarDates = await gtfs.getCalendarDates(calendarDateQuery);
  return _.map(calendarDates, 'service_id');
};
/*
 * Fetch all frequencies for a trip and return them formatted.
 */
const getFrequenciesByTrip = async trip => {
  const frequencies = await gtfs.getFrequencies({
    agency_key: trip.agency_key,
    trip_id: trip.trip_id
  });
  return frequencies.map(frequency => formatters.formatFrequency(frequency));
};
/*
 * Get all stoptimes for a trip, dropping consecutive duplicate rows:
 * repeated stop_sequence values, and rows whose arrival time equals the
 * previous row's departure time.
 */
const getStoptimesByTrip = async trip => {
  const stoptimes = await gtfs.getStoptimes({
    agency_key: trip.agency_key,
    trip_id: trip.trip_id
  });

  // Remove stoptimes that are duplicates
  const deduplicatedStoptimes = _.filter(stoptimes, (stoptime, idx) => {
    // Always keep the first row and rows with blank (interpolated) times.
    if (idx === 0 || stoptime.arrival_time === '') {
      return true;
    }

    // Remove duplicate entries in stop_times.txt
    if (stoptime.stop_sequence === stoptimes[idx - 1].stop_sequence) {
      return false;
    }

    // NOTE(review): this also drops a *different* stop whose arrival happens
    // to equal the previous stop's departure — confirm that is intended.
    if (stoptime.arrival_time !== stoptimes[idx - 1].departure_time) {
      return true;
    }

    return false;
  });

  return deduplicatedStoptimes;
};
/*
 * For a specific stop_id, return an array of all stop_ids within its parent
 * station plus the parent station's own stop_id. If the stop has no parent
 * station, return just the given stop_id.
 */
const getAllStationStopIds = async (stopId, agencyKey) => {
  const stops = await gtfs.getStops({
    agency_key: agencyKey,
    stop_id: stopId
  });

  const stop = stops[0];

  if (stop.parent_station === '' || stop.parent_station === undefined) {
    return [stopId];
  }

  // Also filter by agency_key here: the first query above scopes to the
  // agency, but the original parent_station lookup did not, so stops from
  // unrelated agencies that reuse the same parent_station id could leak in.
  const stopsInParentStation = await gtfs.getStops({
    agency_key: agencyKey,
    parent_station: stop.parent_station
  }, {stop_id: 1});

  return [stop.parent_station, ..._.map(stopsInParentStation, 'stop_id')];
};
/*
 * Get all trips sharing the given trip's block_id (within the timetable's
 * service ids), annotate each with its first and last stoptime, and return
 * them sorted by first departure time.
 * Throws if a block trip has no stoptimes at all.
 */
const getTripsWithSameBlock = async (trip, timetable) => {
  const tripQuery = {
    agency_key: trip.agency_key,
    block_id: trip.block_id,
    service_id: {
      $in: timetable.serviceIds
    }
  };

  const trips = await gtfs.getTrips(tripQuery, {trip_id: 1, route_id: 1, _id: 0});

  await Promise.all(trips.map(async blockTrip => {
    // Fetch a single endpoint stoptime: sort ascending (1) for the first
    // stop, descending (-1) for the last.
    const getEndpointStoptime = async sortDirection => {
      const stoptimes = await gtfs.getStoptimes({
        agency_key: timetable.agency_key,
        trip_id: blockTrip.trip_id
      }, undefined, {lean: true, sort: {stop_sequence: sortDirection}, limit: 1});

      if (stoptimes.length === 0) {
        // Fixed duplicated word in the original message ("found found").
        throw new Error(`No stoptimes found for trip_id=${blockTrip.trip_id}, agency_key=${blockTrip.agency_key}`);
      }

      return stoptimes[0];
    };

    blockTrip.firstStoptime = await getEndpointStoptime(1);
    blockTrip.lastStoptime = await getEndpointStoptime(-1);
  }));

  return _.sortBy(trips, trip => trip.firstStoptime.departure_timestamp);
};
/*
 * Get next trip and previous trip with the same block_id if it arrives/departs
 * from the same stop and is a different route. Mutates `trip`, setting
 * `continues_from_route` and/or `continues_as_route` when a matching block
 * trip is found. Trips without a block_id are left untouched.
 */
const addTripContinuation = async (trip, timetable) => {
  if (!trip.block_id) {
    return;
  }

  // Maximum layover (in seconds) for a block trip to count as a continuation.
  const maxContinuesAsWaitingTimeSeconds = 60 * 60;

  // Resolve the first/last stops to their full parent-station stop id sets so
  // continuations across platforms of the same station still match.
  const firstStoptime = _.first(trip.stoptimes);
  const firstStopIds = await getAllStationStopIds(firstStoptime.stop_id, trip.agency_key);
  const lastStoptime = _.last(trip.stoptimes);
  const lastStopIds = await getAllStationStopIds(lastStoptime.stop_id, trip.agency_key);
  const blockTrips = await getTripsWithSameBlock(trip, timetable);

  // "Continues From" trips must be the previous trip chronologically.
  const previousTrip = _.findLast(blockTrips, blockTrip => {
    return blockTrip.lastStoptime.arrival_timestamp <= firstStoptime.departure_timestamp;
  });

  // "Continues From" trips must be a different route_id.
  if (previousTrip && previousTrip.route_id !== trip.route_id) {
    // "Continues From" trips must not be more than 60 minutes before.
    if (previousTrip.lastStoptime.arrival_timestamp >= firstStoptime.departure_timestamp - maxContinuesAsWaitingTimeSeconds) {
      // "Continues From" trips must have their last stop_id be the same as the next trip's first stop_id.
      if (firstStopIds.includes(previousTrip.lastStoptime.stop_id)) {
        const routes = await gtfs.getRoutes({
          agency_key: timetable.agency_key,
          route_id: previousTrip.route_id
        });
        previousTrip.route = routes[0];
        trip.continues_from_route = previousTrip;
      }
    }
  }

  // "Continues As" trips must be the next trip chronologically.
  const nextTrip = _.find(blockTrips, blockTrip => {
    return blockTrip.firstStoptime.departure_timestamp >= lastStoptime.arrival_timestamp;
  });

  // "Continues As" trips must be a different route_id.
  if (nextTrip && nextTrip.route_id !== trip.route_id) {
    // "Continues As" trips must not be more than 60 minutes later.
    if (nextTrip.firstStoptime.departure_timestamp <= lastStoptime.arrival_timestamp + maxContinuesAsWaitingTimeSeconds) {
      // "Continues As" trips must have their first stop_id be the same as the previous trip's last stop_id.
      if (lastStopIds.includes(nextTrip.firstStoptime.stop_id)) {
        const routes = await gtfs.getRoutes({
          agency_key: timetable.agency_key,
          route_id: nextTrip.route_id
        });
        nextTrip.route = routes[0];
        trip.continues_as_route = nextTrip;
      }
    }
  }
};
/*
 * Get all trips for a timetable, formatted, expanded by frequencies, and
 * sorted. Mutates `timetable` as a side effect: narrows serviceIds to those
 * actually used, attaches frequencies, and sets continuation flags.
 */
const getTripsFromTimetable = async (timetable, calendars, config) => {
  const tripQuery = {
    agency_key: timetable.agency_key,
    route_id: timetable.route_id,
    service_id: {
      $in: timetable.serviceIds
    }
  };

  // Only filter by direction when one is specified on the timetable.
  if (timetable.direction_id !== '' && timetable.direction_id !== null) {
    tripQuery.direction_id = timetable.direction_id;
  }

  const trips = await gtfs.getTrips(tripQuery);

  if (trips.length === 0) {
    config.logWarning(`No trips found for route_id=${timetable.route_id}, direction_id=${timetable.direction_id}, service_ids=${JSON.stringify(timetable.serviceIds)}, timetable_id=${timetable.timetable_id}`);
  }

  // Updated timetable.serviceIds with only the service IDs actually used in one or more trip
  timetable.serviceIds = _.uniq(_.map(trips, 'service_id'));

  const formattedTrips = [];
  await Promise.all(trips.map(async trip => {
    const formattedTrip = formatters.formatTrip(trip, timetable, calendars, config);
    formattedTrip.stoptimes = await getStoptimesByTrip(formattedTrip);

    if (timetable.show_trip_continuation) {
      await addTripContinuation(formattedTrip, timetable);

      if (formattedTrip.continues_as_route) {
        timetable.has_continues_as_route = true;
      }

      if (formattedTrip.continues_from_route) {
        timetable.has_continues_from_route = true;
      }
    }

    if (formattedTrip.stoptimes.length === 0) {
      config.logWarning(`No stoptimes found for agency_key=${timetable.agency_key}, trip_id=${formattedTrip.trip_id}, route_id=${timetable.route_id}, timetable_id=${timetable.timetable_id}`);
    }

    // Frequency-based trips are expanded into one concrete trip per headway.
    const frequencies = await getFrequenciesByTrip(formattedTrip, config);
    if (frequencies.length === 0) {
      formattedTrips.push(formattedTrip);
    } else {
      const frequencyTrips = generateTripsByFrequencies(formattedTrip, frequencies);
      formattedTrips.push(...frequencyTrips);
      timetable.frequencies = frequencies;
      timetable.frequencyExactTimes = _.some(frequencies, {exact_times: 1});
    }
  }));

  return sortTrips(formattedTrips, config);
};
/*
* Discern if a day list should be shown for a specific timetable (if some
* trips happen on different days).
*/
// Decide whether a per-trip day list should be rendered: true when at
// least two trips in the timetable run on different day patterns.
const getShowDayList = timetable => {
  const trips = timetable.orderedTrips;
  return trips.some((trip, idx) => idx > 0 && trip.dayList !== trips[idx - 1].dayList);
};
/*
* Format timetables for display.
*/
// Fully populate each timetable for rendering: resolve service ids,
// display symbols/flags from config, trips, stops, calendar dates, day
// lists and (optionally) map GeoJSON. Mutates and returns the timetables.
const formatTimetables = async (timetables, config) => {
return Promise.all(timetables.map(async timetable => {
const dayList = formatters.formatDays(timetable, config);
const calendars = await getCalendarsFromTimetable(timetable);
let serviceIds = _.map(calendars, 'service_id');
// Optionally merge in service ids that only appear in calendar_dates.txt.
if (timetable.include_exceptions === 1) {
const calendarDatesServiceIds = await getCalendarDatesServiceIds(timetable.agency_key, timetable.start_date, timetable.end_date);
serviceIds = _.uniq([...serviceIds, ...calendarDatesServiceIds]);
}
// Seed display flags; the *_SymbolUsed flags are flipped elsewhere while
// formatting stoptimes so the legend only lists symbols actually shown.
Object.assign(timetable, {
noServiceSymbolUsed: false,
requestDropoffSymbolUsed: false,
noDropoffSymbolUsed: false,
requestPickupSymbolUsed: false,
noPickupSymbolUsed: false,
interpolatedStopSymbolUsed: false,
showStopCity: config.showStopCity,
showStopDescription: config.showStopDescription,
noServiceSymbol: config.noServiceSymbol,
requestDropoffSymbol: config.requestDropoffSymbol,
noDropoffSymbol: config.noDropoffSymbol,
requestPickupSymbol: config.requestPickupSymbol,
noPickupSymbol: config.noPickupSymbol,
interpolatedStopSymbol: config.interpolatedStopSymbol,
serviceIds,
dayList,
dayListLong: formatters.formatDaysLong(dayList, config)
});
// Order matters: stops/day list/label all derive from orderedTrips.
timetable.orderedTrips = await getTripsFromTimetable(timetable, calendars, config);
timetable.stops = await getStops(timetable, config);
timetable.calendarDates = await getCalendarDates(timetable, config);
timetable.showDayList = getShowDayList(timetable);
timetable.timetable_label = formatters.formatTimetableLabel(timetable);
if (config.showMap) {
timetable.geojson = await geoJSONUtils.getTimetableGeoJSON(timetable, config);
}
return timetable;
}));
};
/*
* Get all timetable pages for an agency.
*/
// Get all timetable pages for an agency. Falls back gracefully when the
// optional timetables.txt / timetable_pages.txt files are absent:
// no timetables -> synthesize pages from routes; no pages -> one page
// per timetable.
exports.getTimetablePages = async (agencyKey, config) => {
const timetables = await gtfs.getTimetables({agency_key: agencyKey});
// If no timetables, build each route and direction into a timetable
if (!timetables || timetables.length === 0) {
return convertRoutesToTimetablePages(agencyKey, config);
}
const timetablePages = await gtfs.getTimetablePages({agency_key: agencyKey});
// Check if there are any timetable pages defined in timetable_pages.txt
if (!timetablePages || timetablePages.length === 0) {
// If no timetablepages, use timetables
return Promise.all(timetables.map(timetable => convertTimetableToTimetablePage(timetable, config)));
}
// Otherwise, use timetable pages defined in timetable_pages.txt
return Promise.all(timetablePages.map(async timetablePage => {
timetablePage.timetables = await filterAndSortTimetables(timetables, timetablePage.timetable_page_id);
// Add route for each Timetable
await Promise.all(timetablePage.timetables.map(async timetable => {
timetable.route = await getRouteFromTimetable(timetable, config);
}));
return timetablePage;
}));
};
/*
* Format a timetable page for display.
*/
// Format a timetable page for display: aggregate day lists, route ids and
// colors, assign a default filename, and fill in each timetable's
// direction_name and route. Mutates and returns timetablePage.
exports.formatTimetablePage = async (timetablePage, config) => {
timetablePage.dayList = formatters.formatDays(getDaysFromCalendars(timetablePage.timetables), config);
timetablePage.dayLists = _.uniq(timetablePage.timetables.map(timetable => timetable.dayList));
timetablePage.routeIds = _.uniq(_.map(timetablePage.timetables, 'route_id'));
timetablePage.routeColors = await getTimetablePageColors(timetablePage);
// Set default filename
if (!timetablePage.filename) {
timetablePage.filename = `${timetablePage.timetable_page_id}.html`;
}
// Get direction_name for each timetable
await Promise.all(timetablePage.timetables.map(async timetable => {
// Fall back to the direction's trip_headsign when no name is supplied.
if (timetable.direction_name === undefined || timetable.direction_name === '') {
timetable.direction_name = await getDirectionHeadsignFromTimetable(timetable);
}
if (!timetable.route) {
timetable.route = await getRouteFromTimetable(timetable, config);
}
}));
timetablePage.directionNames = _.uniq(_.map(timetablePage.timetables, 'direction_name'));
return timetablePage;
};
/*
* Initialize configuration with defaults.
*/
exports.setDefaultConfig = config => {
const defaults = {
beautify: false,
coordinatePrecision: 5,
dateFormat: 'MMM D, YYYY',
daysShortStrings: ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'],
daysStrings: ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'],
defaultOrientation: 'vertical',
interpolatedStopSymbol: '•',
gtfsToHtmlVersion: version,
menuType: 'jump',
noDropoffSymbol: '‡',
noHead: false,
noPickupSymbol: '***',
noServiceSymbol: '-',
requestDropoffSymbol: '†',
requestPickupSymbol: '***',
showArrivalOnDifference: 0.2,
showMap: false,
showOnlyTimepoint: false,
showRouteTitle: true,
showStopCity: false,
showStopDescription: false,
skipImport: false,
sortingAlgorithm: 'common',
timeFormat: 'h:mma',
verbose: true,
zipOutput: false
};
return Object.assign(defaults, config);
};
/*
* Get a timetable page by id.
*/
// Build and fully format a single timetable page identified by
// `timetablePageId`. Handles the three data layouts: no timetables.txt
// (id encodes route|calendarCode|direction), timetables but no
// timetable_pages.txt, and both files present.
exports.getFormattedTimetablePage = async (agencyKey, timetablePageId, config) => {
const timetables = await gtfs.getTimetables({agency_key: agencyKey});
let timetablePage;
// Check if there are any timetable pages defined in timetable_pages.txt
const timetablePages = await gtfs.getTimetablePages({
agency_key: agencyKey,
timetable_page_id: timetablePageId
});
if (timetablePages.length > 1) {
throw new Error(`Multiple timetable_pages found for timetable_page_id=${timetablePageId}`);
}
if (!timetables || timetables.length === 0) {
// If no timetables, build the route and direction into a timetable
let calendarCode;
let calendars;
let calendarDates;
let serviceId;
let directionId = '';
// The synthetic page id was built as `${route_id}|${calendarCode}|${direction_id}`.
const parts = timetablePageId.split('|');
if (parts.length > 1) {
directionId = parseInt(parts.pop(), 10);
calendarCode = parts.pop();
}
const routeId = parts.join('|');
const routeQuery = {
agency_key: agencyKey,
route_id: routeId
};
const route = await getRouteFromTimetable(routeQuery, config);
const directions = await gtfs.getDirectionsByRoute(routeQuery);
// A calendarCode of 0/1 digits encodes weekday flags; anything else is a
// bare service_id from calendar_dates.txt.
// NOTE(review): if timetablePageId contains no '|', calendarCode is
// undefined and .match throws — confirm ids always carry both suffixes.
if (calendarCode.match(/^[01]*$/)) {
calendars = await gtfs.getCalendars({
...routeQuery,
...timeUtils.calendarCodeToCalendar(calendarCode)
});
} else {
serviceId = calendarCode;
calendarDates = await gtfs.getCalendarDates({
agency_key: agencyKey,
exception_type: 1,
service_id: serviceId
});
}
const direction = _.find(directions, direction => direction.direction_id === directionId);
timetablePage = await convertRouteToTimetablePage(route, direction, calendars, calendarDates, config);
} else if (timetablePages.length === 0) {
// If no timetablepage, use timetable
const timetable = _.find(timetables, {timetable_id: timetablePageId});
if (!timetable) {
throw new Error(`No timetable found for timetable_page_id=${timetablePageId}`);
}
timetablePage = await convertTimetableToTimetablePage(timetable, config);
} else {
// Otherwise, use timetablepage defined in timetable_pages.txt
timetablePage = _.first(timetablePages);
timetablePage.timetables = await filterAndSortTimetables(timetables, timetablePage.timetable_page_id);
// Add route for each Timetable
await Promise.all(timetablePage.timetables.map(async timetable => {
timetable.route = await getRouteFromTimetable(timetable, config);
}));
}
timetablePage.consolidatedTimetables = await formatTimetables(timetablePage.timetables, config);
if (!timetablePage.consolidatedTimetables || timetablePage.consolidatedTimetables.length === 0) {
throw new Error(`No timetables found for timetable_page_id=${timetablePage.timetable_page_id}`);
}
return exports.formatTimetablePage(timetablePage, config);
};
/*
* Generate stats about timetable
*/
// Generate summary statistics for a timetable page: total stops and trips,
// plus counts of distinct route ids and service ids across all timetables.
// Returns {stops, trips, routes, calendars, routeIds, serviceIds}.
const generateStats = timetablePage => {
  const stats = timetablePage.timetables.reduce((memo, timetable) => {
    memo.stops += timetable.stops.length;
    memo.trips += timetable.orderedTrips.length;
    // Track unique ids as object keys so duplicates across timetables
    // are only counted once.
    for (const serviceId of timetable.serviceIds) {
      memo.serviceIds[serviceId] = true;
    }
    memo.routeIds[timetable.route_id] = true;
    return memo;
  }, {
    stops: 0,
    trips: 0,
    routeIds: {},
    serviceIds: {}
  });
  // Derive the unique counts once, instead of recomputing on every
  // iteration (and without needing lodash's _.size). Also guarantees the
  // keys exist (as 0) when there are no timetables.
  stats.routes = Object.keys(stats.routeIds).length;
  stats.calendars = Object.keys(stats.serviceIds).length;
  return stats;
};
/*
* Generate the HTML timetable for a timetable page.
*/
exports.generateHTML = async (timetablePage, config) => {
const templateVars = {
timetablePage,
config
};
const html = await fileUtils.renderFile('timetablepage', templateVars, config);
const stats = generateStats(timetablePage);
return {html, stats};
};
/*
* Generate the HTML for the agency overview page.
*/
// Render the agency overview page listing all timetable pages, optionally
// with an agency-wide map. Throws if the agency is unknown.
exports.generateOverviewHTML = async (agencyKey, timetablePages, config) => {
const agencies = await gtfs.getAgencies({agency_key: agencyKey});
if (!agencies || agencies.length === 0) {
throw new Error(`No agency found for agency_key=${agencyKey}`);
}
const agency = _.first(agencies);
if (config.showMap) {
agency.geojson = await geoJSONUtils.getAgencyGeoJSON(agencyKey, config);
}
const templateVars = {
agencyKey,
agency,
config,
// Sort pages by label so the overview listing is alphabetical.
timetablePages: _.sortBy(timetablePages, 'timetable_page_label')
};
return fileUtils.renderFile('overview', templateVars, config);
};
| lib/utils.js | const _ = require('lodash');
const gtfs = require('gtfs');
const moment = require('moment');
const fileUtils = require('./file-utils');
const formatters = require('./formatters');
const geoJSONUtils = require('./geojson-utils');
const timeUtils = require('./time-utils');
const { version } = require('../package.json');
/*
* Get all of the route colors for a timetable page.
*/
// Collect the distinct, non-empty route colors used by the routes that
// appear on a timetable page.
const getTimetablePageColors = async timetablePage => {
  const routes = await gtfs.getRoutes({
    agency_key: timetablePage.agency_key,
    route_id: {$in: timetablePage.routeIds}
  });
  const colors = routes.map(route => route.route_color);
  return [...new Set(colors)].filter(color => Boolean(color));
};
/*
* Determine if a stoptime is a timepoint.
*/
// Determine whether a stoptime is a timepoint. When stop_times.txt has an
// explicit `timepoint` column it is authoritative; otherwise any stoptime
// with both an arrival and a departure time counts as a timepoint.
const isTimepoint = stoptime => {
  if (stoptime.timepoint !== undefined) {
    return stoptime.timepoint === 1;
  }
  return stoptime.arrival_time !== '' && stoptime.departure_time !== '';
};
/*
* Find the longest trip (most stops) in a group of trips and return stoptimes.
*/
// Return the stoptime list of the trip with the most stops. When
// `showOnlyTimepoint` is enabled, only timepoint stoptimes are counted.
// Returns undefined when `trips` is empty (mirrors _.maxBy).
const getLongestTripStoptimes = (trips, config) => {
  const stoptimeLists = trips.map(trip => {
    // If `showOnlyTimepoint` is true, then filter out all non-timepoints
    if (config.showOnlyTimepoint === true) {
      return trip.stoptimes.filter(stoptime => isTimepoint(stoptime));
    }
    return trip.stoptimes;
  });
  let longest;
  for (const stoptimes of stoptimeLists) {
    // Strict > keeps the first list on ties, matching _.maxBy.
    if (longest === undefined || stoptimes.length > longest.length) {
      longest = stoptimes;
    }
  }
  return longest;
};
/*
* Find the first stop_id that all trips have in common, otherwise use the first
* stoptime.
*/
// Find the first stop_id (walking the longest trip's stoptimes) that every
// trip serves, for use as a shared sorting anchor. Returns null when no
// common timepoint stop exists.
const findCommonStopId = (trips, config) => {
const longestTripStoptimes = getLongestTripStoptimes(trips, config);
if (!longestTripStoptimes) {
return null;
}
const commonStoptime = _.find(longestTripStoptimes, (stoptime, idx) => {
// If longest trip is a loop (first and last stops the same), then skip first stoptime
if (idx === 0 && stoptime.stop_id === _.last(longestTripStoptimes).stop_id) {
return false;
}
// If stoptime isn't a timepoint, skip it
if (stoptime.arrival_time === '') {
return false;
}
// Accept only if every trip serves this stop at least once.
return _.every(trips, trip => {
return _.find(trip.stoptimes, {stop_id: stoptime.stop_id});
});
});
return commonStoptime ? commonStoptime.stop_id : null;
};
/*
* Return a set of unique trips (with at least one unique stop time) from an
* array of trips.
*/
// Remove duplicate trips (typically arising from overlapping service_ids):
// a trip is dropped when an already-kept trip matches it at the common stop
// (or first stop) AND has an identical full departure-time sequence.
const deduplicateTrips = (trips, commonStopId) => {
// Remove duplicate trips (from overlapping service_ids)
const deduplicatedTrips = trips.reduce((memo, trip) => {
if (memo.length === 0 || trip.stoptimes.length === 0) {
memo.push(trip);
} else {
const stoptimes = _.map(trip.stoptimes, 'departure_time');
let selectedStoptime;
if (commonStopId) {
selectedStoptime = _.find(trip.stoptimes, {stop_id: commonStopId});
} else {
selectedStoptime = trip.stoptimes[0];
}
// Find all other trips where the common stop has the same departure time
const similarTrips = _.filter(memo, trip => {
const stoptime = _.find(trip.stoptimes, {stop_id: selectedStoptime.stop_id});
if (!stoptime) {
return false;
}
return stoptime.departure_time === selectedStoptime.departure_time;
});
// Only add trip if no existing trip with the same set of timepoints has already been added
const tripIsUnique = _.every(similarTrips, similarTrip => {
const similarTripStoptimes = _.map(similarTrip.stoptimes, 'departure_time');
return !_.isEqual(stoptimes, similarTripStoptimes);
});
if (tripIsUnique) {
memo.push(trip);
}
}
return memo;
}, []);
return deduplicatedTrips;
};
/*
* Sort trips chronologically, using a common stop id if available, otherwise
* use the first stoptime.
* Edited by Pawajoro - more sorting options
*/
// Sort trips chronologically according to config.sortingAlgorithm:
// 'beginning'/'end' walk the longest trip's stops propagating a sortable
// time onto each trip; 'common' anchors on a stop shared by all trips;
// 'last' uses the final stoptime; default uses the first stoptime.
// Returns the sorted trips with duplicates removed.
const sortTrips = (trips, config) => {
let sortedTrips = trips;
let commonStopId;
if (_.includes(['beginning', 'end'], config.sortingAlgorithm)) {
let referenceStoptimes;
let sortingDirection;
let sortingOrder;
// 'end' walks the stops in reverse and sorts descending, then reverses.
if (config.sortingAlgorithm === 'end') {
referenceStoptimes = _.orderBy(getLongestTripStoptimes(trips, config), ['stop_sequence'], 'desc');
sortingDirection = -1;
sortingOrder = 'desc';
} else {
referenceStoptimes = _.sortBy(getLongestTripStoptimes(trips, config), ['stop_sequence']);
sortingDirection = 1;
sortingOrder = 'asc';
}
// For each reference stop, assign every trip a sortingStoptime: the
// trip's own timepoint there, or the previous trip's value as a stand-in
// when the trip skips the stop or has no usable timepoint.
for (const stop of referenceStoptimes) {
let previousSortingStoptime;
for (const trip of sortedTrips) {
if (trip.stoptimes.length === 0) {
trip.sortingStoptime = undefined;
}
const selectedStoptime = _.find(trip.stoptimes, {stop_id: stop.stop_id});
if (!selectedStoptime) {
if (!trip.sortingStoptime || trip.sortingStoptime * sortingDirection < previousSortingStoptime * sortingDirection) {
trip.sortingStoptime = previousSortingStoptime;
}
} else if (isTimepoint(selectedStoptime)) {
trip.sortingStoptime = formatters.timeToSeconds(selectedStoptime.departure_time);
} else if (!trip.sortingStoptime || trip.sortingStoptime * sortingDirection < previousSortingStoptime * sortingDirection) {
trip.sortingStoptime = previousSortingStoptime;
}
if (selectedStoptime) {
selectedStoptime.sortingTime = trip.sortingStoptime;
}
previousSortingStoptime = trip.sortingStoptime;
}
// Re-sort after every stop so later stops refine the ordering.
sortedTrips = _.orderBy(sortedTrips, ['sortingStoptime'], sortingOrder);
}
if (sortingOrder === 'desc') {
sortedTrips = sortedTrips.reverse();
}
} else {
if (config.sortingAlgorithm === 'common') {
commonStopId = findCommonStopId(trips, config);
}
sortedTrips = _.sortBy(trips, trip => {
if (trip.stoptimes.length === 0) {
return;
}
let selectedStoptime;
if (commonStopId) {
// NOTE(review): assumes every trip serves the common stop —
// findCommonStopId guarantees this for the trips it was given.
selectedStoptime = _.find(trip.stoptimes, {stop_id: commonStopId});
} else if (config.sortingAlgorithm !== 'last') {
selectedStoptime = _.first(trip.stoptimes);
}
if (config.sortingAlgorithm === 'last') {
selectedStoptime = _.last(trip.stoptimes);
}
return formatters.timeToSeconds(selectedStoptime.departure_time);
});
}
return deduplicateTrips(sortedTrips, commonStopId);
};
/*
* Find all timetables for a specified timetable page id and sort by
* timetable_sequence.
*/
// Select the timetables belonging to a timetable page and order them by
// timetable_sequence (ascending, stable).
const filterAndSortTimetables = async (timetables, timetablePageId) => {
  const selected = timetables.filter(timetable => timetable.timetable_page_id === timetablePageId);
  return [...selected].sort((a, b) => {
    if (a.timetable_sequence < b.timetable_sequence) {
      return -1;
    }
    if (a.timetable_sequence > b.timetable_sequence) {
      return 1;
    }
    return 0;
  });
};
/*
* Get all calendar dates for a specific timetable.
*/
// Collect the formatted added/removed service dates (calendar_dates.txt)
// that fall inside the timetable's date range, grouped into
// {includedDates, excludedDates}.
const getCalendarDates = async (timetable, config) => {
const calendarDates = await gtfs.getCalendarDates({
agency_key: timetable.agency_key,
service_id: {
$in: timetable.serviceIds
}
})
.sort('date')
.lean();
const start = timeUtils.fromGTFSDate(timetable.start_date);
const end = timeUtils.fromGTFSDate(timetable.end_date);
const filteredCalendarDates = calendarDates.reduce((memo, calendarDate) => {
// NOTE(review): moment#isBetween is exclusive of both endpoints by
// default, so exceptions exactly on start_date/end_date are skipped —
// confirm this is intended.
if (moment(calendarDate.date, 'YYYYMMDD').isBetween(start, end)) {
// exception_type 1 = service added, 2 = service removed.
if (calendarDate.exception_type === 1) {
memo.includedDates.push(formatters.formatDate(calendarDate, config.dateFormat));
} else if (calendarDate.exception_type === 2) {
memo.excludedDates.push(formatters.formatDate(calendarDate, config.dateFormat));
}
}
return memo;
}, {
excludedDates: [],
includedDates: []
});
return filteredCalendarDates;
};
/*
* Get days of the week from calendars
*/
// OR together the weekday flags of several calendars: a day is 1 when at
// least one calendar serves it, 0 otherwise.
const getDaysFromCalendars = calendars => {
  const days = {
    monday: 0,
    tuesday: 0,
    wednesday: 0,
    thursday: 0,
    friday: 0,
    saturday: 0,
    sunday: 0
  };
  for (const calendar of calendars) {
    for (const day of Object.keys(days)) {
      days[day] |= calendar[day];
    }
  }
  return days;
};
/*
* Get the route for a specific timetable.
*/
// Look up the route referenced by a timetable. Returns the route document,
// or null (after logging a warning) when the route_id is unknown.
const getRouteFromTimetable = async (timetable, config) => {
  const matchingRoutes = await gtfs.getRoutes({
    agency_key: timetable.agency_key,
    route_id: timetable.route_id
  });
  if (matchingRoutes.length > 0) {
    return matchingRoutes[0];
  }
  config.logWarning(`No route found for route_id=${timetable.route_id}, timetable_id=${timetable.timetable_id}`);
  return null;
};
/*
* Get the trip_headsign for a specific timetable.
*/
// Fetch the trip_headsign for a timetable's route + direction, or an empty
// string when no matching direction exists.
const getDirectionHeadsignFromTimetable = async timetable => {
  const directions = await gtfs.getDirectionsByRoute({
    agency_key: timetable.agency_key,
    route_id: timetable.route_id,
    direction_id: timetable.direction_id
  });
  return directions.length === 0 ? '' : directions[0].trip_headsign;
};
/*
* Create a timetable page from a single timetable. Used if no
* `timetable_pages.txt` is present.
*/
// Wrap a single timetable in a one-timetable page object. Used when
// timetable_pages.txt is absent. Also resolves the timetable's route and
// a generated output filename.
const convertTimetableToTimetablePage = async (timetable, config) => {
if (!timetable.route) {
timetable.route = await getRouteFromTimetable(timetable, config);
}
const filename = await fileUtils.generateFileName(timetable, config);
return {
agency_key: timetable.agency_key,
timetable_page_id: timetable.timetable_id,
timetable_page_label: timetable.timetable_label,
timetables: [timetable],
filename
};
};
/*
* Create a timetable page from a single route. Used if no `timetables.txt`
* is present.
*/
// Synthesize a timetable (and wrap it in a page) from a route + direction
// plus either calendars or calendar_dates entries. Used when
// timetables.txt is absent.
const convertRouteToTimetablePage = (route, direction, calendars, calendarDates, config) => {
const timetable = {
agency_key: route.agency_key,
route_id: route.route_id,
direction_id: direction ? direction.direction_id : undefined,
direction_name: direction ? direction.trip_headsign : undefined,
route,
// Service defined only in calendar_dates.txt is flagged so exceptions
// get merged in later; its first service_id stands in for the timetable.
include_exceptions: (calendarDates && calendarDates.length) ? 1 : 0,
service_id: (calendarDates && calendarDates.length) ? calendarDates[0].service_id : null
};
// Get days of week from calendars and assign to timetable
Object.assign(timetable, getDaysFromCalendars(calendars || []));
timetable.timetable_id = formatters.formatTimetableId(timetable);
return convertTimetableToTimetablePage(timetable, config);
};
/*
* Create timetable pages for all routes in an agency. Used if no
* `timetables.txt` is present.
*/
// Build timetable pages for every route in an agency: one page per
// (direction × day-pattern) from calendars plus one per calendar_dates-only
// service_id. Used when timetables.txt is absent.
const convertRoutesToTimetablePages = async (agencyKey, config) => {
const routes = await gtfs.getRoutes({agency_key: agencyKey});
const timetablePages = await Promise.all(routes.map(async route => {
const directions = await gtfs.getDirectionsByRoute({
agency_key: agencyKey,
route_id: route.route_id
});
const calendars = await gtfs.getCalendars({
agency_key: agencyKey,
route_id: route.route_id
}, undefined, { lean: true });
// Find all calendar dates with service_ids not present in calendar.txt
const calendarDates = await gtfs.getCalendarDates({
agency_key: agencyKey,
exception_type: 1,
service_id: { $nin: _.map(calendars, 'service_id') }
});
// Group by direction, by weekday pattern, and by exception service_id.
const directionGroups = _.groupBy(directions, direction => direction.direction_id);
const dayGroups = _.groupBy(calendars, timeUtils.calendarToCalendarCode);
const calendarDateGroups = _.groupBy(calendarDates, 'service_id');
return Promise.all(_.map(directionGroups, directionGroup => {
const direction = _.first(directionGroup);
return Promise.all([
Promise.all(_.map(dayGroups, calendars => {
return convertRouteToTimetablePage(route, direction, calendars, null, config);
})),
Promise.all(_.map(calendarDateGroups, calendarDates => {
return convertRouteToTimetablePage(route, direction, null, calendarDates, config);
}))
])
}));
}));
// Flatten the nested per-route/per-direction arrays into a single list.
return _.compact(_.flattenDeep(timetablePages));
};
/*
* Generate all trips based on a start trip and an array of frequencies.
*/
// Expand a frequency-based template trip into concrete trips: one copy per
// headway slot in each frequency window, with stoptimes shifted by the
// slot's offset from midnight.
const generateTripsByFrequencies = (trip, frequencies) => {
// Normalize the template so offsets can be applied from midnight.
const resetTrip = formatters.resetStoptimesToMidnight(trip);
return frequencies.reduce((memo, frequency) => {
const startSeconds = timeUtils.secondsAfterMidnight(frequency.start_time);
const endSeconds = timeUtils.secondsAfterMidnight(frequency.end_time);
// end_time is exclusive: a departure exactly at end_time is not generated.
for (let offset = startSeconds; offset < endSeconds; offset += frequency.headway_secs) {
const newTrip = _.omit(_.cloneDeep(resetTrip), ['_id']);
// Suffix keeps generated trip_ids unique across all windows.
newTrip.trip_id = `${resetTrip.trip_id}_freq_${memo.length}`;
newTrip.stoptimes = formatters.updateStoptimesByOffset(newTrip, offset);
memo.push(newTrip);
}
return memo;
}, []);
};
/*
* Get an array of stop_ids for a specific timetable.
*/
// Determine the ordered stop_ids for a timetable: prefer the explicit
// timetable_stop_order.txt; otherwise derive from the longest trip,
// duplicating interior stops whose arrival/departure differ enough to
// warrant separate arrival + departure rows.
const getStopIds = async (timetable, config) => {
const timetableStopOrders = await gtfs.getTimetableStopOrders({
agency_key: timetable.agency_key,
timetable_id: timetable.timetable_id
});
if (timetableStopOrders && timetableStopOrders.length !== 0) {
// Use the stop_sequence from `timetable_stop_order.txt`
return _.map(timetableStopOrders, 'stop_id');
}
let stopIds = [];
// Index by stop_sequence so gaps in the sequence leave holes we compact away.
const longestTripStoptimes = getLongestTripStoptimes(timetable.orderedTrips, config);
for (const stoptime of longestTripStoptimes) {
stopIds[stoptime.stop_sequence] = stoptime.stop_id;
}
// Remove any missing values from missing stop_sequence
stopIds = _.compact(stopIds);
/*
 * Check if any stoptimes have different arrival and departure times and
 * if they do, duplicate the stop id unless it is the first or last stop.
 * Edited by Pawajoro - minimal difference specified in config, or NULL
 */
for (const trip of timetable.orderedTrips) {
for (const stoptime of trip.stoptimes) {
const timepointDifference = timeUtils.fromGTFSTime(stoptime.departure_time).diff(timeUtils.fromGTFSTime(stoptime.arrival_time), 'minutes');
if (config.showArrivalOnDifference !== null && timepointDifference >= config.showArrivalOnDifference) {
// NOTE(review): indexOf finds only the first occurrence of the stop_id;
// a later occurrence on loop routes is never duplicated — confirm intended.
const index = stopIds.indexOf(stoptime.stop_id);
if (index === 0 || index === stopIds.length - 1) {
continue;
}
// Already duplicated (arrival/departure pair) — skip.
if (stopIds[index] === stopIds[index + 1] || stopIds[index] === stopIds[index - 1]) {
continue;
}
stopIds.splice(index, 0, stoptime.stop_id);
}
}
}
return stopIds;
};
/*
* Get an array of stops for a specific timetable.
*/
// Resolve the ordered stop objects for a timetable, marking consecutive
// duplicate stop ids as arrival/departure rows and optionally attaching
// stop_city from stop_attributes.txt. Unknown stop ids are logged and
// skipped. Returns the formatted stop list ([] when there are no trips).
const getStops = async (timetable, config) => {
  if (timetable.orderedTrips.length === 0) {
    return [];
  }
  const stopIds = await getStopIds(timetable, config);
  // Convert stop ids to an array of stop objects.
  const stops = await Promise.all(stopIds.map(async (stopId, idx) => {
    const stopQuery = {
      agency_key: timetable.agency_key,
      stop_id: stopId
    };
    const matchingStops = await gtfs.getStops(stopQuery, undefined, {limit: 1, lean: true});
    if (matchingStops.length === 0) {
      // Bug fix: previously this only warned and then crashed dereferencing
      // the undefined stop. Skip unknown stop ids instead.
      config.logWarning(`No stop found for agency_key=${timetable.agency_key}, stop_id=${stopId}`);
      return null;
    }
    const stop = _.first(matchingStops);
    stop.trips = [];
    // A stop id repeated consecutively (from getStopIds) renders as an
    // arrival row followed by a departure row.
    if (idx < (stopIds.length - 1) && stopId === stopIds[idx + 1]) {
      stop.type = 'arrival';
    } else if (idx > 0 && stopId === stopIds[idx - 1]) {
      stop.type = 'departure';
    }
    // If `showStopCity` is true, look up stop attributes.
    if (timetable.showStopCity) {
      const stopAttribute = await gtfs.getStopAttributes(stopQuery);
      if (stopAttribute.length > 0) {
        stop.stop_city = _.first(stopAttribute).stop_city;
      }
    }
    return stop;
  }));
  // Drop the nulls left by unknown stop ids before formatting.
  return formatters.formatStops(_.compact(stops), timetable, config);
};
/*
* Get all calendars from a specific timetable.
*/
// Find the calendars matching a timetable: within its date range (when
// given) and serving at least one of the timetable's active weekdays.
const getCalendarsFromTimetable = async timetable => {
const calendarQuery = {
agency_key: timetable.agency_key
};
// Overlap test: calendar starts before the timetable ends and ends on or
// after the timetable starts.
if (timetable.end_date) {
calendarQuery.start_date = {$lt: timetable.end_date};
}
if (timetable.start_date) {
calendarQuery.end_date = {$gte: timetable.start_date};
}
const days = getDaysFromCalendars([timetable]);
// Create an $or query array of days based on calendars
const dayQuery = _.reduce(days, (memo, value, key) => {
if (value === 1) {
const queryItem = {};
queryItem[key] = value;
memo.push(queryItem);
}
return memo;
}, []);
if (dayQuery.length > 0) {
calendarQuery.$or = dayQuery;
}
return gtfs.getCalendars(calendarQuery);
};
/*
* Get all calendar date service ids for an agency between two dates.
*/
// Collect the service_ids of "service added" (exception_type 1) calendar
// dates for an agency, optionally bounded to [startDate, endDate).
const getCalendarDatesServiceIds = async (agencyKey, startDate, endDate) => {
  const calendarDateQuery = {
    agency_key: agencyKey,
    exception_type: 1
  };
  const dateRange = {};
  if (endDate) {
    dateRange.$lt = endDate;
  }
  if (startDate) {
    dateRange.$gte = startDate;
  }
  if (Object.keys(dateRange).length > 0) {
    calendarDateQuery.date = dateRange;
  }
  const calendarDates = await gtfs.getCalendarDates(calendarDateQuery);
  return calendarDates.map(calendarDate => calendarDate.service_id);
};
/*
* Get formatted frequencies for a specific trip.
*/
// Fetch the frequencies.txt rows for a trip and return them formatted.
const getFrequenciesByTrip = async trip => {
  const frequencyQuery = {
    agency_key: trip.agency_key,
    trip_id: trip.trip_id
  };
  const frequencies = await gtfs.getFrequencies(frequencyQuery);
  return frequencies.map(formatters.formatFrequency);
};
/*
* Get all stoptimes for a trip.
*/
// Fetch a trip's stoptimes and drop duplicates: rows repeating the previous
// stop_sequence, and rows whose arrival merely echoes the previous row's
// departure time.
const getStoptimesByTrip = async trip => {
const stoptimes = await gtfs.getStoptimes({
agency_key: trip.agency_key,
trip_id: trip.trip_id
});
// Remove stoptimes that are duplicates
const deduplicatedStoptimes = _.filter(stoptimes, (stoptime, idx) => {
// Always keep the first row and rows with no arrival time (interpolated).
if (idx === 0 || stoptime.arrival_time === '') {
return true;
}
// Remove duplicate entries in stop_times.txt
if (stoptime.stop_sequence === stoptimes[idx - 1].stop_sequence) {
return false;
}
// Keep only when this arrival differs from the previous departure.
if (stoptime.arrival_time !== stoptimes[idx - 1].departure_time) {
return true;
}
return false;
});
return deduplicatedStoptimes;
};
/*
* For a specific stop_id, returns an array of all stop_ids within a parent station
* and the stop_id of parent station itself. If no parent station, it returns the
* stop_id.
*/
// Resolve a stop_id to the set of stop_ids that share its parent station
// (including the parent itself); a stop with no parent resolves to itself.
const getAllStationStopIds = async (stopId, agencyKey) => {
const stop = await gtfs.getStops({
agency_key: agencyKey,
stop_id: stopId
});
if (stop[0].parent_station === '' || stop[0].parent_station === undefined) {
return [stopId];
}
// NOTE(review): this sibling-stop query is not filtered by agency_key —
// confirm parent_station values cannot collide across agencies.
const stopsInParentStation = await gtfs.getStops({
parent_station: stop[0].parent_station
}, {stop_id: 1});
return [stop[0].parent_station, ..._.map(stopsInParentStation, 'stop_id')];
};
/*
* Get trips with the same blockId
*/
// Find all trips sharing this trip's block_id (within the timetable's
// service ids), annotate each with its first and last stoptime, and return
// them ordered by first departure. Throws when a block trip has no
// stoptimes at all.
const getTripsWithSameBlock = async (trip, timetable) => {
  const tripQuery = {
    agency_key: trip.agency_key,
    block_id: trip.block_id,
    service_id: {
      $in: timetable.serviceIds
    }
  };
  const trips = await gtfs.getTrips(tripQuery, {trip_id: 1, route_id: 1, _id: 0});
  // Fetch a trip's boundary stoptime: sortDirection 1 = first stop,
  // -1 = last stop (by stop_sequence).
  const getBoundaryStoptime = async (blockTrip, sortDirection) => {
    const stoptimes = await gtfs.getStoptimes({
      agency_key: timetable.agency_key,
      trip_id: blockTrip.trip_id
    }, undefined, {lean: true, sort: {stop_sequence: sortDirection}, limit: 1});
    if (stoptimes.length === 0) {
      // Bug fix: message previously read "No stoptimes found found".
      throw new Error(`No stoptimes found for trip_id=${blockTrip.trip_id}, agency_key=${blockTrip.agency_key}`);
    }
    return stoptimes[0];
  };
  await Promise.all(trips.map(async blockTrip => {
    blockTrip.firstStoptime = await getBoundaryStoptime(blockTrip, 1);
    blockTrip.lastStoptime = await getBoundaryStoptime(blockTrip, -1);
  }));
  return _.sortBy(trips, trip => trip.firstStoptime.departure_timestamp);
};
/*
* Get next trip and previous trip with the same block_id if it arrives/departs
* from the same stop and is a different route.
*/
// Annotate a trip with its block continuations: the chronologically
// adjacent trips sharing its block_id that serve a different route, connect
// at the same station, and depart/arrive within 60 minutes. Sets
// trip.continues_from_route / trip.continues_as_route. No-op for trips
// without a block_id.
const addTripContinuation = async (trip, timetable) => {
if (!trip.block_id) {
return;
}
const maxContinuesAsWaitingTimeSeconds = 60 * 60;
const firstStoptime = _.first(trip.stoptimes);
const firstStopIds = await getAllStationStopIds(firstStoptime.stop_id, trip.agency_key);
const lastStoptime = _.last(trip.stoptimes);
const lastStopIds = await getAllStationStopIds(lastStoptime.stop_id, trip.agency_key);
const blockTrips = await getTripsWithSameBlock(trip, timetable);
// "Continues From" trips must be the previous trip chronologically.
const previousTrip = _.findLast(blockTrips, blockTrip => {
return blockTrip.lastStoptime.arrival_timestamp <= firstStoptime.departure_timestamp;
});
// "Continues From" trips must be a different route_id.
if (previousTrip && previousTrip.route_id !== trip.route_id) {
// "Continues From" trips must not be more than 60 minutes before.
if (previousTrip.lastStoptime.arrival_timestamp >= firstStoptime.departure_timestamp - maxContinuesAsWaitingTimeSeconds) {
// "Continues From" trips must have their last stop_id be the same as the next trip's first stop_id.
if (firstStopIds.includes(previousTrip.lastStoptime.stop_id)) {
const routes = await gtfs.getRoutes({
agency_key: timetable.agency_key,
route_id: previousTrip.route_id
});
previousTrip.route = routes[0];
trip.continues_from_route = previousTrip;
}
}
}
// "Continues As" trips must be the next trip chronologically.
const nextTrip = _.find(blockTrips, blockTrip => {
return blockTrip.firstStoptime.departure_timestamp >= lastStoptime.arrival_timestamp;
});
// "Continues As" trips must be a different route_id.
if (nextTrip && nextTrip.route_id !== trip.route_id) {
// "Continues As" trips must not be more than 60 minutes later.
if (nextTrip.firstStoptime.departure_timestamp <= lastStoptime.arrival_timestamp + maxContinuesAsWaitingTimeSeconds) {
// "Continues As" trips must have their first stop_id be the same as the previous trip's last stop_id.
if (lastStopIds.includes(nextTrip.firstStoptime.stop_id)) {
const routes = await gtfs.getRoutes({
agency_key: timetable.agency_key,
route_id: nextTrip.route_id
});
nextTrip.route = routes[0];
trip.continues_as_route = nextTrip;
}
}
}
};
/*
* Get all trips from a timetable.
*/
// Load all trips for a timetable, format them, expand frequency-based
// trips into concrete trips, flag block continuations, and return the
// result sorted per config.sortingAlgorithm.
// Side effects: mutates `timetable` (serviceIds, frequencies, flags).
const getTripsFromTimetable = async (timetable, calendars, config) => {
const tripQuery = {
agency_key: timetable.agency_key,
route_id: timetable.route_id,
service_id: {
$in: timetable.serviceIds
}
};
// Only constrain direction when the timetable specifies one.
// NOTE(review): an undefined direction_id also passes this check — confirm intended.
if (timetable.direction_id !== '' && timetable.direction_id !== null) {
tripQuery.direction_id = timetable.direction_id;
}
const trips = await gtfs.getTrips(tripQuery);
if (trips.length === 0) {
config.logWarning(`No trips found for route_id=${timetable.route_id}, direction_id=${timetable.direction_id}, service_ids=${JSON.stringify(timetable.serviceIds)}, timetable_id=${timetable.timetable_id}`);
}
// Updated timetable.serviceIds with only the service IDs actually used in one or more trip
timetable.serviceIds = _.uniq(_.map(trips, 'service_id'));
const formattedTrips = [];
await Promise.all(trips.map(async trip => {
const formattedTrip = formatters.formatTrip(trip, timetable, calendars, config);
formattedTrip.stoptimes = await getStoptimesByTrip(formattedTrip);
// Annotate "continues from"/"continues as" info for trips sharing a block_id.
if (timetable.show_trip_continuation) {
await addTripContinuation(formattedTrip, timetable);
if (formattedTrip.continues_as_route) {
timetable.has_continues_as_route = true;
}
if (formattedTrip.continues_from_route) {
timetable.has_continues_from_route = true;
}
}
if (formattedTrip.stoptimes.length === 0) {
config.logWarning(`No stoptimes found for agency_key=${timetable.agency_key}, trip_id=${formattedTrip.trip_id}, route_id=${timetable.route_id}, timetable_id=${timetable.timetable_id}`);
}
// NOTE(review): getFrequenciesByTrip takes a single argument; the extra
// `config` passed here is ignored.
const frequencies = await getFrequenciesByTrip(formattedTrip, config);
if (frequencies.length === 0) {
formattedTrips.push(formattedTrip);
} else {
// frequencies.txt entry: expand the template trip into one trip per headway slot.
const frequencyTrips = generateTripsByFrequencies(formattedTrip, frequencies);
formattedTrips.push(...frequencyTrips);
timetable.frequencies = frequencies;
timetable.frequencyExactTimes = _.some(frequencies, {exact_times: 1});
}
}));
return sortTrips(formattedTrips, config);
};
/*
* Discern if a day list should be shown for a specific timetable (if some
* trips happen on different days).
*/
const getShowDayList = timetable => {
return !_.every(timetable.orderedTrips, (trip, idx) => {
if (idx === 0) {
return true;
}
return trip.dayList === timetable.orderedTrips[idx - 1].dayList;
});
};
/*
* Format timetables for display.
*/
const formatTimetables = async (timetables, config) => {
return Promise.all(timetables.map(async timetable => {
const dayList = formatters.formatDays(timetable, config);
const calendars = await getCalendarsFromTimetable(timetable);
let serviceIds = _.map(calendars, 'service_id');
if (timetable.include_exceptions === 1) {
const calendarDatesServiceIds = await getCalendarDatesServiceIds(timetable.agency_key, timetable.start_date, timetable.end_date);
serviceIds = _.uniq([...serviceIds, ...calendarDatesServiceIds]);
}
Object.assign(timetable, {
noServiceSymbolUsed: false,
requestDropoffSymbolUsed: false,
noDropoffSymbolUsed: false,
requestPickupSymbolUsed: false,
noPickupSymbolUsed: false,
interpolatedStopSymbolUsed: false,
showStopCity: config.showStopCity,
showStopDescription: config.showStopDescription,
noServiceSymbol: config.noServiceSymbol,
requestDropoffSymbol: config.requestDropoffSymbol,
noDropoffSymbol: config.noDropoffSymbol,
requestPickupSymbol: config.requestPickupSymbol,
noPickupSymbol: config.noPickupSymbol,
interpolatedStopSymbol: config.interpolatedStopSymbol,
serviceIds,
dayList,
dayListLong: formatters.formatDaysLong(dayList, config)
});
timetable.orderedTrips = await getTripsFromTimetable(timetable, calendars, config);
timetable.stops = await getStops(timetable, config);
timetable.calendarDates = await getCalendarDates(timetable, config);
timetable.showDayList = getShowDayList(timetable);
timetable.timetable_label = formatters.formatTimetableLabel(timetable);
if (config.showMap) {
timetable.geojson = await geoJSONUtils.getTimetableGeoJSON(timetable, config);
}
return timetable;
}));
};
/*
* Get all timetable pages for an agency.
*/
exports.getTimetablePages = async (agencyKey, config) => {
const timetables = await gtfs.getTimetables({agency_key: agencyKey});
// If no timetables, build each route and direction into a timetable
if (!timetables || timetables.length === 0) {
return convertRoutesToTimetablePages(agencyKey, config);
}
const timetablePages = await gtfs.getTimetablePages({agency_key: agencyKey});
// Check if there are any timetable pages defined in timetable_pages.txt
if (!timetablePages || timetablePages.length === 0) {
// If no timetablepages, use timetables
return Promise.all(timetables.map(timetable => convertTimetableToTimetablePage(timetable, config)));
}
// Otherwise, use timetable pages defined in timetable_pages.txt
return Promise.all(timetablePages.map(async timetablePage => {
timetablePage.timetables = await filterAndSortTimetables(timetables, timetablePage.timetable_page_id);
// Add route for each Timetable
await Promise.all(timetablePage.timetables.map(async timetable => {
timetable.route = await getRouteFromTimetable(timetable, config);
}));
return timetablePage;
}));
};
/*
* Format a timetable page for display.
*/
exports.formatTimetablePage = async (timetablePage, config) => {
timetablePage.dayList = formatters.formatDays(getDaysFromCalendars(timetablePage.timetables), config);
timetablePage.dayLists = _.uniq(timetablePage.timetables.map(timetable => timetable.dayList));
timetablePage.routeIds = _.uniq(_.map(timetablePage.timetables, 'route_id'));
timetablePage.routeColors = await getTimetablePageColors(timetablePage);
// Set default filename
if (!timetablePage.filename) {
timetablePage.filename = `${timetablePage.timetable_page_id}.html`;
}
// Get direction_name for each timetable
await Promise.all(timetablePage.timetables.map(async timetable => {
if (timetable.direction_name === undefined || timetable.direction_name === '') {
timetable.direction_name = await getDirectionHeadsignFromTimetable(timetable);
}
if (!timetable.route) {
timetable.route = await getRouteFromTimetable(timetable, config);
}
}));
timetablePage.directionNames = _.uniq(_.map(timetablePage.timetables, 'direction_name'));
return timetablePage;
};
/*
* Initialize configuration with defaults.
*/
exports.setDefaultConfig = config => {
const defaults = {
beautify: false,
coordinatePrecision: 5,
dateFormat: 'MMM D, YYYY',
daysShortStrings: ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'],
daysStrings: ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'],
defaultOrientation: 'vertical',
interpolatedStopSymbol: '•',
gtfsToHtmlVersion: version,
menuType: 'jump',
noDropoffSymbol: '‡',
noHead: false,
noPickupSymbol: '***',
noServiceSymbol: '-',
requestDropoffSymbol: '†',
requestPickupSymbol: '***',
showArrivalOnDifference: 0.2,
showMap: false,
showOnlyTimepoint: false,
showRouteTitle: true,
showStopCity: false,
showStopDescription: false,
skipImport: false,
sortingAlgorithm: 'common',
timeFormat: 'h:mma',
verbose: true,
zipOutput: false
};
return Object.assign(defaults, config);
};
/*
* Get a timetable page by id.
*/
exports.getFormattedTimetablePage = async (agencyKey, timetablePageId, config) => {
const timetables = await gtfs.getTimetables({agency_key: agencyKey});
let timetablePage;
// Check if there are any timetable pages defined in timetable_pages.txt
const timetablePages = await gtfs.getTimetablePages({
agency_key: agencyKey,
timetable_page_id: timetablePageId
});
if (timetablePages.length > 1) {
throw new Error(`Multiple timetable_pages found for timetable_page_id=${timetablePageId}`);
}
if (!timetables || timetables.length === 0) {
// If no timetables, build the route and direction into a timetable
let calendarCode;
let calendars;
let calendarDates;
let serviceId;
let directionId = '';
const parts = timetablePageId.split('|');
if (parts.length > 1) {
directionId = parseInt(parts.pop(), 10);
calendarCode = parts.pop();
}
const routeId = parts.join('|');
const routeQuery = {
agency_key: agencyKey,
route_id: routeId
};
const route = await getRouteFromTimetable(routeQuery, config);
const directions = await gtfs.getDirectionsByRoute(routeQuery);
if (calendarCode.match(/^[01]*$/)) {
calendars = await gtfs.getCalendars({
...routeQuery,
...timeUtils.calendarCodeToCalendar(calendarCode)
});
} else {
serviceId = calendarCode;
calendarDates = await gtfs.getCalendarDates({
agency_key: agencyKey,
exception_type: 1,
service_id: serviceId
});
}
const direction = _.find(directions, direction => direction.direction_id === directionId);
timetablePage = await convertRouteToTimetablePage(route, direction, calendars, calendarDates, config);
} else if (timetablePages.length === 0) {
// If no timetablepage, use timetable
const timetable = _.find(timetables, {timetable_id: timetablePageId});
if (!timetable) {
throw new Error(`No timetable found for timetable_page_id=${timetablePageId}`);
}
timetablePage = await convertTimetableToTimetablePage(timetable, config);
} else {
// Otherwise, use timetablepage defined in timetable_pages.txt
timetablePage = _.first(timetablePages);
timetablePage.timetables = await filterAndSortTimetables(timetables, timetablePage.timetable_page_id);
// Add route for each Timetable
await Promise.all(timetablePage.timetables.map(async timetable => {
timetable.route = await getRouteFromTimetable(timetable, config);
}));
}
timetablePage.consolidatedTimetables = await formatTimetables(timetablePage.timetables, config);
if (!timetablePage.consolidatedTimetables || timetablePage.consolidatedTimetables.length === 0) {
throw new Error(`No timetables found for timetable_page_id=${timetablePage.timetable_page_id}`);
}
return exports.formatTimetablePage(timetablePage, config);
};
/*
* Generate stats about timetable
*/
const generateStats = timetablePage => {
return timetablePage.timetables.reduce((memo, timetable) => {
memo.stops += timetable.stops.length;
memo.trips += timetable.orderedTrips.length;
for (const serviceId of timetable.serviceIds) {
memo.serviceIds[serviceId] = true;
}
memo.routeIds[timetable.route_id] = true;
memo.routes = _.size(memo.routeIds);
memo.calendars = _.size(memo.serviceIds);
return memo;
}, {
stops: 0,
trips: 0,
routeIds: {},
serviceIds: {}
});
};
/*
* Generate the HTML timetable for a timetable page.
*/
exports.generateHTML = async (timetablePage, config) => {
const templateVars = {
timetablePage,
config
};
const html = await fileUtils.renderFile('timetablepage', templateVars, config);
const stats = generateStats(timetablePage);
return {html, stats};
};
/*
* Generate the HTML for the agency overview page.
*/
exports.generateOverviewHTML = async (agencyKey, timetablePages, config) => {
const agencies = await gtfs.getAgencies({agency_key: agencyKey});
if (!agencies || agencies.length === 0) {
throw new Error(`No agency found for agency_key=${agencyKey}`);
}
const agency = _.first(agencies);
if (config.showMap) {
agency.geojson = await geoJSONUtils.getAgencyGeoJSON(agencyKey, config);
}
const templateVars = {
agencyKey,
agency,
config,
timetablePages: _.sortBy(timetablePages, 'timetable_page_label')
};
return fileUtils.renderFile('overview', templateVars, config);
};
| Handle invalid stops
| lib/utils.js | Handle invalid stops | <ide><path>ib/utils.js
<ide>
<ide> if (stops.length === 0) {
<ide> config.logWarning(`No stop found for agency_key=${timetable.agency_key}, stop_id=${stopId}`);
<add> return null;
<ide> }
<ide>
<ide> const stop = _.first(stops);
<ide> return stop;
<ide> }));
<ide>
<del> const formattedStops = formatters.formatStops(stops, timetable, config);
<add> const formattedStops = formatters.formatStops(_.compact(stops), timetable, config);
<ide> return formattedStops;
<ide> };
<ide> |
|
Java | apache-2.0 | d9acd2a2f1f41f2d51cff25283a6dc8650ae03db | 0 | MaJin1996/zstack,HeathHose/zstack,zstackorg/zstack,Alvin-Lau/zstack,liningone/zstack,zxwing/zstack-1,zstackio/zstack,AlanJinTS/zstack,liningone/zstack,MatheMatrix/zstack,camilesing/zstack,camilesing/zstack,HeathHose/zstack,winger007/zstack,WangXijue/zstack,AlanJinTS/zstack,zxwing/zstack-1,liningone/zstack,AlanJager/zstack,zstackio/zstack,camilesing/zstack,AlanJager/zstack,MatheMatrix/zstack,Alvin-Lau/zstack,WangXijue/zstack,mingjian2049/zstack,MatheMatrix/zstack,AlanJinTS/zstack,zstackorg/zstack,mingjian2049/zstack,MaJin1996/zstack,winger007/zstack,zstackio/zstack,zsyzsyhao/zstack,HeathHose/zstack,AlanJager/zstack,WangXijue/zstack,zxwing/zstack-1,hhjuliet/zstack,mingjian2049/zstack,Alvin-Lau/zstack,hhjuliet/zstack,zsyzsyhao/zstack | package org.zstack.core.cloudbus;
import org.apache.commons.lang.StringUtils;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.GsonTransient;
import org.zstack.header.message.NeedJsonSchema;
import org.zstack.header.message.NoJsonSchema;
import org.zstack.header.search.Inventory;
import org.zstack.utils.FieldUtils;
import org.zstack.utils.TypeUtils;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
/**
*/
public class MessageJsonSchemaBuilder {
private static Map<Field, Field> skipMap = new ConcurrentHashMap<Field, Field>();
private static boolean isSkip(Field f) {
if (skipMap.containsKey(f)) {
return true;
}
if (TypeUtils.isPrimitiveOrWrapper(f.getType())) {
skipMap.put(f, f);
return true;
}
if (f.isAnnotationPresent(NoJsonSchema.class)) {
skipMap.put(f, f);
return true;
}
if (Modifier.isStatic(f.getModifiers())) {
skipMap.put(f, f);
return true;
}
if (f.isAnnotationPresent(GsonTransient.class)) {
skipMap.put(f, f);
return true;
}
return false;
}
private static Object getValue(Field f, Object obj) throws IllegalAccessException {
f.setAccessible(true);
return f.get(obj);
}
private static boolean isNullValue(Field f, Object obj) throws IllegalAccessException {
return getValue(f, obj) == null;
}
private static void buildSchema(Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
List<Field> fs = FieldUtils.getAllFields(obj.getClass());
for (Field f : fs) {
if (isSkip(f)) {
continue;
}
if (Map.class.isAssignableFrom(f.getType())) {
schemaMap(f, obj, schema, trace, paths);
continue;
}
if (Collection.class.isAssignableFrom(f.getType())) {
Class genericType = FieldUtils.getGenericType(f);
if (genericType != null && TypeUtils.isPrimitiveOrWrapper(genericType)) {
continue;
}
if (!List.class.isAssignableFrom(f.getType())) {
throw new CloudRuntimeException(String.format("the collection type in message can only be List, but %s.%s is %s",
f.getDeclaringClass().getName(), f.getName(), f.getType().getName()));
}
schemaList(f, obj, schema, trace, paths);
continue;
}
schemaObject(f, obj, schema, trace, paths);
}
}
private static void schemaList(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
}
paths.push(value);
List col = (List) value;
for (Object item : col) {
String itemName = String.format("%s[%s]", f.getName(), col.indexOf(item));
if (isObjectNeedSchema(item)) {
addToSchema(item.getClass(), itemName, schema, trace);
}
trace.push(itemName);
buildSchema(item, schema, trace, paths);
trace.pop();
}
paths.pop();
}
public static Map<String, List<String>> buildSchema(Object msg) {
try {
Stack<Object> paths = new Stack<Object>();
Stack<String> trace = new Stack<String>();
Map<String, List<String>> schema = new LinkedHashMap<String, List<String>>();
buildSchema(msg, schema, trace, paths);
return schema;
} catch (Exception e) {
throw new CloudRuntimeException(e);
}
}
private static void schemaObject(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
}
if (isObjectNeedSchema(value)) {
addToSchema(value.getClass(), f.getName(), schema, trace);
}
paths.push(value);
trace.push(f.getName());
buildSchema(value, schema, trace, paths);
trace.pop();
paths.pop();
}
private static void addToSchema(Class<?> realClass, String name, Map<String, List<String>> schema, Stack<String> trace) {
String base = StringUtils.join(trace, ".");
List<String> path = schema.get(realClass.getName());
if (path == null) {
path = new ArrayList<String>();
schema.put(realClass.getName(), path);
}
if (base.equals("")) {
path.add(name);
} else {
path.add(String.format("%s.%s", base, name));
}
}
private static boolean isObjectNeedSchema(Object obj) {
return obj != null && (obj.getClass().isAnnotationPresent(Inventory.class) || obj.getClass().isAnnotationPresent(NeedJsonSchema.class));
}
private static void schemaMap(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
Class genericType = FieldUtils.getGenericType(f);
if (genericType != null && TypeUtils.isPrimitiveOrWrapper(genericType)) {
return;
}
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
}
paths.push(obj);
Map map = (Map) value;
Iterator<Entry> it = map.entrySet().iterator();
while (it.hasNext()) {
Entry e = it.next();
String key = e.getKey().toString();
Object item = e.getValue();
String itemName = String.format("%s[\"%s\"]", f.getName(), key);
if (isObjectNeedSchema(item)) {
addToSchema(item.getClass(), itemName, schema, trace);
}
trace.push(itemName);
buildSchema(item, schema, trace, paths);
trace.pop();
}
paths.pop();
}
}
| core/src/main/java/org/zstack/core/cloudbus/MessageJsonSchemaBuilder.java | package org.zstack.core.cloudbus;
import org.apache.commons.lang.StringUtils;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.message.GsonTransient;
import org.zstack.header.message.NeedJsonSchema;
import org.zstack.header.message.NoJsonSchema;
import org.zstack.header.search.Inventory;
import org.zstack.utils.FieldUtils;
import org.zstack.utils.TypeUtils;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
/**
*/
public class MessageJsonSchemaBuilder {
private static Map<Field, Field> skipMap = new ConcurrentHashMap<Field, Field>();
private static boolean isSkip(Field f) {
if (skipMap.containsKey(f)) {
return true;
}
if (TypeUtils.isPrimitiveOrWrapper(f.getType())) {
skipMap.put(f, f);
return true;
}
if (f.isAnnotationPresent(NoJsonSchema.class)) {
skipMap.put(f, f);
return true;
}
if (Modifier.isStatic(f.getModifiers())) {
skipMap.put(f, f);
return true;
}
if (f.isAnnotationPresent(GsonTransient.class)) {
skipMap.put(f, f);
return true;
}
return false;
}
private static Object getValue(Field f, Object obj) throws IllegalAccessException {
f.setAccessible(true);
return f.get(obj);
}
private static boolean isNullValue(Field f, Object obj) throws IllegalAccessException {
return getValue(f, obj) == null;
}
private static void buildSchema(Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
List<Field> fs = FieldUtils.getAllFields(obj.getClass());
for (Field f : fs) {
if (isSkip(f)) {
continue;
}
if (Map.class.isAssignableFrom(f.getType())) {
schemaMap(f, obj, schema, trace, paths);
continue;
}
if (Collection.class.isAssignableFrom(f.getType())) {
Class genericType = FieldUtils.getGenericType(f);
if (genericType != null && TypeUtils.isPrimitiveOrWrapper(genericType)) {
continue;
}
if (!List.class.isAssignableFrom(f.getType())) {
throw new CloudRuntimeException(String.format("the collection type in message can only be List, but %s.%s is %s",
f.getDeclaringClass().getName(), f.getName(), f.getType().getName()));
}
schemaList(f, obj, schema, trace, paths);
continue;
}
schemaObject(f, obj, schema, trace, paths);
}
}
private static void schemaList(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
}
paths.push(value);
List col = (List) value;
for (Object item : col) {
String itemName = String.format("%s[%s]", f.getName(), col.indexOf(item));
if (isObjectNeedSchema(item)) {
addToSchema(item.getClass(), itemName, schema, trace);
}
trace.push(itemName);
buildSchema(item, schema, trace, paths);
trace.pop();
}
paths.pop();
}
public static Map<String, List<String>> buildSchema(Object msg) {
try {
Stack<Object> paths = new Stack<Object>();
Stack<String> trace = new Stack<String>();
Map<String, List<String>> schema = new LinkedHashMap<String, List<String>>();
buildSchema(msg, schema, trace, paths);
return schema;
} catch (Exception e) {
throw new CloudRuntimeException(e);
}
}
private static void schemaObject(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
}
if (isObjectNeedSchema(value)) {
addToSchema(value.getClass(), f.getName(), schema, trace);
}
paths.push(value);
trace.push(f.getName());
buildSchema(value, schema, trace, paths);
trace.pop();
paths.pop();
}
private static void addToSchema(Class<?> realClass, String name, Map<String, List<String>> schema, Stack<String> trace) {
String base = StringUtils.join(trace, ".");
List<String> path = schema.get(realClass.getName());
if (path == null) {
path = new ArrayList<String>();
schema.put(realClass.getName(), path);
}
if (base.equals("")) {
path.add(name);
} else {
path.add(String.format("%s.%s", base, name));
}
}
private static boolean isObjectNeedSchema(Object obj) {
return obj.getClass().isAnnotationPresent(Inventory.class) || obj.getClass().isAnnotationPresent(NeedJsonSchema.class);
}
private static void schemaMap(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException {
Class genericType = FieldUtils.getGenericType(f);
if (genericType != null && TypeUtils.isPrimitiveOrWrapper(genericType)) {
return;
}
if (isNullValue(f, obj)) {
return;
}
Object value = getValue(f, obj);
if (paths.contains(value)) {
paths.push(value);
throw new CloudRuntimeException(String.format("recursive object graph: %s", StringUtils.join(paths, " --> ")));
}
paths.push(obj);
Map map = (Map) value;
Iterator<Entry> it = map.entrySet().iterator();
while (it.hasNext()) {
Entry e = it.next();
String key = e.getKey().toString();
Object item = e.getValue();
String itemName = String.format("%s[\"%s\"]", f.getName(), key);
if (isObjectNeedSchema(item)) {
addToSchema(item.getClass(), itemName, schema, trace);
}
trace.push(itemName);
buildSchema(item, schema, trace, paths);
trace.pop();
}
paths.pop();
}
}
| Add a null pointer check
for https://github.com/zxwing/premium/issues/1387
Signed-off-by: David Lee <[email protected]>
| core/src/main/java/org/zstack/core/cloudbus/MessageJsonSchemaBuilder.java | Add a null pointer check | <ide><path>ore/src/main/java/org/zstack/core/cloudbus/MessageJsonSchemaBuilder.java
<ide> }
<ide>
<ide> private static boolean isObjectNeedSchema(Object obj) {
<del> return obj.getClass().isAnnotationPresent(Inventory.class) || obj.getClass().isAnnotationPresent(NeedJsonSchema.class);
<add> return obj != null && (obj.getClass().isAnnotationPresent(Inventory.class) || obj.getClass().isAnnotationPresent(NeedJsonSchema.class));
<ide> }
<ide>
<ide> private static void schemaMap(Field f, Object obj, Map<String, List<String>> schema, Stack<String> trace, Stack<Object> paths) throws IllegalAccessException { |
|
Java | apache-2.0 | 5f5823d35ab944c49579ed9ab4873e4bcca74770 | 0 | apache/commons-validator,apache/commons-validator,apache/commons-validator | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.net.HttpURLConnection;
import java.net.IDN;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.validator.routines.DomainValidator.ArrayType;
import junit.framework.TestCase;
/**
* Tests for the DomainValidator.
*
*/
public class DomainValidatorTest extends TestCase {
private DomainValidator validator;
@Override
public void setUp() {
validator = DomainValidator.getInstance();
}
public void testValidDomains() {
assertTrue("apache.org should validate", validator.isValid("apache.org"));
assertTrue("www.google.com should validate", validator.isValid("www.google.com"));
assertTrue("test-domain.com should validate", validator.isValid("test-domain.com"));
assertTrue("test---domain.com should validate", validator.isValid("test---domain.com"));
assertTrue("test-d-o-m-ain.com should validate", validator.isValid("test-d-o-m-ain.com"));
assertTrue("two-letter domain label should validate", validator.isValid("as.uk"));
assertTrue("case-insensitive ApAchE.Org should validate", validator.isValid("ApAchE.Org"));
assertTrue("single-character domain label should validate", validator.isValid("z.com"));
assertTrue("i.have.an-example.domain.name should validate", validator.isValid("i.have.an-example.domain.name"));
}
public void testInvalidDomains() {
assertFalse("bare TLD .org shouldn't validate", validator.isValid(".org"));
assertFalse("domain name with spaces shouldn't validate", validator.isValid(" apache.org "));
assertFalse("domain name containing spaces shouldn't validate", validator.isValid("apa che.org"));
assertFalse("domain name starting with dash shouldn't validate", validator.isValid("-testdomain.name"));
assertFalse("domain name ending with dash shouldn't validate", validator.isValid("testdomain-.name"));
assertFalse("domain name starting with multiple dashes shouldn't validate", validator.isValid("---c.com"));
assertFalse("domain name ending with multiple dashes shouldn't validate", validator.isValid("c--.com"));
assertFalse("domain name with invalid TLD shouldn't validate", validator.isValid("apache.rog"));
assertFalse("URL shouldn't validate", validator.isValid("http://www.apache.org"));
assertFalse("Empty string shouldn't validate as domain name", validator.isValid(" "));
assertFalse("Null shouldn't validate as domain name", validator.isValid(null));
}
public void testTopLevelDomains() {
// infrastructure TLDs
assertTrue(".arpa should validate as iTLD", validator.isValidInfrastructureTld(".arpa"));
assertFalse(".com shouldn't validate as iTLD", validator.isValidInfrastructureTld(".com"));
// generic TLDs
assertTrue(".name should validate as gTLD", validator.isValidGenericTld(".name"));
assertFalse(".us shouldn't validate as gTLD", validator.isValidGenericTld(".us"));
// country code TLDs
assertTrue(".uk should validate as ccTLD", validator.isValidCountryCodeTld(".uk"));
assertFalse(".org shouldn't validate as ccTLD", validator.isValidCountryCodeTld(".org"));
// case-insensitive
assertTrue(".COM should validate as TLD", validator.isValidTld(".COM"));
assertTrue(".BiZ should validate as TLD", validator.isValidTld(".BiZ"));
// corner cases
assertFalse("invalid TLD shouldn't validate", validator.isValid(".nope")); // TODO this is not guaranteed invalid forever
assertFalse("empty string shouldn't validate as TLD", validator.isValid(""));
assertFalse("null shouldn't validate as TLD", validator.isValid(null));
}
public void testAllowLocal() {
final DomainValidator noLocal = DomainValidator.getInstance(false);
final DomainValidator allowLocal = DomainValidator.getInstance(true);
// Default is false, and should use singletons
assertEquals(noLocal, validator);
// Default won't allow local
assertFalse("localhost.localdomain should validate", noLocal.isValid("localhost.localdomain"));
assertFalse("localhost should validate", noLocal.isValid("localhost"));
// But it may be requested
assertTrue("localhost.localdomain should validate", allowLocal.isValid("localhost.localdomain"));
assertTrue("localhost should validate", allowLocal.isValid("localhost"));
assertTrue("hostname should validate", allowLocal.isValid("hostname"));
assertTrue("machinename should validate", allowLocal.isValid("machinename"));
// Check the localhost one with a few others
assertTrue("apache.org should validate", allowLocal.isValid("apache.org"));
assertFalse("domain name with spaces shouldn't validate", allowLocal.isValid(" apache.org "));
}
public void testIDN() {
assertTrue("b\u00fccher.ch in IDN should validate", validator.isValid("www.xn--bcher-kva.ch"));
}
public void testIDNJava6OrLater() {
final String version = System.getProperty("java.version");
if (version.compareTo("1.6") < 0) {
System.out.println("Cannot run Unicode IDN tests");
return; // Cannot run the test
} // xn--d1abbgf6aiiy.xn--p1ai http://президент.рф
assertTrue("b\u00fccher.ch should validate", validator.isValid("www.b\u00fccher.ch"));
assertTrue("xn--d1abbgf6aiiy.xn--p1ai should validate", validator.isValid("xn--d1abbgf6aiiy.xn--p1ai"));
assertTrue("президент.рф should validate", validator.isValid("президент.рф"));
assertFalse("www.\uFFFD.ch FFFD should fail", validator.isValid("www.\uFFFD.ch"));
}
// RFC2396: domainlabel = alphanum | alphanum *( alphanum | "-" ) alphanum
public void testRFC2396domainlabel() { // use fixed valid TLD
assertTrue("a.ch should validate", validator.isValid("a.ch"));
assertTrue("9.ch should validate", validator.isValid("9.ch"));
assertTrue("az.ch should validate", validator.isValid("az.ch"));
assertTrue("09.ch should validate", validator.isValid("09.ch"));
assertTrue("9-1.ch should validate", validator.isValid("9-1.ch"));
assertFalse("91-.ch should not validate", validator.isValid("91-.ch"));
assertFalse("-.ch should not validate", validator.isValid("-.ch"));
}
// RFC2396 toplabel = alpha | alpha *( alphanum | "-" ) alphanum
public void testRFC2396toplabel() {
// These tests use non-existent TLDs so currently need to use a package protected method
assertTrue("a.c (alpha) should validate", validator.isValidDomainSyntax("a.c"));
assertTrue("a.cc (alpha alpha) should validate", validator.isValidDomainSyntax("a.cc"));
assertTrue("a.c9 (alpha alphanum) should validate", validator.isValidDomainSyntax("a.c9"));
assertTrue("a.c-9 (alpha - alphanum) should validate", validator.isValidDomainSyntax("a.c-9"));
assertTrue("a.c-z (alpha - alpha) should validate", validator.isValidDomainSyntax("a.c-z"));
assertFalse("a.9c (alphanum alpha) should fail", validator.isValidDomainSyntax("a.9c"));
assertFalse("a.c- (alpha -) should fail", validator.isValidDomainSyntax("a.c-"));
assertFalse("a.- (-) should fail", validator.isValidDomainSyntax("a.-"));
assertFalse("a.-9 (- alphanum) should fail", validator.isValidDomainSyntax("a.-9"));
}
public void testDomainNoDots() {// rfc1123
assertTrue("a (alpha) should validate", validator.isValidDomainSyntax("a"));
assertTrue("9 (alphanum) should validate", validator.isValidDomainSyntax("9"));
assertTrue("c-z (alpha - alpha) should validate", validator.isValidDomainSyntax("c-z"));
assertFalse("c- (alpha -) should fail", validator.isValidDomainSyntax("c-"));
assertFalse("-c (- alpha) should fail", validator.isValidDomainSyntax("-c"));
assertFalse("- (-) should fail", validator.isValidDomainSyntax("-"));
}
    // VALIDATOR-297: punycode (xn--) encoded IDN domains must be accepted
    public void testValidator297() {
        assertTrue("xn--d1abbgf6aiiy.xn--p1ai should validate", validator.isValid("xn--d1abbgf6aiiy.xn--p1ai")); // This uses a valid TLD
    }
    // labels are a max of 63 chars and domains 253
    public void testValidator306() {
        final String longString = "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz0123456789A";
        assertEquals(63, longString.length()); // 26 * 2 + 11
        assertTrue("63 chars label should validate", validator.isValidDomainSyntax(longString+".com"));
        assertFalse("64 chars label should fail", validator.isValidDomainSyntax(longString+"x.com"));
        assertTrue("63 chars TLD should validate", validator.isValidDomainSyntax("test."+longString));
        assertFalse("64 chars TLD should fail", validator.isValidDomainSyntax("test.x"+longString));
        // 4 labels (63 + 63 + 63 + 61 chars) joined by 3 dots = 253 chars total
        final String longDomain =
                longString
                + "." + longString
                + "." + longString
                + "." + longString.substring(0,61)
                ;
        assertEquals(253, longDomain.length());
        assertTrue("253 chars domain should validate", validator.isValidDomainSyntax(longDomain));
        assertFalse("254 chars domain should fail", validator.isValidDomainSyntax(longDomain+"x"));
    }
    // Check that IDN.toASCII behaves as it should (when wrapped by DomainValidator.unicodeToASCII)
    // Tests show that method incorrectly trims a trailing "." character
    public void testUnicodeToASCII() {
        // Pure-ASCII inputs must pass through unicodeToASCII unchanged,
        // including ones the raw IDN.toASCII would reject or mangle.
        final String[] asciidots = {
                "",
                ",",
                ".", // fails IDN.toASCII, but should pass wrapped version
                "a.", // ditto
                "a.b",
                "a..b",
                "a...b",
                ".a",
                "..a",
        };
        for(final String s : asciidots) {
            assertEquals(s,DomainValidator.unicodeToASCII(s));
        }
        // RFC3490 3.1. 1)
        // Whenever dots are used as label separators, the following
        // characters MUST be recognized as dots: U+002E (full stop), U+3002
        // (ideographic full stop), U+FF0E (fullwidth full stop), U+FF61
        // (halfwidth ideographic full stop).
        // Each entry is {input-with-unicode-dot, expected-ascii-output}
        final String otherDots[][] = {
                {"b\u3002", "b.",},
                {"b\uFF0E", "b.",},
                {"b\uFF61", "b.",},
                {"\u3002", ".",},
                {"\uFF0E", ".",},
                {"\uFF61", ".",},
        };
        for(final String s[] : otherDots) {
            assertEquals(s[1],DomainValidator.unicodeToASCII(s[0]));
        }
    }
    // Check if IDN.toASCII is broken or not
    // Diagnostic "test": prints whether this JVM's IDN.toASCII round-trips "."
    // plus the JVM identification properties, so CI logs show which runtimes
    // exhibit the bug. It never fails.
    public void testIsIDNtoASCIIBroken() {
        System.out.println(">>DomainValidatorTest.testIsIDNtoASCIIBroken()");
        final String input = ".";
        final boolean ok = input.equals(IDN.toASCII(input));
        System.out.println("IDN.toASCII is " + (ok? "OK" : "BROKEN"));
        final String props[] = {
        "java.version", //    Java Runtime Environment version
        "java.vendor", // Java Runtime Environment vendor
        "java.vm.specification.version", //   Java Virtual Machine specification version
        "java.vm.specification.vendor", //    Java Virtual Machine specification vendor
        "java.vm.specification.name", //  Java Virtual Machine specification name
        "java.vm.version", // Java Virtual Machine implementation version
        "java.vm.vendor", //  Java Virtual Machine implementation vendor
        "java.vm.name", //    Java Virtual Machine implementation name
        "java.specification.version", //  Java Runtime Environment specification version
        "java.specification.vendor", //   Java Runtime Environment specification vendor
        "java.specification.name", // Java Runtime Environment specification name
        "java.class.version", //  Java class format version number
        };
        for(final String t : props) {
            System.out.println(t + "=" + System.getProperty(t));
        }
        System.out.println("<<DomainValidatorTest.testIsIDNtoASCIIBroken()");
        assertTrue(true); // dummy assertion to satisfy lint
    }
    // The following four tests use the reflection-based isSortedLowerCase helper
    // to verify the private TLD arrays are strictly sorted and all lower-case
    // (required for the binary-search / lookup logic in DomainValidator).
    // Check array is sorted and is lower-case
    public void test_INFRASTRUCTURE_TLDS_sortedAndLowerCase() throws Exception {
        final boolean sorted = isSortedLowerCase("INFRASTRUCTURE_TLDS");
        assertTrue(sorted);
    }
    // Check array is sorted and is lower-case
    public void test_COUNTRY_CODE_TLDS_sortedAndLowerCase() throws Exception {
        final boolean sorted = isSortedLowerCase("COUNTRY_CODE_TLDS");
        assertTrue(sorted);
    }
    // Check array is sorted and is lower-case
    public void test_GENERIC_TLDS_sortedAndLowerCase() throws Exception {
        final boolean sorted = isSortedLowerCase("GENERIC_TLDS");
        assertTrue(sorted);
    }
    // Check array is sorted and is lower-case
    public void test_LOCAL_TLDS_sortedAndLowerCase() throws Exception {
        final boolean sorted = isSortedLowerCase("LOCAL_TLDS");
        assertTrue(sorted);
    }
    // ArrayType is exposed via the public getTLDEntries API, so it must be public
    public void testEnumIsPublic() {
        assertTrue(Modifier.isPublic(DomainValidator.ArrayType.class.getModifiers()));
    }
    // Every ArrayType constant must yield a non-null array from getTLDEntries
    public void testGetArray() {
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.COUNTRY_CODE_MINUS));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.COUNTRY_CODE_PLUS));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.GENERIC_MINUS));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.GENERIC_PLUS));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.LOCAL_MINUS));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.LOCAL_PLUS));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.COUNTRY_CODE_RO));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.GENERIC_RO));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.INFRASTRUCTURE_RO));
        assertNotNull(DomainValidator.getTLDEntries(ArrayType.LOCAL_RO));
    }
    // Download and process local copy of http://data.iana.org/TLD/tlds-alpha-by-domain.txt
    // Check if the internal TLD table is up to date
    // Check if the internal TLD tables have any spurious entries
    // Manual maintenance tool (not run as a unit test): downloads the IANA TLD
    // lists into target/, compares them with DomainValidator's internal arrays,
    // and prints ready-to-paste source lines for any missing entries.
    public static void main(final String a[]) throws Exception {
        // Check the arrays first as this affects later checks
        // Doing this here makes it easier when updating the lists
        boolean OK = true;
        for(final String list : new String[]{"INFRASTRUCTURE_TLDS","COUNTRY_CODE_TLDS","GENERIC_TLDS","LOCAL_TLDS"}) {
            OK &= isSortedLowerCase(list);
        }
        if (!OK) {
            System.out.println("Fix arrays before retrying; cannot continue");
            return;
        }
        final Set<String> ianaTlds = new HashSet<>(); // keep for comparison with array contents
        final DomainValidator dv = DomainValidator.getInstance();
        final File txtFile = new File("target/tlds-alpha-by-domain.txt");
        final long timestamp = download(txtFile, "https://data.iana.org/TLD/tlds-alpha-by-domain.txt", 0L);
        final File htmlFile = new File("target/tlds-alpha-by-domain.html");
        // N.B. sometimes the html file may be updated a day or so after the txt file
        // if the txt file contains entries not found in the html file, try again in a day or two
        download(htmlFile,"https://www.iana.org/domains/root/db", timestamp);
        final BufferedReader br = new BufferedReader(new FileReader(txtFile));
        String line;
        final String header;
        line = br.readLine(); // header
        if (!line.startsWith("# Version ")) {
            br.close();
            throw new IOException("File does not have expected Version header");
        }
        header = line.substring(2);
        final boolean generateUnicodeTlds = false; // Change this to generate Unicode TLDs as well
        // Parse html page to get entries
        final Map<String, String[]> htmlInfo = getHtmlInfo(htmlFile);
        final Map<String, String> missingTLD = new TreeMap<>(); // stores entry and comments as String[]
        final Map<String, String> missingCC = new TreeMap<>();
        while((line = br.readLine()) != null) {
            if (!line.startsWith("#")) {
                final String unicodeTld; // only different from asciiTld if that was punycode
                final String asciiTld = line.toLowerCase(Locale.ENGLISH);
                // The txt file lists punycode entries in upper case (XN--...)
                if (line.startsWith("XN--")) {
                    unicodeTld = IDN.toUnicode(line);
                } else {
                    unicodeTld = asciiTld;
                }
                if (!dv.isValidTld(asciiTld)) {
                    final String [] info = htmlInfo.get(asciiTld);
                    if (info != null) {
                        final String type = info[0];
                        final String comment = info[1];
                        if ("country-code".equals(type)) { // Which list to use?
                            missingCC.put(asciiTld, unicodeTld + " " + comment);
                            if (generateUnicodeTlds) {
                                missingCC.put(unicodeTld, asciiTld + " " + comment);
                            }
                        } else {
                            missingTLD.put(asciiTld, unicodeTld + " " + comment);
                            if (generateUnicodeTlds) {
                                missingTLD.put(unicodeTld, asciiTld + " " + comment);
                            }
                        }
                    } else {
                        System.err.println("Expected to find HTML info for "+ asciiTld);
                    }
                }
                ianaTlds.add(asciiTld);
                // Don't merge these conditions; generateUnicodeTlds is final so needs to be separate to avoid a warning
                if (generateUnicodeTlds && !unicodeTld.equals(asciiTld)) {
                    ianaTlds.add(unicodeTld);
                }
            }
        }
        br.close();
        // List html entries not in TLD text list
        for(final String key : (new TreeMap<>(htmlInfo)).keySet()) {
            if (!ianaTlds.contains(key)) {
                if (isNotInRootZone(key)) {
                    System.out.println("INFO: HTML entry not yet in root zone: "+key);
                } else {
                    System.err.println("WARN: Expected to find text entry for html: "+key);
                }
            }
        }
        if (!missingTLD.isEmpty()) {
            printMap(header, missingTLD, "TLD");
        }
        if (!missingCC.isEmpty()) {
            printMap(header, missingCC, "CC");
        }
        // Check if internal tables contain any additional entries
        isInIanaList("INFRASTRUCTURE_TLDS", ianaTlds);
        isInIanaList("COUNTRY_CODE_TLDS", ianaTlds);
        isInIanaList("GENERIC_TLDS", ianaTlds);
        // Don't check local TLDS isInIanaList("LOCAL_TLDS", ianaTlds);
        System.out.println("Finished checks");
    }
private static void printMap(final String header, final Map<String, String> map, final String string) {
System.out.println("Entries missing from "+ string +" List\n");
if (header != null) {
System.out.println(" // Taken from " + header);
}
for (Entry<String, String> me : map.entrySet()) {
System.out.println(" \"" + me.getKey() + "\", // " + me.getValue());
}
System.out.println("\nDone");
}
    // Parse the IANA root-db HTML page into a map of tld -> {type, comment}.
    // Entries marked "Not assigned", "Retired" or of type "test" are dropped.
    // NOTE(review): br.readLine() can return null at an unexpected EOF, which
    // would NPE in the matches() calls below - tolerable for a manual tool.
    private static Map<String, String[]> getHtmlInfo(final File f) throws IOException {
        final Map<String, String[]> info = new HashMap<>();
//        <td><span class="domain tld"><a href="/domains/root/db/ax.html">.ax</a></span></td>
        final Pattern domain = Pattern.compile(".*<a href=\"/domains/root/db/([^.]+)\\.html");
//        <td>country-code</td>
        final Pattern type = Pattern.compile("\\s+<td>([^<]+)</td>");
//        <!-- <td>Åland Islands<br/><span class="tld-table-so">Ålands landskapsregering</span></td> </td> -->
//        <td>Ålands landskapsregering</td>
        final Pattern comment = Pattern.compile("\\s+<td>([^<]+)</td>");
        final BufferedReader br = new BufferedReader(new FileReader(f));
        String line;
        while((line=br.readLine())!=null){
            final Matcher m = domain.matcher(line);
            if (m.lookingAt()) {
                final String dom = m.group(1);
                String typ = "??";
                String com = "??";
                line = br.readLine();
                while (line.matches("^\\s*$")) { // extra blank lines introduced
                    line = br.readLine();
                }
                final Matcher t = type.matcher(line);
                if (t.lookingAt()) {
                    typ = t.group(1);
                    line = br.readLine();
                    if (line.matches("\\s+<!--.*")) { // skip commented-out original name
                        while(!line.matches(".*-->.*")){
                            line = br.readLine();
                        }
                        line = br.readLine();
                    }
                    // Should have comment; is it wrapped?
                    while(!line.matches(".*</td>.*")){
                        line += " " +br.readLine();
                    }
                    final Matcher n = comment.matcher(line);
                    if (n.lookingAt()) {
                        com = n.group(1);
                    }
                    // Don't save unused entries
                    if (com.contains("Not assigned") || com.contains("Retired") || typ.equals("test")) {
//                        System.out.println("Ignored: " + typ + " " + dom + " " +com);
                    } else {
                        info.put(dom.toLowerCase(Locale.ENGLISH), new String[]{typ, com});
//                        System.out.println("Storing: " + typ + " " + dom + " " +com);
                    }
                } else {
                    System.err.println("Unexpected type: " + line);
                }
            }
        }
        br.close();
        return info;
    }
/*
* Download a file if it is more recent than our cached copy.
* Unfortunately the server does not seem to honor If-Modified-Since for the
* Html page, so we check if it is newer than the txt file and skip download if so
*/
private static long download(final File f, final String tldurl, final long timestamp) throws IOException {
final int HOUR = 60*60*1000; // an hour in ms
final long modTime;
// For testing purposes, don't download files more than once an hour
if (f.canRead()) {
modTime = f.lastModified();
if (modTime > System.currentTimeMillis()-HOUR) {
System.out.println("Skipping download - found recent " + f);
return modTime;
}
} else {
modTime = 0;
}
final HttpURLConnection hc = (HttpURLConnection) new URL(tldurl).openConnection();
if (modTime > 0) {
final SimpleDateFormat sdf = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");//Sun, 06 Nov 1994 08:49:37 GMT
final String since = sdf.format(new Date(modTime));
hc.addRequestProperty("If-Modified-Since", since);
System.out.println("Found " + f + " with date " + since);
}
if (hc.getResponseCode() == 304) {
System.out.println("Already have most recent " + tldurl);
} else {
System.out.println("Downloading " + tldurl);
final byte buff[] = new byte[1024];
final InputStream is = hc.getInputStream();
final FileOutputStream fos = new FileOutputStream(f);
int len;
while((len=is.read(buff)) != -1) {
fos.write(buff, 0, len);
}
fos.close();
is.close();
System.out.println("Done");
}
return f.lastModified();
}
/**
* Check whether the domain is in the root zone currently.
* Reads the URL http://www.iana.org/domains/root/db/*domain*.html
* (using a local disk cache)
* and checks for the string "This domain is not present in the root zone at this time."
* @param domain the domain to check
* @return true if the string is found
*/
private static boolean isNotInRootZone(final String domain) {
final String tldurl = "http://www.iana.org/domains/root/db/" + domain + ".html";
final File rootCheck = new File("target","tld_" + domain + ".html");
BufferedReader in = null;
try {
download(rootCheck, tldurl, 0L);
in = new BufferedReader(new FileReader(rootCheck));
String inputLine;
while ((inputLine = in.readLine()) != null) {
if (inputLine.contains("This domain is not present in the root zone at this time.")) {
return true;
}
}
in.close();
} catch (final IOException e) {
} finally {
closeQuietly(in);
}
return false;
}
private static void closeQuietly(final Closeable in) {
if (in != null) {
try {
in.close();
} catch (final IOException e) {
}
}
}
    // isInIanaList and isSorted are split into two methods.
    // If/when access to the arrays is possible without reflection, the intermediate
    // methods can be dropped
    // Reflectively fetches the named static array from DomainValidator and
    // delegates to the array-based overload; restores accessibility afterwards.
    private static boolean isInIanaList(final String arrayName, final Set<String> ianaTlds) throws Exception {
        final Field f = DomainValidator.class.getDeclaredField(arrayName);
        final boolean isPrivate = Modifier.isPrivate(f.getModifiers());
        if (isPrivate) {
            f.setAccessible(true); // temporarily open up the private field
        }
        final String[] array = (String[]) f.get(null);
        try {
            return isInIanaList(arrayName, array, ianaTlds);
        } finally {
            if (isPrivate) {
                f.setAccessible(false);
            }
        }
    }
private static boolean isInIanaList(final String name, final String [] array, final Set<String> ianaTlds) {
for (final String element : array) {
if (!ianaTlds.contains(element)) {
System.out.println(name + " contains unexpected value: " + element);
}
}
return true;
}
    // Reflectively fetches the named static array from DomainValidator and
    // delegates to the array-based overload; restores accessibility afterwards.
    private static boolean isSortedLowerCase(final String arrayName) throws Exception {
        final Field f = DomainValidator.class.getDeclaredField(arrayName);
        final boolean isPrivate = Modifier.isPrivate(f.getModifiers());
        if (isPrivate) {
            f.setAccessible(true); // temporarily open up the private field
        }
        final String[] array = (String[]) f.get(null);
        try {
            return isSortedLowerCase(arrayName, array);
        } finally {
            if (isPrivate) {
                f.setAccessible(false);
            }
        }
    }
private static boolean isLowerCase(final String string) {
return string.equals(string.toLowerCase(Locale.ENGLISH));
}
// Check if an array is strictly sorted - and lowerCase
private static boolean isSortedLowerCase(final String name, final String [] array) {
boolean sorted = true;
boolean strictlySorted = true;
final int length = array.length;
boolean lowerCase = isLowerCase(array[length-1]); // Check the last entry
for(int i = 0; i < length-1; i++) { // compare all but last entry with next
final String entry = array[i];
final String nextEntry = array[i+1];
final int cmp = entry.compareTo(nextEntry);
if (cmp > 0) { // out of order
System.out.println("Out of order entry: " + entry + " < " + nextEntry + " in " + name);
sorted = false;
} else if (cmp == 0) {
strictlySorted = false;
System.out.println("Duplicated entry: " + entry + " in " + name);
}
if (!isLowerCase(entry)) {
System.out.println("Non lowerCase entry: " + entry + " in " + name);
lowerCase = false;
}
}
return sorted && strictlySorted && lowerCase;
}
}
| src/test/java/org/apache/commons/validator/routines/DomainValidatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.validator.routines;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.net.HttpURLConnection;
import java.net.IDN;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.validator.routines.DomainValidator.ArrayType;
import junit.framework.TestCase;
/**
* Tests for the DomainValidator.
*
*/
public class DomainValidatorTest extends TestCase {
private DomainValidator validator;
    @Override
    public void setUp() {
        // Fresh reference to the shared (non-local-allowing) singleton for each test
        validator = DomainValidator.getInstance();
    }
    // Representative well-formed domains with real TLDs must all validate
    public void testValidDomains() {
        assertTrue("apache.org should validate", validator.isValid("apache.org"));
        assertTrue("www.google.com should validate", validator.isValid("www.google.com"));
        assertTrue("test-domain.com should validate", validator.isValid("test-domain.com"));
        assertTrue("test---domain.com should validate", validator.isValid("test---domain.com"));
        assertTrue("test-d-o-m-ain.com should validate", validator.isValid("test-d-o-m-ain.com"));
        assertTrue("two-letter domain label should validate", validator.isValid("as.uk"));
        assertTrue("case-insensitive ApAchE.Org should validate", validator.isValid("ApAchE.Org"));
        assertTrue("single-character domain label should validate", validator.isValid("z.com"));
        assertTrue("i.have.an-example.domain.name should validate", validator.isValid("i.have.an-example.domain.name"));
    }
    // Malformed names, bad TLDs, URLs, blanks and null must all be rejected
    public void testInvalidDomains() {
        assertFalse("bare TLD .org shouldn't validate", validator.isValid(".org"));
        assertFalse("domain name with spaces shouldn't validate", validator.isValid(" apache.org "));
        assertFalse("domain name containing spaces shouldn't validate", validator.isValid("apa che.org"));
        assertFalse("domain name starting with dash shouldn't validate", validator.isValid("-testdomain.name"));
        assertFalse("domain name ending with dash shouldn't validate", validator.isValid("testdomain-.name"));
        assertFalse("domain name starting with multiple dashes shouldn't validate", validator.isValid("---c.com"));
        assertFalse("domain name ending with multiple dashes shouldn't validate", validator.isValid("c--.com"));
        assertFalse("domain name with invalid TLD shouldn't validate", validator.isValid("apache.rog"));
        assertFalse("URL shouldn't validate", validator.isValid("http://www.apache.org"));
        assertFalse("Empty string shouldn't validate as domain name", validator.isValid(" "));
        assertFalse("Null shouldn't validate as domain name", validator.isValid(null));
    }
    // Exercise the per-category TLD checks (infrastructure, generic, country-code)
    public void testTopLevelDomains() {
        // infrastructure TLDs
        assertTrue(".arpa should validate as iTLD", validator.isValidInfrastructureTld(".arpa"));
        assertFalse(".com shouldn't validate as iTLD", validator.isValidInfrastructureTld(".com"));
        // generic TLDs
        assertTrue(".name should validate as gTLD", validator.isValidGenericTld(".name"));
        assertFalse(".us shouldn't validate as gTLD", validator.isValidGenericTld(".us"));
        // country code TLDs
        assertTrue(".uk should validate as ccTLD", validator.isValidCountryCodeTld(".uk"));
        assertFalse(".org shouldn't validate as ccTLD", validator.isValidCountryCodeTld(".org"));
        // case-insensitive
        assertTrue(".COM should validate as TLD", validator.isValidTld(".COM"));
        assertTrue(".BiZ should validate as TLD", validator.isValidTld(".BiZ"));
        // corner cases
        assertFalse("invalid TLD shouldn't validate", validator.isValid(".nope")); // TODO this is not guaranteed invalid forever
        assertFalse("empty string shouldn't validate as TLD", validator.isValid(""));
        assertFalse("null shouldn't validate as TLD", validator.isValid(null));
    }
public void testAllowLocal() {
final DomainValidator noLocal = DomainValidator.getInstance(false);
final DomainValidator allowLocal = DomainValidator.getInstance(true);
// Default is false, and should use singletons
assertEquals(noLocal, validator);
// Default won't allow local
assertFalse("localhost.localdomain should validate", noLocal.isValid("localhost.localdomain"));
assertFalse("localhost should validate", noLocal.isValid("localhost"));
// But it may be requested
assertTrue("localhost.localdomain should validate", allowLocal.isValid("localhost.localdomain"));
assertTrue("localhost should validate", allowLocal.isValid("localhost"));
assertTrue("hostname should validate", allowLocal.isValid("hostname"));
assertTrue("machinename should validate", allowLocal.isValid("machinename"));
// Check the localhost one with a few others
assertTrue("apache.org should validate", allowLocal.isValid("apache.org"));
assertFalse("domain name with spaces shouldn't validate", allowLocal.isValid(" apache.org "));
}
    // Punycode (ASCII-compatible) form of b\u00fccher.ch works on any Java version
    public void testIDN() {
       assertTrue("b\u00fccher.ch in IDN should validate", validator.isValid("www.xn--bcher-kva.ch"));
    }
    // Unicode (non-punycode) domains need java.net.IDN, available from Java 6;
    // skip silently on older runtimes.
    public void testIDNJava6OrLater() {
        final String version = System.getProperty("java.version");
        if (version.compareTo("1.6") < 0) {
            System.out.println("Cannot run Unicode IDN tests");
            return; // Cannot run the test
        } // xn--d1abbgf6aiiy.xn--p1ai http://президент.рф
        assertTrue("b\u00fccher.ch should validate", validator.isValid("www.b\u00fccher.ch"));
        assertTrue("xn--d1abbgf6aiiy.xn--p1ai should validate", validator.isValid("xn--d1abbgf6aiiy.xn--p1ai"));
        assertTrue("президент.рф should validate", validator.isValid("президент.рф"));
        // U+FFFD (replacement char) indicates mangled input and must be rejected
        assertFalse("www.\uFFFD.ch FFFD should fail", validator.isValid("www.\uFFFD.ch"));
    }
// RFC2396: domainlabel = alphanum | alphanum *( alphanum | "-" ) alphanum
public void testRFC2396domainlabel() { // use fixed valid TLD
assertTrue("a.ch should validate", validator.isValid("a.ch"));
assertTrue("9.ch should validate", validator.isValid("9.ch"));
assertTrue("az.ch should validate", validator.isValid("az.ch"));
assertTrue("09.ch should validate", validator.isValid("09.ch"));
assertTrue("9-1.ch should validate", validator.isValid("9-1.ch"));
assertFalse("91-.ch should not validate", validator.isValid("91-.ch"));
assertFalse("-.ch should not validate", validator.isValid("-.ch"));
}
// RFC2396 toplabel = alpha | alpha *( alphanum | "-" ) alphanum
public void testRFC2396toplabel() {
// These tests use non-existent TLDs so currently need to use a package protected method
assertTrue("a.c (alpha) should validate", validator.isValidDomainSyntax("a.c"));
assertTrue("a.cc (alpha alpha) should validate", validator.isValidDomainSyntax("a.cc"));
assertTrue("a.c9 (alpha alphanum) should validate", validator.isValidDomainSyntax("a.c9"));
assertTrue("a.c-9 (alpha - alphanum) should validate", validator.isValidDomainSyntax("a.c-9"));
assertTrue("a.c-z (alpha - alpha) should validate", validator.isValidDomainSyntax("a.c-z"));
assertFalse("a.9c (alphanum alpha) should fail", validator.isValidDomainSyntax("a.9c"));
assertFalse("a.c- (alpha -) should fail", validator.isValidDomainSyntax("a.c-"));
assertFalse("a.- (-) should fail", validator.isValidDomainSyntax("a.-"));
assertFalse("a.-9 (- alphanum) should fail", validator.isValidDomainSyntax("a.-9"));
}
public void testDomainNoDots() {// rfc1123
assertTrue("a (alpha) should validate", validator.isValidDomainSyntax("a"));
assertTrue("9 (alphanum) should validate", validator.isValidDomainSyntax("9"));
assertTrue("c-z (alpha - alpha) should validate", validator.isValidDomainSyntax("c-z"));
assertFalse("c- (alpha -) should fail", validator.isValidDomainSyntax("c-"));
assertFalse("-c (- alpha) should fail", validator.isValidDomainSyntax("-c"));
assertFalse("- (-) should fail", validator.isValidDomainSyntax("-"));
}
public void testValidator297() {
assertTrue("xn--d1abbgf6aiiy.xn--p1ai should validate", validator.isValid("xn--d1abbgf6aiiy.xn--p1ai")); // This uses a valid TLD
}
// labels are a max of 63 chars and domains 253
public void testValidator306() {
final String longString = "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz0123456789A";
assertEquals(63, longString.length()); // 26 * 2 + 11
assertTrue("63 chars label should validate", validator.isValidDomainSyntax(longString+".com"));
assertFalse("64 chars label should fail", validator.isValidDomainSyntax(longString+"x.com"));
assertTrue("63 chars TLD should validate", validator.isValidDomainSyntax("test."+longString));
assertFalse("64 chars TLD should fail", validator.isValidDomainSyntax("test.x"+longString));
final String longDomain =
longString
+ "." + longString
+ "." + longString
+ "." + longString.substring(0,61)
;
assertEquals(253, longDomain.length());
assertTrue("253 chars domain should validate", validator.isValidDomainSyntax(longDomain));
assertFalse("254 chars domain should fail", validator.isValidDomainSyntax(longDomain+"x"));
}
// Check that IDN.toASCII behaves as it should (when wrapped by DomainValidator.unicodeToASCII)
// Tests show that method incorrectly trims a trailing "." character
public void testUnicodeToASCII() {
final String[] asciidots = {
"",
",",
".", // fails IDN.toASCII, but should pass wrapped version
"a.", // ditto
"a.b",
"a..b",
"a...b",
".a",
"..a",
};
for(final String s : asciidots) {
assertEquals(s,DomainValidator.unicodeToASCII(s));
}
// RFC3490 3.1. 1)
// Whenever dots are used as label separators, the following
// characters MUST be recognized as dots: U+002E (full stop), U+3002
// (ideographic full stop), U+FF0E (fullwidth full stop), U+FF61
// (halfwidth ideographic full stop).
final String otherDots[][] = {
{"b\u3002", "b.",},
{"b\uFF0E", "b.",},
{"b\uFF61", "b.",},
{"\u3002", ".",},
{"\uFF0E", ".",},
{"\uFF61", ".",},
};
for(final String s[] : otherDots) {
assertEquals(s[1],DomainValidator.unicodeToASCII(s[0]));
}
}
// Check if IDN.toASCII is broken or not
public void testIsIDNtoASCIIBroken() {
System.out.println(">>DomainValidatorTest.testIsIDNtoASCIIBroken()");
final String input = ".";
final boolean ok = input.equals(IDN.toASCII(input));
System.out.println("IDN.toASCII is " + (ok? "OK" : "BROKEN"));
final String props[] = {
"java.version", // Java Runtime Environment version
"java.vendor", // Java Runtime Environment vendor
"java.vm.specification.version", // Java Virtual Machine specification version
"java.vm.specification.vendor", // Java Virtual Machine specification vendor
"java.vm.specification.name", // Java Virtual Machine specification name
"java.vm.version", // Java Virtual Machine implementation version
"java.vm.vendor", // Java Virtual Machine implementation vendor
"java.vm.name", // Java Virtual Machine implementation name
"java.specification.version", // Java Runtime Environment specification version
"java.specification.vendor", // Java Runtime Environment specification vendor
"java.specification.name", // Java Runtime Environment specification name
"java.class.version", // Java class format version number
};
for(final String t : props) {
System.out.println(t + "=" + System.getProperty(t));
}
System.out.println("<<DomainValidatorTest.testIsIDNtoASCIIBroken()");
assertTrue(true); // dummy assertion to satisfy lint
}
// Check array is sorted and is lower-case
public void test_INFRASTRUCTURE_TLDS_sortedAndLowerCase() throws Exception {
final boolean sorted = isSortedLowerCase("INFRASTRUCTURE_TLDS");
assertTrue(sorted);
}
// Check array is sorted and is lower-case
public void test_COUNTRY_CODE_TLDS_sortedAndLowerCase() throws Exception {
final boolean sorted = isSortedLowerCase("COUNTRY_CODE_TLDS");
assertTrue(sorted);
}
// Check array is sorted and is lower-case
public void test_GENERIC_TLDS_sortedAndLowerCase() throws Exception {
final boolean sorted = isSortedLowerCase("GENERIC_TLDS");
assertTrue(sorted);
}
// Check array is sorted and is lower-case
public void test_LOCAL_TLDS_sortedAndLowerCase() throws Exception {
final boolean sorted = isSortedLowerCase("LOCAL_TLDS");
assertTrue(sorted);
}
public void testEnumIsPublic() {
assertTrue(Modifier.isPublic(DomainValidator.ArrayType.class.getModifiers()));
}
public void testGetArray() {
assertNotNull(DomainValidator.getTLDEntries(ArrayType.COUNTRY_CODE_MINUS));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.COUNTRY_CODE_PLUS));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.GENERIC_MINUS));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.GENERIC_PLUS));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.LOCAL_MINUS));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.LOCAL_PLUS));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.COUNTRY_CODE_RO));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.GENERIC_RO));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.INFRASTRUCTURE_RO));
assertNotNull(DomainValidator.getTLDEntries(ArrayType.LOCAL_RO));
}
    // Download and process local copy of http://data.iana.org/TLD/tlds-alpha-by-domain.txt
    // Check if the internal TLD table is up to date
    // Check if the internal TLD tables have any spurious entries
    //
    // Maintenance tool, not a unit test: downloads (or reuses cached copies of) the IANA
    // TLD listings under target/, then prints any entries that should be added to or
    // removed from the DomainValidator arrays. Run manually when updating the lists.
    public static void main(final String a[]) throws Exception {
        // Check the arrays first as this affects later checks
        // Doing this here makes it easier when updating the lists
        boolean OK = true;
        for(final String list : new String[]{"INFRASTRUCTURE_TLDS","COUNTRY_CODE_TLDS","GENERIC_TLDS","LOCAL_TLDS"}) {
            OK &= isSortedLowerCase(list);
        }
        if (!OK) {
            // The comparison logic below assumes sorted, lower-case arrays; bail out early
            System.out.println("Fix arrays before retrying; cannot continue");
            return;
        }
        final Set<String> ianaTlds = new HashSet<>(); // keep for comparison with array contents
        final DomainValidator dv = DomainValidator.getInstance();
        // Fetch the authoritative text list; its timestamp gates the html download below
        final File txtFile = new File("target/tlds-alpha-by-domain.txt");
        final long timestamp = download(txtFile, "https://data.iana.org/TLD/tlds-alpha-by-domain.txt", 0L);
        final File htmlFile = new File("target/tlds-alpha-by-domain.html");
        // N.B. sometimes the html file may be updated a day or so after the txt file
        // if the txt file contains entries not found in the html file, try again in a day or two
        download(htmlFile,"https://www.iana.org/domains/root/db", timestamp);
        final BufferedReader br = new BufferedReader(new FileReader(txtFile));
        String line;
        final String header;
        line = br.readLine(); // header
        if (!line.startsWith("# Version ")) {
            br.close();
            throw new IOException("File does not have expected Version header");
        }
        header = line.substring(2); // drop the leading "# "
        final boolean generateUnicodeTlds = false; // Change this to generate Unicode TLDs as well
        // Parse html page to get entries
        final Map<String, String[]> htmlInfo = getHtmlInfo(htmlFile);
        final Map<String, String> missingTLD = new TreeMap<>(); // stores entry and comments as String[]
        final Map<String, String> missingCC = new TreeMap<>();
        // Walk the txt list: anything the validator rejects is a candidate missing entry
        while((line = br.readLine()) != null) {
            if (!line.startsWith("#")) { // skip comment lines
                final String unicodeTld; // only different from asciiTld if that was punycode
                final String asciiTld = line.toLowerCase(Locale.ENGLISH);
                if (line.startsWith("XN--")) {
                    unicodeTld = IDN.toUnicode(line);
                } else {
                    unicodeTld = asciiTld;
                }
                if (!dv.isValidTld(asciiTld)) {
                    final String [] info = htmlInfo.get(asciiTld);
                    if (info != null) {
                        final String type = info[0];
                        final String comment = info[1];
                        if ("country-code".equals(type)) { // Which list to use?
                            missingCC.put(asciiTld, unicodeTld + " " + comment);
                            if (generateUnicodeTlds) {
                                missingCC.put(unicodeTld, asciiTld + " " + comment);
                            }
                        } else {
                            missingTLD.put(asciiTld, unicodeTld + " " + comment);
                            if (generateUnicodeTlds) {
                                missingTLD.put(unicodeTld, asciiTld + " " + comment);
                            }
                        }
                    } else {
                        // txt and html listings are out of sync (see note above about update lag)
                        System.err.println("Expected to find HTML info for "+ asciiTld);
                    }
                }
                ianaTlds.add(asciiTld);
                // Don't merge these conditions; generateUnicodeTlds is final so needs to be separate to avoid a warning
                if (generateUnicodeTlds && !unicodeTld.equals(asciiTld)) {
                    ianaTlds.add(unicodeTld);
                }
            }
        }
        br.close();
        // List html entries not in TLD text list
        for(final String key : (new TreeMap<>(htmlInfo)).keySet()) {
            if (!ianaTlds.contains(key)) {
                if (isNotInRootZone(key)) {
                    System.out.println("INFO: HTML entry not yet in root zone: "+key);
                } else {
                    System.err.println("WARN: Expected to find text entry for html: "+key);
                }
            }
        }
        if (!missingTLD.isEmpty()) {
            printMap(header, missingTLD, "TLD");
        }
        if (!missingCC.isEmpty()) {
            printMap(header, missingCC, "CC");
        }
        // Check if internal tables contain any additional entries
        isInIanaList("INFRASTRUCTURE_TLDS", ianaTlds);
        isInIanaList("COUNTRY_CODE_TLDS", ianaTlds);
        isInIanaList("GENERIC_TLDS", ianaTlds);
        // Don't check local TLDS isInIanaList("LOCAL_TLDS", ianaTlds);
        System.out.println("Finished checks");
    }
private static void printMap(final String header, final Map<String, String> map, final String string) {
System.out.println("Entries missing from "+ string +" List\n");
if (header != null) {
System.out.println(" // Taken from " + header);
}
final Iterator<Map.Entry<String, String>> it = map.entrySet().iterator();
while(it.hasNext()){
final Map.Entry<String, String> me = it.next();
System.out.println(" \"" + me.getKey() + "\", // " + me.getValue());
}
System.out.println("\nDone");
}
private static Map<String, String[]> getHtmlInfo(final File f) throws IOException {
final Map<String, String[]> info = new HashMap<>();
// <td><span class="domain tld"><a href="/domains/root/db/ax.html">.ax</a></span></td>
final Pattern domain = Pattern.compile(".*<a href=\"/domains/root/db/([^.]+)\\.html");
// <td>country-code</td>
final Pattern type = Pattern.compile("\\s+<td>([^<]+)</td>");
// <!-- <td>Åland Islands<br/><span class="tld-table-so">Ålands landskapsregering</span></td> </td> -->
// <td>Ålands landskapsregering</td>
final Pattern comment = Pattern.compile("\\s+<td>([^<]+)</td>");
final BufferedReader br = new BufferedReader(new FileReader(f));
String line;
while((line=br.readLine())!=null){
final Matcher m = domain.matcher(line);
if (m.lookingAt()) {
final String dom = m.group(1);
String typ = "??";
String com = "??";
line = br.readLine();
while (line.matches("^\\s*$")) { // extra blank lines introduced
line = br.readLine();
}
final Matcher t = type.matcher(line);
if (t.lookingAt()) {
typ = t.group(1);
line = br.readLine();
if (line.matches("\\s+<!--.*")) {
while(!line.matches(".*-->.*")){
line = br.readLine();
}
line = br.readLine();
}
// Should have comment; is it wrapped?
while(!line.matches(".*</td>.*")){
line += " " +br.readLine();
}
final Matcher n = comment.matcher(line);
if (n.lookingAt()) {
com = n.group(1);
}
// Don't save unused entries
if (com.contains("Not assigned") || com.contains("Retired") || typ.equals("test")) {
// System.out.println("Ignored: " + typ + " " + dom + " " +com);
} else {
info.put(dom.toLowerCase(Locale.ENGLISH), new String[]{typ, com});
// System.out.println("Storing: " + typ + " " + dom + " " +com);
}
} else {
System.err.println("Unexpected type: " + line);
}
}
}
br.close();
return info;
}
/*
* Download a file if it is more recent than our cached copy.
* Unfortunately the server does not seem to honor If-Modified-Since for the
* Html page, so we check if it is newer than the txt file and skip download if so
*/
private static long download(final File f, final String tldurl, final long timestamp) throws IOException {
final int HOUR = 60*60*1000; // an hour in ms
final long modTime;
// For testing purposes, don't download files more than once an hour
if (f.canRead()) {
modTime = f.lastModified();
if (modTime > System.currentTimeMillis()-HOUR) {
System.out.println("Skipping download - found recent " + f);
return modTime;
}
} else {
modTime = 0;
}
final HttpURLConnection hc = (HttpURLConnection) new URL(tldurl).openConnection();
if (modTime > 0) {
final SimpleDateFormat sdf = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");//Sun, 06 Nov 1994 08:49:37 GMT
final String since = sdf.format(new Date(modTime));
hc.addRequestProperty("If-Modified-Since", since);
System.out.println("Found " + f + " with date " + since);
}
if (hc.getResponseCode() == 304) {
System.out.println("Already have most recent " + tldurl);
} else {
System.out.println("Downloading " + tldurl);
final byte buff[] = new byte[1024];
final InputStream is = hc.getInputStream();
final FileOutputStream fos = new FileOutputStream(f);
int len;
while((len=is.read(buff)) != -1) {
fos.write(buff, 0, len);
}
fos.close();
is.close();
System.out.println("Done");
}
return f.lastModified();
}
/**
* Check whether the domain is in the root zone currently.
* Reads the URL http://www.iana.org/domains/root/db/*domain*.html
* (using a local disk cache)
* and checks for the string "This domain is not present in the root zone at this time."
* @param domain the domain to check
* @return true if the string is found
*/
private static boolean isNotInRootZone(final String domain) {
final String tldurl = "http://www.iana.org/domains/root/db/" + domain + ".html";
final File rootCheck = new File("target","tld_" + domain + ".html");
BufferedReader in = null;
try {
download(rootCheck, tldurl, 0L);
in = new BufferedReader(new FileReader(rootCheck));
String inputLine;
while ((inputLine = in.readLine()) != null) {
if (inputLine.contains("This domain is not present in the root zone at this time.")) {
return true;
}
}
in.close();
} catch (final IOException e) {
} finally {
closeQuietly(in);
}
return false;
}
private static void closeQuietly(final Closeable in) {
if (in != null) {
try {
in.close();
} catch (final IOException e) {
}
}
}
// isInIanaList and isSorted are split into two methods.
// If/when access to the arrays is possible without reflection, the intermediate
// methods can be dropped
    /**
     * Looks up the named static String[] field of {@link DomainValidator} via
     * reflection and checks its contents against the set of IANA TLDs.
     *
     * @param arrayName name of the (possibly private) static String[] field
     * @param ianaTlds the set of TLDs taken from the IANA listing
     * @return the result of checking the array contents against ianaTlds
     * @throws Exception if the field cannot be found or accessed
     */
    private static boolean isInIanaList(final String arrayName, final Set<String> ianaTlds) throws Exception {
        final Field f = DomainValidator.class.getDeclaredField(arrayName);
        final boolean isPrivate = Modifier.isPrivate(f.getModifiers());
        if (isPrivate) {
            f.setAccessible(true); // temporarily open access to the private field
        }
        final String[] array = (String[]) f.get(null); // static field: no instance required
        try {
            return isInIanaList(arrayName, array, ianaTlds);
        } finally {
            if (isPrivate) {
                f.setAccessible(false); // restore the original accessibility
            }
        }
    }
private static boolean isInIanaList(final String name, final String [] array, final Set<String> ianaTlds) {
for (final String element : array) {
if (!ianaTlds.contains(element)) {
System.out.println(name + " contains unexpected value: " + element);
}
}
return true;
}
    /**
     * Looks up the named static String[] field of {@link DomainValidator} via
     * reflection and checks that it is strictly sorted and lower case.
     *
     * @param arrayName name of the (possibly private) static String[] field
     * @return true if the array is strictly sorted and all lower case
     * @throws Exception if the field cannot be found or accessed
     */
    private static boolean isSortedLowerCase(final String arrayName) throws Exception {
        final Field f = DomainValidator.class.getDeclaredField(arrayName);
        final boolean isPrivate = Modifier.isPrivate(f.getModifiers());
        if (isPrivate) {
            f.setAccessible(true); // temporarily open access to the private field
        }
        final String[] array = (String[]) f.get(null); // static field: no instance required
        try {
            return isSortedLowerCase(arrayName, array);
        } finally {
            if (isPrivate) {
                f.setAccessible(false); // restore the original accessibility
            }
        }
    }
private static boolean isLowerCase(final String string) {
return string.equals(string.toLowerCase(Locale.ENGLISH));
}
// Check if an array is strictly sorted - and lowerCase
private static boolean isSortedLowerCase(final String name, final String [] array) {
boolean sorted = true;
boolean strictlySorted = true;
final int length = array.length;
boolean lowerCase = isLowerCase(array[length-1]); // Check the last entry
for(int i = 0; i < length-1; i++) { // compare all but last entry with next
final String entry = array[i];
final String nextEntry = array[i+1];
final int cmp = entry.compareTo(nextEntry);
if (cmp > 0) { // out of order
System.out.println("Out of order entry: " + entry + " < " + nextEntry + " in " + name);
sorted = false;
} else if (cmp == 0) {
strictlySorted = false;
System.out.println("Duplicated entry: " + entry + " in " + name);
}
if (!isLowerCase(entry)) {
System.out.println("Non lowerCase entry: " + entry + " in " + name);
lowerCase = false;
}
}
return sorted && strictlySorted && lowerCase;
}
}
| Use Java 5 for each loop
| src/test/java/org/apache/commons/validator/routines/DomainValidatorTest.java | Use Java 5 for each loop | <ide><path>rc/test/java/org/apache/commons/validator/routines/DomainValidatorTest.java
<ide> import java.util.Date;
<ide> import java.util.HashMap;
<ide> import java.util.HashSet;
<del>import java.util.Iterator;
<ide> import java.util.Locale;
<ide> import java.util.Map;
<add>import java.util.Map.Entry;
<ide> import java.util.Set;
<ide> import java.util.TreeMap;
<ide> import java.util.regex.Matcher;
<ide> if (header != null) {
<ide> System.out.println(" // Taken from " + header);
<ide> }
<del> final Iterator<Map.Entry<String, String>> it = map.entrySet().iterator();
<del> while(it.hasNext()){
<del> final Map.Entry<String, String> me = it.next();
<add> for (Entry<String, String> me : map.entrySet()) {
<ide> System.out.println(" \"" + me.getKey() + "\", // " + me.getValue());
<ide> }
<ide> System.out.println("\nDone"); |
Subsets and Splits